Repository: apache/spark-connect-go Branch: master Commit: 0e3d565b63e6 Files: 104 Total size: 2.1 MB Directory structure: gitextract_ljoq267_/ ├── .asf.yaml ├── .github/ │ ├── PULL_REQUEST_TEMPLATE │ ├── dependabot.yml │ └── workflows/ │ └── build.yml ├── .gitignore ├── .gitmodules ├── .golangci.yml ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── NOTICE ├── README.md ├── buf.gen.yaml ├── buf.work.yaml ├── cmd/ │ ├── spark-connect-example-raw-grpc-client/ │ │ └── main.go │ └── spark-connect-example-spark-session/ │ └── main.go ├── dev/ │ ├── .rat-excludes │ ├── README.md │ ├── check-license │ ├── gen.py │ ├── release.py │ └── requirements.txt ├── go.mod ├── go.sum ├── internal/ │ ├── generated/ │ │ ├── base.pb.go │ │ ├── base_grpc.pb.go │ │ ├── catalog.pb.go │ │ ├── commands.pb.go │ │ ├── common.pb.go │ │ ├── example_plugins.pb.go │ │ ├── expressions.pb.go │ │ ├── ml.pb.go │ │ ├── ml_common.pb.go │ │ ├── pipelines.pb.go │ │ ├── relations.pb.go │ │ └── types.pb.go │ └── tests/ │ └── integration/ │ ├── dataframe_test.go │ ├── functions_test.go │ ├── helper.go │ ├── spark_runner.go │ └── sql_test.go ├── java/ │ ├── .gitignore │ ├── README.md │ ├── build.sbt │ ├── run.sh │ └── src/ │ └── main/ │ └── scala/ │ └── org/ │ └── apache/ │ └── spark/ │ └── golang/ │ └── Runner.scala ├── merge_connect_go_pr.py ├── quick-start.md └── spark/ ├── client/ │ ├── base/ │ │ └── base.go │ ├── channel/ │ │ ├── channel.go │ │ ├── channel_test.go │ │ └── compat.go │ ├── client.go │ ├── client_test.go │ ├── conf.go │ ├── options/ │ │ └── options.go │ ├── retry.go │ ├── retry_test.go │ └── testutils/ │ └── utils.go ├── mocks/ │ ├── mock_executor.go │ └── mocks.go ├── sparkerrors/ │ ├── errors.go │ └── errors_test.go ├── sql/ │ ├── column/ │ │ ├── column.go │ │ ├── column_test.go │ │ ├── expressions.go │ │ └── expressions_test.go │ ├── dataframe.go │ ├── dataframe_test.go │ ├── dataframenafunctions.go │ ├── dataframereader.go │ ├── dataframereader_test.go │ ├── dataframestatfunctions.go │ 
├── dataframewriter.go │ ├── dataframewriter_test.go │ ├── executeplanclient.go │ ├── functions/ │ │ ├── buiitins.go │ │ └── generated.go │ ├── group.go │ ├── group_test.go │ ├── mocks_test.go │ ├── plan.go │ ├── plan_test.go │ ├── sparksession.go │ ├── sparksession_integration_test.go │ ├── sparksession_test.go │ ├── types/ │ │ ├── arrow.go │ │ ├── arrow_test.go │ │ ├── builtin.go │ │ ├── builtin_test.go │ │ ├── conversion.go │ │ ├── conversion_test.go │ │ ├── datatype.go │ │ ├── datatype_test.go │ │ ├── row.go │ │ ├── row_json_test.go │ │ ├── row_test.go │ │ ├── structtype.go │ │ └── structtype_test.go │ └── utils/ │ ├── check.go │ ├── check_test.go │ ├── consts.go │ └── consts_test.go └── version.go ================================================ FILE CONTENTS ================================================ ================================================ FILE: .asf.yaml ================================================ # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# https://cwiki.apache.org/confluence/display/INFRA/git+-+.asf.yaml+features --- github: description: "Apache Spark Connect Client for Golang" homepage: https://spark.apache.org/ enabled_merge_buttons: merge: false squash: true rebase: true features: # Enable the "Issues" tab issues: true # Enable the "Projects" tab projects: true notifications: pullrequests: reviews@spark.apache.org issues: reviews@spark.apache.org commits: commits@spark.apache.org ================================================ FILE: .github/PULL_REQUEST_TEMPLATE ================================================ ### What changes were proposed in this pull request? ### Why are the changes needed? ### Does this PR introduce _any_ user-facing change? ### How was this patch tested? ================================================ FILE: .github/dependabot.yml ================================================ # To get started with Dependabot version updates, you'll need to specify which # package ecosystems to update and where the package manifests are located. # Please see the documentation for all configuration options: # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file version: 2 updates: - package-ecosystem: "gomod" # See documentation for possible values directory: "/" # Location of package manifests schedule: interval: "weekly" ================================================ FILE: .github/workflows/build.yml ================================================ # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. 
You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # Intentionally has a general name. # because the test status check created in GitHub Actions # currently randomly picks any associated workflow. # So, the name was changed to make sense in that context too. # See also https://github.community/t/specify-check-suite-when-creating-a-checkrun/118380/10 name: "On pull requests" on: workflow_dispatch: pull_request: push: branches: - master env: SPARK_VERSION: '4.0.0' HADOOP_VERSION: '3' permissions: # Required: allow read access to the content for analysis. contents: read # Optional: allow read access to pull request. Use with `only-new-issues` option. pull-requests: read # Optional: allow write access to checks to allow the action to annotate code in the PR. checks: write jobs: build: name: Build & Test Client runs-on: ubuntu-latest steps: - name: Checkout Repository uses: actions/checkout@v4 with: submodules: recursive - uses: actions/setup-go@v5 name: Setup Go with: go-version-file: 'go.mod' - uses: actions/setup-python@v5 with: python-version: '3.10' - uses: actions/setup-java@v4 with: java-version: '17' distribution: zulu - name: Cache Spark Installation uses: actions/cache@v4 id: cache with: key: v2-spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }} path: | /home/runner/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }} - name: Setup SBT uses: sbt/setup-sbt@v1 - name: Setup Apache Spark if: steps.cache.outputs.cache-hit != 'true' run: | set -x echo "Apache Spark is not installed" # Access the directory. 
mkdir -p ~/deps/ wget -q https://dlcdn.apache.org/spark/spark-${{ env.SPARK_VERSION }}/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }}.tgz tar -xzf spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }}.tgz -C ~/deps/ # Delete the old file rm spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }}.tgz ls -lah ~/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }} du -hs ~/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }} # Setup the Environment Variables echo "Apache Spark is ready to use" echo "SPARK_HOME=/home/runner/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }}" >> "$GITHUB_ENV" - name: Run Build & Test run: | go mod download -x # Remove dependency on gen until Spark 4 has the fix for the pipelines.proto # make gen make make test - name: Run Example Spark Submit Application run: | export SPARK_HOME=/home/runner/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }} make cd java sbt publishLocal ./run.sh ../cmd/spark-connect-example-spark-session/spark-connect-example-spark-session - name: Run Integration Test run: | export SPARK_HOME=/home/runner/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }} # Remove dependency on gen until Spark 4 has the fix for the pipelines.proto # make gen make && make integration - name: Run Code Coverage run: | export SPARK_HOME=/home/runner/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }} make coverage # Disable the code coverage check for now until https://github.com/PaloAltoNetworks/cov/issues/8 is fixed. 
# - uses: PaloAltoNetworks/cov@3.0.0 # with: # cov_mode: coverage # main_branch: master # cov_threshold: 60 - name: golangci-lint uses: golangci/golangci-lint-action@v8 with: version: v2.1 ================================================ FILE: .gitignore ================================================ # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # All generated files internal/generated.out # Ignore Coverage Files coverage* cov.report # Ignore IDE files .idea/ # Ignore binaries cmd/spark-connect-example-raw-grpc-client/spark-connect-example-raw-grpc-client cmd/spark-connect-example-spark-session/spark-connect-example-spark-session target lib deps .DS_Store ================================================ FILE: .gitmodules ================================================ [submodule "spark"] path = sparksrc url = https://github.com/apache/spark.git [submodule "sparksrc"] branch = branch-4.0 ================================================ FILE: .golangci.yml ================================================ # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. 
# The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # version: "2" linters: exclusions: generated: lax presets: - comments - common-false-positives - legacy - std-error-handling paths: - third_party$ - builtin$ - examples$ formatters: enable: - gofumpt exclusions: generated: lax paths: - third_party$ - builtin$ - examples$ ================================================ FILE: CONTRIBUTING.md ================================================ ## Contributing to Spark *Before opening a pull request*, review the [Contributing to Spark guide](https://spark.apache.org/contributing.html). It lists steps that are required before creating a PR. In particular, consider: - Is the change important and ready enough to ask the community to spend time reviewing? - Have you searched for existing, related JIRAs and pull requests? - Is this a new feature that can stand alone as a [third party project](https://spark.apache.org/third-party-projects.html) ? - Is the change being proposed clearly explained and motivated? When you contribute code, you affirm that the contribution is your original work and that you license the work to the project under the project's open source license. Whether or not you state this explicitly, by submitting any copyrighted material via pull request, email, or other means you agree to license the material under the project's open source license and warrant that you have the legal authority to do so. 
### Code Style and Checks When submitting code we use a number of checks in our continuous integration system to ensure a consistent style and adherence to license rules. You can run these checks locally by running: ```bash make check ``` This requires the following tools to be present in your `PATH`: 1. Java for checking license headers 2. [gofumpt](https://github.com/mvdan/gofumpt) for formatting Go code 3. [golangci-lint](https://golangci-lint.run/) for linting Go code ### Running Tests To run the tests locally, you can run: ```bash make test ``` This will run the unit tests. If you want to run the integration tests, you can run (you need to set environment variable `SPARK_HOME` pointing to existing directory with unpacked Apache Spark 3.5+ distribution): ```bash make integration ``` Lastly, if you want to run all tests (unit and integration) and generate the coverage analysis, you can run: ```bash make fulltest ``` The output of the coverage analysis will be in the `coverage.out` file. An HTML version of the coverage report is generated and accessible at `coverage.html`. ### How to write tests Please make sure that you have proper testing for the new code you're adding. As part of the code base we started to add mocks that allow you to simulate a lot of the necessary API and don't require a running Spark instance. `mock.ProtoClient` is a mock implementation of the `SparkConnectService_ExecutePlanClient` interface which is the server-side stream of messages coming as a response from the server. `testutils.NewConnectServiceClientMock` will create a mock client that implements the `SparkConnectServiceClient` interface. The combination of these two mocks allows you to test the client side of the code without having to connect to Spark. ### What to contribute We welcome contributions of all kinds to the `spark-connect-go` project. Some examples of contributions are providing implementations of functionality that is missing in the Go implementation. 
Some examples are, but are not limited to: * Adding an existing feature of the DataFrame API in Golang. * Adding support for a builtin function in the Spark API in Golang. * Improving error handling in the client. If you are unsure about whether a contribution is a good fit, feel free to open an issue in the Apache Spark Jira. ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: Makefile ================================================ # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # FIRST_GOPATH := $(firstword $(subst :, ,$(GOPATH))) PKGS := $(shell go list ./... | grep -v /tests | grep -v /xcpb | grep -v /gpb | grep -v /generated) GOFILES_NOVENDOR := $(shell find . -name vendor -prune -o -type f -name '*.go' -not -name '*.pb.go' -print) GOFILES_BUILD := $(shell find . -type f -name '*.go' -not -name '*_test.go') PROTOFILES := $(shell find . -name vendor -prune -o -type f -name '*.proto' -print) ALLGOFILES := $(shell find . 
-type f -name '*.go' -not -name '*.pb.go') DATE := $(shell date -u -d "@$(SOURCE_DATE_EPOCH)" '+%FT%T%z' 2>/dev/null || date -u '+%FT%T%z') BUILDFLAGS_NOPIE := BUILDFLAGS ?= $(BUILDFLAGS_NOPIE) -buildmode=pie TESTFLAGS ?= PWD := $(shell pwd) PREFIX ?= $(GOPATH) BINDIR ?= $(PREFIX)/bin GO := go GOOS ?= $(shell go version | cut -d' ' -f4 | cut -d'/' -f1) GOARCH ?= $(shell go version | cut -d' ' -f4 | cut -d'/' -f2) TAGS ?= netgo SHELL = bash GOFUMPT_SPLIT_LONG_LINES := on ## Build tools BUF := $(GO) run github.com/bufbuild/buf/cmd/buf@v1.26.1 BINARIES := cmd/spark-connect-example-spark-session/spark-connect-example-spark-session cmd/spark-connect-example-raw-grpc-client/spark-connect-example-raw-grpc-client # Define the location of SPARK_HOME because we need that to depend on the build paths MAKEFILE_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) PROTO_SRC = $(shell find internal/generated -type f -name *.proto ) OK := $(shell tput setaf 6; echo ' [OK]'; tput sgr0;) all: build build: $(BUILD_OUTPUT) $(BINARIES) cmd/spark-connect-example-raw-grpc-client/spark-connect-example-raw-grpc-client: $(GOFILES_BUILD) @echo ">> BUILD, output = $@" @cd $(dir $@) && $(GO) build -o $(notdir $@) $(BUILDFLAGS) @printf '%s\n' '$(OK)' cmd/spark-connect-example-spark-session/spark-connect-example-spark-session: $(GOFILES_BUILD) @echo ">> BUILD, output = $@" @cd $(dir $@) && $(GO) build -o $(notdir $@) $(BUILDFLAGS) @printf '%s\n' '$(OK)' internal/generated.out: @echo -n ">> BUILD, output = $@" $(BUF) generate --debug -vvv @touch internal/generated.out @printf '%s\n' '$(OK)' gen: internal/generated.out $(GOFILES_BUILD): $(BUILD_OUTPUT): $(GOFILES_BUILD) @echo -n ">> BUILD, output = $@" @$(GO) build -o $@ $(BUILDFLAGS) @printf '%s\n' '$(OK)' lint: $(BUILD_OUTPUT) @golangci-lint run fmt: @echo -n ">> glongci-lint: fix" env GOFUMPT_SPLIT_LONG_LINES=$(GOFUMPT_SPLIT_LONG_LINES) golangci-lint run --fix test: $(BUILD_OUTPUT) @echo ">> TEST, \"verbose\"" @$(foreach pkg, 
$(PKGS),\ @echo -n " ";\ $(GO) test -v -run '(Test|Example)' $(BUILDFLAGS) $(TESTFLAGS) $(pkg) || exit 1) coverage: $(BUILD_OUTPUT) @echo ">> TEST, \"coverage\"" @$(GO) test -cover -coverprofile=coverage.out -covermode=atomic -coverpkg=./spark/...,./internal/tests/... ./spark/... ./internal/tests/... @$(GO) tool cover -html=coverage.out -o coverage.html integration: $(BUILD_OUTPUT) @echo ">> TEST, \"integration\"" @$(GO) test ./internal/tests/... check: @echo -n ">> CHECK" ./dev/check-license @echo -n ">> glongci-lint: " env GOFUMPT_SPLIT_LONG_LINES=$(GOFUMPT_SPLIT_LONG_LINES) golangci-lint run clean: @echo -n ">> CLEAN" @$(GO) clean -i ./... @rm -f ./coverage-all.html @rm -f ./coverage-all.out @rm -f ./coverage.out @find . -type f -name "coverage.out" -delete @printf '%s\n' '$(OK)' cleangen: @rm -rf ./internal/generated @rm -f ./internal/generated.out cleanall: clean cleangen ================================================ FILE: NOTICE ================================================ Apache Spark Copyright 2014 and onwards The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (http://www.apache.org/). ================================================ FILE: README.md ================================================ # Apache Spark Connect Client for Golang This project houses the **experimental** client for [Spark Connect](https://spark.apache.org/docs/latest/spark-connect-overview.html) for [Apache Spark](https://spark.apache.org/) written in [Golang](https://go.dev/). ## Current State of the Project Currently, the Spark Connect client for Golang is highly experimental and should not be used in any production setting. In addition, the PMC of the Apache Spark project reserves the right to withdraw and abandon the development of this project if it is not sustainable. ## Getting started This section explains how to run Spark Connect Go locally. Step 1: Install Golang: https://go.dev/doc/install. 
Step 2: Ensure you have the `buf CLI` installed, [more info here](https://buf.build/docs/installation/) Step 3: Run the following commands to set up the Spark Connect client. Building with Spark in case you need to re-generate the source files from the proto sources. ``` git clone https://github.com/apache/spark-connect-go.git git submodule update --init --recursive make gen && make test ``` Building without Spark ``` git clone https://github.com/apache/spark-connect-go.git make && make test ``` Step 4: Set up the Spark Driver on localhost. 1. [Download Spark distribution](https://spark.apache.org/downloads.html) (4.0.0+), unzip the package. 2. Start the Spark Connect server with the following command (make sure to use a package version that matches your Spark distribution): ``` sbin/start-connect-server.sh ``` Step 5: Run the example Go application. ``` go run cmd/spark-connect-example-spark-session/main.go ``` ## Running Spark Connect Go Application in a Spark Cluster To run the Spark Connect Go application in a Spark Cluster, you need to build the Go application and submit it to the Spark Cluster. You can find a more detailed example runner and wrapper script in the `java` directory. See the guide here: [Sample Spark-Submit Wrapper](java/README.md). ## How to write Spark Connect Go Application in your own project See [Quick Start Guide](quick-start.md) ## High Level Design The overall goal of the design is to find a good balance of the principle of least surprise for developers that are familiar with the APIs of Apache Spark and idiomatic Go usage. The high-level structure of the packages follows roughly the PySpark guidance but with Go idioms. ## Contributing Please review the [Contribution to Spark guide](https://spark.apache.org/contributing.html) for information on how to get started contributing to the project. 
================================================ FILE: buf.gen.yaml ================================================ # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # version: v1 plugins: - plugin: buf.build/protocolbuffers/go:v1.30.0 out: . - plugin: buf.build/grpc/go:v1.3.0 out: . ================================================ FILE: buf.work.yaml ================================================ # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# version: v1 directories: - sparksrc/sql/connect/common/src/main/protobuf ================================================ FILE: cmd/spark-connect-example-raw-grpc-client/main.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package main import ( "context" "flag" "log" "time" proto "github.com/apache/spark-connect-go/internal/generated" "github.com/google/uuid" "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" ) var remote = flag.String("remote", "localhost:15002", "the remote address of Spark Connect server to connect to") func main() { opts := []grpc.DialOption{ grpc.WithTransportCredentials(insecure.NewCredentials()), } conn, err := grpc.NewClient(*remote, opts...) 
if err != nil { log.Fatalf("Failed: %s", err) } defer conn.Close() client := proto.NewSparkConnectServiceClient(conn) ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() configRequest := proto.ConfigRequest{ SessionId: uuid.NewString(), Operation: &proto.ConfigRequest_Operation{ OpType: &proto.ConfigRequest_Operation_GetAll{ GetAll: &proto.ConfigRequest_GetAll{}, }, }, } configResponse, err := client.Config(ctx, &configRequest) if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("configResponse: %v", configResponse) } ================================================ FILE: cmd/spark-connect-example-spark-session/main.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package main import ( "context" "flag" "fmt" "log" "github.com/apache/spark-connect-go/spark/sql/types" "github.com/apache/spark-connect-go/spark/sql/functions" "github.com/apache/spark-connect-go/spark/sql" "github.com/apache/spark-connect-go/spark/sql/utils" ) var ( remote = flag.String("remote", "sc://localhost:15002", "the remote address of Spark Connect server to connect to") filedir = flag.String("filedir", "/tmp", "the root directory to save the files generated by this example program") ) func main() { flag.Parse() ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote(*remote).Build(ctx) if err != nil { log.Fatalf("Failed: %s", err) } defer utils.WarnOnError(spark.Stop, func(err error) {}) df, err := spark.Sql(ctx, "select id from range(100)") if err != nil { log.Fatalf("Failed: %s", err) } df, _ = df.FilterByString(ctx, "id < 10") err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } df, err = spark.Sql(ctx, "select * from range(100)") if err != nil { log.Fatalf("Failed: %s", err) } df, _ = df.Filter(ctx, functions.Col("id").Lt(functions.Expr("10"))) err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } df, _ = spark.Sql(ctx, "select * from range(100)") df, err = df.Filter(ctx, functions.Col("id").Lt(functions.Lit(types.Int64(20)))) if err != nil { log.Fatalf("Failed: %s", err) } err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } df, err = spark.Sql(ctx, "select 'apple' as word, 123 as count union all select 'orange' as word, 456 as count") if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("DataFrame from sql: select 'apple' as word, 123 as count union all select 'orange' as word, 456 as count") err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } schema, err := df.Schema(ctx) if err != nil { log.Fatalf("Failed: %s", err) } for _, f := range schema.Fields { log.Printf("Field in dataframe schema: %s - %s", f.Name, 
f.DataType.TypeName()) } rows, err := df.Collect(ctx) if err != nil { log.Fatalf("Failed: %s", err) } schema, err = df.Schema(ctx) if err != nil { log.Fatalf("Failed: %s", err) } for _, f := range schema.Fields { log.Printf("Field in row: %s - %s", f.Name, f.DataType.TypeName()) } for _, row := range rows { log.Printf("Row: %v", row) } err = df.Writer().Mode("overwrite"). Format("parquet"). Save(ctx, fmt.Sprintf("file://%s/spark-connect-write-example-output.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } df, err = spark.Read().Format("parquet"). Load(fmt.Sprintf("file://%s/spark-connect-write-example-output.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("DataFrame from reading parquet") err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } err = df.CreateTempView(ctx, "view1", true, false) if err != nil { log.Fatalf("Failed: %s", err) } df, err = spark.Sql(ctx, "select count, word from view1 order by count") if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("DataFrame from sql: select count, word from view1 order by count") err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("Repartition with one partition") df, err = df.Repartition(ctx, 1, nil) if err != nil { log.Fatalf("Failed: %s", err) } err = df.Writer().Mode("overwrite"). Format("parquet"). Save(ctx, fmt.Sprintf("file://%s/spark-connect-write-example-output-one-partition.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("Repartition with two partitions") df, err = df.Repartition(ctx, 2, nil) if err != nil { log.Fatalf("Failed: %s", err) } err = df.Writer().Mode("overwrite"). Format("parquet"). 
Save(ctx, fmt.Sprintf("file://%s/spark-connect-write-example-output-two-partition.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("Repartition with columns") df, err = df.Repartition(ctx, 0, []string{"word", "count"}) if err != nil { log.Fatalf("Failed: %s", err) } err = df.Writer().Mode("overwrite"). Format("parquet"). Save(ctx, fmt.Sprintf("file://%s/spark-connect-write-example-output-repartition-with-column.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("Repartition by range with columns") df, err = df.RepartitionByRange(ctx, 0, functions.Col("word").Desc()) if err != nil { log.Fatalf("Failed: %s", err) } err = df.Writer().Mode("overwrite"). Format("parquet"). Save(ctx, fmt.Sprintf("file:///%s/spark-connect-write-example-output-repartition-by-range-with-column.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } } ================================================ FILE: dev/.rat-excludes ================================================ .gitignore .gitmodules .gitattributes .project coverage* LICENSE NOTICE TAGS RELEASE .*md .rat-excludes sparksrc target generated.out go.sum deps cov.report build.properties spark-connect-go.code-workspace ================================================ FILE: dev/README.md ================================================ # Release Script for Apache Spark Connect Go This directory contains the release automation script for the Apache Spark Connect Go project. ## Prerequisites 1. **Python Environment**: Create a virtual environment and install dependencies: ```bash python -m venv venv source venv/bin/activate # On Windows: venv\Scripts\activate pip install -r requirements.txt ``` 2. **GitHub Token**: Create a GitHub personal access token with the following permissions: - `repo` (Full control of private repositories) - `write:packages` (Upload packages to GitHub Package Registry) 3. 
**GPG Key**: Ensure you have a GPG key set up for signing: ```bash # List available keys gpg --list-secret-keys # If you don't have a key, create one gpg --gen-key ``` ## Usage ```bash ./release.py --tag <tag> --prev-tag <prev-tag> --commit <commit-sha> --gpg-user <gpg-user> [options] ``` ### Required Arguments - `--tag`: The new tag version (e.g., `v0.2.0`) - `--prev-tag`: The previous tag version for generating release notes (e.g., `v0.1.0`) - `--commit`: The commit SHA that the tag should point to - `--gpg-user`: Your GPG user ID for signing (email or key ID) ### Optional Arguments - `--prerelease`: Mark the release as a pre-release - `--repo`: GitHub repository in format `owner/name` (default: `apache/spark-connect-go`) - `--token`: GitHub token (alternatively set `GITHUB_TOKEN` environment variable) ### Environment Variables - `GITHUB_TOKEN`: GitHub personal access token ## Example Usage ```bash # Set GitHub token export GITHUB_TOKEN=ghp_your_token_here # Create a regular release ./release.py \ --tag v0.2.0 \ --prev-tag v0.1.0 \ --commit abc123def456 \ --gpg-user your.email@example.com # Create a pre-release ./release.py \ --tag v0.2.0-rc1 \ --prev-tag v0.1.0 \ --commit abc123def456 \ --gpg-user your.email@example.com \ --prerelease ``` ## What the Script Does 1. **Creates and pushes tag**: Creates a Git tag at the specified commit and pushes it to GitHub 2. **Generates release notes**: Automatically creates initial release notes from commits between tags 3. **Interactive input**: Prompts you to enter/modify the release description 4. **Creates GitHub release**: Creates a draft release on GitHub 5. **Downloads artifacts**: Downloads the automatically generated source archives (.tar.gz, .zip) 6. **Signs artifacts**: Creates detached GPG signatures for each artifact 7. **Verifies signatures**: Confirms that all signatures are valid 8. 
**Uploads signatures**: Uploads the signature files to the GitHub release ## Output The script creates: - A new Git tag pushed to GitHub - A draft GitHub release with: - Source code archives (automatically generated by GitHub) - Detached GPG signatures (.asc files) - Release notes based on commits ## Security Notes - All artifacts are signed with your GPG key - Signatures are verified before upload - The release is created as a draft first for review - Your GPG key must be available in your keyring ## Troubleshooting ### GPG Issues ```bash # If GPG signing fails, check your key gpg --list-secret-keys # Test signing echo "test" | gpg --clearsign --local-user your.email@example.com ``` ### GitHub API Issues - Ensure your token has the correct permissions - Check rate limits if requests fail - Verify repository access ### Git Issues - Ensure you're in the correct repository directory - Check that the commit SHA exists - Verify you have push permissions to the repository ================================================ FILE: dev/check-license ================================================ #!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# acquire_rat_jar () { URL="${DEFAULT_ARTIFACT_REPOSITORY:-https://repo1.maven.org/maven2/}org/apache/rat/apache-rat/${RAT_VERSION}/apache-rat-${RAT_VERSION}.jar" JAR="$rat_jar" # Download rat launch jar if it hasn't been downloaded yet if [ ! -f "$JAR" ]; then # Download printf "Attempting to fetch rat\n" JAR_DL="${JAR}.part" if [ $(command -v curl) ]; then curl -L --silent "${URL}" > "$JAR_DL" && mv "$JAR_DL" "$JAR" elif [ $(command -v wget) ]; then wget --quiet ${URL} -O "$JAR_DL" && mv "$JAR_DL" "$JAR" else printf "You do not have curl or wget installed, please install rat manually.\n" exit -1 fi fi unzip -tq "$JAR" &> /dev/null if [ $? -ne 0 ]; then # We failed to download rm "$JAR" printf "Our attempt to download rat locally to ${JAR} failed. Please install rat manually.\n" exit -1 fi } # Go to the Spark project root directory FWDIR="$(cd "`dirname "$0"`"/..; pwd)" cd "$FWDIR" if test -x "$JAVA_HOME/bin/java"; then declare java_cmd="$JAVA_HOME/bin/java" else declare java_cmd=java fi export RAT_VERSION=0.15 export rat_jar="$FWDIR"/lib/apache-rat-${RAT_VERSION}.jar mkdir -p "$FWDIR"/lib [[ -f "$rat_jar" ]] || acquire_rat_jar || { echo "Download failed. Obtain the rat jar manually and place it at $rat_jar" exit 1 } mkdir -p target $java_cmd -jar "$rat_jar" -E "$FWDIR"/dev/.rat-excludes -d "$FWDIR" > target/rat-results.txt if [ $? -ne 0 ]; then echo "RAT exited abnormally" exit 1 fi ERRORS="$(cat target/rat-results.txt | grep -e "??")" if test ! -z "$ERRORS"; then echo "Could not find Apache license headers in the following files:" echo "$ERRORS" exit 1 else echo -e "RAT checks passed." fi ================================================ FILE: dev/gen.py ================================================ # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. 
# The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This is a basic script to generate the builtin functions based on the # currently available PySpark installation. # Simply call the script as follows: # # python gen.py > spark/client/functions/generated.go import pyspark.sql.connect.functions as F import inspect import typing import types def normalize(input: str) -> str: vals = [x[0].upper() + x[1:] for x in input.split("_")] return "".join(vals) print(""" // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package functions import "github.com/apache/spark-connect-go/spark/sql/column" """) for fun in F.__dict__: if fun.startswith("_"): continue if not callable(F.__dict__[fun]): continue if "pyspark.sql.connect.functions" not in F.__dict__[fun].__module__: continue if fun == "expr" or fun == "col" or fun == "column" or fun == "lit": continue # Ignore the aliases of the old distinct. if "Distinct" in fun: continue sig = inspect.signature(F.__dict__[fun]) # Ignore all functions that take callables as parameters has_callable = False for p in sig.parameters: param = sig.parameters[p] if "Callable" in str(param.annotation): has_callable = True break if has_callable: print(f"// TODO: {fun}: {sig}") print() continue if "udf" in fun.lower(): print(f"// Ignore UDF: {fun}: {sig}") print() continue if "udt" in fun.lower(): print(f"// Ignore UDT: {fun}: {sig}") print() continue # Convert parameters into Golang res_params = [] conversions = [] args = [] valid = True for p in sig.parameters: param = sig.parameters[p] if param.annotation == inspect.Parameter.empty: res_params.append(f"{p} interface{{}}") args.append(p) elif param.kind == inspect.Parameter.VAR_POSITIONAL and param.annotation == "ColumnOrName": res_params.append(f"{p} ...column.Column") conversions.append("vals := make([]column.Column, 0)") for x in args: conversions.append(f"vals = append(vals, {x})") conversions.append(f"vals = append(vals, {p}...)") args = ["vals..."] elif type(param.annotation) == str and str(param.annotation) == "ColumnOrName" and param.kind != inspect.Parameter.VAR_POSITIONAL and param.kind != inspect.Parameter.VAR_KEYWORD: res_params.append(f"{p} column.Column") args.append(p) elif len(typing.get_args(param.annotation)) > 1 and typing.ForwardRef("ColumnOrName") in typing.get_args(param.annotation): # Find the parameter with ColumnOrName tmp = [x for x in typing.get_args(param.annotation) if typing.ForwardRef("ColumnOrName") == x] assert len(tmp) == 1 res_params.append(f"{p} column.Column") 
args.append(p) elif param.annotation == str or typing.get_args(param.annotation) == (str, types.NoneType): res_params.append(f"{p} string") conversions.append(f"lit_{p} := StringLit({p})") args.append(f"lit_{p}") elif param.annotation == int or typing.get_args(param.annotation) == (int, types.NoneType): res_params.append(f"{p} int64") conversions.append(f"lit_{p} := Int64Lit({p})") args.append(f"lit_{p}") elif param.annotation == float or typing.get_args(param.annotation) == (float, types.NoneType): res_params.append(f"{p} float64") conversions.append(f"lit_{p} := Float64Lit({p})") args.append(f"lit_{p}") else: valid = False break if not valid: print(f"// TODO: {fun}: {sig}") print() else: name = normalize(fun) # Generate the doc string if F.__dict__[fun].__doc__ is not None: lines = list(map(str.lstrip, F.__dict__[fun].__doc__.split("\n"))) pos = list(map(lambda x: x.startswith("..") or x.startswith("Parameters"), lines)).index(True) lines = "\n".join(lines[:pos]).strip().split("\n") lines[0] = name + " - " + lines[0] lines = ["// " + l for l in lines] doc = "\n".join(lines) + "\n//" print(doc) print(f"// {name} is the Golang equivalent of {fun}: {sig}") print(f"func {name}({', '.join(res_params)}) column.Column {{") for c in conversions: print(f" {c}") print(f" return column.NewColumn(column.NewUnresolvedFunctionWithColumns(\"{fun}\", {', '.join(args)}))") print(f"}}") print() ================================================ FILE: dev/release.py ================================================ #!/usr/bin/env python3 """ Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import argparse import os import subprocess import sys import tempfile import requests from pathlib import Path from typing import List, Dict, Any import git from github import Github def run_command(cmd: List[str], cwd: str = None, check: bool = True) -> subprocess.CompletedProcess: """Run a shell command and return the result.""" print(f"Running: {' '.join(cmd)}") result = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True, check=False) if result.returncode != 0 and check: print(f"Command failed with return code {result.returncode}") print(f"STDOUT: {result.stdout}") print(f"STDERR: {result.stderr}") sys.exit(1) return result def get_commits_between_tags(repo_path: str, previous_tag: str, commit_sha: str) -> List[Dict[str, str]]: """Get commits between previous tag and current commit.""" try: repo = git.Repo(repo_path) # Get commits from previous tag to current commit commits = list(repo.iter_commits(f"{previous_tag}..{commit_sha}")) commit_info = [] for commit in commits: commit_info.append({ 'sha': commit.hexsha[:8], # Short commit ID 'author': commit.author.name, 'message': commit.message.split('\n')[0] # Subject line only }) return commit_info except Exception as e: print(f"Error getting commits: {e}") return [] def create_release_notes(commits: List[Dict[str, str]]) -> str: """Create initial release notes from commits.""" if not commits: return "## Changes\n\nNo commits found between releases.\n" notes = "## Changes\n\n" for commit in commits: notes += f"* {commit['sha']} - {commit['message']} ({commit['author']})\n" return notes def verify_gpg_key(gpg_user: str) 
-> bool: """Verify that the GPG key exists and can be used for signing.""" try: result = run_command(['gpg', '--list-secret-keys', gpg_user], check=False) return result.returncode == 0 except Exception: return False def sign_file(file_path: str, gpg_user: str) -> str: """Create a detached GPG signature for a file.""" signature_path = f"{file_path}.asc" cmd = [ 'gpg', '--local-user', gpg_user, '--armor', '--detach-sign', file_path ] run_command(cmd) if not os.path.exists(signature_path): raise RuntimeError(f"Signature file {signature_path} was not created") return signature_path def verify_signature(file_path: str, signature_path: str) -> bool: """Verify a GPG signature.""" try: result = run_command(['gpg', '--verify', signature_path, file_path], check=False) return result.returncode == 0 except Exception: return False def download_file(url: str, local_path: str): """Download a file from URL to local path.""" print(f"Downloading {url} to {local_path}") response = requests.get(url, stream=True) response.raise_for_status() with open(local_path, 'wb') as f: for chunk in response.iter_content(chunk_size=8192): f.write(chunk) def upload_release_asset(release, file_path: str): """Upload a file as a release asset.""" print(f"Uploading {file_path} to release") filename = os.path.basename(file_path) # Use the release object's upload_asset method # PyGithub expects: upload_asset(path, label=None, content_type=None, name=None) release.upload_asset(file_path, label=filename, name=filename) def main(): parser = argparse.ArgumentParser(description='Create and sign Apache Spark Connect Go release') parser.add_argument('--tag', required=True, help='New tag version (e.g., v0.2.0)') parser.add_argument('--prev-tag', required=True, help='Previous tag version (e.g., v0.1.0)') parser.add_argument('--commit', required=True, help='Commit SHA for the tag') parser.add_argument('--gpg-user', required=True, help='GPG user ID for signing') parser.add_argument('--prerelease', 
action='store_true', help='Mark as pre-release') parser.add_argument('--repo', default='apache/spark-connect-go', help='GitHub repository (owner/name)') parser.add_argument('--token', help='GitHub token (or set GITHUB_TOKEN env var)') args = parser.parse_args() # Get GitHub token github_token = args.token or os.environ.get('GITHUB_TOKEN') if not github_token: print("Error: GitHub token is required. Use --token or set GITHUB_TOKEN environment variable.") sys.exit(1) # Verify GPG key exists if not verify_gpg_key(args.gpg_user): print(f"Error: GPG key for user '{args.gpg_user}' not found or not usable") sys.exit(1) # Initialize GitHub client github_client = Github(github_token) repo = github_client.get_repo(args.repo) print(f"Creating release for {args.repo}") print(f"Tag: {args.tag}") print(f"Commit: {args.commit}") print(f"Previous tag: {args.prev_tag}") print(f"GPG user: {args.gpg_user}") print(f"Pre-release: {args.prerelease}") # Step 1: Create and push tag print("\n=== Step 1: Creating and pushing tag ===") repo_path = os.getcwd() try: local_repo = git.Repo(repo_path) # Create tag new_tag = local_repo.create_tag(args.tag, ref=args.commit, message=f"Release {args.tag}") print(f"Created tag {args.tag} at commit {args.commit}") # Push tag origin = local_repo.remote('origin') origin.push(new_tag) print(f"Pushed tag {args.tag} to GitHub") except Exception as e: print(f"Error creating/pushing tag: {e}") sys.exit(1) # Step 2: Get commits for release notes print("\n=== Step 2: Generating release notes ===") commits = get_commits_between_tags(repo_path, args.prev_tag, args.commit) initial_release_notes = create_release_notes(commits) # Step 3: Prompt user for release description print("\n=== Step 3: Release description ===") print("Initial release notes based on commits:") print(initial_release_notes) print("\nPlease enter the final release description (press Ctrl+D when done):") lines = [] try: while True: line = input() lines.append(line) except EOFError: pass # Join 
the lines and add the initial release notes final_release_notes = '\n'.join(lines).strip() spacer = "\n\n" if final_release_notes else "" final_release_notes += spacer + initial_release_notes # Step 4: Create GitHub release print("\n=== Step 4: Creating GitHub release ===") try: release = repo.create_git_release( tag=args.tag, name=f"Release {args.tag}", message=final_release_notes, draft=True, prerelease=args.prerelease ) print(f"Created draft release: {release.html_url}") except Exception as e: print(f"Error creating release: {e}") sys.exit(1) # Step 5: Download release artifacts print("\n=== Step 5: Downloading release artifacts ===") # GitHub automatically creates source archives artifacts = [ f"{args.tag}.tar.gz", f"{args.tag}.zip" ] with tempfile.TemporaryDirectory() as temp_dir: downloaded_files = [] for artifact in artifacts: # Construct download URL for source archive download_url = f"https://github.com/{args.repo}/archive/refs/tags/{artifact}" local_file = os.path.join(temp_dir, f"spark-connect-go-{artifact}") try: download_file(download_url, local_file) downloaded_files.append(local_file) except Exception as e: print(f"Error downloading {artifact}: {e}") continue if not downloaded_files: print("Error: No artifacts were downloaded") sys.exit(1) # Step 6: Sign artifacts print("\n=== Step 6: Signing artifacts ===") signatures = [] for file_path in downloaded_files: try: print(f"Signing {os.path.basename(file_path)}") signature_path = sign_file(file_path, args.gpg_user) signatures.append(signature_path) print(f"Created signature: {os.path.basename(signature_path)}") except Exception as e: print(f"Error signing {file_path}: {e}") continue # Step 7: Verify signatures print("\n=== Step 7: Verifying signatures ===") for i, file_path in enumerate(downloaded_files): if i < len(signatures): signature_path = signatures[i] if verify_signature(file_path, signature_path): print(f"✓ Signature verified for {os.path.basename(file_path)}") else: print(f"✗ Signature 
verification failed for {os.path.basename(file_path)}") sys.exit(1) # Step 8: Upload signatures to release print("\n=== Step 8: Uploading signatures to release ===") for signature_path in signatures: try: upload_release_asset(release, signature_path) print(f"Uploaded {os.path.basename(signature_path)}") except Exception as e: print(f"Error uploading {signature_path}: {e}") continue print(f"\n=== Release created successfully ===") print(f"Release URL: {release.html_url}") print(f"Tag: {args.tag}") print(f"Status: Draft") print(f"Pre-release: {args.prerelease}") print("\nNext steps:") print("1. Review the release on GitHub") print("2. Test the release artifacts") print("3. Publish the release when ready") if __name__ == '__main__': main() ================================================ FILE: dev/requirements.txt ================================================ requests>=2.28.0 PyGithub>=1.58.0 gitpython>=3.1.30 ================================================ FILE: go.mod ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
module github.com/apache/spark-connect-go go 1.23.2 require ( github.com/apache/arrow-go/v18 v18.4.0 github.com/go-errors/errors v1.5.1 github.com/google/uuid v1.6.0 github.com/stretchr/testify v1.10.0 google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7 google.golang.org/grpc v1.75.0 google.golang.org/protobuf v1.36.7 ) require ( cloud.google.com/go/compute/metadata v0.7.0 // indirect github.com/kr/pretty v0.3.0 // indirect golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 // indirect golang.org/x/net v0.41.0 // indirect golang.org/x/sync v0.15.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect ) require ( github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/goccy/go-json v0.10.5 // indirect github.com/google/flatbuffers v25.2.10+incompatible // indirect github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/cpuid/v2 v2.2.11 // indirect github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect golang.org/x/mod v0.25.0 // indirect golang.org/x/oauth2 v0.30.0 golang.org/x/sys v0.33.0 // indirect golang.org/x/text v0.26.0 // indirect golang.org/x/tools v0.34.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) ================================================ FILE: go.sum ================================================ cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ= github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY= github.com/apache/arrow-go/v18 v18.4.0 h1:/RvkGqH517iY8bZKc4FD5/kkdwXJGjxf28JIXbJ/oB0= 
github.com/apache/arrow-go/v18 v18.4.0/go.mod h1:Aawvwhj8x2jURIzD9Moy72cF0FyJXOpkYpdmGRHcw14= github.com/apache/thrift v0.22.0 h1:r7mTJdj51TMDe6RtcmNdQxgn9XcyfGDOzegMDRg47uc= github.com/apache/thrift v0.22.0/go.mod h1:1e7J/O1Ae6ZQMTYdy9xa3w9k+XHWPfRvdPyJeynQ+/g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs= github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q= github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/uuid v1.6.0 
h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4OxRzU= github.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 
h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI= go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc= go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps= go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 
h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7 h1:pFyd6EwwL2TqFf8emdthzeX+gZE1ElRq3iM8pui4KBY= google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.75.0 h1:+TW+dqTd2Biwe6KKfhE5JpiYIBWq865PhKGSXiivqt4= google.golang.org/grpc v1.75.0/go.mod 
h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= ================================================ FILE: internal/generated/base.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions:
// 	protoc-gen-go v1.30.0
// 	protoc        (unknown)
// source: spark/connect/base.proto

package generated

import (
	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
	anypb "google.golang.org/protobuf/types/known/anypb"
	reflect "reflect"
	sync "sync"
)

const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// Plan explanation mode.
type AnalyzePlanRequest_Explain_ExplainMode int32

const (
	AnalyzePlanRequest_Explain_EXPLAIN_MODE_UNSPECIFIED AnalyzePlanRequest_Explain_ExplainMode = 0
	// Generates only physical plan.
	AnalyzePlanRequest_Explain_EXPLAIN_MODE_SIMPLE AnalyzePlanRequest_Explain_ExplainMode = 1
	// Generates parsed logical plan, analyzed logical plan, optimized logical plan and physical plan.
	// Parsed Logical plan is a unresolved plan that extracted from the query. Analyzed logical plans
	// transforms which translates unresolvedAttribute and unresolvedRelation into fully typed objects.
	// The optimized logical plan transforms through a set of optimization rules, resulting in the
	// physical plan.
	AnalyzePlanRequest_Explain_EXPLAIN_MODE_EXTENDED AnalyzePlanRequest_Explain_ExplainMode = 2
	// Generates code for the statement, if any and a physical plan.
	AnalyzePlanRequest_Explain_EXPLAIN_MODE_CODEGEN AnalyzePlanRequest_Explain_ExplainMode = 3
	// If plan node statistics are available, generates a logical plan and also the statistics.
	AnalyzePlanRequest_Explain_EXPLAIN_MODE_COST AnalyzePlanRequest_Explain_ExplainMode = 4
	// Generates a physical plan outline and also node details.
	AnalyzePlanRequest_Explain_EXPLAIN_MODE_FORMATTED AnalyzePlanRequest_Explain_ExplainMode = 5
)

// Enum value maps for AnalyzePlanRequest_Explain_ExplainMode.
// _name maps wire numbers to proto enum value names; _value is the inverse.
var (
	AnalyzePlanRequest_Explain_ExplainMode_name = map[int32]string{
		0: "EXPLAIN_MODE_UNSPECIFIED",
		1: "EXPLAIN_MODE_SIMPLE",
		2: "EXPLAIN_MODE_EXTENDED",
		3: "EXPLAIN_MODE_CODEGEN",
		4: "EXPLAIN_MODE_COST",
		5: "EXPLAIN_MODE_FORMATTED",
	}
	AnalyzePlanRequest_Explain_ExplainMode_value = map[string]int32{
		"EXPLAIN_MODE_UNSPECIFIED": 0,
		"EXPLAIN_MODE_SIMPLE":      1,
		"EXPLAIN_MODE_EXTENDED":    2,
		"EXPLAIN_MODE_CODEGEN":     3,
		"EXPLAIN_MODE_COST":        4,
		"EXPLAIN_MODE_FORMATTED":   5,
	}
)

// Enum returns a pointer to a fresh copy of x (used to populate optional enum fields).
func (x AnalyzePlanRequest_Explain_ExplainMode) Enum() *AnalyzePlanRequest_Explain_ExplainMode {
	p := new(AnalyzePlanRequest_Explain_ExplainMode)
	*p = x
	return p
}

func (x AnalyzePlanRequest_Explain_ExplainMode) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

func (AnalyzePlanRequest_Explain_ExplainMode) Descriptor() protoreflect.EnumDescriptor {
	return file_spark_connect_base_proto_enumTypes[0].Descriptor()
}

func (AnalyzePlanRequest_Explain_ExplainMode) Type() protoreflect.EnumType {
	return &file_spark_connect_base_proto_enumTypes[0]
}

func (x AnalyzePlanRequest_Explain_ExplainMode) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use AnalyzePlanRequest_Explain_ExplainMode.Descriptor instead.
func (AnalyzePlanRequest_Explain_ExplainMode) EnumDescriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 1, 0}
}

type InterruptRequest_InterruptType int32

const (
	InterruptRequest_INTERRUPT_TYPE_UNSPECIFIED InterruptRequest_InterruptType = 0
	// Interrupt all running executions within the session with the provided session_id.
	InterruptRequest_INTERRUPT_TYPE_ALL InterruptRequest_InterruptType = 1
	// Interrupt all running executions within the session with the provided operation_tag.
	InterruptRequest_INTERRUPT_TYPE_TAG InterruptRequest_InterruptType = 2
	// Interrupt the running execution within the session with the provided operation_id.
	InterruptRequest_INTERRUPT_TYPE_OPERATION_ID InterruptRequest_InterruptType = 3
)

// Enum value maps for InterruptRequest_InterruptType.
var (
	InterruptRequest_InterruptType_name = map[int32]string{
		0: "INTERRUPT_TYPE_UNSPECIFIED",
		1: "INTERRUPT_TYPE_ALL",
		2: "INTERRUPT_TYPE_TAG",
		3: "INTERRUPT_TYPE_OPERATION_ID",
	}
	InterruptRequest_InterruptType_value = map[string]int32{
		"INTERRUPT_TYPE_UNSPECIFIED":  0,
		"INTERRUPT_TYPE_ALL":          1,
		"INTERRUPT_TYPE_TAG":          2,
		"INTERRUPT_TYPE_OPERATION_ID": 3,
	}
)

// Enum returns a pointer to a fresh copy of x (used to populate optional enum fields).
func (x InterruptRequest_InterruptType) Enum() *InterruptRequest_InterruptType {
	p := new(InterruptRequest_InterruptType)
	*p = x
	return p
}

func (x InterruptRequest_InterruptType) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

func (InterruptRequest_InterruptType) Descriptor() protoreflect.EnumDescriptor {
	return file_spark_connect_base_proto_enumTypes[1].Descriptor()
}

func (InterruptRequest_InterruptType) Type() protoreflect.EnumType {
	return &file_spark_connect_base_proto_enumTypes[1]
}

func (x InterruptRequest_InterruptType) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use InterruptRequest_InterruptType.Descriptor instead.
func (InterruptRequest_InterruptType) EnumDescriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{13, 0}
}

// The type of this query context.
type FetchErrorDetailsResponse_QueryContext_ContextType int32

const (
	FetchErrorDetailsResponse_QueryContext_SQL       FetchErrorDetailsResponse_QueryContext_ContextType = 0
	FetchErrorDetailsResponse_QueryContext_DATAFRAME FetchErrorDetailsResponse_QueryContext_ContextType = 1
)

// Enum value maps for FetchErrorDetailsResponse_QueryContext_ContextType.
var (
	FetchErrorDetailsResponse_QueryContext_ContextType_name = map[int32]string{
		0: "SQL",
		1: "DATAFRAME",
	}
	FetchErrorDetailsResponse_QueryContext_ContextType_value = map[string]int32{
		"SQL":       0,
		"DATAFRAME": 1,
	}
)

// Enum returns a pointer to a fresh copy of x (used to populate optional enum fields).
func (x FetchErrorDetailsResponse_QueryContext_ContextType) Enum() *FetchErrorDetailsResponse_QueryContext_ContextType {
	p := new(FetchErrorDetailsResponse_QueryContext_ContextType)
	*p = x
	return p
}

func (x FetchErrorDetailsResponse_QueryContext_ContextType) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

func (FetchErrorDetailsResponse_QueryContext_ContextType) Descriptor() protoreflect.EnumDescriptor {
	return file_spark_connect_base_proto_enumTypes[2].Descriptor()
}

func (FetchErrorDetailsResponse_QueryContext_ContextType) Type() protoreflect.EnumType {
	return &file_spark_connect_base_proto_enumTypes[2]
}

func (x FetchErrorDetailsResponse_QueryContext_ContextType) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use FetchErrorDetailsResponse_QueryContext_ContextType.Descriptor instead.
func (FetchErrorDetailsResponse_QueryContext_ContextType) EnumDescriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{22, 1, 0}
}

// A [[Plan]] is the structure that carries the runtime information for the execution from the
// client to the server. A [[Plan]] can either be of the type [[Relation]] which is a reference
// to the underlying logical plan or it can be of the [[Command]] type that is used to execute
// commands on the server.
type Plan struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to OpType:
	//
	//	*Plan_Root
	//	*Plan_Command
	OpType isPlan_OpType `protobuf_oneof:"op_type"`
}

// Reset restores x to its zero state and (under the unsafe fast path)
// re-associates it with the generated message info.
func (x *Plan) Reset() {
	*x = Plan{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Plan) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Plan) ProtoMessage() {}

func (x *Plan) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Plan.ProtoReflect.Descriptor instead.
func (*Plan) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{0}
}

// GetOpType returns the populated op_type oneof wrapper, or nil if unset.
func (m *Plan) GetOpType() isPlan_OpType {
	if m != nil {
		return m.OpType
	}
	return nil
}

// GetRoot returns the root relation if the op_type oneof holds one, else nil.
func (x *Plan) GetRoot() *Relation {
	if x, ok := x.GetOpType().(*Plan_Root); ok {
		return x.Root
	}
	return nil
}

// GetCommand returns the command if the op_type oneof holds one, else nil.
func (x *Plan) GetCommand() *Command {
	if x, ok := x.GetOpType().(*Plan_Command); ok {
		return x.Command
	}
	return nil
}

type isPlan_OpType interface {
	isPlan_OpType()
}

type Plan_Root struct {
	Root *Relation `protobuf:"bytes,1,opt,name=root,proto3,oneof"`
}

type Plan_Command struct {
	Command *Command `protobuf:"bytes,2,opt,name=command,proto3,oneof"`
}

func (*Plan_Root) isPlan_OpType() {}

func (*Plan_Command) isPlan_OpType() {}

// User Context is used to refer to one particular user session that is executing
// queries in the backend.
type UserContext struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	UserId   string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"`
	UserName string `protobuf:"bytes,2,opt,name=user_name,json=userName,proto3" json:"user_name,omitempty"`
	// To extend the existing user context message that is used to identify incoming requests,
	// Spark Connect leverages the Any protobuf type that can be used to inject arbitrary other
	// messages into this message. Extensions are stored as a `repeated` type to be able to
	// handle multiple active extensions.
	Extensions []*anypb.Any `protobuf:"bytes,999,rep,name=extensions,proto3" json:"extensions,omitempty"`
}

// Reset restores x to its zero state and (under the unsafe fast path)
// re-associates it with the generated message info.
func (x *UserContext) Reset() {
	*x = UserContext{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *UserContext) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*UserContext) ProtoMessage() {}

func (x *UserContext) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use UserContext.ProtoReflect.Descriptor instead.
func (*UserContext) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{1}
}

// GetUserId returns the user_id field; nil-receiver safe (returns "").
func (x *UserContext) GetUserId() string {
	if x != nil {
		return x.UserId
	}
	return ""
}

// GetUserName returns the user_name field; nil-receiver safe (returns "").
func (x *UserContext) GetUserName() string {
	if x != nil {
		return x.UserName
	}
	return ""
}

// GetExtensions returns the extensions field; nil-receiver safe (returns nil).
func (x *UserContext) GetExtensions() []*anypb.Any {
	if x != nil {
		return x.Extensions
	}
	return nil
}

// Request to perform plan analyze, optionally to explain the plan.
type AnalyzePlanRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) // // The session_id specifies a spark session for a user id (which is specified // by user_context.user_id). The session_id is set by the client to be able to // collate streaming responses from different queries within the dedicated session. // The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff` SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"` // (Optional) // // Server-side generated idempotency key from the previous responses (if any). Server // can use this to validate that the server side session has not changed. ClientObservedServerSideSessionId *string `protobuf:"bytes,17,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"` // (Required) User context UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"` // Provides optional information about the client sending the request. This field // can be used for language or version specific information and is only intended for // logging purposes and will not be interpreted by the server. 
ClientType *string `protobuf:"bytes,3,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"` // Types that are assignable to Analyze: // // *AnalyzePlanRequest_Schema_ // *AnalyzePlanRequest_Explain_ // *AnalyzePlanRequest_TreeString_ // *AnalyzePlanRequest_IsLocal_ // *AnalyzePlanRequest_IsStreaming_ // *AnalyzePlanRequest_InputFiles_ // *AnalyzePlanRequest_SparkVersion_ // *AnalyzePlanRequest_DdlParse // *AnalyzePlanRequest_SameSemantics_ // *AnalyzePlanRequest_SemanticHash_ // *AnalyzePlanRequest_Persist_ // *AnalyzePlanRequest_Unpersist_ // *AnalyzePlanRequest_GetStorageLevel_ // *AnalyzePlanRequest_JsonToDdl Analyze isAnalyzePlanRequest_Analyze `protobuf_oneof:"analyze"` } func (x *AnalyzePlanRequest) Reset() { *x = AnalyzePlanRequest{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanRequest) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanRequest) ProtoMessage() {} func (x *AnalyzePlanRequest) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanRequest.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanRequest) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{2} } func (x *AnalyzePlanRequest) GetSessionId() string { if x != nil { return x.SessionId } return "" } func (x *AnalyzePlanRequest) GetClientObservedServerSideSessionId() string { if x != nil && x.ClientObservedServerSideSessionId != nil { return *x.ClientObservedServerSideSessionId } return "" } func (x *AnalyzePlanRequest) GetUserContext() *UserContext { if x != nil { return x.UserContext } return nil } func (x *AnalyzePlanRequest) GetClientType() string { if x != nil && x.ClientType != nil { return *x.ClientType } return "" } func (m *AnalyzePlanRequest) GetAnalyze() isAnalyzePlanRequest_Analyze { if m != nil { return m.Analyze } return nil } func (x *AnalyzePlanRequest) GetSchema() *AnalyzePlanRequest_Schema { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_Schema_); ok { return x.Schema } return nil } func (x *AnalyzePlanRequest) GetExplain() *AnalyzePlanRequest_Explain { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_Explain_); ok { return x.Explain } return nil } func (x *AnalyzePlanRequest) GetTreeString() *AnalyzePlanRequest_TreeString { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_TreeString_); ok { return x.TreeString } return nil } func (x *AnalyzePlanRequest) GetIsLocal() *AnalyzePlanRequest_IsLocal { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_IsLocal_); ok { return x.IsLocal } return nil } func (x *AnalyzePlanRequest) GetIsStreaming() *AnalyzePlanRequest_IsStreaming { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_IsStreaming_); ok { return x.IsStreaming } return nil } func (x *AnalyzePlanRequest) GetInputFiles() *AnalyzePlanRequest_InputFiles { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_InputFiles_); ok { return x.InputFiles } return nil } func (x *AnalyzePlanRequest) GetSparkVersion() *AnalyzePlanRequest_SparkVersion { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_SparkVersion_); ok { return x.SparkVersion } return 
nil } func (x *AnalyzePlanRequest) GetDdlParse() *AnalyzePlanRequest_DDLParse { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_DdlParse); ok { return x.DdlParse } return nil } func (x *AnalyzePlanRequest) GetSameSemantics() *AnalyzePlanRequest_SameSemantics { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_SameSemantics_); ok { return x.SameSemantics } return nil } func (x *AnalyzePlanRequest) GetSemanticHash() *AnalyzePlanRequest_SemanticHash { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_SemanticHash_); ok { return x.SemanticHash } return nil } func (x *AnalyzePlanRequest) GetPersist() *AnalyzePlanRequest_Persist { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_Persist_); ok { return x.Persist } return nil } func (x *AnalyzePlanRequest) GetUnpersist() *AnalyzePlanRequest_Unpersist { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_Unpersist_); ok { return x.Unpersist } return nil } func (x *AnalyzePlanRequest) GetGetStorageLevel() *AnalyzePlanRequest_GetStorageLevel { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_GetStorageLevel_); ok { return x.GetStorageLevel } return nil } func (x *AnalyzePlanRequest) GetJsonToDdl() *AnalyzePlanRequest_JsonToDDL { if x, ok := x.GetAnalyze().(*AnalyzePlanRequest_JsonToDdl); ok { return x.JsonToDdl } return nil } type isAnalyzePlanRequest_Analyze interface { isAnalyzePlanRequest_Analyze() } type AnalyzePlanRequest_Schema_ struct { Schema *AnalyzePlanRequest_Schema `protobuf:"bytes,4,opt,name=schema,proto3,oneof"` } type AnalyzePlanRequest_Explain_ struct { Explain *AnalyzePlanRequest_Explain `protobuf:"bytes,5,opt,name=explain,proto3,oneof"` } type AnalyzePlanRequest_TreeString_ struct { TreeString *AnalyzePlanRequest_TreeString `protobuf:"bytes,6,opt,name=tree_string,json=treeString,proto3,oneof"` } type AnalyzePlanRequest_IsLocal_ struct { IsLocal *AnalyzePlanRequest_IsLocal `protobuf:"bytes,7,opt,name=is_local,json=isLocal,proto3,oneof"` } type AnalyzePlanRequest_IsStreaming_ struct { IsStreaming 
*AnalyzePlanRequest_IsStreaming `protobuf:"bytes,8,opt,name=is_streaming,json=isStreaming,proto3,oneof"` } type AnalyzePlanRequest_InputFiles_ struct { InputFiles *AnalyzePlanRequest_InputFiles `protobuf:"bytes,9,opt,name=input_files,json=inputFiles,proto3,oneof"` } type AnalyzePlanRequest_SparkVersion_ struct { SparkVersion *AnalyzePlanRequest_SparkVersion `protobuf:"bytes,10,opt,name=spark_version,json=sparkVersion,proto3,oneof"` } type AnalyzePlanRequest_DdlParse struct { DdlParse *AnalyzePlanRequest_DDLParse `protobuf:"bytes,11,opt,name=ddl_parse,json=ddlParse,proto3,oneof"` } type AnalyzePlanRequest_SameSemantics_ struct { SameSemantics *AnalyzePlanRequest_SameSemantics `protobuf:"bytes,12,opt,name=same_semantics,json=sameSemantics,proto3,oneof"` } type AnalyzePlanRequest_SemanticHash_ struct { SemanticHash *AnalyzePlanRequest_SemanticHash `protobuf:"bytes,13,opt,name=semantic_hash,json=semanticHash,proto3,oneof"` } type AnalyzePlanRequest_Persist_ struct { Persist *AnalyzePlanRequest_Persist `protobuf:"bytes,14,opt,name=persist,proto3,oneof"` } type AnalyzePlanRequest_Unpersist_ struct { Unpersist *AnalyzePlanRequest_Unpersist `protobuf:"bytes,15,opt,name=unpersist,proto3,oneof"` } type AnalyzePlanRequest_GetStorageLevel_ struct { GetStorageLevel *AnalyzePlanRequest_GetStorageLevel `protobuf:"bytes,16,opt,name=get_storage_level,json=getStorageLevel,proto3,oneof"` } type AnalyzePlanRequest_JsonToDdl struct { JsonToDdl *AnalyzePlanRequest_JsonToDDL `protobuf:"bytes,18,opt,name=json_to_ddl,json=jsonToDdl,proto3,oneof"` } func (*AnalyzePlanRequest_Schema_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_Explain_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_TreeString_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_IsLocal_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_IsStreaming_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_InputFiles_) isAnalyzePlanRequest_Analyze() {} func 
(*AnalyzePlanRequest_SparkVersion_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_DdlParse) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_SameSemantics_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_SemanticHash_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_Persist_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_Unpersist_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_GetStorageLevel_) isAnalyzePlanRequest_Analyze() {} func (*AnalyzePlanRequest_JsonToDdl) isAnalyzePlanRequest_Analyze() {} // Response to performing analysis of the query. Contains relevant metadata to be able to // reason about the performance. // Next ID: 16 type AnalyzePlanResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"` // Server-side generated idempotency key that the client can use to assert that the server side // session has not changed. 
ServerSideSessionId string `protobuf:"bytes,15,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"` // Types that are assignable to Result: // // *AnalyzePlanResponse_Schema_ // *AnalyzePlanResponse_Explain_ // *AnalyzePlanResponse_TreeString_ // *AnalyzePlanResponse_IsLocal_ // *AnalyzePlanResponse_IsStreaming_ // *AnalyzePlanResponse_InputFiles_ // *AnalyzePlanResponse_SparkVersion_ // *AnalyzePlanResponse_DdlParse // *AnalyzePlanResponse_SameSemantics_ // *AnalyzePlanResponse_SemanticHash_ // *AnalyzePlanResponse_Persist_ // *AnalyzePlanResponse_Unpersist_ // *AnalyzePlanResponse_GetStorageLevel_ // *AnalyzePlanResponse_JsonToDdl Result isAnalyzePlanResponse_Result `protobuf_oneof:"result"` } func (x *AnalyzePlanResponse) Reset() { *x = AnalyzePlanResponse{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse) ProtoMessage() {} func (x *AnalyzePlanResponse) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{3}
}

func (x *AnalyzePlanResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *AnalyzePlanResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

// GetResult returns the raw `result` oneof wrapper; callers normally use the
// typed Get* accessors below to retrieve a specific variant.
func (m *AnalyzePlanResponse) GetResult() isAnalyzePlanResponse_Result {
	if m != nil {
		return m.Result
	}
	return nil
}

// Each oneof accessor below returns its variant's payload when that variant is
// currently set on the `result` oneof, and nil otherwise.

func (x *AnalyzePlanResponse) GetSchema() *AnalyzePlanResponse_Schema {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_Schema_); ok {
		return x.Schema
	}
	return nil
}

func (x *AnalyzePlanResponse) GetExplain() *AnalyzePlanResponse_Explain {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_Explain_); ok {
		return x.Explain
	}
	return nil
}

func (x *AnalyzePlanResponse) GetTreeString() *AnalyzePlanResponse_TreeString {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_TreeString_); ok {
		return x.TreeString
	}
	return nil
}

func (x *AnalyzePlanResponse) GetIsLocal() *AnalyzePlanResponse_IsLocal {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_IsLocal_); ok {
		return x.IsLocal
	}
	return nil
}

func (x *AnalyzePlanResponse) GetIsStreaming() *AnalyzePlanResponse_IsStreaming {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_IsStreaming_); ok {
		return x.IsStreaming
	}
	return nil
}

func (x *AnalyzePlanResponse) GetInputFiles() *AnalyzePlanResponse_InputFiles {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_InputFiles_); ok {
		return x.InputFiles
	}
	return nil
}

func (x *AnalyzePlanResponse) GetSparkVersion() *AnalyzePlanResponse_SparkVersion {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_SparkVersion_); ok {
		return x.SparkVersion
	}
	return nil
}

func (x *AnalyzePlanResponse) GetDdlParse() *AnalyzePlanResponse_DDLParse {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_DdlParse); ok {
		return x.DdlParse
	}
	return nil
}

func (x *AnalyzePlanResponse) GetSameSemantics() *AnalyzePlanResponse_SameSemantics {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_SameSemantics_); ok {
		return x.SameSemantics
	}
	return nil
}

func (x *AnalyzePlanResponse) GetSemanticHash() *AnalyzePlanResponse_SemanticHash {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_SemanticHash_); ok {
		return x.SemanticHash
	}
	return nil
}

func (x *AnalyzePlanResponse) GetPersist() *AnalyzePlanResponse_Persist {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_Persist_); ok {
		return x.Persist
	}
	return nil
}

func (x *AnalyzePlanResponse) GetUnpersist() *AnalyzePlanResponse_Unpersist {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_Unpersist_); ok {
		return x.Unpersist
	}
	return nil
}

func (x *AnalyzePlanResponse) GetGetStorageLevel() *AnalyzePlanResponse_GetStorageLevel {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_GetStorageLevel_); ok {
		return x.GetStorageLevel
	}
	return nil
}

func (x *AnalyzePlanResponse) GetJsonToDdl() *AnalyzePlanResponse_JsonToDDL {
	if x, ok := x.GetResult().(*AnalyzePlanResponse_JsonToDdl); ok {
		return x.JsonToDdl
	}
	return nil
}

// isAnalyzePlanResponse_Result is the sealed interface implemented by every
// wrapper type of the `result` oneof.
type isAnalyzePlanResponse_Result interface {
	isAnalyzePlanResponse_Result()
}

// One wrapper struct per `result` oneof variant; the protobuf tag carries the
// variant's proto field number.

type AnalyzePlanResponse_Schema_ struct {
	Schema *AnalyzePlanResponse_Schema `protobuf:"bytes,2,opt,name=schema,proto3,oneof"`
}

type AnalyzePlanResponse_Explain_ struct {
	Explain *AnalyzePlanResponse_Explain `protobuf:"bytes,3,opt,name=explain,proto3,oneof"`
}

type AnalyzePlanResponse_TreeString_ struct {
	TreeString *AnalyzePlanResponse_TreeString `protobuf:"bytes,4,opt,name=tree_string,json=treeString,proto3,oneof"`
}

type AnalyzePlanResponse_IsLocal_ struct {
	IsLocal *AnalyzePlanResponse_IsLocal `protobuf:"bytes,5,opt,name=is_local,json=isLocal,proto3,oneof"`
}

type AnalyzePlanResponse_IsStreaming_ struct {
	IsStreaming *AnalyzePlanResponse_IsStreaming `protobuf:"bytes,6,opt,name=is_streaming,json=isStreaming,proto3,oneof"`
}

type AnalyzePlanResponse_InputFiles_ struct {
	InputFiles *AnalyzePlanResponse_InputFiles `protobuf:"bytes,7,opt,name=input_files,json=inputFiles,proto3,oneof"`
}

type AnalyzePlanResponse_SparkVersion_ struct {
	SparkVersion *AnalyzePlanResponse_SparkVersion `protobuf:"bytes,8,opt,name=spark_version,json=sparkVersion,proto3,oneof"`
}

type AnalyzePlanResponse_DdlParse struct {
	DdlParse *AnalyzePlanResponse_DDLParse `protobuf:"bytes,9,opt,name=ddl_parse,json=ddlParse,proto3,oneof"`
}

type AnalyzePlanResponse_SameSemantics_ struct {
	SameSemantics *AnalyzePlanResponse_SameSemantics `protobuf:"bytes,10,opt,name=same_semantics,json=sameSemantics,proto3,oneof"`
}

type AnalyzePlanResponse_SemanticHash_ struct {
	SemanticHash *AnalyzePlanResponse_SemanticHash `protobuf:"bytes,11,opt,name=semantic_hash,json=semanticHash,proto3,oneof"`
}

type AnalyzePlanResponse_Persist_ struct {
	Persist *AnalyzePlanResponse_Persist `protobuf:"bytes,12,opt,name=persist,proto3,oneof"`
}

type AnalyzePlanResponse_Unpersist_ struct {
	Unpersist *AnalyzePlanResponse_Unpersist `protobuf:"bytes,13,opt,name=unpersist,proto3,oneof"`
}

type AnalyzePlanResponse_GetStorageLevel_ struct {
	GetStorageLevel *AnalyzePlanResponse_GetStorageLevel `protobuf:"bytes,14,opt,name=get_storage_level,json=getStorageLevel,proto3,oneof"`
}

type AnalyzePlanResponse_JsonToDdl struct {
	JsonToDdl *AnalyzePlanResponse_JsonToDDL `protobuf:"bytes,16,opt,name=json_to_ddl,json=jsonToDdl,proto3,oneof"`
}

func (*AnalyzePlanResponse_Schema_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_Explain_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_TreeString_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_IsLocal_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_IsStreaming_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_InputFiles_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_SparkVersion_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_DdlParse) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_SameSemantics_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_SemanticHash_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_Persist_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_Unpersist_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_GetStorageLevel_) isAnalyzePlanResponse_Result() {}

func (*AnalyzePlanResponse_JsonToDdl) isAnalyzePlanResponse_Result() {}

// A request to be executed by the service.
type ExecutePlanRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	//
	// The session_id specifies a spark session for a user id (which is specified
	// by user_context.user_id). The session_id is set by the client to be able to
	// collate streaming responses from different queries within the dedicated session.
	// The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,8,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// (Required) User context
	//
	// user_context.user_id and session+id both identify a unique remote spark session on the
	// server side.
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// (Optional)
	// Provide an id for this request. If not provided, it will be generated by the server.
	// It is returned in every ExecutePlanResponse.operation_id of the ExecutePlan response stream.
	// The id must be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	OperationId *string `protobuf:"bytes,6,opt,name=operation_id,json=operationId,proto3,oneof" json:"operation_id,omitempty"`
	// (Required) The logical plan to be executed / analyzed.
	Plan *Plan `protobuf:"bytes,3,opt,name=plan,proto3" json:"plan,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,4,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
	// Repeated element for options that can be passed to the request. This element is currently
	// unused but allows to pass in an extension value used for arbitrary options.
	RequestOptions []*ExecutePlanRequest_RequestOption `protobuf:"bytes,5,rep,name=request_options,json=requestOptions,proto3" json:"request_options,omitempty"`
	// Tags to tag the given execution with.
	// Tags cannot contain ',' character and cannot be empty strings.
	// Used by Interrupt with interrupt.tag.
	Tags []string `protobuf:"bytes,7,rep,name=tags,proto3" json:"tags,omitempty"`
}

func (x *ExecutePlanRequest) Reset() {
	*x = ExecutePlanRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ExecutePlanRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ExecutePlanRequest) ProtoMessage() {}

func (x *ExecutePlanRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ExecutePlanRequest.ProtoReflect.Descriptor instead.
func (*ExecutePlanRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{4}
}

func (x *ExecutePlanRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ExecutePlanRequest) GetClientObservedServerSideSessionId() string {
	if x != nil && x.ClientObservedServerSideSessionId != nil {
		return *x.ClientObservedServerSideSessionId
	}
	return ""
}

func (x *ExecutePlanRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *ExecutePlanRequest) GetOperationId() string {
	if x != nil && x.OperationId != nil {
		return *x.OperationId
	}
	return ""
}

func (x *ExecutePlanRequest) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

func (x *ExecutePlanRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

func (x *ExecutePlanRequest) GetRequestOptions() []*ExecutePlanRequest_RequestOption {
	if x != nil {
		return x.RequestOptions
	}
	return nil
}

func (x *ExecutePlanRequest) GetTags() []string {
	if x != nil {
		return x.Tags
	}
	return nil
}

// The response of a query, can be one or more for each request. Responses belonging to the
// same input query, carry the same `session_id`.
// Next ID: 17
type ExecutePlanResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// Server-side generated idempotency key that the client can use to assert that the server side
	// session has not changed.
	ServerSideSessionId string `protobuf:"bytes,15,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"`
	// Identifies the ExecutePlan execution.
	// If set by the client in ExecutePlanRequest.operationId, that value is returned.
	// Otherwise generated by the server.
	// It is an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	OperationId string `protobuf:"bytes,12,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"`
	// Identified the response in the stream.
	// The id is an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	ResponseId string `protobuf:"bytes,13,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"`
	// Union type for the different response messages.
	//
	// Types that are assignable to ResponseType:
	//
	//	*ExecutePlanResponse_ArrowBatch_
	//	*ExecutePlanResponse_SqlCommandResult_
	//	*ExecutePlanResponse_WriteStreamOperationStartResult
	//	*ExecutePlanResponse_StreamingQueryCommandResult
	//	*ExecutePlanResponse_GetResourcesCommandResult
	//	*ExecutePlanResponse_StreamingQueryManagerCommandResult
	//	*ExecutePlanResponse_StreamingQueryListenerEventsResult
	//	*ExecutePlanResponse_ResultComplete_
	//	*ExecutePlanResponse_CreateResourceProfileCommandResult
	//	*ExecutePlanResponse_ExecutionProgress_
	//	*ExecutePlanResponse_CheckpointCommandResult
	//	*ExecutePlanResponse_MlCommandResult
	//	*ExecutePlanResponse_PipelineEventResult
	//	*ExecutePlanResponse_PipelineCommandResult
	//	*ExecutePlanResponse_Extension
	ResponseType isExecutePlanResponse_ResponseType `protobuf_oneof:"response_type"`
	// Metrics for the query execution. Typically, this field is only present in the last
	// batch of results and then represent the overall state of the query execution.
	Metrics *ExecutePlanResponse_Metrics `protobuf:"bytes,4,opt,name=metrics,proto3" json:"metrics,omitempty"`
	// The metrics observed during the execution of the query plan.
	ObservedMetrics []*ExecutePlanResponse_ObservedMetrics `protobuf:"bytes,6,rep,name=observed_metrics,json=observedMetrics,proto3" json:"observed_metrics,omitempty"`
	// (Optional) The Spark schema. This field is available when `collect` is called.
	Schema *DataType `protobuf:"bytes,7,opt,name=schema,proto3" json:"schema,omitempty"`
}

func (x *ExecutePlanResponse) Reset() {
	*x = ExecutePlanResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ExecutePlanResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ExecutePlanResponse) ProtoMessage() {}

func (x *ExecutePlanResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ExecutePlanResponse.ProtoReflect.Descriptor instead.
func (*ExecutePlanResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{5}
}

func (x *ExecutePlanResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ExecutePlanResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

func (x *ExecutePlanResponse) GetOperationId() string {
	if x != nil {
		return x.OperationId
	}
	return ""
}

func (x *ExecutePlanResponse) GetResponseId() string {
	if x != nil {
		return x.ResponseId
	}
	return ""
}

// GetResponseType returns the raw `response_type` oneof wrapper; the typed
// Get* accessors below return a specific variant's payload, or nil when that
// variant is not the one set.
func (m *ExecutePlanResponse) GetResponseType() isExecutePlanResponse_ResponseType {
	if m != nil {
		return m.ResponseType
	}
	return nil
}

func (x *ExecutePlanResponse) GetArrowBatch() *ExecutePlanResponse_ArrowBatch {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_ArrowBatch_); ok {
		return x.ArrowBatch
	}
	return nil
}

func (x *ExecutePlanResponse) GetSqlCommandResult() *ExecutePlanResponse_SqlCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_SqlCommandResult_); ok {
		return x.SqlCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetWriteStreamOperationStartResult() *WriteStreamOperationStartResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_WriteStreamOperationStartResult); ok {
		return x.WriteStreamOperationStartResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetStreamingQueryCommandResult() *StreamingQueryCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_StreamingQueryCommandResult); ok {
		return x.StreamingQueryCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetGetResourcesCommandResult() *GetResourcesCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_GetResourcesCommandResult); ok {
		return x.GetResourcesCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetStreamingQueryManagerCommandResult() *StreamingQueryManagerCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_StreamingQueryManagerCommandResult); ok {
		return x.StreamingQueryManagerCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetStreamingQueryListenerEventsResult() *StreamingQueryListenerEventsResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_StreamingQueryListenerEventsResult); ok {
		return x.StreamingQueryListenerEventsResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetResultComplete() *ExecutePlanResponse_ResultComplete {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_ResultComplete_); ok {
		return x.ResultComplete
	}
	return nil
}

func (x *ExecutePlanResponse) GetCreateResourceProfileCommandResult() *CreateResourceProfileCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_CreateResourceProfileCommandResult); ok {
		return x.CreateResourceProfileCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetExecutionProgress() *ExecutePlanResponse_ExecutionProgress {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_ExecutionProgress_); ok {
		return x.ExecutionProgress
	}
	return nil
}

func (x *ExecutePlanResponse) GetCheckpointCommandResult() *CheckpointCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_CheckpointCommandResult); ok {
		return x.CheckpointCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetMlCommandResult() *MlCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_MlCommandResult); ok {
		return x.MlCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetPipelineEventResult() *PipelineEventResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_PipelineEventResult); ok {
		return x.PipelineEventResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetPipelineCommandResult() *PipelineCommandResult {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_PipelineCommandResult); ok {
		return x.PipelineCommandResult
	}
	return nil
}

func (x *ExecutePlanResponse) GetExtension() *anypb.Any {
	if x, ok := x.GetResponseType().(*ExecutePlanResponse_Extension); ok {
		return x.Extension
	}
	return nil
}

func (x *ExecutePlanResponse) GetMetrics() *ExecutePlanResponse_Metrics {
	if x != nil {
		return x.Metrics
	}
	return nil
}

func (x *ExecutePlanResponse) GetObservedMetrics() []*ExecutePlanResponse_ObservedMetrics {
	if x != nil {
		return x.ObservedMetrics
	}
	return nil
}

func (x *ExecutePlanResponse) GetSchema() *DataType {
	if x != nil {
		return x.Schema
	}
	return nil
}

// isExecutePlanResponse_ResponseType is the sealed interface implemented by
// every wrapper type of the `response_type` oneof.
type isExecutePlanResponse_ResponseType interface {
	isExecutePlanResponse_ResponseType()
}

// One wrapper struct per `response_type` oneof variant; the protobuf tag
// carries the variant's proto field number.

type ExecutePlanResponse_ArrowBatch_ struct {
	ArrowBatch *ExecutePlanResponse_ArrowBatch `protobuf:"bytes,2,opt,name=arrow_batch,json=arrowBatch,proto3,oneof"`
}

type ExecutePlanResponse_SqlCommandResult_ struct {
	// Special case for executing SQL commands.
	SqlCommandResult *ExecutePlanResponse_SqlCommandResult `protobuf:"bytes,5,opt,name=sql_command_result,json=sqlCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_WriteStreamOperationStartResult struct {
	// Response for a streaming query.
	WriteStreamOperationStartResult *WriteStreamOperationStartResult `protobuf:"bytes,8,opt,name=write_stream_operation_start_result,json=writeStreamOperationStartResult,proto3,oneof"`
}

type ExecutePlanResponse_StreamingQueryCommandResult struct {
	// Response for commands on a streaming query.
	StreamingQueryCommandResult *StreamingQueryCommandResult `protobuf:"bytes,9,opt,name=streaming_query_command_result,json=streamingQueryCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_GetResourcesCommandResult struct {
	// Response for 'SparkContext.resources'.
	GetResourcesCommandResult *GetResourcesCommandResult `protobuf:"bytes,10,opt,name=get_resources_command_result,json=getResourcesCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_StreamingQueryManagerCommandResult struct {
	// Response for commands on the streaming query manager.
	StreamingQueryManagerCommandResult *StreamingQueryManagerCommandResult `protobuf:"bytes,11,opt,name=streaming_query_manager_command_result,json=streamingQueryManagerCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_StreamingQueryListenerEventsResult struct {
	// Response for commands on the client side streaming query listener.
	StreamingQueryListenerEventsResult *StreamingQueryListenerEventsResult `protobuf:"bytes,16,opt,name=streaming_query_listener_events_result,json=streamingQueryListenerEventsResult,proto3,oneof"`
}

type ExecutePlanResponse_ResultComplete_ struct {
	// Response type informing if the stream is complete in reattachable execution.
	ResultComplete *ExecutePlanResponse_ResultComplete `protobuf:"bytes,14,opt,name=result_complete,json=resultComplete,proto3,oneof"`
}

type ExecutePlanResponse_CreateResourceProfileCommandResult struct {
	// Response for command that creates ResourceProfile.
	CreateResourceProfileCommandResult *CreateResourceProfileCommandResult `protobuf:"bytes,17,opt,name=create_resource_profile_command_result,json=createResourceProfileCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_ExecutionProgress_ struct {
	// (Optional) Intermediate query progress reports.
	ExecutionProgress *ExecutePlanResponse_ExecutionProgress `protobuf:"bytes,18,opt,name=execution_progress,json=executionProgress,proto3,oneof"`
}

type ExecutePlanResponse_CheckpointCommandResult struct {
	// Response for command that checkpoints a DataFrame.
	CheckpointCommandResult *CheckpointCommandResult `protobuf:"bytes,19,opt,name=checkpoint_command_result,json=checkpointCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_MlCommandResult struct {
	// ML command response
	MlCommandResult *MlCommandResult `protobuf:"bytes,20,opt,name=ml_command_result,json=mlCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_PipelineEventResult struct {
	// Response containing pipeline event that is streamed back to the client during a pipeline run
	PipelineEventResult *PipelineEventResult `protobuf:"bytes,21,opt,name=pipeline_event_result,json=pipelineEventResult,proto3,oneof"`
}

type ExecutePlanResponse_PipelineCommandResult struct {
	// Pipeline command response
	PipelineCommandResult *PipelineCommandResult `protobuf:"bytes,22,opt,name=pipeline_command_result,json=pipelineCommandResult,proto3,oneof"`
}

type ExecutePlanResponse_Extension struct {
	// Support arbitrary result objects.
	Extension *anypb.Any `protobuf:"bytes,999,opt,name=extension,proto3,oneof"`
}

func (*ExecutePlanResponse_ArrowBatch_) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_SqlCommandResult_) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_WriteStreamOperationStartResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_StreamingQueryCommandResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_GetResourcesCommandResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_StreamingQueryManagerCommandResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_StreamingQueryListenerEventsResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_ResultComplete_) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_CreateResourceProfileCommandResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_ExecutionProgress_) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_CheckpointCommandResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_MlCommandResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_PipelineEventResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_PipelineCommandResult) isExecutePlanResponse_ResponseType() {}

func (*ExecutePlanResponse_Extension) isExecutePlanResponse_ResponseType() {}

// The key-value pair for the config request and response.
type KeyValue struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The key.
	Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
	// (Optional) The value.
	Value *string `protobuf:"bytes,2,opt,name=value,proto3,oneof" json:"value,omitempty"`
}

func (x *KeyValue) Reset() {
	*x = KeyValue{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *KeyValue) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*KeyValue) ProtoMessage() {}

func (x *KeyValue) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use KeyValue.ProtoReflect.Descriptor instead.
func (*KeyValue) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{6}
}

func (x *KeyValue) GetKey() string {
	if x != nil {
		return x.Key
	}
	return ""
}

func (x *KeyValue) GetValue() string {
	if x != nil && x.Value != nil {
		return *x.Value
	}
	return ""
}

// Request to update or fetch the configurations.
type ConfigRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	//
	// The session_id specifies a spark session for a user id (which is specified
	// by user_context.user_id). The session_id is set by the client to be able to
	// collate streaming responses from different queries within the dedicated session.
	// The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,8,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// (Required) User context
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// (Required) The operation for the config.
	Operation *ConfigRequest_Operation `protobuf:"bytes,3,opt,name=operation,proto3" json:"operation,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,4,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
}

func (x *ConfigRequest) Reset() {
	*x = ConfigRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest) ProtoMessage() {}

func (x *ConfigRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest.ProtoReflect.Descriptor instead.
func (*ConfigRequest) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{7} } func (x *ConfigRequest) GetSessionId() string { if x != nil { return x.SessionId } return "" } func (x *ConfigRequest) GetClientObservedServerSideSessionId() string { if x != nil && x.ClientObservedServerSideSessionId != nil { return *x.ClientObservedServerSideSessionId } return "" } func (x *ConfigRequest) GetUserContext() *UserContext { if x != nil { return x.UserContext } return nil } func (x *ConfigRequest) GetOperation() *ConfigRequest_Operation { if x != nil { return x.Operation } return nil } func (x *ConfigRequest) GetClientType() string { if x != nil && x.ClientType != nil { return *x.ClientType } return "" } // Response to the config request. // Next ID: 5 type ConfigResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"` // Server-side generated idempotency key that the client can use to assert that the server side // session has not changed. ServerSideSessionId string `protobuf:"bytes,4,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"` // (Optional) The result key-value pairs. // // Available when the operation is 'Get', 'GetWithDefault', 'GetOption', 'GetAll'. // Also available for the operation 'IsModifiable' with boolean string "true" and "false". Pairs []*KeyValue `protobuf:"bytes,2,rep,name=pairs,proto3" json:"pairs,omitempty"` // (Optional) // // Warning messages for deprecated or unsupported configurations. 
	Warnings []string `protobuf:"bytes,3,rep,name=warnings,proto3" json:"warnings,omitempty"`
}

// Reset clears the message to its zero value and, on the unsafe fast path,
// pre-stores the cached message type info.
func (x *ConfigResponse) Reset() {
	*x = ConfigResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String implements fmt.Stringer via the protobuf runtime.
func (x *ConfigResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily attaching
// the cached message type info on first use.
func (x *ConfigResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigResponse.ProtoReflect.Descriptor instead.
func (*ConfigResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{8}
}

// Nil-safe accessors: each getter tolerates a nil receiver and returns the
// zero value of the field. This pattern repeats for every message below.

func (x *ConfigResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ConfigResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

func (x *ConfigResponse) GetPairs() []*KeyValue {
	if x != nil {
		return x.Pairs
	}
	return nil
}

func (x *ConfigResponse) GetWarnings() []string {
	if x != nil {
		return x.Warnings
	}
	return nil
}

// Request to transfer client-local artifacts.
type AddArtifactsRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	//
	// The session_id specifies a spark session for a user id (which is specified
	// by user_context.user_id). The session_id is set by the client to be able to
	// collate streaming responses from different queries within the dedicated session.
	// The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// User context
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,7,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,6,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
	// The payload is either a batch of artifacts or a partial chunk of a large artifact.
	//
	// Types that are assignable to Payload:
	//
	//	*AddArtifactsRequest_Batch_
	//	*AddArtifactsRequest_BeginChunk
	//	*AddArtifactsRequest_Chunk
	Payload isAddArtifactsRequest_Payload `protobuf_oneof:"payload"`
}

func (x *AddArtifactsRequest) Reset() {
	*x = AddArtifactsRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AddArtifactsRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AddArtifactsRequest) ProtoMessage() {}

func (x *AddArtifactsRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AddArtifactsRequest.ProtoReflect.Descriptor instead.
func (*AddArtifactsRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{9}
}

func (x *AddArtifactsRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *AddArtifactsRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *AddArtifactsRequest) GetClientObservedServerSideSessionId() string {
	if x != nil && x.ClientObservedServerSideSessionId != nil {
		return *x.ClientObservedServerSideSessionId
	}
	return ""
}

func (x *AddArtifactsRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

func (m *AddArtifactsRequest) GetPayload() isAddArtifactsRequest_Payload {
	if m != nil {
		return m.Payload
	}
	return nil
}

// Oneof accessors: each returns its variant's value only when that variant is
// the one currently set, and nil otherwise.

func (x *AddArtifactsRequest) GetBatch() *AddArtifactsRequest_Batch {
	if x, ok := x.GetPayload().(*AddArtifactsRequest_Batch_); ok {
		return x.Batch
	}
	return nil
}

func (x *AddArtifactsRequest) GetBeginChunk() *AddArtifactsRequest_BeginChunkedArtifact {
	if x, ok := x.GetPayload().(*AddArtifactsRequest_BeginChunk); ok {
		return x.BeginChunk
	}
	return nil
}

func (x *AddArtifactsRequest) GetChunk() *AddArtifactsRequest_ArtifactChunk {
	if x, ok := x.GetPayload().(*AddArtifactsRequest_Chunk); ok {
		return x.Chunk
	}
	return nil
}

// isAddArtifactsRequest_Payload is the closed interface implemented by the
// oneof wrapper types below.
type isAddArtifactsRequest_Payload interface {
	isAddArtifactsRequest_Payload()
}

type AddArtifactsRequest_Batch_ struct {
	Batch *AddArtifactsRequest_Batch `protobuf:"bytes,3,opt,name=batch,proto3,oneof"`
}

type AddArtifactsRequest_BeginChunk struct {
	// The metadata and the initial chunk of a large artifact chunked into multiple requests.
	// The server side is notified about the total size of the large artifact as well as the
	// number of chunks to expect.
	BeginChunk *AddArtifactsRequest_BeginChunkedArtifact `protobuf:"bytes,4,opt,name=begin_chunk,json=beginChunk,proto3,oneof"`
}

type AddArtifactsRequest_Chunk struct {
	// A chunk of an artifact excluding metadata. This can be any chunk of a large artifact
	// excluding the first chunk (which is included in `BeginChunkedArtifact`).
	Chunk *AddArtifactsRequest_ArtifactChunk `protobuf:"bytes,5,opt,name=chunk,proto3,oneof"`
}

func (*AddArtifactsRequest_Batch_) isAddArtifactsRequest_Payload() {}

func (*AddArtifactsRequest_BeginChunk) isAddArtifactsRequest_Payload() {}

func (*AddArtifactsRequest_Chunk) isAddArtifactsRequest_Payload() {}

// Response to adding an artifact. Contains relevant metadata to verify successful transfer of
// artifact(s).
// Next ID: 4
type AddArtifactsResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Session id in which the AddArtifact was running.
	SessionId string `protobuf:"bytes,2,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// Server-side generated idempotency key that the client can use to assert that the server side
	// session has not changed.
	ServerSideSessionId string `protobuf:"bytes,3,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"`
	// The list of artifact(s) seen by the server.
	Artifacts []*AddArtifactsResponse_ArtifactSummary `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"`
}

// Generated message boilerplate (Reset/String/ProtoMessage/ProtoReflect/
// Descriptor) — same pattern as every message in this file.
func (x *AddArtifactsResponse) Reset() {
	*x = AddArtifactsResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[10]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AddArtifactsResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AddArtifactsResponse) ProtoMessage() {}

func (x *AddArtifactsResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[10]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AddArtifactsResponse.ProtoReflect.Descriptor instead.
func (*AddArtifactsResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{10}
}

func (x *AddArtifactsResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *AddArtifactsResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

func (x *AddArtifactsResponse) GetArtifacts() []*AddArtifactsResponse_ArtifactSummary {
	if x != nil {
		return x.Artifacts
	}
	return nil
}

// Request to get current statuses of artifacts at the server side.
type ArtifactStatusesRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	//
	// The session_id specifies a spark session for a user id (which is specified
	// by user_context.user_id). The session_id is set by the client to be able to
	// collate streaming responses from different queries within the dedicated session.
	// The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,5,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// User context
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,3,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
	// The name of the artifact is expected in the form of a "Relative Path" that is made up of a
	// sequence of directories and the final file element.
	// Examples of "Relative Path"s: "jars/test.jar", "classes/xyz.class", "abc.xyz", "a/b/X.jar".
	// The server is expected to maintain the hierarchy of files as defined by their name. (i.e
	// The relative path of the file on the server's filesystem will be the same as the name of
	// the provided artifact)
	Names []string `protobuf:"bytes,4,rep,name=names,proto3" json:"names,omitempty"`
}

func (x *ArtifactStatusesRequest) Reset() {
	*x = ArtifactStatusesRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[11]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ArtifactStatusesRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ArtifactStatusesRequest) ProtoMessage() {}

func (x *ArtifactStatusesRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[11]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ArtifactStatusesRequest.ProtoReflect.Descriptor instead.
func (*ArtifactStatusesRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{11}
}

func (x *ArtifactStatusesRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ArtifactStatusesRequest) GetClientObservedServerSideSessionId() string {
	if x != nil && x.ClientObservedServerSideSessionId != nil {
		return *x.ClientObservedServerSideSessionId
	}
	return ""
}

func (x *ArtifactStatusesRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *ArtifactStatusesRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

func (x *ArtifactStatusesRequest) GetNames() []string {
	if x != nil {
		return x.Names
	}
	return nil
}

// Response to checking artifact statuses.
// Next ID: 4
type ArtifactStatusesResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Session id in which the ArtifactStatus was running.
	SessionId string `protobuf:"bytes,2,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// Server-side generated idempotency key that the client can use to assert that the server side
	// session has not changed.
	ServerSideSessionId string `protobuf:"bytes,3,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"`
	// A map of artifact names to their statuses.
	Statuses map[string]*ArtifactStatusesResponse_ArtifactStatus `protobuf:"bytes,1,rep,name=statuses,proto3" json:"statuses,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *ArtifactStatusesResponse) Reset() {
	*x = ArtifactStatusesResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[12]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ArtifactStatusesResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ArtifactStatusesResponse) ProtoMessage() {}

func (x *ArtifactStatusesResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[12]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ArtifactStatusesResponse.ProtoReflect.Descriptor instead.
func (*ArtifactStatusesResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{12}
}

func (x *ArtifactStatusesResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ArtifactStatusesResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

func (x *ArtifactStatusesResponse) GetStatuses() map[string]*ArtifactStatusesResponse_ArtifactStatus {
	if x != nil {
		return x.Statuses
	}
	return nil
}

// InterruptRequest asks the server to interrupt running operations in a
// session, selected by interrupt_type and (optionally) an operation tag or id.
type InterruptRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	//
	// The session_id specifies a spark session for a user id (which is specified
	// by user_context.user_id). The session_id is set by the client to be able to
	// collate streaming responses from different queries within the dedicated session.
	// The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,7,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// (Required) User context
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,3,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
	// (Required) The type of interrupt to execute.
	InterruptType InterruptRequest_InterruptType `protobuf:"varint,4,opt,name=interrupt_type,json=interruptType,proto3,enum=spark.connect.InterruptRequest_InterruptType" json:"interrupt_type,omitempty"`
	// Types that are assignable to Interrupt:
	//
	//	*InterruptRequest_OperationTag
	//	*InterruptRequest_OperationId
	Interrupt isInterruptRequest_Interrupt `protobuf_oneof:"interrupt"`
}

func (x *InterruptRequest) Reset() {
	*x = InterruptRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[13]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *InterruptRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*InterruptRequest) ProtoMessage() {}

func (x *InterruptRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[13]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InterruptRequest.ProtoReflect.Descriptor instead.
func (*InterruptRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{13}
}

func (x *InterruptRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *InterruptRequest) GetClientObservedServerSideSessionId() string {
	if x != nil && x.ClientObservedServerSideSessionId != nil {
		return *x.ClientObservedServerSideSessionId
	}
	return ""
}

func (x *InterruptRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *InterruptRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

func (x *InterruptRequest) GetInterruptType() InterruptRequest_InterruptType {
	if x != nil {
		return x.InterruptType
	}
	return InterruptRequest_INTERRUPT_TYPE_UNSPECIFIED
}

func (m *InterruptRequest) GetInterrupt() isInterruptRequest_Interrupt {
	if m != nil {
		return m.Interrupt
	}
	return nil
}

func (x *InterruptRequest) GetOperationTag() string {
	if x, ok := x.GetInterrupt().(*InterruptRequest_OperationTag); ok {
		return x.OperationTag
	}
	return ""
}

func (x *InterruptRequest) GetOperationId() string {
	if x, ok := x.GetInterrupt().(*InterruptRequest_OperationId); ok {
		return x.OperationId
	}
	return ""
}

// isInterruptRequest_Interrupt is the closed interface implemented by the
// oneof wrapper types below.
type isInterruptRequest_Interrupt interface {
	isInterruptRequest_Interrupt()
}

type InterruptRequest_OperationTag struct {
	// if interrupt_tag == INTERRUPT_TYPE_TAG, interrupt operation with this tag.
	OperationTag string `protobuf:"bytes,5,opt,name=operation_tag,json=operationTag,proto3,oneof"`
}

type InterruptRequest_OperationId struct {
	// if interrupt_tag == INTERRUPT_TYPE_OPERATION_ID, interrupt operation with this operation_id.
	OperationId string `protobuf:"bytes,6,opt,name=operation_id,json=operationId,proto3,oneof"`
}

func (*InterruptRequest_OperationTag) isInterruptRequest_Interrupt() {}

func (*InterruptRequest_OperationId) isInterruptRequest_Interrupt() {}

// Next ID: 4
type InterruptResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Session id in which the interrupt was running.
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// Server-side generated idempotency key that the client can use to assert that the server side
	// session has not changed.
	ServerSideSessionId string `protobuf:"bytes,3,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"`
	// Operation ids of the executions which were interrupted.
	InterruptedIds []string `protobuf:"bytes,2,rep,name=interrupted_ids,json=interruptedIds,proto3" json:"interrupted_ids,omitempty"`
}

func (x *InterruptResponse) Reset() {
	*x = InterruptResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[14]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *InterruptResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*InterruptResponse) ProtoMessage() {}

func (x *InterruptResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[14]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InterruptResponse.ProtoReflect.Descriptor instead.
func (*InterruptResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{14}
}

func (x *InterruptResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *InterruptResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

func (x *InterruptResponse) GetInterruptedIds() []string {
	if x != nil {
		return x.InterruptedIds
	}
	return nil
}

// ReattachOptions configures whether an execution's response stream can be
// reattached to after a break, via ReattachExecute.
type ReattachOptions struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// If true, the request can be reattached to using ReattachExecute.
	// ReattachExecute can be used either if the stream broke with a GRPC network error,
	// or if the server closed the stream without sending a response with StreamStatus.complete=true.
	// The server will keep a buffer of responses in case a response is lost, and
	// ReattachExecute needs to back-track.
	//
	// If false, the execution response stream will not be reattachable, and all responses are
	// immediately released by the server after being sent.
	Reattachable bool `protobuf:"varint,1,opt,name=reattachable,proto3" json:"reattachable,omitempty"`
}

func (x *ReattachOptions) Reset() {
	*x = ReattachOptions{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[15]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReattachOptions) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReattachOptions) ProtoMessage() {}

func (x *ReattachOptions) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[15]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReattachOptions.ProtoReflect.Descriptor instead.
func (*ReattachOptions) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{15}
}

func (x *ReattachOptions) GetReattachable() bool {
	if x != nil {
		return x.Reattachable
	}
	return false
}

// ReattachExecuteRequest reattaches to an existing operation's response
// stream, optionally resuming after the last response id already processed.
type ReattachExecuteRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	//
	// The session_id of the request to reattach to.
	// This must be an id of existing session.
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,6,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// (Required) User context
	//
	// user_context.user_id and session_id both identify a unique remote spark session on the
	// server side.
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// (Required)
	// Provide an id of the request to reattach to.
	// This must be an id of existing operation.
	OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,4,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
	// (Optional)
	// Last already processed response id from the response stream.
	// After reattach, server will resume the response stream after that response.
	// If not specified, server will restart the stream from the start.
	//
	// Note: server controls the amount of responses that it buffers and it may drop responses,
	// that are far behind the latest returned response, so this can't be used to arbitrarily
	// scroll back the cursor. If the response is no longer available, this will result in an error.
	LastResponseId *string `protobuf:"bytes,5,opt,name=last_response_id,json=lastResponseId,proto3,oneof" json:"last_response_id,omitempty"`
}

func (x *ReattachExecuteRequest) Reset() {
	*x = ReattachExecuteRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[16]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReattachExecuteRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReattachExecuteRequest) ProtoMessage() {}

func (x *ReattachExecuteRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[16]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReattachExecuteRequest.ProtoReflect.Descriptor instead.
func (*ReattachExecuteRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{16}
}

func (x *ReattachExecuteRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ReattachExecuteRequest) GetClientObservedServerSideSessionId() string {
	if x != nil && x.ClientObservedServerSideSessionId != nil {
		return *x.ClientObservedServerSideSessionId
	}
	return ""
}

func (x *ReattachExecuteRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *ReattachExecuteRequest) GetOperationId() string {
	if x != nil {
		return x.OperationId
	}
	return ""
}

func (x *ReattachExecuteRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

func (x *ReattachExecuteRequest) GetLastResponseId() string {
	if x != nil && x.LastResponseId != nil {
		return *x.LastResponseId
	}
	return ""
}

// ReleaseExecuteRequest releases server-side buffered responses of a
// reattachable execution — either all of them, or all up to a response id.
type ReleaseExecuteRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	//
	// The session_id of the request to reattach to.
	// This must be an id of existing session.
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,7,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// (Required) User context
	//
	// user_context.user_id and session_id both identify a unique remote spark session on the
	// server side.
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// (Required)
	// Provide an id of the request to reattach to.
	// This must be an id of existing operation.
	OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,4,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
	// Types that are assignable to Release:
	//
	//	*ReleaseExecuteRequest_ReleaseAll_
	//	*ReleaseExecuteRequest_ReleaseUntil_
	Release isReleaseExecuteRequest_Release `protobuf_oneof:"release"`
}

func (x *ReleaseExecuteRequest) Reset() {
	*x = ReleaseExecuteRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[17]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReleaseExecuteRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReleaseExecuteRequest) ProtoMessage() {}

func (x *ReleaseExecuteRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[17]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReleaseExecuteRequest.ProtoReflect.Descriptor instead.
func (*ReleaseExecuteRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{17}
}

func (x *ReleaseExecuteRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ReleaseExecuteRequest) GetClientObservedServerSideSessionId() string {
	if x != nil && x.ClientObservedServerSideSessionId != nil {
		return *x.ClientObservedServerSideSessionId
	}
	return ""
}

func (x *ReleaseExecuteRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *ReleaseExecuteRequest) GetOperationId() string {
	if x != nil {
		return x.OperationId
	}
	return ""
}

func (x *ReleaseExecuteRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

func (m *ReleaseExecuteRequest) GetRelease() isReleaseExecuteRequest_Release {
	if m != nil {
		return m.Release
	}
	return nil
}

func (x *ReleaseExecuteRequest) GetReleaseAll() *ReleaseExecuteRequest_ReleaseAll {
	if x, ok := x.GetRelease().(*ReleaseExecuteRequest_ReleaseAll_); ok {
		return x.ReleaseAll
	}
	return nil
}

func (x *ReleaseExecuteRequest) GetReleaseUntil() *ReleaseExecuteRequest_ReleaseUntil {
	if x, ok := x.GetRelease().(*ReleaseExecuteRequest_ReleaseUntil_); ok {
		return x.ReleaseUntil
	}
	return nil
}

// isReleaseExecuteRequest_Release is the closed interface implemented by the
// oneof wrapper types below.
type isReleaseExecuteRequest_Release interface {
	isReleaseExecuteRequest_Release()
}

type ReleaseExecuteRequest_ReleaseAll_ struct {
	ReleaseAll *ReleaseExecuteRequest_ReleaseAll `protobuf:"bytes,5,opt,name=release_all,json=releaseAll,proto3,oneof"`
}

type ReleaseExecuteRequest_ReleaseUntil_ struct {
	ReleaseUntil *ReleaseExecuteRequest_ReleaseUntil `protobuf:"bytes,6,opt,name=release_until,json=releaseUntil,proto3,oneof"`
}

func (*ReleaseExecuteRequest_ReleaseAll_) isReleaseExecuteRequest_Release() {}

func (*ReleaseExecuteRequest_ReleaseUntil_) isReleaseExecuteRequest_Release() {}

// Next ID: 4
type ReleaseExecuteResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Session id in which the release was running.
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// Server-side generated idempotency key that the client can use to assert that the server side
	// session has not changed.
	ServerSideSessionId string `protobuf:"bytes,3,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"`
	// Operation id of the operation on which the release executed.
	// If the operation couldn't be found (because e.g. it was concurrently released), will be unset.
	// Otherwise, it will be equal to the operation_id from request.
	OperationId *string `protobuf:"bytes,2,opt,name=operation_id,json=operationId,proto3,oneof" json:"operation_id,omitempty"`
}

func (x *ReleaseExecuteResponse) Reset() {
	*x = ReleaseExecuteResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[18]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReleaseExecuteResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReleaseExecuteResponse) ProtoMessage() {}

func (x *ReleaseExecuteResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[18]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReleaseExecuteResponse.ProtoReflect.Descriptor instead.
func (*ReleaseExecuteResponse) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{18} } func (x *ReleaseExecuteResponse) GetSessionId() string { if x != nil { return x.SessionId } return "" } func (x *ReleaseExecuteResponse) GetServerSideSessionId() string { if x != nil { return x.ServerSideSessionId } return "" } func (x *ReleaseExecuteResponse) GetOperationId() string { if x != nil && x.OperationId != nil { return *x.OperationId } return "" } type ReleaseSessionRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) // // The session_id of the request to reattach to. // This must be an id of existing session. SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"` // (Required) User context // // user_context.user_id and session+id both identify a unique remote spark session on the // server side. UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"` // Provides optional information about the client sending the request. This field // can be used for language or version specific information and is only intended for // logging purposes and will not be interpreted by the server. ClientType *string `protobuf:"bytes,3,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"` // Signals the server to allow the client to reconnect to the session after it is released. // // By default, the server tombstones the session upon release, preventing reconnections and // fully cleaning the session state. // // If this flag is set to true, the server may permit the client to reconnect to the session // post-release, even if the session state has been cleaned. This can result in missing state, // such as Temporary Views, Temporary UDFs, or the Current Catalog, in the reconnected session. 
	//
	// Use this option sparingly and only when the client fully understands the implications of
	// reconnecting to a released session. The client must ensure that any queries executed do not
	// rely on the session state prior to its release.
	AllowReconnect bool `protobuf:"varint,4,opt,name=allow_reconnect,json=allowReconnect,proto3" json:"allow_reconnect,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *ReleaseSessionRequest) Reset() {
	*x = ReleaseSessionRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[19]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReleaseSessionRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReleaseSessionRequest) ProtoMessage() {}

func (x *ReleaseSessionRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[19]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReleaseSessionRequest.ProtoReflect.Descriptor instead.
func (*ReleaseSessionRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{19}
}

func (x *ReleaseSessionRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ReleaseSessionRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *ReleaseSessionRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

func (x *ReleaseSessionRequest) GetAllowReconnect() bool {
	if x != nil {
		return x.AllowReconnect
	}
	return false
}

// Next ID: 3
type ReleaseSessionResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Session id of the session on which the release executed.
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// Server-side generated idempotency key that the client can use to assert that the server side
	// session has not changed.
	ServerSideSessionId string `protobuf:"bytes,2,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"`
}

func (x *ReleaseSessionResponse) Reset() {
	*x = ReleaseSessionResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[20]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReleaseSessionResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReleaseSessionResponse) ProtoMessage() {}

func (x *ReleaseSessionResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[20]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReleaseSessionResponse.ProtoReflect.Descriptor instead.
func (*ReleaseSessionResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{20}
}

func (x *ReleaseSessionResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *ReleaseSessionResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

type FetchErrorDetailsRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required)
	// The session_id specifies a Spark session for a user identified by user_context.user_id.
	// The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`.
	SessionId string `protobuf:"bytes,1,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// (Optional)
	//
	// Server-side generated idempotency key from the previous responses (if any). Server
	// can use this to validate that the server side session has not changed.
	ClientObservedServerSideSessionId *string `protobuf:"bytes,5,opt,name=client_observed_server_side_session_id,json=clientObservedServerSideSessionId,proto3,oneof" json:"client_observed_server_side_session_id,omitempty"`
	// User context
	UserContext *UserContext `protobuf:"bytes,2,opt,name=user_context,json=userContext,proto3" json:"user_context,omitempty"`
	// (Required)
	// The id of the error.
	ErrorId string `protobuf:"bytes,3,opt,name=error_id,json=errorId,proto3" json:"error_id,omitempty"`
	// Provides optional information about the client sending the request. This field
	// can be used for language or version specific information and is only intended for
	// logging purposes and will not be interpreted by the server.
	ClientType *string `protobuf:"bytes,4,opt,name=client_type,json=clientType,proto3,oneof" json:"client_type,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *FetchErrorDetailsRequest) Reset() {
	*x = FetchErrorDetailsRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[21]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *FetchErrorDetailsRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*FetchErrorDetailsRequest) ProtoMessage() {}

func (x *FetchErrorDetailsRequest) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[21]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use FetchErrorDetailsRequest.ProtoReflect.Descriptor instead.
func (*FetchErrorDetailsRequest) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{21}
}

func (x *FetchErrorDetailsRequest) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *FetchErrorDetailsRequest) GetClientObservedServerSideSessionId() string {
	if x != nil && x.ClientObservedServerSideSessionId != nil {
		return *x.ClientObservedServerSideSessionId
	}
	return ""
}

func (x *FetchErrorDetailsRequest) GetUserContext() *UserContext {
	if x != nil {
		return x.UserContext
	}
	return nil
}

func (x *FetchErrorDetailsRequest) GetErrorId() string {
	if x != nil {
		return x.ErrorId
	}
	return ""
}

func (x *FetchErrorDetailsRequest) GetClientType() string {
	if x != nil && x.ClientType != nil {
		return *x.ClientType
	}
	return ""
}

// Next ID: 5
type FetchErrorDetailsResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Server-side generated idempotency key that the client can use to assert that the server side
	// session has not changed.
	ServerSideSessionId string `protobuf:"bytes,3,opt,name=server_side_session_id,json=serverSideSessionId,proto3" json:"server_side_session_id,omitempty"`
	SessionId           string `protobuf:"bytes,4,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"`
	// The index of the root error in errors. The field will not be set if the error is not found.
	RootErrorIdx *int32 `protobuf:"varint,1,opt,name=root_error_idx,json=rootErrorIdx,proto3,oneof" json:"root_error_idx,omitempty"`
	// A list of errors.
	Errors []*FetchErrorDetailsResponse_Error `protobuf:"bytes,2,rep,name=errors,proto3" json:"errors,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *FetchErrorDetailsResponse) Reset() {
	*x = FetchErrorDetailsResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[22]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *FetchErrorDetailsResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*FetchErrorDetailsResponse) ProtoMessage() {}

func (x *FetchErrorDetailsResponse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[22]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use FetchErrorDetailsResponse.ProtoReflect.Descriptor instead.
func (*FetchErrorDetailsResponse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{22}
}

func (x *FetchErrorDetailsResponse) GetServerSideSessionId() string {
	if x != nil {
		return x.ServerSideSessionId
	}
	return ""
}

func (x *FetchErrorDetailsResponse) GetSessionId() string {
	if x != nil {
		return x.SessionId
	}
	return ""
}

func (x *FetchErrorDetailsResponse) GetRootErrorIdx() int32 {
	if x != nil && x.RootErrorIdx != nil {
		return *x.RootErrorIdx
	}
	return 0
}

func (x *FetchErrorDetailsResponse) GetErrors() []*FetchErrorDetailsResponse_Error {
	if x != nil {
		return x.Errors
	}
	return nil
}

type CheckpointCommandResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan checkpointed.
	Relation *CachedRemoteRelation `protobuf:"bytes,1,opt,name=relation,proto3" json:"relation,omitempty"`
}

func (x *CheckpointCommandResult) Reset() {
	*x = CheckpointCommandResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[23]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *CheckpointCommandResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*CheckpointCommandResult) ProtoMessage() {}

func (x *CheckpointCommandResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[23]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CheckpointCommandResult.ProtoReflect.Descriptor instead.
func (*CheckpointCommandResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{23}
}

func (x *CheckpointCommandResult) GetRelation() *CachedRemoteRelation {
	if x != nil {
		return x.Relation
	}
	return nil
}

type AnalyzePlanRequest_Schema struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to be analyzed.
	Plan *Plan `protobuf:"bytes,1,opt,name=plan,proto3" json:"plan,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *AnalyzePlanRequest_Schema) Reset() {
	*x = AnalyzePlanRequest_Schema{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[24]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_Schema) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_Schema) ProtoMessage() {}

func (x *AnalyzePlanRequest_Schema) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[24]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_Schema.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_Schema) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 0}
}

func (x *AnalyzePlanRequest_Schema) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

// Explains the input plan based on a configurable mode.
type AnalyzePlanRequest_Explain struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to be analyzed.
	Plan *Plan `protobuf:"bytes,1,opt,name=plan,proto3" json:"plan,omitempty"`
	// (Required) For analyzePlan rpc calls, configure the mode to explain plan in strings.
	ExplainMode AnalyzePlanRequest_Explain_ExplainMode `protobuf:"varint,2,opt,name=explain_mode,json=explainMode,proto3,enum=spark.connect.AnalyzePlanRequest_Explain_ExplainMode" json:"explain_mode,omitempty"`
}

func (x *AnalyzePlanRequest_Explain) Reset() {
	*x = AnalyzePlanRequest_Explain{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[25]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_Explain) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_Explain) ProtoMessage() {}

func (x *AnalyzePlanRequest_Explain) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[25]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_Explain.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_Explain) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 1}
}

func (x *AnalyzePlanRequest_Explain) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

func (x *AnalyzePlanRequest_Explain) GetExplainMode() AnalyzePlanRequest_Explain_ExplainMode {
	if x != nil {
		return x.ExplainMode
	}
	return AnalyzePlanRequest_Explain_EXPLAIN_MODE_UNSPECIFIED
}

type AnalyzePlanRequest_TreeString struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to be analyzed.
	Plan *Plan `protobuf:"bytes,1,opt,name=plan,proto3" json:"plan,omitempty"`
	// (Optional) Max level of the schema.
	Level *int32 `protobuf:"varint,2,opt,name=level,proto3,oneof" json:"level,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *AnalyzePlanRequest_TreeString) Reset() {
	*x = AnalyzePlanRequest_TreeString{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[26]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_TreeString) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_TreeString) ProtoMessage() {}

func (x *AnalyzePlanRequest_TreeString) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[26]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_TreeString.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_TreeString) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 2}
}

func (x *AnalyzePlanRequest_TreeString) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

func (x *AnalyzePlanRequest_TreeString) GetLevel() int32 {
	if x != nil && x.Level != nil {
		return *x.Level
	}
	return 0
}

type AnalyzePlanRequest_IsLocal struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to be analyzed.
	Plan *Plan `protobuf:"bytes,1,opt,name=plan,proto3" json:"plan,omitempty"`
}

func (x *AnalyzePlanRequest_IsLocal) Reset() {
	*x = AnalyzePlanRequest_IsLocal{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[27]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_IsLocal) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_IsLocal) ProtoMessage() {}

func (x *AnalyzePlanRequest_IsLocal) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[27]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_IsLocal.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_IsLocal) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 3}
}

func (x *AnalyzePlanRequest_IsLocal) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

type AnalyzePlanRequest_IsStreaming struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to be analyzed.
	Plan *Plan `protobuf:"bytes,1,opt,name=plan,proto3" json:"plan,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *AnalyzePlanRequest_IsStreaming) Reset() {
	*x = AnalyzePlanRequest_IsStreaming{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[28]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_IsStreaming) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_IsStreaming) ProtoMessage() {}

func (x *AnalyzePlanRequest_IsStreaming) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[28]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_IsStreaming.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_IsStreaming) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 4}
}

func (x *AnalyzePlanRequest_IsStreaming) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

type AnalyzePlanRequest_InputFiles struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to be analyzed.
	Plan *Plan `protobuf:"bytes,1,opt,name=plan,proto3" json:"plan,omitempty"`
}

func (x *AnalyzePlanRequest_InputFiles) Reset() {
	*x = AnalyzePlanRequest_InputFiles{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[29]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_InputFiles) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_InputFiles) ProtoMessage() {}

func (x *AnalyzePlanRequest_InputFiles) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[29]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_InputFiles.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_InputFiles) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 5}
}

func (x *AnalyzePlanRequest_InputFiles) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

type AnalyzePlanRequest_SparkVersion struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}

func (x *AnalyzePlanRequest_SparkVersion) Reset() {
	*x = AnalyzePlanRequest_SparkVersion{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[30]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_SparkVersion) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_SparkVersion) ProtoMessage() {}

func (x *AnalyzePlanRequest_SparkVersion) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[30]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_SparkVersion.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_SparkVersion) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 6}
}

type AnalyzePlanRequest_DDLParse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The DDL formatted string to be parsed.
	DdlString string `protobuf:"bytes,1,opt,name=ddl_string,json=ddlString,proto3" json:"ddl_string,omitempty"`
}

func (x *AnalyzePlanRequest_DDLParse) Reset() {
	*x = AnalyzePlanRequest_DDLParse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[31]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_DDLParse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_DDLParse) ProtoMessage() {}

func (x *AnalyzePlanRequest_DDLParse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[31]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_DDLParse.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_DDLParse) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 7}
}

func (x *AnalyzePlanRequest_DDLParse) GetDdlString() string {
	if x != nil {
		return x.DdlString
	}
	return ""
}

// Returns `true` when the logical query plans are equal and therefore return same results.
type AnalyzePlanRequest_SameSemantics struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The plan to be compared.
	TargetPlan *Plan `protobuf:"bytes,1,opt,name=target_plan,json=targetPlan,proto3" json:"target_plan,omitempty"`
	// (Required) The other plan to be compared.
	OtherPlan *Plan `protobuf:"bytes,2,opt,name=other_plan,json=otherPlan,proto3" json:"other_plan,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *AnalyzePlanRequest_SameSemantics) Reset() {
	*x = AnalyzePlanRequest_SameSemantics{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[32]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_SameSemantics) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_SameSemantics) ProtoMessage() {}

func (x *AnalyzePlanRequest_SameSemantics) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[32]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_SameSemantics.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_SameSemantics) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 8}
}

func (x *AnalyzePlanRequest_SameSemantics) GetTargetPlan() *Plan {
	if x != nil {
		return x.TargetPlan
	}
	return nil
}

func (x *AnalyzePlanRequest_SameSemantics) GetOtherPlan() *Plan {
	if x != nil {
		return x.OtherPlan
	}
	return nil
}

type AnalyzePlanRequest_SemanticHash struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to get a hashCode.
	Plan *Plan `protobuf:"bytes,1,opt,name=plan,proto3" json:"plan,omitempty"`
}

func (x *AnalyzePlanRequest_SemanticHash) Reset() {
	*x = AnalyzePlanRequest_SemanticHash{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[33]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_SemanticHash) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_SemanticHash) ProtoMessage() {}

func (x *AnalyzePlanRequest_SemanticHash) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[33]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_SemanticHash.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_SemanticHash) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 9}
}

func (x *AnalyzePlanRequest_SemanticHash) GetPlan() *Plan {
	if x != nil {
		return x.Plan
	}
	return nil
}

type AnalyzePlanRequest_Persist struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to persist.
	Relation *Relation `protobuf:"bytes,1,opt,name=relation,proto3" json:"relation,omitempty"`
	// (Optional) The storage level.
	StorageLevel *StorageLevel `protobuf:"bytes,2,opt,name=storage_level,json=storageLevel,proto3,oneof" json:"storage_level,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *AnalyzePlanRequest_Persist) Reset() {
	*x = AnalyzePlanRequest_Persist{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[34]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_Persist) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_Persist) ProtoMessage() {}

func (x *AnalyzePlanRequest_Persist) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[34]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_Persist.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_Persist) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 10}
}

func (x *AnalyzePlanRequest_Persist) GetRelation() *Relation {
	if x != nil {
		return x.Relation
	}
	return nil
}

func (x *AnalyzePlanRequest_Persist) GetStorageLevel() *StorageLevel {
	if x != nil {
		return x.StorageLevel
	}
	return nil
}

type AnalyzePlanRequest_Unpersist struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to unpersist.
	Relation *Relation `protobuf:"bytes,1,opt,name=relation,proto3" json:"relation,omitempty"`
	// (Optional) Whether to block until all blocks are deleted.
	Blocking *bool `protobuf:"varint,2,opt,name=blocking,proto3,oneof" json:"blocking,omitempty"`
}

func (x *AnalyzePlanRequest_Unpersist) Reset() {
	*x = AnalyzePlanRequest_Unpersist{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[35]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_Unpersist) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_Unpersist) ProtoMessage() {}

func (x *AnalyzePlanRequest_Unpersist) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[35]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_Unpersist.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_Unpersist) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 11}
}

func (x *AnalyzePlanRequest_Unpersist) GetRelation() *Relation {
	if x != nil {
		return x.Relation
	}
	return nil
}

func (x *AnalyzePlanRequest_Unpersist) GetBlocking() bool {
	if x != nil && x.Blocking != nil {
		return *x.Blocking
	}
	return false
}

type AnalyzePlanRequest_GetStorageLevel struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to get the storage level.
	Relation *Relation `protobuf:"bytes,1,opt,name=relation,proto3" json:"relation,omitempty"`
}

// NOTE(review): protoc-gen-go boilerplate; regenerate from the .proto rather than hand-editing.
func (x *AnalyzePlanRequest_GetStorageLevel) Reset() {
	*x = AnalyzePlanRequest_GetStorageLevel{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[36]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_GetStorageLevel) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_GetStorageLevel) ProtoMessage() {}

func (x *AnalyzePlanRequest_GetStorageLevel) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[36]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_GetStorageLevel.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_GetStorageLevel) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 12}
}

func (x *AnalyzePlanRequest_GetStorageLevel) GetRelation() *Relation {
	if x != nil {
		return x.Relation
	}
	return nil
}

type AnalyzePlanRequest_JsonToDDL struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The JSON formatted string to be converted to DDL.
	JsonString string `protobuf:"bytes,1,opt,name=json_string,json=jsonString,proto3" json:"json_string,omitempty"`
}

func (x *AnalyzePlanRequest_JsonToDDL) Reset() {
	*x = AnalyzePlanRequest_JsonToDDL{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[37]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanRequest_JsonToDDL) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanRequest_JsonToDDL) ProtoMessage() {}

func (x *AnalyzePlanRequest_JsonToDDL) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[37]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanRequest_JsonToDDL.ProtoReflect.Descriptor instead.
func (*AnalyzePlanRequest_JsonToDDL) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{2, 13}
}

func (x *AnalyzePlanRequest_JsonToDDL) GetJsonString() string {
	if x != nil {
		return x.JsonString
	}
	return ""
}

type AnalyzePlanResponse_Schema struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Schema *DataType `protobuf:"bytes,1,opt,name=schema,proto3" json:"schema,omitempty"`
}

func (x *AnalyzePlanResponse_Schema) Reset() {
	*x = AnalyzePlanResponse_Schema{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[38]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanResponse_Schema) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanResponse_Schema) ProtoMessage() {}

func (x *AnalyzePlanResponse_Schema) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[38]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanResponse_Schema.ProtoReflect.Descriptor instead.
func (*AnalyzePlanResponse_Schema) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 0}
}

func (x *AnalyzePlanResponse_Schema) GetSchema() *DataType {
	if x != nil {
		return x.Schema
	}
	return nil
}

type AnalyzePlanResponse_Explain struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	ExplainString string `protobuf:"bytes,1,opt,name=explain_string,json=explainString,proto3" json:"explain_string,omitempty"`
}

func (x *AnalyzePlanResponse_Explain) Reset() {
	*x = AnalyzePlanResponse_Explain{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[39]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AnalyzePlanResponse_Explain) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AnalyzePlanResponse_Explain) ProtoMessage() {}

func (x *AnalyzePlanResponse_Explain) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[39]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanResponse_Explain.ProtoReflect.Descriptor instead.
// Descriptor returns the gzipped raw file descriptor together with the index
// path ([]int{3, 1}) locating this nested message within it.
func (*AnalyzePlanResponse_Explain) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 1}
}

// GetExplainString returns the explain output string, or "" when the
// receiver is nil (nil-safe generated getter).
func (x *AnalyzePlanResponse_Explain) GetExplainString() string {
	if x != nil {
		return x.ExplainString
	}
	return ""
}

// AnalyzePlanResponse_TreeString carries a single tree_string payload
// (presumably the plan/schema tree rendering — name-based; confirm against
// base.proto) for an AnalyzePlan response.
type AnalyzePlanResponse_TreeString struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	TreeString string `protobuf:"bytes,1,opt,name=tree_string,json=treeString,proto3" json:"tree_string,omitempty"`
}

// Reset zeroes the message and, when the unsafe fast path is enabled,
// re-associates it with its message info slot (msgTypes[40]).
func (x *AnalyzePlanResponse_TreeString) Reset() {
	*x = AnalyzePlanResponse_TreeString{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[40]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protobuf text formatter.
func (x *AnalyzePlanResponse_TreeString) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage tags the type as a protobuf message.
func (*AnalyzePlanResponse_TreeString) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily storing
// the message info on first use under the unsafe fast path; otherwise it
// falls back to mi.MessageOf.
func (x *AnalyzePlanResponse_TreeString) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[40]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AnalyzePlanResponse_TreeString.ProtoReflect.Descriptor instead.
func (*AnalyzePlanResponse_TreeString) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 2} } func (x *AnalyzePlanResponse_TreeString) GetTreeString() string { if x != nil { return x.TreeString } return "" } type AnalyzePlanResponse_IsLocal struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields IsLocal bool `protobuf:"varint,1,opt,name=is_local,json=isLocal,proto3" json:"is_local,omitempty"` } func (x *AnalyzePlanResponse_IsLocal) Reset() { *x = AnalyzePlanResponse_IsLocal{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[41] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_IsLocal) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_IsLocal) ProtoMessage() {} func (x *AnalyzePlanResponse_IsLocal) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[41] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_IsLocal.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_IsLocal) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 3} } func (x *AnalyzePlanResponse_IsLocal) GetIsLocal() bool { if x != nil { return x.IsLocal } return false } type AnalyzePlanResponse_IsStreaming struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields IsStreaming bool `protobuf:"varint,1,opt,name=is_streaming,json=isStreaming,proto3" json:"is_streaming,omitempty"` } func (x *AnalyzePlanResponse_IsStreaming) Reset() { *x = AnalyzePlanResponse_IsStreaming{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[42] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_IsStreaming) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_IsStreaming) ProtoMessage() {} func (x *AnalyzePlanResponse_IsStreaming) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[42] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_IsStreaming.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_IsStreaming) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 4} } func (x *AnalyzePlanResponse_IsStreaming) GetIsStreaming() bool { if x != nil { return x.IsStreaming } return false } type AnalyzePlanResponse_InputFiles struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // A best-effort snapshot of the files that compose this Dataset Files []string `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` } func (x *AnalyzePlanResponse_InputFiles) Reset() { *x = AnalyzePlanResponse_InputFiles{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[43] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_InputFiles) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_InputFiles) ProtoMessage() {} func (x *AnalyzePlanResponse_InputFiles) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[43] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_InputFiles.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_InputFiles) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 5} } func (x *AnalyzePlanResponse_InputFiles) GetFiles() []string { if x != nil { return x.Files } return nil } type AnalyzePlanResponse_SparkVersion struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` } func (x *AnalyzePlanResponse_SparkVersion) Reset() { *x = AnalyzePlanResponse_SparkVersion{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[44] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_SparkVersion) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_SparkVersion) ProtoMessage() {} func (x *AnalyzePlanResponse_SparkVersion) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[44] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_SparkVersion.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_SparkVersion) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 6} } func (x *AnalyzePlanResponse_SparkVersion) GetVersion() string { if x != nil { return x.Version } return "" } type AnalyzePlanResponse_DDLParse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Parsed *DataType `protobuf:"bytes,1,opt,name=parsed,proto3" json:"parsed,omitempty"` } func (x *AnalyzePlanResponse_DDLParse) Reset() { *x = AnalyzePlanResponse_DDLParse{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[45] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_DDLParse) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_DDLParse) ProtoMessage() {} func (x *AnalyzePlanResponse_DDLParse) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[45] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_DDLParse.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_DDLParse) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 7} } func (x *AnalyzePlanResponse_DDLParse) GetParsed() *DataType { if x != nil { return x.Parsed } return nil } type AnalyzePlanResponse_SameSemantics struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Result bool `protobuf:"varint,1,opt,name=result,proto3" json:"result,omitempty"` } func (x *AnalyzePlanResponse_SameSemantics) Reset() { *x = AnalyzePlanResponse_SameSemantics{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[46] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_SameSemantics) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_SameSemantics) ProtoMessage() {} func (x *AnalyzePlanResponse_SameSemantics) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[46] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_SameSemantics.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_SameSemantics) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 8} } func (x *AnalyzePlanResponse_SameSemantics) GetResult() bool { if x != nil { return x.Result } return false } type AnalyzePlanResponse_SemanticHash struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Result int32 `protobuf:"varint,1,opt,name=result,proto3" json:"result,omitempty"` } func (x *AnalyzePlanResponse_SemanticHash) Reset() { *x = AnalyzePlanResponse_SemanticHash{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[47] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_SemanticHash) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_SemanticHash) ProtoMessage() {} func (x *AnalyzePlanResponse_SemanticHash) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[47] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_SemanticHash.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_SemanticHash) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 9} } func (x *AnalyzePlanResponse_SemanticHash) GetResult() int32 { if x != nil { return x.Result } return 0 } type AnalyzePlanResponse_Persist struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } func (x *AnalyzePlanResponse_Persist) Reset() { *x = AnalyzePlanResponse_Persist{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[48] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_Persist) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_Persist) ProtoMessage() {} func (x *AnalyzePlanResponse_Persist) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[48] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_Persist.ProtoReflect.Descriptor instead. 
func (*AnalyzePlanResponse_Persist) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 10} } type AnalyzePlanResponse_Unpersist struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } func (x *AnalyzePlanResponse_Unpersist) Reset() { *x = AnalyzePlanResponse_Unpersist{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[49] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_Unpersist) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_Unpersist) ProtoMessage() {} func (x *AnalyzePlanResponse_Unpersist) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[49] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_Unpersist.ProtoReflect.Descriptor instead. func (*AnalyzePlanResponse_Unpersist) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 11} } type AnalyzePlanResponse_GetStorageLevel struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The StorageLevel as a result of get_storage_level request. 
StorageLevel *StorageLevel `protobuf:"bytes,1,opt,name=storage_level,json=storageLevel,proto3" json:"storage_level,omitempty"` } func (x *AnalyzePlanResponse_GetStorageLevel) Reset() { *x = AnalyzePlanResponse_GetStorageLevel{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[50] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_GetStorageLevel) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_GetStorageLevel) ProtoMessage() {} func (x *AnalyzePlanResponse_GetStorageLevel) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[50] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_GetStorageLevel.ProtoReflect.Descriptor instead. func (*AnalyzePlanResponse_GetStorageLevel) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 12} } func (x *AnalyzePlanResponse_GetStorageLevel) GetStorageLevel() *StorageLevel { if x != nil { return x.StorageLevel } return nil } type AnalyzePlanResponse_JsonToDDL struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields DdlString string `protobuf:"bytes,1,opt,name=ddl_string,json=ddlString,proto3" json:"ddl_string,omitempty"` } func (x *AnalyzePlanResponse_JsonToDDL) Reset() { *x = AnalyzePlanResponse_JsonToDDL{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[51] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *AnalyzePlanResponse_JsonToDDL) String() string { return protoimpl.X.MessageStringOf(x) } func (*AnalyzePlanResponse_JsonToDDL) ProtoMessage() {} func (x *AnalyzePlanResponse_JsonToDDL) ProtoReflect() protoreflect.Message { mi := 
&file_spark_connect_base_proto_msgTypes[51] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use AnalyzePlanResponse_JsonToDDL.ProtoReflect.Descriptor instead. func (*AnalyzePlanResponse_JsonToDDL) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{3, 13} } func (x *AnalyzePlanResponse_JsonToDDL) GetDdlString() string { if x != nil { return x.DdlString } return "" } type ExecutePlanRequest_RequestOption struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to RequestOption: // // *ExecutePlanRequest_RequestOption_ReattachOptions // *ExecutePlanRequest_RequestOption_Extension RequestOption isExecutePlanRequest_RequestOption_RequestOption `protobuf_oneof:"request_option"` } func (x *ExecutePlanRequest_RequestOption) Reset() { *x = ExecutePlanRequest_RequestOption{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[52] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanRequest_RequestOption) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutePlanRequest_RequestOption) ProtoMessage() {} func (x *ExecutePlanRequest_RequestOption) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[52] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutePlanRequest_RequestOption.ProtoReflect.Descriptor instead. 
// Descriptor returns the gzipped raw file descriptor together with the index
// path ([]int{4, 0}) locating this nested message within it.
func (*ExecutePlanRequest_RequestOption) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{4, 0}
}

// GetRequestOption returns whichever oneof wrapper is populated, or nil when
// the oneof is unset or the receiver itself is nil.
func (m *ExecutePlanRequest_RequestOption) GetRequestOption() isExecutePlanRequest_RequestOption_RequestOption {
	if m != nil {
		return m.RequestOption
	}
	return nil
}

// GetReattachOptions returns the ReattachOptions payload when that oneof
// case is set, otherwise nil.
func (x *ExecutePlanRequest_RequestOption) GetReattachOptions() *ReattachOptions {
	if x, ok := x.GetRequestOption().(*ExecutePlanRequest_RequestOption_ReattachOptions); ok {
		return x.ReattachOptions
	}
	return nil
}

// GetExtension returns the Any extension payload when that oneof case is
// set, otherwise nil.
func (x *ExecutePlanRequest_RequestOption) GetExtension() *anypb.Any {
	if x, ok := x.GetRequestOption().(*ExecutePlanRequest_RequestOption_Extension); ok {
		return x.Extension
	}
	return nil
}

// isExecutePlanRequest_RequestOption_RequestOption is the sealed marker
// interface implemented by every wrapper of the request_option oneof.
type isExecutePlanRequest_RequestOption_RequestOption interface {
	isExecutePlanRequest_RequestOption_RequestOption()
}

// ExecutePlanRequest_RequestOption_ReattachOptions wraps the
// reattach_options oneof case (field 1).
type ExecutePlanRequest_RequestOption_ReattachOptions struct {
	ReattachOptions *ReattachOptions `protobuf:"bytes,1,opt,name=reattach_options,json=reattachOptions,proto3,oneof"`
}

// ExecutePlanRequest_RequestOption_Extension wraps the extension oneof case
// (field 999).
type ExecutePlanRequest_RequestOption_Extension struct {
	// Extension type for request options
	Extension *anypb.Any `protobuf:"bytes,999,opt,name=extension,proto3,oneof"`
}

func (*ExecutePlanRequest_RequestOption_ReattachOptions) isExecutePlanRequest_RequestOption_RequestOption() {
}

func (*ExecutePlanRequest_RequestOption_Extension) isExecutePlanRequest_RequestOption_RequestOption() {
}

// A SQL command returns an opaque Relation that can be directly used as input for the next
// call.
type ExecutePlanResponse_SqlCommandResult struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Relation *Relation `protobuf:"bytes,1,opt,name=relation,proto3" json:"relation,omitempty"` } func (x *ExecutePlanResponse_SqlCommandResult) Reset() { *x = ExecutePlanResponse_SqlCommandResult{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[53] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanResponse_SqlCommandResult) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutePlanResponse_SqlCommandResult) ProtoMessage() {} func (x *ExecutePlanResponse_SqlCommandResult) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[53] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutePlanResponse_SqlCommandResult.ProtoReflect.Descriptor instead. func (*ExecutePlanResponse_SqlCommandResult) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 0} } func (x *ExecutePlanResponse_SqlCommandResult) GetRelation() *Relation { if x != nil { return x.Relation } return nil } // Batch results of metrics. type ExecutePlanResponse_ArrowBatch struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Count rows in `data`. Must match the number of rows inside `data`. RowCount int64 `protobuf:"varint,1,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"` // Serialized Arrow data. Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` // If set, row offset of the start of this ArrowBatch in execution results. 
StartOffset *int64 `protobuf:"varint,3,opt,name=start_offset,json=startOffset,proto3,oneof" json:"start_offset,omitempty"` } func (x *ExecutePlanResponse_ArrowBatch) Reset() { *x = ExecutePlanResponse_ArrowBatch{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[54] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanResponse_ArrowBatch) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutePlanResponse_ArrowBatch) ProtoMessage() {} func (x *ExecutePlanResponse_ArrowBatch) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[54] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutePlanResponse_ArrowBatch.ProtoReflect.Descriptor instead. func (*ExecutePlanResponse_ArrowBatch) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 1} } func (x *ExecutePlanResponse_ArrowBatch) GetRowCount() int64 { if x != nil { return x.RowCount } return 0 } func (x *ExecutePlanResponse_ArrowBatch) GetData() []byte { if x != nil { return x.Data } return nil } func (x *ExecutePlanResponse_ArrowBatch) GetStartOffset() int64 { if x != nil && x.StartOffset != nil { return *x.StartOffset } return 0 } type ExecutePlanResponse_Metrics struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Metrics []*ExecutePlanResponse_Metrics_MetricObject `protobuf:"bytes,1,rep,name=metrics,proto3" json:"metrics,omitempty"` } func (x *ExecutePlanResponse_Metrics) Reset() { *x = ExecutePlanResponse_Metrics{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[55] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanResponse_Metrics) String() string { return 
protoimpl.X.MessageStringOf(x) } func (*ExecutePlanResponse_Metrics) ProtoMessage() {} func (x *ExecutePlanResponse_Metrics) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[55] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutePlanResponse_Metrics.ProtoReflect.Descriptor instead. func (*ExecutePlanResponse_Metrics) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 2} } func (x *ExecutePlanResponse_Metrics) GetMetrics() []*ExecutePlanResponse_Metrics_MetricObject { if x != nil { return x.Metrics } return nil } type ExecutePlanResponse_ObservedMetrics struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Values []*Expression_Literal `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` Keys []string `protobuf:"bytes,3,rep,name=keys,proto3" json:"keys,omitempty"` PlanId int64 `protobuf:"varint,4,opt,name=plan_id,json=planId,proto3" json:"plan_id,omitempty"` } func (x *ExecutePlanResponse_ObservedMetrics) Reset() { *x = ExecutePlanResponse_ObservedMetrics{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[56] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanResponse_ObservedMetrics) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutePlanResponse_ObservedMetrics) ProtoMessage() {} func (x *ExecutePlanResponse_ObservedMetrics) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[56] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return 
mi.MessageOf(x) } // Deprecated: Use ExecutePlanResponse_ObservedMetrics.ProtoReflect.Descriptor instead. func (*ExecutePlanResponse_ObservedMetrics) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 3} } func (x *ExecutePlanResponse_ObservedMetrics) GetName() string { if x != nil { return x.Name } return "" } func (x *ExecutePlanResponse_ObservedMetrics) GetValues() []*Expression_Literal { if x != nil { return x.Values } return nil } func (x *ExecutePlanResponse_ObservedMetrics) GetKeys() []string { if x != nil { return x.Keys } return nil } func (x *ExecutePlanResponse_ObservedMetrics) GetPlanId() int64 { if x != nil { return x.PlanId } return 0 } type ExecutePlanResponse_ResultComplete struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } func (x *ExecutePlanResponse_ResultComplete) Reset() { *x = ExecutePlanResponse_ResultComplete{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[57] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanResponse_ResultComplete) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutePlanResponse_ResultComplete) ProtoMessage() {} func (x *ExecutePlanResponse_ResultComplete) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[57] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutePlanResponse_ResultComplete.ProtoReflect.Descriptor instead. func (*ExecutePlanResponse_ResultComplete) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 4} } // This message is used to communicate progress about the query progress during the execution. 
type ExecutePlanResponse_ExecutionProgress struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Captures the progress of each individual stage. Stages []*ExecutePlanResponse_ExecutionProgress_StageInfo `protobuf:"bytes,1,rep,name=stages,proto3" json:"stages,omitempty"` // Captures the currently in progress tasks. NumInflightTasks int64 `protobuf:"varint,2,opt,name=num_inflight_tasks,json=numInflightTasks,proto3" json:"num_inflight_tasks,omitempty"` } func (x *ExecutePlanResponse_ExecutionProgress) Reset() { *x = ExecutePlanResponse_ExecutionProgress{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[58] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanResponse_ExecutionProgress) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutePlanResponse_ExecutionProgress) ProtoMessage() {} func (x *ExecutePlanResponse_ExecutionProgress) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[58] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutePlanResponse_ExecutionProgress.ProtoReflect.Descriptor instead. 
// Descriptor returns the gzipped raw file descriptor together with the index
// path ([]int{5, 5}) locating this nested message within it.
func (*ExecutePlanResponse_ExecutionProgress) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 5}
}

// GetStages returns the per-stage progress entries, or nil when the receiver
// is nil (nil-safe generated getter).
func (x *ExecutePlanResponse_ExecutionProgress) GetStages() []*ExecutePlanResponse_ExecutionProgress_StageInfo {
	if x != nil {
		return x.Stages
	}
	return 0
}

// GetNumInflightTasks returns the count of currently in-progress tasks, or 0
// when the receiver is nil.
func (x *ExecutePlanResponse_ExecutionProgress) GetNumInflightTasks() int64 {
	if x != nil {
		return x.NumInflightTasks
	}
	return 0
}

// ExecutePlanResponse_Metrics_MetricObject describes one metric entry: a
// name, a plan id, a parent id (presumably the parent plan's id — confirm
// against base.proto), and a map of named execution metric values.
type ExecutePlanResponse_Metrics_MetricObject struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Name             string                                              `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	PlanId           int64                                               `protobuf:"varint,2,opt,name=plan_id,json=planId,proto3" json:"plan_id,omitempty"`
	Parent           int64                                               `protobuf:"varint,3,opt,name=parent,proto3" json:"parent,omitempty"`
	ExecutionMetrics map[string]*ExecutePlanResponse_Metrics_MetricValue `protobuf:"bytes,4,rep,name=execution_metrics,json=executionMetrics,proto3" json:"execution_metrics,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

// Reset zeroes the message and, when the unsafe fast path is enabled,
// re-associates it with its message info slot (msgTypes[59]).
func (x *ExecutePlanResponse_Metrics_MetricObject) Reset() {
	*x = ExecutePlanResponse_Metrics_MetricObject{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[59]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protobuf text formatter.
func (x *ExecutePlanResponse_Metrics_MetricObject) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage tags the type as a protobuf message.
func (*ExecutePlanResponse_Metrics_MetricObject) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily storing
// the message info on first use under the unsafe fast path; otherwise it
// falls back to mi.MessageOf.
func (x *ExecutePlanResponse_Metrics_MetricObject) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[59]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ExecutePlanResponse_Metrics_MetricObject.ProtoReflect.Descriptor instead.
func (*ExecutePlanResponse_Metrics_MetricObject) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 2, 0} } func (x *ExecutePlanResponse_Metrics_MetricObject) GetName() string { if x != nil { return x.Name } return "" } func (x *ExecutePlanResponse_Metrics_MetricObject) GetPlanId() int64 { if x != nil { return x.PlanId } return 0 } func (x *ExecutePlanResponse_Metrics_MetricObject) GetParent() int64 { if x != nil { return x.Parent } return 0 } func (x *ExecutePlanResponse_Metrics_MetricObject) GetExecutionMetrics() map[string]*ExecutePlanResponse_Metrics_MetricValue { if x != nil { return x.ExecutionMetrics } return nil } type ExecutePlanResponse_Metrics_MetricValue struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Value int64 `protobuf:"varint,2,opt,name=value,proto3" json:"value,omitempty"` MetricType string `protobuf:"bytes,3,opt,name=metric_type,json=metricType,proto3" json:"metric_type,omitempty"` } func (x *ExecutePlanResponse_Metrics_MetricValue) Reset() { *x = ExecutePlanResponse_Metrics_MetricValue{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_base_proto_msgTypes[60] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutePlanResponse_Metrics_MetricValue) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutePlanResponse_Metrics_MetricValue) ProtoMessage() {} func (x *ExecutePlanResponse_Metrics_MetricValue) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_base_proto_msgTypes[60] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutePlanResponse_Metrics_MetricValue.ProtoReflect.Descriptor instead. 
func (*ExecutePlanResponse_Metrics_MetricValue) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 2, 1}
}

func (x *ExecutePlanResponse_Metrics_MetricValue) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *ExecutePlanResponse_Metrics_MetricValue) GetValue() int64 {
	if x != nil {
		return x.Value
	}
	return 0
}

func (x *ExecutePlanResponse_Metrics_MetricValue) GetMetricType() string {
	if x != nil {
		return x.MetricType
	}
	return ""
}

// ExecutePlanResponse_ExecutionProgress_StageInfo is a generated protobuf message
// type (protoc-gen-go; do not edit by hand): progress counters for a single stage.
type ExecutePlanResponse_ExecutionProgress_StageInfo struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	StageId           int64 `protobuf:"varint,1,opt,name=stage_id,json=stageId,proto3" json:"stage_id,omitempty"`
	NumTasks          int64 `protobuf:"varint,2,opt,name=num_tasks,json=numTasks,proto3" json:"num_tasks,omitempty"`
	NumCompletedTasks int64 `protobuf:"varint,3,opt,name=num_completed_tasks,json=numCompletedTasks,proto3" json:"num_completed_tasks,omitempty"`
	InputBytesRead    int64 `protobuf:"varint,4,opt,name=input_bytes_read,json=inputBytesRead,proto3" json:"input_bytes_read,omitempty"`
	Done              bool  `protobuf:"varint,5,opt,name=done,proto3" json:"done,omitempty"`
}

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) Reset() {
	*x = ExecutePlanResponse_ExecutionProgress_StageInfo{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[62]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ExecutePlanResponse_ExecutionProgress_StageInfo) ProtoMessage() {}

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[62]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ExecutePlanResponse_ExecutionProgress_StageInfo.ProtoReflect.Descriptor instead.
func (*ExecutePlanResponse_ExecutionProgress_StageInfo) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{5, 5, 0}
}

// Generated getters below are safe to call on a nil receiver.

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) GetStageId() int64 {
	if x != nil {
		return x.StageId
	}
	return 0
}

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) GetNumTasks() int64 {
	if x != nil {
		return x.NumTasks
	}
	return 0
}

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) GetNumCompletedTasks() int64 {
	if x != nil {
		return x.NumCompletedTasks
	}
	return 0
}

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) GetInputBytesRead() int64 {
	if x != nil {
		return x.InputBytesRead
	}
	return 0
}

func (x *ExecutePlanResponse_ExecutionProgress_StageInfo) GetDone() bool {
	if x != nil {
		return x.Done
	}
	return false
}

// ConfigRequest_Operation is a generated protobuf message type (protoc-gen-go;
// do not edit by hand). Exactly one of the oneof wrapper types below is carried
// in OpType, selecting which config operation to perform.
type ConfigRequest_Operation struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to OpType:
	//
	//	*ConfigRequest_Operation_Set
	//	*ConfigRequest_Operation_Get
	//	*ConfigRequest_Operation_GetWithDefault
	//	*ConfigRequest_Operation_GetOption
	//	*ConfigRequest_Operation_GetAll
	//	*ConfigRequest_Operation_Unset
	//	*ConfigRequest_Operation_IsModifiable
	OpType isConfigRequest_Operation_OpType `protobuf_oneof:"op_type"`
}

func (x *ConfigRequest_Operation) Reset() {
	*x = ConfigRequest_Operation{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[63]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_Operation) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_Operation) ProtoMessage() {}

func (x *ConfigRequest_Operation) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[63]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_Operation.ProtoReflect.Descriptor instead.
func (*ConfigRequest_Operation) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 0}
}

// GetOpType returns the raw oneof value; callers normally use the typed
// getters below, which return nil when a different variant is set.
func (m *ConfigRequest_Operation) GetOpType() isConfigRequest_Operation_OpType {
	if m != nil {
		return m.OpType
	}
	return nil
}

func (x *ConfigRequest_Operation) GetSet() *ConfigRequest_Set {
	if x, ok := x.GetOpType().(*ConfigRequest_Operation_Set); ok {
		return x.Set
	}
	return nil
}

func (x *ConfigRequest_Operation) GetGet() *ConfigRequest_Get {
	if x, ok := x.GetOpType().(*ConfigRequest_Operation_Get); ok {
		return x.Get
	}
	return nil
}

func (x *ConfigRequest_Operation) GetGetWithDefault() *ConfigRequest_GetWithDefault {
	if x, ok := x.GetOpType().(*ConfigRequest_Operation_GetWithDefault); ok {
		return x.GetWithDefault
	}
	return nil
}

func (x *ConfigRequest_Operation) GetGetOption() *ConfigRequest_GetOption {
	if x, ok := x.GetOpType().(*ConfigRequest_Operation_GetOption); ok {
		return x.GetOption
	}
	return nil
}

func (x *ConfigRequest_Operation) GetGetAll() *ConfigRequest_GetAll {
	if x, ok := x.GetOpType().(*ConfigRequest_Operation_GetAll); ok {
		return x.GetAll
	}
	return nil
}

func (x *ConfigRequest_Operation) GetUnset() *ConfigRequest_Unset {
	if x, ok := x.GetOpType().(*ConfigRequest_Operation_Unset); ok {
		return x.Unset
	}
	return nil
}

func (x *ConfigRequest_Operation) GetIsModifiable() *ConfigRequest_IsModifiable {
	if x, ok := x.GetOpType().(*ConfigRequest_Operation_IsModifiable); ok {
		return x.IsModifiable
	}
	return nil
}

// isConfigRequest_Operation_OpType is the sealed marker interface implemented
// by every op_type oneof wrapper.
type isConfigRequest_Operation_OpType interface {
	isConfigRequest_Operation_OpType()
}

type ConfigRequest_Operation_Set struct {
	Set *ConfigRequest_Set `protobuf:"bytes,1,opt,name=set,proto3,oneof"`
}

type ConfigRequest_Operation_Get struct {
	Get *ConfigRequest_Get `protobuf:"bytes,2,opt,name=get,proto3,oneof"`
}
// Remaining oneof wrapper types for ConfigRequest_Operation.OpType (generated
// by protoc-gen-go; do not edit by hand).

type ConfigRequest_Operation_GetWithDefault struct {
	GetWithDefault *ConfigRequest_GetWithDefault `protobuf:"bytes,3,opt,name=get_with_default,json=getWithDefault,proto3,oneof"`
}

type ConfigRequest_Operation_GetOption struct {
	GetOption *ConfigRequest_GetOption `protobuf:"bytes,4,opt,name=get_option,json=getOption,proto3,oneof"`
}

type ConfigRequest_Operation_GetAll struct {
	GetAll *ConfigRequest_GetAll `protobuf:"bytes,5,opt,name=get_all,json=getAll,proto3,oneof"`
}

type ConfigRequest_Operation_Unset struct {
	Unset *ConfigRequest_Unset `protobuf:"bytes,6,opt,name=unset,proto3,oneof"`
}

type ConfigRequest_Operation_IsModifiable struct {
	IsModifiable *ConfigRequest_IsModifiable `protobuf:"bytes,7,opt,name=is_modifiable,json=isModifiable,proto3,oneof"`
}

func (*ConfigRequest_Operation_Set) isConfigRequest_Operation_OpType() {}

func (*ConfigRequest_Operation_Get) isConfigRequest_Operation_OpType() {}

func (*ConfigRequest_Operation_GetWithDefault) isConfigRequest_Operation_OpType() {}

func (*ConfigRequest_Operation_GetOption) isConfigRequest_Operation_OpType() {}

func (*ConfigRequest_Operation_GetAll) isConfigRequest_Operation_OpType() {}

func (*ConfigRequest_Operation_Unset) isConfigRequest_Operation_OpType() {}

func (*ConfigRequest_Operation_IsModifiable) isConfigRequest_Operation_OpType() {}

// ConfigRequest_Set sets config key-value pairs.
type ConfigRequest_Set struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The config key-value pairs to set.
	Pairs []*KeyValue `protobuf:"bytes,1,rep,name=pairs,proto3" json:"pairs,omitempty"`
	// (Optional) Whether to ignore failures.
	Silent *bool `protobuf:"varint,2,opt,name=silent,proto3,oneof" json:"silent,omitempty"`
}

func (x *ConfigRequest_Set) Reset() {
	*x = ConfigRequest_Set{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[64]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_Set) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_Set) ProtoMessage() {}

func (x *ConfigRequest_Set) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[64]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_Set.ProtoReflect.Descriptor instead.
func (*ConfigRequest_Set) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 1}
}

func (x *ConfigRequest_Set) GetPairs() []*KeyValue {
	if x != nil {
		return x.Pairs
	}
	return nil
}

// GetSilent dereferences the optional field, returning false when unset.
func (x *ConfigRequest_Set) GetSilent() bool {
	if x != nil && x.Silent != nil {
		return *x.Silent
	}
	return false
}

// ConfigRequest_Get fetches the values of specific config keys.
type ConfigRequest_Get struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The config keys to get.
	Keys []string `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"`
}

func (x *ConfigRequest_Get) Reset() {
	*x = ConfigRequest_Get{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[65]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_Get) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_Get) ProtoMessage() {}

func (x *ConfigRequest_Get) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[65]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_Get.ProtoReflect.Descriptor instead.
func (*ConfigRequest_Get) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 2}
}

func (x *ConfigRequest_Get) GetKeys() []string {
	if x != nil {
		return x.Keys
	}
	return nil
}

// ConfigRequest_GetWithDefault fetches config values, falling back to
// caller-supplied defaults.
type ConfigRequest_GetWithDefault struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The config key-value pairs to get. The value will be used as the default value.
	Pairs []*KeyValue `protobuf:"bytes,1,rep,name=pairs,proto3" json:"pairs,omitempty"`
}

func (x *ConfigRequest_GetWithDefault) Reset() {
	*x = ConfigRequest_GetWithDefault{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[66]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_GetWithDefault) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_GetWithDefault) ProtoMessage() {}

func (x *ConfigRequest_GetWithDefault) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[66]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_GetWithDefault.ProtoReflect.Descriptor instead.
func (*ConfigRequest_GetWithDefault) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 3}
}

func (x *ConfigRequest_GetWithDefault) GetPairs() []*KeyValue {
	if x != nil {
		return x.Pairs
	}
	return nil
}

// ConfigRequest_GetOption fetches config values that may be absent.
type ConfigRequest_GetOption struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The config keys to get optionally.
	Keys []string `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"`
}

func (x *ConfigRequest_GetOption) Reset() {
	*x = ConfigRequest_GetOption{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[67]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_GetOption) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_GetOption) ProtoMessage() {}

func (x *ConfigRequest_GetOption) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[67]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_GetOption.ProtoReflect.Descriptor instead.
func (*ConfigRequest_GetOption) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 4}
}

func (x *ConfigRequest_GetOption) GetKeys() []string {
	if x != nil {
		return x.Keys
	}
	return nil
}

// ConfigRequest_GetAll fetches all config entries, optionally filtered by prefix.
type ConfigRequest_GetAll struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Optional) The prefix of the config key to get.
	Prefix *string `protobuf:"bytes,1,opt,name=prefix,proto3,oneof" json:"prefix,omitempty"`
}

func (x *ConfigRequest_GetAll) Reset() {
	*x = ConfigRequest_GetAll{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[68]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_GetAll) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_GetAll) ProtoMessage() {}

func (x *ConfigRequest_GetAll) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[68]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_GetAll.ProtoReflect.Descriptor instead.
func (*ConfigRequest_GetAll) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 5}
}

// GetPrefix dereferences the optional field, returning "" when unset.
func (x *ConfigRequest_GetAll) GetPrefix() string {
	if x != nil && x.Prefix != nil {
		return *x.Prefix
	}
	return ""
}

// ConfigRequest_Unset removes config keys.
type ConfigRequest_Unset struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The config keys to unset.
	Keys []string `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"`
}

func (x *ConfigRequest_Unset) Reset() {
	*x = ConfigRequest_Unset{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[69]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_Unset) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_Unset) ProtoMessage() {}

func (x *ConfigRequest_Unset) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[69]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_Unset.ProtoReflect.Descriptor instead.
func (*ConfigRequest_Unset) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 6}
}

func (x *ConfigRequest_Unset) GetKeys() []string {
	if x != nil {
		return x.Keys
	}
	return nil
}

// ConfigRequest_IsModifiable asks whether the given config keys can be changed
// at runtime.
type ConfigRequest_IsModifiable struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The config keys to check the config is modifiable.
	Keys []string `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"`
}

func (x *ConfigRequest_IsModifiable) Reset() {
	*x = ConfigRequest_IsModifiable{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[70]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ConfigRequest_IsModifiable) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ConfigRequest_IsModifiable) ProtoMessage() {}

func (x *ConfigRequest_IsModifiable) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[70]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ConfigRequest_IsModifiable.ProtoReflect.Descriptor instead.
func (*ConfigRequest_IsModifiable) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{7, 7}
}

func (x *ConfigRequest_IsModifiable) GetKeys() []string {
	if x != nil {
		return x.Keys
	}
	return nil
}

// A chunk of an Artifact.
type AddArtifactsRequest_ArtifactChunk struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Data chunk.
	Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"`
	// CRC to allow server to verify integrity of the chunk.
	Crc int64 `protobuf:"varint,2,opt,name=crc,proto3" json:"crc,omitempty"`
}

func (x *AddArtifactsRequest_ArtifactChunk) Reset() {
	*x = AddArtifactsRequest_ArtifactChunk{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[71]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AddArtifactsRequest_ArtifactChunk) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AddArtifactsRequest_ArtifactChunk) ProtoMessage() {}

func (x *AddArtifactsRequest_ArtifactChunk) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[71]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AddArtifactsRequest_ArtifactChunk.ProtoReflect.Descriptor instead.
func (*AddArtifactsRequest_ArtifactChunk) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{9, 0}
}

func (x *AddArtifactsRequest_ArtifactChunk) GetData() []byte {
	if x != nil {
		return x.Data
	}
	return nil
}

func (x *AddArtifactsRequest_ArtifactChunk) GetCrc() int64 {
	if x != nil {
		return x.Crc
	}
	return 0
}

// An artifact that is contained in a single `ArtifactChunk`.
// Generally, this message represents tiny artifacts such as REPL-generated class files.
type AddArtifactsRequest_SingleChunkArtifact struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The name of the artifact is expected in the form of a "Relative Path" that is made up of a
	// sequence of directories and the final file element.
	// Examples of "Relative Path"s: "jars/test.jar", "classes/xyz.class", "abc.xyz", "a/b/X.jar".
	// The server is expected to maintain the hierarchy of files as defined by their name. (i.e
	// The relative path of the file on the server's filesystem will be the same as the name of
	// the provided artifact)
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// A single data chunk.
	Data *AddArtifactsRequest_ArtifactChunk `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"`
}

func (x *AddArtifactsRequest_SingleChunkArtifact) Reset() {
	*x = AddArtifactsRequest_SingleChunkArtifact{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[72]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AddArtifactsRequest_SingleChunkArtifact) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AddArtifactsRequest_SingleChunkArtifact) ProtoMessage() {}

func (x *AddArtifactsRequest_SingleChunkArtifact) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[72]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AddArtifactsRequest_SingleChunkArtifact.ProtoReflect.Descriptor instead.
func (*AddArtifactsRequest_SingleChunkArtifact) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{9, 1}
}

func (x *AddArtifactsRequest_SingleChunkArtifact) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *AddArtifactsRequest_SingleChunkArtifact) GetData() *AddArtifactsRequest_ArtifactChunk {
	if x != nil {
		return x.Data
	}
	return nil
}

// A number of `SingleChunkArtifact` batched into a single RPC.
// AddArtifactsRequest_Batch is a generated protobuf message type (protoc-gen-go;
// do not edit by hand): several single-chunk artifacts sent in one request.
type AddArtifactsRequest_Batch struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Artifacts []*AddArtifactsRequest_SingleChunkArtifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"`
}

func (x *AddArtifactsRequest_Batch) Reset() {
	*x = AddArtifactsRequest_Batch{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[73]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AddArtifactsRequest_Batch) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AddArtifactsRequest_Batch) ProtoMessage() {}

func (x *AddArtifactsRequest_Batch) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[73]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AddArtifactsRequest_Batch.ProtoReflect.Descriptor instead.
func (*AddArtifactsRequest_Batch) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{9, 2}
}

func (x *AddArtifactsRequest_Batch) GetArtifacts() []*AddArtifactsRequest_SingleChunkArtifact {
	if x != nil {
		return x.Artifacts
	}
	return nil
}

// Signals the beginning/start of a chunked artifact.
// A large artifact is transferred through a payload of `BeginChunkedArtifact` followed by a
// sequence of `ArtifactChunk`s.
type AddArtifactsRequest_BeginChunkedArtifact struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Name of the artifact undergoing chunking. Follows the same conventions as the `name` in
	// the `Artifact` message.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Total size of the artifact in bytes.
	TotalBytes int64 `protobuf:"varint,2,opt,name=total_bytes,json=totalBytes,proto3" json:"total_bytes,omitempty"`
	// Number of chunks the artifact is split into.
	// This includes the `initial_chunk`.
	NumChunks int64 `protobuf:"varint,3,opt,name=num_chunks,json=numChunks,proto3" json:"num_chunks,omitempty"`
	// The first/initial chunk.
	InitialChunk *AddArtifactsRequest_ArtifactChunk `protobuf:"bytes,4,opt,name=initial_chunk,json=initialChunk,proto3" json:"initial_chunk,omitempty"`
}

func (x *AddArtifactsRequest_BeginChunkedArtifact) Reset() {
	*x = AddArtifactsRequest_BeginChunkedArtifact{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[74]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AddArtifactsRequest_BeginChunkedArtifact) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AddArtifactsRequest_BeginChunkedArtifact) ProtoMessage() {}

func (x *AddArtifactsRequest_BeginChunkedArtifact) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[74]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AddArtifactsRequest_BeginChunkedArtifact.ProtoReflect.Descriptor instead.
func (*AddArtifactsRequest_BeginChunkedArtifact) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{9, 3}
}

// Generated getters below are safe to call on a nil receiver.

func (x *AddArtifactsRequest_BeginChunkedArtifact) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *AddArtifactsRequest_BeginChunkedArtifact) GetTotalBytes() int64 {
	if x != nil {
		return x.TotalBytes
	}
	return 0
}

func (x *AddArtifactsRequest_BeginChunkedArtifact) GetNumChunks() int64 {
	if x != nil {
		return x.NumChunks
	}
	return 0
}

func (x *AddArtifactsRequest_BeginChunkedArtifact) GetInitialChunk() *AddArtifactsRequest_ArtifactChunk {
	if x != nil {
		return x.InitialChunk
	}
	return nil
}

// Metadata of an artifact.
type AddArtifactsResponse_ArtifactSummary struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Whether the CRC (Cyclic Redundancy Check) is successful on server verification.
	// The server discards any artifact that fails the CRC.
	// If false, the client may choose to resend the artifact specified by `name`.
	IsCrcSuccessful bool `protobuf:"varint,2,opt,name=is_crc_successful,json=isCrcSuccessful,proto3" json:"is_crc_successful,omitempty"`
}

func (x *AddArtifactsResponse_ArtifactSummary) Reset() {
	*x = AddArtifactsResponse_ArtifactSummary{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[75]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AddArtifactsResponse_ArtifactSummary) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AddArtifactsResponse_ArtifactSummary) ProtoMessage() {}

func (x *AddArtifactsResponse_ArtifactSummary) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[75]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AddArtifactsResponse_ArtifactSummary.ProtoReflect.Descriptor instead.
func (*AddArtifactsResponse_ArtifactSummary) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{10, 0}
}

func (x *AddArtifactsResponse_ArtifactSummary) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *AddArtifactsResponse_ArtifactSummary) GetIsCrcSuccessful() bool {
	if x != nil {
		return x.IsCrcSuccessful
	}
	return false
}

// ArtifactStatusesResponse_ArtifactStatus is a generated protobuf message type
// (protoc-gen-go; do not edit by hand): the server-side status of one artifact.
type ArtifactStatusesResponse_ArtifactStatus struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Exists or not particular artifact at the server.
	Exists bool `protobuf:"varint,1,opt,name=exists,proto3" json:"exists,omitempty"`
}

func (x *ArtifactStatusesResponse_ArtifactStatus) Reset() {
	*x = ArtifactStatusesResponse_ArtifactStatus{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[77]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ArtifactStatusesResponse_ArtifactStatus) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ArtifactStatusesResponse_ArtifactStatus) ProtoMessage() {}

func (x *ArtifactStatusesResponse_ArtifactStatus) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[77]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ArtifactStatusesResponse_ArtifactStatus.ProtoReflect.Descriptor instead.
func (*ArtifactStatusesResponse_ArtifactStatus) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{12, 1}
}

func (x *ArtifactStatusesResponse_ArtifactStatus) GetExists() bool {
	if x != nil {
		return x.Exists
	}
	return false
}

// Release and close operation completely.
// This will also interrupt the query if it is running execution, and wait for it to be torn down.
// ReleaseExecuteRequest_ReleaseAll is a generated protobuf message type
// (protoc-gen-go; do not edit by hand). It carries no fields: its presence in
// the request selects the "release everything" variant.
type ReleaseExecuteRequest_ReleaseAll struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}

func (x *ReleaseExecuteRequest_ReleaseAll) Reset() {
	*x = ReleaseExecuteRequest_ReleaseAll{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[78]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReleaseExecuteRequest_ReleaseAll) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReleaseExecuteRequest_ReleaseAll) ProtoMessage() {}

func (x *ReleaseExecuteRequest_ReleaseAll) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[78]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReleaseExecuteRequest_ReleaseAll.ProtoReflect.Descriptor instead.
func (*ReleaseExecuteRequest_ReleaseAll) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{17, 0}
}

// Release all responses from the operation response stream up to and including
// the response with the given by response_id.
// While server determines by itself how much of a buffer of responses to keep, client providing
// explicit release calls will help reduce resource consumption.
// Noop if response_id not found in cached responses.
// ReleaseExecuteRequest_ReleaseUntil is a generated protobuf message type
// (protoc-gen-go; do not edit by hand). See the comment above for semantics.
type ReleaseExecuteRequest_ReleaseUntil struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"`
}

func (x *ReleaseExecuteRequest_ReleaseUntil) Reset() {
	*x = ReleaseExecuteRequest_ReleaseUntil{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[79]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ReleaseExecuteRequest_ReleaseUntil) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ReleaseExecuteRequest_ReleaseUntil) ProtoMessage() {}

func (x *ReleaseExecuteRequest_ReleaseUntil) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[79]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ReleaseExecuteRequest_ReleaseUntil.ProtoReflect.Descriptor instead.
func (*ReleaseExecuteRequest_ReleaseUntil) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{17, 1}
}

func (x *ReleaseExecuteRequest_ReleaseUntil) GetResponseId() string {
	if x != nil {
		return x.ResponseId
	}
	return ""
}

// FetchErrorDetailsResponse_StackTraceElement is a generated protobuf message
// type (protoc-gen-go; do not edit by hand): one frame of a server-side stack trace.
type FetchErrorDetailsResponse_StackTraceElement struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The fully qualified name of the class containing the execution point.
	DeclaringClass string `protobuf:"bytes,1,opt,name=declaring_class,json=declaringClass,proto3" json:"declaring_class,omitempty"`
	// The name of the method containing the execution point.
	MethodName string `protobuf:"bytes,2,opt,name=method_name,json=methodName,proto3" json:"method_name,omitempty"`
	// The name of the file containing the execution point.
	FileName *string `protobuf:"bytes,3,opt,name=file_name,json=fileName,proto3,oneof" json:"file_name,omitempty"`
	// The line number of the source line containing the execution point.
	LineNumber int32 `protobuf:"varint,4,opt,name=line_number,json=lineNumber,proto3" json:"line_number,omitempty"`
}

func (x *FetchErrorDetailsResponse_StackTraceElement) Reset() {
	*x = FetchErrorDetailsResponse_StackTraceElement{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[80]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *FetchErrorDetailsResponse_StackTraceElement) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*FetchErrorDetailsResponse_StackTraceElement) ProtoMessage() {}

func (x *FetchErrorDetailsResponse_StackTraceElement) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[80]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use FetchErrorDetailsResponse_StackTraceElement.ProtoReflect.Descriptor instead.
func (*FetchErrorDetailsResponse_StackTraceElement) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{22, 0}
}

func (x *FetchErrorDetailsResponse_StackTraceElement) GetDeclaringClass() string {
	if x != nil {
		return x.DeclaringClass
	}
	return ""
}

func (x *FetchErrorDetailsResponse_StackTraceElement) GetMethodName() string {
	if x != nil {
		return x.MethodName
	}
	return ""
}

// GetFileName dereferences the optional field, returning "" when unset.
func (x *FetchErrorDetailsResponse_StackTraceElement) GetFileName() string {
	if x != nil && x.FileName != nil {
		return *x.FileName
	}
	return ""
}

func (x *FetchErrorDetailsResponse_StackTraceElement) GetLineNumber() int32 {
	if x != nil {
		return x.LineNumber
	}
	return 0
}

// QueryContext defines the schema for the query context of a SparkThrowable.
// It helps users understand where the error occurs while executing queries.
type FetchErrorDetailsResponse_QueryContext struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Discriminates between the context kinds (e.g. SQL text vs. DataFrame API).
	ContextType FetchErrorDetailsResponse_QueryContext_ContextType `protobuf:"varint,10,opt,name=context_type,json=contextType,proto3,enum=spark.connect.FetchErrorDetailsResponse_QueryContext_ContextType" json:"context_type,omitempty"`
	// The object type of the query which throws the exception.
	// If the exception is directly from the main query, it should be an empty string.
	// Otherwise, it should be the exact object type in upper case. For example, a "VIEW".
	ObjectType string `protobuf:"bytes,1,opt,name=object_type,json=objectType,proto3" json:"object_type,omitempty"`
	// The object name of the query which throws the exception.
	// If the exception is directly from the main query, it should be an empty string.
	// Otherwise, it should be the object name. For example, a view name "V1".
	ObjectName string `protobuf:"bytes,2,opt,name=object_name,json=objectName,proto3" json:"object_name,omitempty"`
	// The starting index in the query text which throws the exception. The index starts from 0.
	StartIndex int32 `protobuf:"varint,3,opt,name=start_index,json=startIndex,proto3" json:"start_index,omitempty"`
	// The stopping index in the query which throws the exception. The index starts from 0.
	StopIndex int32 `protobuf:"varint,4,opt,name=stop_index,json=stopIndex,proto3" json:"stop_index,omitempty"`
	// The corresponding fragment of the query which throws the exception.
	Fragment string `protobuf:"bytes,5,opt,name=fragment,proto3" json:"fragment,omitempty"`
	// The user code (call site of the API) that caused throwing the exception.
	CallSite string `protobuf:"bytes,6,opt,name=call_site,json=callSite,proto3" json:"call_site,omitempty"`
	// Summary of the exception cause.
	Summary string `protobuf:"bytes,7,opt,name=summary,proto3" json:"summary,omitempty"`
}

// Reset restores the message to its zero value and, when the unsafe fast
// path is enabled, re-attaches the cached message info for msgTypes[81].
func (x *FetchErrorDetailsResponse_QueryContext) Reset() {
	*x = FetchErrorDetailsResponse_QueryContext{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[81]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the standard protobuf text format.
func (x *FetchErrorDetailsResponse_QueryContext) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*FetchErrorDetailsResponse_QueryContext) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily
// populating the cached message info on first use.
func (x *FetchErrorDetailsResponse_QueryContext) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[81]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use FetchErrorDetailsResponse_QueryContext.ProtoReflect.Descriptor instead.
func (*FetchErrorDetailsResponse_QueryContext) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{22, 1}
}

// GetContextType returns ContextType, or the enum zero value
// (FetchErrorDetailsResponse_QueryContext_SQL) if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetContextType() FetchErrorDetailsResponse_QueryContext_ContextType {
	if x != nil {
		return x.ContextType
	}
	return FetchErrorDetailsResponse_QueryContext_SQL
}

// GetObjectType returns ObjectType, or "" if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetObjectType() string {
	if x != nil {
		return x.ObjectType
	}
	return ""
}

// GetObjectName returns ObjectName, or "" if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetObjectName() string {
	if x != nil {
		return x.ObjectName
	}
	return ""
}

// GetStartIndex returns StartIndex, or 0 if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetStartIndex() int32 {
	if x != nil {
		return x.StartIndex
	}
	return 0
}

// GetStopIndex returns StopIndex, or 0 if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetStopIndex() int32 {
	if x != nil {
		return x.StopIndex
	}
	return 0
}

// GetFragment returns Fragment, or "" if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetFragment() string {
	if x != nil {
		return x.Fragment
	}
	return ""
}

// GetCallSite returns CallSite, or "" if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetCallSite() string {
	if x != nil {
		return x.CallSite
	}
	return ""
}

// GetSummary returns Summary, or "" if the receiver is nil.
func (x *FetchErrorDetailsResponse_QueryContext) GetSummary() string {
	if x != nil {
		return x.Summary
	}
	return ""
}

// SparkThrowable defines the schema for SparkThrowable exceptions.
type FetchErrorDetailsResponse_SparkThrowable struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Succinct, human-readable, unique, and consistent representation of the error category.
	ErrorClass *string `protobuf:"bytes,1,opt,name=error_class,json=errorClass,proto3,oneof" json:"error_class,omitempty"`
	// The message parameters for the error framework.
	MessageParameters map[string]string `protobuf:"bytes,2,rep,name=message_parameters,json=messageParameters,proto3" json:"message_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// The query context of a SparkThrowable.
	QueryContexts []*FetchErrorDetailsResponse_QueryContext `protobuf:"bytes,3,rep,name=query_contexts,json=queryContexts,proto3" json:"query_contexts,omitempty"`
	// Portable error identifier across SQL engines
	// If null, error class or SQLSTATE is not set.
	SqlState *string `protobuf:"bytes,4,opt,name=sql_state,json=sqlState,proto3,oneof" json:"sql_state,omitempty"`
}

// Reset restores the message to its zero value and, when the unsafe fast
// path is enabled, re-attaches the cached message info for msgTypes[82].
func (x *FetchErrorDetailsResponse_SparkThrowable) Reset() {
	*x = FetchErrorDetailsResponse_SparkThrowable{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[82]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the standard protobuf text format.
func (x *FetchErrorDetailsResponse_SparkThrowable) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*FetchErrorDetailsResponse_SparkThrowable) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily
// populating the cached message info on first use.
func (x *FetchErrorDetailsResponse_SparkThrowable) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[82]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use FetchErrorDetailsResponse_SparkThrowable.ProtoReflect.Descriptor instead.
func (*FetchErrorDetailsResponse_SparkThrowable) Descriptor() ([]byte, []int) {
	return file_spark_connect_base_proto_rawDescGZIP(), []int{22, 2}
}

// GetErrorClass returns the optional ErrorClass, or "" if unset or nil receiver.
func (x *FetchErrorDetailsResponse_SparkThrowable) GetErrorClass() string {
	if x != nil && x.ErrorClass != nil {
		return *x.ErrorClass
	}
	return ""
}

// GetMessageParameters returns MessageParameters, or nil if the receiver is nil.
func (x *FetchErrorDetailsResponse_SparkThrowable) GetMessageParameters() map[string]string {
	if x != nil {
		return x.MessageParameters
	}
	return nil
}

// GetQueryContexts returns QueryContexts, or nil if the receiver is nil.
func (x *FetchErrorDetailsResponse_SparkThrowable) GetQueryContexts() []*FetchErrorDetailsResponse_QueryContext {
	if x != nil {
		return x.QueryContexts
	}
	return nil
}

// GetSqlState returns the optional SqlState, or "" if unset or nil receiver.
func (x *FetchErrorDetailsResponse_SparkThrowable) GetSqlState() string {
	if x != nil && x.SqlState != nil {
		return *x.SqlState
	}
	return ""
}

// Error defines the schema for the representing exception.
type FetchErrorDetailsResponse_Error struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The fully qualified names of the exception class and its parent classes.
	ErrorTypeHierarchy []string `protobuf:"bytes,1,rep,name=error_type_hierarchy,json=errorTypeHierarchy,proto3" json:"error_type_hierarchy,omitempty"`
	// The detailed message of the exception.
	Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
	// The stackTrace of the exception. It will be set
	// if the SQLConf spark.sql.connect.serverStacktrace.enabled is true.
	StackTrace []*FetchErrorDetailsResponse_StackTraceElement `protobuf:"bytes,3,rep,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"`
	// The index of the cause error in errors.
	CauseIdx *int32 `protobuf:"varint,4,opt,name=cause_idx,json=causeIdx,proto3,oneof" json:"cause_idx,omitempty"`
	// The structured data of a SparkThrowable exception.
	SparkThrowable *FetchErrorDetailsResponse_SparkThrowable `protobuf:"bytes,5,opt,name=spark_throwable,json=sparkThrowable,proto3,oneof" json:"spark_throwable,omitempty"`
}

// Reset restores the message to its zero value and, when the unsafe fast
// path is enabled, re-attaches the cached message info for msgTypes[83].
func (x *FetchErrorDetailsResponse_Error) Reset() {
	*x = FetchErrorDetailsResponse_Error{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_base_proto_msgTypes[83]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the standard protobuf text format.
func (x *FetchErrorDetailsResponse_Error) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*FetchErrorDetailsResponse_Error) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily
// populating the cached message info on first use.
func (x *FetchErrorDetailsResponse_Error) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_base_proto_msgTypes[83]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use FetchErrorDetailsResponse_Error.ProtoReflect.Descriptor instead.
func (*FetchErrorDetailsResponse_Error) Descriptor() ([]byte, []int) { return file_spark_connect_base_proto_rawDescGZIP(), []int{22, 3} } func (x *FetchErrorDetailsResponse_Error) GetErrorTypeHierarchy() []string { if x != nil { return x.ErrorTypeHierarchy } return nil } func (x *FetchErrorDetailsResponse_Error) GetMessage() string { if x != nil { return x.Message } return "" } func (x *FetchErrorDetailsResponse_Error) GetStackTrace() []*FetchErrorDetailsResponse_StackTraceElement { if x != nil { return x.StackTrace } return nil } func (x *FetchErrorDetailsResponse_Error) GetCauseIdx() int32 { if x != nil && x.CauseIdx != nil { return *x.CauseIdx } return 0 } func (x *FetchErrorDetailsResponse_Error) GetSparkThrowable() *FetchErrorDetailsResponse_SparkThrowable { if x != nil { return x.SparkThrowable } return nil } var File_spark_connect_base_proto protoreflect.FileDescriptor var file_spark_connect_base_proto_rawDesc = []byte{ 0x0a, 0x18, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1a, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x72, 
0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x16, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x6d, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x74, 0x0a, 0x04, 0x50, 0x6c, 0x61, 0x6e, 0x12, 0x2d, 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x12, 0x32, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x42, 0x09, 0x0a, 0x07, 0x6f, 0x70, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x7a, 0x0a, 0x0b, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 
0x73, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xf5, 0x14, 0x0a, 0x12, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x42, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x48, 0x00, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x45, 0x0a, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 
0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x48, 0x00, 0x52, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x12, 0x4f, 0x0a, 0x0b, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x54, 0x72, 0x65, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x0a, 0x74, 0x72, 0x65, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x46, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x07, 0x69, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x12, 0x52, 0x0a, 0x0c, 0x69, 0x73, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x0b, 0x69, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x4f, 0x0a, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 
0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x55, 0x0a, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x49, 0x0a, 0x09, 0x64, 0x64, 0x6c, 0x5f, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x44, 0x44, 0x4c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x48, 0x00, 0x52, 0x08, 0x64, 0x64, 0x6c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x58, 0x0a, 0x0e, 0x73, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x61, 0x6d, 0x65, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x73, 0x61, 0x6d, 0x65, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x12, 0x55, 0x0a, 0x0d, 0x73, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 
0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x48, 0x61, 0x73, 0x68, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x48, 0x61, 0x73, 0x68, 0x12, 0x45, 0x0a, 0x07, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x50, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x07, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x12, 0x4b, 0x0a, 0x09, 0x75, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x55, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x09, 0x75, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x12, 0x5f, 0x0a, 0x11, 0x67, 0x65, 0x74, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x48, 0x00, 0x52, 0x0f, 0x67, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x4d, 0x0a, 0x0b, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x5f, 0x64, 0x64, 0x6c, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 
0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4a, 0x73, 0x6f, 0x6e, 0x54, 0x6f, 0x44, 0x44, 0x4c, 0x48, 0x00, 0x52, 0x09, 0x6a, 0x73, 0x6f, 0x6e, 0x54, 0x6f, 0x44, 0x64, 0x6c, 0x1a, 0x31, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x1a, 0xbb, 0x02, 0x0a, 0x07, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x58, 0x0a, 0x0c, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x35, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x22, 0xac, 0x01, 0x0a, 0x0b, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x1c, 0x0a, 0x18, 0x45, 0x58, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x17, 0x0a, 0x13, 0x45, 0x58, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x53, 0x49, 0x4d, 0x50, 0x4c, 0x45, 0x10, 0x01, 0x12, 0x19, 0x0a, 0x15, 0x45, 0x58, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x45, 0x58, 0x54, 0x45, 0x4e, 0x44, 0x45, 0x44, 0x10, 0x02, 0x12, 0x18, 0x0a, 0x14, 0x45, 
0x58, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x43, 0x4f, 0x44, 0x45, 0x47, 0x45, 0x4e, 0x10, 0x03, 0x12, 0x15, 0x0a, 0x11, 0x45, 0x58, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x43, 0x4f, 0x53, 0x54, 0x10, 0x04, 0x12, 0x1a, 0x0a, 0x16, 0x45, 0x58, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x54, 0x45, 0x44, 0x10, 0x05, 0x1a, 0x5a, 0x0a, 0x0a, 0x54, 0x72, 0x65, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x1a, 0x32, 0x0a, 0x07, 0x49, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x1a, 0x36, 0x0a, 0x0b, 0x49, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x1a, 0x35, 0x0a, 0x0a, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x1a, 0x0e, 0x0a, 0x0c, 0x53, 0x70, 
0x61, 0x72, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x29, 0x0a, 0x08, 0x44, 0x44, 0x4c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x64, 0x6c, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x64, 0x6c, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x1a, 0x79, 0x0a, 0x0d, 0x53, 0x61, 0x6d, 0x65, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x12, 0x34, 0x0a, 0x0b, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x0a, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x50, 0x6c, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x0a, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x09, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x50, 0x6c, 0x61, 0x6e, 0x1a, 0x37, 0x0a, 0x0c, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x48, 0x61, 0x73, 0x68, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x1a, 0x97, 0x01, 0x0a, 0x07, 0x50, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x12, 0x33, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x45, 0x0a, 0x0d, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 
0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x1a, 0x6e, 0x0a, 0x09, 0x55, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x12, 0x33, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x0a, 0x08, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x1a, 0x46, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x33, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x2c, 0x0a, 0x09, 0x4a, 0x73, 0x6f, 0x6e, 0x54, 0x6f, 0x44, 0x44, 0x4c, 0x12, 0x1f, 0x0a, 0x0b, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6a, 0x73, 0x6f, 0x6e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x42, 0x09, 0x0a, 0x07, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 
0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xca, 0x0e, 0x0a, 0x13, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x43, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x48, 0x00, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x46, 0x0a, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x48, 0x00, 0x52, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x12, 0x50, 0x0a, 0x0b, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x54, 0x72, 0x65, 0x65, 0x53, 
0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x0a, 0x74, 0x72, 0x65, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x47, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x49, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x07, 0x69, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x12, 0x53, 0x0a, 0x0c, 0x69, 0x73, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x49, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x0b, 0x69, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x50, 0x0a, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x56, 0x0a, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 
0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x4a, 0x0a, 0x09, 0x64, 0x64, 0x6c, 0x5f, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x44, 0x4c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x48, 0x00, 0x52, 0x08, 0x64, 0x64, 0x6c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x59, 0x0a, 0x0e, 0x73, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x61, 0x6d, 0x65, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x73, 0x61, 0x6d, 0x65, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x12, 0x56, 0x0a, 0x0d, 0x73, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x48, 0x61, 0x73, 0x68, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x48, 0x61, 0x73, 0x68, 0x12, 0x46, 0x0a, 0x07, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x50, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 
0x07, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x12, 0x4c, 0x0a, 0x09, 0x75, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x55, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x09, 0x75, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x12, 0x60, 0x0a, 0x11, 0x67, 0x65, 0x74, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x48, 0x00, 0x52, 0x0f, 0x67, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x4e, 0x0a, 0x0b, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x5f, 0x64, 0x64, 0x6c, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4a, 0x73, 0x6f, 0x6e, 0x54, 0x6f, 0x44, 0x44, 0x4c, 0x48, 0x00, 0x52, 0x09, 0x6a, 0x73, 0x6f, 0x6e, 0x54, 0x6f, 0x44, 0x64, 0x6c, 0x1a, 0x39, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, 0x30, 0x0a, 0x07, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 
0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x1a, 0x2d, 0x0a, 0x0a, 0x54, 0x72, 0x65, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x72, 0x65, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x1a, 0x24, 0x0a, 0x07, 0x49, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x1a, 0x30, 0x0a, 0x0b, 0x49, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x73, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x69, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x1a, 0x22, 0x0a, 0x0a, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x1a, 0x28, 0x0a, 0x0c, 0x53, 0x70, 0x61, 0x72, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x3b, 0x0a, 0x08, 0x44, 0x44, 0x4c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x1a, 0x27, 0x0a, 0x0d, 0x53, 0x61, 0x6d, 0x65, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x73, 0x12, 0x16, 
0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0x26, 0x0a, 0x0c, 0x53, 0x65, 0x6d, 0x61, 0x6e, 0x74, 0x69, 0x63, 0x48, 0x61, 0x73, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0x09, 0x0a, 0x07, 0x50, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x1a, 0x0b, 0x0a, 0x09, 0x55, 0x6e, 0x70, 0x65, 0x72, 0x73, 0x69, 0x73, 0x74, 0x1a, 0x53, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x40, 0x0a, 0x0d, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x1a, 0x2a, 0x0a, 0x09, 0x4a, 0x73, 0x6f, 0x6e, 0x54, 0x6f, 0x44, 0x44, 0x4c, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x64, 0x6c, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x64, 0x6c, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, 0xa3, 0x05, 0x0a, 0x12, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x21, 0x63, 0x6c, 
0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x26, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x27, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x58, 0x0a, 0x0f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x1a, 0xa5, 0x01, 0x0a, 0x0d, 0x52, 0x65, 
0x71, 0x75, 0x65, 0x73, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4b, 0x0a, 0x10, 0x72, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0f, 0x72, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x35, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x10, 0x0a, 0x0e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xee, 0x18, 0x0a, 0x13, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 
0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x49, 0x64, 0x12, 0x50, 0x0a, 0x0b, 0x61, 0x72, 0x72, 0x6f, 0x77, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x41, 0x72, 0x72, 0x6f, 0x77, 0x42, 0x61, 0x74, 0x63, 0x68, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x72, 0x72, 0x6f, 0x77, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x63, 0x0a, 0x12, 0x73, 0x71, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x10, 0x73, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x7e, 0x0a, 0x23, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x53, 0x74, 0x72, 
0x65, 0x61, 0x6d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x1f, 0x77, 0x72, 0x69, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x71, 0x0a, 0x1e, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x1b, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x6b, 0x0a, 0x1c, 0x67, 0x65, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x19, 0x67, 0x65, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x87, 0x01, 0x0a, 0x26, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 
0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x22, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x87, 0x01, 0x0a, 0x26, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x22, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x5c, 0x0a, 0x0f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x0e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x87, 0x01, 0x0a, 0x26, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x65, 
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x22, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x65, 0x0a, 0x12, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x48, 0x00, 0x52, 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x64, 0x0a, 0x19, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x17, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x4c, 0x0a, 0x11, 
0x6d, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x0f, 0x6d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x58, 0x0a, 0x15, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x13, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x5e, 0x0a, 0x17, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x16, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x15, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x35, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x44, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 
0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x5d, 0x0a, 0x10, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x0f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, 0x47, 0x0a, 0x10, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x33, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x76, 0x0a, 0x0a, 0x41, 0x72, 0x72, 0x6f, 0x77, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x77, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x72, 0x6f, 0x77, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 
0x26, 0x0a, 0x0c, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x88, 0x01, 0x01, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x1a, 0x85, 0x04, 0x0a, 0x07, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x51, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x1a, 0xcc, 0x02, 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x70, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x7a, 0x0a, 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x4d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 
0x74, 0x72, 0x69, 0x63, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x10, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x1a, 0x7b, 0x0a, 0x15, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x4c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x58, 0x0a, 0x0b, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x1a, 0x8d, 0x01, 0x0a, 0x0f, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 
0x6c, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x70, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x1a, 0x10, 0x0a, 0x0e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x1a, 0xcd, 0x02, 0x0a, 0x11, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x56, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x67, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x2e, 0x53, 0x74, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x06, 0x73, 0x74, 0x61, 0x67, 0x65, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x6e, 0x75, 0x6d, 0x5f, 0x69, 0x6e, 0x66, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x10, 0x6e, 0x75, 0x6d, 0x49, 0x6e, 0x66, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x1a, 0xb1, 0x01, 0x0a, 0x09, 0x53, 0x74, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x19, 0x0a, 0x08, 0x73, 0x74, 0x61, 0x67, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x73, 0x74, 0x61, 0x67, 0x65, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x6e, 0x75, 0x6d, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6e, 0x75, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x11, 0x6e, 0x75, 0x6d, 0x43, 
0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x28, 0x0a, 0x10, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x65, 0x61, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x6f, 0x6e, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x04, 0x64, 0x6f, 0x6e, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x41, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x19, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xaf, 0x09, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 
0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x44, 0x0a, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x1a, 0xf2, 0x03, 0x0a, 0x09, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x34, 0x0a, 0x03, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x48, 0x00, 0x52, 0x03, 0x73, 0x65, 0x74, 0x12, 0x34, 0x0a, 0x03, 0x67, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x00, 0x52, 0x03, 0x67, 0x65, 0x74, 0x12, 0x57, 0x0a, 0x10, 0x67, 0x65, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x69, 0x74, 0x68, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x0e, 0x67, 0x65, 
0x74, 0x57, 0x69, 0x74, 0x68, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x47, 0x0a, 0x0a, 0x67, 0x65, 0x74, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x09, 0x67, 0x65, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x07, 0x67, 0x65, 0x74, 0x5f, 0x61, 0x6c, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x48, 0x00, 0x52, 0x06, 0x67, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x12, 0x3a, 0x0a, 0x05, 0x75, 0x6e, 0x73, 0x65, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x55, 0x6e, 0x73, 0x65, 0x74, 0x48, 0x00, 0x52, 0x05, 0x75, 0x6e, 0x73, 0x65, 0x74, 0x12, 0x50, 0x0a, 0x0d, 0x69, 0x73, 0x5f, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x73, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0c, 0x69, 0x73, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x6f, 0x70, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x1a, 0x5c, 0x0a, 0x03, 0x53, 0x65, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x70, 0x61, 0x69, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 
0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x70, 0x61, 0x69, 0x72, 0x73, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x69, 0x6c, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x06, 0x73, 0x69, 0x6c, 0x65, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x69, 0x6c, 0x65, 0x6e, 0x74, 0x1a, 0x19, 0x0a, 0x03, 0x47, 0x65, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x1a, 0x3f, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x57, 0x69, 0x74, 0x68, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x70, 0x61, 0x69, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x70, 0x61, 0x69, 0x72, 0x73, 0x1a, 0x1f, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x1a, 0x30, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x12, 0x1b, 0x0a, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x1a, 0x1b, 0x0a, 0x05, 0x55, 0x6e, 0x73, 0x65, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x1a, 0x22, 0x0a, 0x0c, 0x49, 0x73, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 
0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xaf, 0x01, 0x0a, 0x0e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x2d, 0x0a, 0x05, 0x70, 0x61, 0x69, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x70, 0x61, 0x69, 0x72, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x22, 0xea, 0x07, 0x0a, 0x13, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 
0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x40, 0x0a, 0x05, 0x62, 0x61, 0x74, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x62, 0x61, 0x74, 0x63, 0x68, 0x12, 0x5a, 0x0a, 0x0b, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x5f, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x42, 0x65, 0x67, 0x69, 0x6e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x65, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x48, 0x00, 0x52, 0x0a, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x12, 0x48, 0x0a, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 
0x61, 0x63, 0x74, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x48, 0x00, 0x52, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x1a, 0x35, 0x0a, 0x0d, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x72, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x03, 0x63, 0x72, 0x63, 0x1a, 0x6f, 0x0a, 0x13, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x44, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x5d, 0x0a, 0x05, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x54, 0x0a, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x1a, 0xc1, 0x01, 0x0a, 0x14, 0x42, 0x65, 0x67, 0x69, 0x6e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x65, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x6f, 0x74, 0x61, 
0x6c, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6e, 0x75, 0x6d, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x12, 0x55, 0x0a, 0x0d, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x52, 0x0c, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x42, 0x09, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x90, 0x02, 0x0a, 0x14, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x51, 0x0a, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 
0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x1a, 0x51, 0x0a, 0x0f, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x69, 0x73, 0x5f, 0x63, 0x72, 0x63, 0x5f, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x69, 0x73, 0x43, 0x72, 0x63, 0x53, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, 0x22, 0xc6, 0x02, 0x0a, 0x17, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 
0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xe0, 0x02, 0x0a, 0x18, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x51, 0x0a, 0x08, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x45, 
0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x1a, 0x73, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x4c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x28, 0x0a, 0x0e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x22, 0xdb, 0x04, 0x0a, 0x10, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x54, 0x0a, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x25, 0x0a, 0x0d, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x61, 0x67, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x61, 0x67, 0x12, 0x23, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0x80, 0x01, 0x0a, 0x0d, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x1a, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x52, 0x55, 0x50, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x16, 0x0a, 0x12, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x52, 0x55, 0x50, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x4c, 0x4c, 0x10, 0x01, 0x12, 0x16, 0x0a, 0x12, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x52, 0x55, 0x50, 0x54, 0x5f, 0x54, 0x59, 
0x50, 0x45, 0x5f, 0x54, 0x41, 0x47, 0x10, 0x02, 0x12, 0x1f, 0x0a, 0x1b, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x52, 0x55, 0x50, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4f, 0x50, 0x45, 0x52, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x49, 0x44, 0x10, 0x03, 0x42, 0x0b, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x90, 0x01, 0x0a, 0x11, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x65, 0x64, 0x49, 0x64, 0x73, 0x22, 0x35, 0x0a, 0x0f, 0x52, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x72, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x72, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x22, 0x96, 0x03, 0x0a, 0x16, 0x52, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x45, 0x78, 0x65, 0x63, 0x75, 
0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x2d, 0x0a, 0x10, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0e, 0x6c, 0x61, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 
0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x69, 0x64, 0x22, 0xc9, 0x04, 0x0a, 0x15, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 
0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x52, 0x0a, 0x0b, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x5f, 0x61, 0x6c, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x41, 0x6c, 0x6c, 0x48, 0x00, 0x52, 0x0a, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x41, 0x6c, 0x6c, 0x12, 0x58, 0x0a, 0x0d, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x5f, 0x75, 0x6e, 0x74, 0x69, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x55, 0x6e, 0x74, 0x69, 0x6c, 0x48, 0x00, 0x52, 0x0c, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x55, 0x6e, 0x74, 0x69, 0x6c, 0x1a, 0x0c, 0x0a, 0x0a, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x41, 0x6c, 0x6c, 0x1a, 0x2f, 0x0a, 0x0c, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x55, 0x6e, 0x74, 0x69, 0x6c, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x49, 0x64, 0x42, 0x09, 0x0a, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xa5, 0x01, 0x0a, 0x16, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x22, 0xd4, 0x01, 0x0a, 0x15, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x27, 0x0a, 0x0f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 
0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x6c, 0x0a, 0x16, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xcc, 0x02, 0x0a, 0x18, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x26, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x21, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x43, 0x6f, 
0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x49, 0x64, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x42, 0x29, 0x0a, 0x27, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x93, 0x0c, 0x0a, 0x19, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x16, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x69, 0x64, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x0e, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x0c, 0x72, 0x6f, 0x6f, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x49, 0x64, 0x78, 0x88, 0x01, 0x01, 0x12, 0x46, 0x0a, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 
0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x1a, 0xae, 0x01, 0x0a, 0x11, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x64, 0x65, 0x63, 0x6c, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x64, 0x65, 0x63, 0x6c, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x1a, 0xf0, 0x02, 0x0a, 0x0c, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x64, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x41, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, 0x70, 
0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x73, 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x73, 0x69, 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x61, 0x6c, 0x6c, 0x53, 0x69, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x22, 0x25, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x07, 0x0a, 0x03, 0x53, 0x51, 0x4c, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x44, 0x41, 0x54, 0x41, 0x46, 0x52, 0x41, 0x4d, 0x45, 0x10, 0x01, 0x1a, 0x99, 0x03, 0x0a, 0x0e, 0x53, 0x70, 0x61, 0x72, 0x6b, 0x54, 0x68, 0x72, 0x6f, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x24, 0x0a, 0x0b, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x88, 0x01, 0x01, 0x12, 0x7d, 0x0a, 0x12, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 
0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x4e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x6b, 0x54, 0x68, 0x72, 0x6f, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x5c, 0x0a, 0x0e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0d, 0x71, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x73, 0x12, 0x20, 0x0a, 0x09, 0x73, 0x71, 0x6c, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x08, 0x73, 0x71, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x1a, 0x44, 0x0a, 0x16, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x73, 0x71, 0x6c, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 
0xdb, 0x02, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x30, 0x0a, 0x14, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x68, 0x69, 0x65, 0x72, 0x61, 0x72, 0x63, 0x68, 0x79, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x48, 0x69, 0x65, 0x72, 0x61, 0x72, 0x63, 0x68, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x5b, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x12, 0x20, 0x0a, 0x09, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x08, 0x63, 0x61, 0x75, 0x73, 0x65, 0x49, 0x64, 0x78, 0x88, 0x01, 0x01, 0x12, 0x65, 0x0a, 0x0f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x5f, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x6b, 0x54, 0x68, 0x72, 0x6f, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x01, 0x52, 0x0e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x54, 0x68, 0x72, 0x6f, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x69, 0x64, 0x78, 0x42, 0x12, 0x0a, 0x10, 
0x5f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x5f, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x78, 0x22, 0x5a, 0x0a, 0x17, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x3f, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x32, 0xb2, 0x07, 0x0a, 0x13, 0x53, 0x70, 0x61, 0x72, 0x6b, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x58, 0x0a, 0x0b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x12, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x56, 0x0a, 0x0b, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x12, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x47, 0x0a, 0x06, 
0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x0c, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x64, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x28, 0x01, 0x12, 0x63, 0x0a, 0x0e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x09, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x12, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 
0x70, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x60, 0x0a, 0x0f, 0x52, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x5f, 0x0a, 0x0e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5f, 0x0a, 0x0e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x68, 0x0a, 0x11, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 
0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_base_proto_rawDescOnce sync.Once file_spark_connect_base_proto_rawDescData = file_spark_connect_base_proto_rawDesc ) func file_spark_connect_base_proto_rawDescGZIP() []byte { file_spark_connect_base_proto_rawDescOnce.Do(func() { file_spark_connect_base_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_base_proto_rawDescData) }) return file_spark_connect_base_proto_rawDescData } var file_spark_connect_base_proto_enumTypes = make([]protoimpl.EnumInfo, 3) var file_spark_connect_base_proto_msgTypes = make([]protoimpl.MessageInfo, 85) var file_spark_connect_base_proto_goTypes = []interface{}{ (AnalyzePlanRequest_Explain_ExplainMode)(0), // 0: spark.connect.AnalyzePlanRequest.Explain.ExplainMode (InterruptRequest_InterruptType)(0), // 1: spark.connect.InterruptRequest.InterruptType (FetchErrorDetailsResponse_QueryContext_ContextType)(0), // 2: spark.connect.FetchErrorDetailsResponse.QueryContext.ContextType (*Plan)(nil), // 3: spark.connect.Plan (*UserContext)(nil), // 4: spark.connect.UserContext (*AnalyzePlanRequest)(nil), // 5: spark.connect.AnalyzePlanRequest (*AnalyzePlanResponse)(nil), // 6: 
spark.connect.AnalyzePlanResponse (*ExecutePlanRequest)(nil), // 7: spark.connect.ExecutePlanRequest (*ExecutePlanResponse)(nil), // 8: spark.connect.ExecutePlanResponse (*KeyValue)(nil), // 9: spark.connect.KeyValue (*ConfigRequest)(nil), // 10: spark.connect.ConfigRequest (*ConfigResponse)(nil), // 11: spark.connect.ConfigResponse (*AddArtifactsRequest)(nil), // 12: spark.connect.AddArtifactsRequest (*AddArtifactsResponse)(nil), // 13: spark.connect.AddArtifactsResponse (*ArtifactStatusesRequest)(nil), // 14: spark.connect.ArtifactStatusesRequest (*ArtifactStatusesResponse)(nil), // 15: spark.connect.ArtifactStatusesResponse (*InterruptRequest)(nil), // 16: spark.connect.InterruptRequest (*InterruptResponse)(nil), // 17: spark.connect.InterruptResponse (*ReattachOptions)(nil), // 18: spark.connect.ReattachOptions (*ReattachExecuteRequest)(nil), // 19: spark.connect.ReattachExecuteRequest (*ReleaseExecuteRequest)(nil), // 20: spark.connect.ReleaseExecuteRequest (*ReleaseExecuteResponse)(nil), // 21: spark.connect.ReleaseExecuteResponse (*ReleaseSessionRequest)(nil), // 22: spark.connect.ReleaseSessionRequest (*ReleaseSessionResponse)(nil), // 23: spark.connect.ReleaseSessionResponse (*FetchErrorDetailsRequest)(nil), // 24: spark.connect.FetchErrorDetailsRequest (*FetchErrorDetailsResponse)(nil), // 25: spark.connect.FetchErrorDetailsResponse (*CheckpointCommandResult)(nil), // 26: spark.connect.CheckpointCommandResult (*AnalyzePlanRequest_Schema)(nil), // 27: spark.connect.AnalyzePlanRequest.Schema (*AnalyzePlanRequest_Explain)(nil), // 28: spark.connect.AnalyzePlanRequest.Explain (*AnalyzePlanRequest_TreeString)(nil), // 29: spark.connect.AnalyzePlanRequest.TreeString (*AnalyzePlanRequest_IsLocal)(nil), // 30: spark.connect.AnalyzePlanRequest.IsLocal (*AnalyzePlanRequest_IsStreaming)(nil), // 31: spark.connect.AnalyzePlanRequest.IsStreaming (*AnalyzePlanRequest_InputFiles)(nil), // 32: spark.connect.AnalyzePlanRequest.InputFiles 
(*AnalyzePlanRequest_SparkVersion)(nil), // 33: spark.connect.AnalyzePlanRequest.SparkVersion (*AnalyzePlanRequest_DDLParse)(nil), // 34: spark.connect.AnalyzePlanRequest.DDLParse (*AnalyzePlanRequest_SameSemantics)(nil), // 35: spark.connect.AnalyzePlanRequest.SameSemantics (*AnalyzePlanRequest_SemanticHash)(nil), // 36: spark.connect.AnalyzePlanRequest.SemanticHash (*AnalyzePlanRequest_Persist)(nil), // 37: spark.connect.AnalyzePlanRequest.Persist (*AnalyzePlanRequest_Unpersist)(nil), // 38: spark.connect.AnalyzePlanRequest.Unpersist (*AnalyzePlanRequest_GetStorageLevel)(nil), // 39: spark.connect.AnalyzePlanRequest.GetStorageLevel (*AnalyzePlanRequest_JsonToDDL)(nil), // 40: spark.connect.AnalyzePlanRequest.JsonToDDL (*AnalyzePlanResponse_Schema)(nil), // 41: spark.connect.AnalyzePlanResponse.Schema (*AnalyzePlanResponse_Explain)(nil), // 42: spark.connect.AnalyzePlanResponse.Explain (*AnalyzePlanResponse_TreeString)(nil), // 43: spark.connect.AnalyzePlanResponse.TreeString (*AnalyzePlanResponse_IsLocal)(nil), // 44: spark.connect.AnalyzePlanResponse.IsLocal (*AnalyzePlanResponse_IsStreaming)(nil), // 45: spark.connect.AnalyzePlanResponse.IsStreaming (*AnalyzePlanResponse_InputFiles)(nil), // 46: spark.connect.AnalyzePlanResponse.InputFiles (*AnalyzePlanResponse_SparkVersion)(nil), // 47: spark.connect.AnalyzePlanResponse.SparkVersion (*AnalyzePlanResponse_DDLParse)(nil), // 48: spark.connect.AnalyzePlanResponse.DDLParse (*AnalyzePlanResponse_SameSemantics)(nil), // 49: spark.connect.AnalyzePlanResponse.SameSemantics (*AnalyzePlanResponse_SemanticHash)(nil), // 50: spark.connect.AnalyzePlanResponse.SemanticHash (*AnalyzePlanResponse_Persist)(nil), // 51: spark.connect.AnalyzePlanResponse.Persist (*AnalyzePlanResponse_Unpersist)(nil), // 52: spark.connect.AnalyzePlanResponse.Unpersist (*AnalyzePlanResponse_GetStorageLevel)(nil), // 53: spark.connect.AnalyzePlanResponse.GetStorageLevel (*AnalyzePlanResponse_JsonToDDL)(nil), // 54: 
spark.connect.AnalyzePlanResponse.JsonToDDL (*ExecutePlanRequest_RequestOption)(nil), // 55: spark.connect.ExecutePlanRequest.RequestOption (*ExecutePlanResponse_SqlCommandResult)(nil), // 56: spark.connect.ExecutePlanResponse.SqlCommandResult (*ExecutePlanResponse_ArrowBatch)(nil), // 57: spark.connect.ExecutePlanResponse.ArrowBatch (*ExecutePlanResponse_Metrics)(nil), // 58: spark.connect.ExecutePlanResponse.Metrics (*ExecutePlanResponse_ObservedMetrics)(nil), // 59: spark.connect.ExecutePlanResponse.ObservedMetrics (*ExecutePlanResponse_ResultComplete)(nil), // 60: spark.connect.ExecutePlanResponse.ResultComplete (*ExecutePlanResponse_ExecutionProgress)(nil), // 61: spark.connect.ExecutePlanResponse.ExecutionProgress (*ExecutePlanResponse_Metrics_MetricObject)(nil), // 62: spark.connect.ExecutePlanResponse.Metrics.MetricObject (*ExecutePlanResponse_Metrics_MetricValue)(nil), // 63: spark.connect.ExecutePlanResponse.Metrics.MetricValue nil, // 64: spark.connect.ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntry (*ExecutePlanResponse_ExecutionProgress_StageInfo)(nil), // 65: spark.connect.ExecutePlanResponse.ExecutionProgress.StageInfo (*ConfigRequest_Operation)(nil), // 66: spark.connect.ConfigRequest.Operation (*ConfigRequest_Set)(nil), // 67: spark.connect.ConfigRequest.Set (*ConfigRequest_Get)(nil), // 68: spark.connect.ConfigRequest.Get (*ConfigRequest_GetWithDefault)(nil), // 69: spark.connect.ConfigRequest.GetWithDefault (*ConfigRequest_GetOption)(nil), // 70: spark.connect.ConfigRequest.GetOption (*ConfigRequest_GetAll)(nil), // 71: spark.connect.ConfigRequest.GetAll (*ConfigRequest_Unset)(nil), // 72: spark.connect.ConfigRequest.Unset (*ConfigRequest_IsModifiable)(nil), // 73: spark.connect.ConfigRequest.IsModifiable (*AddArtifactsRequest_ArtifactChunk)(nil), // 74: spark.connect.AddArtifactsRequest.ArtifactChunk (*AddArtifactsRequest_SingleChunkArtifact)(nil), // 75: spark.connect.AddArtifactsRequest.SingleChunkArtifact 
(*AddArtifactsRequest_Batch)(nil), // 76: spark.connect.AddArtifactsRequest.Batch (*AddArtifactsRequest_BeginChunkedArtifact)(nil), // 77: spark.connect.AddArtifactsRequest.BeginChunkedArtifact (*AddArtifactsResponse_ArtifactSummary)(nil), // 78: spark.connect.AddArtifactsResponse.ArtifactSummary nil, // 79: spark.connect.ArtifactStatusesResponse.StatusesEntry (*ArtifactStatusesResponse_ArtifactStatus)(nil), // 80: spark.connect.ArtifactStatusesResponse.ArtifactStatus (*ReleaseExecuteRequest_ReleaseAll)(nil), // 81: spark.connect.ReleaseExecuteRequest.ReleaseAll (*ReleaseExecuteRequest_ReleaseUntil)(nil), // 82: spark.connect.ReleaseExecuteRequest.ReleaseUntil (*FetchErrorDetailsResponse_StackTraceElement)(nil), // 83: spark.connect.FetchErrorDetailsResponse.StackTraceElement (*FetchErrorDetailsResponse_QueryContext)(nil), // 84: spark.connect.FetchErrorDetailsResponse.QueryContext (*FetchErrorDetailsResponse_SparkThrowable)(nil), // 85: spark.connect.FetchErrorDetailsResponse.SparkThrowable (*FetchErrorDetailsResponse_Error)(nil), // 86: spark.connect.FetchErrorDetailsResponse.Error nil, // 87: spark.connect.FetchErrorDetailsResponse.SparkThrowable.MessageParametersEntry (*Relation)(nil), // 88: spark.connect.Relation (*Command)(nil), // 89: spark.connect.Command (*anypb.Any)(nil), // 90: google.protobuf.Any (*WriteStreamOperationStartResult)(nil), // 91: spark.connect.WriteStreamOperationStartResult (*StreamingQueryCommandResult)(nil), // 92: spark.connect.StreamingQueryCommandResult (*GetResourcesCommandResult)(nil), // 93: spark.connect.GetResourcesCommandResult (*StreamingQueryManagerCommandResult)(nil), // 94: spark.connect.StreamingQueryManagerCommandResult (*StreamingQueryListenerEventsResult)(nil), // 95: spark.connect.StreamingQueryListenerEventsResult (*CreateResourceProfileCommandResult)(nil), // 96: spark.connect.CreateResourceProfileCommandResult (*MlCommandResult)(nil), // 97: spark.connect.MlCommandResult (*PipelineEventResult)(nil), // 98: 
spark.connect.PipelineEventResult (*PipelineCommandResult)(nil), // 99: spark.connect.PipelineCommandResult (*DataType)(nil), // 100: spark.connect.DataType (*CachedRemoteRelation)(nil), // 101: spark.connect.CachedRemoteRelation (*StorageLevel)(nil), // 102: spark.connect.StorageLevel (*Expression_Literal)(nil), // 103: spark.connect.Expression.Literal } var file_spark_connect_base_proto_depIdxs = []int32{ 88, // 0: spark.connect.Plan.root:type_name -> spark.connect.Relation 89, // 1: spark.connect.Plan.command:type_name -> spark.connect.Command 90, // 2: spark.connect.UserContext.extensions:type_name -> google.protobuf.Any 4, // 3: spark.connect.AnalyzePlanRequest.user_context:type_name -> spark.connect.UserContext 27, // 4: spark.connect.AnalyzePlanRequest.schema:type_name -> spark.connect.AnalyzePlanRequest.Schema 28, // 5: spark.connect.AnalyzePlanRequest.explain:type_name -> spark.connect.AnalyzePlanRequest.Explain 29, // 6: spark.connect.AnalyzePlanRequest.tree_string:type_name -> spark.connect.AnalyzePlanRequest.TreeString 30, // 7: spark.connect.AnalyzePlanRequest.is_local:type_name -> spark.connect.AnalyzePlanRequest.IsLocal 31, // 8: spark.connect.AnalyzePlanRequest.is_streaming:type_name -> spark.connect.AnalyzePlanRequest.IsStreaming 32, // 9: spark.connect.AnalyzePlanRequest.input_files:type_name -> spark.connect.AnalyzePlanRequest.InputFiles 33, // 10: spark.connect.AnalyzePlanRequest.spark_version:type_name -> spark.connect.AnalyzePlanRequest.SparkVersion 34, // 11: spark.connect.AnalyzePlanRequest.ddl_parse:type_name -> spark.connect.AnalyzePlanRequest.DDLParse 35, // 12: spark.connect.AnalyzePlanRequest.same_semantics:type_name -> spark.connect.AnalyzePlanRequest.SameSemantics 36, // 13: spark.connect.AnalyzePlanRequest.semantic_hash:type_name -> spark.connect.AnalyzePlanRequest.SemanticHash 37, // 14: spark.connect.AnalyzePlanRequest.persist:type_name -> spark.connect.AnalyzePlanRequest.Persist 38, // 15: 
spark.connect.AnalyzePlanRequest.unpersist:type_name -> spark.connect.AnalyzePlanRequest.Unpersist 39, // 16: spark.connect.AnalyzePlanRequest.get_storage_level:type_name -> spark.connect.AnalyzePlanRequest.GetStorageLevel 40, // 17: spark.connect.AnalyzePlanRequest.json_to_ddl:type_name -> spark.connect.AnalyzePlanRequest.JsonToDDL 41, // 18: spark.connect.AnalyzePlanResponse.schema:type_name -> spark.connect.AnalyzePlanResponse.Schema 42, // 19: spark.connect.AnalyzePlanResponse.explain:type_name -> spark.connect.AnalyzePlanResponse.Explain 43, // 20: spark.connect.AnalyzePlanResponse.tree_string:type_name -> spark.connect.AnalyzePlanResponse.TreeString 44, // 21: spark.connect.AnalyzePlanResponse.is_local:type_name -> spark.connect.AnalyzePlanResponse.IsLocal 45, // 22: spark.connect.AnalyzePlanResponse.is_streaming:type_name -> spark.connect.AnalyzePlanResponse.IsStreaming 46, // 23: spark.connect.AnalyzePlanResponse.input_files:type_name -> spark.connect.AnalyzePlanResponse.InputFiles 47, // 24: spark.connect.AnalyzePlanResponse.spark_version:type_name -> spark.connect.AnalyzePlanResponse.SparkVersion 48, // 25: spark.connect.AnalyzePlanResponse.ddl_parse:type_name -> spark.connect.AnalyzePlanResponse.DDLParse 49, // 26: spark.connect.AnalyzePlanResponse.same_semantics:type_name -> spark.connect.AnalyzePlanResponse.SameSemantics 50, // 27: spark.connect.AnalyzePlanResponse.semantic_hash:type_name -> spark.connect.AnalyzePlanResponse.SemanticHash 51, // 28: spark.connect.AnalyzePlanResponse.persist:type_name -> spark.connect.AnalyzePlanResponse.Persist 52, // 29: spark.connect.AnalyzePlanResponse.unpersist:type_name -> spark.connect.AnalyzePlanResponse.Unpersist 53, // 30: spark.connect.AnalyzePlanResponse.get_storage_level:type_name -> spark.connect.AnalyzePlanResponse.GetStorageLevel 54, // 31: spark.connect.AnalyzePlanResponse.json_to_ddl:type_name -> spark.connect.AnalyzePlanResponse.JsonToDDL 4, // 32: 
spark.connect.ExecutePlanRequest.user_context:type_name -> spark.connect.UserContext 3, // 33: spark.connect.ExecutePlanRequest.plan:type_name -> spark.connect.Plan 55, // 34: spark.connect.ExecutePlanRequest.request_options:type_name -> spark.connect.ExecutePlanRequest.RequestOption 57, // 35: spark.connect.ExecutePlanResponse.arrow_batch:type_name -> spark.connect.ExecutePlanResponse.ArrowBatch 56, // 36: spark.connect.ExecutePlanResponse.sql_command_result:type_name -> spark.connect.ExecutePlanResponse.SqlCommandResult 91, // 37: spark.connect.ExecutePlanResponse.write_stream_operation_start_result:type_name -> spark.connect.WriteStreamOperationStartResult 92, // 38: spark.connect.ExecutePlanResponse.streaming_query_command_result:type_name -> spark.connect.StreamingQueryCommandResult 93, // 39: spark.connect.ExecutePlanResponse.get_resources_command_result:type_name -> spark.connect.GetResourcesCommandResult 94, // 40: spark.connect.ExecutePlanResponse.streaming_query_manager_command_result:type_name -> spark.connect.StreamingQueryManagerCommandResult 95, // 41: spark.connect.ExecutePlanResponse.streaming_query_listener_events_result:type_name -> spark.connect.StreamingQueryListenerEventsResult 60, // 42: spark.connect.ExecutePlanResponse.result_complete:type_name -> spark.connect.ExecutePlanResponse.ResultComplete 96, // 43: spark.connect.ExecutePlanResponse.create_resource_profile_command_result:type_name -> spark.connect.CreateResourceProfileCommandResult 61, // 44: spark.connect.ExecutePlanResponse.execution_progress:type_name -> spark.connect.ExecutePlanResponse.ExecutionProgress 26, // 45: spark.connect.ExecutePlanResponse.checkpoint_command_result:type_name -> spark.connect.CheckpointCommandResult 97, // 46: spark.connect.ExecutePlanResponse.ml_command_result:type_name -> spark.connect.MlCommandResult 98, // 47: spark.connect.ExecutePlanResponse.pipeline_event_result:type_name -> spark.connect.PipelineEventResult 99, // 48: 
spark.connect.ExecutePlanResponse.pipeline_command_result:type_name -> spark.connect.PipelineCommandResult 90, // 49: spark.connect.ExecutePlanResponse.extension:type_name -> google.protobuf.Any 58, // 50: spark.connect.ExecutePlanResponse.metrics:type_name -> spark.connect.ExecutePlanResponse.Metrics 59, // 51: spark.connect.ExecutePlanResponse.observed_metrics:type_name -> spark.connect.ExecutePlanResponse.ObservedMetrics 100, // 52: spark.connect.ExecutePlanResponse.schema:type_name -> spark.connect.DataType 4, // 53: spark.connect.ConfigRequest.user_context:type_name -> spark.connect.UserContext 66, // 54: spark.connect.ConfigRequest.operation:type_name -> spark.connect.ConfigRequest.Operation 9, // 55: spark.connect.ConfigResponse.pairs:type_name -> spark.connect.KeyValue 4, // 56: spark.connect.AddArtifactsRequest.user_context:type_name -> spark.connect.UserContext 76, // 57: spark.connect.AddArtifactsRequest.batch:type_name -> spark.connect.AddArtifactsRequest.Batch 77, // 58: spark.connect.AddArtifactsRequest.begin_chunk:type_name -> spark.connect.AddArtifactsRequest.BeginChunkedArtifact 74, // 59: spark.connect.AddArtifactsRequest.chunk:type_name -> spark.connect.AddArtifactsRequest.ArtifactChunk 78, // 60: spark.connect.AddArtifactsResponse.artifacts:type_name -> spark.connect.AddArtifactsResponse.ArtifactSummary 4, // 61: spark.connect.ArtifactStatusesRequest.user_context:type_name -> spark.connect.UserContext 79, // 62: spark.connect.ArtifactStatusesResponse.statuses:type_name -> spark.connect.ArtifactStatusesResponse.StatusesEntry 4, // 63: spark.connect.InterruptRequest.user_context:type_name -> spark.connect.UserContext 1, // 64: spark.connect.InterruptRequest.interrupt_type:type_name -> spark.connect.InterruptRequest.InterruptType 4, // 65: spark.connect.ReattachExecuteRequest.user_context:type_name -> spark.connect.UserContext 4, // 66: spark.connect.ReleaseExecuteRequest.user_context:type_name -> spark.connect.UserContext 81, // 67: 
spark.connect.ReleaseExecuteRequest.release_all:type_name -> spark.connect.ReleaseExecuteRequest.ReleaseAll 82, // 68: spark.connect.ReleaseExecuteRequest.release_until:type_name -> spark.connect.ReleaseExecuteRequest.ReleaseUntil 4, // 69: spark.connect.ReleaseSessionRequest.user_context:type_name -> spark.connect.UserContext 4, // 70: spark.connect.FetchErrorDetailsRequest.user_context:type_name -> spark.connect.UserContext 86, // 71: spark.connect.FetchErrorDetailsResponse.errors:type_name -> spark.connect.FetchErrorDetailsResponse.Error 101, // 72: spark.connect.CheckpointCommandResult.relation:type_name -> spark.connect.CachedRemoteRelation 3, // 73: spark.connect.AnalyzePlanRequest.Schema.plan:type_name -> spark.connect.Plan 3, // 74: spark.connect.AnalyzePlanRequest.Explain.plan:type_name -> spark.connect.Plan 0, // 75: spark.connect.AnalyzePlanRequest.Explain.explain_mode:type_name -> spark.connect.AnalyzePlanRequest.Explain.ExplainMode 3, // 76: spark.connect.AnalyzePlanRequest.TreeString.plan:type_name -> spark.connect.Plan 3, // 77: spark.connect.AnalyzePlanRequest.IsLocal.plan:type_name -> spark.connect.Plan 3, // 78: spark.connect.AnalyzePlanRequest.IsStreaming.plan:type_name -> spark.connect.Plan 3, // 79: spark.connect.AnalyzePlanRequest.InputFiles.plan:type_name -> spark.connect.Plan 3, // 80: spark.connect.AnalyzePlanRequest.SameSemantics.target_plan:type_name -> spark.connect.Plan 3, // 81: spark.connect.AnalyzePlanRequest.SameSemantics.other_plan:type_name -> spark.connect.Plan 3, // 82: spark.connect.AnalyzePlanRequest.SemanticHash.plan:type_name -> spark.connect.Plan 88, // 83: spark.connect.AnalyzePlanRequest.Persist.relation:type_name -> spark.connect.Relation 102, // 84: spark.connect.AnalyzePlanRequest.Persist.storage_level:type_name -> spark.connect.StorageLevel 88, // 85: spark.connect.AnalyzePlanRequest.Unpersist.relation:type_name -> spark.connect.Relation 88, // 86: spark.connect.AnalyzePlanRequest.GetStorageLevel.relation:type_name -> 
spark.connect.Relation 100, // 87: spark.connect.AnalyzePlanResponse.Schema.schema:type_name -> spark.connect.DataType 100, // 88: spark.connect.AnalyzePlanResponse.DDLParse.parsed:type_name -> spark.connect.DataType 102, // 89: spark.connect.AnalyzePlanResponse.GetStorageLevel.storage_level:type_name -> spark.connect.StorageLevel 18, // 90: spark.connect.ExecutePlanRequest.RequestOption.reattach_options:type_name -> spark.connect.ReattachOptions 90, // 91: spark.connect.ExecutePlanRequest.RequestOption.extension:type_name -> google.protobuf.Any 88, // 92: spark.connect.ExecutePlanResponse.SqlCommandResult.relation:type_name -> spark.connect.Relation 62, // 93: spark.connect.ExecutePlanResponse.Metrics.metrics:type_name -> spark.connect.ExecutePlanResponse.Metrics.MetricObject 103, // 94: spark.connect.ExecutePlanResponse.ObservedMetrics.values:type_name -> spark.connect.Expression.Literal 65, // 95: spark.connect.ExecutePlanResponse.ExecutionProgress.stages:type_name -> spark.connect.ExecutePlanResponse.ExecutionProgress.StageInfo 64, // 96: spark.connect.ExecutePlanResponse.Metrics.MetricObject.execution_metrics:type_name -> spark.connect.ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntry 63, // 97: spark.connect.ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntry.value:type_name -> spark.connect.ExecutePlanResponse.Metrics.MetricValue 67, // 98: spark.connect.ConfigRequest.Operation.set:type_name -> spark.connect.ConfigRequest.Set 68, // 99: spark.connect.ConfigRequest.Operation.get:type_name -> spark.connect.ConfigRequest.Get 69, // 100: spark.connect.ConfigRequest.Operation.get_with_default:type_name -> spark.connect.ConfigRequest.GetWithDefault 70, // 101: spark.connect.ConfigRequest.Operation.get_option:type_name -> spark.connect.ConfigRequest.GetOption 71, // 102: spark.connect.ConfigRequest.Operation.get_all:type_name -> spark.connect.ConfigRequest.GetAll 72, // 103: spark.connect.ConfigRequest.Operation.unset:type_name -> 
spark.connect.ConfigRequest.Unset 73, // 104: spark.connect.ConfigRequest.Operation.is_modifiable:type_name -> spark.connect.ConfigRequest.IsModifiable 9, // 105: spark.connect.ConfigRequest.Set.pairs:type_name -> spark.connect.KeyValue 9, // 106: spark.connect.ConfigRequest.GetWithDefault.pairs:type_name -> spark.connect.KeyValue 74, // 107: spark.connect.AddArtifactsRequest.SingleChunkArtifact.data:type_name -> spark.connect.AddArtifactsRequest.ArtifactChunk 75, // 108: spark.connect.AddArtifactsRequest.Batch.artifacts:type_name -> spark.connect.AddArtifactsRequest.SingleChunkArtifact 74, // 109: spark.connect.AddArtifactsRequest.BeginChunkedArtifact.initial_chunk:type_name -> spark.connect.AddArtifactsRequest.ArtifactChunk 80, // 110: spark.connect.ArtifactStatusesResponse.StatusesEntry.value:type_name -> spark.connect.ArtifactStatusesResponse.ArtifactStatus 2, // 111: spark.connect.FetchErrorDetailsResponse.QueryContext.context_type:type_name -> spark.connect.FetchErrorDetailsResponse.QueryContext.ContextType 87, // 112: spark.connect.FetchErrorDetailsResponse.SparkThrowable.message_parameters:type_name -> spark.connect.FetchErrorDetailsResponse.SparkThrowable.MessageParametersEntry 84, // 113: spark.connect.FetchErrorDetailsResponse.SparkThrowable.query_contexts:type_name -> spark.connect.FetchErrorDetailsResponse.QueryContext 83, // 114: spark.connect.FetchErrorDetailsResponse.Error.stack_trace:type_name -> spark.connect.FetchErrorDetailsResponse.StackTraceElement 85, // 115: spark.connect.FetchErrorDetailsResponse.Error.spark_throwable:type_name -> spark.connect.FetchErrorDetailsResponse.SparkThrowable 7, // 116: spark.connect.SparkConnectService.ExecutePlan:input_type -> spark.connect.ExecutePlanRequest 5, // 117: spark.connect.SparkConnectService.AnalyzePlan:input_type -> spark.connect.AnalyzePlanRequest 10, // 118: spark.connect.SparkConnectService.Config:input_type -> spark.connect.ConfigRequest 12, // 119: 
spark.connect.SparkConnectService.AddArtifacts:input_type -> spark.connect.AddArtifactsRequest 14, // 120: spark.connect.SparkConnectService.ArtifactStatus:input_type -> spark.connect.ArtifactStatusesRequest 16, // 121: spark.connect.SparkConnectService.Interrupt:input_type -> spark.connect.InterruptRequest 19, // 122: spark.connect.SparkConnectService.ReattachExecute:input_type -> spark.connect.ReattachExecuteRequest 20, // 123: spark.connect.SparkConnectService.ReleaseExecute:input_type -> spark.connect.ReleaseExecuteRequest 22, // 124: spark.connect.SparkConnectService.ReleaseSession:input_type -> spark.connect.ReleaseSessionRequest 24, // 125: spark.connect.SparkConnectService.FetchErrorDetails:input_type -> spark.connect.FetchErrorDetailsRequest 8, // 126: spark.connect.SparkConnectService.ExecutePlan:output_type -> spark.connect.ExecutePlanResponse 6, // 127: spark.connect.SparkConnectService.AnalyzePlan:output_type -> spark.connect.AnalyzePlanResponse 11, // 128: spark.connect.SparkConnectService.Config:output_type -> spark.connect.ConfigResponse 13, // 129: spark.connect.SparkConnectService.AddArtifacts:output_type -> spark.connect.AddArtifactsResponse 15, // 130: spark.connect.SparkConnectService.ArtifactStatus:output_type -> spark.connect.ArtifactStatusesResponse 17, // 131: spark.connect.SparkConnectService.Interrupt:output_type -> spark.connect.InterruptResponse 8, // 132: spark.connect.SparkConnectService.ReattachExecute:output_type -> spark.connect.ExecutePlanResponse 21, // 133: spark.connect.SparkConnectService.ReleaseExecute:output_type -> spark.connect.ReleaseExecuteResponse 23, // 134: spark.connect.SparkConnectService.ReleaseSession:output_type -> spark.connect.ReleaseSessionResponse 25, // 135: spark.connect.SparkConnectService.FetchErrorDetails:output_type -> spark.connect.FetchErrorDetailsResponse 126, // [126:136] is the sub-list for method output_type 116, // [116:126] is the sub-list for method input_type 116, // [116:116] is the sub-list 
// (review note: tail of the generated depIdxs table; comment continues: "...for extension type_name")
	116, // [116:116] is the sub-list for extension extendee
	0,   // [0:116] is the sub-list for field type_name
}

// init registers the spark/connect/base.proto file descriptor with the
// protobuf runtime when this package is first imported.
func init() { file_spark_connect_base_proto_init() }

// file_spark_connect_base_proto_init builds and registers the descriptor for
// spark/connect/base.proto. It is idempotent (guarded by the nil check on
// File_spark_connect_base_proto) and first initializes every .proto file this
// one imports, since their descriptors must exist before this one can resolve
// its cross-file type references.
//
// NOTE(review): this is protoc-gen-go output (see the "DO NOT EDIT" header) —
// regenerate via the repo's buf/protoc tooling instead of editing by hand.
func file_spark_connect_base_proto_init() {
	if File_spark_connect_base_proto != nil {
		return
	}
	file_spark_connect_commands_proto_init()
	file_spark_connect_common_proto_init()
	file_spark_connect_expressions_proto_init()
	file_spark_connect_relations_proto_init()
	file_spark_connect_types_proto_init()
	file_spark_connect_ml_proto_init()
	file_spark_connect_pipelines_proto_init()
	// Field exporters give the reflection-based runtime access to the
	// unexported bookkeeping fields (state/sizeCache/unknownFields) of each
	// message type; they are only needed when protoimpl's unsafe fast path
	// is disabled.
	if !protoimpl.UnsafeEnabled {
		file_spark_connect_base_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Plan); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*UserContext); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*KeyValue); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AddArtifactsRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AddArtifactsResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ArtifactStatusesRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ArtifactStatusesResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*InterruptRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*InterruptResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReattachOptions); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReattachExecuteRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReleaseExecuteRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReleaseExecuteResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReleaseSessionRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReleaseSessionResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchErrorDetailsRequest); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchErrorDetailsResponse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CheckpointCommandResult); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_Schema); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_Explain); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_TreeString); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_IsLocal); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_IsStreaming); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_InputFiles); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_SparkVersion); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_DDLParse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_SameSemantics); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_SemanticHash); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_Persist); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_Unpersist); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_GetStorageLevel); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanRequest_JsonToDDL); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_Schema); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_Explain); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_TreeString); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_IsLocal); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_IsStreaming); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_InputFiles); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_SparkVersion); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[45].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_DDLParse); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_SameSemantics); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[47].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_SemanticHash); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_Persist); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_Unpersist); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_GetStorageLevel); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[51].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AnalyzePlanResponse_JsonToDDL); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[52].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanRequest_RequestOption); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_SqlCommandResult); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[54].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_ArrowBatch); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[55].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_Metrics); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[56].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_ObservedMetrics); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[57].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_ResultComplete); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[58].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_ExecutionProgress); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_Metrics_MetricObject); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[60].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_Metrics_MetricValue); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		// NOTE(review): indexes 61 and 76 get no exporter — presumably they are
		// synthesized map-entry types (e.g. ExecutionMetricsEntry/StatusesEntry),
		// which have no generated struct; confirm against the msgTypes table.
		file_spark_connect_base_proto_msgTypes[62].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutePlanResponse_ExecutionProgress_StageInfo); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[63].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_Operation); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[64].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_Set); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[65].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_Get); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[66].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_GetWithDefault); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[67].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_GetOption); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[68].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_GetAll); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[69].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_Unset); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[70].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigRequest_IsModifiable); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[71].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AddArtifactsRequest_ArtifactChunk); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[72].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AddArtifactsRequest_SingleChunkArtifact); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[73].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AddArtifactsRequest_Batch); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[74].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AddArtifactsRequest_BeginChunkedArtifact); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[75].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AddArtifactsResponse_ArtifactSummary); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[77].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ArtifactStatusesResponse_ArtifactStatus); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[78].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReleaseExecuteRequest_ReleaseAll); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[79].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReleaseExecuteRequest_ReleaseUntil); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[80].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchErrorDetailsResponse_StackTraceElement); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[81].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchErrorDetailsResponse_QueryContext); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[82].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchErrorDetailsResponse_SparkThrowable); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
		file_spark_connect_base_proto_msgTypes[83].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchErrorDetailsResponse_Error); i { case 0: return &v.state; case 1: return &v.sizeCache; case 2: return &v.unknownFields; default: return nil } }
	}
	// Register the concrete wrapper types backing each message's oneof fields.
	// NOTE(review): the empty slices presumably mark messages whose oneofs are
	// synthetic (proto3 `optional` fields) — confirm against base.proto.
	file_spark_connect_base_proto_msgTypes[0].OneofWrappers = []interface{}{
		(*Plan_Root)(nil),
		(*Plan_Command)(nil),
	}
	file_spark_connect_base_proto_msgTypes[2].OneofWrappers = []interface{}{
		(*AnalyzePlanRequest_Schema_)(nil),
		(*AnalyzePlanRequest_Explain_)(nil),
		(*AnalyzePlanRequest_TreeString_)(nil),
		(*AnalyzePlanRequest_IsLocal_)(nil),
		(*AnalyzePlanRequest_IsStreaming_)(nil),
		(*AnalyzePlanRequest_InputFiles_)(nil),
		(*AnalyzePlanRequest_SparkVersion_)(nil),
		(*AnalyzePlanRequest_DdlParse)(nil),
		(*AnalyzePlanRequest_SameSemantics_)(nil),
		(*AnalyzePlanRequest_SemanticHash_)(nil),
		(*AnalyzePlanRequest_Persist_)(nil),
		(*AnalyzePlanRequest_Unpersist_)(nil),
		(*AnalyzePlanRequest_GetStorageLevel_)(nil),
		(*AnalyzePlanRequest_JsonToDdl)(nil),
	}
	file_spark_connect_base_proto_msgTypes[3].OneofWrappers = []interface{}{
		(*AnalyzePlanResponse_Schema_)(nil),
		(*AnalyzePlanResponse_Explain_)(nil),
		(*AnalyzePlanResponse_TreeString_)(nil),
		(*AnalyzePlanResponse_IsLocal_)(nil),
		(*AnalyzePlanResponse_IsStreaming_)(nil),
		(*AnalyzePlanResponse_InputFiles_)(nil),
		(*AnalyzePlanResponse_SparkVersion_)(nil),
		(*AnalyzePlanResponse_DdlParse)(nil),
		(*AnalyzePlanResponse_SameSemantics_)(nil),
		(*AnalyzePlanResponse_SemanticHash_)(nil),
		(*AnalyzePlanResponse_Persist_)(nil),
		(*AnalyzePlanResponse_Unpersist_)(nil),
		(*AnalyzePlanResponse_GetStorageLevel_)(nil),
		(*AnalyzePlanResponse_JsonToDdl)(nil),
	}
	file_spark_connect_base_proto_msgTypes[4].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[5].OneofWrappers = []interface{}{
		(*ExecutePlanResponse_ArrowBatch_)(nil),
		(*ExecutePlanResponse_SqlCommandResult_)(nil),
		(*ExecutePlanResponse_WriteStreamOperationStartResult)(nil),
		(*ExecutePlanResponse_StreamingQueryCommandResult)(nil),
		(*ExecutePlanResponse_GetResourcesCommandResult)(nil),
		(*ExecutePlanResponse_StreamingQueryManagerCommandResult)(nil),
		(*ExecutePlanResponse_StreamingQueryListenerEventsResult)(nil),
		(*ExecutePlanResponse_ResultComplete_)(nil),
		(*ExecutePlanResponse_CreateResourceProfileCommandResult)(nil),
		(*ExecutePlanResponse_ExecutionProgress_)(nil),
		(*ExecutePlanResponse_CheckpointCommandResult)(nil),
		(*ExecutePlanResponse_MlCommandResult)(nil),
		(*ExecutePlanResponse_PipelineEventResult)(nil),
		(*ExecutePlanResponse_PipelineCommandResult)(nil),
		(*ExecutePlanResponse_Extension)(nil),
	}
	file_spark_connect_base_proto_msgTypes[6].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[7].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[9].OneofWrappers = []interface{}{
		(*AddArtifactsRequest_Batch_)(nil),
		(*AddArtifactsRequest_BeginChunk)(nil),
		(*AddArtifactsRequest_Chunk)(nil),
	}
	file_spark_connect_base_proto_msgTypes[11].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[13].OneofWrappers = []interface{}{
		(*InterruptRequest_OperationTag)(nil),
		(*InterruptRequest_OperationId)(nil),
	}
	file_spark_connect_base_proto_msgTypes[16].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[17].OneofWrappers = []interface{}{
		(*ReleaseExecuteRequest_ReleaseAll_)(nil),
		(*ReleaseExecuteRequest_ReleaseUntil_)(nil),
	}
	file_spark_connect_base_proto_msgTypes[18].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[19].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[21].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[22].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[26].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[34].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[35].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[52].OneofWrappers = []interface{}{
		(*ExecutePlanRequest_RequestOption_ReattachOptions)(nil),
		(*ExecutePlanRequest_RequestOption_Extension)(nil),
	}
	file_spark_connect_base_proto_msgTypes[54].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[63].OneofWrappers = []interface{}{
		(*ConfigRequest_Operation_Set)(nil),
		(*ConfigRequest_Operation_Get)(nil),
		(*ConfigRequest_Operation_GetWithDefault)(nil),
		(*ConfigRequest_Operation_GetOption)(nil),
		(*ConfigRequest_Operation_GetAll)(nil),
		(*ConfigRequest_Operation_Unset)(nil),
		(*ConfigRequest_Operation_IsModifiable)(nil),
	}
	file_spark_connect_base_proto_msgTypes[64].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[68].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[80].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[82].OneofWrappers = []interface{}{}
	file_spark_connect_base_proto_msgTypes[83].OneofWrappers = []interface{}{}
	// Assemble the runtime descriptor from the raw descriptor bytes plus the
	// Go type tables, then publish it and release the init-only references.
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_spark_connect_base_proto_rawDesc,
			NumEnums:      3,
			NumMessages:   85,
			NumExtensions: 0,
			NumServices:   1,
		},
		GoTypes:           file_spark_connect_base_proto_goTypes,
		DependencyIndexes: file_spark_connect_base_proto_depIdxs,
		EnumInfos:         file_spark_connect_base_proto_enumTypes,
		MessageInfos:      file_spark_connect_base_proto_msgTypes,
	}.Build()
	File_spark_connect_base_proto = out.File
	file_spark_connect_base_proto_rawDesc = nil
	file_spark_connect_base_proto_goTypes = nil
	file_spark_connect_base_proto_depIdxs = nil
}

================================================ FILE: internal/generated/base_grpc.pb.go ================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.3.0
// - protoc             (unknown)
// source: spark/connect/base.proto

// NOTE(review): machine-generated file — do not edit by hand; regenerate from
// spark/connect/base.proto via the project's buf/protoc tooling instead.

package generated

import (
	context "context"
	grpc "google.golang.org/grpc"
	codes "google.golang.org/grpc/codes"
	status "google.golang.org/grpc/status"
)

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7

// Fully-qualified method names for each SparkConnectService RPC; these must
// match the entries registered in SparkConnectService_ServiceDesc below.
const (
	SparkConnectService_ExecutePlan_FullMethodName       = "/spark.connect.SparkConnectService/ExecutePlan"
	SparkConnectService_AnalyzePlan_FullMethodName       = "/spark.connect.SparkConnectService/AnalyzePlan"
	SparkConnectService_Config_FullMethodName            = "/spark.connect.SparkConnectService/Config"
	SparkConnectService_AddArtifacts_FullMethodName      = "/spark.connect.SparkConnectService/AddArtifacts"
	SparkConnectService_ArtifactStatus_FullMethodName    = "/spark.connect.SparkConnectService/ArtifactStatus"
	SparkConnectService_Interrupt_FullMethodName         = "/spark.connect.SparkConnectService/Interrupt"
	SparkConnectService_ReattachExecute_FullMethodName   = "/spark.connect.SparkConnectService/ReattachExecute"
	SparkConnectService_ReleaseExecute_FullMethodName    = "/spark.connect.SparkConnectService/ReleaseExecute"
	SparkConnectService_ReleaseSession_FullMethodName    = "/spark.connect.SparkConnectService/ReleaseSession"
	SparkConnectService_FetchErrorDetails_FullMethodName = "/spark.connect.SparkConnectService/FetchErrorDetails"
)

// SparkConnectServiceClient is the client API for SparkConnectService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type SparkConnectServiceClient interface {
	// Executes a request that contains the query and returns a stream of [[Response]].
	//
	// It is guaranteed that there is at least one ARROW batch returned even if the result set is empty.
	ExecutePlan(ctx context.Context, in *ExecutePlanRequest, opts ...grpc.CallOption) (SparkConnectService_ExecutePlanClient, error)
	// Analyzes a query and returns a [[AnalyzeResponse]] containing metadata about the query.
	AnalyzePlan(ctx context.Context, in *AnalyzePlanRequest, opts ...grpc.CallOption) (*AnalyzePlanResponse, error)
	// Update or fetch the configurations and returns a [[ConfigResponse]] containing the result.
	Config(ctx context.Context, in *ConfigRequest, opts ...grpc.CallOption) (*ConfigResponse, error)
	// Add artifacts to the session and returns a [[AddArtifactsResponse]] containing metadata about
	// the added artifacts.
	AddArtifacts(ctx context.Context, opts ...grpc.CallOption) (SparkConnectService_AddArtifactsClient, error)
	// Check statuses of artifacts in the session and returns them in a [[ArtifactStatusesResponse]]
	ArtifactStatus(ctx context.Context, in *ArtifactStatusesRequest, opts ...grpc.CallOption) (*ArtifactStatusesResponse, error)
	// Interrupts running executions
	Interrupt(ctx context.Context, in *InterruptRequest, opts ...grpc.CallOption) (*InterruptResponse, error)
	// Reattach to an existing reattachable execution.
	// The ExecutePlan must have been started with ReattachOptions.reattachable=true.
	// If the ExecutePlanResponse stream ends without a ResultComplete message, there is more to
	// continue. If there is a ResultComplete, the client should use ReleaseExecute with
	ReattachExecute(ctx context.Context, in *ReattachExecuteRequest, opts ...grpc.CallOption) (SparkConnectService_ReattachExecuteClient, error)
	// Release an reattachable execution, or parts thereof.
	// The ExecutePlan must have been started with ReattachOptions.reattachable=true.
	// Non reattachable executions are released automatically and immediately after the ExecutePlan
	// RPC and ReleaseExecute may not be used.
	ReleaseExecute(ctx context.Context, in *ReleaseExecuteRequest, opts ...grpc.CallOption) (*ReleaseExecuteResponse, error)
	// Release a session.
	// All the executions in the session will be released. Any further requests for the session with
	// that session_id for the given user_id will fail. If the session didn't exist or was already
	// released, this is a noop.
	ReleaseSession(ctx context.Context, in *ReleaseSessionRequest, opts ...grpc.CallOption) (*ReleaseSessionResponse, error)
	// FetchErrorDetails retrieves the matched exception with details based on a provided error id.
	FetchErrorDetails(ctx context.Context, in *FetchErrorDetailsRequest, opts ...grpc.CallOption) (*FetchErrorDetailsResponse, error)
}

type sparkConnectServiceClient struct {
	cc grpc.ClientConnInterface
}

func NewSparkConnectServiceClient(cc grpc.ClientConnInterface) SparkConnectServiceClient {
	return &sparkConnectServiceClient{cc}
}

// ExecutePlan opens the server-streaming RPC (Streams[0] of the service desc),
// sends the single request, half-closes, and hands back a Recv-only stream.
func (c *sparkConnectServiceClient) ExecutePlan(ctx context.Context, in *ExecutePlanRequest, opts ...grpc.CallOption) (SparkConnectService_ExecutePlanClient, error) {
	stream, err := c.cc.NewStream(ctx, &SparkConnectService_ServiceDesc.Streams[0], SparkConnectService_ExecutePlan_FullMethodName, opts...)
	if err != nil {
		return nil, err
	}
	x := &sparkConnectServiceExecutePlanClient{stream}
	if err := x.ClientStream.SendMsg(in); err != nil {
		return nil, err
	}
	if err := x.ClientStream.CloseSend(); err != nil {
		return nil, err
	}
	return x, nil
}

type SparkConnectService_ExecutePlanClient interface {
	Recv() (*ExecutePlanResponse, error)
	grpc.ClientStream
}

type sparkConnectServiceExecutePlanClient struct {
	grpc.ClientStream
}

func (x *sparkConnectServiceExecutePlanClient) Recv() (*ExecutePlanResponse, error) {
	m := new(ExecutePlanResponse)
	if err := x.ClientStream.RecvMsg(m); err != nil {
		return nil, err
	}
	return m, nil
}

func (c *sparkConnectServiceClient) AnalyzePlan(ctx context.Context, in *AnalyzePlanRequest, opts ...grpc.CallOption) (*AnalyzePlanResponse, error) {
	out := new(AnalyzePlanResponse)
	err := c.cc.Invoke(ctx, SparkConnectService_AnalyzePlan_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *sparkConnectServiceClient) Config(ctx context.Context, in *ConfigRequest, opts ...grpc.CallOption) (*ConfigResponse, error) {
	out := new(ConfigResponse)
	err := c.cc.Invoke(ctx, SparkConnectService_Config_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// AddArtifacts opens the client-streaming RPC (Streams[1] of the service desc);
// the caller sends requests and finishes with CloseAndRecv.
func (c *sparkConnectServiceClient) AddArtifacts(ctx context.Context, opts ...grpc.CallOption) (SparkConnectService_AddArtifactsClient, error) {
	stream, err := c.cc.NewStream(ctx, &SparkConnectService_ServiceDesc.Streams[1], SparkConnectService_AddArtifacts_FullMethodName, opts...)
	if err != nil {
		return nil, err
	}
	x := &sparkConnectServiceAddArtifactsClient{stream}
	return x, nil
}

type SparkConnectService_AddArtifactsClient interface {
	Send(*AddArtifactsRequest) error
	CloseAndRecv() (*AddArtifactsResponse, error)
	grpc.ClientStream
}

type sparkConnectServiceAddArtifactsClient struct {
	grpc.ClientStream
}

func (x *sparkConnectServiceAddArtifactsClient) Send(m *AddArtifactsRequest) error {
	return x.ClientStream.SendMsg(m)
}

func (x *sparkConnectServiceAddArtifactsClient) CloseAndRecv() (*AddArtifactsResponse, error) {
	if err := x.ClientStream.CloseSend(); err != nil {
		return nil, err
	}
	m := new(AddArtifactsResponse)
	if err := x.ClientStream.RecvMsg(m); err != nil {
		return nil, err
	}
	return m, nil
}

func (c *sparkConnectServiceClient) ArtifactStatus(ctx context.Context, in *ArtifactStatusesRequest, opts ...grpc.CallOption) (*ArtifactStatusesResponse, error) {
	out := new(ArtifactStatusesResponse)
	err := c.cc.Invoke(ctx, SparkConnectService_ArtifactStatus_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *sparkConnectServiceClient) Interrupt(ctx context.Context, in *InterruptRequest, opts ...grpc.CallOption) (*InterruptResponse, error) {
	out := new(InterruptResponse)
	err := c.cc.Invoke(ctx, SparkConnectService_Interrupt_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// ReattachExecute opens the server-streaming RPC (Streams[2] of the service desc)
// and, like ExecutePlan, sends one request then half-closes before returning.
func (c *sparkConnectServiceClient) ReattachExecute(ctx context.Context, in *ReattachExecuteRequest, opts ...grpc.CallOption) (SparkConnectService_ReattachExecuteClient, error) {
	stream, err := c.cc.NewStream(ctx, &SparkConnectService_ServiceDesc.Streams[2], SparkConnectService_ReattachExecute_FullMethodName, opts...)
	if err != nil {
		return nil, err
	}
	x := &sparkConnectServiceReattachExecuteClient{stream}
	if err := x.ClientStream.SendMsg(in); err != nil {
		return nil, err
	}
	if err := x.ClientStream.CloseSend(); err != nil {
		return nil, err
	}
	return x, nil
}

type SparkConnectService_ReattachExecuteClient interface {
	Recv() (*ExecutePlanResponse, error)
	grpc.ClientStream
}

type sparkConnectServiceReattachExecuteClient struct {
	grpc.ClientStream
}

func (x *sparkConnectServiceReattachExecuteClient) Recv() (*ExecutePlanResponse, error) {
	m := new(ExecutePlanResponse)
	if err := x.ClientStream.RecvMsg(m); err != nil {
		return nil, err
	}
	return m, nil
}

func (c *sparkConnectServiceClient) ReleaseExecute(ctx context.Context, in *ReleaseExecuteRequest, opts ...grpc.CallOption) (*ReleaseExecuteResponse, error) {
	out := new(ReleaseExecuteResponse)
	err := c.cc.Invoke(ctx, SparkConnectService_ReleaseExecute_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *sparkConnectServiceClient) ReleaseSession(ctx context.Context, in *ReleaseSessionRequest, opts ...grpc.CallOption) (*ReleaseSessionResponse, error) {
	out := new(ReleaseSessionResponse)
	err := c.cc.Invoke(ctx, SparkConnectService_ReleaseSession_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *sparkConnectServiceClient) FetchErrorDetails(ctx context.Context, in *FetchErrorDetailsRequest, opts ...grpc.CallOption) (*FetchErrorDetailsResponse, error) {
	out := new(FetchErrorDetailsResponse)
	err := c.cc.Invoke(ctx, SparkConnectService_FetchErrorDetails_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// SparkConnectServiceServer is the server API for SparkConnectService service.
// All implementations must embed UnimplementedSparkConnectServiceServer
// for forward compatibility
type SparkConnectServiceServer interface {
	// Executes a request that contains the query and returns a stream of [[Response]].
	//
	// It is guaranteed that there is at least one ARROW batch returned even if the result set is empty.
	ExecutePlan(*ExecutePlanRequest, SparkConnectService_ExecutePlanServer) error
	// Analyzes a query and returns a [[AnalyzeResponse]] containing metadata about the query.
	AnalyzePlan(context.Context, *AnalyzePlanRequest) (*AnalyzePlanResponse, error)
	// Update or fetch the configurations and returns a [[ConfigResponse]] containing the result.
	Config(context.Context, *ConfigRequest) (*ConfigResponse, error)
	// Add artifacts to the session and returns a [[AddArtifactsResponse]] containing metadata about
	// the added artifacts.
	AddArtifacts(SparkConnectService_AddArtifactsServer) error
	// Check statuses of artifacts in the session and returns them in a [[ArtifactStatusesResponse]]
	ArtifactStatus(context.Context, *ArtifactStatusesRequest) (*ArtifactStatusesResponse, error)
	// Interrupts running executions
	Interrupt(context.Context, *InterruptRequest) (*InterruptResponse, error)
	// Reattach to an existing reattachable execution.
	// The ExecutePlan must have been started with ReattachOptions.reattachable=true.
	// If the ExecutePlanResponse stream ends without a ResultComplete message, there is more to
	// continue. If there is a ResultComplete, the client should use ReleaseExecute with
	ReattachExecute(*ReattachExecuteRequest, SparkConnectService_ReattachExecuteServer) error
	// Release an reattachable execution, or parts thereof.
	// The ExecutePlan must have been started with ReattachOptions.reattachable=true.
	// Non reattachable executions are released automatically and immediately after the ExecutePlan
	// RPC and ReleaseExecute may not be used.
	ReleaseExecute(context.Context, *ReleaseExecuteRequest) (*ReleaseExecuteResponse, error)
	// Release a session.
	// All the executions in the session will be released. Any further requests for the session with
	// that session_id for the given user_id will fail. If the session didn't exist or was already
	// released, this is a noop.
	ReleaseSession(context.Context, *ReleaseSessionRequest) (*ReleaseSessionResponse, error)
	// FetchErrorDetails retrieves the matched exception with details based on a provided error id.
	FetchErrorDetails(context.Context, *FetchErrorDetailsRequest) (*FetchErrorDetailsResponse, error)
	mustEmbedUnimplementedSparkConnectServiceServer()
}

// UnimplementedSparkConnectServiceServer must be embedded to have forward compatible implementations.
type UnimplementedSparkConnectServiceServer struct {
}

func (UnimplementedSparkConnectServiceServer) ExecutePlan(*ExecutePlanRequest, SparkConnectService_ExecutePlanServer) error {
	return status.Errorf(codes.Unimplemented, "method ExecutePlan not implemented")
}
func (UnimplementedSparkConnectServiceServer) AnalyzePlan(context.Context, *AnalyzePlanRequest) (*AnalyzePlanResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method AnalyzePlan not implemented")
}
func (UnimplementedSparkConnectServiceServer) Config(context.Context, *ConfigRequest) (*ConfigResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Config not implemented")
}
func (UnimplementedSparkConnectServiceServer) AddArtifacts(SparkConnectService_AddArtifactsServer) error {
	return status.Errorf(codes.Unimplemented, "method AddArtifacts not implemented")
}
func (UnimplementedSparkConnectServiceServer) ArtifactStatus(context.Context, *ArtifactStatusesRequest) (*ArtifactStatusesResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ArtifactStatus not implemented")
}
func (UnimplementedSparkConnectServiceServer) Interrupt(context.Context, *InterruptRequest) (*InterruptResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Interrupt not implemented")
}
func (UnimplementedSparkConnectServiceServer) ReattachExecute(*ReattachExecuteRequest, SparkConnectService_ReattachExecuteServer) error {
	return status.Errorf(codes.Unimplemented, "method ReattachExecute not implemented")
}
func (UnimplementedSparkConnectServiceServer) ReleaseExecute(context.Context, *ReleaseExecuteRequest) (*ReleaseExecuteResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ReleaseExecute not implemented")
}
func (UnimplementedSparkConnectServiceServer) ReleaseSession(context.Context, *ReleaseSessionRequest) (*ReleaseSessionResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ReleaseSession not implemented")
}
func (UnimplementedSparkConnectServiceServer) FetchErrorDetails(context.Context, *FetchErrorDetailsRequest) (*FetchErrorDetailsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method FetchErrorDetails not implemented")
}
func (UnimplementedSparkConnectServiceServer) mustEmbedUnimplementedSparkConnectServiceServer() {}

// UnsafeSparkConnectServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to SparkConnectServiceServer will
// result in compilation errors.
type UnsafeSparkConnectServiceServer interface {
	mustEmbedUnimplementedSparkConnectServiceServer()
}

func RegisterSparkConnectServiceServer(s grpc.ServiceRegistrar, srv SparkConnectServiceServer) {
	s.RegisterService(&SparkConnectService_ServiceDesc, srv)
}

// NOTE(review): everything below is protoc-gen-go-grpc boilerplate wiring each
// RPC to its handler in SparkConnectService_ServiceDesc — regenerate rather
// than hand-edit.

func _SparkConnectService_ExecutePlan_Handler(srv interface{}, stream grpc.ServerStream) error {
	m := new(ExecutePlanRequest)
	if err := stream.RecvMsg(m); err != nil {
		return err
	}
	return srv.(SparkConnectServiceServer).ExecutePlan(m, &sparkConnectServiceExecutePlanServer{stream})
}

type SparkConnectService_ExecutePlanServer interface {
	Send(*ExecutePlanResponse) error
	grpc.ServerStream
}

type sparkConnectServiceExecutePlanServer struct {
	grpc.ServerStream
}

func (x *sparkConnectServiceExecutePlanServer) Send(m *ExecutePlanResponse) error {
	return x.ServerStream.SendMsg(m)
}

func _SparkConnectService_AnalyzePlan_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(AnalyzePlanRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SparkConnectServiceServer).AnalyzePlan(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: SparkConnectService_AnalyzePlan_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SparkConnectServiceServer).AnalyzePlan(ctx, req.(*AnalyzePlanRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _SparkConnectService_Config_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ConfigRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SparkConnectServiceServer).Config(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: SparkConnectService_Config_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SparkConnectServiceServer).Config(ctx, req.(*ConfigRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _SparkConnectService_AddArtifacts_Handler(srv interface{}, stream grpc.ServerStream) error {
	return srv.(SparkConnectServiceServer).AddArtifacts(&sparkConnectServiceAddArtifactsServer{stream})
}

type SparkConnectService_AddArtifactsServer interface {
	SendAndClose(*AddArtifactsResponse) error
	Recv() (*AddArtifactsRequest, error)
	grpc.ServerStream
}

type sparkConnectServiceAddArtifactsServer struct {
	grpc.ServerStream
}

func (x *sparkConnectServiceAddArtifactsServer) SendAndClose(m *AddArtifactsResponse) error {
	return x.ServerStream.SendMsg(m)
}

func (x *sparkConnectServiceAddArtifactsServer) Recv() (*AddArtifactsRequest, error) {
	m := new(AddArtifactsRequest)
	if err := x.ServerStream.RecvMsg(m); err != nil {
		return nil, err
	}
	return m, nil
}

func _SparkConnectService_ArtifactStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ArtifactStatusesRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SparkConnectServiceServer).ArtifactStatus(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: SparkConnectService_ArtifactStatus_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SparkConnectServiceServer).ArtifactStatus(ctx, req.(*ArtifactStatusesRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _SparkConnectService_Interrupt_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(InterruptRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SparkConnectServiceServer).Interrupt(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: SparkConnectService_Interrupt_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SparkConnectServiceServer).Interrupt(ctx, req.(*InterruptRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _SparkConnectService_ReattachExecute_Handler(srv interface{}, stream grpc.ServerStream) error {
	m := new(ReattachExecuteRequest)
	if err := stream.RecvMsg(m); err != nil {
		return err
	}
	return srv.(SparkConnectServiceServer).ReattachExecute(m, &sparkConnectServiceReattachExecuteServer{stream})
}

type SparkConnectService_ReattachExecuteServer interface {
	Send(*ExecutePlanResponse) error
	grpc.ServerStream
}

type sparkConnectServiceReattachExecuteServer struct {
	grpc.ServerStream
}

func (x *sparkConnectServiceReattachExecuteServer) Send(m *ExecutePlanResponse) error {
	return x.ServerStream.SendMsg(m)
}

func _SparkConnectService_ReleaseExecute_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ReleaseExecuteRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SparkConnectServiceServer).ReleaseExecute(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: SparkConnectService_ReleaseExecute_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SparkConnectServiceServer).ReleaseExecute(ctx, req.(*ReleaseExecuteRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _SparkConnectService_ReleaseSession_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ReleaseSessionRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SparkConnectServiceServer).ReleaseSession(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: SparkConnectService_ReleaseSession_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SparkConnectServiceServer).ReleaseSession(ctx, req.(*ReleaseSessionRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _SparkConnectService_FetchErrorDetails_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(FetchErrorDetailsRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SparkConnectServiceServer).FetchErrorDetails(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: SparkConnectService_FetchErrorDetails_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(SparkConnectServiceServer).FetchErrorDetails(ctx, req.(*FetchErrorDetailsRequest))
	}
	return interceptor(ctx, in, info, handler)
}

// SparkConnectService_ServiceDesc is the grpc.ServiceDesc for SparkConnectService service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
// The Streams indices here (0: ExecutePlan, 1: AddArtifacts, 2: ReattachExecute)
// are referenced by the client stubs above.
var SparkConnectService_ServiceDesc = grpc.ServiceDesc{
	ServiceName: "spark.connect.SparkConnectService",
	HandlerType: (*SparkConnectServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "AnalyzePlan",
			Handler:    _SparkConnectService_AnalyzePlan_Handler,
		},
		{
			MethodName: "Config",
			Handler:    _SparkConnectService_Config_Handler,
		},
		{
			MethodName: "ArtifactStatus",
			Handler:    _SparkConnectService_ArtifactStatus_Handler,
		},
		{
			MethodName: "Interrupt",
			Handler:    _SparkConnectService_Interrupt_Handler,
		},
		{
			MethodName: "ReleaseExecute",
			Handler:    _SparkConnectService_ReleaseExecute_Handler,
		},
		{
			MethodName: "ReleaseSession",
			Handler:    _SparkConnectService_ReleaseSession_Handler,
		},
		{
			MethodName: "FetchErrorDetails",
			Handler:    _SparkConnectService_FetchErrorDetails_Handler,
		},
	},
	Streams: []grpc.StreamDesc{
		{
			StreamName:    "ExecutePlan",
			Handler:       _SparkConnectService_ExecutePlan_Handler,
			ServerStreams: true,
		},
		{
			StreamName:    "AddArtifacts",
			Handler:       _SparkConnectService_AddArtifacts_Handler,
			ClientStreams: true,
		},
		{
			StreamName:    "ReattachExecute",
			Handler:       _SparkConnectService_ReattachExecute_Handler,
			ServerStreams: true,
		},
	},
	Metadata: "spark/connect/base.proto",
}


================================================
FILE: internal/generated/catalog.pb.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
//	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
//	protoc-gen-go v1.30.0
//	protoc        (unknown)
// source: spark/connect/catalog.proto

package generated

import (
	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
	reflect "reflect"
	sync "sync"
)

const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// Catalog messages are marked as unstable.
// Catalog is the generated message wrapping every Spark catalog operation as a
// single proto oneof (`cat_type`). Exactly one of the listed wrapper types is
// set at a time.
type Catalog struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to CatType:
	//
	//	*Catalog_CurrentDatabase
	//	*Catalog_SetCurrentDatabase
	//	*Catalog_ListDatabases
	//	*Catalog_ListTables
	//	*Catalog_ListFunctions
	//	*Catalog_ListColumns
	//	*Catalog_GetDatabase
	//	*Catalog_GetTable
	//	*Catalog_GetFunction
	//	*Catalog_DatabaseExists
	//	*Catalog_TableExists
	//	*Catalog_FunctionExists
	//	*Catalog_CreateExternalTable
	//	*Catalog_CreateTable
	//	*Catalog_DropTempView
	//	*Catalog_DropGlobalTempView
	//	*Catalog_RecoverPartitions
	//	*Catalog_IsCached
	//	*Catalog_CacheTable
	//	*Catalog_UncacheTable
	//	*Catalog_ClearCache
	//	*Catalog_RefreshTable
	//	*Catalog_RefreshByPath
	//	*Catalog_CurrentCatalog
	//	*Catalog_SetCurrentCatalog
	//	*Catalog_ListCatalogs
	CatType isCatalog_CatType `protobuf_oneof:"cat_type"`
}

func (x *Catalog) Reset() {
	*x = Catalog{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_catalog_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Catalog) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Catalog) ProtoMessage() {}

func (x *Catalog) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_catalog_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Catalog.ProtoReflect.Descriptor instead.
func (*Catalog) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{0} } func (m *Catalog) GetCatType() isCatalog_CatType { if m != nil { return m.CatType } return nil } func (x *Catalog) GetCurrentDatabase() *CurrentDatabase { if x, ok := x.GetCatType().(*Catalog_CurrentDatabase); ok { return x.CurrentDatabase } return nil } func (x *Catalog) GetSetCurrentDatabase() *SetCurrentDatabase { if x, ok := x.GetCatType().(*Catalog_SetCurrentDatabase); ok { return x.SetCurrentDatabase } return nil } func (x *Catalog) GetListDatabases() *ListDatabases { if x, ok := x.GetCatType().(*Catalog_ListDatabases); ok { return x.ListDatabases } return nil } func (x *Catalog) GetListTables() *ListTables { if x, ok := x.GetCatType().(*Catalog_ListTables); ok { return x.ListTables } return nil } func (x *Catalog) GetListFunctions() *ListFunctions { if x, ok := x.GetCatType().(*Catalog_ListFunctions); ok { return x.ListFunctions } return nil } func (x *Catalog) GetListColumns() *ListColumns { if x, ok := x.GetCatType().(*Catalog_ListColumns); ok { return x.ListColumns } return nil } func (x *Catalog) GetGetDatabase() *GetDatabase { if x, ok := x.GetCatType().(*Catalog_GetDatabase); ok { return x.GetDatabase } return nil } func (x *Catalog) GetGetTable() *GetTable { if x, ok := x.GetCatType().(*Catalog_GetTable); ok { return x.GetTable } return nil } func (x *Catalog) GetGetFunction() *GetFunction { if x, ok := x.GetCatType().(*Catalog_GetFunction); ok { return x.GetFunction } return nil } func (x *Catalog) GetDatabaseExists() *DatabaseExists { if x, ok := x.GetCatType().(*Catalog_DatabaseExists); ok { return x.DatabaseExists } return nil } func (x *Catalog) GetTableExists() *TableExists { if x, ok := x.GetCatType().(*Catalog_TableExists); ok { return x.TableExists } return nil } func (x *Catalog) GetFunctionExists() *FunctionExists { if x, ok := x.GetCatType().(*Catalog_FunctionExists); ok { return x.FunctionExists } return nil } func (x *Catalog) 
GetCreateExternalTable() *CreateExternalTable { if x, ok := x.GetCatType().(*Catalog_CreateExternalTable); ok { return x.CreateExternalTable } return nil } func (x *Catalog) GetCreateTable() *CreateTable { if x, ok := x.GetCatType().(*Catalog_CreateTable); ok { return x.CreateTable } return nil } func (x *Catalog) GetDropTempView() *DropTempView { if x, ok := x.GetCatType().(*Catalog_DropTempView); ok { return x.DropTempView } return nil } func (x *Catalog) GetDropGlobalTempView() *DropGlobalTempView { if x, ok := x.GetCatType().(*Catalog_DropGlobalTempView); ok { return x.DropGlobalTempView } return nil } func (x *Catalog) GetRecoverPartitions() *RecoverPartitions { if x, ok := x.GetCatType().(*Catalog_RecoverPartitions); ok { return x.RecoverPartitions } return nil } func (x *Catalog) GetIsCached() *IsCached { if x, ok := x.GetCatType().(*Catalog_IsCached); ok { return x.IsCached } return nil } func (x *Catalog) GetCacheTable() *CacheTable { if x, ok := x.GetCatType().(*Catalog_CacheTable); ok { return x.CacheTable } return nil } func (x *Catalog) GetUncacheTable() *UncacheTable { if x, ok := x.GetCatType().(*Catalog_UncacheTable); ok { return x.UncacheTable } return nil } func (x *Catalog) GetClearCache() *ClearCache { if x, ok := x.GetCatType().(*Catalog_ClearCache); ok { return x.ClearCache } return nil } func (x *Catalog) GetRefreshTable() *RefreshTable { if x, ok := x.GetCatType().(*Catalog_RefreshTable); ok { return x.RefreshTable } return nil } func (x *Catalog) GetRefreshByPath() *RefreshByPath { if x, ok := x.GetCatType().(*Catalog_RefreshByPath); ok { return x.RefreshByPath } return nil } func (x *Catalog) GetCurrentCatalog() *CurrentCatalog { if x, ok := x.GetCatType().(*Catalog_CurrentCatalog); ok { return x.CurrentCatalog } return nil } func (x *Catalog) GetSetCurrentCatalog() *SetCurrentCatalog { if x, ok := x.GetCatType().(*Catalog_SetCurrentCatalog); ok { return x.SetCurrentCatalog } return nil } func (x *Catalog) GetListCatalogs() *ListCatalogs { 
if x, ok := x.GetCatType().(*Catalog_ListCatalogs); ok { return x.ListCatalogs } return nil } type isCatalog_CatType interface { isCatalog_CatType() } type Catalog_CurrentDatabase struct { CurrentDatabase *CurrentDatabase `protobuf:"bytes,1,opt,name=current_database,json=currentDatabase,proto3,oneof"` } type Catalog_SetCurrentDatabase struct { SetCurrentDatabase *SetCurrentDatabase `protobuf:"bytes,2,opt,name=set_current_database,json=setCurrentDatabase,proto3,oneof"` } type Catalog_ListDatabases struct { ListDatabases *ListDatabases `protobuf:"bytes,3,opt,name=list_databases,json=listDatabases,proto3,oneof"` } type Catalog_ListTables struct { ListTables *ListTables `protobuf:"bytes,4,opt,name=list_tables,json=listTables,proto3,oneof"` } type Catalog_ListFunctions struct { ListFunctions *ListFunctions `protobuf:"bytes,5,opt,name=list_functions,json=listFunctions,proto3,oneof"` } type Catalog_ListColumns struct { ListColumns *ListColumns `protobuf:"bytes,6,opt,name=list_columns,json=listColumns,proto3,oneof"` } type Catalog_GetDatabase struct { GetDatabase *GetDatabase `protobuf:"bytes,7,opt,name=get_database,json=getDatabase,proto3,oneof"` } type Catalog_GetTable struct { GetTable *GetTable `protobuf:"bytes,8,opt,name=get_table,json=getTable,proto3,oneof"` } type Catalog_GetFunction struct { GetFunction *GetFunction `protobuf:"bytes,9,opt,name=get_function,json=getFunction,proto3,oneof"` } type Catalog_DatabaseExists struct { DatabaseExists *DatabaseExists `protobuf:"bytes,10,opt,name=database_exists,json=databaseExists,proto3,oneof"` } type Catalog_TableExists struct { TableExists *TableExists `protobuf:"bytes,11,opt,name=table_exists,json=tableExists,proto3,oneof"` } type Catalog_FunctionExists struct { FunctionExists *FunctionExists `protobuf:"bytes,12,opt,name=function_exists,json=functionExists,proto3,oneof"` } type Catalog_CreateExternalTable struct { CreateExternalTable *CreateExternalTable 
`protobuf:"bytes,13,opt,name=create_external_table,json=createExternalTable,proto3,oneof"` } type Catalog_CreateTable struct { CreateTable *CreateTable `protobuf:"bytes,14,opt,name=create_table,json=createTable,proto3,oneof"` } type Catalog_DropTempView struct { DropTempView *DropTempView `protobuf:"bytes,15,opt,name=drop_temp_view,json=dropTempView,proto3,oneof"` } type Catalog_DropGlobalTempView struct { DropGlobalTempView *DropGlobalTempView `protobuf:"bytes,16,opt,name=drop_global_temp_view,json=dropGlobalTempView,proto3,oneof"` } type Catalog_RecoverPartitions struct { RecoverPartitions *RecoverPartitions `protobuf:"bytes,17,opt,name=recover_partitions,json=recoverPartitions,proto3,oneof"` } type Catalog_IsCached struct { IsCached *IsCached `protobuf:"bytes,18,opt,name=is_cached,json=isCached,proto3,oneof"` } type Catalog_CacheTable struct { CacheTable *CacheTable `protobuf:"bytes,19,opt,name=cache_table,json=cacheTable,proto3,oneof"` } type Catalog_UncacheTable struct { UncacheTable *UncacheTable `protobuf:"bytes,20,opt,name=uncache_table,json=uncacheTable,proto3,oneof"` } type Catalog_ClearCache struct { ClearCache *ClearCache `protobuf:"bytes,21,opt,name=clear_cache,json=clearCache,proto3,oneof"` } type Catalog_RefreshTable struct { RefreshTable *RefreshTable `protobuf:"bytes,22,opt,name=refresh_table,json=refreshTable,proto3,oneof"` } type Catalog_RefreshByPath struct { RefreshByPath *RefreshByPath `protobuf:"bytes,23,opt,name=refresh_by_path,json=refreshByPath,proto3,oneof"` } type Catalog_CurrentCatalog struct { CurrentCatalog *CurrentCatalog `protobuf:"bytes,24,opt,name=current_catalog,json=currentCatalog,proto3,oneof"` } type Catalog_SetCurrentCatalog struct { SetCurrentCatalog *SetCurrentCatalog `protobuf:"bytes,25,opt,name=set_current_catalog,json=setCurrentCatalog,proto3,oneof"` } type Catalog_ListCatalogs struct { ListCatalogs *ListCatalogs `protobuf:"bytes,26,opt,name=list_catalogs,json=listCatalogs,proto3,oneof"` } func (*Catalog_CurrentDatabase) 
isCatalog_CatType() {} func (*Catalog_SetCurrentDatabase) isCatalog_CatType() {} func (*Catalog_ListDatabases) isCatalog_CatType() {} func (*Catalog_ListTables) isCatalog_CatType() {} func (*Catalog_ListFunctions) isCatalog_CatType() {} func (*Catalog_ListColumns) isCatalog_CatType() {} func (*Catalog_GetDatabase) isCatalog_CatType() {} func (*Catalog_GetTable) isCatalog_CatType() {} func (*Catalog_GetFunction) isCatalog_CatType() {} func (*Catalog_DatabaseExists) isCatalog_CatType() {} func (*Catalog_TableExists) isCatalog_CatType() {} func (*Catalog_FunctionExists) isCatalog_CatType() {} func (*Catalog_CreateExternalTable) isCatalog_CatType() {} func (*Catalog_CreateTable) isCatalog_CatType() {} func (*Catalog_DropTempView) isCatalog_CatType() {} func (*Catalog_DropGlobalTempView) isCatalog_CatType() {} func (*Catalog_RecoverPartitions) isCatalog_CatType() {} func (*Catalog_IsCached) isCatalog_CatType() {} func (*Catalog_CacheTable) isCatalog_CatType() {} func (*Catalog_UncacheTable) isCatalog_CatType() {} func (*Catalog_ClearCache) isCatalog_CatType() {} func (*Catalog_RefreshTable) isCatalog_CatType() {} func (*Catalog_RefreshByPath) isCatalog_CatType() {} func (*Catalog_CurrentCatalog) isCatalog_CatType() {} func (*Catalog_SetCurrentCatalog) isCatalog_CatType() {} func (*Catalog_ListCatalogs) isCatalog_CatType() {} // See `spark.catalog.currentDatabase` type CurrentDatabase struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } func (x *CurrentDatabase) Reset() { *x = CurrentDatabase{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CurrentDatabase) String() string { return protoimpl.X.MessageStringOf(x) } func (*CurrentDatabase) ProtoMessage() {} func (x *CurrentDatabase) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[1] if 
protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CurrentDatabase.ProtoReflect.Descriptor instead. func (*CurrentDatabase) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{1} } // See `spark.catalog.setCurrentDatabase` type SetCurrentDatabase struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) DbName string `protobuf:"bytes,1,opt,name=db_name,json=dbName,proto3" json:"db_name,omitempty"` } func (x *SetCurrentDatabase) Reset() { *x = SetCurrentDatabase{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *SetCurrentDatabase) String() string { return protoimpl.X.MessageStringOf(x) } func (*SetCurrentDatabase) ProtoMessage() {} func (x *SetCurrentDatabase) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SetCurrentDatabase.ProtoReflect.Descriptor instead. 
func (*SetCurrentDatabase) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{2} } func (x *SetCurrentDatabase) GetDbName() string { if x != nil { return x.DbName } return "" } // See `spark.catalog.listDatabases` type ListDatabases struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) The pattern that the database name needs to match Pattern *string `protobuf:"bytes,1,opt,name=pattern,proto3,oneof" json:"pattern,omitempty"` } func (x *ListDatabases) Reset() { *x = ListDatabases{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ListDatabases) String() string { return protoimpl.X.MessageStringOf(x) } func (*ListDatabases) ProtoMessage() {} func (x *ListDatabases) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ListDatabases.ProtoReflect.Descriptor instead. 
func (*ListDatabases) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{3} } func (x *ListDatabases) GetPattern() string { if x != nil && x.Pattern != nil { return *x.Pattern } return "" } // See `spark.catalog.listTables` type ListTables struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) DbName *string `protobuf:"bytes,1,opt,name=db_name,json=dbName,proto3,oneof" json:"db_name,omitempty"` // (Optional) The pattern that the table name needs to match Pattern *string `protobuf:"bytes,2,opt,name=pattern,proto3,oneof" json:"pattern,omitempty"` } func (x *ListTables) Reset() { *x = ListTables{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ListTables) String() string { return protoimpl.X.MessageStringOf(x) } func (*ListTables) ProtoMessage() {} func (x *ListTables) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ListTables.ProtoReflect.Descriptor instead. 
func (*ListTables) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{4} } func (x *ListTables) GetDbName() string { if x != nil && x.DbName != nil { return *x.DbName } return "" } func (x *ListTables) GetPattern() string { if x != nil && x.Pattern != nil { return *x.Pattern } return "" } // See `spark.catalog.listFunctions` type ListFunctions struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) DbName *string `protobuf:"bytes,1,opt,name=db_name,json=dbName,proto3,oneof" json:"db_name,omitempty"` // (Optional) The pattern that the function name needs to match Pattern *string `protobuf:"bytes,2,opt,name=pattern,proto3,oneof" json:"pattern,omitempty"` } func (x *ListFunctions) Reset() { *x = ListFunctions{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ListFunctions) String() string { return protoimpl.X.MessageStringOf(x) } func (*ListFunctions) ProtoMessage() {} func (x *ListFunctions) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ListFunctions.ProtoReflect.Descriptor instead. 
func (*ListFunctions) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{5} } func (x *ListFunctions) GetDbName() string { if x != nil && x.DbName != nil { return *x.DbName } return "" } func (x *ListFunctions) GetPattern() string { if x != nil && x.Pattern != nil { return *x.Pattern } return "" } // See `spark.catalog.listColumns` type ListColumns struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` // (Optional) DbName *string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3,oneof" json:"db_name,omitempty"` } func (x *ListColumns) Reset() { *x = ListColumns{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ListColumns) String() string { return protoimpl.X.MessageStringOf(x) } func (*ListColumns) ProtoMessage() {} func (x *ListColumns) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ListColumns.ProtoReflect.Descriptor instead. 
func (*ListColumns) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{6} } func (x *ListColumns) GetTableName() string { if x != nil { return x.TableName } return "" } func (x *ListColumns) GetDbName() string { if x != nil && x.DbName != nil { return *x.DbName } return "" } // See `spark.catalog.getDatabase` type GetDatabase struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) DbName string `protobuf:"bytes,1,opt,name=db_name,json=dbName,proto3" json:"db_name,omitempty"` } func (x *GetDatabase) Reset() { *x = GetDatabase{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *GetDatabase) String() string { return protoimpl.X.MessageStringOf(x) } func (*GetDatabase) ProtoMessage() {} func (x *GetDatabase) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use GetDatabase.ProtoReflect.Descriptor instead. 
func (*GetDatabase) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{7} } func (x *GetDatabase) GetDbName() string { if x != nil { return x.DbName } return "" } // See `spark.catalog.getTable` type GetTable struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` // (Optional) DbName *string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3,oneof" json:"db_name,omitempty"` } func (x *GetTable) Reset() { *x = GetTable{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *GetTable) String() string { return protoimpl.X.MessageStringOf(x) } func (*GetTable) ProtoMessage() {} func (x *GetTable) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use GetTable.ProtoReflect.Descriptor instead. 
func (*GetTable) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{8} } func (x *GetTable) GetTableName() string { if x != nil { return x.TableName } return "" } func (x *GetTable) GetDbName() string { if x != nil && x.DbName != nil { return *x.DbName } return "" } // See `spark.catalog.getFunction` type GetFunction struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` // (Optional) DbName *string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3,oneof" json:"db_name,omitempty"` } func (x *GetFunction) Reset() { *x = GetFunction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *GetFunction) String() string { return protoimpl.X.MessageStringOf(x) } func (*GetFunction) ProtoMessage() {} func (x *GetFunction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use GetFunction.ProtoReflect.Descriptor instead. 
func (*GetFunction) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{9} } func (x *GetFunction) GetFunctionName() string { if x != nil { return x.FunctionName } return "" } func (x *GetFunction) GetDbName() string { if x != nil && x.DbName != nil { return *x.DbName } return "" } // See `spark.catalog.databaseExists` type DatabaseExists struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) DbName string `protobuf:"bytes,1,opt,name=db_name,json=dbName,proto3" json:"db_name,omitempty"` } func (x *DatabaseExists) Reset() { *x = DatabaseExists{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DatabaseExists) String() string { return protoimpl.X.MessageStringOf(x) } func (*DatabaseExists) ProtoMessage() {} func (x *DatabaseExists) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DatabaseExists.ProtoReflect.Descriptor instead. 
func (*DatabaseExists) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{10} } func (x *DatabaseExists) GetDbName() string { if x != nil { return x.DbName } return "" } // See `spark.catalog.tableExists` type TableExists struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` // (Optional) DbName *string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3,oneof" json:"db_name,omitempty"` } func (x *TableExists) Reset() { *x = TableExists{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *TableExists) String() string { return protoimpl.X.MessageStringOf(x) } func (*TableExists) ProtoMessage() {} func (x *TableExists) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TableExists.ProtoReflect.Descriptor instead. 
func (*TableExists) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{11} } func (x *TableExists) GetTableName() string { if x != nil { return x.TableName } return "" } func (x *TableExists) GetDbName() string { if x != nil && x.DbName != nil { return *x.DbName } return "" } // See `spark.catalog.functionExists` type FunctionExists struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` // (Optional) DbName *string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3,oneof" json:"db_name,omitempty"` } func (x *FunctionExists) Reset() { *x = FunctionExists{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *FunctionExists) String() string { return protoimpl.X.MessageStringOf(x) } func (*FunctionExists) ProtoMessage() {} func (x *FunctionExists) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use FunctionExists.ProtoReflect.Descriptor instead. 
func (*FunctionExists) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{12} } func (x *FunctionExists) GetFunctionName() string { if x != nil { return x.FunctionName } return "" } func (x *FunctionExists) GetDbName() string { if x != nil && x.DbName != nil { return *x.DbName } return "" } // See `spark.catalog.createExternalTable` type CreateExternalTable struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` // (Optional) Path *string `protobuf:"bytes,2,opt,name=path,proto3,oneof" json:"path,omitempty"` // (Optional) Source *string `protobuf:"bytes,3,opt,name=source,proto3,oneof" json:"source,omitempty"` // (Optional) Schema *DataType `protobuf:"bytes,4,opt,name=schema,proto3,oneof" json:"schema,omitempty"` // Options could be empty for valid data source format. // The map key is case insensitive. Options map[string]string `protobuf:"bytes,5,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *CreateExternalTable) Reset() { *x = CreateExternalTable{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CreateExternalTable) String() string { return protoimpl.X.MessageStringOf(x) } func (*CreateExternalTable) ProtoMessage() {} func (x *CreateExternalTable) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CreateExternalTable.ProtoReflect.Descriptor instead. 
func (*CreateExternalTable) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{13} } func (x *CreateExternalTable) GetTableName() string { if x != nil { return x.TableName } return "" } func (x *CreateExternalTable) GetPath() string { if x != nil && x.Path != nil { return *x.Path } return "" } func (x *CreateExternalTable) GetSource() string { if x != nil && x.Source != nil { return *x.Source } return "" } func (x *CreateExternalTable) GetSchema() *DataType { if x != nil { return x.Schema } return nil } func (x *CreateExternalTable) GetOptions() map[string]string { if x != nil { return x.Options } return nil } // See `spark.catalog.createTable` type CreateTable struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` // (Optional) Path *string `protobuf:"bytes,2,opt,name=path,proto3,oneof" json:"path,omitempty"` // (Optional) Source *string `protobuf:"bytes,3,opt,name=source,proto3,oneof" json:"source,omitempty"` // (Optional) Description *string `protobuf:"bytes,4,opt,name=description,proto3,oneof" json:"description,omitempty"` // (Optional) Schema *DataType `protobuf:"bytes,5,opt,name=schema,proto3,oneof" json:"schema,omitempty"` // Options could be empty for valid data source format. // The map key is case insensitive. 
Options map[string]string `protobuf:"bytes,6,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *CreateTable) Reset() { *x = CreateTable{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CreateTable) String() string { return protoimpl.X.MessageStringOf(x) } func (*CreateTable) ProtoMessage() {} func (x *CreateTable) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CreateTable.ProtoReflect.Descriptor instead. func (*CreateTable) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{14} } func (x *CreateTable) GetTableName() string { if x != nil { return x.TableName } return "" } func (x *CreateTable) GetPath() string { if x != nil && x.Path != nil { return *x.Path } return "" } func (x *CreateTable) GetSource() string { if x != nil && x.Source != nil { return *x.Source } return "" } func (x *CreateTable) GetDescription() string { if x != nil && x.Description != nil { return *x.Description } return "" } func (x *CreateTable) GetSchema() *DataType { if x != nil { return x.Schema } return nil } func (x *CreateTable) GetOptions() map[string]string { if x != nil { return x.Options } return nil } // See `spark.catalog.dropTempView` type DropTempView struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) ViewName string `protobuf:"bytes,1,opt,name=view_name,json=viewName,proto3" json:"view_name,omitempty"` } func (x *DropTempView) Reset() { *x = DropTempView{} if protoimpl.UnsafeEnabled { mi := 
&file_spark_connect_catalog_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DropTempView) String() string { return protoimpl.X.MessageStringOf(x) } func (*DropTempView) ProtoMessage() {} func (x *DropTempView) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DropTempView.ProtoReflect.Descriptor instead. func (*DropTempView) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{15} } func (x *DropTempView) GetViewName() string { if x != nil { return x.ViewName } return "" } // See `spark.catalog.dropGlobalTempView` type DropGlobalTempView struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) ViewName string `protobuf:"bytes,1,opt,name=view_name,json=viewName,proto3" json:"view_name,omitempty"` } func (x *DropGlobalTempView) Reset() { *x = DropGlobalTempView{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DropGlobalTempView) String() string { return protoimpl.X.MessageStringOf(x) } func (*DropGlobalTempView) ProtoMessage() {} func (x *DropGlobalTempView) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DropGlobalTempView.ProtoReflect.Descriptor instead. 
func (*DropGlobalTempView) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{16} } func (x *DropGlobalTempView) GetViewName() string { if x != nil { return x.ViewName } return "" } // See `spark.catalog.recoverPartitions` type RecoverPartitions struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` } func (x *RecoverPartitions) Reset() { *x = RecoverPartitions{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *RecoverPartitions) String() string { return protoimpl.X.MessageStringOf(x) } func (*RecoverPartitions) ProtoMessage() {} func (x *RecoverPartitions) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use RecoverPartitions.ProtoReflect.Descriptor instead. 
func (*RecoverPartitions) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{17} } func (x *RecoverPartitions) GetTableName() string { if x != nil { return x.TableName } return "" } // See `spark.catalog.isCached` type IsCached struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` } func (x *IsCached) Reset() { *x = IsCached{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *IsCached) String() string { return protoimpl.X.MessageStringOf(x) } func (*IsCached) ProtoMessage() {} func (x *IsCached) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use IsCached.ProtoReflect.Descriptor instead. 
func (*IsCached) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{18} } func (x *IsCached) GetTableName() string { if x != nil { return x.TableName } return "" } // See `spark.catalog.cacheTable` type CacheTable struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` // (Optional) StorageLevel *StorageLevel `protobuf:"bytes,2,opt,name=storage_level,json=storageLevel,proto3,oneof" json:"storage_level,omitempty"` } func (x *CacheTable) Reset() { *x = CacheTable{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CacheTable) String() string { return protoimpl.X.MessageStringOf(x) } func (*CacheTable) ProtoMessage() {} func (x *CacheTable) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CacheTable.ProtoReflect.Descriptor instead. 
func (*CacheTable) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{19} } func (x *CacheTable) GetTableName() string { if x != nil { return x.TableName } return "" } func (x *CacheTable) GetStorageLevel() *StorageLevel { if x != nil { return x.StorageLevel } return nil } // See `spark.catalog.uncacheTable` type UncacheTable struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` } func (x *UncacheTable) Reset() { *x = UncacheTable{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *UncacheTable) String() string { return protoimpl.X.MessageStringOf(x) } func (*UncacheTable) ProtoMessage() {} func (x *UncacheTable) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use UncacheTable.ProtoReflect.Descriptor instead. 
func (*UncacheTable) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{20} } func (x *UncacheTable) GetTableName() string { if x != nil { return x.TableName } return "" } // See `spark.catalog.clearCache` type ClearCache struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } func (x *ClearCache) Reset() { *x = ClearCache{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ClearCache) String() string { return protoimpl.X.MessageStringOf(x) } func (*ClearCache) ProtoMessage() {} func (x *ClearCache) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ClearCache.ProtoReflect.Descriptor instead. 
func (*ClearCache) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{21} } // See `spark.catalog.refreshTable` type RefreshTable struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` } func (x *RefreshTable) Reset() { *x = RefreshTable{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *RefreshTable) String() string { return protoimpl.X.MessageStringOf(x) } func (*RefreshTable) ProtoMessage() {} func (x *RefreshTable) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use RefreshTable.ProtoReflect.Descriptor instead. 
func (*RefreshTable) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{22} } func (x *RefreshTable) GetTableName() string { if x != nil { return x.TableName } return "" } // See `spark.catalog.refreshByPath` type RefreshByPath struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` } func (x *RefreshByPath) Reset() { *x = RefreshByPath{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *RefreshByPath) String() string { return protoimpl.X.MessageStringOf(x) } func (*RefreshByPath) ProtoMessage() {} func (x *RefreshByPath) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use RefreshByPath.ProtoReflect.Descriptor instead. 
func (*RefreshByPath) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{23} } func (x *RefreshByPath) GetPath() string { if x != nil { return x.Path } return "" } // See `spark.catalog.currentCatalog` type CurrentCatalog struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } func (x *CurrentCatalog) Reset() { *x = CurrentCatalog{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CurrentCatalog) String() string { return protoimpl.X.MessageStringOf(x) } func (*CurrentCatalog) ProtoMessage() {} func (x *CurrentCatalog) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CurrentCatalog.ProtoReflect.Descriptor instead. 
func (*CurrentCatalog) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{24} } // See `spark.catalog.setCurrentCatalog` type SetCurrentCatalog struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) CatalogName string `protobuf:"bytes,1,opt,name=catalog_name,json=catalogName,proto3" json:"catalog_name,omitempty"` } func (x *SetCurrentCatalog) Reset() { *x = SetCurrentCatalog{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *SetCurrentCatalog) String() string { return protoimpl.X.MessageStringOf(x) } func (*SetCurrentCatalog) ProtoMessage() {} func (x *SetCurrentCatalog) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SetCurrentCatalog.ProtoReflect.Descriptor instead. 
func (*SetCurrentCatalog) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{25} } func (x *SetCurrentCatalog) GetCatalogName() string { if x != nil { return x.CatalogName } return "" } // See `spark.catalog.listCatalogs` type ListCatalogs struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) The pattern that the catalog name needs to match Pattern *string `protobuf:"bytes,1,opt,name=pattern,proto3,oneof" json:"pattern,omitempty"` } func (x *ListCatalogs) Reset() { *x = ListCatalogs{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_catalog_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ListCatalogs) String() string { return protoimpl.X.MessageStringOf(x) } func (*ListCatalogs) ProtoMessage() {} func (x *ListCatalogs) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_catalog_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ListCatalogs.ProtoReflect.Descriptor instead. 
func (*ListCatalogs) Descriptor() ([]byte, []int) { return file_spark_connect_catalog_proto_rawDescGZIP(), []int{26} } func (x *ListCatalogs) GetPattern() string { if x != nil && x.Pattern != nil { return *x.Pattern } return "" } var File_spark_connect_catalog_proto protoreflect.FileDescriptor var file_spark_connect_catalog_proto_rawDesc = []byte{ 0x0a, 0x1b, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x1a, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xc6, 0x0e, 0x0a, 0x07, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12, 0x4b, 0x0a, 0x10, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x48, 0x00, 0x52, 0x0f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x55, 0x0a, 0x14, 0x73, 0x65, 0x74, 0x5f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x48, 0x00, 0x52, 0x12, 0x73, 0x65, 0x74, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 
0x12, 0x45, 0x0a, 0x0e, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x6c, 0x69, 0x73, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x73, 0x12, 0x3c, 0x0a, 0x0b, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x6c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x45, 0x0a, 0x0e, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x6c, 0x69, 0x73, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3f, 0x0a, 0x0c, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0b, 0x6c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x3f, 0x0a, 0x0c, 0x67, 0x65, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 
0x65, 0x12, 0x36, 0x0a, 0x09, 0x67, 0x65, 0x74, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x08, 0x67, 0x65, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x3f, 0x0a, 0x0c, 0x67, 0x65, 0x74, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0b, 0x67, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x3f, 0x0a, 0x0c, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x48, 0x00, 0x52, 0x0b, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x48, 0x0a, 0x0f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x69, 0x73, 
0x74, 0x73, 0x12, 0x58, 0x0a, 0x15, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x13, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x3f, 0x0a, 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x43, 0x0a, 0x0e, 0x64, 0x72, 0x6f, 0x70, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x5f, 0x76, 0x69, 0x65, 0x77, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x72, 0x6f, 0x70, 0x54, 0x65, 0x6d, 0x70, 0x56, 0x69, 0x65, 0x77, 0x48, 0x00, 0x52, 0x0c, 0x64, 0x72, 0x6f, 0x70, 0x54, 0x65, 0x6d, 0x70, 0x56, 0x69, 0x65, 0x77, 0x12, 0x56, 0x0a, 0x15, 0x64, 0x72, 0x6f, 0x70, 0x5f, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x5f, 0x76, 0x69, 0x65, 0x77, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x72, 0x6f, 0x70, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x54, 0x65, 0x6d, 0x70, 0x56, 0x69, 0x65, 0x77, 0x48, 0x00, 0x52, 0x12, 0x64, 0x72, 0x6f, 0x70, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x54, 0x65, 0x6d, 0x70, 0x56, 0x69, 0x65, 0x77, 0x12, 0x51, 0x0a, 0x12, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 
0x6f, 0x6e, 0x73, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x11, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x49, 0x73, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x48, 0x00, 0x52, 0x08, 0x69, 0x73, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x61, 0x63, 0x68, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x42, 0x0a, 0x0d, 0x75, 0x6e, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x6e, 0x63, 0x61, 0x63, 0x68, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0c, 0x75, 0x6e, 0x63, 0x61, 0x63, 0x68, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x3c, 0x0a, 0x0b, 0x63, 0x6c, 0x65, 0x61, 0x72, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x43, 0x61, 0x63, 0x68, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6c, 0x65, 0x61, 0x72, 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x42, 0x0a, 0x0d, 0x72, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x16, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0c, 0x72, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x46, 0x0a, 0x0f, 0x72, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x5f, 0x62, 0x79, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x42, 0x79, 0x50, 0x61, 0x74, 0x68, 0x48, 0x00, 0x52, 0x0d, 0x72, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x42, 0x79, 0x50, 0x61, 0x74, 0x68, 0x12, 0x48, 0x0a, 0x0f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12, 0x52, 0x0a, 0x13, 0x73, 0x65, 0x74, 0x5f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x11, 0x73, 0x65, 0x74, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12, 0x42, 0x0a, 0x0d, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x73, 0x48, 0x00, 0x52, 0x0c, 
0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x73, 0x42, 0x0a, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x11, 0x0a, 0x0f, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x22, 0x2d, 0x0a, 0x12, 0x53, 0x65, 0x74, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x0a, 0x0d, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x22, 0x61, 0x0a, 0x0a, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x1c, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x22, 0x64, 0x0a, 0x0d, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 
0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x22, 0x56, 0x0a, 0x0b, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x26, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x53, 0x0a, 0x08, 0x47, 0x65, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x5c, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x29, 0x0a, 0x0e, 0x44, 0x61, 0x74, 0x61, 
0x62, 0x61, 0x73, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x56, 0x0a, 0x0b, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x5f, 0x0a, 0x0e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x07, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xc6, 0x02, 0x0a, 0x13, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x17, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x12, 0x34, 0x0a, 0x06, 
0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x02, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x12, 0x49, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0xed, 0x02, 0x0a, 0x0b, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x17, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x12, 0x25, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 
0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x03, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x12, 0x41, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0x2b, 0x0a, 0x0c, 0x44, 0x72, 0x6f, 0x70, 0x54, 0x65, 0x6d, 0x70, 0x56, 0x69, 0x65, 0x77, 0x12, 0x1b, 0x0a, 0x09, 0x76, 0x69, 0x65, 0x77, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x76, 0x69, 0x65, 0x77, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x31, 0x0a, 0x12, 0x44, 0x72, 0x6f, 0x70, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x54, 0x65, 0x6d, 0x70, 0x56, 0x69, 0x65, 0x77, 0x12, 0x1b, 0x0a, 0x09, 0x76, 0x69, 0x65, 0x77, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 
0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x76, 0x69, 0x65, 0x77, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x32, 0x0a, 0x11, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x29, 0x0a, 0x08, 0x49, 0x73, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x84, 0x01, 0x0a, 0x0a, 0x43, 0x61, 0x63, 0x68, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x45, 0x0a, 0x0d, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x22, 0x2d, 0x0a, 0x0c, 0x55, 0x6e, 0x63, 0x61, 0x63, 0x68, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x0c, 0x0a, 0x0a, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x43, 0x61, 0x63, 0x68, 0x65, 0x22, 0x2d, 0x0a, 0x0c, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 
0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x23, 0x0a, 0x0d, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x42, 0x79, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x10, 0x0a, 0x0e, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x22, 0x36, 0x0a, 0x11, 0x53, 0x65, 0x74, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x39, 0x0a, 0x0c, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_catalog_proto_rawDescOnce sync.Once file_spark_connect_catalog_proto_rawDescData = file_spark_connect_catalog_proto_rawDesc ) func file_spark_connect_catalog_proto_rawDescGZIP() []byte { file_spark_connect_catalog_proto_rawDescOnce.Do(func() { file_spark_connect_catalog_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_catalog_proto_rawDescData) }) return file_spark_connect_catalog_proto_rawDescData } var 
file_spark_connect_catalog_proto_msgTypes = make([]protoimpl.MessageInfo, 29) var file_spark_connect_catalog_proto_goTypes = []interface{}{ (*Catalog)(nil), // 0: spark.connect.Catalog (*CurrentDatabase)(nil), // 1: spark.connect.CurrentDatabase (*SetCurrentDatabase)(nil), // 2: spark.connect.SetCurrentDatabase (*ListDatabases)(nil), // 3: spark.connect.ListDatabases (*ListTables)(nil), // 4: spark.connect.ListTables (*ListFunctions)(nil), // 5: spark.connect.ListFunctions (*ListColumns)(nil), // 6: spark.connect.ListColumns (*GetDatabase)(nil), // 7: spark.connect.GetDatabase (*GetTable)(nil), // 8: spark.connect.GetTable (*GetFunction)(nil), // 9: spark.connect.GetFunction (*DatabaseExists)(nil), // 10: spark.connect.DatabaseExists (*TableExists)(nil), // 11: spark.connect.TableExists (*FunctionExists)(nil), // 12: spark.connect.FunctionExists (*CreateExternalTable)(nil), // 13: spark.connect.CreateExternalTable (*CreateTable)(nil), // 14: spark.connect.CreateTable (*DropTempView)(nil), // 15: spark.connect.DropTempView (*DropGlobalTempView)(nil), // 16: spark.connect.DropGlobalTempView (*RecoverPartitions)(nil), // 17: spark.connect.RecoverPartitions (*IsCached)(nil), // 18: spark.connect.IsCached (*CacheTable)(nil), // 19: spark.connect.CacheTable (*UncacheTable)(nil), // 20: spark.connect.UncacheTable (*ClearCache)(nil), // 21: spark.connect.ClearCache (*RefreshTable)(nil), // 22: spark.connect.RefreshTable (*RefreshByPath)(nil), // 23: spark.connect.RefreshByPath (*CurrentCatalog)(nil), // 24: spark.connect.CurrentCatalog (*SetCurrentCatalog)(nil), // 25: spark.connect.SetCurrentCatalog (*ListCatalogs)(nil), // 26: spark.connect.ListCatalogs nil, // 27: spark.connect.CreateExternalTable.OptionsEntry nil, // 28: spark.connect.CreateTable.OptionsEntry (*DataType)(nil), // 29: spark.connect.DataType (*StorageLevel)(nil), // 30: spark.connect.StorageLevel } var file_spark_connect_catalog_proto_depIdxs = []int32{ 1, // 0: 
spark.connect.Catalog.current_database:type_name -> spark.connect.CurrentDatabase 2, // 1: spark.connect.Catalog.set_current_database:type_name -> spark.connect.SetCurrentDatabase 3, // 2: spark.connect.Catalog.list_databases:type_name -> spark.connect.ListDatabases 4, // 3: spark.connect.Catalog.list_tables:type_name -> spark.connect.ListTables 5, // 4: spark.connect.Catalog.list_functions:type_name -> spark.connect.ListFunctions 6, // 5: spark.connect.Catalog.list_columns:type_name -> spark.connect.ListColumns 7, // 6: spark.connect.Catalog.get_database:type_name -> spark.connect.GetDatabase 8, // 7: spark.connect.Catalog.get_table:type_name -> spark.connect.GetTable 9, // 8: spark.connect.Catalog.get_function:type_name -> spark.connect.GetFunction 10, // 9: spark.connect.Catalog.database_exists:type_name -> spark.connect.DatabaseExists 11, // 10: spark.connect.Catalog.table_exists:type_name -> spark.connect.TableExists 12, // 11: spark.connect.Catalog.function_exists:type_name -> spark.connect.FunctionExists 13, // 12: spark.connect.Catalog.create_external_table:type_name -> spark.connect.CreateExternalTable 14, // 13: spark.connect.Catalog.create_table:type_name -> spark.connect.CreateTable 15, // 14: spark.connect.Catalog.drop_temp_view:type_name -> spark.connect.DropTempView 16, // 15: spark.connect.Catalog.drop_global_temp_view:type_name -> spark.connect.DropGlobalTempView 17, // 16: spark.connect.Catalog.recover_partitions:type_name -> spark.connect.RecoverPartitions 18, // 17: spark.connect.Catalog.is_cached:type_name -> spark.connect.IsCached 19, // 18: spark.connect.Catalog.cache_table:type_name -> spark.connect.CacheTable 20, // 19: spark.connect.Catalog.uncache_table:type_name -> spark.connect.UncacheTable 21, // 20: spark.connect.Catalog.clear_cache:type_name -> spark.connect.ClearCache 22, // 21: spark.connect.Catalog.refresh_table:type_name -> spark.connect.RefreshTable 23, // 22: spark.connect.Catalog.refresh_by_path:type_name -> 
spark.connect.RefreshByPath 24, // 23: spark.connect.Catalog.current_catalog:type_name -> spark.connect.CurrentCatalog 25, // 24: spark.connect.Catalog.set_current_catalog:type_name -> spark.connect.SetCurrentCatalog 26, // 25: spark.connect.Catalog.list_catalogs:type_name -> spark.connect.ListCatalogs 29, // 26: spark.connect.CreateExternalTable.schema:type_name -> spark.connect.DataType 27, // 27: spark.connect.CreateExternalTable.options:type_name -> spark.connect.CreateExternalTable.OptionsEntry 29, // 28: spark.connect.CreateTable.schema:type_name -> spark.connect.DataType 28, // 29: spark.connect.CreateTable.options:type_name -> spark.connect.CreateTable.OptionsEntry 30, // 30: spark.connect.CacheTable.storage_level:type_name -> spark.connect.StorageLevel 31, // [31:31] is the sub-list for method output_type 31, // [31:31] is the sub-list for method input_type 31, // [31:31] is the sub-list for extension type_name 31, // [31:31] is the sub-list for extension extendee 0, // [0:31] is the sub-list for field type_name } func init() { file_spark_connect_catalog_proto_init() } func file_spark_connect_catalog_proto_init() { if File_spark_connect_catalog_proto != nil { return } file_spark_connect_common_proto_init() file_spark_connect_types_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_catalog_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Catalog); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CurrentDatabase); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SetCurrentDatabase); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return 
&v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ListDatabases); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ListTables); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ListFunctions); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ListColumns); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetDatabase); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DatabaseExists); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_catalog_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*TableExists); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FunctionExists); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CreateExternalTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CreateTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DropTempView); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DropGlobalTempView); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecoverPartitions); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*IsCached); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[19].Exporter = 
func(v interface{}, i int) interface{} { switch v := v.(*CacheTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*UncacheTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ClearCache); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RefreshTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RefreshByPath); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CurrentCatalog); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SetCurrentCatalog); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_catalog_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ListCatalogs); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_catalog_proto_msgTypes[0].OneofWrappers = []interface{}{ (*Catalog_CurrentDatabase)(nil), 
(*Catalog_SetCurrentDatabase)(nil), (*Catalog_ListDatabases)(nil), (*Catalog_ListTables)(nil), (*Catalog_ListFunctions)(nil), (*Catalog_ListColumns)(nil), (*Catalog_GetDatabase)(nil), (*Catalog_GetTable)(nil), (*Catalog_GetFunction)(nil), (*Catalog_DatabaseExists)(nil), (*Catalog_TableExists)(nil), (*Catalog_FunctionExists)(nil), (*Catalog_CreateExternalTable)(nil), (*Catalog_CreateTable)(nil), (*Catalog_DropTempView)(nil), (*Catalog_DropGlobalTempView)(nil), (*Catalog_RecoverPartitions)(nil), (*Catalog_IsCached)(nil), (*Catalog_CacheTable)(nil), (*Catalog_UncacheTable)(nil), (*Catalog_ClearCache)(nil), (*Catalog_RefreshTable)(nil), (*Catalog_RefreshByPath)(nil), (*Catalog_CurrentCatalog)(nil), (*Catalog_SetCurrentCatalog)(nil), (*Catalog_ListCatalogs)(nil), } file_spark_connect_catalog_proto_msgTypes[3].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[4].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[5].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[6].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[8].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[9].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[11].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[12].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[13].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[14].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[19].OneofWrappers = []interface{}{} file_spark_connect_catalog_proto_msgTypes[26].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_catalog_proto_rawDesc, NumEnums: 0, NumMessages: 29, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_catalog_proto_goTypes, 
DependencyIndexes: file_spark_connect_catalog_proto_depIdxs, MessageInfos: file_spark_connect_catalog_proto_msgTypes, }.Build() File_spark_connect_catalog_proto = out.File file_spark_connect_catalog_proto_rawDesc = nil file_spark_connect_catalog_proto_goTypes = nil file_spark_connect_catalog_proto_depIdxs = nil } ================================================ FILE: internal/generated/commands.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/commands.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" anypb "google.golang.org/protobuf/types/known/anypb" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. 
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // The enum used for client side streaming query listener event // There is no QueryStartedEvent defined here, // it is added as a field in WriteStreamOperationStartResult type StreamingQueryEventType int32 const ( StreamingQueryEventType_QUERY_PROGRESS_UNSPECIFIED StreamingQueryEventType = 0 StreamingQueryEventType_QUERY_PROGRESS_EVENT StreamingQueryEventType = 1 StreamingQueryEventType_QUERY_TERMINATED_EVENT StreamingQueryEventType = 2 StreamingQueryEventType_QUERY_IDLE_EVENT StreamingQueryEventType = 3 ) // Enum value maps for StreamingQueryEventType. var ( StreamingQueryEventType_name = map[int32]string{ 0: "QUERY_PROGRESS_UNSPECIFIED", 1: "QUERY_PROGRESS_EVENT", 2: "QUERY_TERMINATED_EVENT", 3: "QUERY_IDLE_EVENT", } StreamingQueryEventType_value = map[string]int32{ "QUERY_PROGRESS_UNSPECIFIED": 0, "QUERY_PROGRESS_EVENT": 1, "QUERY_TERMINATED_EVENT": 2, "QUERY_IDLE_EVENT": 3, } ) func (x StreamingQueryEventType) Enum() *StreamingQueryEventType { p := new(StreamingQueryEventType) *p = x return p } func (x StreamingQueryEventType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (StreamingQueryEventType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_commands_proto_enumTypes[0].Descriptor() } func (StreamingQueryEventType) Type() protoreflect.EnumType { return &file_spark_connect_commands_proto_enumTypes[0] } func (x StreamingQueryEventType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use StreamingQueryEventType.Descriptor instead. 
func (StreamingQueryEventType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{0} } type WriteOperation_SaveMode int32 const ( WriteOperation_SAVE_MODE_UNSPECIFIED WriteOperation_SaveMode = 0 WriteOperation_SAVE_MODE_APPEND WriteOperation_SaveMode = 1 WriteOperation_SAVE_MODE_OVERWRITE WriteOperation_SaveMode = 2 WriteOperation_SAVE_MODE_ERROR_IF_EXISTS WriteOperation_SaveMode = 3 WriteOperation_SAVE_MODE_IGNORE WriteOperation_SaveMode = 4 ) // Enum value maps for WriteOperation_SaveMode. var ( WriteOperation_SaveMode_name = map[int32]string{ 0: "SAVE_MODE_UNSPECIFIED", 1: "SAVE_MODE_APPEND", 2: "SAVE_MODE_OVERWRITE", 3: "SAVE_MODE_ERROR_IF_EXISTS", 4: "SAVE_MODE_IGNORE", } WriteOperation_SaveMode_value = map[string]int32{ "SAVE_MODE_UNSPECIFIED": 0, "SAVE_MODE_APPEND": 1, "SAVE_MODE_OVERWRITE": 2, "SAVE_MODE_ERROR_IF_EXISTS": 3, "SAVE_MODE_IGNORE": 4, } ) func (x WriteOperation_SaveMode) Enum() *WriteOperation_SaveMode { p := new(WriteOperation_SaveMode) *p = x return p } func (x WriteOperation_SaveMode) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (WriteOperation_SaveMode) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_commands_proto_enumTypes[1].Descriptor() } func (WriteOperation_SaveMode) Type() protoreflect.EnumType { return &file_spark_connect_commands_proto_enumTypes[1] } func (x WriteOperation_SaveMode) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use WriteOperation_SaveMode.Descriptor instead. 
func (WriteOperation_SaveMode) EnumDescriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{3, 0} } type WriteOperation_SaveTable_TableSaveMethod int32 const ( WriteOperation_SaveTable_TABLE_SAVE_METHOD_UNSPECIFIED WriteOperation_SaveTable_TableSaveMethod = 0 WriteOperation_SaveTable_TABLE_SAVE_METHOD_SAVE_AS_TABLE WriteOperation_SaveTable_TableSaveMethod = 1 WriteOperation_SaveTable_TABLE_SAVE_METHOD_INSERT_INTO WriteOperation_SaveTable_TableSaveMethod = 2 ) // Enum value maps for WriteOperation_SaveTable_TableSaveMethod. var ( WriteOperation_SaveTable_TableSaveMethod_name = map[int32]string{ 0: "TABLE_SAVE_METHOD_UNSPECIFIED", 1: "TABLE_SAVE_METHOD_SAVE_AS_TABLE", 2: "TABLE_SAVE_METHOD_INSERT_INTO", } WriteOperation_SaveTable_TableSaveMethod_value = map[string]int32{ "TABLE_SAVE_METHOD_UNSPECIFIED": 0, "TABLE_SAVE_METHOD_SAVE_AS_TABLE": 1, "TABLE_SAVE_METHOD_INSERT_INTO": 2, } ) func (x WriteOperation_SaveTable_TableSaveMethod) Enum() *WriteOperation_SaveTable_TableSaveMethod { p := new(WriteOperation_SaveTable_TableSaveMethod) *p = x return p } func (x WriteOperation_SaveTable_TableSaveMethod) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (WriteOperation_SaveTable_TableSaveMethod) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_commands_proto_enumTypes[2].Descriptor() } func (WriteOperation_SaveTable_TableSaveMethod) Type() protoreflect.EnumType { return &file_spark_connect_commands_proto_enumTypes[2] } func (x WriteOperation_SaveTable_TableSaveMethod) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use WriteOperation_SaveTable_TableSaveMethod.Descriptor instead. 
func (WriteOperation_SaveTable_TableSaveMethod) EnumDescriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{3, 1, 0} } type WriteOperationV2_Mode int32 const ( WriteOperationV2_MODE_UNSPECIFIED WriteOperationV2_Mode = 0 WriteOperationV2_MODE_CREATE WriteOperationV2_Mode = 1 WriteOperationV2_MODE_OVERWRITE WriteOperationV2_Mode = 2 WriteOperationV2_MODE_OVERWRITE_PARTITIONS WriteOperationV2_Mode = 3 WriteOperationV2_MODE_APPEND WriteOperationV2_Mode = 4 WriteOperationV2_MODE_REPLACE WriteOperationV2_Mode = 5 WriteOperationV2_MODE_CREATE_OR_REPLACE WriteOperationV2_Mode = 6 ) // Enum value maps for WriteOperationV2_Mode. var ( WriteOperationV2_Mode_name = map[int32]string{ 0: "MODE_UNSPECIFIED", 1: "MODE_CREATE", 2: "MODE_OVERWRITE", 3: "MODE_OVERWRITE_PARTITIONS", 4: "MODE_APPEND", 5: "MODE_REPLACE", 6: "MODE_CREATE_OR_REPLACE", } WriteOperationV2_Mode_value = map[string]int32{ "MODE_UNSPECIFIED": 0, "MODE_CREATE": 1, "MODE_OVERWRITE": 2, "MODE_OVERWRITE_PARTITIONS": 3, "MODE_APPEND": 4, "MODE_REPLACE": 5, "MODE_CREATE_OR_REPLACE": 6, } ) func (x WriteOperationV2_Mode) Enum() *WriteOperationV2_Mode { p := new(WriteOperationV2_Mode) *p = x return p } func (x WriteOperationV2_Mode) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (WriteOperationV2_Mode) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_commands_proto_enumTypes[3].Descriptor() } func (WriteOperationV2_Mode) Type() protoreflect.EnumType { return &file_spark_connect_commands_proto_enumTypes[3] } func (x WriteOperationV2_Mode) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use WriteOperationV2_Mode.Descriptor instead. 
func (WriteOperationV2_Mode) EnumDescriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{4, 0} } // A [[Command]] is an operation that is executed by the server that does not directly consume or // produce a relational result. type Command struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to CommandType: // // *Command_RegisterFunction // *Command_WriteOperation // *Command_CreateDataframeView // *Command_WriteOperationV2 // *Command_SqlCommand // *Command_WriteStreamOperationStart // *Command_StreamingQueryCommand // *Command_GetResourcesCommand // *Command_StreamingQueryManagerCommand // *Command_RegisterTableFunction // *Command_StreamingQueryListenerBusCommand // *Command_RegisterDataSource // *Command_CreateResourceProfileCommand // *Command_CheckpointCommand // *Command_RemoveCachedRemoteRelationCommand // *Command_MergeIntoTableCommand // *Command_MlCommand // *Command_ExecuteExternalCommand // *Command_PipelineCommand // *Command_Extension CommandType isCommand_CommandType `protobuf_oneof:"command_type"` } func (x *Command) Reset() { *x = Command{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Command) String() string { return protoimpl.X.MessageStringOf(x) } func (*Command) ProtoMessage() {} func (x *Command) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Command.ProtoReflect.Descriptor instead. 
func (*Command) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{0} } func (m *Command) GetCommandType() isCommand_CommandType { if m != nil { return m.CommandType } return nil } func (x *Command) GetRegisterFunction() *CommonInlineUserDefinedFunction { if x, ok := x.GetCommandType().(*Command_RegisterFunction); ok { return x.RegisterFunction } return nil } func (x *Command) GetWriteOperation() *WriteOperation { if x, ok := x.GetCommandType().(*Command_WriteOperation); ok { return x.WriteOperation } return nil } func (x *Command) GetCreateDataframeView() *CreateDataFrameViewCommand { if x, ok := x.GetCommandType().(*Command_CreateDataframeView); ok { return x.CreateDataframeView } return nil } func (x *Command) GetWriteOperationV2() *WriteOperationV2 { if x, ok := x.GetCommandType().(*Command_WriteOperationV2); ok { return x.WriteOperationV2 } return nil } func (x *Command) GetSqlCommand() *SqlCommand { if x, ok := x.GetCommandType().(*Command_SqlCommand); ok { return x.SqlCommand } return nil } func (x *Command) GetWriteStreamOperationStart() *WriteStreamOperationStart { if x, ok := x.GetCommandType().(*Command_WriteStreamOperationStart); ok { return x.WriteStreamOperationStart } return nil } func (x *Command) GetStreamingQueryCommand() *StreamingQueryCommand { if x, ok := x.GetCommandType().(*Command_StreamingQueryCommand); ok { return x.StreamingQueryCommand } return nil } func (x *Command) GetGetResourcesCommand() *GetResourcesCommand { if x, ok := x.GetCommandType().(*Command_GetResourcesCommand); ok { return x.GetResourcesCommand } return nil } func (x *Command) GetStreamingQueryManagerCommand() *StreamingQueryManagerCommand { if x, ok := x.GetCommandType().(*Command_StreamingQueryManagerCommand); ok { return x.StreamingQueryManagerCommand } return nil } func (x *Command) GetRegisterTableFunction() *CommonInlineUserDefinedTableFunction { if x, ok := x.GetCommandType().(*Command_RegisterTableFunction); ok { return 
x.RegisterTableFunction } return nil } func (x *Command) GetStreamingQueryListenerBusCommand() *StreamingQueryListenerBusCommand { if x, ok := x.GetCommandType().(*Command_StreamingQueryListenerBusCommand); ok { return x.StreamingQueryListenerBusCommand } return nil } func (x *Command) GetRegisterDataSource() *CommonInlineUserDefinedDataSource { if x, ok := x.GetCommandType().(*Command_RegisterDataSource); ok { return x.RegisterDataSource } return nil } func (x *Command) GetCreateResourceProfileCommand() *CreateResourceProfileCommand { if x, ok := x.GetCommandType().(*Command_CreateResourceProfileCommand); ok { return x.CreateResourceProfileCommand } return nil } func (x *Command) GetCheckpointCommand() *CheckpointCommand { if x, ok := x.GetCommandType().(*Command_CheckpointCommand); ok { return x.CheckpointCommand } return nil } func (x *Command) GetRemoveCachedRemoteRelationCommand() *RemoveCachedRemoteRelationCommand { if x, ok := x.GetCommandType().(*Command_RemoveCachedRemoteRelationCommand); ok { return x.RemoveCachedRemoteRelationCommand } return nil } func (x *Command) GetMergeIntoTableCommand() *MergeIntoTableCommand { if x, ok := x.GetCommandType().(*Command_MergeIntoTableCommand); ok { return x.MergeIntoTableCommand } return nil } func (x *Command) GetMlCommand() *MlCommand { if x, ok := x.GetCommandType().(*Command_MlCommand); ok { return x.MlCommand } return nil } func (x *Command) GetExecuteExternalCommand() *ExecuteExternalCommand { if x, ok := x.GetCommandType().(*Command_ExecuteExternalCommand); ok { return x.ExecuteExternalCommand } return nil } func (x *Command) GetPipelineCommand() *PipelineCommand { if x, ok := x.GetCommandType().(*Command_PipelineCommand); ok { return x.PipelineCommand } return nil } func (x *Command) GetExtension() *anypb.Any { if x, ok := x.GetCommandType().(*Command_Extension); ok { return x.Extension } return nil } type isCommand_CommandType interface { isCommand_CommandType() } type Command_RegisterFunction struct { 
RegisterFunction *CommonInlineUserDefinedFunction `protobuf:"bytes,1,opt,name=register_function,json=registerFunction,proto3,oneof"` } type Command_WriteOperation struct { WriteOperation *WriteOperation `protobuf:"bytes,2,opt,name=write_operation,json=writeOperation,proto3,oneof"` } type Command_CreateDataframeView struct { CreateDataframeView *CreateDataFrameViewCommand `protobuf:"bytes,3,opt,name=create_dataframe_view,json=createDataframeView,proto3,oneof"` } type Command_WriteOperationV2 struct { WriteOperationV2 *WriteOperationV2 `protobuf:"bytes,4,opt,name=write_operation_v2,json=writeOperationV2,proto3,oneof"` } type Command_SqlCommand struct { SqlCommand *SqlCommand `protobuf:"bytes,5,opt,name=sql_command,json=sqlCommand,proto3,oneof"` } type Command_WriteStreamOperationStart struct { WriteStreamOperationStart *WriteStreamOperationStart `protobuf:"bytes,6,opt,name=write_stream_operation_start,json=writeStreamOperationStart,proto3,oneof"` } type Command_StreamingQueryCommand struct { StreamingQueryCommand *StreamingQueryCommand `protobuf:"bytes,7,opt,name=streaming_query_command,json=streamingQueryCommand,proto3,oneof"` } type Command_GetResourcesCommand struct { GetResourcesCommand *GetResourcesCommand `protobuf:"bytes,8,opt,name=get_resources_command,json=getResourcesCommand,proto3,oneof"` } type Command_StreamingQueryManagerCommand struct { StreamingQueryManagerCommand *StreamingQueryManagerCommand `protobuf:"bytes,9,opt,name=streaming_query_manager_command,json=streamingQueryManagerCommand,proto3,oneof"` } type Command_RegisterTableFunction struct { RegisterTableFunction *CommonInlineUserDefinedTableFunction `protobuf:"bytes,10,opt,name=register_table_function,json=registerTableFunction,proto3,oneof"` } type Command_StreamingQueryListenerBusCommand struct { StreamingQueryListenerBusCommand *StreamingQueryListenerBusCommand `protobuf:"bytes,11,opt,name=streaming_query_listener_bus_command,json=streamingQueryListenerBusCommand,proto3,oneof"` } type 
Command_RegisterDataSource struct { RegisterDataSource *CommonInlineUserDefinedDataSource `protobuf:"bytes,12,opt,name=register_data_source,json=registerDataSource,proto3,oneof"` } type Command_CreateResourceProfileCommand struct { CreateResourceProfileCommand *CreateResourceProfileCommand `protobuf:"bytes,13,opt,name=create_resource_profile_command,json=createResourceProfileCommand,proto3,oneof"` } type Command_CheckpointCommand struct { CheckpointCommand *CheckpointCommand `protobuf:"bytes,14,opt,name=checkpoint_command,json=checkpointCommand,proto3,oneof"` } type Command_RemoveCachedRemoteRelationCommand struct { RemoveCachedRemoteRelationCommand *RemoveCachedRemoteRelationCommand `protobuf:"bytes,15,opt,name=remove_cached_remote_relation_command,json=removeCachedRemoteRelationCommand,proto3,oneof"` } type Command_MergeIntoTableCommand struct { MergeIntoTableCommand *MergeIntoTableCommand `protobuf:"bytes,16,opt,name=merge_into_table_command,json=mergeIntoTableCommand,proto3,oneof"` } type Command_MlCommand struct { MlCommand *MlCommand `protobuf:"bytes,17,opt,name=ml_command,json=mlCommand,proto3,oneof"` } type Command_ExecuteExternalCommand struct { ExecuteExternalCommand *ExecuteExternalCommand `protobuf:"bytes,18,opt,name=execute_external_command,json=executeExternalCommand,proto3,oneof"` } type Command_PipelineCommand struct { PipelineCommand *PipelineCommand `protobuf:"bytes,19,opt,name=pipeline_command,json=pipelineCommand,proto3,oneof"` } type Command_Extension struct { // This field is used to mark extensions to the protocol. When plugins generate arbitrary // Commands they can add them here. During the planning the correct resolution is done. 
Extension *anypb.Any `protobuf:"bytes,999,opt,name=extension,proto3,oneof"` } func (*Command_RegisterFunction) isCommand_CommandType() {} func (*Command_WriteOperation) isCommand_CommandType() {} func (*Command_CreateDataframeView) isCommand_CommandType() {} func (*Command_WriteOperationV2) isCommand_CommandType() {} func (*Command_SqlCommand) isCommand_CommandType() {} func (*Command_WriteStreamOperationStart) isCommand_CommandType() {} func (*Command_StreamingQueryCommand) isCommand_CommandType() {} func (*Command_GetResourcesCommand) isCommand_CommandType() {} func (*Command_StreamingQueryManagerCommand) isCommand_CommandType() {} func (*Command_RegisterTableFunction) isCommand_CommandType() {} func (*Command_StreamingQueryListenerBusCommand) isCommand_CommandType() {} func (*Command_RegisterDataSource) isCommand_CommandType() {} func (*Command_CreateResourceProfileCommand) isCommand_CommandType() {} func (*Command_CheckpointCommand) isCommand_CommandType() {} func (*Command_RemoveCachedRemoteRelationCommand) isCommand_CommandType() {} func (*Command_MergeIntoTableCommand) isCommand_CommandType() {} func (*Command_MlCommand) isCommand_CommandType() {} func (*Command_ExecuteExternalCommand) isCommand_CommandType() {} func (*Command_PipelineCommand) isCommand_CommandType() {} func (*Command_Extension) isCommand_CommandType() {} // A SQL Command is used to trigger the eager evaluation of SQL commands in Spark. // // When the SQL provide as part of the message is a command it will be immediately evaluated // and the result will be collected and returned as part of a LocalRelation. If the result is // not a command, the operation will simply return a SQL Relation. This allows the client to be // almost oblivious to the server-side behavior. type SqlCommand struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) SQL Query. // // Deprecated: Marked as deprecated in spark/connect/commands.proto. 
Sql string `protobuf:"bytes,1,opt,name=sql,proto3" json:"sql,omitempty"` // (Optional) A map of parameter names to literal expressions. // // Deprecated: Marked as deprecated in spark/connect/commands.proto. Args map[string]*Expression_Literal `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (Optional) A sequence of literal expressions for positional parameters in the SQL query text. // // Deprecated: Marked as deprecated in spark/connect/commands.proto. PosArgs []*Expression_Literal `protobuf:"bytes,3,rep,name=pos_args,json=posArgs,proto3" json:"pos_args,omitempty"` // (Optional) A map of parameter names to expressions. // It cannot coexist with `pos_arguments`. // // Deprecated: Marked as deprecated in spark/connect/commands.proto. NamedArguments map[string]*Expression `protobuf:"bytes,4,rep,name=named_arguments,json=namedArguments,proto3" json:"named_arguments,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (Optional) A sequence of expressions for positional parameters in the SQL query text. // It cannot coexist with `named_arguments`. // // Deprecated: Marked as deprecated in spark/connect/commands.proto. PosArguments []*Expression `protobuf:"bytes,5,rep,name=pos_arguments,json=posArguments,proto3" json:"pos_arguments,omitempty"` // (Optional) The relation that this SQL command will be built on. 
Input *Relation `protobuf:"bytes,6,opt,name=input,proto3" json:"input,omitempty"` } func (x *SqlCommand) Reset() { *x = SqlCommand{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *SqlCommand) String() string { return protoimpl.X.MessageStringOf(x) } func (*SqlCommand) ProtoMessage() {} func (x *SqlCommand) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SqlCommand.ProtoReflect.Descriptor instead. func (*SqlCommand) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{1} } // Deprecated: Marked as deprecated in spark/connect/commands.proto. func (x *SqlCommand) GetSql() string { if x != nil { return x.Sql } return "" } // Deprecated: Marked as deprecated in spark/connect/commands.proto. func (x *SqlCommand) GetArgs() map[string]*Expression_Literal { if x != nil { return x.Args } return nil } // Deprecated: Marked as deprecated in spark/connect/commands.proto. func (x *SqlCommand) GetPosArgs() []*Expression_Literal { if x != nil { return x.PosArgs } return nil } // Deprecated: Marked as deprecated in spark/connect/commands.proto. func (x *SqlCommand) GetNamedArguments() map[string]*Expression { if x != nil { return x.NamedArguments } return nil } // Deprecated: Marked as deprecated in spark/connect/commands.proto. func (x *SqlCommand) GetPosArguments() []*Expression { if x != nil { return x.PosArguments } return nil } func (x *SqlCommand) GetInput() *Relation { if x != nil { return x.Input } return nil } // A command that can create DataFrame global temp view or local temp view. 
type CreateDataFrameViewCommand struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The relation that this view will be built on. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) View name. Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` // (Required) Whether this is global temp view or local temp view. IsGlobal bool `protobuf:"varint,3,opt,name=is_global,json=isGlobal,proto3" json:"is_global,omitempty"` // (Required) // // If true, and if the view already exists, updates it; if false, and if the view // already exists, throws exception. Replace bool `protobuf:"varint,4,opt,name=replace,proto3" json:"replace,omitempty"` } func (x *CreateDataFrameViewCommand) Reset() { *x = CreateDataFrameViewCommand{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CreateDataFrameViewCommand) String() string { return protoimpl.X.MessageStringOf(x) } func (*CreateDataFrameViewCommand) ProtoMessage() {} func (x *CreateDataFrameViewCommand) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CreateDataFrameViewCommand.ProtoReflect.Descriptor instead. 
func (*CreateDataFrameViewCommand) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{2} } func (x *CreateDataFrameViewCommand) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *CreateDataFrameViewCommand) GetName() string { if x != nil { return x.Name } return "" } func (x *CreateDataFrameViewCommand) GetIsGlobal() bool { if x != nil { return x.IsGlobal } return false } func (x *CreateDataFrameViewCommand) GetReplace() bool { if x != nil { return x.Replace } return false } // As writes are not directly handled during analysis and planning, they are modeled as commands. type WriteOperation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The output of the `input` relation will be persisted according to the options. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta. Source *string `protobuf:"bytes,2,opt,name=source,proto3,oneof" json:"source,omitempty"` // (Optional) // // The destination of the write operation can be either a path or a table. // If the destination is neither a path nor a table, such as jdbc and noop, // the `save_type` should not be set. // // Types that are assignable to SaveType: // // *WriteOperation_Path // *WriteOperation_Table SaveType isWriteOperation_SaveType `protobuf_oneof:"save_type"` // (Required) the save mode. Mode WriteOperation_SaveMode `protobuf:"varint,5,opt,name=mode,proto3,enum=spark.connect.WriteOperation_SaveMode" json:"mode,omitempty"` // (Optional) List of columns to sort the output by. SortColumnNames []string `protobuf:"bytes,6,rep,name=sort_column_names,json=sortColumnNames,proto3" json:"sort_column_names,omitempty"` // (Optional) List of columns for partitioning. 
PartitioningColumns []string `protobuf:"bytes,7,rep,name=partitioning_columns,json=partitioningColumns,proto3" json:"partitioning_columns,omitempty"` // (Optional) Bucketing specification. Bucketing must set the number of buckets and the columns // to bucket by. BucketBy *WriteOperation_BucketBy `protobuf:"bytes,8,opt,name=bucket_by,json=bucketBy,proto3" json:"bucket_by,omitempty"` // (Optional) A list of configuration options. Options map[string]string `protobuf:"bytes,9,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (Optional) Columns used for clustering the table. ClusteringColumns []string `protobuf:"bytes,10,rep,name=clustering_columns,json=clusteringColumns,proto3" json:"clustering_columns,omitempty"` } func (x *WriteOperation) Reset() { *x = WriteOperation{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WriteOperation) String() string { return protoimpl.X.MessageStringOf(x) } func (*WriteOperation) ProtoMessage() {} func (x *WriteOperation) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WriteOperation.ProtoReflect.Descriptor instead. 
func (*WriteOperation) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{3} } func (x *WriteOperation) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *WriteOperation) GetSource() string { if x != nil && x.Source != nil { return *x.Source } return "" } func (m *WriteOperation) GetSaveType() isWriteOperation_SaveType { if m != nil { return m.SaveType } return nil } func (x *WriteOperation) GetPath() string { if x, ok := x.GetSaveType().(*WriteOperation_Path); ok { return x.Path } return "" } func (x *WriteOperation) GetTable() *WriteOperation_SaveTable { if x, ok := x.GetSaveType().(*WriteOperation_Table); ok { return x.Table } return nil } func (x *WriteOperation) GetMode() WriteOperation_SaveMode { if x != nil { return x.Mode } return WriteOperation_SAVE_MODE_UNSPECIFIED } func (x *WriteOperation) GetSortColumnNames() []string { if x != nil { return x.SortColumnNames } return nil } func (x *WriteOperation) GetPartitioningColumns() []string { if x != nil { return x.PartitioningColumns } return nil } func (x *WriteOperation) GetBucketBy() *WriteOperation_BucketBy { if x != nil { return x.BucketBy } return nil } func (x *WriteOperation) GetOptions() map[string]string { if x != nil { return x.Options } return nil } func (x *WriteOperation) GetClusteringColumns() []string { if x != nil { return x.ClusteringColumns } return nil } type isWriteOperation_SaveType interface { isWriteOperation_SaveType() } type WriteOperation_Path struct { Path string `protobuf:"bytes,3,opt,name=path,proto3,oneof"` } type WriteOperation_Table struct { Table *WriteOperation_SaveTable `protobuf:"bytes,4,opt,name=table,proto3,oneof"` } func (*WriteOperation_Path) isWriteOperation_SaveType() {} func (*WriteOperation_Table) isWriteOperation_SaveType() {} // As writes are not directly handled during analysis and planning, they are modeled as commands. 
type WriteOperationV2 struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The output of the `input` relation will be persisted according to the options. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The destination of the write operation must be either a path or a table. TableName string `protobuf:"bytes,2,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` // (Optional) A provider for the underlying output data source. Spark's default catalog supports // "parquet", "json", etc. Provider *string `protobuf:"bytes,3,opt,name=provider,proto3,oneof" json:"provider,omitempty"` // (Optional) List of columns for partitioning for output table created by `create`, // `createOrReplace`, or `replace` PartitioningColumns []*Expression `protobuf:"bytes,4,rep,name=partitioning_columns,json=partitioningColumns,proto3" json:"partitioning_columns,omitempty"` // (Optional) A list of configuration options. Options map[string]string `protobuf:"bytes,5,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (Optional) A list of table properties. TableProperties map[string]string `protobuf:"bytes,6,rep,name=table_properties,json=tableProperties,proto3" json:"table_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (Required) Write mode. Mode WriteOperationV2_Mode `protobuf:"varint,7,opt,name=mode,proto3,enum=spark.connect.WriteOperationV2_Mode" json:"mode,omitempty"` // (Optional) A condition for overwrite saving mode OverwriteCondition *Expression `protobuf:"bytes,8,opt,name=overwrite_condition,json=overwriteCondition,proto3" json:"overwrite_condition,omitempty"` // (Optional) Columns used for clustering the table. 
ClusteringColumns []string `protobuf:"bytes,9,rep,name=clustering_columns,json=clusteringColumns,proto3" json:"clustering_columns,omitempty"` } func (x *WriteOperationV2) Reset() { *x = WriteOperationV2{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WriteOperationV2) String() string { return protoimpl.X.MessageStringOf(x) } func (*WriteOperationV2) ProtoMessage() {} func (x *WriteOperationV2) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WriteOperationV2.ProtoReflect.Descriptor instead. func (*WriteOperationV2) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{4} } func (x *WriteOperationV2) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *WriteOperationV2) GetTableName() string { if x != nil { return x.TableName } return "" } func (x *WriteOperationV2) GetProvider() string { if x != nil && x.Provider != nil { return *x.Provider } return "" } func (x *WriteOperationV2) GetPartitioningColumns() []*Expression { if x != nil { return x.PartitioningColumns } return nil } func (x *WriteOperationV2) GetOptions() map[string]string { if x != nil { return x.Options } return nil } func (x *WriteOperationV2) GetTableProperties() map[string]string { if x != nil { return x.TableProperties } return nil } func (x *WriteOperationV2) GetMode() WriteOperationV2_Mode { if x != nil { return x.Mode } return WriteOperationV2_MODE_UNSPECIFIED } func (x *WriteOperationV2) GetOverwriteCondition() *Expression { if x != nil { return x.OverwriteCondition } return nil } func (x *WriteOperationV2) GetClusteringColumns() []string { if x != nil { 
return x.ClusteringColumns } return nil } // Starts write stream operation as streaming query. Query ID and Run ID of the streaming // query are returned. type WriteStreamOperationStart struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The output of the `input` streaming relation will be written. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` Format string `protobuf:"bytes,2,opt,name=format,proto3" json:"format,omitempty"` Options map[string]string `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` PartitioningColumnNames []string `protobuf:"bytes,4,rep,name=partitioning_column_names,json=partitioningColumnNames,proto3" json:"partitioning_column_names,omitempty"` // Types that are assignable to Trigger: // // *WriteStreamOperationStart_ProcessingTimeInterval // *WriteStreamOperationStart_AvailableNow // *WriteStreamOperationStart_Once // *WriteStreamOperationStart_ContinuousCheckpointInterval Trigger isWriteStreamOperationStart_Trigger `protobuf_oneof:"trigger"` OutputMode string `protobuf:"bytes,9,opt,name=output_mode,json=outputMode,proto3" json:"output_mode,omitempty"` QueryName string `protobuf:"bytes,10,opt,name=query_name,json=queryName,proto3" json:"query_name,omitempty"` // The destination is optional. When set, it can be a path or a table name. 
// // Types that are assignable to SinkDestination: // // *WriteStreamOperationStart_Path // *WriteStreamOperationStart_TableName SinkDestination isWriteStreamOperationStart_SinkDestination `protobuf_oneof:"sink_destination"` ForeachWriter *StreamingForeachFunction `protobuf:"bytes,13,opt,name=foreach_writer,json=foreachWriter,proto3" json:"foreach_writer,omitempty"` ForeachBatch *StreamingForeachFunction `protobuf:"bytes,14,opt,name=foreach_batch,json=foreachBatch,proto3" json:"foreach_batch,omitempty"` // (Optional) Columns used for clustering the table. ClusteringColumnNames []string `protobuf:"bytes,15,rep,name=clustering_column_names,json=clusteringColumnNames,proto3" json:"clustering_column_names,omitempty"` } func (x *WriteStreamOperationStart) Reset() { *x = WriteStreamOperationStart{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WriteStreamOperationStart) String() string { return protoimpl.X.MessageStringOf(x) } func (*WriteStreamOperationStart) ProtoMessage() {} func (x *WriteStreamOperationStart) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WriteStreamOperationStart.ProtoReflect.Descriptor instead. 
func (*WriteStreamOperationStart) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{5} } func (x *WriteStreamOperationStart) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *WriteStreamOperationStart) GetFormat() string { if x != nil { return x.Format } return "" } func (x *WriteStreamOperationStart) GetOptions() map[string]string { if x != nil { return x.Options } return nil } func (x *WriteStreamOperationStart) GetPartitioningColumnNames() []string { if x != nil { return x.PartitioningColumnNames } return nil } func (m *WriteStreamOperationStart) GetTrigger() isWriteStreamOperationStart_Trigger { if m != nil { return m.Trigger } return nil } func (x *WriteStreamOperationStart) GetProcessingTimeInterval() string { if x, ok := x.GetTrigger().(*WriteStreamOperationStart_ProcessingTimeInterval); ok { return x.ProcessingTimeInterval } return "" } func (x *WriteStreamOperationStart) GetAvailableNow() bool { if x, ok := x.GetTrigger().(*WriteStreamOperationStart_AvailableNow); ok { return x.AvailableNow } return false } func (x *WriteStreamOperationStart) GetOnce() bool { if x, ok := x.GetTrigger().(*WriteStreamOperationStart_Once); ok { return x.Once } return false } func (x *WriteStreamOperationStart) GetContinuousCheckpointInterval() string { if x, ok := x.GetTrigger().(*WriteStreamOperationStart_ContinuousCheckpointInterval); ok { return x.ContinuousCheckpointInterval } return "" } func (x *WriteStreamOperationStart) GetOutputMode() string { if x != nil { return x.OutputMode } return "" } func (x *WriteStreamOperationStart) GetQueryName() string { if x != nil { return x.QueryName } return "" } func (m *WriteStreamOperationStart) GetSinkDestination() isWriteStreamOperationStart_SinkDestination { if m != nil { return m.SinkDestination } return nil } func (x *WriteStreamOperationStart) GetPath() string { if x, ok := x.GetSinkDestination().(*WriteStreamOperationStart_Path); ok { return x.Path } return 
"" } func (x *WriteStreamOperationStart) GetTableName() string { if x, ok := x.GetSinkDestination().(*WriteStreamOperationStart_TableName); ok { return x.TableName } return "" } func (x *WriteStreamOperationStart) GetForeachWriter() *StreamingForeachFunction { if x != nil { return x.ForeachWriter } return nil } func (x *WriteStreamOperationStart) GetForeachBatch() *StreamingForeachFunction { if x != nil { return x.ForeachBatch } return nil } func (x *WriteStreamOperationStart) GetClusteringColumnNames() []string { if x != nil { return x.ClusteringColumnNames } return nil } type isWriteStreamOperationStart_Trigger interface { isWriteStreamOperationStart_Trigger() } type WriteStreamOperationStart_ProcessingTimeInterval struct { ProcessingTimeInterval string `protobuf:"bytes,5,opt,name=processing_time_interval,json=processingTimeInterval,proto3,oneof"` } type WriteStreamOperationStart_AvailableNow struct { AvailableNow bool `protobuf:"varint,6,opt,name=available_now,json=availableNow,proto3,oneof"` } type WriteStreamOperationStart_Once struct { Once bool `protobuf:"varint,7,opt,name=once,proto3,oneof"` } type WriteStreamOperationStart_ContinuousCheckpointInterval struct { ContinuousCheckpointInterval string `protobuf:"bytes,8,opt,name=continuous_checkpoint_interval,json=continuousCheckpointInterval,proto3,oneof"` } func (*WriteStreamOperationStart_ProcessingTimeInterval) isWriteStreamOperationStart_Trigger() {} func (*WriteStreamOperationStart_AvailableNow) isWriteStreamOperationStart_Trigger() {} func (*WriteStreamOperationStart_Once) isWriteStreamOperationStart_Trigger() {} func (*WriteStreamOperationStart_ContinuousCheckpointInterval) isWriteStreamOperationStart_Trigger() { } type isWriteStreamOperationStart_SinkDestination interface { isWriteStreamOperationStart_SinkDestination() } type WriteStreamOperationStart_Path struct { Path string `protobuf:"bytes,11,opt,name=path,proto3,oneof"` } type WriteStreamOperationStart_TableName struct { TableName string 
`protobuf:"bytes,12,opt,name=table_name,json=tableName,proto3,oneof"` } func (*WriteStreamOperationStart_Path) isWriteStreamOperationStart_SinkDestination() {} func (*WriteStreamOperationStart_TableName) isWriteStreamOperationStart_SinkDestination() {} type StreamingForeachFunction struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to Function: // // *StreamingForeachFunction_PythonFunction // *StreamingForeachFunction_ScalaFunction Function isStreamingForeachFunction_Function `protobuf_oneof:"function"` } func (x *StreamingForeachFunction) Reset() { *x = StreamingForeachFunction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StreamingForeachFunction) String() string { return protoimpl.X.MessageStringOf(x) } func (*StreamingForeachFunction) ProtoMessage() {} func (x *StreamingForeachFunction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StreamingForeachFunction.ProtoReflect.Descriptor instead. 
func (*StreamingForeachFunction) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{6} } func (m *StreamingForeachFunction) GetFunction() isStreamingForeachFunction_Function { if m != nil { return m.Function } return nil } func (x *StreamingForeachFunction) GetPythonFunction() *PythonUDF { if x, ok := x.GetFunction().(*StreamingForeachFunction_PythonFunction); ok { return x.PythonFunction } return nil } func (x *StreamingForeachFunction) GetScalaFunction() *ScalarScalaUDF { if x, ok := x.GetFunction().(*StreamingForeachFunction_ScalaFunction); ok { return x.ScalaFunction } return nil } type isStreamingForeachFunction_Function interface { isStreamingForeachFunction_Function() } type StreamingForeachFunction_PythonFunction struct { PythonFunction *PythonUDF `protobuf:"bytes,1,opt,name=python_function,json=pythonFunction,proto3,oneof"` } type StreamingForeachFunction_ScalaFunction struct { ScalaFunction *ScalarScalaUDF `protobuf:"bytes,2,opt,name=scala_function,json=scalaFunction,proto3,oneof"` } func (*StreamingForeachFunction_PythonFunction) isStreamingForeachFunction_Function() {} func (*StreamingForeachFunction_ScalaFunction) isStreamingForeachFunction_Function() {} type WriteStreamOperationStartResult struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Query instance. See `StreamingQueryInstanceId`. QueryId *StreamingQueryInstanceId `protobuf:"bytes,1,opt,name=query_id,json=queryId,proto3" json:"query_id,omitempty"` // An optional query name. Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` // Optional query started event if there is any listener registered on the client side. 
QueryStartedEventJson *string `protobuf:"bytes,3,opt,name=query_started_event_json,json=queryStartedEventJson,proto3,oneof" json:"query_started_event_json,omitempty"` } func (x *WriteStreamOperationStartResult) Reset() { *x = WriteStreamOperationStartResult{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WriteStreamOperationStartResult) String() string { return protoimpl.X.MessageStringOf(x) } func (*WriteStreamOperationStartResult) ProtoMessage() {} func (x *WriteStreamOperationStartResult) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WriteStreamOperationStartResult.ProtoReflect.Descriptor instead. func (*WriteStreamOperationStartResult) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{7} } func (x *WriteStreamOperationStartResult) GetQueryId() *StreamingQueryInstanceId { if x != nil { return x.QueryId } return nil } func (x *WriteStreamOperationStartResult) GetName() string { if x != nil { return x.Name } return "" } func (x *WriteStreamOperationStartResult) GetQueryStartedEventJson() string { if x != nil && x.QueryStartedEventJson != nil { return *x.QueryStartedEventJson } return "" } // A tuple that uniquely identifies an instance of streaming query run. It consists of `id` that // persists across the streaming runs and `run_id` that changes between each run of the // streaming query that resumes from the checkpoint. 
type StreamingQueryInstanceId struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The unique id of this query that persists across restarts from checkpoint data.
	// That is, this id is generated when a query is started for the first time, and
	// will be the same every time it is restarted from checkpoint data.
	Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	// (Required) The unique id of this run of the query. That is, every start/restart of a query
	// will generate a unique run_id. Therefore, every time a query is restarted from
	// checkpoint, it will have the same `id` but different `run_id`s.
	RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"`
}

func (x *StreamingQueryInstanceId) Reset() {
	*x = StreamingQueryInstanceId{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryInstanceId) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryInstanceId) ProtoMessage() {}

func (x *StreamingQueryInstanceId) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryInstanceId.ProtoReflect.Descriptor instead.
func (*StreamingQueryInstanceId) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{8}
}

// GetId is nil-receiver safe and returns "" on a nil message.
func (x *StreamingQueryInstanceId) GetId() string {
	if x != nil {
		return x.Id
	}
	return ""
}

func (x *StreamingQueryInstanceId) GetRunId() string {
	if x != nil {
		return x.RunId
	}
	return ""
}

// Commands for a streaming query.
// StreamingQueryCommand addresses one running streaming query (via query_id)
// and carries exactly one operation from the `command` oneof.
type StreamingQueryCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Query instance. See `StreamingQueryInstanceId`.
	QueryId *StreamingQueryInstanceId `protobuf:"bytes,1,opt,name=query_id,json=queryId,proto3" json:"query_id,omitempty"`
	// See documentation for the corresponding API method in StreamingQuery.
	//
	// Types that are assignable to Command:
	//
	//	*StreamingQueryCommand_Status
	//	*StreamingQueryCommand_LastProgress
	//	*StreamingQueryCommand_RecentProgress
	//	*StreamingQueryCommand_Stop
	//	*StreamingQueryCommand_ProcessAllAvailable
	//	*StreamingQueryCommand_Explain
	//	*StreamingQueryCommand_Exception
	//	*StreamingQueryCommand_AwaitTermination
	Command isStreamingQueryCommand_Command `protobuf_oneof:"command"`
}

func (x *StreamingQueryCommand) Reset() {
	*x = StreamingQueryCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommand) ProtoMessage() {}

func (x *StreamingQueryCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommand.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{9}
}

func (x *StreamingQueryCommand) GetQueryId() *StreamingQueryInstanceId {
	if x != nil {
		return x.QueryId
	}
	return nil
}

func (m *StreamingQueryCommand) GetCommand() isStreamingQueryCommand_Command {
	if m != nil {
		return m.Command
	}
	return nil
}

// The oneof getters below return the variant's payload when that variant is
// the one currently set, and the zero value otherwise.
func (x *StreamingQueryCommand) GetStatus() bool {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_Status); ok {
		return x.Status
	}
	return false
}

func (x *StreamingQueryCommand) GetLastProgress() bool {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_LastProgress); ok {
		return x.LastProgress
	}
	return false
}

func (x *StreamingQueryCommand) GetRecentProgress() bool {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_RecentProgress); ok {
		return x.RecentProgress
	}
	return false
}

func (x *StreamingQueryCommand) GetStop() bool {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_Stop); ok {
		return x.Stop
	}
	return false
}

func (x *StreamingQueryCommand) GetProcessAllAvailable() bool {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_ProcessAllAvailable); ok {
		return x.ProcessAllAvailable
	}
	return false
}

func (x *StreamingQueryCommand) GetExplain() *StreamingQueryCommand_ExplainCommand {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_Explain); ok {
		return x.Explain
	}
	return nil
}

func (x *StreamingQueryCommand) GetException() bool {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_Exception); ok {
		return x.Exception
	}
	return false
}

func (x *StreamingQueryCommand) GetAwaitTermination() *StreamingQueryCommand_AwaitTerminationCommand {
	if x, ok := x.GetCommand().(*StreamingQueryCommand_AwaitTermination); ok {
		return x.AwaitTermination
	}
	return nil
}

type isStreamingQueryCommand_Command interface {
	isStreamingQueryCommand_Command()
}

type StreamingQueryCommand_Status struct {
	// status() API.
	Status bool `protobuf:"varint,2,opt,name=status,proto3,oneof"`
}

type StreamingQueryCommand_LastProgress struct {
	// lastProgress() API.
	LastProgress bool `protobuf:"varint,3,opt,name=last_progress,json=lastProgress,proto3,oneof"`
}

type StreamingQueryCommand_RecentProgress struct {
	// recentProgress() API.
	RecentProgress bool `protobuf:"varint,4,opt,name=recent_progress,json=recentProgress,proto3,oneof"`
}

type StreamingQueryCommand_Stop struct {
	// stop() API. Stops the query.
	Stop bool `protobuf:"varint,5,opt,name=stop,proto3,oneof"`
}

type StreamingQueryCommand_ProcessAllAvailable struct {
	// processAllAvailable() API. Waits till all the available data is processed
	ProcessAllAvailable bool `protobuf:"varint,6,opt,name=process_all_available,json=processAllAvailable,proto3,oneof"`
}

type StreamingQueryCommand_Explain struct {
	// explain() API. Returns logical and physical plans.
	Explain *StreamingQueryCommand_ExplainCommand `protobuf:"bytes,7,opt,name=explain,proto3,oneof"`
}

type StreamingQueryCommand_Exception struct {
	// exception() API. Returns the exception in the query if any.
	Exception bool `protobuf:"varint,8,opt,name=exception,proto3,oneof"`
}

type StreamingQueryCommand_AwaitTermination struct {
	// awaitTermination() API. Waits for the termination of the query.
	AwaitTermination *StreamingQueryCommand_AwaitTerminationCommand `protobuf:"bytes,9,opt,name=await_termination,json=awaitTermination,proto3,oneof"`
}

func (*StreamingQueryCommand_Status) isStreamingQueryCommand_Command() {}

func (*StreamingQueryCommand_LastProgress) isStreamingQueryCommand_Command() {}

func (*StreamingQueryCommand_RecentProgress) isStreamingQueryCommand_Command() {}

func (*StreamingQueryCommand_Stop) isStreamingQueryCommand_Command() {}

func (*StreamingQueryCommand_ProcessAllAvailable) isStreamingQueryCommand_Command() {}

func (*StreamingQueryCommand_Explain) isStreamingQueryCommand_Command() {}

func (*StreamingQueryCommand_Exception) isStreamingQueryCommand_Command() {}

func (*StreamingQueryCommand_AwaitTermination) isStreamingQueryCommand_Command() {}

// Response for commands on a streaming query.
type StreamingQueryCommandResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Query instance id. See `StreamingQueryInstanceId`.
	QueryId *StreamingQueryInstanceId `protobuf:"bytes,1,opt,name=query_id,json=queryId,proto3" json:"query_id,omitempty"`
	// Types that are assignable to ResultType:
	//
	//	*StreamingQueryCommandResult_Status
	//	*StreamingQueryCommandResult_RecentProgress
	//	*StreamingQueryCommandResult_Explain
	//	*StreamingQueryCommandResult_Exception
	//	*StreamingQueryCommandResult_AwaitTermination
	ResultType isStreamingQueryCommandResult_ResultType `protobuf_oneof:"result_type"`
}

func (x *StreamingQueryCommandResult) Reset() {
	*x = StreamingQueryCommandResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[10]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommandResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommandResult) ProtoMessage() {}

func (x *StreamingQueryCommandResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[10]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommandResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommandResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{10}
}

func (x *StreamingQueryCommandResult) GetQueryId() *StreamingQueryInstanceId {
	if x != nil {
		return x.QueryId
	}
	return nil
}

func (m *StreamingQueryCommandResult) GetResultType() isStreamingQueryCommandResult_ResultType {
	if m != nil {
		return m.ResultType
	}
	return nil
}

// Oneof getters: each returns its payload only when the `result_type` oneof
// currently holds that variant; otherwise nil.
func (x *StreamingQueryCommandResult) GetStatus() *StreamingQueryCommandResult_StatusResult {
	if x, ok := x.GetResultType().(*StreamingQueryCommandResult_Status); ok {
		return x.Status
	}
	return nil
}

func (x *StreamingQueryCommandResult) GetRecentProgress() *StreamingQueryCommandResult_RecentProgressResult {
	if x, ok := x.GetResultType().(*StreamingQueryCommandResult_RecentProgress); ok {
		return x.RecentProgress
	}
	return nil
}

func (x *StreamingQueryCommandResult) GetExplain() *StreamingQueryCommandResult_ExplainResult {
	if x, ok := x.GetResultType().(*StreamingQueryCommandResult_Explain); ok {
		return x.Explain
	}
	return nil
}

func (x *StreamingQueryCommandResult) GetException() *StreamingQueryCommandResult_ExceptionResult {
	if x, ok := x.GetResultType().(*StreamingQueryCommandResult_Exception); ok {
		return x.Exception
	}
	return nil
}

func (x *StreamingQueryCommandResult) GetAwaitTermination() *StreamingQueryCommandResult_AwaitTerminationResult {
	if x, ok := x.GetResultType().(*StreamingQueryCommandResult_AwaitTermination); ok {
		return x.AwaitTermination
	}
	return nil
}

type isStreamingQueryCommandResult_ResultType interface {
	isStreamingQueryCommandResult_ResultType()
}

type StreamingQueryCommandResult_Status struct {
	Status *StreamingQueryCommandResult_StatusResult `protobuf:"bytes,2,opt,name=status,proto3,oneof"`
}

type StreamingQueryCommandResult_RecentProgress struct {
	RecentProgress *StreamingQueryCommandResult_RecentProgressResult `protobuf:"bytes,3,opt,name=recent_progress,json=recentProgress,proto3,oneof"`
}

type StreamingQueryCommandResult_Explain struct {
	Explain *StreamingQueryCommandResult_ExplainResult `protobuf:"bytes,4,opt,name=explain,proto3,oneof"`
}

type StreamingQueryCommandResult_Exception struct {
	Exception *StreamingQueryCommandResult_ExceptionResult `protobuf:"bytes,5,opt,name=exception,proto3,oneof"`
}

type StreamingQueryCommandResult_AwaitTermination struct {
	AwaitTermination *StreamingQueryCommandResult_AwaitTerminationResult `protobuf:"bytes,6,opt,name=await_termination,json=awaitTermination,proto3,oneof"`
}

func (*StreamingQueryCommandResult_Status) isStreamingQueryCommandResult_ResultType() {}

func (*StreamingQueryCommandResult_RecentProgress) isStreamingQueryCommandResult_ResultType() {}

func (*StreamingQueryCommandResult_Explain) isStreamingQueryCommandResult_ResultType() {}

func (*StreamingQueryCommandResult_Exception) isStreamingQueryCommandResult_ResultType() {}

func (*StreamingQueryCommandResult_AwaitTermination) isStreamingQueryCommandResult_ResultType() {}

// Commands for the streaming query manager.
type StreamingQueryManagerCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// See documentation for the corresponding API method in StreamingQueryManager.
	//
	// Types that are assignable to Command:
	//
	//	*StreamingQueryManagerCommand_Active
	//	*StreamingQueryManagerCommand_GetQuery
	//	*StreamingQueryManagerCommand_AwaitAnyTermination
	//	*StreamingQueryManagerCommand_ResetTerminated
	//	*StreamingQueryManagerCommand_AddListener
	//	*StreamingQueryManagerCommand_RemoveListener
	//	*StreamingQueryManagerCommand_ListListeners
	Command isStreamingQueryManagerCommand_Command `protobuf_oneof:"command"`
}

func (x *StreamingQueryManagerCommand) Reset() {
	*x = StreamingQueryManagerCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[11]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryManagerCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryManagerCommand) ProtoMessage() {}

func (x *StreamingQueryManagerCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[11]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryManagerCommand.ProtoReflect.Descriptor instead.
func (*StreamingQueryManagerCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{11}
}

func (m *StreamingQueryManagerCommand) GetCommand() isStreamingQueryManagerCommand_Command {
	if m != nil {
		return m.Command
	}
	return nil
}

// Oneof getters: zero value when the `command` oneof holds another variant.
func (x *StreamingQueryManagerCommand) GetActive() bool {
	if x, ok := x.GetCommand().(*StreamingQueryManagerCommand_Active); ok {
		return x.Active
	}
	return false
}

func (x *StreamingQueryManagerCommand) GetGetQuery() string {
	if x, ok := x.GetCommand().(*StreamingQueryManagerCommand_GetQuery); ok {
		return x.GetQuery
	}
	return ""
}

func (x *StreamingQueryManagerCommand) GetAwaitAnyTermination() *StreamingQueryManagerCommand_AwaitAnyTerminationCommand {
	if x, ok := x.GetCommand().(*StreamingQueryManagerCommand_AwaitAnyTermination); ok {
		return x.AwaitAnyTermination
	}
	return nil
}

func (x *StreamingQueryManagerCommand) GetResetTerminated() bool {
	if x, ok := x.GetCommand().(*StreamingQueryManagerCommand_ResetTerminated); ok {
		return x.ResetTerminated
	}
	return false
}

func (x *StreamingQueryManagerCommand) GetAddListener() *StreamingQueryManagerCommand_StreamingQueryListenerCommand {
	if x, ok := x.GetCommand().(*StreamingQueryManagerCommand_AddListener); ok {
		return x.AddListener
	}
	return nil
}

func (x *StreamingQueryManagerCommand) GetRemoveListener() *StreamingQueryManagerCommand_StreamingQueryListenerCommand {
	if x, ok := x.GetCommand().(*StreamingQueryManagerCommand_RemoveListener); ok {
		return x.RemoveListener
	}
	return nil
}

func (x *StreamingQueryManagerCommand) GetListListeners() bool {
	if x, ok := x.GetCommand().(*StreamingQueryManagerCommand_ListListeners); ok {
		return x.ListListeners
	}
	return false
}

type isStreamingQueryManagerCommand_Command interface {
	isStreamingQueryManagerCommand_Command()
}

type StreamingQueryManagerCommand_Active struct {
	// active() API, returns a list of active queries.
	Active bool `protobuf:"varint,1,opt,name=active,proto3,oneof"`
}

type StreamingQueryManagerCommand_GetQuery struct {
	// get() API, returns the StreamingQuery identified by id.
	GetQuery string `protobuf:"bytes,2,opt,name=get_query,json=getQuery,proto3,oneof"`
}

type StreamingQueryManagerCommand_AwaitAnyTermination struct {
	// awaitAnyTermination() API, wait until any query terminates or timeout.
	AwaitAnyTermination *StreamingQueryManagerCommand_AwaitAnyTerminationCommand `protobuf:"bytes,3,opt,name=await_any_termination,json=awaitAnyTermination,proto3,oneof"`
}

type StreamingQueryManagerCommand_ResetTerminated struct {
	// resetTerminated() API.
	ResetTerminated bool `protobuf:"varint,4,opt,name=reset_terminated,json=resetTerminated,proto3,oneof"`
}

type StreamingQueryManagerCommand_AddListener struct {
	// addListener API.
	AddListener *StreamingQueryManagerCommand_StreamingQueryListenerCommand `protobuf:"bytes,5,opt,name=add_listener,json=addListener,proto3,oneof"`
}

type StreamingQueryManagerCommand_RemoveListener struct {
	// removeListener API.
	RemoveListener *StreamingQueryManagerCommand_StreamingQueryListenerCommand `protobuf:"bytes,6,opt,name=remove_listener,json=removeListener,proto3,oneof"`
}

type StreamingQueryManagerCommand_ListListeners struct {
	// listListeners() API, returns a list of streaming query listeners.
	ListListeners bool `protobuf:"varint,7,opt,name=list_listeners,json=listListeners,proto3,oneof"`
}

func (*StreamingQueryManagerCommand_Active) isStreamingQueryManagerCommand_Command() {}

func (*StreamingQueryManagerCommand_GetQuery) isStreamingQueryManagerCommand_Command() {}

func (*StreamingQueryManagerCommand_AwaitAnyTermination) isStreamingQueryManagerCommand_Command() {}

func (*StreamingQueryManagerCommand_ResetTerminated) isStreamingQueryManagerCommand_Command() {}

func (*StreamingQueryManagerCommand_AddListener) isStreamingQueryManagerCommand_Command() {}

func (*StreamingQueryManagerCommand_RemoveListener) isStreamingQueryManagerCommand_Command() {}

func (*StreamingQueryManagerCommand_ListListeners) isStreamingQueryManagerCommand_Command() {}

// Response for commands on the streaming query manager.
type StreamingQueryManagerCommandResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to ResultType:
	//
	//	*StreamingQueryManagerCommandResult_Active
	//	*StreamingQueryManagerCommandResult_Query
	//	*StreamingQueryManagerCommandResult_AwaitAnyTermination
	//	*StreamingQueryManagerCommandResult_ResetTerminated
	//	*StreamingQueryManagerCommandResult_AddListener
	//	*StreamingQueryManagerCommandResult_RemoveListener
	//	*StreamingQueryManagerCommandResult_ListListeners
	ResultType isStreamingQueryManagerCommandResult_ResultType `protobuf_oneof:"result_type"`
}

func (x *StreamingQueryManagerCommandResult) Reset() {
	*x = StreamingQueryManagerCommandResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[12]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryManagerCommandResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryManagerCommandResult) ProtoMessage() {}

func (x *StreamingQueryManagerCommandResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[12]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryManagerCommandResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryManagerCommandResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{12}
}

func (m *StreamingQueryManagerCommandResult) GetResultType() isStreamingQueryManagerCommandResult_ResultType {
	if m != nil {
		return m.ResultType
	}
	return nil
}

func (x *StreamingQueryManagerCommandResult) GetActive() *StreamingQueryManagerCommandResult_ActiveResult {
	if x, ok := x.GetResultType().(*StreamingQueryManagerCommandResult_Active); ok {
		return x.Active
	}
	return nil
}

func (x *StreamingQueryManagerCommandResult) GetQuery() *StreamingQueryManagerCommandResult_StreamingQueryInstance {
	if x, ok := x.GetResultType().(*StreamingQueryManagerCommandResult_Query); ok {
		return x.Query
	}
	return nil
}

func (x *StreamingQueryManagerCommandResult) GetAwaitAnyTermination() *StreamingQueryManagerCommandResult_AwaitAnyTerminationResult {
	if x, ok := x.GetResultType().(*StreamingQueryManagerCommandResult_AwaitAnyTermination); ok {
		return x.AwaitAnyTermination
	}
	return nil
}

func (x *StreamingQueryManagerCommandResult) GetResetTerminated() bool {
	if x, ok := x.GetResultType().(*StreamingQueryManagerCommandResult_ResetTerminated); ok {
		return x.ResetTerminated
	}
	return false
}

func (x *StreamingQueryManagerCommandResult) GetAddListener() bool {
	if x, ok := x.GetResultType().(*StreamingQueryManagerCommandResult_AddListener); ok {
		return x.AddListener
	}
	return false
}

func (x *StreamingQueryManagerCommandResult) GetRemoveListener() bool {
	if x, ok := x.GetResultType().(*StreamingQueryManagerCommandResult_RemoveListener); ok {
		return x.RemoveListener
	}
	return false
}

func (x *StreamingQueryManagerCommandResult) GetListListeners() *StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult {
	if x, ok := x.GetResultType().(*StreamingQueryManagerCommandResult_ListListeners); ok {
		return x.ListListeners
	}
	return nil
}

type isStreamingQueryManagerCommandResult_ResultType interface {
	isStreamingQueryManagerCommandResult_ResultType()
}

type StreamingQueryManagerCommandResult_Active struct {
	Active *StreamingQueryManagerCommandResult_ActiveResult `protobuf:"bytes,1,opt,name=active,proto3,oneof"`
}

type StreamingQueryManagerCommandResult_Query struct {
	Query *StreamingQueryManagerCommandResult_StreamingQueryInstance `protobuf:"bytes,2,opt,name=query,proto3,oneof"`
}

type StreamingQueryManagerCommandResult_AwaitAnyTermination struct {
	AwaitAnyTermination *StreamingQueryManagerCommandResult_AwaitAnyTerminationResult `protobuf:"bytes,3,opt,name=await_any_termination,json=awaitAnyTermination,proto3,oneof"`
}

type StreamingQueryManagerCommandResult_ResetTerminated struct {
	ResetTerminated bool `protobuf:"varint,4,opt,name=reset_terminated,json=resetTerminated,proto3,oneof"`
}

type StreamingQueryManagerCommandResult_AddListener struct {
	AddListener bool `protobuf:"varint,5,opt,name=add_listener,json=addListener,proto3,oneof"`
}

type StreamingQueryManagerCommandResult_RemoveListener struct {
	RemoveListener bool `protobuf:"varint,6,opt,name=remove_listener,json=removeListener,proto3,oneof"`
}

type StreamingQueryManagerCommandResult_ListListeners struct {
	ListListeners *StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult `protobuf:"bytes,7,opt,name=list_listeners,json=listListeners,proto3,oneof"`
}

func (*StreamingQueryManagerCommandResult_Active) isStreamingQueryManagerCommandResult_ResultType() {}

func (*StreamingQueryManagerCommandResult_Query) isStreamingQueryManagerCommandResult_ResultType() {}

func (*StreamingQueryManagerCommandResult_AwaitAnyTermination) isStreamingQueryManagerCommandResult_ResultType() {
}

func (*StreamingQueryManagerCommandResult_ResetTerminated) isStreamingQueryManagerCommandResult_ResultType() {
}

func (*StreamingQueryManagerCommandResult_AddListener) isStreamingQueryManagerCommandResult_ResultType() {
}

func (*StreamingQueryManagerCommandResult_RemoveListener) isStreamingQueryManagerCommandResult_ResultType() {
}

func (*StreamingQueryManagerCommandResult_ListListeners) isStreamingQueryManagerCommandResult_ResultType() {
}

// The protocol for client-side StreamingQueryListener.
// This command will only be set when either the first listener is added to the client, or the last
// listener is removed from the client.
// The add_listener_bus_listener command will only be set true in the first case.
// The remove_listener_bus_listener command will only be set true in the second case.
type StreamingQueryListenerBusCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to Command:
	//
	//	*StreamingQueryListenerBusCommand_AddListenerBusListener
	//	*StreamingQueryListenerBusCommand_RemoveListenerBusListener
	Command isStreamingQueryListenerBusCommand_Command `protobuf_oneof:"command"`
}

func (x *StreamingQueryListenerBusCommand) Reset() {
	*x = StreamingQueryListenerBusCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[13]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryListenerBusCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryListenerBusCommand) ProtoMessage() {}

func (x *StreamingQueryListenerBusCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[13]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryListenerBusCommand.ProtoReflect.Descriptor instead.
func (*StreamingQueryListenerBusCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{13}
}

func (m *StreamingQueryListenerBusCommand) GetCommand() isStreamingQueryListenerBusCommand_Command {
	if m != nil {
		return m.Command
	}
	return nil
}

func (x *StreamingQueryListenerBusCommand) GetAddListenerBusListener() bool {
	if x, ok := x.GetCommand().(*StreamingQueryListenerBusCommand_AddListenerBusListener); ok {
		return x.AddListenerBusListener
	}
	return false
}

func (x *StreamingQueryListenerBusCommand) GetRemoveListenerBusListener() bool {
	if x, ok := x.GetCommand().(*StreamingQueryListenerBusCommand_RemoveListenerBusListener); ok {
		return x.RemoveListenerBusListener
	}
	return false
}

type isStreamingQueryListenerBusCommand_Command interface {
	isStreamingQueryListenerBusCommand_Command()
}

type StreamingQueryListenerBusCommand_AddListenerBusListener struct {
	AddListenerBusListener bool `protobuf:"varint,1,opt,name=add_listener_bus_listener,json=addListenerBusListener,proto3,oneof"`
}

type StreamingQueryListenerBusCommand_RemoveListenerBusListener struct {
	RemoveListenerBusListener bool `protobuf:"varint,2,opt,name=remove_listener_bus_listener,json=removeListenerBusListener,proto3,oneof"`
}

func (*StreamingQueryListenerBusCommand_AddListenerBusListener) isStreamingQueryListenerBusCommand_Command() {
}

func (*StreamingQueryListenerBusCommand_RemoveListenerBusListener) isStreamingQueryListenerBusCommand_Command() {
}

// The protocol for the returned events in the long-running response channel.
type StreamingQueryListenerEvent struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The json serialized event, all StreamingQueryListener events have a json method
	EventJson string `protobuf:"bytes,1,opt,name=event_json,json=eventJson,proto3" json:"event_json,omitempty"`
	// (Required) Query event type used by client to decide how to deserialize the event_json
	EventType StreamingQueryEventType `protobuf:"varint,2,opt,name=event_type,json=eventType,proto3,enum=spark.connect.StreamingQueryEventType" json:"event_type,omitempty"`
}

func (x *StreamingQueryListenerEvent) Reset() {
	*x = StreamingQueryListenerEvent{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[14]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryListenerEvent) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryListenerEvent) ProtoMessage() {}

// ProtoReflect returns the message's reflective view, lazily caching the
// generated message info on first use when unsafe optimizations are enabled.
func (x *StreamingQueryListenerEvent) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[14]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryListenerEvent.ProtoReflect.Descriptor instead.
func (*StreamingQueryListenerEvent) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{14}
}

func (x *StreamingQueryListenerEvent) GetEventJson() string {
	if x != nil {
		return x.EventJson
	}
	return ""
}

// GetEventType returns the enum's zero value (QUERY_PROGRESS_UNSPECIFIED)
// on a nil receiver.
func (x *StreamingQueryListenerEvent) GetEventType() StreamingQueryEventType {
	if x != nil {
		return x.EventType
	}
	return StreamingQueryEventType_QUERY_PROGRESS_UNSPECIFIED
}

// StreamingQueryListenerEventsResult batches listener events streamed back
// to the client; listener_bus_listener_added is an optional flag.
type StreamingQueryListenerEventsResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Events                   []*StreamingQueryListenerEvent `protobuf:"bytes,1,rep,name=events,proto3" json:"events,omitempty"`
	ListenerBusListenerAdded *bool                          `protobuf:"varint,2,opt,name=listener_bus_listener_added,json=listenerBusListenerAdded,proto3,oneof" json:"listener_bus_listener_added,omitempty"`
}

func (x *StreamingQueryListenerEventsResult) Reset() {
	*x = StreamingQueryListenerEventsResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[15]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryListenerEventsResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryListenerEventsResult) ProtoMessage() {}

func (x *StreamingQueryListenerEventsResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[15]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryListenerEventsResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryListenerEventsResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{15}
}

func (x *StreamingQueryListenerEventsResult) GetEvents() []*StreamingQueryListenerEvent {
	if x != nil {
		return x.Events
	}
	return nil
}

// GetListenerBusListenerAdded dereferences the optional flag, returning
// false when the field (or x) is unset.
func (x *StreamingQueryListenerEventsResult) GetListenerBusListenerAdded() bool {
	if x != nil && x.ListenerBusListenerAdded != nil {
		return *x.ListenerBusListenerAdded
	}
	return false
}

// Command to get the output of 'SparkContext.resources'
type GetResourcesCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}

func (x *GetResourcesCommand) Reset() {
	*x = GetResourcesCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[16]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *GetResourcesCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*GetResourcesCommand) ProtoMessage() {}

func (x *GetResourcesCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[16]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetResourcesCommand.ProtoReflect.Descriptor instead.
func (*GetResourcesCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{16}
}

// Response for command 'GetResourcesCommand'.
// GetResourcesCommandResult is the protoc-generated response message for
// GetResourcesCommand, mapping resource name to its information.
type GetResourcesCommandResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Resources map[string]*ResourceInformation `protobuf:"bytes,1,rep,name=resources,proto3" json:"resources,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *GetResourcesCommandResult) Reset() {
	*x = GetResourcesCommandResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[17]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *GetResourcesCommandResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*GetResourcesCommandResult) ProtoMessage() {}

func (x *GetResourcesCommandResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[17]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetResourcesCommandResult.ProtoReflect.Descriptor instead.
func (*GetResourcesCommandResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{17}
}

func (x *GetResourcesCommandResult) GetResources() map[string]*ResourceInformation {
	if x != nil {
		return x.Resources
	}
	return nil
}

// Command to create ResourceProfile
type CreateResourceProfileCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The ResourceProfile to be built on the server-side.
	Profile *ResourceProfile `protobuf:"bytes,1,opt,name=profile,proto3" json:"profile,omitempty"`
}

func (x *CreateResourceProfileCommand) Reset() {
	*x = CreateResourceProfileCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[18]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *CreateResourceProfileCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*CreateResourceProfileCommand) ProtoMessage() {}

func (x *CreateResourceProfileCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[18]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CreateResourceProfileCommand.ProtoReflect.Descriptor instead.
func (*CreateResourceProfileCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{18}
}

func (x *CreateResourceProfileCommand) GetProfile() *ResourceProfile {
	if x != nil {
		return x.Profile
	}
	return nil
}

// Response for command 'CreateResourceProfileCommand'.
type CreateResourceProfileCommandResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Server-side generated resource profile id.
	ProfileId int32 `protobuf:"varint,1,opt,name=profile_id,json=profileId,proto3" json:"profile_id,omitempty"`
}

func (x *CreateResourceProfileCommandResult) Reset() {
	*x = CreateResourceProfileCommandResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[19]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *CreateResourceProfileCommandResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*CreateResourceProfileCommandResult) ProtoMessage() {}

func (x *CreateResourceProfileCommandResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[19]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CreateResourceProfileCommandResult.ProtoReflect.Descriptor instead.
func (*CreateResourceProfileCommandResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{19}
}

func (x *CreateResourceProfileCommandResult) GetProfileId() int32 {
	if x != nil {
		return x.ProfileId
	}
	return 0
}

// Command to remove `CashedRemoteRelation`
type RemoveCachedRemoteRelationCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The remote to be related
	Relation *CachedRemoteRelation `protobuf:"bytes,1,opt,name=relation,proto3" json:"relation,omitempty"`
}

func (x *RemoveCachedRemoteRelationCommand) Reset() {
	*x = RemoveCachedRemoteRelationCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[20]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *RemoveCachedRemoteRelationCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*RemoveCachedRemoteRelationCommand) ProtoMessage() {}

func (x *RemoveCachedRemoteRelationCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[20]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use RemoveCachedRemoteRelationCommand.ProtoReflect.Descriptor instead.
func (*RemoveCachedRemoteRelationCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{20}
}

func (x *RemoveCachedRemoteRelationCommand) GetRelation() *CachedRemoteRelation {
	if x != nil {
		return x.Relation
	}
	return nil
}

// CheckpointCommand is the protoc-generated message that asks the server to
// checkpoint a logical plan, optionally locally and/or eagerly.
type CheckpointCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The logical plan to checkpoint.
	Relation *Relation `protobuf:"bytes,1,opt,name=relation,proto3" json:"relation,omitempty"`
	// (Required) Locally checkpoint using a local temporary
	// directory in Spark Connect server (Spark Driver)
	Local bool `protobuf:"varint,2,opt,name=local,proto3" json:"local,omitempty"`
	// (Required) Whether to checkpoint this dataframe immediately.
	Eager bool `protobuf:"varint,3,opt,name=eager,proto3" json:"eager,omitempty"`
	// (Optional) For local checkpoint, the storage level to use.
	StorageLevel *StorageLevel `protobuf:"bytes,4,opt,name=storage_level,json=storageLevel,proto3,oneof" json:"storage_level,omitempty"`
}

func (x *CheckpointCommand) Reset() {
	*x = CheckpointCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[21]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *CheckpointCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*CheckpointCommand) ProtoMessage() {}

func (x *CheckpointCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[21]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CheckpointCommand.ProtoReflect.Descriptor instead.
func (*CheckpointCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{21}
}

func (x *CheckpointCommand) GetRelation() *Relation {
	if x != nil {
		return x.Relation
	}
	return nil
}

func (x *CheckpointCommand) GetLocal() bool {
	if x != nil {
		return x.Local
	}
	return false
}

func (x *CheckpointCommand) GetEager() bool {
	if x != nil {
		return x.Eager
	}
	return false
}

func (x *CheckpointCommand) GetStorageLevel() *StorageLevel {
	if x != nil {
		return x.StorageLevel
	}
	return nil
}

// MergeIntoTableCommand is the protoc-generated message describing a merge of
// a source relation into a named target table, with per-branch merge actions.
type MergeIntoTableCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The name of the target table.
	TargetTableName string `protobuf:"bytes,1,opt,name=target_table_name,json=targetTableName,proto3" json:"target_table_name,omitempty"`
	// (Required) The relation of the source table.
	SourceTablePlan *Relation `protobuf:"bytes,2,opt,name=source_table_plan,json=sourceTablePlan,proto3" json:"source_table_plan,omitempty"`
	// (Required) The condition to match the source and target.
	MergeCondition *Expression `protobuf:"bytes,3,opt,name=merge_condition,json=mergeCondition,proto3" json:"merge_condition,omitempty"`
	// (Optional) The actions to be taken when the condition is matched.
	MatchActions []*Expression `protobuf:"bytes,4,rep,name=match_actions,json=matchActions,proto3" json:"match_actions,omitempty"`
	// (Optional) The actions to be taken when the condition is not matched.
	NotMatchedActions []*Expression `protobuf:"bytes,5,rep,name=not_matched_actions,json=notMatchedActions,proto3" json:"not_matched_actions,omitempty"`
	// (Optional) The actions to be taken when the condition is not matched by source.
	NotMatchedBySourceActions []*Expression `protobuf:"bytes,6,rep,name=not_matched_by_source_actions,json=notMatchedBySourceActions,proto3" json:"not_matched_by_source_actions,omitempty"`
	// (Required) Whether to enable schema evolution.
	WithSchemaEvolution bool `protobuf:"varint,7,opt,name=with_schema_evolution,json=withSchemaEvolution,proto3" json:"with_schema_evolution,omitempty"`
}

func (x *MergeIntoTableCommand) Reset() {
	*x = MergeIntoTableCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[22]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MergeIntoTableCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MergeIntoTableCommand) ProtoMessage() {}

func (x *MergeIntoTableCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[22]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MergeIntoTableCommand.ProtoReflect.Descriptor instead.
func (*MergeIntoTableCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{22}
}

func (x *MergeIntoTableCommand) GetTargetTableName() string {
	if x != nil {
		return x.TargetTableName
	}
	return ""
}

func (x *MergeIntoTableCommand) GetSourceTablePlan() *Relation {
	if x != nil {
		return x.SourceTablePlan
	}
	return nil
}

func (x *MergeIntoTableCommand) GetMergeCondition() *Expression {
	if x != nil {
		return x.MergeCondition
	}
	return nil
}

func (x *MergeIntoTableCommand) GetMatchActions() []*Expression {
	if x != nil {
		return x.MatchActions
	}
	return nil
}

func (x *MergeIntoTableCommand) GetNotMatchedActions() []*Expression {
	if x != nil {
		return x.NotMatchedActions
	}
	return nil
}

func (x *MergeIntoTableCommand) GetNotMatchedBySourceActions() []*Expression {
	if x != nil {
		return x.NotMatchedBySourceActions
	}
	return nil
}

func (x *MergeIntoTableCommand) GetWithSchemaEvolution() bool {
	if x != nil {
		return x.WithSchemaEvolution
	}
	return false
}

// Execute an arbitrary string command inside an external execution engine
type ExecuteExternalCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The class name of the runner that implements `ExternalCommandRunner`
	Runner string `protobuf:"bytes,1,opt,name=runner,proto3" json:"runner,omitempty"`
	// (Required) The target command to be executed.
	Command string `protobuf:"bytes,2,opt,name=command,proto3" json:"command,omitempty"`
	// (Optional) The options for the runner.
	Options map[string]string `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *ExecuteExternalCommand) Reset() {
	*x = ExecuteExternalCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[23]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ExecuteExternalCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ExecuteExternalCommand) ProtoMessage() {}

func (x *ExecuteExternalCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[23]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ExecuteExternalCommand.ProtoReflect.Descriptor instead.
func (*ExecuteExternalCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{23}
}

func (x *ExecuteExternalCommand) GetRunner() string {
	if x != nil {
		return x.Runner
	}
	return ""
}

func (x *ExecuteExternalCommand) GetCommand() string {
	if x != nil {
		return x.Command
	}
	return ""
}

func (x *ExecuteExternalCommand) GetOptions() map[string]string {
	if x != nil {
		return x.Options
	}
	return nil
}

// WriteOperation_SaveTable is the nested message holding the table name and
// save method used when a write operation targets a table.
type WriteOperation_SaveTable struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The table name.
	TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"`
	// (Required) The method to be called to write to the table.
	SaveMethod WriteOperation_SaveTable_TableSaveMethod `protobuf:"varint,2,opt,name=save_method,json=saveMethod,proto3,enum=spark.connect.WriteOperation_SaveTable_TableSaveMethod" json:"save_method,omitempty"`
}

func (x *WriteOperation_SaveTable) Reset() {
	*x = WriteOperation_SaveTable{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[27]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *WriteOperation_SaveTable) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*WriteOperation_SaveTable) ProtoMessage() {}

func (x *WriteOperation_SaveTable) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[27]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use WriteOperation_SaveTable.ProtoReflect.Descriptor instead.
func (*WriteOperation_SaveTable) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{3, 1}
}

func (x *WriteOperation_SaveTable) GetTableName() string {
	if x != nil {
		return x.TableName
	}
	return ""
}

func (x *WriteOperation_SaveTable) GetSaveMethod() WriteOperation_SaveTable_TableSaveMethod {
	if x != nil {
		return x.SaveMethod
	}
	return WriteOperation_SaveTable_TABLE_SAVE_METHOD_UNSPECIFIED
}

// WriteOperation_BucketBy is the nested message holding bucketing columns and
// bucket count for a write operation.
type WriteOperation_BucketBy struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	BucketColumnNames []string `protobuf:"bytes,1,rep,name=bucket_column_names,json=bucketColumnNames,proto3" json:"bucket_column_names,omitempty"`
	NumBuckets        int32    `protobuf:"varint,2,opt,name=num_buckets,json=numBuckets,proto3" json:"num_buckets,omitempty"`
}

func (x *WriteOperation_BucketBy) Reset() {
	*x = WriteOperation_BucketBy{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[28]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *WriteOperation_BucketBy) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*WriteOperation_BucketBy) ProtoMessage() {}

func (x *WriteOperation_BucketBy) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[28]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use WriteOperation_BucketBy.ProtoReflect.Descriptor instead.
func (*WriteOperation_BucketBy) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{3, 2}
}

func (x *WriteOperation_BucketBy) GetBucketColumnNames() []string {
	if x != nil {
		return x.BucketColumnNames
	}
	return nil
}

func (x *WriteOperation_BucketBy) GetNumBuckets() int32 {
	if x != nil {
		return x.NumBuckets
	}
	return 0
}

// StreamingQueryCommand_ExplainCommand carries the extended/simple flag for a
// streaming query explain request.
type StreamingQueryCommand_ExplainCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// TODO: Consider reusing Explain from AnalyzePlanRequest message.
	//
	// We can not do this right now since it base.proto imports this file.
	Extended bool `protobuf:"varint,1,opt,name=extended,proto3" json:"extended,omitempty"`
}

func (x *StreamingQueryCommand_ExplainCommand) Reset() {
	*x = StreamingQueryCommand_ExplainCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[32]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommand_ExplainCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommand_ExplainCommand) ProtoMessage() {}

func (x *StreamingQueryCommand_ExplainCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[32]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommand_ExplainCommand.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommand_ExplainCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{9, 0}
}

func (x *StreamingQueryCommand_ExplainCommand) GetExtended() bool {
	if x != nil {
		return x.Extended
	}
	return false
}

// StreamingQueryCommand_AwaitTerminationCommand carries the optional timeout
// for awaiting termination of a single streaming query.
type StreamingQueryCommand_AwaitTerminationCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	TimeoutMs *int64 `protobuf:"varint,2,opt,name=timeout_ms,json=timeoutMs,proto3,oneof" json:"timeout_ms,omitempty"`
}

func (x *StreamingQueryCommand_AwaitTerminationCommand) Reset() {
	*x = StreamingQueryCommand_AwaitTerminationCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[33]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommand_AwaitTerminationCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommand_AwaitTerminationCommand) ProtoMessage() {}

func (x *StreamingQueryCommand_AwaitTerminationCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[33]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommand_AwaitTerminationCommand.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommand_AwaitTerminationCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{9, 1}
}

// GetTimeoutMs dereferences the optional timeout, returning 0 when unset.
func (x *StreamingQueryCommand_AwaitTerminationCommand) GetTimeoutMs() int64 {
	if x != nil && x.TimeoutMs != nil {
		return *x.TimeoutMs
	}
	return 0
}

// StreamingQueryCommandResult_StatusResult mirrors the fields of Scala's
// StreamingQueryStatus.
type StreamingQueryCommandResult_StatusResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// See documentation for these Scala 'StreamingQueryStatus' struct
	StatusMessage   string `protobuf:"bytes,1,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"`
	IsDataAvailable bool   `protobuf:"varint,2,opt,name=is_data_available,json=isDataAvailable,proto3" json:"is_data_available,omitempty"`
	IsTriggerActive bool   `protobuf:"varint,3,opt,name=is_trigger_active,json=isTriggerActive,proto3" json:"is_trigger_active,omitempty"`
	IsActive        bool   `protobuf:"varint,4,opt,name=is_active,json=isActive,proto3" json:"is_active,omitempty"`
}

func (x *StreamingQueryCommandResult_StatusResult) Reset() {
	*x = StreamingQueryCommandResult_StatusResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[34]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommandResult_StatusResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommandResult_StatusResult) ProtoMessage() {}

func (x *StreamingQueryCommandResult_StatusResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[34]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommandResult_StatusResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommandResult_StatusResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{10, 0}
}

func (x *StreamingQueryCommandResult_StatusResult) GetStatusMessage() string {
	if x != nil {
		return x.StatusMessage
	}
	return ""
}

func (x *StreamingQueryCommandResult_StatusResult) GetIsDataAvailable() bool {
	if x != nil {
		return x.IsDataAvailable
	}
	return false
}

func (x *StreamingQueryCommandResult_StatusResult) GetIsTriggerActive() bool {
	if x != nil {
		return x.IsTriggerActive
	}
	return false
}

func (x *StreamingQueryCommandResult_StatusResult) GetIsActive() bool {
	if x != nil {
		return x.IsActive
	}
	return false
}

// StreamingQueryCommandResult_RecentProgressResult holds recent progress
// reports for a streaming query.
type StreamingQueryCommandResult_RecentProgressResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Progress reports as an array of json strings.
	RecentProgressJson []string `protobuf:"bytes,5,rep,name=recent_progress_json,json=recentProgressJson,proto3" json:"recent_progress_json,omitempty"`
}

func (x *StreamingQueryCommandResult_RecentProgressResult) Reset() {
	*x = StreamingQueryCommandResult_RecentProgressResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[35]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommandResult_RecentProgressResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommandResult_RecentProgressResult) ProtoMessage() {}

func (x *StreamingQueryCommandResult_RecentProgressResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[35]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommandResult_RecentProgressResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommandResult_RecentProgressResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{10, 1}
}

func (x *StreamingQueryCommandResult_RecentProgressResult) GetRecentProgressJson() []string {
	if x != nil {
		return x.RecentProgressJson
	}
	return nil
}

// StreamingQueryCommandResult_ExplainResult carries the explain output of a
// streaming query as a single string.
type StreamingQueryCommandResult_ExplainResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Logical and physical plans as string
	Result string `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"`
}

func (x *StreamingQueryCommandResult_ExplainResult) Reset() {
	*x = StreamingQueryCommandResult_ExplainResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[36]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommandResult_ExplainResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommandResult_ExplainResult) ProtoMessage() {}

func (x *StreamingQueryCommandResult_ExplainResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[36]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommandResult_ExplainResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommandResult_ExplainResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{10, 2}
}

func (x *StreamingQueryCommandResult_ExplainResult) GetResult() string {
	if x != nil {
		return x.Result
	}
	return ""
}

// StreamingQueryCommandResult_ExceptionResult describes a streaming query
// exception: message, error class, and stack trace, all optional.
type StreamingQueryCommandResult_ExceptionResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Optional) Exception message as string, maps to the return value of original
	// StreamingQueryException's toString method
	ExceptionMessage *string `protobuf:"bytes,1,opt,name=exception_message,json=exceptionMessage,proto3,oneof" json:"exception_message,omitempty"`
	// (Optional) Exception error class as string
	ErrorClass *string `protobuf:"bytes,2,opt,name=error_class,json=errorClass,proto3,oneof" json:"error_class,omitempty"`
	// (Optional) Exception stack trace as string
	StackTrace *string `protobuf:"bytes,3,opt,name=stack_trace,json=stackTrace,proto3,oneof" json:"stack_trace,omitempty"`
}

func (x *StreamingQueryCommandResult_ExceptionResult) Reset() {
	*x = StreamingQueryCommandResult_ExceptionResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[37]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommandResult_ExceptionResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommandResult_ExceptionResult) ProtoMessage() {}

func (x *StreamingQueryCommandResult_ExceptionResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[37]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommandResult_ExceptionResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommandResult_ExceptionResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{10, 3}
}

// GetExceptionMessage dereferences the optional message, returning "" when unset.
func (x *StreamingQueryCommandResult_ExceptionResult) GetExceptionMessage() string {
	if x != nil && x.ExceptionMessage != nil {
		return *x.ExceptionMessage
	}
	return ""
}

// GetErrorClass dereferences the optional error class, returning "" when unset.
func (x *StreamingQueryCommandResult_ExceptionResult) GetErrorClass() string {
	if x != nil && x.ErrorClass != nil {
		return *x.ErrorClass
	}
	return ""
}

// GetStackTrace dereferences the optional stack trace, returning "" when unset.
func (x *StreamingQueryCommandResult_ExceptionResult) GetStackTrace() string {
	if x != nil && x.StackTrace != nil {
		return *x.StackTrace
	}
	return ""
}

// StreamingQueryCommandResult_AwaitTerminationResult reports whether the
// awaited streaming query terminated.
type StreamingQueryCommandResult_AwaitTerminationResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Terminated bool `protobuf:"varint,1,opt,name=terminated,proto3" json:"terminated,omitempty"`
}

func (x *StreamingQueryCommandResult_AwaitTerminationResult) Reset() {
	*x = StreamingQueryCommandResult_AwaitTerminationResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[38]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryCommandResult_AwaitTerminationResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryCommandResult_AwaitTerminationResult) ProtoMessage() {}

func (x *StreamingQueryCommandResult_AwaitTerminationResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[38]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryCommandResult_AwaitTerminationResult.ProtoReflect.Descriptor instead.
func (*StreamingQueryCommandResult_AwaitTerminationResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{10, 4}
}

func (x *StreamingQueryCommandResult_AwaitTerminationResult) GetTerminated() bool {
	if x != nil {
		return x.Terminated
	}
	return false
}

// StreamingQueryManagerCommand_AwaitAnyTerminationCommand carries the
// optional timeout for awaiting termination of any streaming query.
type StreamingQueryManagerCommand_AwaitAnyTerminationCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Optional) The waiting time in milliseconds to wait for any query to terminate.
	TimeoutMs *int64 `protobuf:"varint,1,opt,name=timeout_ms,json=timeoutMs,proto3,oneof" json:"timeout_ms,omitempty"`
}

func (x *StreamingQueryManagerCommand_AwaitAnyTerminationCommand) Reset() {
	*x = StreamingQueryManagerCommand_AwaitAnyTerminationCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[39]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryManagerCommand_AwaitAnyTerminationCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryManagerCommand_AwaitAnyTerminationCommand) ProtoMessage() {}

func (x *StreamingQueryManagerCommand_AwaitAnyTerminationCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[39]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryManagerCommand_AwaitAnyTerminationCommand.ProtoReflect.Descriptor instead.
func (*StreamingQueryManagerCommand_AwaitAnyTerminationCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_commands_proto_rawDescGZIP(), []int{11, 0}
}

// GetTimeoutMs dereferences the optional timeout, returning 0 when unset.
func (x *StreamingQueryManagerCommand_AwaitAnyTerminationCommand) GetTimeoutMs() int64 {
	if x != nil && x.TimeoutMs != nil {
		return *x.TimeoutMs
	}
	return 0
}

// StreamingQueryManagerCommand_StreamingQueryListenerCommand carries a
// serialized listener payload (or a Python UDF payload) and a listener id.
type StreamingQueryManagerCommand_StreamingQueryListenerCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	ListenerPayload       []byte     `protobuf:"bytes,1,opt,name=listener_payload,json=listenerPayload,proto3" json:"listener_payload,omitempty"`
	PythonListenerPayload *PythonUDF `protobuf:"bytes,2,opt,name=python_listener_payload,json=pythonListenerPayload,proto3,oneof" json:"python_listener_payload,omitempty"`
	Id                    string     `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"`
}

func (x *StreamingQueryManagerCommand_StreamingQueryListenerCommand) Reset() {
	*x = StreamingQueryManagerCommand_StreamingQueryListenerCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_commands_proto_msgTypes[40]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *StreamingQueryManagerCommand_StreamingQueryListenerCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*StreamingQueryManagerCommand_StreamingQueryListenerCommand) ProtoMessage() {}

func (x *StreamingQueryManagerCommand_StreamingQueryListenerCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_commands_proto_msgTypes[40]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StreamingQueryManagerCommand_StreamingQueryListenerCommand.ProtoReflect.Descriptor instead.
func (*StreamingQueryManagerCommand_StreamingQueryListenerCommand) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{11, 1} } func (x *StreamingQueryManagerCommand_StreamingQueryListenerCommand) GetListenerPayload() []byte { if x != nil { return x.ListenerPayload } return nil } func (x *StreamingQueryManagerCommand_StreamingQueryListenerCommand) GetPythonListenerPayload() *PythonUDF { if x != nil { return x.PythonListenerPayload } return nil } func (x *StreamingQueryManagerCommand_StreamingQueryListenerCommand) GetId() string { if x != nil { return x.Id } return "" } type StreamingQueryManagerCommandResult_ActiveResult struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields ActiveQueries []*StreamingQueryManagerCommandResult_StreamingQueryInstance `protobuf:"bytes,1,rep,name=active_queries,json=activeQueries,proto3" json:"active_queries,omitempty"` } func (x *StreamingQueryManagerCommandResult_ActiveResult) Reset() { *x = StreamingQueryManagerCommandResult_ActiveResult{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[41] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StreamingQueryManagerCommandResult_ActiveResult) String() string { return protoimpl.X.MessageStringOf(x) } func (*StreamingQueryManagerCommandResult_ActiveResult) ProtoMessage() {} func (x *StreamingQueryManagerCommandResult_ActiveResult) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[41] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StreamingQueryManagerCommandResult_ActiveResult.ProtoReflect.Descriptor instead. 
func (*StreamingQueryManagerCommandResult_ActiveResult) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{12, 0} } func (x *StreamingQueryManagerCommandResult_ActiveResult) GetActiveQueries() []*StreamingQueryManagerCommandResult_StreamingQueryInstance { if x != nil { return x.ActiveQueries } return nil } type StreamingQueryManagerCommandResult_StreamingQueryInstance struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The id and runId of this query. Id *StreamingQueryInstanceId `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // (Optional) The name of this query. Name *string `protobuf:"bytes,2,opt,name=name,proto3,oneof" json:"name,omitempty"` } func (x *StreamingQueryManagerCommandResult_StreamingQueryInstance) Reset() { *x = StreamingQueryManagerCommandResult_StreamingQueryInstance{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[42] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StreamingQueryManagerCommandResult_StreamingQueryInstance) String() string { return protoimpl.X.MessageStringOf(x) } func (*StreamingQueryManagerCommandResult_StreamingQueryInstance) ProtoMessage() {} func (x *StreamingQueryManagerCommandResult_StreamingQueryInstance) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[42] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StreamingQueryManagerCommandResult_StreamingQueryInstance.ProtoReflect.Descriptor instead. 
func (*StreamingQueryManagerCommandResult_StreamingQueryInstance) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{12, 1} } func (x *StreamingQueryManagerCommandResult_StreamingQueryInstance) GetId() *StreamingQueryInstanceId { if x != nil { return x.Id } return nil } func (x *StreamingQueryManagerCommandResult_StreamingQueryInstance) GetName() string { if x != nil && x.Name != nil { return *x.Name } return "" } type StreamingQueryManagerCommandResult_AwaitAnyTerminationResult struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Terminated bool `protobuf:"varint,1,opt,name=terminated,proto3" json:"terminated,omitempty"` } func (x *StreamingQueryManagerCommandResult_AwaitAnyTerminationResult) Reset() { *x = StreamingQueryManagerCommandResult_AwaitAnyTerminationResult{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[43] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StreamingQueryManagerCommandResult_AwaitAnyTerminationResult) String() string { return protoimpl.X.MessageStringOf(x) } func (*StreamingQueryManagerCommandResult_AwaitAnyTerminationResult) ProtoMessage() {} func (x *StreamingQueryManagerCommandResult_AwaitAnyTerminationResult) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[43] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StreamingQueryManagerCommandResult_AwaitAnyTerminationResult.ProtoReflect.Descriptor instead. 
func (*StreamingQueryManagerCommandResult_AwaitAnyTerminationResult) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{12, 2} } func (x *StreamingQueryManagerCommandResult_AwaitAnyTerminationResult) GetTerminated() bool { if x != nil { return x.Terminated } return false } type StreamingQueryManagerCommandResult_StreamingQueryListenerInstance struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields ListenerPayload []byte `protobuf:"bytes,1,opt,name=listener_payload,json=listenerPayload,proto3" json:"listener_payload,omitempty"` } func (x *StreamingQueryManagerCommandResult_StreamingQueryListenerInstance) Reset() { *x = StreamingQueryManagerCommandResult_StreamingQueryListenerInstance{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[44] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StreamingQueryManagerCommandResult_StreamingQueryListenerInstance) String() string { return protoimpl.X.MessageStringOf(x) } func (*StreamingQueryManagerCommandResult_StreamingQueryListenerInstance) ProtoMessage() {} func (x *StreamingQueryManagerCommandResult_StreamingQueryListenerInstance) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[44] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StreamingQueryManagerCommandResult_StreamingQueryListenerInstance.ProtoReflect.Descriptor instead. 
func (*StreamingQueryManagerCommandResult_StreamingQueryListenerInstance) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{12, 3} } func (x *StreamingQueryManagerCommandResult_StreamingQueryListenerInstance) GetListenerPayload() []byte { if x != nil { return x.ListenerPayload } return nil } type StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Reference IDs of listener instances. ListenerIds []string `protobuf:"bytes,1,rep,name=listener_ids,json=listenerIds,proto3" json:"listener_ids,omitempty"` } func (x *StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult) Reset() { *x = StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_commands_proto_msgTypes[45] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult) String() string { return protoimpl.X.MessageStringOf(x) } func (*StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult) ProtoMessage() {} func (x *StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_commands_proto_msgTypes[45] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult.ProtoReflect.Descriptor instead. 
func (*StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult) Descriptor() ([]byte, []int) { return file_spark_connect_commands_proto_rawDescGZIP(), []int{12, 4} } func (x *StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult) GetListenerIds() []string { if x != nil { return x.ListenerIds } return nil } var File_spark_connect_commands_proto protoreflect.FileDescriptor var file_spark_connect_commands_proto_rawDesc = []byte{ 0x0a, 0x1c, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1a, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x16, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x6d, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xfb, 0x0e, 0x0a, 0x07, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x5d, 0x0a, 0x11, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 
0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x10, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x0f, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x5f, 0x0a, 0x15, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x76, 0x69, 0x65, 0x77, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x56, 0x69, 0x65, 0x77, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x13, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x56, 0x69, 0x65, 0x77, 0x12, 0x4f, 0x0a, 0x12, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x32, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x32, 0x48, 0x00, 0x52, 0x10, 0x77, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x32, 0x12, 0x3c, 0x0a, 0x0b, 0x73, 0x71, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 
0x61, 0x6e, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x6b, 0x0a, 0x1c, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x48, 0x00, 0x52, 0x19, 0x77, 0x72, 0x69, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x5e, 0x0a, 0x17, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x15, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x58, 0x0a, 0x15, 0x67, 0x65, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x13, 0x67, 0x65, 0x74, 0x52, 0x65, 0x73, 0x6f, 
0x75, 0x72, 0x63, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x74, 0x0a, 0x1f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x1c, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x6d, 0x0a, 0x17, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x15, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x81, 0x01, 0x0a, 0x24, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x62, 0x75, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x42, 0x75, 0x73, 0x43, 0x6f, 0x6d, 
0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x42, 0x75, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x64, 0x0a, 0x14, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x48, 0x00, 0x52, 0x12, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x74, 0x0a, 0x1f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x1c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x51, 0x0a, 0x12, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x11, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 
0x69, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x84, 0x01, 0x0a, 0x25, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x21, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x5f, 0x0a, 0x18, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x6f, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x49, 0x6e, 0x74, 0x6f, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x15, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x49, 0x6e, 0x74, 0x6f, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x6d, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x09, 0x6d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x61, 0x0a, 0x18, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x12, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x16, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x4b, 0x0a, 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x0f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x35, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x0e, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xaa, 0x04, 0x0a, 0x0a, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x14, 0x0a, 0x03, 0x73, 0x71, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x03, 0x73, 0x71, 0x6c, 0x12, 0x3b, 0x0a, 0x04, 0x61, 0x72, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x41, 0x72, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, 0x61, 0x72, 0x67, 0x73, 0x12, 0x40, 0x0a, 0x08, 0x70, 0x6f, 0x73, 0x5f, 0x61, 0x72, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 
0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x70, 0x6f, 0x73, 0x41, 0x72, 0x67, 0x73, 0x12, 0x5a, 0x0a, 0x0f, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x42, 0x0a, 0x0d, 0x70, 0x6f, 0x73, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0c, 0x70, 0x6f, 0x73, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x1a, 0x5a, 0x0a, 0x09, 0x41, 0x72, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x37, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 
0x6c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5c, 0x0a, 0x13, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2f, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x96, 0x01, 0x0a, 0x1a, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x56, 0x69, 0x65, 0x77, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x73, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x22, 0xca, 0x08, 0x0a, 0x0e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 
0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x12, 0x14, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x3f, 0x0a, 0x05, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x61, 0x76, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x05, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x3a, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x61, 0x76, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0f, 0x73, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x31, 0x0a, 0x14, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x13, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x43, 0x0a, 0x09, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x42, 0x79, 0x52, 0x08, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x42, 0x79, 0x12, 
0x44, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x09, 0x52, 0x11, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x82, 0x02, 0x0a, 0x09, 0x53, 0x61, 0x76, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x58, 0x0a, 0x0b, 0x73, 0x61, 0x76, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x61, 0x76, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x61, 0x76, 0x65, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x52, 0x0a, 0x73, 0x61, 0x76, 0x65, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x22, 0x7c, 0x0a, 0x0f, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x61, 0x76, 0x65, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, 0x21, 0x0a, 
0x1d, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x45, 0x54, 0x48, 0x4f, 0x44, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x23, 0x0a, 0x1f, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x45, 0x54, 0x48, 0x4f, 0x44, 0x5f, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x41, 0x53, 0x5f, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x12, 0x21, 0x0a, 0x1d, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x45, 0x54, 0x48, 0x4f, 0x44, 0x5f, 0x49, 0x4e, 0x53, 0x45, 0x52, 0x54, 0x5f, 0x49, 0x4e, 0x54, 0x4f, 0x10, 0x02, 0x1a, 0x5b, 0x0a, 0x08, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x42, 0x79, 0x12, 0x2e, 0x0a, 0x13, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x11, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6e, 0x75, 0x6d, 0x5f, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x6e, 0x75, 0x6d, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x22, 0x89, 0x01, 0x0a, 0x08, 0x53, 0x61, 0x76, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x41, 0x50, 0x50, 0x45, 0x4e, 0x44, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x4f, 0x56, 0x45, 0x52, 0x57, 0x52, 0x49, 0x54, 0x45, 0x10, 0x02, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x5f, 0x49, 0x46, 0x5f, 0x45, 0x58, 0x49, 0x53, 0x54, 0x53, 0x10, 0x03, 0x12, 0x14, 0x0a, 0x10, 0x53, 0x41, 0x56, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x49, 0x47, 0x4e, 0x4f, 
0x52, 0x45, 0x10, 0x04, 0x42, 0x0b, 0x0a, 0x09, 0x73, 0x61, 0x76, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0xdc, 0x06, 0x0a, 0x10, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x32, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x88, 0x01, 0x01, 0x12, 0x4c, 0x0a, 0x14, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x46, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x32, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x5f, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x06, 0x20, 
0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x32, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x38, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x32, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x12, 0x4a, 0x0a, 0x13, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x12, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x09, 0x20, 0x03, 0x28, 0x09, 0x52, 0x11, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x42, 0x0a, 0x14, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 
0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x9f, 0x01, 0x0a, 0x04, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x4f, 0x56, 0x45, 0x52, 0x57, 0x52, 0x49, 0x54, 0x45, 0x10, 0x02, 0x12, 0x1d, 0x0a, 0x19, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x4f, 0x56, 0x45, 0x52, 0x57, 0x52, 0x49, 0x54, 0x45, 0x5f, 0x50, 0x41, 0x52, 0x54, 0x49, 0x54, 0x49, 0x4f, 0x4e, 0x53, 0x10, 0x03, 0x12, 0x0f, 0x0a, 0x0b, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x41, 0x50, 0x50, 0x45, 0x4e, 0x44, 0x10, 0x04, 0x12, 0x10, 0x0a, 0x0c, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x52, 0x45, 0x50, 0x4c, 0x41, 0x43, 0x45, 0x10, 0x05, 0x12, 0x1a, 0x0a, 0x16, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x5f, 0x4f, 0x52, 0x5f, 0x52, 0x45, 0x50, 0x4c, 0x41, 0x43, 0x45, 0x10, 0x06, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x22, 0xd8, 0x06, 0x0a, 0x19, 0x57, 0x72, 0x69, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x4f, 0x0a, 0x07, 0x6f, 0x70, 0x74, 
0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3a, 0x0a, 0x19, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x17, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x18, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x16, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x54, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x25, 0x0a, 0x0d, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x6f, 0x77, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0c, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x6f, 0x77, 0x12, 0x14, 0x0a, 0x04, 0x6f, 0x6e, 0x63, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x04, 0x6f, 0x6e, 0x63, 0x65, 0x12, 0x46, 0x0a, 0x1e, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x6f, 0x75, 0x73, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x1c, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x6f, 0x75, 0x73, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x75, 
0x74, 0x70, 0x75, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x71, 0x75, 0x65, 0x72, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x4e, 0x0a, 0x0e, 0x66, 0x6f, 0x72, 0x65, 0x61, 0x63, 0x68, 0x5f, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x46, 0x6f, 0x72, 0x65, 0x61, 0x63, 0x68, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x66, 0x6f, 0x72, 0x65, 0x61, 0x63, 0x68, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x12, 0x4c, 0x0a, 0x0d, 0x66, 0x6f, 0x72, 0x65, 0x61, 0x63, 0x68, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x46, 0x6f, 0x72, 0x65, 0x61, 0x63, 0x68, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x66, 0x6f, 0x72, 0x65, 0x61, 0x63, 0x68, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x36, 0x0a, 0x17, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x0f, 0x20, 0x03, 0x28, 0x09, 0x52, 0x15, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 
0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x42, 0x12, 0x0a, 0x10, 0x73, 0x69, 0x6e, 0x6b, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xb3, 0x01, 0x0a, 0x18, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x46, 0x6f, 0x72, 0x65, 0x61, 0x63, 0x68, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, 0x0a, 0x0f, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x44, 0x46, 0x48, 0x00, 0x52, 0x0e, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x46, 0x0a, 0x0e, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x55, 0x44, 0x46, 0x48, 0x00, 0x52, 0x0d, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x0a, 0x0a, 0x08, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xd4, 0x01, 0x0a, 0x1f, 0x57, 0x72, 0x69, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x42, 0x0a, 0x08, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 
0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x52, 0x07, 0x71, 0x75, 0x65, 0x72, 0x79, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3c, 0x0a, 0x18, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x15, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4a, 0x73, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x1b, 0x0a, 0x19, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0x41, 0x0a, 0x18, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, 0xf8, 0x04, 0x0a, 0x15, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x42, 0x0a, 0x08, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x52, 0x07, 0x71, 0x75, 0x65, 0x72, 0x79, 0x49, 0x64, 0x12, 0x18, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 
0x74, 0x75, 0x73, 0x12, 0x25, 0x0a, 0x0d, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0c, 0x6c, 0x61, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x29, 0x0a, 0x0f, 0x72, 0x65, 0x63, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0e, 0x72, 0x65, 0x63, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x14, 0x0a, 0x04, 0x73, 0x74, 0x6f, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x04, 0x73, 0x74, 0x6f, 0x70, 0x12, 0x34, 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x61, 0x6c, 0x6c, 0x5f, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x41, 0x6c, 0x6c, 0x41, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x4f, 0x0a, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x12, 0x1e, 0x0a, 0x09, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x6b, 0x0a, 0x11, 0x61, 0x77, 0x61, 0x69, 0x74, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 
0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x41, 0x77, 0x61, 0x69, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x10, 0x61, 0x77, 0x61, 0x69, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x2c, 0x0a, 0x0e, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x1a, 0x4c, 0x0a, 0x17, 0x41, 0x77, 0x61, 0x69, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x22, 0x0a, 0x0a, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x6d, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x6d, 0x73, 0x42, 0x09, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x22, 0xf5, 0x08, 0x0a, 0x1b, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x42, 0x0a, 0x08, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x52, 0x07, 0x71, 0x75, 0x65, 0x72, 0x79, 0x49, 0x64, 0x12, 0x51, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 
0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x6a, 0x0a, 0x0f, 0x72, 0x65, 0x63, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x52, 0x65, 0x63, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x0e, 0x72, 0x65, 0x63, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x54, 0x0a, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x07, 0x65, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x12, 0x5a, 0x0a, 0x09, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x45, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x70, 0x0a, 0x11, 0x61, 0x77, 0x61, 0x69, 0x74, 0x5f, 0x74, 0x65, 
0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x41, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x41, 0x77, 0x61, 0x69, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x10, 0x61, 0x77, 0x61, 0x69, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xaa, 0x01, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x69, 0x73, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x69, 0x73, 0x44, 0x61, 0x74, 0x61, 0x41, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x69, 0x73, 0x5f, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x69, 0x73, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x41, 0x63, 0x74, 0x69, 0x76, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x73, 0x41, 0x63, 0x74, 0x69, 0x76, 0x65, 0x1a, 0x48, 0x0a, 0x14, 0x52, 0x65, 0x63, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x30, 0x0a, 0x14, 0x72, 0x65, 0x63, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x72, 0x65, 
0x63, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x4a, 0x73, 0x6f, 0x6e, 0x1a, 0x27, 0x0a, 0x0d, 0x45, 0x78, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0xc5, 0x01, 0x0a, 0x0f, 0x45, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x30, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x10, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x1a, 0x38, 0x0a, 0x16, 0x41, 0x77, 0x61, 0x69, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x42, 0x0d, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xbd, 0x06, 
0x0a, 0x1c, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x18, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x12, 0x1d, 0x0a, 0x09, 0x67, 0x65, 0x74, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x67, 0x65, 0x74, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x7c, 0x0a, 0x15, 0x61, 0x77, 0x61, 0x69, 0x74, 0x5f, 0x61, 0x6e, 0x79, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x46, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x41, 0x77, 0x61, 0x69, 0x74, 0x41, 0x6e, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x13, 0x61, 0x77, 0x61, 0x69, 0x74, 0x41, 0x6e, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2b, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x65, 0x74, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x65, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x12, 0x6e, 0x0a, 0x0c, 0x61, 0x64, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x49, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 
0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x0b, 0x61, 0x64, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x12, 0x74, 0x0a, 0x0f, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x49, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x48, 0x00, 0x52, 0x0e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x12, 0x27, 0x0a, 0x0e, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0d, 0x6c, 0x69, 0x73, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x73, 0x1a, 0x4f, 0x0a, 0x1a, 0x41, 0x77, 0x61, 0x69, 0x74, 0x41, 0x6e, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x22, 0x0a, 0x0a, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x6d, 0x73, 0x1a, 0xcd, 0x01, 0x0a, 0x1d, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x29, 0x0a, 0x10, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 
0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x55, 0x0a, 0x17, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x44, 0x46, 0x48, 0x00, 0x52, 0x15, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x88, 0x01, 0x01, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x42, 0x1a, 0x0a, 0x18, 0x5f, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x42, 0x09, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x22, 0xb4, 0x08, 0x0a, 0x22, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x58, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x76, 0x65, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x12, 0x60, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x48, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 
0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, 0x81, 0x01, 0x0a, 0x15, 0x61, 0x77, 0x61, 0x69, 0x74, 0x5f, 0x61, 0x6e, 0x79, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x4b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x41, 0x77, 0x61, 0x69, 0x74, 0x41, 0x6e, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x13, 0x61, 0x77, 0x61, 0x69, 0x74, 0x41, 0x6e, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2b, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x65, 0x74, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x65, 0x74, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x12, 0x23, 0x0a, 0x0c, 0x61, 0x64, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0b, 0x61, 0x64, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x12, 0x29, 0x0a, 0x0f, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x12, 0x7b, 0x0a, 0x0e, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x73, 0x18, 
0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x52, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x6c, 0x69, 0x73, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x73, 0x1a, 0x7f, 0x0a, 0x0c, 0x41, 0x63, 0x74, 0x69, 0x76, 0x65, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x6f, 0x0a, 0x0e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x48, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x0d, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x51, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x1a, 0x73, 0x0a, 0x16, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x37, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x52, 0x02, 0x69, 0x64, 0x12, 0x17, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 
0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x1a, 0x3b, 0x0a, 0x19, 0x41, 0x77, 0x61, 0x69, 0x74, 0x41, 0x6e, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x1a, 0x4b, 0x0a, 0x1e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x1a, 0x45, 0x0a, 0x20, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x73, 0x42, 0x0d, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xad, 0x01, 0x0a, 0x20, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x42, 0x75, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x3b, 0x0a, 0x19, 0x61, 0x64, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x62, 0x75, 0x73, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x16, 0x61, 0x64, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x42, 0x75, 0x73, 
0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x12, 0x41, 0x0a, 0x1c, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x62, 0x75, 0x73, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x19, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x42, 0x75, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x42, 0x09, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x22, 0x83, 0x01, 0x0a, 0x1b, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4a, 0x73, 0x6f, 0x6e, 0x12, 0x45, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x22, 0xcc, 0x01, 0x0a, 0x22, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x42, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x42, 0x0a, 0x1b, 0x6c, 0x69, 
0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x62, 0x75, 0x73, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x18, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x42, 0x75, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x41, 0x64, 0x64, 0x65, 0x64, 0x88, 0x01, 0x01, 0x42, 0x1e, 0x0a, 0x1c, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x62, 0x75, 0x73, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x65, 0x64, 0x22, 0x15, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x22, 0xd4, 0x01, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x55, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, 0x60, 0x0a, 0x0e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x38, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 
0x58, 0x0a, 0x1c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x38, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x43, 0x0a, 0x22, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64, 0x22, 0x64, 0x0a, 0x21, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x3f, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xcd, 0x01, 0x0a, 0x11, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x33, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 
0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x61, 0x67, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x65, 0x61, 0x67, 0x65, 0x72, 0x12, 0x45, 0x0a, 0x0d, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x22, 0xe8, 0x03, 0x0a, 0x15, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x49, 0x6e, 0x74, 0x6f, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x2a, 0x0a, 0x11, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x43, 0x0a, 0x11, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0x12, 0x42, 0x0a, 0x0f, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x6d, 0x65, 0x72, 0x67, 0x65, 
0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x0d, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x49, 0x0a, 0x13, 0x6e, 0x6f, 0x74, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x11, 0x6e, 0x6f, 0x74, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x5b, 0x0a, 0x1d, 0x6e, 0x6f, 0x74, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x19, 0x6e, 0x6f, 0x74, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x64, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x65, 0x76, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x77, 0x69, 0x74, 0x68, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x45, 0x76, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xd4, 0x01, 0x0a, 0x16, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x18, 0x01, 0x20, 
0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x4c, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x2a, 0x85, 0x01, 0x0a, 0x17, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x1a, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x50, 0x52, 0x4f, 0x47, 0x52, 0x45, 0x53, 0x53, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x18, 0x0a, 0x14, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x50, 0x52, 0x4f, 0x47, 0x52, 0x45, 0x53, 0x53, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x10, 0x01, 0x12, 0x1a, 0x0a, 0x16, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x54, 0x45, 0x52, 0x4d, 0x49, 0x4e, 0x41, 0x54, 0x45, 0x44, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x14, 0x0a, 0x10, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x49, 0x44, 0x4c, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x10, 0x03, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 
0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_commands_proto_rawDescOnce sync.Once file_spark_connect_commands_proto_rawDescData = file_spark_connect_commands_proto_rawDesc ) func file_spark_connect_commands_proto_rawDescGZIP() []byte { file_spark_connect_commands_proto_rawDescOnce.Do(func() { file_spark_connect_commands_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_commands_proto_rawDescData) }) return file_spark_connect_commands_proto_rawDescData } var file_spark_connect_commands_proto_enumTypes = make([]protoimpl.EnumInfo, 4) var file_spark_connect_commands_proto_msgTypes = make([]protoimpl.MessageInfo, 48) var file_spark_connect_commands_proto_goTypes = []interface{}{ (StreamingQueryEventType)(0), // 0: spark.connect.StreamingQueryEventType (WriteOperation_SaveMode)(0), // 1: spark.connect.WriteOperation.SaveMode (WriteOperation_SaveTable_TableSaveMethod)(0), // 2: spark.connect.WriteOperation.SaveTable.TableSaveMethod (WriteOperationV2_Mode)(0), // 3: spark.connect.WriteOperationV2.Mode (*Command)(nil), // 4: spark.connect.Command (*SqlCommand)(nil), // 5: spark.connect.SqlCommand (*CreateDataFrameViewCommand)(nil), // 6: spark.connect.CreateDataFrameViewCommand (*WriteOperation)(nil), // 7: spark.connect.WriteOperation (*WriteOperationV2)(nil), // 8: spark.connect.WriteOperationV2 (*WriteStreamOperationStart)(nil), // 9: spark.connect.WriteStreamOperationStart (*StreamingForeachFunction)(nil), // 10: spark.connect.StreamingForeachFunction (*WriteStreamOperationStartResult)(nil), // 11: spark.connect.WriteStreamOperationStartResult (*StreamingQueryInstanceId)(nil), // 12: spark.connect.StreamingQueryInstanceId (*StreamingQueryCommand)(nil), // 13: spark.connect.StreamingQueryCommand 
(*StreamingQueryCommandResult)(nil), // 14: spark.connect.StreamingQueryCommandResult (*StreamingQueryManagerCommand)(nil), // 15: spark.connect.StreamingQueryManagerCommand (*StreamingQueryManagerCommandResult)(nil), // 16: spark.connect.StreamingQueryManagerCommandResult (*StreamingQueryListenerBusCommand)(nil), // 17: spark.connect.StreamingQueryListenerBusCommand (*StreamingQueryListenerEvent)(nil), // 18: spark.connect.StreamingQueryListenerEvent (*StreamingQueryListenerEventsResult)(nil), // 19: spark.connect.StreamingQueryListenerEventsResult (*GetResourcesCommand)(nil), // 20: spark.connect.GetResourcesCommand (*GetResourcesCommandResult)(nil), // 21: spark.connect.GetResourcesCommandResult (*CreateResourceProfileCommand)(nil), // 22: spark.connect.CreateResourceProfileCommand (*CreateResourceProfileCommandResult)(nil), // 23: spark.connect.CreateResourceProfileCommandResult (*RemoveCachedRemoteRelationCommand)(nil), // 24: spark.connect.RemoveCachedRemoteRelationCommand (*CheckpointCommand)(nil), // 25: spark.connect.CheckpointCommand (*MergeIntoTableCommand)(nil), // 26: spark.connect.MergeIntoTableCommand (*ExecuteExternalCommand)(nil), // 27: spark.connect.ExecuteExternalCommand nil, // 28: spark.connect.SqlCommand.ArgsEntry nil, // 29: spark.connect.SqlCommand.NamedArgumentsEntry nil, // 30: spark.connect.WriteOperation.OptionsEntry (*WriteOperation_SaveTable)(nil), // 31: spark.connect.WriteOperation.SaveTable (*WriteOperation_BucketBy)(nil), // 32: spark.connect.WriteOperation.BucketBy nil, // 33: spark.connect.WriteOperationV2.OptionsEntry nil, // 34: spark.connect.WriteOperationV2.TablePropertiesEntry nil, // 35: spark.connect.WriteStreamOperationStart.OptionsEntry (*StreamingQueryCommand_ExplainCommand)(nil), // 36: spark.connect.StreamingQueryCommand.ExplainCommand (*StreamingQueryCommand_AwaitTerminationCommand)(nil), // 37: spark.connect.StreamingQueryCommand.AwaitTerminationCommand (*StreamingQueryCommandResult_StatusResult)(nil), // 38: 
spark.connect.StreamingQueryCommandResult.StatusResult (*StreamingQueryCommandResult_RecentProgressResult)(nil), // 39: spark.connect.StreamingQueryCommandResult.RecentProgressResult (*StreamingQueryCommandResult_ExplainResult)(nil), // 40: spark.connect.StreamingQueryCommandResult.ExplainResult (*StreamingQueryCommandResult_ExceptionResult)(nil), // 41: spark.connect.StreamingQueryCommandResult.ExceptionResult (*StreamingQueryCommandResult_AwaitTerminationResult)(nil), // 42: spark.connect.StreamingQueryCommandResult.AwaitTerminationResult (*StreamingQueryManagerCommand_AwaitAnyTerminationCommand)(nil), // 43: spark.connect.StreamingQueryManagerCommand.AwaitAnyTerminationCommand (*StreamingQueryManagerCommand_StreamingQueryListenerCommand)(nil), // 44: spark.connect.StreamingQueryManagerCommand.StreamingQueryListenerCommand (*StreamingQueryManagerCommandResult_ActiveResult)(nil), // 45: spark.connect.StreamingQueryManagerCommandResult.ActiveResult (*StreamingQueryManagerCommandResult_StreamingQueryInstance)(nil), // 46: spark.connect.StreamingQueryManagerCommandResult.StreamingQueryInstance (*StreamingQueryManagerCommandResult_AwaitAnyTerminationResult)(nil), // 47: spark.connect.StreamingQueryManagerCommandResult.AwaitAnyTerminationResult (*StreamingQueryManagerCommandResult_StreamingQueryListenerInstance)(nil), // 48: spark.connect.StreamingQueryManagerCommandResult.StreamingQueryListenerInstance (*StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult)(nil), // 49: spark.connect.StreamingQueryManagerCommandResult.ListStreamingQueryListenerResult nil, // 50: spark.connect.GetResourcesCommandResult.ResourcesEntry nil, // 51: spark.connect.ExecuteExternalCommand.OptionsEntry (*CommonInlineUserDefinedFunction)(nil), // 52: spark.connect.CommonInlineUserDefinedFunction (*CommonInlineUserDefinedTableFunction)(nil), // 53: spark.connect.CommonInlineUserDefinedTableFunction (*CommonInlineUserDefinedDataSource)(nil), // 54: 
spark.connect.CommonInlineUserDefinedDataSource (*MlCommand)(nil), // 55: spark.connect.MlCommand (*PipelineCommand)(nil), // 56: spark.connect.PipelineCommand (*anypb.Any)(nil), // 57: google.protobuf.Any (*Expression_Literal)(nil), // 58: spark.connect.Expression.Literal (*Expression)(nil), // 59: spark.connect.Expression (*Relation)(nil), // 60: spark.connect.Relation (*PythonUDF)(nil), // 61: spark.connect.PythonUDF (*ScalarScalaUDF)(nil), // 62: spark.connect.ScalarScalaUDF (*ResourceProfile)(nil), // 63: spark.connect.ResourceProfile (*CachedRemoteRelation)(nil), // 64: spark.connect.CachedRemoteRelation (*StorageLevel)(nil), // 65: spark.connect.StorageLevel (*ResourceInformation)(nil), // 66: spark.connect.ResourceInformation } var file_spark_connect_commands_proto_depIdxs = []int32{ 52, // 0: spark.connect.Command.register_function:type_name -> spark.connect.CommonInlineUserDefinedFunction 7, // 1: spark.connect.Command.write_operation:type_name -> spark.connect.WriteOperation 6, // 2: spark.connect.Command.create_dataframe_view:type_name -> spark.connect.CreateDataFrameViewCommand 8, // 3: spark.connect.Command.write_operation_v2:type_name -> spark.connect.WriteOperationV2 5, // 4: spark.connect.Command.sql_command:type_name -> spark.connect.SqlCommand 9, // 5: spark.connect.Command.write_stream_operation_start:type_name -> spark.connect.WriteStreamOperationStart 13, // 6: spark.connect.Command.streaming_query_command:type_name -> spark.connect.StreamingQueryCommand 20, // 7: spark.connect.Command.get_resources_command:type_name -> spark.connect.GetResourcesCommand 15, // 8: spark.connect.Command.streaming_query_manager_command:type_name -> spark.connect.StreamingQueryManagerCommand 53, // 9: spark.connect.Command.register_table_function:type_name -> spark.connect.CommonInlineUserDefinedTableFunction 17, // 10: spark.connect.Command.streaming_query_listener_bus_command:type_name -> spark.connect.StreamingQueryListenerBusCommand 54, // 11: 
spark.connect.Command.register_data_source:type_name -> spark.connect.CommonInlineUserDefinedDataSource 22, // 12: spark.connect.Command.create_resource_profile_command:type_name -> spark.connect.CreateResourceProfileCommand 25, // 13: spark.connect.Command.checkpoint_command:type_name -> spark.connect.CheckpointCommand 24, // 14: spark.connect.Command.remove_cached_remote_relation_command:type_name -> spark.connect.RemoveCachedRemoteRelationCommand 26, // 15: spark.connect.Command.merge_into_table_command:type_name -> spark.connect.MergeIntoTableCommand 55, // 16: spark.connect.Command.ml_command:type_name -> spark.connect.MlCommand 27, // 17: spark.connect.Command.execute_external_command:type_name -> spark.connect.ExecuteExternalCommand 56, // 18: spark.connect.Command.pipeline_command:type_name -> spark.connect.PipelineCommand 57, // 19: spark.connect.Command.extension:type_name -> google.protobuf.Any 28, // 20: spark.connect.SqlCommand.args:type_name -> spark.connect.SqlCommand.ArgsEntry 58, // 21: spark.connect.SqlCommand.pos_args:type_name -> spark.connect.Expression.Literal 29, // 22: spark.connect.SqlCommand.named_arguments:type_name -> spark.connect.SqlCommand.NamedArgumentsEntry 59, // 23: spark.connect.SqlCommand.pos_arguments:type_name -> spark.connect.Expression 60, // 24: spark.connect.SqlCommand.input:type_name -> spark.connect.Relation 60, // 25: spark.connect.CreateDataFrameViewCommand.input:type_name -> spark.connect.Relation 60, // 26: spark.connect.WriteOperation.input:type_name -> spark.connect.Relation 31, // 27: spark.connect.WriteOperation.table:type_name -> spark.connect.WriteOperation.SaveTable 1, // 28: spark.connect.WriteOperation.mode:type_name -> spark.connect.WriteOperation.SaveMode 32, // 29: spark.connect.WriteOperation.bucket_by:type_name -> spark.connect.WriteOperation.BucketBy 30, // 30: spark.connect.WriteOperation.options:type_name -> spark.connect.WriteOperation.OptionsEntry 60, // 31: 
spark.connect.WriteOperationV2.input:type_name -> spark.connect.Relation 59, // 32: spark.connect.WriteOperationV2.partitioning_columns:type_name -> spark.connect.Expression 33, // 33: spark.connect.WriteOperationV2.options:type_name -> spark.connect.WriteOperationV2.OptionsEntry 34, // 34: spark.connect.WriteOperationV2.table_properties:type_name -> spark.connect.WriteOperationV2.TablePropertiesEntry 3, // 35: spark.connect.WriteOperationV2.mode:type_name -> spark.connect.WriteOperationV2.Mode 59, // 36: spark.connect.WriteOperationV2.overwrite_condition:type_name -> spark.connect.Expression 60, // 37: spark.connect.WriteStreamOperationStart.input:type_name -> spark.connect.Relation 35, // 38: spark.connect.WriteStreamOperationStart.options:type_name -> spark.connect.WriteStreamOperationStart.OptionsEntry 10, // 39: spark.connect.WriteStreamOperationStart.foreach_writer:type_name -> spark.connect.StreamingForeachFunction 10, // 40: spark.connect.WriteStreamOperationStart.foreach_batch:type_name -> spark.connect.StreamingForeachFunction 61, // 41: spark.connect.StreamingForeachFunction.python_function:type_name -> spark.connect.PythonUDF 62, // 42: spark.connect.StreamingForeachFunction.scala_function:type_name -> spark.connect.ScalarScalaUDF 12, // 43: spark.connect.WriteStreamOperationStartResult.query_id:type_name -> spark.connect.StreamingQueryInstanceId 12, // 44: spark.connect.StreamingQueryCommand.query_id:type_name -> spark.connect.StreamingQueryInstanceId 36, // 45: spark.connect.StreamingQueryCommand.explain:type_name -> spark.connect.StreamingQueryCommand.ExplainCommand 37, // 46: spark.connect.StreamingQueryCommand.await_termination:type_name -> spark.connect.StreamingQueryCommand.AwaitTerminationCommand 12, // 47: spark.connect.StreamingQueryCommandResult.query_id:type_name -> spark.connect.StreamingQueryInstanceId 38, // 48: spark.connect.StreamingQueryCommandResult.status:type_name -> spark.connect.StreamingQueryCommandResult.StatusResult 39, // 49: 
spark.connect.StreamingQueryCommandResult.recent_progress:type_name -> spark.connect.StreamingQueryCommandResult.RecentProgressResult 40, // 50: spark.connect.StreamingQueryCommandResult.explain:type_name -> spark.connect.StreamingQueryCommandResult.ExplainResult 41, // 51: spark.connect.StreamingQueryCommandResult.exception:type_name -> spark.connect.StreamingQueryCommandResult.ExceptionResult 42, // 52: spark.connect.StreamingQueryCommandResult.await_termination:type_name -> spark.connect.StreamingQueryCommandResult.AwaitTerminationResult 43, // 53: spark.connect.StreamingQueryManagerCommand.await_any_termination:type_name -> spark.connect.StreamingQueryManagerCommand.AwaitAnyTerminationCommand 44, // 54: spark.connect.StreamingQueryManagerCommand.add_listener:type_name -> spark.connect.StreamingQueryManagerCommand.StreamingQueryListenerCommand 44, // 55: spark.connect.StreamingQueryManagerCommand.remove_listener:type_name -> spark.connect.StreamingQueryManagerCommand.StreamingQueryListenerCommand 45, // 56: spark.connect.StreamingQueryManagerCommandResult.active:type_name -> spark.connect.StreamingQueryManagerCommandResult.ActiveResult 46, // 57: spark.connect.StreamingQueryManagerCommandResult.query:type_name -> spark.connect.StreamingQueryManagerCommandResult.StreamingQueryInstance 47, // 58: spark.connect.StreamingQueryManagerCommandResult.await_any_termination:type_name -> spark.connect.StreamingQueryManagerCommandResult.AwaitAnyTerminationResult 49, // 59: spark.connect.StreamingQueryManagerCommandResult.list_listeners:type_name -> spark.connect.StreamingQueryManagerCommandResult.ListStreamingQueryListenerResult 0, // 60: spark.connect.StreamingQueryListenerEvent.event_type:type_name -> spark.connect.StreamingQueryEventType 18, // 61: spark.connect.StreamingQueryListenerEventsResult.events:type_name -> spark.connect.StreamingQueryListenerEvent 50, // 62: spark.connect.GetResourcesCommandResult.resources:type_name -> 
spark.connect.GetResourcesCommandResult.ResourcesEntry 63, // 63: spark.connect.CreateResourceProfileCommand.profile:type_name -> spark.connect.ResourceProfile 64, // 64: spark.connect.RemoveCachedRemoteRelationCommand.relation:type_name -> spark.connect.CachedRemoteRelation 60, // 65: spark.connect.CheckpointCommand.relation:type_name -> spark.connect.Relation 65, // 66: spark.connect.CheckpointCommand.storage_level:type_name -> spark.connect.StorageLevel 60, // 67: spark.connect.MergeIntoTableCommand.source_table_plan:type_name -> spark.connect.Relation 59, // 68: spark.connect.MergeIntoTableCommand.merge_condition:type_name -> spark.connect.Expression 59, // 69: spark.connect.MergeIntoTableCommand.match_actions:type_name -> spark.connect.Expression 59, // 70: spark.connect.MergeIntoTableCommand.not_matched_actions:type_name -> spark.connect.Expression 59, // 71: spark.connect.MergeIntoTableCommand.not_matched_by_source_actions:type_name -> spark.connect.Expression 51, // 72: spark.connect.ExecuteExternalCommand.options:type_name -> spark.connect.ExecuteExternalCommand.OptionsEntry 58, // 73: spark.connect.SqlCommand.ArgsEntry.value:type_name -> spark.connect.Expression.Literal 59, // 74: spark.connect.SqlCommand.NamedArgumentsEntry.value:type_name -> spark.connect.Expression 2, // 75: spark.connect.WriteOperation.SaveTable.save_method:type_name -> spark.connect.WriteOperation.SaveTable.TableSaveMethod 61, // 76: spark.connect.StreamingQueryManagerCommand.StreamingQueryListenerCommand.python_listener_payload:type_name -> spark.connect.PythonUDF 46, // 77: spark.connect.StreamingQueryManagerCommandResult.ActiveResult.active_queries:type_name -> spark.connect.StreamingQueryManagerCommandResult.StreamingQueryInstance 12, // 78: spark.connect.StreamingQueryManagerCommandResult.StreamingQueryInstance.id:type_name -> spark.connect.StreamingQueryInstanceId 66, // 79: spark.connect.GetResourcesCommandResult.ResourcesEntry.value:type_name -> 
spark.connect.ResourceInformation 80, // [80:80] is the sub-list for method output_type 80, // [80:80] is the sub-list for method input_type 80, // [80:80] is the sub-list for extension type_name 80, // [80:80] is the sub-list for extension extendee 0, // [0:80] is the sub-list for field type_name } func init() { file_spark_connect_commands_proto_init() } func file_spark_connect_commands_proto_init() { if File_spark_connect_commands_proto != nil { return } file_spark_connect_common_proto_init() file_spark_connect_expressions_proto_init() file_spark_connect_relations_proto_init() file_spark_connect_ml_proto_init() file_spark_connect_pipelines_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_commands_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Command); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SqlCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CreateDataFrameViewCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WriteOperation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WriteOperationV2); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[5].Exporter = func(v interface{}, i int) 
interface{} { switch v := v.(*WriteStreamOperationStart); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingForeachFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WriteStreamOperationStartResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryInstanceId); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommandResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommandResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_commands_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryListenerBusCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryListenerEvent); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryListenerEventsResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetResourcesCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetResourcesCommandResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CreateResourceProfileCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CreateResourceProfileCommandResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RemoveCachedRemoteRelationCommand); i { case 0: return &v.state case 1: 
return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CheckpointCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MergeIntoTableCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecuteExternalCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WriteOperation_SaveTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WriteOperation_BucketBy); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommand_ExplainCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommand_AwaitTerminationCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { switch v := 
v.(*StreamingQueryCommandResult_StatusResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommandResult_RecentProgressResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommandResult_ExplainResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommandResult_ExceptionResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryCommandResult_AwaitTerminationResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommand_AwaitAnyTerminationCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommand_StreamingQueryListenerCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommandResult_ActiveResult); 
i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommandResult_StreamingQueryInstance); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommandResult_AwaitAnyTerminationResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommandResult_StreamingQueryListenerInstance); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_commands_proto_msgTypes[45].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StreamingQueryManagerCommandResult_ListStreamingQueryListenerResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_commands_proto_msgTypes[0].OneofWrappers = []interface{}{ (*Command_RegisterFunction)(nil), (*Command_WriteOperation)(nil), (*Command_CreateDataframeView)(nil), (*Command_WriteOperationV2)(nil), (*Command_SqlCommand)(nil), (*Command_WriteStreamOperationStart)(nil), (*Command_StreamingQueryCommand)(nil), (*Command_GetResourcesCommand)(nil), (*Command_StreamingQueryManagerCommand)(nil), (*Command_RegisterTableFunction)(nil), (*Command_StreamingQueryListenerBusCommand)(nil), (*Command_RegisterDataSource)(nil), (*Command_CreateResourceProfileCommand)(nil), (*Command_CheckpointCommand)(nil), (*Command_RemoveCachedRemoteRelationCommand)(nil), (*Command_MergeIntoTableCommand)(nil), 
(*Command_MlCommand)(nil), (*Command_ExecuteExternalCommand)(nil), (*Command_PipelineCommand)(nil), (*Command_Extension)(nil), } file_spark_connect_commands_proto_msgTypes[3].OneofWrappers = []interface{}{ (*WriteOperation_Path)(nil), (*WriteOperation_Table)(nil), } file_spark_connect_commands_proto_msgTypes[4].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[5].OneofWrappers = []interface{}{ (*WriteStreamOperationStart_ProcessingTimeInterval)(nil), (*WriteStreamOperationStart_AvailableNow)(nil), (*WriteStreamOperationStart_Once)(nil), (*WriteStreamOperationStart_ContinuousCheckpointInterval)(nil), (*WriteStreamOperationStart_Path)(nil), (*WriteStreamOperationStart_TableName)(nil), } file_spark_connect_commands_proto_msgTypes[6].OneofWrappers = []interface{}{ (*StreamingForeachFunction_PythonFunction)(nil), (*StreamingForeachFunction_ScalaFunction)(nil), } file_spark_connect_commands_proto_msgTypes[7].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[9].OneofWrappers = []interface{}{ (*StreamingQueryCommand_Status)(nil), (*StreamingQueryCommand_LastProgress)(nil), (*StreamingQueryCommand_RecentProgress)(nil), (*StreamingQueryCommand_Stop)(nil), (*StreamingQueryCommand_ProcessAllAvailable)(nil), (*StreamingQueryCommand_Explain)(nil), (*StreamingQueryCommand_Exception)(nil), (*StreamingQueryCommand_AwaitTermination)(nil), } file_spark_connect_commands_proto_msgTypes[10].OneofWrappers = []interface{}{ (*StreamingQueryCommandResult_Status)(nil), (*StreamingQueryCommandResult_RecentProgress)(nil), (*StreamingQueryCommandResult_Explain)(nil), (*StreamingQueryCommandResult_Exception)(nil), (*StreamingQueryCommandResult_AwaitTermination)(nil), } file_spark_connect_commands_proto_msgTypes[11].OneofWrappers = []interface{}{ (*StreamingQueryManagerCommand_Active)(nil), (*StreamingQueryManagerCommand_GetQuery)(nil), (*StreamingQueryManagerCommand_AwaitAnyTermination)(nil), (*StreamingQueryManagerCommand_ResetTerminated)(nil), 
(*StreamingQueryManagerCommand_AddListener)(nil), (*StreamingQueryManagerCommand_RemoveListener)(nil), (*StreamingQueryManagerCommand_ListListeners)(nil), } file_spark_connect_commands_proto_msgTypes[12].OneofWrappers = []interface{}{ (*StreamingQueryManagerCommandResult_Active)(nil), (*StreamingQueryManagerCommandResult_Query)(nil), (*StreamingQueryManagerCommandResult_AwaitAnyTermination)(nil), (*StreamingQueryManagerCommandResult_ResetTerminated)(nil), (*StreamingQueryManagerCommandResult_AddListener)(nil), (*StreamingQueryManagerCommandResult_RemoveListener)(nil), (*StreamingQueryManagerCommandResult_ListListeners)(nil), } file_spark_connect_commands_proto_msgTypes[13].OneofWrappers = []interface{}{ (*StreamingQueryListenerBusCommand_AddListenerBusListener)(nil), (*StreamingQueryListenerBusCommand_RemoveListenerBusListener)(nil), } file_spark_connect_commands_proto_msgTypes[15].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[21].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[33].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[37].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[39].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[40].OneofWrappers = []interface{}{} file_spark_connect_commands_proto_msgTypes[42].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_commands_proto_rawDesc, NumEnums: 4, NumMessages: 48, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_commands_proto_goTypes, DependencyIndexes: file_spark_connect_commands_proto_depIdxs, EnumInfos: file_spark_connect_commands_proto_enumTypes, MessageInfos: file_spark_connect_commands_proto_msgTypes, }.Build() File_spark_connect_commands_proto = out.File file_spark_connect_commands_proto_rawDesc = nil 
file_spark_connect_commands_proto_goTypes = nil file_spark_connect_commands_proto_depIdxs = nil } ================================================ FILE: internal/generated/common.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/common.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // StorageLevel for persisting Datasets/Tables. type StorageLevel struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Whether the cache should use disk or not. UseDisk bool `protobuf:"varint,1,opt,name=use_disk,json=useDisk,proto3" json:"use_disk,omitempty"` // (Required) Whether the cache should use memory or not. 
UseMemory bool `protobuf:"varint,2,opt,name=use_memory,json=useMemory,proto3" json:"use_memory,omitempty"` // (Required) Whether the cache should use off-heap or not. UseOffHeap bool `protobuf:"varint,3,opt,name=use_off_heap,json=useOffHeap,proto3" json:"use_off_heap,omitempty"` // (Required) Whether the cached data is deserialized or not. Deserialized bool `protobuf:"varint,4,opt,name=deserialized,proto3" json:"deserialized,omitempty"` // (Required) The number of replicas. Replication int32 `protobuf:"varint,5,opt,name=replication,proto3" json:"replication,omitempty"` } func (x *StorageLevel) Reset() { *x = StorageLevel{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StorageLevel) String() string { return protoimpl.X.MessageStringOf(x) } func (*StorageLevel) ProtoMessage() {} func (x *StorageLevel) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StorageLevel.ProtoReflect.Descriptor instead. func (*StorageLevel) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{0} } func (x *StorageLevel) GetUseDisk() bool { if x != nil { return x.UseDisk } return false } func (x *StorageLevel) GetUseMemory() bool { if x != nil { return x.UseMemory } return false } func (x *StorageLevel) GetUseOffHeap() bool { if x != nil { return x.UseOffHeap } return false } func (x *StorageLevel) GetDeserialized() bool { if x != nil { return x.Deserialized } return false } func (x *StorageLevel) GetReplication() int32 { if x != nil { return x.Replication } return 0 } // ResourceInformation to hold information about a type of Resource. 
// The corresponding class is 'org.apache.spark.resource.ResourceInformation' type ResourceInformation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The name of the resource Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // (Required) An array of strings describing the addresses of the resource. Addresses []string `protobuf:"bytes,2,rep,name=addresses,proto3" json:"addresses,omitempty"` } func (x *ResourceInformation) Reset() { *x = ResourceInformation{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ResourceInformation) String() string { return protoimpl.X.MessageStringOf(x) } func (*ResourceInformation) ProtoMessage() {} func (x *ResourceInformation) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ResourceInformation.ProtoReflect.Descriptor instead. func (*ResourceInformation) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{1} } func (x *ResourceInformation) GetName() string { if x != nil { return x.Name } return "" } func (x *ResourceInformation) GetAddresses() []string { if x != nil { return x.Addresses } return nil } // An executor resource request. type ExecutorResourceRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) resource name. ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` // (Required) resource amount requesting. 
Amount int64 `protobuf:"varint,2,opt,name=amount,proto3" json:"amount,omitempty"` // Optional script used to discover the resources. DiscoveryScript *string `protobuf:"bytes,3,opt,name=discovery_script,json=discoveryScript,proto3,oneof" json:"discovery_script,omitempty"` // Optional vendor, required for some cluster managers. Vendor *string `protobuf:"bytes,4,opt,name=vendor,proto3,oneof" json:"vendor,omitempty"` } func (x *ExecutorResourceRequest) Reset() { *x = ExecutorResourceRequest{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExecutorResourceRequest) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExecutorResourceRequest) ProtoMessage() {} func (x *ExecutorResourceRequest) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExecutorResourceRequest.ProtoReflect.Descriptor instead. func (*ExecutorResourceRequest) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{2} } func (x *ExecutorResourceRequest) GetResourceName() string { if x != nil { return x.ResourceName } return "" } func (x *ExecutorResourceRequest) GetAmount() int64 { if x != nil { return x.Amount } return 0 } func (x *ExecutorResourceRequest) GetDiscoveryScript() string { if x != nil && x.DiscoveryScript != nil { return *x.DiscoveryScript } return "" } func (x *ExecutorResourceRequest) GetVendor() string { if x != nil && x.Vendor != nil { return *x.Vendor } return "" } // A task resource request. type TaskResourceRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) resource name. 
ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` // (Required) resource amount requesting as a double to support fractional // resource requests. Amount float64 `protobuf:"fixed64,2,opt,name=amount,proto3" json:"amount,omitempty"` } func (x *TaskResourceRequest) Reset() { *x = TaskResourceRequest{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *TaskResourceRequest) String() string { return protoimpl.X.MessageStringOf(x) } func (*TaskResourceRequest) ProtoMessage() {} func (x *TaskResourceRequest) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TaskResourceRequest.ProtoReflect.Descriptor instead. func (*TaskResourceRequest) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{3} } func (x *TaskResourceRequest) GetResourceName() string { if x != nil { return x.ResourceName } return "" } func (x *TaskResourceRequest) GetAmount() float64 { if x != nil { return x.Amount } return 0 } type ResourceProfile struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) Resource requests for executors. Mapped from the resource name // (e.g., cores, memory, CPU) to its specific request. ExecutorResources map[string]*ExecutorResourceRequest `protobuf:"bytes,1,rep,name=executor_resources,json=executorResources,proto3" json:"executor_resources,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (Optional) Resource requests for tasks. 
Mapped from the resource name // (e.g., cores, memory, CPU) to its specific request. TaskResources map[string]*TaskResourceRequest `protobuf:"bytes,2,rep,name=task_resources,json=taskResources,proto3" json:"task_resources,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *ResourceProfile) Reset() { *x = ResourceProfile{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ResourceProfile) String() string { return protoimpl.X.MessageStringOf(x) } func (*ResourceProfile) ProtoMessage() {} func (x *ResourceProfile) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ResourceProfile.ProtoReflect.Descriptor instead. func (*ResourceProfile) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{4} } func (x *ResourceProfile) GetExecutorResources() map[string]*ExecutorResourceRequest { if x != nil { return x.ExecutorResources } return nil } func (x *ResourceProfile) GetTaskResources() map[string]*TaskResourceRequest { if x != nil { return x.TaskResources } return nil } type Origin struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Indicate the origin type. 
// // Types that are assignable to Function: // // *Origin_PythonOrigin // *Origin_JvmOrigin Function isOrigin_Function `protobuf_oneof:"function"` } func (x *Origin) Reset() { *x = Origin{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Origin) String() string { return protoimpl.X.MessageStringOf(x) } func (*Origin) ProtoMessage() {} func (x *Origin) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Origin.ProtoReflect.Descriptor instead. func (*Origin) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{5} } func (m *Origin) GetFunction() isOrigin_Function { if m != nil { return m.Function } return nil } func (x *Origin) GetPythonOrigin() *PythonOrigin { if x, ok := x.GetFunction().(*Origin_PythonOrigin); ok { return x.PythonOrigin } return nil } func (x *Origin) GetJvmOrigin() *JvmOrigin { if x, ok := x.GetFunction().(*Origin_JvmOrigin); ok { return x.JvmOrigin } return nil } type isOrigin_Function interface { isOrigin_Function() } type Origin_PythonOrigin struct { PythonOrigin *PythonOrigin `protobuf:"bytes,1,opt,name=python_origin,json=pythonOrigin,proto3,oneof"` } type Origin_JvmOrigin struct { JvmOrigin *JvmOrigin `protobuf:"bytes,2,opt,name=jvm_origin,json=jvmOrigin,proto3,oneof"` } func (*Origin_PythonOrigin) isOrigin_Function() {} func (*Origin_JvmOrigin) isOrigin_Function() {} type PythonOrigin struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Name of the origin, for example, the name of the function Fragment string `protobuf:"bytes,1,opt,name=fragment,proto3" 
json:"fragment,omitempty"` // (Required) Callsite to show to end users, for example, stacktrace. CallSite string `protobuf:"bytes,2,opt,name=call_site,json=callSite,proto3" json:"call_site,omitempty"` } func (x *PythonOrigin) Reset() { *x = PythonOrigin{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *PythonOrigin) String() string { return protoimpl.X.MessageStringOf(x) } func (*PythonOrigin) ProtoMessage() {} func (x *PythonOrigin) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use PythonOrigin.ProtoReflect.Descriptor instead. func (*PythonOrigin) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{6} } func (x *PythonOrigin) GetFragment() string { if x != nil { return x.Fragment } return "" } func (x *PythonOrigin) GetCallSite() string { if x != nil { return x.CallSite } return "" } type JvmOrigin struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) Line number in the source file. Line *int32 `protobuf:"varint,1,opt,name=line,proto3,oneof" json:"line,omitempty"` // (Optional) Start position in the source file. StartPosition *int32 `protobuf:"varint,2,opt,name=start_position,json=startPosition,proto3,oneof" json:"start_position,omitempty"` // (Optional) Start index in the source file. StartIndex *int32 `protobuf:"varint,3,opt,name=start_index,json=startIndex,proto3,oneof" json:"start_index,omitempty"` // (Optional) Stop index in the source file. StopIndex *int32 `protobuf:"varint,4,opt,name=stop_index,json=stopIndex,proto3,oneof" json:"stop_index,omitempty"` // (Optional) SQL text. 
SqlText *string `protobuf:"bytes,5,opt,name=sql_text,json=sqlText,proto3,oneof" json:"sql_text,omitempty"` // (Optional) Object type. ObjectType *string `protobuf:"bytes,6,opt,name=object_type,json=objectType,proto3,oneof" json:"object_type,omitempty"` // (Optional) Object name. ObjectName *string `protobuf:"bytes,7,opt,name=object_name,json=objectName,proto3,oneof" json:"object_name,omitempty"` // (Optional) Stack trace. StackTrace []*StackTraceElement `protobuf:"bytes,8,rep,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` } func (x *JvmOrigin) Reset() { *x = JvmOrigin{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *JvmOrigin) String() string { return protoimpl.X.MessageStringOf(x) } func (*JvmOrigin) ProtoMessage() {} func (x *JvmOrigin) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use JvmOrigin.ProtoReflect.Descriptor instead. 
func (*JvmOrigin) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{7} } func (x *JvmOrigin) GetLine() int32 { if x != nil && x.Line != nil { return *x.Line } return 0 } func (x *JvmOrigin) GetStartPosition() int32 { if x != nil && x.StartPosition != nil { return *x.StartPosition } return 0 } func (x *JvmOrigin) GetStartIndex() int32 { if x != nil && x.StartIndex != nil { return *x.StartIndex } return 0 } func (x *JvmOrigin) GetStopIndex() int32 { if x != nil && x.StopIndex != nil { return *x.StopIndex } return 0 } func (x *JvmOrigin) GetSqlText() string { if x != nil && x.SqlText != nil { return *x.SqlText } return "" } func (x *JvmOrigin) GetObjectType() string { if x != nil && x.ObjectType != nil { return *x.ObjectType } return "" } func (x *JvmOrigin) GetObjectName() string { if x != nil && x.ObjectName != nil { return *x.ObjectName } return "" } func (x *JvmOrigin) GetStackTrace() []*StackTraceElement { if x != nil { return x.StackTrace } return nil } // A message to hold a [[java.lang.StackTraceElement]]. 
type StackTraceElement struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) Class loader name ClassLoaderName *string `protobuf:"bytes,1,opt,name=class_loader_name,json=classLoaderName,proto3,oneof" json:"class_loader_name,omitempty"` // (Optional) Module name ModuleName *string `protobuf:"bytes,2,opt,name=module_name,json=moduleName,proto3,oneof" json:"module_name,omitempty"` // (Optional) Module version ModuleVersion *string `protobuf:"bytes,3,opt,name=module_version,json=moduleVersion,proto3,oneof" json:"module_version,omitempty"` // (Required) Declaring class DeclaringClass string `protobuf:"bytes,4,opt,name=declaring_class,json=declaringClass,proto3" json:"declaring_class,omitempty"` // (Required) Method name MethodName string `protobuf:"bytes,5,opt,name=method_name,json=methodName,proto3" json:"method_name,omitempty"` // (Optional) File name FileName *string `protobuf:"bytes,6,opt,name=file_name,json=fileName,proto3,oneof" json:"file_name,omitempty"` // (Required) Line number LineNumber int32 `protobuf:"varint,7,opt,name=line_number,json=lineNumber,proto3" json:"line_number,omitempty"` } func (x *StackTraceElement) Reset() { *x = StackTraceElement{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StackTraceElement) String() string { return protoimpl.X.MessageStringOf(x) } func (*StackTraceElement) ProtoMessage() {} func (x *StackTraceElement) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StackTraceElement.ProtoReflect.Descriptor instead. 
func (*StackTraceElement) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{8} } func (x *StackTraceElement) GetClassLoaderName() string { if x != nil && x.ClassLoaderName != nil { return *x.ClassLoaderName } return "" } func (x *StackTraceElement) GetModuleName() string { if x != nil && x.ModuleName != nil { return *x.ModuleName } return "" } func (x *StackTraceElement) GetModuleVersion() string { if x != nil && x.ModuleVersion != nil { return *x.ModuleVersion } return "" } func (x *StackTraceElement) GetDeclaringClass() string { if x != nil { return x.DeclaringClass } return "" } func (x *StackTraceElement) GetMethodName() string { if x != nil { return x.MethodName } return "" } func (x *StackTraceElement) GetFileName() string { if x != nil && x.FileName != nil { return *x.FileName } return "" } func (x *StackTraceElement) GetLineNumber() int32 { if x != nil { return x.LineNumber } return 0 } type Bools struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Values []bool `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` } func (x *Bools) Reset() { *x = Bools{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Bools) String() string { return protoimpl.X.MessageStringOf(x) } func (*Bools) ProtoMessage() {} func (x *Bools) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Bools.ProtoReflect.Descriptor instead. 
func (*Bools) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{9} } func (x *Bools) GetValues() []bool { if x != nil { return x.Values } return nil } type Ints struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Values []int32 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` } func (x *Ints) Reset() { *x = Ints{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Ints) String() string { return protoimpl.X.MessageStringOf(x) } func (*Ints) ProtoMessage() {} func (x *Ints) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Ints.ProtoReflect.Descriptor instead. 
func (*Ints) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{10} } func (x *Ints) GetValues() []int32 { if x != nil { return x.Values } return nil } type Longs struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Values []int64 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` } func (x *Longs) Reset() { *x = Longs{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Longs) String() string { return protoimpl.X.MessageStringOf(x) } func (*Longs) ProtoMessage() {} func (x *Longs) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Longs.ProtoReflect.Descriptor instead. 
func (*Longs) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{11} } func (x *Longs) GetValues() []int64 { if x != nil { return x.Values } return nil } type Floats struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Values []float32 `protobuf:"fixed32,1,rep,packed,name=values,proto3" json:"values,omitempty"` } func (x *Floats) Reset() { *x = Floats{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Floats) String() string { return protoimpl.X.MessageStringOf(x) } func (*Floats) ProtoMessage() {} func (x *Floats) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Floats.ProtoReflect.Descriptor instead. 
func (*Floats) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{12} } func (x *Floats) GetValues() []float32 { if x != nil { return x.Values } return nil } type Doubles struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Values []float64 `protobuf:"fixed64,1,rep,packed,name=values,proto3" json:"values,omitempty"` } func (x *Doubles) Reset() { *x = Doubles{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Doubles) String() string { return protoimpl.X.MessageStringOf(x) } func (*Doubles) ProtoMessage() {} func (x *Doubles) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Doubles.ProtoReflect.Descriptor instead. 
func (*Doubles) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{13} } func (x *Doubles) GetValues() []float64 { if x != nil { return x.Values } return nil } type Strings struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` } func (x *Strings) Reset() { *x = Strings{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_common_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Strings) String() string { return protoimpl.X.MessageStringOf(x) } func (*Strings) ProtoMessage() {} func (x *Strings) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_common_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Strings.ProtoReflect.Descriptor instead. 
func (*Strings) Descriptor() ([]byte, []int) { return file_spark_connect_common_proto_rawDescGZIP(), []int{14} } func (x *Strings) GetValues() []string { if x != nil { return x.Values } return nil } var File_spark_connect_common_proto protoreflect.FileDescriptor var file_spark_connect_common_proto_rawDesc = []byte{ 0x0a, 0x1a, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x22, 0xb0, 0x01, 0x0a, 0x0c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x19, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x5f, 0x64, 0x69, 0x73, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x75, 0x73, 0x65, 0x44, 0x69, 0x73, 0x6b, 0x12, 0x1d, 0x0a, 0x0a, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x20, 0x0a, 0x0c, 0x75, 0x73, 0x65, 0x5f, 0x6f, 0x66, 0x66, 0x5f, 0x68, 0x65, 0x61, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x75, 0x73, 0x65, 0x4f, 0x66, 0x66, 0x48, 0x65, 0x61, 0x70, 0x12, 0x22, 0x0a, 0x0c, 0x64, 0x65, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x64, 0x65, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0b, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x47, 0x0a, 0x13, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x65, 0x73, 0x18, 0x02, 
0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x65, 0x73, 0x22, 0xc3, 0x01, 0x0a, 0x17, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2e, 0x0a, 0x10, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x79, 0x5f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x79, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x79, 0x5f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x22, 0x52, 0x0a, 0x13, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xa5, 0x03, 0x0a, 0x0f, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x12, 0x64, 0x0a, 0x12, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x5f, 0x72, 0x65, 0x73, 
0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x58, 0x0a, 0x0e, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, 0x6c, 0x0a, 0x16, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x64, 0x0a, 0x12, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x38, 0x0a, 0x05, 0x76, 0x61, 0x6c, 
0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x93, 0x01, 0x0a, 0x06, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0d, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x48, 0x00, 0x52, 0x0c, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x6a, 0x76, 0x6d, 0x5f, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4a, 0x76, 0x6d, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x48, 0x00, 0x52, 0x09, 0x6a, 0x76, 0x6d, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x42, 0x0a, 0x0a, 0x08, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x47, 0x0a, 0x0c, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x73, 0x69, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x61, 0x6c, 0x6c, 0x53, 0x69, 0x74, 0x65, 0x22, 0xb1, 0x03, 0x0a, 0x09, 0x4a, 0x76, 0x6d, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x12, 0x17, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x88, 0x01, 0x01, 0x12, 0x2a, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 
0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x48, 0x02, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x22, 0x0a, 0x0a, 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x48, 0x03, 0x52, 0x09, 0x73, 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x1e, 0x0a, 0x08, 0x73, 0x71, 0x6c, 0x5f, 0x74, 0x65, 0x78, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, 0x07, 0x73, 0x71, 0x6c, 0x54, 0x65, 0x78, 0x74, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x05, 0x52, 0x0a, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x06, 0x52, 0x0a, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x41, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x69, 0x6e, 
0x64, 0x65, 0x78, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x73, 0x71, 0x6c, 0x5f, 0x74, 0x65, 0x78, 0x74, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xea, 0x02, 0x0a, 0x11, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x2f, 0x0a, 0x11, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4c, 0x6f, 0x61, 0x64, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x2a, 0x0a, 0x0e, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0d, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x27, 0x0a, 0x0f, 0x64, 0x65, 0x63, 0x6c, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x64, 0x65, 0x63, 0x6c, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x6c, 
0x69, 0x6e, 0x65, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x1f, 0x0a, 0x05, 0x42, 0x6f, 0x6f, 0x6c, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x08, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x1e, 0x0a, 0x04, 0x49, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x05, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x1f, 0x0a, 0x05, 0x4c, 0x6f, 0x6e, 0x67, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x20, 0x0a, 0x06, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x21, 0x0a, 0x07, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x01, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x21, 0x0a, 0x07, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 
0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_common_proto_rawDescOnce sync.Once file_spark_connect_common_proto_rawDescData = file_spark_connect_common_proto_rawDesc ) func file_spark_connect_common_proto_rawDescGZIP() []byte { file_spark_connect_common_proto_rawDescOnce.Do(func() { file_spark_connect_common_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_common_proto_rawDescData) }) return file_spark_connect_common_proto_rawDescData } var file_spark_connect_common_proto_msgTypes = make([]protoimpl.MessageInfo, 17) var file_spark_connect_common_proto_goTypes = []interface{}{ (*StorageLevel)(nil), // 0: spark.connect.StorageLevel (*ResourceInformation)(nil), // 1: spark.connect.ResourceInformation (*ExecutorResourceRequest)(nil), // 2: spark.connect.ExecutorResourceRequest (*TaskResourceRequest)(nil), // 3: spark.connect.TaskResourceRequest (*ResourceProfile)(nil), // 4: spark.connect.ResourceProfile (*Origin)(nil), // 5: spark.connect.Origin (*PythonOrigin)(nil), // 6: spark.connect.PythonOrigin (*JvmOrigin)(nil), // 7: spark.connect.JvmOrigin (*StackTraceElement)(nil), // 8: spark.connect.StackTraceElement (*Bools)(nil), // 9: spark.connect.Bools (*Ints)(nil), // 10: spark.connect.Ints (*Longs)(nil), // 11: spark.connect.Longs (*Floats)(nil), // 12: spark.connect.Floats (*Doubles)(nil), // 13: spark.connect.Doubles (*Strings)(nil), // 14: spark.connect.Strings nil, // 15: spark.connect.ResourceProfile.ExecutorResourcesEntry nil, // 16: spark.connect.ResourceProfile.TaskResourcesEntry } var file_spark_connect_common_proto_depIdxs = []int32{ 15, // 0: spark.connect.ResourceProfile.executor_resources:type_name -> spark.connect.ResourceProfile.ExecutorResourcesEntry 16, // 1: spark.connect.ResourceProfile.task_resources:type_name -> spark.connect.ResourceProfile.TaskResourcesEntry 6, // 2: spark.connect.Origin.python_origin:type_name -> spark.connect.PythonOrigin 7, // 3: 
spark.connect.Origin.jvm_origin:type_name -> spark.connect.JvmOrigin 8, // 4: spark.connect.JvmOrigin.stack_trace:type_name -> spark.connect.StackTraceElement 2, // 5: spark.connect.ResourceProfile.ExecutorResourcesEntry.value:type_name -> spark.connect.ExecutorResourceRequest 3, // 6: spark.connect.ResourceProfile.TaskResourcesEntry.value:type_name -> spark.connect.TaskResourceRequest 7, // [7:7] is the sub-list for method output_type 7, // [7:7] is the sub-list for method input_type 7, // [7:7] is the sub-list for extension type_name 7, // [7:7] is the sub-list for extension extendee 0, // [0:7] is the sub-list for field type_name } func init() { file_spark_connect_common_proto_init() } func file_spark_connect_common_proto_init() { if File_spark_connect_common_proto != nil { return } if !protoimpl.UnsafeEnabled { file_spark_connect_common_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StorageLevel); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ResourceInformation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExecutorResourceRequest); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*TaskResourceRequest); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ResourceProfile); i { case 0: return &v.state case 1: return &v.sizeCache case 
2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Origin); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PythonOrigin); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*JvmOrigin); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StackTraceElement); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Bools); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Ints); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Longs); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Floats); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[13].Exporter = func(v 
interface{}, i int) interface{} { switch v := v.(*Doubles); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_common_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Strings); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_common_proto_msgTypes[2].OneofWrappers = []interface{}{} file_spark_connect_common_proto_msgTypes[5].OneofWrappers = []interface{}{ (*Origin_PythonOrigin)(nil), (*Origin_JvmOrigin)(nil), } file_spark_connect_common_proto_msgTypes[7].OneofWrappers = []interface{}{} file_spark_connect_common_proto_msgTypes[8].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_common_proto_rawDesc, NumEnums: 0, NumMessages: 17, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_common_proto_goTypes, DependencyIndexes: file_spark_connect_common_proto_depIdxs, MessageInfos: file_spark_connect_common_proto_msgTypes, }.Build() File_spark_connect_common_proto = out.File file_spark_connect_common_proto_rawDesc = nil file_spark_connect_common_proto_goTypes = nil file_spark_connect_common_proto_depIdxs = nil } ================================================ FILE: internal/generated/example_plugins.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/example_plugins.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) type ExamplePluginRelation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` CustomField string `protobuf:"bytes,2,opt,name=custom_field,json=customField,proto3" json:"custom_field,omitempty"` } func (x *ExamplePluginRelation) Reset() { *x = ExamplePluginRelation{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_example_plugins_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExamplePluginRelation) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExamplePluginRelation) ProtoMessage() {} func (x *ExamplePluginRelation) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_example_plugins_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return 
ms } return mi.MessageOf(x) } // Deprecated: Use ExamplePluginRelation.ProtoReflect.Descriptor instead. func (*ExamplePluginRelation) Descriptor() ([]byte, []int) { return file_spark_connect_example_plugins_proto_rawDescGZIP(), []int{0} } func (x *ExamplePluginRelation) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *ExamplePluginRelation) GetCustomField() string { if x != nil { return x.CustomField } return "" } type ExamplePluginExpression struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Child *Expression `protobuf:"bytes,1,opt,name=child,proto3" json:"child,omitempty"` CustomField string `protobuf:"bytes,2,opt,name=custom_field,json=customField,proto3" json:"custom_field,omitempty"` } func (x *ExamplePluginExpression) Reset() { *x = ExamplePluginExpression{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_example_plugins_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExamplePluginExpression) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExamplePluginExpression) ProtoMessage() {} func (x *ExamplePluginExpression) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_example_plugins_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExamplePluginExpression.ProtoReflect.Descriptor instead. 
func (*ExamplePluginExpression) Descriptor() ([]byte, []int) { return file_spark_connect_example_plugins_proto_rawDescGZIP(), []int{1} } func (x *ExamplePluginExpression) GetChild() *Expression { if x != nil { return x.Child } return nil } func (x *ExamplePluginExpression) GetCustomField() string { if x != nil { return x.CustomField } return "" } type ExamplePluginCommand struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields CustomField string `protobuf:"bytes,1,opt,name=custom_field,json=customField,proto3" json:"custom_field,omitempty"` } func (x *ExamplePluginCommand) Reset() { *x = ExamplePluginCommand{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_example_plugins_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExamplePluginCommand) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExamplePluginCommand) ProtoMessage() {} func (x *ExamplePluginCommand) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_example_plugins_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExamplePluginCommand.ProtoReflect.Descriptor instead. 
func (*ExamplePluginCommand) Descriptor() ([]byte, []int) { return file_spark_connect_example_plugins_proto_rawDescGZIP(), []int{2} } func (x *ExamplePluginCommand) GetCustomField() string { if x != nil { return x.CustomField } return "" } var File_spark_connect_example_plugins_proto protoreflect.FileDescriptor var file_spark_connect_example_plugins_proto_rawDesc = []byte{ 0x0a, 0x23, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x69, 0x0a, 0x15, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0x6d, 0x0a, 0x17, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x05, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 
0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0x39, 0x0a, 0x14, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_example_plugins_proto_rawDescOnce sync.Once file_spark_connect_example_plugins_proto_rawDescData = file_spark_connect_example_plugins_proto_rawDesc ) func file_spark_connect_example_plugins_proto_rawDescGZIP() []byte { file_spark_connect_example_plugins_proto_rawDescOnce.Do(func() { file_spark_connect_example_plugins_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_example_plugins_proto_rawDescData) }) return file_spark_connect_example_plugins_proto_rawDescData } var file_spark_connect_example_plugins_proto_msgTypes = make([]protoimpl.MessageInfo, 3) var file_spark_connect_example_plugins_proto_goTypes = []interface{}{ (*ExamplePluginRelation)(nil), // 0: spark.connect.ExamplePluginRelation (*ExamplePluginExpression)(nil), // 1: spark.connect.ExamplePluginExpression (*ExamplePluginCommand)(nil), // 2: 
spark.connect.ExamplePluginCommand (*Relation)(nil), // 3: spark.connect.Relation (*Expression)(nil), // 4: spark.connect.Expression } var file_spark_connect_example_plugins_proto_depIdxs = []int32{ 3, // 0: spark.connect.ExamplePluginRelation.input:type_name -> spark.connect.Relation 4, // 1: spark.connect.ExamplePluginExpression.child:type_name -> spark.connect.Expression 2, // [2:2] is the sub-list for method output_type 2, // [2:2] is the sub-list for method input_type 2, // [2:2] is the sub-list for extension type_name 2, // [2:2] is the sub-list for extension extendee 0, // [0:2] is the sub-list for field type_name } func init() { file_spark_connect_example_plugins_proto_init() } func file_spark_connect_example_plugins_proto_init() { if File_spark_connect_example_plugins_proto != nil { return } file_spark_connect_relations_proto_init() file_spark_connect_expressions_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_example_plugins_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExamplePluginRelation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_example_plugins_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExamplePluginExpression); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_example_plugins_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExamplePluginCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_example_plugins_proto_rawDesc, NumEnums: 0, NumMessages: 3, NumExtensions: 0, NumServices: 0, }, GoTypes: 
file_spark_connect_example_plugins_proto_goTypes, DependencyIndexes: file_spark_connect_example_plugins_proto_depIdxs, MessageInfos: file_spark_connect_example_plugins_proto_msgTypes, }.Build() File_spark_connect_example_plugins_proto = out.File file_spark_connect_example_plugins_proto_rawDesc = nil file_spark_connect_example_plugins_proto_goTypes = nil file_spark_connect_example_plugins_proto_depIdxs = nil } ================================================ FILE: internal/generated/expressions.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/expressions.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" anypb "google.golang.org/protobuf/types/known/anypb" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. 
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) type Expression_Window_WindowFrame_FrameType int32 const ( Expression_Window_WindowFrame_FRAME_TYPE_UNDEFINED Expression_Window_WindowFrame_FrameType = 0 // RowFrame treats rows in a partition individually. Expression_Window_WindowFrame_FRAME_TYPE_ROW Expression_Window_WindowFrame_FrameType = 1 // RangeFrame treats rows in a partition as groups of peers. // All rows having the same 'ORDER BY' ordering are considered as peers. Expression_Window_WindowFrame_FRAME_TYPE_RANGE Expression_Window_WindowFrame_FrameType = 2 ) // Enum value maps for Expression_Window_WindowFrame_FrameType. var ( Expression_Window_WindowFrame_FrameType_name = map[int32]string{ 0: "FRAME_TYPE_UNDEFINED", 1: "FRAME_TYPE_ROW", 2: "FRAME_TYPE_RANGE", } Expression_Window_WindowFrame_FrameType_value = map[string]int32{ "FRAME_TYPE_UNDEFINED": 0, "FRAME_TYPE_ROW": 1, "FRAME_TYPE_RANGE": 2, } ) func (x Expression_Window_WindowFrame_FrameType) Enum() *Expression_Window_WindowFrame_FrameType { p := new(Expression_Window_WindowFrame_FrameType) *p = x return p } func (x Expression_Window_WindowFrame_FrameType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (Expression_Window_WindowFrame_FrameType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_expressions_proto_enumTypes[0].Descriptor() } func (Expression_Window_WindowFrame_FrameType) Type() protoreflect.EnumType { return &file_spark_connect_expressions_proto_enumTypes[0] } func (x Expression_Window_WindowFrame_FrameType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use Expression_Window_WindowFrame_FrameType.Descriptor instead. 
func (Expression_Window_WindowFrame_FrameType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 0, 0, 0} } type Expression_SortOrder_SortDirection int32 const ( Expression_SortOrder_SORT_DIRECTION_UNSPECIFIED Expression_SortOrder_SortDirection = 0 Expression_SortOrder_SORT_DIRECTION_ASCENDING Expression_SortOrder_SortDirection = 1 Expression_SortOrder_SORT_DIRECTION_DESCENDING Expression_SortOrder_SortDirection = 2 ) // Enum value maps for Expression_SortOrder_SortDirection. var ( Expression_SortOrder_SortDirection_name = map[int32]string{ 0: "SORT_DIRECTION_UNSPECIFIED", 1: "SORT_DIRECTION_ASCENDING", 2: "SORT_DIRECTION_DESCENDING", } Expression_SortOrder_SortDirection_value = map[string]int32{ "SORT_DIRECTION_UNSPECIFIED": 0, "SORT_DIRECTION_ASCENDING": 1, "SORT_DIRECTION_DESCENDING": 2, } ) func (x Expression_SortOrder_SortDirection) Enum() *Expression_SortOrder_SortDirection { p := new(Expression_SortOrder_SortDirection) *p = x return p } func (x Expression_SortOrder_SortDirection) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (Expression_SortOrder_SortDirection) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_expressions_proto_enumTypes[1].Descriptor() } func (Expression_SortOrder_SortDirection) Type() protoreflect.EnumType { return &file_spark_connect_expressions_proto_enumTypes[1] } func (x Expression_SortOrder_SortDirection) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use Expression_SortOrder_SortDirection.Descriptor instead. 
func (Expression_SortOrder_SortDirection) EnumDescriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 1, 0} } type Expression_SortOrder_NullOrdering int32 const ( Expression_SortOrder_SORT_NULLS_UNSPECIFIED Expression_SortOrder_NullOrdering = 0 Expression_SortOrder_SORT_NULLS_FIRST Expression_SortOrder_NullOrdering = 1 Expression_SortOrder_SORT_NULLS_LAST Expression_SortOrder_NullOrdering = 2 ) // Enum value maps for Expression_SortOrder_NullOrdering. var ( Expression_SortOrder_NullOrdering_name = map[int32]string{ 0: "SORT_NULLS_UNSPECIFIED", 1: "SORT_NULLS_FIRST", 2: "SORT_NULLS_LAST", } Expression_SortOrder_NullOrdering_value = map[string]int32{ "SORT_NULLS_UNSPECIFIED": 0, "SORT_NULLS_FIRST": 1, "SORT_NULLS_LAST": 2, } ) func (x Expression_SortOrder_NullOrdering) Enum() *Expression_SortOrder_NullOrdering { p := new(Expression_SortOrder_NullOrdering) *p = x return p } func (x Expression_SortOrder_NullOrdering) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (Expression_SortOrder_NullOrdering) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_expressions_proto_enumTypes[2].Descriptor() } func (Expression_SortOrder_NullOrdering) Type() protoreflect.EnumType { return &file_spark_connect_expressions_proto_enumTypes[2] } func (x Expression_SortOrder_NullOrdering) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use Expression_SortOrder_NullOrdering.Descriptor instead. 
func (Expression_SortOrder_NullOrdering) EnumDescriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 1, 1} } type Expression_Cast_EvalMode int32 const ( Expression_Cast_EVAL_MODE_UNSPECIFIED Expression_Cast_EvalMode = 0 Expression_Cast_EVAL_MODE_LEGACY Expression_Cast_EvalMode = 1 Expression_Cast_EVAL_MODE_ANSI Expression_Cast_EvalMode = 2 Expression_Cast_EVAL_MODE_TRY Expression_Cast_EvalMode = 3 ) // Enum value maps for Expression_Cast_EvalMode. var ( Expression_Cast_EvalMode_name = map[int32]string{ 0: "EVAL_MODE_UNSPECIFIED", 1: "EVAL_MODE_LEGACY", 2: "EVAL_MODE_ANSI", 3: "EVAL_MODE_TRY", } Expression_Cast_EvalMode_value = map[string]int32{ "EVAL_MODE_UNSPECIFIED": 0, "EVAL_MODE_LEGACY": 1, "EVAL_MODE_ANSI": 2, "EVAL_MODE_TRY": 3, } ) func (x Expression_Cast_EvalMode) Enum() *Expression_Cast_EvalMode { p := new(Expression_Cast_EvalMode) *p = x return p } func (x Expression_Cast_EvalMode) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (Expression_Cast_EvalMode) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_expressions_proto_enumTypes[3].Descriptor() } func (Expression_Cast_EvalMode) Type() protoreflect.EnumType { return &file_spark_connect_expressions_proto_enumTypes[3] } func (x Expression_Cast_EvalMode) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use Expression_Cast_EvalMode.Descriptor instead. 
func (Expression_Cast_EvalMode) EnumDescriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 2, 0} } type MergeAction_ActionType int32 const ( MergeAction_ACTION_TYPE_INVALID MergeAction_ActionType = 0 MergeAction_ACTION_TYPE_DELETE MergeAction_ActionType = 1 MergeAction_ACTION_TYPE_INSERT MergeAction_ActionType = 2 MergeAction_ACTION_TYPE_INSERT_STAR MergeAction_ActionType = 3 MergeAction_ACTION_TYPE_UPDATE MergeAction_ActionType = 4 MergeAction_ACTION_TYPE_UPDATE_STAR MergeAction_ActionType = 5 ) // Enum value maps for MergeAction_ActionType. var ( MergeAction_ActionType_name = map[int32]string{ 0: "ACTION_TYPE_INVALID", 1: "ACTION_TYPE_DELETE", 2: "ACTION_TYPE_INSERT", 3: "ACTION_TYPE_INSERT_STAR", 4: "ACTION_TYPE_UPDATE", 5: "ACTION_TYPE_UPDATE_STAR", } MergeAction_ActionType_value = map[string]int32{ "ACTION_TYPE_INVALID": 0, "ACTION_TYPE_DELETE": 1, "ACTION_TYPE_INSERT": 2, "ACTION_TYPE_INSERT_STAR": 3, "ACTION_TYPE_UPDATE": 4, "ACTION_TYPE_UPDATE_STAR": 5, } ) func (x MergeAction_ActionType) Enum() *MergeAction_ActionType { p := new(MergeAction_ActionType) *p = x return p } func (x MergeAction_ActionType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (MergeAction_ActionType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_expressions_proto_enumTypes[4].Descriptor() } func (MergeAction_ActionType) Type() protoreflect.EnumType { return &file_spark_connect_expressions_proto_enumTypes[4] } func (x MergeAction_ActionType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use MergeAction_ActionType.Descriptor instead. 
func (MergeAction_ActionType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{9, 0} } type SubqueryExpression_SubqueryType int32 const ( SubqueryExpression_SUBQUERY_TYPE_UNKNOWN SubqueryExpression_SubqueryType = 0 SubqueryExpression_SUBQUERY_TYPE_SCALAR SubqueryExpression_SubqueryType = 1 SubqueryExpression_SUBQUERY_TYPE_EXISTS SubqueryExpression_SubqueryType = 2 SubqueryExpression_SUBQUERY_TYPE_TABLE_ARG SubqueryExpression_SubqueryType = 3 SubqueryExpression_SUBQUERY_TYPE_IN SubqueryExpression_SubqueryType = 4 ) // Enum value maps for SubqueryExpression_SubqueryType. var ( SubqueryExpression_SubqueryType_name = map[int32]string{ 0: "SUBQUERY_TYPE_UNKNOWN", 1: "SUBQUERY_TYPE_SCALAR", 2: "SUBQUERY_TYPE_EXISTS", 3: "SUBQUERY_TYPE_TABLE_ARG", 4: "SUBQUERY_TYPE_IN", } SubqueryExpression_SubqueryType_value = map[string]int32{ "SUBQUERY_TYPE_UNKNOWN": 0, "SUBQUERY_TYPE_SCALAR": 1, "SUBQUERY_TYPE_EXISTS": 2, "SUBQUERY_TYPE_TABLE_ARG": 3, "SUBQUERY_TYPE_IN": 4, } ) func (x SubqueryExpression_SubqueryType) Enum() *SubqueryExpression_SubqueryType { p := new(SubqueryExpression_SubqueryType) *p = x return p } func (x SubqueryExpression_SubqueryType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (SubqueryExpression_SubqueryType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_expressions_proto_enumTypes[5].Descriptor() } func (SubqueryExpression_SubqueryType) Type() protoreflect.EnumType { return &file_spark_connect_expressions_proto_enumTypes[5] } func (x SubqueryExpression_SubqueryType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use SubqueryExpression_SubqueryType.Descriptor instead. func (SubqueryExpression_SubqueryType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{10, 0} } // Expression used to refer to fields, functions and similar. 
This can be used everywhere // expressions in SQL appear. type Expression struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Common *ExpressionCommon `protobuf:"bytes,18,opt,name=common,proto3" json:"common,omitempty"` // Types that are assignable to ExprType: // // *Expression_Literal_ // *Expression_UnresolvedAttribute_ // *Expression_UnresolvedFunction_ // *Expression_ExpressionString_ // *Expression_UnresolvedStar_ // *Expression_Alias_ // *Expression_Cast_ // *Expression_UnresolvedRegex_ // *Expression_SortOrder_ // *Expression_LambdaFunction_ // *Expression_Window_ // *Expression_UnresolvedExtractValue_ // *Expression_UpdateFields_ // *Expression_UnresolvedNamedLambdaVariable_ // *Expression_CommonInlineUserDefinedFunction // *Expression_CallFunction // *Expression_NamedArgumentExpression // *Expression_MergeAction // *Expression_TypedAggregateExpression // *Expression_SubqueryExpression // *Expression_Extension ExprType isExpression_ExprType `protobuf_oneof:"expr_type"` } func (x *Expression) Reset() { *x = Expression{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression) ProtoMessage() {} func (x *Expression) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression.ProtoReflect.Descriptor instead. 
func (*Expression) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0} } func (x *Expression) GetCommon() *ExpressionCommon { if x != nil { return x.Common } return nil } func (m *Expression) GetExprType() isExpression_ExprType { if m != nil { return m.ExprType } return nil } func (x *Expression) GetLiteral() *Expression_Literal { if x, ok := x.GetExprType().(*Expression_Literal_); ok { return x.Literal } return nil } func (x *Expression) GetUnresolvedAttribute() *Expression_UnresolvedAttribute { if x, ok := x.GetExprType().(*Expression_UnresolvedAttribute_); ok { return x.UnresolvedAttribute } return nil } func (x *Expression) GetUnresolvedFunction() *Expression_UnresolvedFunction { if x, ok := x.GetExprType().(*Expression_UnresolvedFunction_); ok { return x.UnresolvedFunction } return nil } func (x *Expression) GetExpressionString() *Expression_ExpressionString { if x, ok := x.GetExprType().(*Expression_ExpressionString_); ok { return x.ExpressionString } return nil } func (x *Expression) GetUnresolvedStar() *Expression_UnresolvedStar { if x, ok := x.GetExprType().(*Expression_UnresolvedStar_); ok { return x.UnresolvedStar } return nil } func (x *Expression) GetAlias() *Expression_Alias { if x, ok := x.GetExprType().(*Expression_Alias_); ok { return x.Alias } return nil } func (x *Expression) GetCast() *Expression_Cast { if x, ok := x.GetExprType().(*Expression_Cast_); ok { return x.Cast } return nil } func (x *Expression) GetUnresolvedRegex() *Expression_UnresolvedRegex { if x, ok := x.GetExprType().(*Expression_UnresolvedRegex_); ok { return x.UnresolvedRegex } return nil } func (x *Expression) GetSortOrder() *Expression_SortOrder { if x, ok := x.GetExprType().(*Expression_SortOrder_); ok { return x.SortOrder } return nil } func (x *Expression) GetLambdaFunction() *Expression_LambdaFunction { if x, ok := x.GetExprType().(*Expression_LambdaFunction_); ok { return x.LambdaFunction } return nil } func (x *Expression) 
GetWindow() *Expression_Window { if x, ok := x.GetExprType().(*Expression_Window_); ok { return x.Window } return nil } func (x *Expression) GetUnresolvedExtractValue() *Expression_UnresolvedExtractValue { if x, ok := x.GetExprType().(*Expression_UnresolvedExtractValue_); ok { return x.UnresolvedExtractValue } return nil } func (x *Expression) GetUpdateFields() *Expression_UpdateFields { if x, ok := x.GetExprType().(*Expression_UpdateFields_); ok { return x.UpdateFields } return nil } func (x *Expression) GetUnresolvedNamedLambdaVariable() *Expression_UnresolvedNamedLambdaVariable { if x, ok := x.GetExprType().(*Expression_UnresolvedNamedLambdaVariable_); ok { return x.UnresolvedNamedLambdaVariable } return nil } func (x *Expression) GetCommonInlineUserDefinedFunction() *CommonInlineUserDefinedFunction { if x, ok := x.GetExprType().(*Expression_CommonInlineUserDefinedFunction); ok { return x.CommonInlineUserDefinedFunction } return nil } func (x *Expression) GetCallFunction() *CallFunction { if x, ok := x.GetExprType().(*Expression_CallFunction); ok { return x.CallFunction } return nil } func (x *Expression) GetNamedArgumentExpression() *NamedArgumentExpression { if x, ok := x.GetExprType().(*Expression_NamedArgumentExpression); ok { return x.NamedArgumentExpression } return nil } func (x *Expression) GetMergeAction() *MergeAction { if x, ok := x.GetExprType().(*Expression_MergeAction); ok { return x.MergeAction } return nil } func (x *Expression) GetTypedAggregateExpression() *TypedAggregateExpression { if x, ok := x.GetExprType().(*Expression_TypedAggregateExpression); ok { return x.TypedAggregateExpression } return nil } func (x *Expression) GetSubqueryExpression() *SubqueryExpression { if x, ok := x.GetExprType().(*Expression_SubqueryExpression); ok { return x.SubqueryExpression } return nil } func (x *Expression) GetExtension() *anypb.Any { if x, ok := x.GetExprType().(*Expression_Extension); ok { return x.Extension } return nil } type isExpression_ExprType 
interface { isExpression_ExprType() } type Expression_Literal_ struct { Literal *Expression_Literal `protobuf:"bytes,1,opt,name=literal,proto3,oneof"` } type Expression_UnresolvedAttribute_ struct { UnresolvedAttribute *Expression_UnresolvedAttribute `protobuf:"bytes,2,opt,name=unresolved_attribute,json=unresolvedAttribute,proto3,oneof"` } type Expression_UnresolvedFunction_ struct { UnresolvedFunction *Expression_UnresolvedFunction `protobuf:"bytes,3,opt,name=unresolved_function,json=unresolvedFunction,proto3,oneof"` } type Expression_ExpressionString_ struct { ExpressionString *Expression_ExpressionString `protobuf:"bytes,4,opt,name=expression_string,json=expressionString,proto3,oneof"` } type Expression_UnresolvedStar_ struct { UnresolvedStar *Expression_UnresolvedStar `protobuf:"bytes,5,opt,name=unresolved_star,json=unresolvedStar,proto3,oneof"` } type Expression_Alias_ struct { Alias *Expression_Alias `protobuf:"bytes,6,opt,name=alias,proto3,oneof"` } type Expression_Cast_ struct { Cast *Expression_Cast `protobuf:"bytes,7,opt,name=cast,proto3,oneof"` } type Expression_UnresolvedRegex_ struct { UnresolvedRegex *Expression_UnresolvedRegex `protobuf:"bytes,8,opt,name=unresolved_regex,json=unresolvedRegex,proto3,oneof"` } type Expression_SortOrder_ struct { SortOrder *Expression_SortOrder `protobuf:"bytes,9,opt,name=sort_order,json=sortOrder,proto3,oneof"` } type Expression_LambdaFunction_ struct { LambdaFunction *Expression_LambdaFunction `protobuf:"bytes,10,opt,name=lambda_function,json=lambdaFunction,proto3,oneof"` } type Expression_Window_ struct { Window *Expression_Window `protobuf:"bytes,11,opt,name=window,proto3,oneof"` } type Expression_UnresolvedExtractValue_ struct { UnresolvedExtractValue *Expression_UnresolvedExtractValue `protobuf:"bytes,12,opt,name=unresolved_extract_value,json=unresolvedExtractValue,proto3,oneof"` } type Expression_UpdateFields_ struct { UpdateFields *Expression_UpdateFields 
`protobuf:"bytes,13,opt,name=update_fields,json=updateFields,proto3,oneof"` } type Expression_UnresolvedNamedLambdaVariable_ struct { UnresolvedNamedLambdaVariable *Expression_UnresolvedNamedLambdaVariable `protobuf:"bytes,14,opt,name=unresolved_named_lambda_variable,json=unresolvedNamedLambdaVariable,proto3,oneof"` } type Expression_CommonInlineUserDefinedFunction struct { CommonInlineUserDefinedFunction *CommonInlineUserDefinedFunction `protobuf:"bytes,15,opt,name=common_inline_user_defined_function,json=commonInlineUserDefinedFunction,proto3,oneof"` } type Expression_CallFunction struct { CallFunction *CallFunction `protobuf:"bytes,16,opt,name=call_function,json=callFunction,proto3,oneof"` } type Expression_NamedArgumentExpression struct { NamedArgumentExpression *NamedArgumentExpression `protobuf:"bytes,17,opt,name=named_argument_expression,json=namedArgumentExpression,proto3,oneof"` } type Expression_MergeAction struct { MergeAction *MergeAction `protobuf:"bytes,19,opt,name=merge_action,json=mergeAction,proto3,oneof"` } type Expression_TypedAggregateExpression struct { TypedAggregateExpression *TypedAggregateExpression `protobuf:"bytes,20,opt,name=typed_aggregate_expression,json=typedAggregateExpression,proto3,oneof"` } type Expression_SubqueryExpression struct { SubqueryExpression *SubqueryExpression `protobuf:"bytes,21,opt,name=subquery_expression,json=subqueryExpression,proto3,oneof"` } type Expression_Extension struct { // This field is used to mark extensions to the protocol. When plugins generate arbitrary // relations they can add them here. During the planning the correct resolution is done. 
Extension *anypb.Any `protobuf:"bytes,999,opt,name=extension,proto3,oneof"` } func (*Expression_Literal_) isExpression_ExprType() {} func (*Expression_UnresolvedAttribute_) isExpression_ExprType() {} func (*Expression_UnresolvedFunction_) isExpression_ExprType() {} func (*Expression_ExpressionString_) isExpression_ExprType() {} func (*Expression_UnresolvedStar_) isExpression_ExprType() {} func (*Expression_Alias_) isExpression_ExprType() {} func (*Expression_Cast_) isExpression_ExprType() {} func (*Expression_UnresolvedRegex_) isExpression_ExprType() {} func (*Expression_SortOrder_) isExpression_ExprType() {} func (*Expression_LambdaFunction_) isExpression_ExprType() {} func (*Expression_Window_) isExpression_ExprType() {} func (*Expression_UnresolvedExtractValue_) isExpression_ExprType() {} func (*Expression_UpdateFields_) isExpression_ExprType() {} func (*Expression_UnresolvedNamedLambdaVariable_) isExpression_ExprType() {} func (*Expression_CommonInlineUserDefinedFunction) isExpression_ExprType() {} func (*Expression_CallFunction) isExpression_ExprType() {} func (*Expression_NamedArgumentExpression) isExpression_ExprType() {} func (*Expression_MergeAction) isExpression_ExprType() {} func (*Expression_TypedAggregateExpression) isExpression_ExprType() {} func (*Expression_SubqueryExpression) isExpression_ExprType() {} func (*Expression_Extension) isExpression_ExprType() {} type ExpressionCommon struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Keep the information of the origin for this expression such as stacktrace. 
Origin *Origin `protobuf:"bytes,1,opt,name=origin,proto3" json:"origin,omitempty"` } func (x *ExpressionCommon) Reset() { *x = ExpressionCommon{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ExpressionCommon) String() string { return protoimpl.X.MessageStringOf(x) } func (*ExpressionCommon) ProtoMessage() {} func (x *ExpressionCommon) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ExpressionCommon.ProtoReflect.Descriptor instead. func (*ExpressionCommon) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{1} } func (x *ExpressionCommon) GetOrigin() *Origin { if x != nil { return x.Origin } return nil } type CommonInlineUserDefinedFunction struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Name of the user-defined function. FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` // (Optional) Indicate if the user-defined function is deterministic. Deterministic bool `protobuf:"varint,2,opt,name=deterministic,proto3" json:"deterministic,omitempty"` // (Optional) Function arguments. Empty arguments are allowed. Arguments []*Expression `protobuf:"bytes,3,rep,name=arguments,proto3" json:"arguments,omitempty"` // (Required) Indicate the function type of the user-defined function. 
// // Types that are assignable to Function: // // *CommonInlineUserDefinedFunction_PythonUdf // *CommonInlineUserDefinedFunction_ScalarScalaUdf // *CommonInlineUserDefinedFunction_JavaUdf Function isCommonInlineUserDefinedFunction_Function `protobuf_oneof:"function"` // (Required) Indicate if this function should be applied on distinct values. IsDistinct bool `protobuf:"varint,7,opt,name=is_distinct,json=isDistinct,proto3" json:"is_distinct,omitempty"` } func (x *CommonInlineUserDefinedFunction) Reset() { *x = CommonInlineUserDefinedFunction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CommonInlineUserDefinedFunction) String() string { return protoimpl.X.MessageStringOf(x) } func (*CommonInlineUserDefinedFunction) ProtoMessage() {} func (x *CommonInlineUserDefinedFunction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CommonInlineUserDefinedFunction.ProtoReflect.Descriptor instead. 
func (*CommonInlineUserDefinedFunction) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{2}
}

// NOTE(review): all Get* accessors below follow the protoc-gen-go pattern and
// are safe to call on a nil receiver (they return the field's zero value).

func (x *CommonInlineUserDefinedFunction) GetFunctionName() string {
	if x != nil {
		return x.FunctionName
	}
	return ""
}

func (x *CommonInlineUserDefinedFunction) GetDeterministic() bool {
	if x != nil {
		return x.Deterministic
	}
	return false
}

func (x *CommonInlineUserDefinedFunction) GetArguments() []*Expression {
	if x != nil {
		return x.Arguments
	}
	return nil
}

func (m *CommonInlineUserDefinedFunction) GetFunction() isCommonInlineUserDefinedFunction_Function {
	if m != nil {
		return m.Function
	}
	return nil
}

// GetPythonUdf returns the oneof value only when the "function" oneof
// currently holds a python_udf; otherwise nil.
func (x *CommonInlineUserDefinedFunction) GetPythonUdf() *PythonUDF {
	if x, ok := x.GetFunction().(*CommonInlineUserDefinedFunction_PythonUdf); ok {
		return x.PythonUdf
	}
	return nil
}

func (x *CommonInlineUserDefinedFunction) GetScalarScalaUdf() *ScalarScalaUDF {
	if x, ok := x.GetFunction().(*CommonInlineUserDefinedFunction_ScalarScalaUdf); ok {
		return x.ScalarScalaUdf
	}
	return nil
}

func (x *CommonInlineUserDefinedFunction) GetJavaUdf() *JavaUDF {
	if x, ok := x.GetFunction().(*CommonInlineUserDefinedFunction_JavaUdf); ok {
		return x.JavaUdf
	}
	return nil
}

func (x *CommonInlineUserDefinedFunction) GetIsDistinct() bool {
	if x != nil {
		return x.IsDistinct
	}
	return false
}

// isCommonInlineUserDefinedFunction_Function is the sealed marker interface
// for the "function" oneof; only the generated wrapper types implement it.
type isCommonInlineUserDefinedFunction_Function interface {
	isCommonInlineUserDefinedFunction_Function()
}

type CommonInlineUserDefinedFunction_PythonUdf struct {
	PythonUdf *PythonUDF `protobuf:"bytes,4,opt,name=python_udf,json=pythonUdf,proto3,oneof"`
}

type CommonInlineUserDefinedFunction_ScalarScalaUdf struct {
	ScalarScalaUdf *ScalarScalaUDF `protobuf:"bytes,5,opt,name=scalar_scala_udf,json=scalarScalaUdf,proto3,oneof"`
}

type CommonInlineUserDefinedFunction_JavaUdf struct {
	JavaUdf *JavaUDF `protobuf:"bytes,6,opt,name=java_udf,json=javaUdf,proto3,oneof"`
}

func (*CommonInlineUserDefinedFunction_PythonUdf) isCommonInlineUserDefinedFunction_Function() {}

func (*CommonInlineUserDefinedFunction_ScalarScalaUdf) isCommonInlineUserDefinedFunction_Function() {}

func (*CommonInlineUserDefinedFunction_JavaUdf) isCommonInlineUserDefinedFunction_Function() {}

type PythonUDF struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Output type of the Python UDF
	OutputType *DataType `protobuf:"bytes,1,opt,name=output_type,json=outputType,proto3" json:"output_type,omitempty"`
	// (Required) EvalType of the Python UDF
	EvalType int32 `protobuf:"varint,2,opt,name=eval_type,json=evalType,proto3" json:"eval_type,omitempty"`
	// (Required) The encoded commands of the Python UDF
	Command []byte `protobuf:"bytes,3,opt,name=command,proto3" json:"command,omitempty"`
	// (Required) Python version being used in the client.
	PythonVer string `protobuf:"bytes,4,opt,name=python_ver,json=pythonVer,proto3" json:"python_ver,omitempty"`
	// (Optional) Additional includes for the Python UDF.
	AdditionalIncludes []string `protobuf:"bytes,5,rep,name=additional_includes,json=additionalIncludes,proto3" json:"additional_includes,omitempty"`
}

func (x *PythonUDF) Reset() {
	*x = PythonUDF{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *PythonUDF) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*PythonUDF) ProtoMessage() {}

func (x *PythonUDF) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PythonUDF.ProtoReflect.Descriptor instead.
func (*PythonUDF) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{3}
}

// Nil-receiver-safe accessors generated by protoc-gen-go.

func (x *PythonUDF) GetOutputType() *DataType {
	if x != nil {
		return x.OutputType
	}
	return nil
}

func (x *PythonUDF) GetEvalType() int32 {
	if x != nil {
		return x.EvalType
	}
	return 0
}

func (x *PythonUDF) GetCommand() []byte {
	if x != nil {
		return x.Command
	}
	return nil
}

func (x *PythonUDF) GetPythonVer() string {
	if x != nil {
		return x.PythonVer
	}
	return ""
}

func (x *PythonUDF) GetAdditionalIncludes() []string {
	if x != nil {
		return x.AdditionalIncludes
	}
	return nil
}

type ScalarScalaUDF struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Serialized JVM object containing UDF definition, input encoders and output encoder
	Payload []byte `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"`
	// (Optional) Input type(s) of the UDF
	InputTypes []*DataType `protobuf:"bytes,2,rep,name=inputTypes,proto3" json:"inputTypes,omitempty"`
	// (Required) Output type of the UDF
	OutputType *DataType `protobuf:"bytes,3,opt,name=outputType,proto3" json:"outputType,omitempty"`
	// (Required) True if the UDF can return null value
	Nullable bool `protobuf:"varint,4,opt,name=nullable,proto3" json:"nullable,omitempty"`
	// (Required) Indicate if the UDF is an aggregate function
	Aggregate bool `protobuf:"varint,5,opt,name=aggregate,proto3" json:"aggregate,omitempty"`
}

func (x *ScalarScalaUDF) Reset() {
	*x = ScalarScalaUDF{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *ScalarScalaUDF) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*ScalarScalaUDF) ProtoMessage() {}

func (x *ScalarScalaUDF) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ScalarScalaUDF.ProtoReflect.Descriptor instead.
func (*ScalarScalaUDF) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{4}
}

func (x *ScalarScalaUDF) GetPayload() []byte {
	if x != nil {
		return x.Payload
	}
	return nil
}

func (x *ScalarScalaUDF) GetInputTypes() []*DataType {
	if x != nil {
		return x.InputTypes
	}
	return nil
}

func (x *ScalarScalaUDF) GetOutputType() *DataType {
	if x != nil {
		return x.OutputType
	}
	return nil
}

func (x *ScalarScalaUDF) GetNullable() bool {
	if x != nil {
		return x.Nullable
	}
	return false
}

func (x *ScalarScalaUDF) GetAggregate() bool {
	if x != nil {
		return x.Aggregate
	}
	return false
}

type JavaUDF struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Fully qualified name of Java class
	ClassName string `protobuf:"bytes,1,opt,name=class_name,json=className,proto3" json:"class_name,omitempty"`
	// (Optional) Output type of the Java UDF
	OutputType *DataType `protobuf:"bytes,2,opt,name=output_type,json=outputType,proto3,oneof" json:"output_type,omitempty"`
	// (Required) Indicate if the Java user-defined function is an aggregate function
	Aggregate bool `protobuf:"varint,3,opt,name=aggregate,proto3" json:"aggregate,omitempty"`
}

func (x *JavaUDF) Reset() {
	*x = JavaUDF{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *JavaUDF) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*JavaUDF) ProtoMessage() {}

func (x *JavaUDF) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use JavaUDF.ProtoReflect.Descriptor instead.
func (*JavaUDF) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{5}
}

func (x *JavaUDF) GetClassName() string {
	if x != nil {
		return x.ClassName
	}
	return ""
}

func (x *JavaUDF) GetOutputType() *DataType {
	if x != nil {
		return x.OutputType
	}
	return nil
}

func (x *JavaUDF) GetAggregate() bool {
	if x != nil {
		return x.Aggregate
	}
	return false
}

type TypedAggregateExpression struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The aggregate function object packed into bytes.
	ScalarScalaUdf *ScalarScalaUDF `protobuf:"bytes,1,opt,name=scalar_scala_udf,json=scalarScalaUdf,proto3" json:"scalar_scala_udf,omitempty"`
}

func (x *TypedAggregateExpression) Reset() {
	*x = TypedAggregateExpression{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *TypedAggregateExpression) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*TypedAggregateExpression) ProtoMessage() {}

func (x *TypedAggregateExpression) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use TypedAggregateExpression.ProtoReflect.Descriptor instead.
func (*TypedAggregateExpression) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{6}
}

func (x *TypedAggregateExpression) GetScalarScalaUdf() *ScalarScalaUDF {
	if x != nil {
		return x.ScalarScalaUdf
	}
	return nil
}

type CallFunction struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Unparsed name of the SQL function.
	FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"`
	// (Optional) Function arguments. Empty arguments are allowed.
	Arguments []*Expression `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"`
}

func (x *CallFunction) Reset() {
	*x = CallFunction{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *CallFunction) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*CallFunction) ProtoMessage() {}

func (x *CallFunction) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CallFunction.ProtoReflect.Descriptor instead.
func (*CallFunction) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{7}
}

func (x *CallFunction) GetFunctionName() string {
	if x != nil {
		return x.FunctionName
	}
	return ""
}

func (x *CallFunction) GetArguments() []*Expression {
	if x != nil {
		return x.Arguments
	}
	return nil
}

type NamedArgumentExpression struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The key of the named argument.
	Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
	// (Required) The value expression of the named argument.
	Value *Expression `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}

func (x *NamedArgumentExpression) Reset() {
	*x = NamedArgumentExpression{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *NamedArgumentExpression) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*NamedArgumentExpression) ProtoMessage() {}

func (x *NamedArgumentExpression) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use NamedArgumentExpression.ProtoReflect.Descriptor instead.
func (*NamedArgumentExpression) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{8}
}

func (x *NamedArgumentExpression) GetKey() string {
	if x != nil {
		return x.Key
	}
	return ""
}

func (x *NamedArgumentExpression) GetValue() *Expression {
	if x != nil {
		return x.Value
	}
	return nil
}

type MergeAction struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The action type of the merge action.
	ActionType MergeAction_ActionType `protobuf:"varint,1,opt,name=action_type,json=actionType,proto3,enum=spark.connect.MergeAction_ActionType" json:"action_type,omitempty"`
	// (Optional) The condition expression of the merge action.
	Condition *Expression `protobuf:"bytes,2,opt,name=condition,proto3,oneof" json:"condition,omitempty"`
	// (Optional) The assignments of the merge action. Required for ActionTypes INSERT and UPDATE.
	Assignments []*MergeAction_Assignment `protobuf:"bytes,3,rep,name=assignments,proto3" json:"assignments,omitempty"`
}

func (x *MergeAction) Reset() {
	*x = MergeAction{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MergeAction) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MergeAction) ProtoMessage() {}

func (x *MergeAction) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MergeAction.ProtoReflect.Descriptor instead.
func (*MergeAction) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{9}
}

// GetActionType returns the enum's zero/invalid value on a nil receiver.
func (x *MergeAction) GetActionType() MergeAction_ActionType {
	if x != nil {
		return x.ActionType
	}
	return MergeAction_ACTION_TYPE_INVALID
}

func (x *MergeAction) GetCondition() *Expression {
	if x != nil {
		return x.Condition
	}
	return nil
}

func (x *MergeAction) GetAssignments() []*MergeAction_Assignment {
	if x != nil {
		return x.Assignments
	}
	return nil
}

type SubqueryExpression struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The ID of the corresponding connect plan.
	PlanId int64 `protobuf:"varint,1,opt,name=plan_id,json=planId,proto3" json:"plan_id,omitempty"`
	// (Required) The type of the subquery.
	SubqueryType SubqueryExpression_SubqueryType `protobuf:"varint,2,opt,name=subquery_type,json=subqueryType,proto3,enum=spark.connect.SubqueryExpression_SubqueryType" json:"subquery_type,omitempty"`
	// (Optional) Options specific to table arguments.
	TableArgOptions *SubqueryExpression_TableArgOptions `protobuf:"bytes,3,opt,name=table_arg_options,json=tableArgOptions,proto3,oneof" json:"table_arg_options,omitempty"`
	// (Optional) IN subquery values.
	InSubqueryValues []*Expression `protobuf:"bytes,4,rep,name=in_subquery_values,json=inSubqueryValues,proto3" json:"in_subquery_values,omitempty"`
}

func (x *SubqueryExpression) Reset() {
	*x = SubqueryExpression{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[10]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *SubqueryExpression) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*SubqueryExpression) ProtoMessage() {}

func (x *SubqueryExpression) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[10]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use SubqueryExpression.ProtoReflect.Descriptor instead.
func (*SubqueryExpression) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{10}
}

func (x *SubqueryExpression) GetPlanId() int64 {
	if x != nil {
		return x.PlanId
	}
	return 0
}

func (x *SubqueryExpression) GetSubqueryType() SubqueryExpression_SubqueryType {
	if x != nil {
		return x.SubqueryType
	}
	return SubqueryExpression_SUBQUERY_TYPE_UNKNOWN
}

func (x *SubqueryExpression) GetTableArgOptions() *SubqueryExpression_TableArgOptions {
	if x != nil {
		return x.TableArgOptions
	}
	return nil
}

func (x *SubqueryExpression) GetInSubqueryValues() []*Expression {
	if x != nil {
		return x.InSubqueryValues
	}
	return nil
}

// Expression for the OVER clause or WINDOW clause.
type Expression_Window struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The window function.
	WindowFunction *Expression `protobuf:"bytes,1,opt,name=window_function,json=windowFunction,proto3" json:"window_function,omitempty"`
	// (Optional) The way that input rows are partitioned.
	PartitionSpec []*Expression `protobuf:"bytes,2,rep,name=partition_spec,json=partitionSpec,proto3" json:"partition_spec,omitempty"`
	// (Optional) Ordering of rows in a partition.
	OrderSpec []*Expression_SortOrder `protobuf:"bytes,3,rep,name=order_spec,json=orderSpec,proto3" json:"order_spec,omitempty"`
	// (Optional) Window frame in a partition.
	//
	// If not set, it will be treated as 'UnspecifiedFrame'.
	FrameSpec *Expression_Window_WindowFrame `protobuf:"bytes,4,opt,name=frame_spec,json=frameSpec,proto3" json:"frame_spec,omitempty"`
}

func (x *Expression_Window) Reset() {
	*x = Expression_Window{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[11]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Expression_Window) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Expression_Window) ProtoMessage() {}

func (x *Expression_Window) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[11]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Expression_Window.ProtoReflect.Descriptor instead.
func (*Expression_Window) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 0}
}

func (x *Expression_Window) GetWindowFunction() *Expression {
	if x != nil {
		return x.WindowFunction
	}
	return nil
}

func (x *Expression_Window) GetPartitionSpec() []*Expression {
	if x != nil {
		return x.PartitionSpec
	}
	return nil
}

func (x *Expression_Window) GetOrderSpec() []*Expression_SortOrder {
	if x != nil {
		return x.OrderSpec
	}
	return nil
}

func (x *Expression_Window) GetFrameSpec() *Expression_Window_WindowFrame {
	if x != nil {
		return x.FrameSpec
	}
	return nil
}

// SortOrder is used to specify the data ordering, it is normally used in Sort and Window.
// It is an unevaluable expression and cannot be evaluated, so can not be used in Projection.
type Expression_SortOrder struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The expression to be sorted.
	Child *Expression `protobuf:"bytes,1,opt,name=child,proto3" json:"child,omitempty"`
	// (Required) The sort direction, should be ASCENDING or DESCENDING.
	Direction Expression_SortOrder_SortDirection `protobuf:"varint,2,opt,name=direction,proto3,enum=spark.connect.Expression_SortOrder_SortDirection" json:"direction,omitempty"`
	// (Required) How to deal with NULLs, should be NULLS_FIRST or NULLS_LAST.
	NullOrdering Expression_SortOrder_NullOrdering `protobuf:"varint,3,opt,name=null_ordering,json=nullOrdering,proto3,enum=spark.connect.Expression_SortOrder_NullOrdering" json:"null_ordering,omitempty"`
}

func (x *Expression_SortOrder) Reset() {
	*x = Expression_SortOrder{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[12]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Expression_SortOrder) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Expression_SortOrder) ProtoMessage() {}

func (x *Expression_SortOrder) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[12]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Expression_SortOrder.ProtoReflect.Descriptor instead.
func (*Expression_SortOrder) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 1}
}

func (x *Expression_SortOrder) GetChild() *Expression {
	if x != nil {
		return x.Child
	}
	return nil
}

func (x *Expression_SortOrder) GetDirection() Expression_SortOrder_SortDirection {
	if x != nil {
		return x.Direction
	}
	return Expression_SortOrder_SORT_DIRECTION_UNSPECIFIED
}

func (x *Expression_SortOrder) GetNullOrdering() Expression_SortOrder_NullOrdering {
	if x != nil {
		return x.NullOrdering
	}
	return Expression_SortOrder_SORT_NULLS_UNSPECIFIED
}

type Expression_Cast struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) the expression to be casted.
	Expr *Expression `protobuf:"bytes,1,opt,name=expr,proto3" json:"expr,omitempty"`
	// (Required) the data type that the expr to be casted to.
	//
	// Types that are assignable to CastToType:
	//
	//	*Expression_Cast_Type
	//	*Expression_Cast_TypeStr
	CastToType isExpression_Cast_CastToType `protobuf_oneof:"cast_to_type"`
	// (Optional) The expression evaluation mode.
	EvalMode Expression_Cast_EvalMode `protobuf:"varint,4,opt,name=eval_mode,json=evalMode,proto3,enum=spark.connect.Expression_Cast_EvalMode" json:"eval_mode,omitempty"`
}

func (x *Expression_Cast) Reset() {
	*x = Expression_Cast{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[13]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Expression_Cast) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Expression_Cast) ProtoMessage() {}

func (x *Expression_Cast) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[13]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Expression_Cast.ProtoReflect.Descriptor instead.
func (*Expression_Cast) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 2}
}

func (x *Expression_Cast) GetExpr() *Expression {
	if x != nil {
		return x.Expr
	}
	return nil
}

func (m *Expression_Cast) GetCastToType() isExpression_Cast_CastToType {
	if m != nil {
		return m.CastToType
	}
	return nil
}

// GetType returns the target DataType only when the cast_to_type oneof
// currently holds a DataType; otherwise nil.
func (x *Expression_Cast) GetType() *DataType {
	if x, ok := x.GetCastToType().(*Expression_Cast_Type); ok {
		return x.Type
	}
	return ""
}

func (x *Expression_Cast) GetTypeStr() string {
	if x, ok := x.GetCastToType().(*Expression_Cast_TypeStr); ok {
		return x.TypeStr
	}
	return ""
}

func (x *Expression_Cast) GetEvalMode() Expression_Cast_EvalMode {
	if x != nil {
		return x.EvalMode
	}
	return Expression_Cast_EVAL_MODE_UNSPECIFIED
}

// isExpression_Cast_CastToType is the sealed marker interface for the
// cast_to_type oneof.
type isExpression_Cast_CastToType interface {
	isExpression_Cast_CastToType()
}

type Expression_Cast_Type struct {
	Type *DataType `protobuf:"bytes,2,opt,name=type,proto3,oneof"`
}

type Expression_Cast_TypeStr struct {
	// If this is set, Server will use Catalyst parser to parse this string to DataType.
	TypeStr string `protobuf:"bytes,3,opt,name=type_str,json=typeStr,proto3,oneof"`
}

func (*Expression_Cast_Type) isExpression_Cast_CastToType() {}

func (*Expression_Cast_TypeStr) isExpression_Cast_CastToType() {}

type Expression_Literal struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to LiteralType:
	//
	//	*Expression_Literal_Null
	//	*Expression_Literal_Binary
	//	*Expression_Literal_Boolean
	//	*Expression_Literal_Byte
	//	*Expression_Literal_Short
	//	*Expression_Literal_Integer
	//	*Expression_Literal_Long
	//	*Expression_Literal_Float
	//	*Expression_Literal_Double
	//	*Expression_Literal_Decimal_
	//	*Expression_Literal_String_
	//	*Expression_Literal_Date
	//	*Expression_Literal_Timestamp
	//	*Expression_Literal_TimestampNtz
	//	*Expression_Literal_CalendarInterval_
	//	*Expression_Literal_YearMonthInterval
	//	*Expression_Literal_DayTimeInterval
	//	*Expression_Literal_Array_
	//	*Expression_Literal_Map_
	//	*Expression_Literal_Struct_
	//	*Expression_Literal_SpecializedArray_
	LiteralType isExpression_Literal_LiteralType `protobuf_oneof:"literal_type"`
}

func (x *Expression_Literal) Reset() {
	*x = Expression_Literal{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[14]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Expression_Literal) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Expression_Literal) ProtoMessage() {}

func (x *Expression_Literal) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[14]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Expression_Literal.ProtoReflect.Descriptor instead.
func (*Expression_Literal) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 3}
}

func (m *Expression_Literal) GetLiteralType() isExpression_Literal_LiteralType {
	if m != nil {
		return m.LiteralType
	}
	return nil
}

// The Get* accessors below return the oneof's value only when literal_type
// currently holds that variant; otherwise the Go zero value is returned.

func (x *Expression_Literal) GetNull() *DataType {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Null); ok {
		return x.Null
	}
	return nil
}

func (x *Expression_Literal) GetBinary() []byte {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Binary); ok {
		return x.Binary
	}
	return nil
}

func (x *Expression_Literal) GetBoolean() bool {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Boolean); ok {
		return x.Boolean
	}
	return false
}

func (x *Expression_Literal) GetByte() int32 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Byte); ok {
		return x.Byte
	}
	return 0
}

func (x *Expression_Literal) GetShort() int32 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Short); ok {
		return x.Short
	}
	return 0
}

func (x *Expression_Literal) GetInteger() int32 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Integer); ok {
		return x.Integer
	}
	return 0
}

func (x *Expression_Literal) GetLong() int64 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Long); ok {
		return x.Long
	}
	return 0
}

func (x *Expression_Literal) GetFloat() float32 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Float); ok {
		return x.Float
	}
	return 0
}

func (x *Expression_Literal) GetDouble() float64 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Double); ok {
		return x.Double
	}
	return 0
}

func (x *Expression_Literal) GetDecimal() *Expression_Literal_Decimal {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Decimal_); ok {
		return x.Decimal
	}
	return nil
}

func (x *Expression_Literal) GetString_() string {
	if x, ok := x.GetLiteralType().(*Expression_Literal_String_); ok {
		return x.String_
	}
	return ""
}

func (x *Expression_Literal) GetDate() int32 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Date); ok {
		return x.Date
	}
	return 0
}

func (x *Expression_Literal) GetTimestamp() int64 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Timestamp); ok {
		return x.Timestamp
	}
	return 0
}

func (x *Expression_Literal) GetTimestampNtz() int64 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_TimestampNtz); ok {
		return x.TimestampNtz
	}
	return 0
}

func (x *Expression_Literal) GetCalendarInterval() *Expression_Literal_CalendarInterval {
	if x, ok := x.GetLiteralType().(*Expression_Literal_CalendarInterval_); ok {
		return x.CalendarInterval
	}
	return nil
}

func (x *Expression_Literal) GetYearMonthInterval() int32 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_YearMonthInterval); ok {
		return x.YearMonthInterval
	}
	return 0
}

func (x *Expression_Literal) GetDayTimeInterval() int64 {
	if x, ok := x.GetLiteralType().(*Expression_Literal_DayTimeInterval); ok {
		return x.DayTimeInterval
	}
	return 0
}

func (x *Expression_Literal) GetArray() *Expression_Literal_Array {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Array_); ok {
		return x.Array
	}
	return nil
}

func (x *Expression_Literal) GetMap() *Expression_Literal_Map {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Map_); ok {
		return x.Map
	}
	return nil
}

func (x *Expression_Literal) GetStruct() *Expression_Literal_Struct {
	if x, ok := x.GetLiteralType().(*Expression_Literal_Struct_); ok {
		return x.Struct
	}
	return nil
}

func (x *Expression_Literal) GetSpecializedArray() *Expression_Literal_SpecializedArray {
	if x, ok := x.GetLiteralType().(*Expression_Literal_SpecializedArray_); ok {
		return x.SpecializedArray
	}
	return nil
}

// isExpression_Literal_LiteralType is the sealed marker interface for the
// literal_type oneof.
type isExpression_Literal_LiteralType interface {
	isExpression_Literal_LiteralType()
}

type Expression_Literal_Null struct {
	Null *DataType `protobuf:"bytes,1,opt,name=null,proto3,oneof"`
}

type Expression_Literal_Binary struct {
	Binary []byte `protobuf:"bytes,2,opt,name=binary,proto3,oneof"`
}

type Expression_Literal_Boolean struct {
	Boolean bool `protobuf:"varint,3,opt,name=boolean,proto3,oneof"`
}

type Expression_Literal_Byte struct {
	Byte int32 `protobuf:"varint,4,opt,name=byte,proto3,oneof"`
}

type Expression_Literal_Short struct {
	Short int32 `protobuf:"varint,5,opt,name=short,proto3,oneof"`
}

type Expression_Literal_Integer struct {
	Integer int32 `protobuf:"varint,6,opt,name=integer,proto3,oneof"`
}

type Expression_Literal_Long struct {
	Long int64 `protobuf:"varint,7,opt,name=long,proto3,oneof"`
}

type Expression_Literal_Float struct {
	Float float32 `protobuf:"fixed32,10,opt,name=float,proto3,oneof"`
}

type Expression_Literal_Double struct {
	Double float64 `protobuf:"fixed64,11,opt,name=double,proto3,oneof"`
}

type Expression_Literal_Decimal_ struct {
	Decimal *Expression_Literal_Decimal `protobuf:"bytes,12,opt,name=decimal,proto3,oneof"`
}

type Expression_Literal_String_ struct {
	String_ string `protobuf:"bytes,13,opt,name=string,proto3,oneof"`
}

type Expression_Literal_Date struct {
	// Date in units of days since the UNIX epoch.
	Date int32 `protobuf:"varint,16,opt,name=date,proto3,oneof"`
}

type Expression_Literal_Timestamp struct {
	// Timestamp in units of microseconds since the UNIX epoch.
	Timestamp int64 `protobuf:"varint,17,opt,name=timestamp,proto3,oneof"`
}

type Expression_Literal_TimestampNtz struct {
	// Timestamp in units of microseconds since the UNIX epoch (without timezone information).
	TimestampNtz int64 `protobuf:"varint,18,opt,name=timestamp_ntz,json=timestampNtz,proto3,oneof"`
}

type Expression_Literal_CalendarInterval_ struct {
	CalendarInterval *Expression_Literal_CalendarInterval `protobuf:"bytes,19,opt,name=calendar_interval,json=calendarInterval,proto3,oneof"`
}

type Expression_Literal_YearMonthInterval struct {
	YearMonthInterval int32 `protobuf:"varint,20,opt,name=year_month_interval,json=yearMonthInterval,proto3,oneof"`
}

type Expression_Literal_DayTimeInterval struct {
	DayTimeInterval int64 `protobuf:"varint,21,opt,name=day_time_interval,json=dayTimeInterval,proto3,oneof"`
}

type Expression_Literal_Array_ struct {
	Array *Expression_Literal_Array `protobuf:"bytes,22,opt,name=array,proto3,oneof"`
}

type Expression_Literal_Map_ struct {
	Map *Expression_Literal_Map `protobuf:"bytes,23,opt,name=map,proto3,oneof"`
}

type Expression_Literal_Struct_ struct {
	Struct *Expression_Literal_Struct `protobuf:"bytes,24,opt,name=struct,proto3,oneof"`
}

type Expression_Literal_SpecializedArray_ struct {
	SpecializedArray *Expression_Literal_SpecializedArray `protobuf:"bytes,25,opt,name=specialized_array,json=specializedArray,proto3,oneof"`
}

func (*Expression_Literal_Null) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Binary) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Boolean) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Byte) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Short) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Integer) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Long) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Float) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Double) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Decimal_) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_String_) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Date) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Timestamp) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_TimestampNtz) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_CalendarInterval_) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_YearMonthInterval) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_DayTimeInterval) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Array_) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Map_) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_Struct_) isExpression_Literal_LiteralType() {}

func (*Expression_Literal_SpecializedArray_) isExpression_Literal_LiteralType() {}

// An unresolved attribute that is not explicitly bound to a specific column, but the column
// is resolved during analysis by name.
type Expression_UnresolvedAttribute struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) An identifier that will be parsed by Catalyst parser. This should follow the
	// Spark SQL identifier syntax.
	UnparsedIdentifier string `protobuf:"bytes,1,opt,name=unparsed_identifier,json=unparsedIdentifier,proto3" json:"unparsed_identifier,omitempty"`
	// (Optional) The id of corresponding connect plan.
	PlanId *int64 `protobuf:"varint,2,opt,name=plan_id,json=planId,proto3,oneof" json:"plan_id,omitempty"`
	// (Optional) The requested column is a metadata column.
	IsMetadataColumn *bool `protobuf:"varint,3,opt,name=is_metadata_column,json=isMetadataColumn,proto3,oneof" json:"is_metadata_column,omitempty"`
}

func (x *Expression_UnresolvedAttribute) Reset() {
	*x = Expression_UnresolvedAttribute{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[15]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Expression_UnresolvedAttribute) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Expression_UnresolvedAttribute) ProtoMessage() {}

func (x *Expression_UnresolvedAttribute) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[15]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Expression_UnresolvedAttribute.ProtoReflect.Descriptor instead.
func (*Expression_UnresolvedAttribute) Descriptor() ([]byte, []int) {
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 4}
}

func (x *Expression_UnresolvedAttribute) GetUnparsedIdentifier() string {
	if x != nil {
		return x.UnparsedIdentifier
	}
	return ""
}

// GetPlanId dereferences the optional field; it returns 0 when unset.
func (x *Expression_UnresolvedAttribute) GetPlanId() int64 {
	if x != nil && x.PlanId != nil {
		return *x.PlanId
	}
	return 0
}

// GetIsMetadataColumn dereferences the optional field; it returns false when unset.
func (x *Expression_UnresolvedAttribute) GetIsMetadataColumn() bool {
	if x != nil && x.IsMetadataColumn != nil {
		return *x.IsMetadataColumn
	}
	return false
}

// An unresolved function is not explicitly bound to one explicit function, but the function
// is resolved during analysis following Sparks name resolution rules.
type Expression_UnresolvedFunction struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) name (or unparsed name for user defined function) for the unresolved function.
FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` // (Optional) Function arguments. Empty arguments are allowed. Arguments []*Expression `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"` // (Required) Indicate if this function should be applied on distinct values. IsDistinct bool `protobuf:"varint,3,opt,name=is_distinct,json=isDistinct,proto3" json:"is_distinct,omitempty"` // (Required) Indicate if this is a user defined function. // // When it is not a user defined function, Connect will use the function name directly. // When it is a user defined function, Connect will parse the function name first. IsUserDefinedFunction bool `protobuf:"varint,4,opt,name=is_user_defined_function,json=isUserDefinedFunction,proto3" json:"is_user_defined_function,omitempty"` // (Optional) Indicate if this function is defined in the internal function registry. // If not set, the server will try to look up the function in the internal function registry // and decide appropriately. 
IsInternal *bool `protobuf:"varint,5,opt,name=is_internal,json=isInternal,proto3,oneof" json:"is_internal,omitempty"` } func (x *Expression_UnresolvedFunction) Reset() { *x = Expression_UnresolvedFunction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_UnresolvedFunction) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_UnresolvedFunction) ProtoMessage() {} func (x *Expression_UnresolvedFunction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_UnresolvedFunction.ProtoReflect.Descriptor instead. func (*Expression_UnresolvedFunction) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 5} } func (x *Expression_UnresolvedFunction) GetFunctionName() string { if x != nil { return x.FunctionName } return "" } func (x *Expression_UnresolvedFunction) GetArguments() []*Expression { if x != nil { return x.Arguments } return nil } func (x *Expression_UnresolvedFunction) GetIsDistinct() bool { if x != nil { return x.IsDistinct } return false } func (x *Expression_UnresolvedFunction) GetIsUserDefinedFunction() bool { if x != nil { return x.IsUserDefinedFunction } return false } func (x *Expression_UnresolvedFunction) GetIsInternal() bool { if x != nil && x.IsInternal != nil { return *x.IsInternal } return false } // Expression as string. type Expression_ExpressionString struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) A SQL expression that will be parsed by Catalyst parser. 
Expression string `protobuf:"bytes,1,opt,name=expression,proto3" json:"expression,omitempty"` } func (x *Expression_ExpressionString) Reset() { *x = Expression_ExpressionString{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_ExpressionString) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_ExpressionString) ProtoMessage() {} func (x *Expression_ExpressionString) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_ExpressionString.ProtoReflect.Descriptor instead. func (*Expression_ExpressionString) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 6} } func (x *Expression_ExpressionString) GetExpression() string { if x != nil { return x.Expression } return "" } // UnresolvedStar is used to expand all the fields of a relation or struct. type Expression_UnresolvedStar struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) The target of the expansion. // // If set, it should end with '.*' and will be parsed by 'parseAttributeName' // in the server side. UnparsedTarget *string `protobuf:"bytes,1,opt,name=unparsed_target,json=unparsedTarget,proto3,oneof" json:"unparsed_target,omitempty"` // (Optional) The id of corresponding connect plan. 
PlanId *int64 `protobuf:"varint,2,opt,name=plan_id,json=planId,proto3,oneof" json:"plan_id,omitempty"` } func (x *Expression_UnresolvedStar) Reset() { *x = Expression_UnresolvedStar{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_UnresolvedStar) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_UnresolvedStar) ProtoMessage() {} func (x *Expression_UnresolvedStar) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_UnresolvedStar.ProtoReflect.Descriptor instead. func (*Expression_UnresolvedStar) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 7} } func (x *Expression_UnresolvedStar) GetUnparsedTarget() string { if x != nil && x.UnparsedTarget != nil { return *x.UnparsedTarget } return "" } func (x *Expression_UnresolvedStar) GetPlanId() int64 { if x != nil && x.PlanId != nil { return *x.PlanId } return 0 } // Represents all of the input attributes to a given relational operator, for example in // "SELECT `(id)?+.+` FROM ...". type Expression_UnresolvedRegex struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The column name used to extract column with regex. ColName string `protobuf:"bytes,1,opt,name=col_name,json=colName,proto3" json:"col_name,omitempty"` // (Optional) The id of corresponding connect plan. 
PlanId *int64 `protobuf:"varint,2,opt,name=plan_id,json=planId,proto3,oneof" json:"plan_id,omitempty"` } func (x *Expression_UnresolvedRegex) Reset() { *x = Expression_UnresolvedRegex{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_UnresolvedRegex) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_UnresolvedRegex) ProtoMessage() {} func (x *Expression_UnresolvedRegex) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_UnresolvedRegex.ProtoReflect.Descriptor instead. func (*Expression_UnresolvedRegex) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 8} } func (x *Expression_UnresolvedRegex) GetColName() string { if x != nil { return x.ColName } return "" } func (x *Expression_UnresolvedRegex) GetPlanId() int64 { if x != nil && x.PlanId != nil { return *x.PlanId } return 0 } // Extracts a value or values from an Expression type Expression_UnresolvedExtractValue struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The expression to extract value from, can be // Map, Array, Struct or array of Structs. Child *Expression `protobuf:"bytes,1,opt,name=child,proto3" json:"child,omitempty"` // (Required) The expression to describe the extraction, can be // key of Map, index of Array, field name of Struct. 
Extraction *Expression `protobuf:"bytes,2,opt,name=extraction,proto3" json:"extraction,omitempty"` } func (x *Expression_UnresolvedExtractValue) Reset() { *x = Expression_UnresolvedExtractValue{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_UnresolvedExtractValue) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_UnresolvedExtractValue) ProtoMessage() {} func (x *Expression_UnresolvedExtractValue) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_UnresolvedExtractValue.ProtoReflect.Descriptor instead. func (*Expression_UnresolvedExtractValue) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 9} } func (x *Expression_UnresolvedExtractValue) GetChild() *Expression { if x != nil { return x.Child } return nil } func (x *Expression_UnresolvedExtractValue) GetExtraction() *Expression { if x != nil { return x.Extraction } return nil } // Add, replace or drop a field of `StructType` expression by name. type Expression_UpdateFields struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The struct expression. StructExpression *Expression `protobuf:"bytes,1,opt,name=struct_expression,json=structExpression,proto3" json:"struct_expression,omitempty"` // (Required) The field name. FieldName string `protobuf:"bytes,2,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"` // (Optional) The expression to add or replace. // // When not set, it means this field will be dropped. 
ValueExpression *Expression `protobuf:"bytes,3,opt,name=value_expression,json=valueExpression,proto3" json:"value_expression,omitempty"` } func (x *Expression_UpdateFields) Reset() { *x = Expression_UpdateFields{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_UpdateFields) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_UpdateFields) ProtoMessage() {} func (x *Expression_UpdateFields) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_UpdateFields.ProtoReflect.Descriptor instead. func (*Expression_UpdateFields) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 10} } func (x *Expression_UpdateFields) GetStructExpression() *Expression { if x != nil { return x.StructExpression } return nil } func (x *Expression_UpdateFields) GetFieldName() string { if x != nil { return x.FieldName } return "" } func (x *Expression_UpdateFields) GetValueExpression() *Expression { if x != nil { return x.ValueExpression } return nil } type Expression_Alias struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The expression that alias will be added on. Expr *Expression `protobuf:"bytes,1,opt,name=expr,proto3" json:"expr,omitempty"` // (Required) a list of name parts for the alias. // // Scalar columns only has one name that presents. Name []string `protobuf:"bytes,2,rep,name=name,proto3" json:"name,omitempty"` // (Optional) Alias metadata expressed as a JSON map. 
Metadata *string `protobuf:"bytes,3,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` } func (x *Expression_Alias) Reset() { *x = Expression_Alias{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_Alias) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_Alias) ProtoMessage() {} func (x *Expression_Alias) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_Alias.ProtoReflect.Descriptor instead. func (*Expression_Alias) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 11} } func (x *Expression_Alias) GetExpr() *Expression { if x != nil { return x.Expr } return nil } func (x *Expression_Alias) GetName() []string { if x != nil { return x.Name } return nil } func (x *Expression_Alias) GetMetadata() string { if x != nil && x.Metadata != nil { return *x.Metadata } return "" } type Expression_LambdaFunction struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The lambda function. // // The function body should use 'UnresolvedAttribute' as arguments, the sever side will // replace 'UnresolvedAttribute' with 'UnresolvedNamedLambdaVariable'. Function *Expression `protobuf:"bytes,1,opt,name=function,proto3" json:"function,omitempty"` // (Required) Function variables. Must contains 1 ~ 3 variables. 
Arguments []*Expression_UnresolvedNamedLambdaVariable `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"` } func (x *Expression_LambdaFunction) Reset() { *x = Expression_LambdaFunction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_LambdaFunction) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_LambdaFunction) ProtoMessage() {} func (x *Expression_LambdaFunction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_LambdaFunction.ProtoReflect.Descriptor instead. func (*Expression_LambdaFunction) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 12} } func (x *Expression_LambdaFunction) GetFunction() *Expression { if x != nil { return x.Function } return nil } func (x *Expression_LambdaFunction) GetArguments() []*Expression_UnresolvedNamedLambdaVariable { if x != nil { return x.Arguments } return nil } type Expression_UnresolvedNamedLambdaVariable struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) a list of name parts for the variable. Must not be empty. 
NameParts []string `protobuf:"bytes,1,rep,name=name_parts,json=nameParts,proto3" json:"name_parts,omitempty"` } func (x *Expression_UnresolvedNamedLambdaVariable) Reset() { *x = Expression_UnresolvedNamedLambdaVariable{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_UnresolvedNamedLambdaVariable) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_UnresolvedNamedLambdaVariable) ProtoMessage() {} func (x *Expression_UnresolvedNamedLambdaVariable) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_UnresolvedNamedLambdaVariable.ProtoReflect.Descriptor instead. func (*Expression_UnresolvedNamedLambdaVariable) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 13} } func (x *Expression_UnresolvedNamedLambdaVariable) GetNameParts() []string { if x != nil { return x.NameParts } return nil } // The window frame type Expression_Window_WindowFrame struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The type of the frame. FrameType Expression_Window_WindowFrame_FrameType `protobuf:"varint,1,opt,name=frame_type,json=frameType,proto3,enum=spark.connect.Expression_Window_WindowFrame_FrameType" json:"frame_type,omitempty"` // (Required) The lower bound of the frame. Lower *Expression_Window_WindowFrame_FrameBoundary `protobuf:"bytes,2,opt,name=lower,proto3" json:"lower,omitempty"` // (Required) The upper bound of the frame. 
Upper *Expression_Window_WindowFrame_FrameBoundary `protobuf:"bytes,3,opt,name=upper,proto3" json:"upper,omitempty"` } func (x *Expression_Window_WindowFrame) Reset() { *x = Expression_Window_WindowFrame{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_Window_WindowFrame) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_Window_WindowFrame) ProtoMessage() {} func (x *Expression_Window_WindowFrame) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_Window_WindowFrame.ProtoReflect.Descriptor instead. func (*Expression_Window_WindowFrame) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 0, 0} } func (x *Expression_Window_WindowFrame) GetFrameType() Expression_Window_WindowFrame_FrameType { if x != nil { return x.FrameType } return Expression_Window_WindowFrame_FRAME_TYPE_UNDEFINED } func (x *Expression_Window_WindowFrame) GetLower() *Expression_Window_WindowFrame_FrameBoundary { if x != nil { return x.Lower } return nil } func (x *Expression_Window_WindowFrame) GetUpper() *Expression_Window_WindowFrame_FrameBoundary { if x != nil { return x.Upper } return nil } type Expression_Window_WindowFrame_FrameBoundary struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to Boundary: // // *Expression_Window_WindowFrame_FrameBoundary_CurrentRow // *Expression_Window_WindowFrame_FrameBoundary_Unbounded // *Expression_Window_WindowFrame_FrameBoundary_Value Boundary isExpression_Window_WindowFrame_FrameBoundary_Boundary 
`protobuf_oneof:"boundary"` } func (x *Expression_Window_WindowFrame_FrameBoundary) Reset() { *x = Expression_Window_WindowFrame_FrameBoundary{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_Window_WindowFrame_FrameBoundary) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_Window_WindowFrame_FrameBoundary) ProtoMessage() {} func (x *Expression_Window_WindowFrame_FrameBoundary) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_Window_WindowFrame_FrameBoundary.ProtoReflect.Descriptor instead. func (*Expression_Window_WindowFrame_FrameBoundary) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 0, 0, 0} } func (m *Expression_Window_WindowFrame_FrameBoundary) GetBoundary() isExpression_Window_WindowFrame_FrameBoundary_Boundary { if m != nil { return m.Boundary } return nil } func (x *Expression_Window_WindowFrame_FrameBoundary) GetCurrentRow() bool { if x, ok := x.GetBoundary().(*Expression_Window_WindowFrame_FrameBoundary_CurrentRow); ok { return x.CurrentRow } return false } func (x *Expression_Window_WindowFrame_FrameBoundary) GetUnbounded() bool { if x, ok := x.GetBoundary().(*Expression_Window_WindowFrame_FrameBoundary_Unbounded); ok { return x.Unbounded } return false } func (x *Expression_Window_WindowFrame_FrameBoundary) GetValue() *Expression { if x, ok := x.GetBoundary().(*Expression_Window_WindowFrame_FrameBoundary_Value); ok { return x.Value } return nil } type isExpression_Window_WindowFrame_FrameBoundary_Boundary interface { 
isExpression_Window_WindowFrame_FrameBoundary_Boundary() } type Expression_Window_WindowFrame_FrameBoundary_CurrentRow struct { // CURRENT ROW boundary CurrentRow bool `protobuf:"varint,1,opt,name=current_row,json=currentRow,proto3,oneof"` } type Expression_Window_WindowFrame_FrameBoundary_Unbounded struct { // UNBOUNDED boundary. // For lower bound, it will be converted to 'UnboundedPreceding'. // for upper bound, it will be converted to 'UnboundedFollowing'. Unbounded bool `protobuf:"varint,2,opt,name=unbounded,proto3,oneof"` } type Expression_Window_WindowFrame_FrameBoundary_Value struct { // This is an expression for future proofing. We are expecting literals on the server side. Value *Expression `protobuf:"bytes,3,opt,name=value,proto3,oneof"` } func (*Expression_Window_WindowFrame_FrameBoundary_CurrentRow) isExpression_Window_WindowFrame_FrameBoundary_Boundary() { } func (*Expression_Window_WindowFrame_FrameBoundary_Unbounded) isExpression_Window_WindowFrame_FrameBoundary_Boundary() { } func (*Expression_Window_WindowFrame_FrameBoundary_Value) isExpression_Window_WindowFrame_FrameBoundary_Boundary() { } type Expression_Literal_Decimal struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // the string representation. Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` // The maximum number of digits allowed in the value. // the maximum precision is 38. 
Precision *int32 `protobuf:"varint,2,opt,name=precision,proto3,oneof" json:"precision,omitempty"` // declared scale of decimal literal Scale *int32 `protobuf:"varint,3,opt,name=scale,proto3,oneof" json:"scale,omitempty"` } func (x *Expression_Literal_Decimal) Reset() { *x = Expression_Literal_Decimal{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_Literal_Decimal) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_Literal_Decimal) ProtoMessage() {} func (x *Expression_Literal_Decimal) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[27] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_Literal_Decimal.ProtoReflect.Descriptor instead. 
func (*Expression_Literal_Decimal) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 3, 0} } func (x *Expression_Literal_Decimal) GetValue() string { if x != nil { return x.Value } return "" } func (x *Expression_Literal_Decimal) GetPrecision() int32 { if x != nil && x.Precision != nil { return *x.Precision } return 0 } func (x *Expression_Literal_Decimal) GetScale() int32 { if x != nil && x.Scale != nil { return *x.Scale } return 0 } type Expression_Literal_CalendarInterval struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Months int32 `protobuf:"varint,1,opt,name=months,proto3" json:"months,omitempty"` Days int32 `protobuf:"varint,2,opt,name=days,proto3" json:"days,omitempty"` Microseconds int64 `protobuf:"varint,3,opt,name=microseconds,proto3" json:"microseconds,omitempty"` } func (x *Expression_Literal_CalendarInterval) Reset() { *x = Expression_Literal_CalendarInterval{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[28] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_Literal_CalendarInterval) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_Literal_CalendarInterval) ProtoMessage() {} func (x *Expression_Literal_CalendarInterval) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[28] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_Literal_CalendarInterval.ProtoReflect.Descriptor instead. 
func (*Expression_Literal_CalendarInterval) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 3, 1} } func (x *Expression_Literal_CalendarInterval) GetMonths() int32 { if x != nil { return x.Months } return 0 } func (x *Expression_Literal_CalendarInterval) GetDays() int32 { if x != nil { return x.Days } return 0 } func (x *Expression_Literal_CalendarInterval) GetMicroseconds() int64 { if x != nil { return x.Microseconds } return 0 } type Expression_Literal_Array struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields ElementType *DataType `protobuf:"bytes,1,opt,name=element_type,json=elementType,proto3" json:"element_type,omitempty"` Elements []*Expression_Literal `protobuf:"bytes,2,rep,name=elements,proto3" json:"elements,omitempty"` } func (x *Expression_Literal_Array) Reset() { *x = Expression_Literal_Array{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[29] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_Literal_Array) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_Literal_Array) ProtoMessage() {} func (x *Expression_Literal_Array) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[29] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_Literal_Array.ProtoReflect.Descriptor instead. 
func (*Expression_Literal_Array) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 3, 2} } func (x *Expression_Literal_Array) GetElementType() *DataType { if x != nil { return x.ElementType } return nil } func (x *Expression_Literal_Array) GetElements() []*Expression_Literal { if x != nil { return x.Elements } return nil } type Expression_Literal_Map struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields KeyType *DataType `protobuf:"bytes,1,opt,name=key_type,json=keyType,proto3" json:"key_type,omitempty"` ValueType *DataType `protobuf:"bytes,2,opt,name=value_type,json=valueType,proto3" json:"value_type,omitempty"` Keys []*Expression_Literal `protobuf:"bytes,3,rep,name=keys,proto3" json:"keys,omitempty"` Values []*Expression_Literal `protobuf:"bytes,4,rep,name=values,proto3" json:"values,omitempty"` } func (x *Expression_Literal_Map) Reset() { *x = Expression_Literal_Map{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_expressions_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Expression_Literal_Map) String() string { return protoimpl.X.MessageStringOf(x) } func (*Expression_Literal_Map) ProtoMessage() {} func (x *Expression_Literal_Map) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_expressions_proto_msgTypes[30] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Expression_Literal_Map.ProtoReflect.Descriptor instead. 
func (*Expression_Literal_Map) Descriptor() ([]byte, []int) {
	// Index path {0, 3, 3}: nested position of Map inside the file descriptor.
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 3, 3}
}

// GetKeyType returns the map key data type, or nil if the receiver is nil.
func (x *Expression_Literal_Map) GetKeyType() *DataType {
	if x != nil {
		return x.KeyType
	}
	return nil
}

// GetValueType returns the map value data type, or nil if the receiver is nil.
func (x *Expression_Literal_Map) GetValueType() *DataType {
	if x != nil {
		return x.ValueType
	}
	return nil
}

// GetKeys returns the literal keys, or nil if the receiver is nil.
func (x *Expression_Literal_Map) GetKeys() []*Expression_Literal {
	if x != nil {
		return x.Keys
	}
	return nil
}

// GetValues returns the literal values, or nil if the receiver is nil.
func (x *Expression_Literal_Map) GetValues() []*Expression_Literal {
	if x != nil {
		return x.Values
	}
	return nil
}

// Expression_Literal_Struct is a struct literal: the struct data type plus the
// literal values of its fields.
type Expression_Literal_Struct struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	StructType *DataType             `protobuf:"bytes,1,opt,name=struct_type,json=structType,proto3" json:"struct_type,omitempty"`
	Elements   []*Expression_Literal `protobuf:"bytes,2,rep,name=elements,proto3" json:"elements,omitempty"`
}

// Reset clears the message to its zero state; with the unsafe fast path
// enabled it also re-attaches the cached message info.
func (x *Expression_Literal_Struct) Reset() {
	*x = Expression_Literal_Struct{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[31]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the standard protobuf text format.
func (x *Expression_Literal_Struct) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Expression_Literal_Struct) ProtoMessage() {}

// ProtoReflect returns the reflection view of the message, lazily storing the
// message info on first use when the unsafe fast path is enabled.
func (x *Expression_Literal_Struct) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[31]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Expression_Literal_Struct.ProtoReflect.Descriptor instead.
func (*Expression_Literal_Struct) Descriptor() ([]byte, []int) {
	// Index path {0, 3, 4}: nested position of Struct inside the file descriptor.
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 3, 4}
}

// GetStructType returns the struct data type, or nil if the receiver is nil.
func (x *Expression_Literal_Struct) GetStructType() *DataType {
	if x != nil {
		return x.StructType
	}
	return nil
}

// GetElements returns the literal field values, or nil if the receiver is nil.
func (x *Expression_Literal_Struct) GetElements() []*Expression_Literal {
	if x != nil {
		return x.Elements
	}
	return nil
}

// Expression_Literal_SpecializedArray is an array literal specialized to one
// primitive element kind, carried as a oneof over typed value lists.
type Expression_Literal_SpecializedArray struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to ValueType:
	//
	//	*Expression_Literal_SpecializedArray_Bools
	//	*Expression_Literal_SpecializedArray_Ints
	//	*Expression_Literal_SpecializedArray_Longs
	//	*Expression_Literal_SpecializedArray_Floats
	//	*Expression_Literal_SpecializedArray_Doubles
	//	*Expression_Literal_SpecializedArray_Strings
	ValueType isExpression_Literal_SpecializedArray_ValueType `protobuf_oneof:"value_type"`
}

// Reset clears the message to its zero state; with the unsafe fast path
// enabled it also re-attaches the cached message info.
func (x *Expression_Literal_SpecializedArray) Reset() {
	*x = Expression_Literal_SpecializedArray{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[32]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the standard protobuf text format.
func (x *Expression_Literal_SpecializedArray) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Expression_Literal_SpecializedArray) ProtoMessage() {}

// ProtoReflect returns the reflection view of the message, lazily storing the
// message info on first use when the unsafe fast path is enabled.
func (x *Expression_Literal_SpecializedArray) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[32]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Expression_Literal_SpecializedArray.ProtoReflect.Descriptor instead.
func (*Expression_Literal_SpecializedArray) Descriptor() ([]byte, []int) {
	// Index path {0, 3, 5}: nested position of SpecializedArray inside the file descriptor.
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{0, 3, 5}
}

// GetValueType returns the populated oneof wrapper, or nil if the receiver is
// nil or no case is set.
func (m *Expression_Literal_SpecializedArray) GetValueType() isExpression_Literal_SpecializedArray_ValueType {
	if m != nil {
		return m.ValueType
	}
	return nil
}

// GetBools returns the bools case, or nil when a different oneof case is set.
func (x *Expression_Literal_SpecializedArray) GetBools() *Bools {
	if x, ok := x.GetValueType().(*Expression_Literal_SpecializedArray_Bools); ok {
		return x.Bools
	}
	return nil
}

// GetInts returns the ints case, or nil when a different oneof case is set.
func (x *Expression_Literal_SpecializedArray) GetInts() *Ints {
	if x, ok := x.GetValueType().(*Expression_Literal_SpecializedArray_Ints); ok {
		return x.Ints
	}
	return nil
}

// GetLongs returns the longs case, or nil when a different oneof case is set.
func (x *Expression_Literal_SpecializedArray) GetLongs() *Longs {
	if x, ok := x.GetValueType().(*Expression_Literal_SpecializedArray_Longs); ok {
		return x.Longs
	}
	return nil
}

// GetFloats returns the floats case, or nil when a different oneof case is set.
func (x *Expression_Literal_SpecializedArray) GetFloats() *Floats {
	if x, ok := x.GetValueType().(*Expression_Literal_SpecializedArray_Floats); ok {
		return x.Floats
	}
	return nil
}

// GetDoubles returns the doubles case, or nil when a different oneof case is set.
func (x *Expression_Literal_SpecializedArray) GetDoubles() *Doubles {
	if x, ok := x.GetValueType().(*Expression_Literal_SpecializedArray_Doubles); ok {
		return x.Doubles
	}
	return nil
}

// GetStrings returns the strings case, or nil when a different oneof case is set.
func (x *Expression_Literal_SpecializedArray) GetStrings() *Strings {
	if x, ok := x.GetValueType().(*Expression_Literal_SpecializedArray_Strings); ok {
		return x.Strings
	}
	return nil
}

// isExpression_Literal_SpecializedArray_ValueType is the sealed interface
// implemented by every value_type oneof wrapper below.
type isExpression_Literal_SpecializedArray_ValueType interface {
	isExpression_Literal_SpecializedArray_ValueType()
}

type Expression_Literal_SpecializedArray_Bools struct {
	Bools *Bools `protobuf:"bytes,1,opt,name=bools,proto3,oneof"`
}

type Expression_Literal_SpecializedArray_Ints struct {
	Ints *Ints `protobuf:"bytes,2,opt,name=ints,proto3,oneof"`
}

type Expression_Literal_SpecializedArray_Longs struct {
	Longs *Longs `protobuf:"bytes,3,opt,name=longs,proto3,oneof"`
}

type Expression_Literal_SpecializedArray_Floats struct {
	Floats *Floats `protobuf:"bytes,4,opt,name=floats,proto3,oneof"`
}

type Expression_Literal_SpecializedArray_Doubles struct {
	Doubles *Doubles `protobuf:"bytes,5,opt,name=doubles,proto3,oneof"`
}

type Expression_Literal_SpecializedArray_Strings struct {
	Strings *Strings `protobuf:"bytes,6,opt,name=strings,proto3,oneof"`
}

// Marker methods sealing the wrappers into the oneof interface.
func (*Expression_Literal_SpecializedArray_Bools) isExpression_Literal_SpecializedArray_ValueType() {}

func (*Expression_Literal_SpecializedArray_Ints) isExpression_Literal_SpecializedArray_ValueType() {}

func (*Expression_Literal_SpecializedArray_Longs) isExpression_Literal_SpecializedArray_ValueType() {}

func (*Expression_Literal_SpecializedArray_Floats) isExpression_Literal_SpecializedArray_ValueType() {
}

func (*Expression_Literal_SpecializedArray_Doubles) isExpression_Literal_SpecializedArray_ValueType() {
}

func (*Expression_Literal_SpecializedArray_Strings) isExpression_Literal_SpecializedArray_ValueType() {
}

// MergeAction_Assignment is a single key = value assignment within a merge action.
type MergeAction_Assignment struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The key of the assignment.
	Key *Expression `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
	// (Required) The value of the assignment.
	Value *Expression `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}

// Reset clears the message to its zero state; with the unsafe fast path
// enabled it also re-attaches the cached message info.
func (x *MergeAction_Assignment) Reset() {
	*x = MergeAction_Assignment{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[33]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the standard protobuf text format.
func (x *MergeAction_Assignment) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MergeAction_Assignment) ProtoMessage() {}

// ProtoReflect returns the reflection view of the message, lazily storing the
// message info on first use when the unsafe fast path is enabled.
func (x *MergeAction_Assignment) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[33]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MergeAction_Assignment.ProtoReflect.Descriptor instead.
func (*MergeAction_Assignment) Descriptor() ([]byte, []int) {
	// Index path {9, 0}: first nested message of MergeAction in the file descriptor.
	return file_spark_connect_expressions_proto_rawDescGZIP(), []int{9, 0}
}

// GetKey returns the assignment key, or nil if the receiver is nil.
func (x *MergeAction_Assignment) GetKey() *Expression {
	if x != nil {
		return x.Key
	}
	return nil
}

// GetValue returns the assignment value, or nil if the receiver is nil.
func (x *MergeAction_Assignment) GetValue() *Expression {
	if x != nil {
		return x.Value
	}
	return nil
}

// Nested message for table argument options.
type SubqueryExpression_TableArgOptions struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Optional) The way that input rows are partitioned.
	PartitionSpec []*Expression `protobuf:"bytes,1,rep,name=partition_spec,json=partitionSpec,proto3" json:"partition_spec,omitempty"`
	// (Optional) Ordering of rows in a partition.
	OrderSpec []*Expression_SortOrder `protobuf:"bytes,2,rep,name=order_spec,json=orderSpec,proto3" json:"order_spec,omitempty"`
	// (Optional) Whether this is a single partition.
	WithSinglePartition *bool `protobuf:"varint,3,opt,name=with_single_partition,json=withSinglePartition,proto3,oneof" json:"with_single_partition,omitempty"`
}

// Reset clears the message to its zero state; with the unsafe fast path
// enabled it also re-attaches the cached message info.
func (x *SubqueryExpression_TableArgOptions) Reset() {
	*x = SubqueryExpression_TableArgOptions{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_expressions_proto_msgTypes[34]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the standard protobuf text format.
func (x *SubqueryExpression_TableArgOptions) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*SubqueryExpression_TableArgOptions) ProtoMessage() {}

// ProtoReflect returns the reflection view of the message, lazily storing the
// message info on first use when the unsafe fast path is enabled.
func (x *SubqueryExpression_TableArgOptions) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_expressions_proto_msgTypes[34]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use SubqueryExpression_TableArgOptions.ProtoReflect.Descriptor instead.
func (*SubqueryExpression_TableArgOptions) Descriptor() ([]byte, []int) { return file_spark_connect_expressions_proto_rawDescGZIP(), []int{10, 0} } func (x *SubqueryExpression_TableArgOptions) GetPartitionSpec() []*Expression { if x != nil { return x.PartitionSpec } return nil } func (x *SubqueryExpression_TableArgOptions) GetOrderSpec() []*Expression_SortOrder { if x != nil { return x.OrderSpec } return nil } func (x *SubqueryExpression_TableArgOptions) GetWithSinglePartition() bool { if x != nil && x.WithSinglePartition != nil { return *x.WithSinglePartition } return false } var File_spark_connect_expressions_proto protoreflect.FileDescriptor var file_spark_connect_expressions_proto_rawDesc = []byte{ 0x0a, 0x1f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1a, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xf3, 0x34, 0x0a, 0x0a, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x37, 0x0a, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x52, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x12, 0x3d, 0x0a, 0x07, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x18, 
0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x07, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x12, 0x62, 0x0a, 0x14, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x48, 0x00, 0x52, 0x13, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x12, 0x5f, 0x0a, 0x13, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x59, 0x0a, 0x11, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x10, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 
0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x53, 0x0a, 0x0f, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x72, 0x12, 0x37, 0x0a, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x48, 0x00, 0x52, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x12, 0x34, 0x0a, 0x04, 0x63, 0x61, 0x73, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x63, 0x61, 0x73, 0x74, 0x12, 0x56, 0x0a, 0x10, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x67, 0x65, 0x78, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x52, 0x65, 0x67, 0x65, 0x78, 0x48, 0x00, 0x52, 0x0f, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x52, 0x65, 0x67, 0x65, 0x78, 0x12, 0x44, 0x0a, 0x0a, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 
0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x48, 0x00, 0x52, 0x09, 0x73, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x53, 0x0a, 0x0f, 0x6c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0e, 0x6c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3a, 0x0a, 0x06, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x48, 0x00, 0x52, 0x06, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x12, 0x6c, 0x0a, 0x18, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x65, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x45, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, 0x16, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x45, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x4d, 0x0a, 0x0d, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 
0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x48, 0x00, 0x52, 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x12, 0x82, 0x01, 0x0a, 0x20, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x5f, 0x6c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x4c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x1d, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x4c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x7e, 0x0a, 0x23, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x1f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x42, 0x0a, 0x0d, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x6c, 0x6c, 
0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0c, 0x63, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x64, 0x0a, 0x19, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x17, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x0c, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0b, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x67, 0x0a, 0x1a, 0x74, 0x79, 0x70, 0x65, 0x64, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x64, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x18, 0x74, 0x79, 0x70, 0x65, 0x64, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x54, 0x0a, 0x13, 0x73, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 
0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, 0x73, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x8f, 0x06, 0x0a, 0x06, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x12, 0x42, 0x0a, 0x0f, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x0e, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x12, 0x42, 0x0a, 0x0a, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x09, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x4b, 0x0a, 0x0a, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x04, 
0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x52, 0x09, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x53, 0x70, 0x65, 0x63, 0x1a, 0xed, 0x03, 0x0a, 0x0b, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x12, 0x55, 0x0a, 0x0a, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x36, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x2e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x50, 0x0a, 0x05, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x2e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x52, 0x05, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x12, 0x50, 0x0a, 0x05, 0x75, 0x70, 0x70, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x2e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x52, 0x05, 0x75, 0x70, 0x70, 0x65, 0x72, 0x1a, 0x91, 0x01, 0x0a, 0x0d, 
0x46, 0x72, 0x61, 0x6d, 0x65, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x77, 0x12, 0x1e, 0x0a, 0x09, 0x75, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x75, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x65, 0x64, 0x12, 0x31, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0a, 0x0a, 0x08, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x22, 0x4f, 0x0a, 0x09, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x14, 0x46, 0x52, 0x41, 0x4d, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x44, 0x45, 0x46, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x46, 0x52, 0x41, 0x4d, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x52, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x14, 0x0a, 0x10, 0x46, 0x52, 0x41, 0x4d, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x52, 0x41, 0x4e, 0x47, 0x45, 0x10, 0x02, 0x1a, 0xa9, 0x03, 0x0a, 0x09, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x2f, 0x0a, 0x05, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x12, 0x4f, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 
0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x55, 0x0a, 0x0d, 0x6e, 0x75, 0x6c, 0x6c, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x4e, 0x75, 0x6c, 0x6c, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x0c, 0x6e, 0x75, 0x6c, 0x6c, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x6c, 0x0a, 0x0d, 0x53, 0x6f, 0x72, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x0a, 0x1a, 0x53, 0x4f, 0x52, 0x54, 0x5f, 0x44, 0x49, 0x52, 0x45, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x1c, 0x0a, 0x18, 0x53, 0x4f, 0x52, 0x54, 0x5f, 0x44, 0x49, 0x52, 0x45, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x41, 0x53, 0x43, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x4f, 0x52, 0x54, 0x5f, 0x44, 0x49, 0x52, 0x45, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x44, 0x45, 0x53, 0x43, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x22, 0x55, 0x0a, 0x0c, 0x4e, 0x75, 0x6c, 0x6c, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x4f, 0x52, 0x54, 0x5f, 0x4e, 0x55, 0x4c, 0x4c, 0x53, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x53, 0x4f, 0x52, 0x54, 0x5f, 0x4e, 0x55, 0x4c, 0x4c, 0x53, 0x5f, 0x46, 0x49, 0x52, 0x53, 0x54, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x4f, 0x52, 0x54, 0x5f, 0x4e, 0x55, 0x4c, 0x4c, 0x53, 0x5f, 0x4c, 0x41, 0x53, 0x54, 0x10, 0x02, 0x1a, 0xbb, 0x02, 0x0a, 0x04, 0x43, 0x61, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x04, 0x65, 
0x78, 0x70, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x65, 0x78, 0x70, 0x72, 0x12, 0x2d, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x00, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x08, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x74, 0x79, 0x70, 0x65, 0x53, 0x74, 0x72, 0x12, 0x44, 0x0a, 0x09, 0x65, 0x76, 0x61, 0x6c, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x73, 0x74, 0x2e, 0x45, 0x76, 0x61, 0x6c, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x65, 0x76, 0x61, 0x6c, 0x4d, 0x6f, 0x64, 0x65, 0x22, 0x62, 0x0a, 0x08, 0x45, 0x76, 0x61, 0x6c, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x45, 0x56, 0x41, 0x4c, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x45, 0x56, 0x41, 0x4c, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x4c, 0x45, 0x47, 0x41, 0x43, 0x59, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x45, 0x56, 0x41, 0x4c, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x41, 0x4e, 0x53, 0x49, 0x10, 0x02, 0x12, 0x11, 0x0a, 0x0d, 0x45, 0x56, 0x41, 0x4c, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x54, 0x52, 0x59, 0x10, 0x03, 0x42, 0x0e, 0x0a, 0x0c, 0x63, 0x61, 0x73, 0x74, 0x5f, 0x74, 0x6f, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x1a, 0xc1, 0x0f, 0x0a, 0x07, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x12, 0x2d, 0x0a, 0x04, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 
0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x75, 0x6c, 0x6c, 0x12, 0x18, 0x0a, 0x06, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x06, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x12, 0x14, 0x0a, 0x04, 0x62, 0x79, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x04, 0x62, 0x79, 0x74, 0x65, 0x12, 0x16, 0x0a, 0x05, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x05, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x12, 0x1a, 0x0a, 0x07, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x07, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x04, 0x6c, 0x6f, 0x6e, 0x67, 0x18, 0x07, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x04, 0x6c, 0x6f, 0x6e, 0x67, 0x12, 0x16, 0x0a, 0x05, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x02, 0x48, 0x00, 0x52, 0x05, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x18, 0x0a, 0x06, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x06, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x12, 0x45, 0x0a, 0x07, 0x64, 0x65, 0x63, 0x69, 0x6d, 0x61, 0x6c, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x2e, 0x44, 0x65, 0x63, 0x69, 0x6d, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x07, 0x64, 0x65, 0x63, 0x69, 0x6d, 0x61, 0x6c, 0x12, 0x18, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x14, 0x0a, 0x04, 0x64, 0x61, 
0x74, 0x65, 0x18, 0x10, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x04, 0x64, 0x61, 0x74, 0x65, 0x12, 0x1e, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x25, 0x0a, 0x0d, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6e, 0x74, 0x7a, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4e, 0x74, 0x7a, 0x12, 0x61, 0x0a, 0x11, 0x63, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x2e, 0x43, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x10, 0x63, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x30, 0x0a, 0x13, 0x79, 0x65, 0x61, 0x72, 0x5f, 0x6d, 0x6f, 0x6e, 0x74, 0x68, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x14, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x11, 0x79, 0x65, 0x61, 0x72, 0x4d, 0x6f, 0x6e, 0x74, 0x68, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x2c, 0x0a, 0x11, 0x64, 0x61, 0x79, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x15, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x3f, 0x0a, 0x05, 0x61, 0x72, 0x72, 0x61, 0x79, 0x18, 0x16, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x2e, 
0x41, 0x72, 0x72, 0x61, 0x79, 0x48, 0x00, 0x52, 0x05, 0x61, 0x72, 0x72, 0x61, 0x79, 0x12, 0x39, 0x0a, 0x03, 0x6d, 0x61, 0x70, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x2e, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x03, 0x6d, 0x61, 0x70, 0x12, 0x42, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x12, 0x61, 0x0a, 0x11, 0x73, 0x70, 0x65, 0x63, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x2e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x41, 0x72, 0x72, 0x61, 0x79, 0x48, 0x00, 0x52, 0x10, 0x73, 0x70, 0x65, 0x63, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x41, 0x72, 0x72, 0x61, 0x79, 0x1a, 0x75, 0x0a, 0x07, 0x44, 0x65, 0x63, 0x69, 0x6d, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x09, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x09, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x05, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x88, 
0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x1a, 0x62, 0x0a, 0x10, 0x43, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x6f, 0x6e, 0x74, 0x68, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6d, 0x6f, 0x6e, 0x74, 0x68, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x79, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x64, 0x61, 0x79, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x1a, 0x82, 0x01, 0x0a, 0x05, 0x41, 0x72, 0x72, 0x61, 0x79, 0x12, 0x3a, 0x0a, 0x0c, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0b, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x3d, 0x0a, 0x08, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x08, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xe3, 0x01, 0x0a, 0x03, 0x4d, 0x61, 0x70, 0x12, 0x32, 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x36, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 
0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x35, 0x0a, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x04, 0x6b, 0x65, 0x79, 0x73, 0x12, 0x39, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x81, 0x01, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x12, 0x38, 0x0a, 0x0b, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x3d, 0x0a, 0x08, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x08, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xc0, 0x02, 0x0a, 0x10, 0x53, 0x70, 0x65, 0x63, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x41, 0x72, 0x72, 0x61, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x62, 0x6f, 0x6f, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 
0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x73, 0x48, 0x00, 0x52, 0x05, 0x62, 0x6f, 0x6f, 0x6c, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x69, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x49, 0x6e, 0x74, 0x73, 0x48, 0x00, 0x52, 0x04, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x2c, 0x0a, 0x05, 0x6c, 0x6f, 0x6e, 0x67, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x6f, 0x6e, 0x67, 0x73, 0x48, 0x00, 0x52, 0x05, 0x6c, 0x6f, 0x6e, 0x67, 0x73, 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x73, 0x48, 0x00, 0x52, 0x06, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x73, 0x12, 0x32, 0x0a, 0x07, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x48, 0x00, 0x52, 0x07, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x07, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x73, 0x48, 0x00, 0x52, 0x07, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x1a, 0xba, 0x01, 0x0a, 0x13, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x13, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x5f, 0x69, 0x64, 
0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x06, 0x70, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x31, 0x0a, 0x12, 0x69, 0x73, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x01, 0x52, 0x10, 0x69, 0x73, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x15, 0x0a, 0x13, 0x5f, 0x69, 0x73, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x1a, 0x82, 0x02, 0x0a, 0x12, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x73, 0x5f, 0x64, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, 0x73, 0x44, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74, 0x12, 0x37, 0x0a, 0x18, 0x69, 0x73, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 
0x08, 0x52, 0x15, 0x69, 0x73, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x24, 0x0a, 0x0b, 0x69, 0x73, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0a, 0x69, 0x73, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x69, 0x73, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x1a, 0x32, 0x0a, 0x10, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x7c, 0x0a, 0x0e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x72, 0x12, 0x2c, 0x0a, 0x0f, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0e, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x88, 0x01, 0x01, 0x12, 0x1c, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x01, 0x52, 0x06, 0x70, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x1a, 0x56, 0x0a, 0x0f, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x52, 0x65, 0x67, 0x65, 0x78, 0x12, 0x19, 0x0a, 0x08, 0x63, 0x6f, 0x6c, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x06, 0x70, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 
0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x1a, 0x84, 0x01, 0x0a, 0x16, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x45, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2f, 0x0a, 0x05, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xbb, 0x01, 0x0a, 0x0c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x12, 0x46, 0x0a, 0x11, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x10, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x44, 0x0a, 0x10, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 
0x69, 0x6f, 0x6e, 0x1a, 0x78, 0x0a, 0x05, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x12, 0x2d, 0x0a, 0x04, 0x65, 0x78, 0x70, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x65, 0x78, 0x70, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x9e, 0x01, 0x0a, 0x0e, 0x4c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x08, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x55, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x4c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x3e, 0x0a, 0x1d, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x4c, 0x61, 0x6d, 0x62, 0x64, 0x61, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x73, 
0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x74, 0x73, 0x42, 0x0b, 0x0a, 0x09, 0x65, 0x78, 0x70, 0x72, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x41, 0x0a, 0x10, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x22, 0x8d, 0x03, 0x0a, 0x1f, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x64, 0x65, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x69, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x64, 0x65, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x69, 0x63, 0x12, 0x37, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x39, 0x0a, 0x0a, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x75, 0x64, 0x66, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x44, 0x46, 0x48, 0x00, 0x52, 0x09, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x64, 0x66, 0x12, 0x49, 0x0a, 0x10, 0x73, 0x63, 0x61, 0x6c, 0x61, 
0x72, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x5f, 0x75, 0x64, 0x66, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x55, 0x44, 0x46, 0x48, 0x00, 0x52, 0x0e, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x55, 0x64, 0x66, 0x12, 0x33, 0x0a, 0x08, 0x6a, 0x61, 0x76, 0x61, 0x5f, 0x75, 0x64, 0x66, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4a, 0x61, 0x76, 0x61, 0x55, 0x44, 0x46, 0x48, 0x00, 0x52, 0x07, 0x6a, 0x61, 0x76, 0x61, 0x55, 0x64, 0x66, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x73, 0x5f, 0x64, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, 0x73, 0x44, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xcc, 0x01, 0x0a, 0x09, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x44, 0x46, 0x12, 0x38, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x65, 0x76, 0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x65, 0x76, 0x61, 0x6c, 0x54, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x76, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x56, 0x65, 0x72, 0x12, 0x2f, 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 
0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x73, 0x22, 0xd6, 0x01, 0x0a, 0x0e, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x55, 0x44, 0x46, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x37, 0x0a, 0x0a, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x6e, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6e, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x22, 0x95, 0x01, 0x0a, 0x07, 0x4a, 0x61, 0x76, 0x61, 0x55, 0x44, 0x46, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3d, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 
0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x63, 0x0a, 0x18, 0x54, 0x79, 0x70, 0x65, 0x64, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x47, 0x0a, 0x10, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x5f, 0x75, 0x64, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x55, 0x44, 0x46, 0x52, 0x0e, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x55, 0x64, 0x66, 0x22, 0x6c, 0x0a, 0x0c, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x5c, 0x0a, 0x17, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2f, 0x0a, 
0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x80, 0x04, 0x0a, 0x0b, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x46, 0x0a, 0x0b, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x3c, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x47, 0x0a, 0x0b, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0b, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x6a, 0x0a, 0x0a, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x2b, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2f, 0x0a, 0x05, 0x76, 
0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xa7, 0x01, 0x0a, 0x0a, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, 0x13, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x16, 0x0a, 0x12, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x44, 0x45, 0x4c, 0x45, 0x54, 0x45, 0x10, 0x01, 0x12, 0x16, 0x0a, 0x12, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x53, 0x45, 0x52, 0x54, 0x10, 0x02, 0x12, 0x1b, 0x0a, 0x17, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x53, 0x45, 0x52, 0x54, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x10, 0x03, 0x12, 0x16, 0x0a, 0x12, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x10, 0x04, 0x12, 0x1b, 0x0a, 0x17, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x10, 0x05, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xc5, 0x05, 0x0a, 0x12, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x17, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x70, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x12, 0x53, 0x0a, 0x0d, 0x73, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 
0x6e, 0x2e, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0c, 0x73, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x62, 0x0a, 0x11, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x61, 0x72, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x41, 0x72, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x41, 0x72, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x12, 0x47, 0x0a, 0x12, 0x69, 0x6e, 0x5f, 0x73, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x10, 0x69, 0x6e, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0xea, 0x01, 0x0a, 0x0f, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x41, 0x72, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x40, 0x0a, 0x0e, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x12, 0x42, 0x0a, 0x0a, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 
0x6e, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x09, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x37, 0x0a, 0x15, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x13, 0x77, 0x69, 0x74, 0x68, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x18, 0x0a, 0x16, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x90, 0x01, 0x0a, 0x0c, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x53, 0x55, 0x42, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x55, 0x42, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x43, 0x41, 0x4c, 0x41, 0x52, 0x10, 0x01, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x55, 0x42, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x58, 0x49, 0x53, 0x54, 0x53, 0x10, 0x02, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x55, 0x42, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x41, 0x52, 0x47, 0x10, 0x03, 0x12, 0x14, 0x0a, 0x10, 0x53, 0x55, 0x42, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x10, 0x04, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x61, 0x72, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 
0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_expressions_proto_rawDescOnce sync.Once file_spark_connect_expressions_proto_rawDescData = file_spark_connect_expressions_proto_rawDesc ) func file_spark_connect_expressions_proto_rawDescGZIP() []byte { file_spark_connect_expressions_proto_rawDescOnce.Do(func() { file_spark_connect_expressions_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_expressions_proto_rawDescData) }) return file_spark_connect_expressions_proto_rawDescData } var file_spark_connect_expressions_proto_enumTypes = make([]protoimpl.EnumInfo, 6) var file_spark_connect_expressions_proto_msgTypes = make([]protoimpl.MessageInfo, 35) var file_spark_connect_expressions_proto_goTypes = []interface{}{ (Expression_Window_WindowFrame_FrameType)(0), // 0: spark.connect.Expression.Window.WindowFrame.FrameType (Expression_SortOrder_SortDirection)(0), // 1: spark.connect.Expression.SortOrder.SortDirection (Expression_SortOrder_NullOrdering)(0), // 2: spark.connect.Expression.SortOrder.NullOrdering (Expression_Cast_EvalMode)(0), // 3: spark.connect.Expression.Cast.EvalMode (MergeAction_ActionType)(0), // 4: spark.connect.MergeAction.ActionType (SubqueryExpression_SubqueryType)(0), // 5: spark.connect.SubqueryExpression.SubqueryType (*Expression)(nil), // 6: spark.connect.Expression (*ExpressionCommon)(nil), // 7: spark.connect.ExpressionCommon (*CommonInlineUserDefinedFunction)(nil), // 8: spark.connect.CommonInlineUserDefinedFunction (*PythonUDF)(nil), // 9: spark.connect.PythonUDF (*ScalarScalaUDF)(nil), // 10: spark.connect.ScalarScalaUDF (*JavaUDF)(nil), // 11: spark.connect.JavaUDF (*TypedAggregateExpression)(nil), // 12: spark.connect.TypedAggregateExpression (*CallFunction)(nil), // 13: spark.connect.CallFunction (*NamedArgumentExpression)(nil), // 14: spark.connect.NamedArgumentExpression (*MergeAction)(nil), // 15: spark.connect.MergeAction (*SubqueryExpression)(nil), // 16: spark.connect.SubqueryExpression 
(*Expression_Window)(nil), // 17: spark.connect.Expression.Window (*Expression_SortOrder)(nil), // 18: spark.connect.Expression.SortOrder (*Expression_Cast)(nil), // 19: spark.connect.Expression.Cast (*Expression_Literal)(nil), // 20: spark.connect.Expression.Literal (*Expression_UnresolvedAttribute)(nil), // 21: spark.connect.Expression.UnresolvedAttribute (*Expression_UnresolvedFunction)(nil), // 22: spark.connect.Expression.UnresolvedFunction (*Expression_ExpressionString)(nil), // 23: spark.connect.Expression.ExpressionString (*Expression_UnresolvedStar)(nil), // 24: spark.connect.Expression.UnresolvedStar (*Expression_UnresolvedRegex)(nil), // 25: spark.connect.Expression.UnresolvedRegex (*Expression_UnresolvedExtractValue)(nil), // 26: spark.connect.Expression.UnresolvedExtractValue (*Expression_UpdateFields)(nil), // 27: spark.connect.Expression.UpdateFields (*Expression_Alias)(nil), // 28: spark.connect.Expression.Alias (*Expression_LambdaFunction)(nil), // 29: spark.connect.Expression.LambdaFunction (*Expression_UnresolvedNamedLambdaVariable)(nil), // 30: spark.connect.Expression.UnresolvedNamedLambdaVariable (*Expression_Window_WindowFrame)(nil), // 31: spark.connect.Expression.Window.WindowFrame (*Expression_Window_WindowFrame_FrameBoundary)(nil), // 32: spark.connect.Expression.Window.WindowFrame.FrameBoundary (*Expression_Literal_Decimal)(nil), // 33: spark.connect.Expression.Literal.Decimal (*Expression_Literal_CalendarInterval)(nil), // 34: spark.connect.Expression.Literal.CalendarInterval (*Expression_Literal_Array)(nil), // 35: spark.connect.Expression.Literal.Array (*Expression_Literal_Map)(nil), // 36: spark.connect.Expression.Literal.Map (*Expression_Literal_Struct)(nil), // 37: spark.connect.Expression.Literal.Struct (*Expression_Literal_SpecializedArray)(nil), // 38: spark.connect.Expression.Literal.SpecializedArray (*MergeAction_Assignment)(nil), // 39: spark.connect.MergeAction.Assignment (*SubqueryExpression_TableArgOptions)(nil), // 40: 
spark.connect.SubqueryExpression.TableArgOptions (*anypb.Any)(nil), // 41: google.protobuf.Any (*Origin)(nil), // 42: spark.connect.Origin (*DataType)(nil), // 43: spark.connect.DataType (*Bools)(nil), // 44: spark.connect.Bools (*Ints)(nil), // 45: spark.connect.Ints (*Longs)(nil), // 46: spark.connect.Longs (*Floats)(nil), // 47: spark.connect.Floats (*Doubles)(nil), // 48: spark.connect.Doubles (*Strings)(nil), // 49: spark.connect.Strings } var file_spark_connect_expressions_proto_depIdxs = []int32{ 7, // 0: spark.connect.Expression.common:type_name -> spark.connect.ExpressionCommon 20, // 1: spark.connect.Expression.literal:type_name -> spark.connect.Expression.Literal 21, // 2: spark.connect.Expression.unresolved_attribute:type_name -> spark.connect.Expression.UnresolvedAttribute 22, // 3: spark.connect.Expression.unresolved_function:type_name -> spark.connect.Expression.UnresolvedFunction 23, // 4: spark.connect.Expression.expression_string:type_name -> spark.connect.Expression.ExpressionString 24, // 5: spark.connect.Expression.unresolved_star:type_name -> spark.connect.Expression.UnresolvedStar 28, // 6: spark.connect.Expression.alias:type_name -> spark.connect.Expression.Alias 19, // 7: spark.connect.Expression.cast:type_name -> spark.connect.Expression.Cast 25, // 8: spark.connect.Expression.unresolved_regex:type_name -> spark.connect.Expression.UnresolvedRegex 18, // 9: spark.connect.Expression.sort_order:type_name -> spark.connect.Expression.SortOrder 29, // 10: spark.connect.Expression.lambda_function:type_name -> spark.connect.Expression.LambdaFunction 17, // 11: spark.connect.Expression.window:type_name -> spark.connect.Expression.Window 26, // 12: spark.connect.Expression.unresolved_extract_value:type_name -> spark.connect.Expression.UnresolvedExtractValue 27, // 13: spark.connect.Expression.update_fields:type_name -> spark.connect.Expression.UpdateFields 30, // 14: spark.connect.Expression.unresolved_named_lambda_variable:type_name -> 
spark.connect.Expression.UnresolvedNamedLambdaVariable 8, // 15: spark.connect.Expression.common_inline_user_defined_function:type_name -> spark.connect.CommonInlineUserDefinedFunction 13, // 16: spark.connect.Expression.call_function:type_name -> spark.connect.CallFunction 14, // 17: spark.connect.Expression.named_argument_expression:type_name -> spark.connect.NamedArgumentExpression 15, // 18: spark.connect.Expression.merge_action:type_name -> spark.connect.MergeAction 12, // 19: spark.connect.Expression.typed_aggregate_expression:type_name -> spark.connect.TypedAggregateExpression 16, // 20: spark.connect.Expression.subquery_expression:type_name -> spark.connect.SubqueryExpression 41, // 21: spark.connect.Expression.extension:type_name -> google.protobuf.Any 42, // 22: spark.connect.ExpressionCommon.origin:type_name -> spark.connect.Origin 6, // 23: spark.connect.CommonInlineUserDefinedFunction.arguments:type_name -> spark.connect.Expression 9, // 24: spark.connect.CommonInlineUserDefinedFunction.python_udf:type_name -> spark.connect.PythonUDF 10, // 25: spark.connect.CommonInlineUserDefinedFunction.scalar_scala_udf:type_name -> spark.connect.ScalarScalaUDF 11, // 26: spark.connect.CommonInlineUserDefinedFunction.java_udf:type_name -> spark.connect.JavaUDF 43, // 27: spark.connect.PythonUDF.output_type:type_name -> spark.connect.DataType 43, // 28: spark.connect.ScalarScalaUDF.inputTypes:type_name -> spark.connect.DataType 43, // 29: spark.connect.ScalarScalaUDF.outputType:type_name -> spark.connect.DataType 43, // 30: spark.connect.JavaUDF.output_type:type_name -> spark.connect.DataType 10, // 31: spark.connect.TypedAggregateExpression.scalar_scala_udf:type_name -> spark.connect.ScalarScalaUDF 6, // 32: spark.connect.CallFunction.arguments:type_name -> spark.connect.Expression 6, // 33: spark.connect.NamedArgumentExpression.value:type_name -> spark.connect.Expression 4, // 34: spark.connect.MergeAction.action_type:type_name -> 
spark.connect.MergeAction.ActionType 6, // 35: spark.connect.MergeAction.condition:type_name -> spark.connect.Expression 39, // 36: spark.connect.MergeAction.assignments:type_name -> spark.connect.MergeAction.Assignment 5, // 37: spark.connect.SubqueryExpression.subquery_type:type_name -> spark.connect.SubqueryExpression.SubqueryType 40, // 38: spark.connect.SubqueryExpression.table_arg_options:type_name -> spark.connect.SubqueryExpression.TableArgOptions 6, // 39: spark.connect.SubqueryExpression.in_subquery_values:type_name -> spark.connect.Expression 6, // 40: spark.connect.Expression.Window.window_function:type_name -> spark.connect.Expression 6, // 41: spark.connect.Expression.Window.partition_spec:type_name -> spark.connect.Expression 18, // 42: spark.connect.Expression.Window.order_spec:type_name -> spark.connect.Expression.SortOrder 31, // 43: spark.connect.Expression.Window.frame_spec:type_name -> spark.connect.Expression.Window.WindowFrame 6, // 44: spark.connect.Expression.SortOrder.child:type_name -> spark.connect.Expression 1, // 45: spark.connect.Expression.SortOrder.direction:type_name -> spark.connect.Expression.SortOrder.SortDirection 2, // 46: spark.connect.Expression.SortOrder.null_ordering:type_name -> spark.connect.Expression.SortOrder.NullOrdering 6, // 47: spark.connect.Expression.Cast.expr:type_name -> spark.connect.Expression 43, // 48: spark.connect.Expression.Cast.type:type_name -> spark.connect.DataType 3, // 49: spark.connect.Expression.Cast.eval_mode:type_name -> spark.connect.Expression.Cast.EvalMode 43, // 50: spark.connect.Expression.Literal.null:type_name -> spark.connect.DataType 33, // 51: spark.connect.Expression.Literal.decimal:type_name -> spark.connect.Expression.Literal.Decimal 34, // 52: spark.connect.Expression.Literal.calendar_interval:type_name -> spark.connect.Expression.Literal.CalendarInterval 35, // 53: spark.connect.Expression.Literal.array:type_name -> spark.connect.Expression.Literal.Array 36, // 54: 
spark.connect.Expression.Literal.map:type_name -> spark.connect.Expression.Literal.Map 37, // 55: spark.connect.Expression.Literal.struct:type_name -> spark.connect.Expression.Literal.Struct 38, // 56: spark.connect.Expression.Literal.specialized_array:type_name -> spark.connect.Expression.Literal.SpecializedArray 6, // 57: spark.connect.Expression.UnresolvedFunction.arguments:type_name -> spark.connect.Expression 6, // 58: spark.connect.Expression.UnresolvedExtractValue.child:type_name -> spark.connect.Expression 6, // 59: spark.connect.Expression.UnresolvedExtractValue.extraction:type_name -> spark.connect.Expression 6, // 60: spark.connect.Expression.UpdateFields.struct_expression:type_name -> spark.connect.Expression 6, // 61: spark.connect.Expression.UpdateFields.value_expression:type_name -> spark.connect.Expression 6, // 62: spark.connect.Expression.Alias.expr:type_name -> spark.connect.Expression 6, // 63: spark.connect.Expression.LambdaFunction.function:type_name -> spark.connect.Expression 30, // 64: spark.connect.Expression.LambdaFunction.arguments:type_name -> spark.connect.Expression.UnresolvedNamedLambdaVariable 0, // 65: spark.connect.Expression.Window.WindowFrame.frame_type:type_name -> spark.connect.Expression.Window.WindowFrame.FrameType 32, // 66: spark.connect.Expression.Window.WindowFrame.lower:type_name -> spark.connect.Expression.Window.WindowFrame.FrameBoundary 32, // 67: spark.connect.Expression.Window.WindowFrame.upper:type_name -> spark.connect.Expression.Window.WindowFrame.FrameBoundary 6, // 68: spark.connect.Expression.Window.WindowFrame.FrameBoundary.value:type_name -> spark.connect.Expression 43, // 69: spark.connect.Expression.Literal.Array.element_type:type_name -> spark.connect.DataType 20, // 70: spark.connect.Expression.Literal.Array.elements:type_name -> spark.connect.Expression.Literal 43, // 71: spark.connect.Expression.Literal.Map.key_type:type_name -> spark.connect.DataType 43, // 72: 
spark.connect.Expression.Literal.Map.value_type:type_name -> spark.connect.DataType 20, // 73: spark.connect.Expression.Literal.Map.keys:type_name -> spark.connect.Expression.Literal 20, // 74: spark.connect.Expression.Literal.Map.values:type_name -> spark.connect.Expression.Literal 43, // 75: spark.connect.Expression.Literal.Struct.struct_type:type_name -> spark.connect.DataType 20, // 76: spark.connect.Expression.Literal.Struct.elements:type_name -> spark.connect.Expression.Literal 44, // 77: spark.connect.Expression.Literal.SpecializedArray.bools:type_name -> spark.connect.Bools 45, // 78: spark.connect.Expression.Literal.SpecializedArray.ints:type_name -> spark.connect.Ints 46, // 79: spark.connect.Expression.Literal.SpecializedArray.longs:type_name -> spark.connect.Longs 47, // 80: spark.connect.Expression.Literal.SpecializedArray.floats:type_name -> spark.connect.Floats 48, // 81: spark.connect.Expression.Literal.SpecializedArray.doubles:type_name -> spark.connect.Doubles 49, // 82: spark.connect.Expression.Literal.SpecializedArray.strings:type_name -> spark.connect.Strings 6, // 83: spark.connect.MergeAction.Assignment.key:type_name -> spark.connect.Expression 6, // 84: spark.connect.MergeAction.Assignment.value:type_name -> spark.connect.Expression 6, // 85: spark.connect.SubqueryExpression.TableArgOptions.partition_spec:type_name -> spark.connect.Expression 18, // 86: spark.connect.SubqueryExpression.TableArgOptions.order_spec:type_name -> spark.connect.Expression.SortOrder 87, // [87:87] is the sub-list for method output_type 87, // [87:87] is the sub-list for method input_type 87, // [87:87] is the sub-list for extension type_name 87, // [87:87] is the sub-list for extension extendee 0, // [0:87] is the sub-list for field type_name } func init() { file_spark_connect_expressions_proto_init() } func file_spark_connect_expressions_proto_init() { if File_spark_connect_expressions_proto != nil { return } file_spark_connect_types_proto_init() 
file_spark_connect_common_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_expressions_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ExpressionCommon); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CommonInlineUserDefinedFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PythonUDF); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ScalarScalaUDF); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*JavaUDF); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*TypedAggregateExpression); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CallFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: 
return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NamedArgumentExpression); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MergeAction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SubqueryExpression); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Window); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_SortOrder); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Cast); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Literal); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_UnresolvedAttribute); i { case 0: return &v.state case 1: return 
&v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_UnresolvedFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_ExpressionString); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_UnresolvedStar); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_UnresolvedRegex); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_UnresolvedExtractValue); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_UpdateFields); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Alias); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := 
v.(*Expression_LambdaFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_UnresolvedNamedLambdaVariable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Window_WindowFrame); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Window_WindowFrame_FrameBoundary); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Literal_Decimal); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Literal_CalendarInterval); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Literal_Array); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Literal_Map); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_expressions_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Literal_Struct); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Expression_Literal_SpecializedArray); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MergeAction_Assignment); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_expressions_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SubqueryExpression_TableArgOptions); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_expressions_proto_msgTypes[0].OneofWrappers = []interface{}{ (*Expression_Literal_)(nil), (*Expression_UnresolvedAttribute_)(nil), (*Expression_UnresolvedFunction_)(nil), (*Expression_ExpressionString_)(nil), (*Expression_UnresolvedStar_)(nil), (*Expression_Alias_)(nil), (*Expression_Cast_)(nil), (*Expression_UnresolvedRegex_)(nil), (*Expression_SortOrder_)(nil), (*Expression_LambdaFunction_)(nil), (*Expression_Window_)(nil), (*Expression_UnresolvedExtractValue_)(nil), (*Expression_UpdateFields_)(nil), (*Expression_UnresolvedNamedLambdaVariable_)(nil), (*Expression_CommonInlineUserDefinedFunction)(nil), (*Expression_CallFunction)(nil), (*Expression_NamedArgumentExpression)(nil), (*Expression_MergeAction)(nil), (*Expression_TypedAggregateExpression)(nil), (*Expression_SubqueryExpression)(nil), (*Expression_Extension)(nil), } file_spark_connect_expressions_proto_msgTypes[2].OneofWrappers = []interface{}{ 
(*CommonInlineUserDefinedFunction_PythonUdf)(nil), (*CommonInlineUserDefinedFunction_ScalarScalaUdf)(nil), (*CommonInlineUserDefinedFunction_JavaUdf)(nil), } file_spark_connect_expressions_proto_msgTypes[5].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[9].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[10].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[13].OneofWrappers = []interface{}{ (*Expression_Cast_Type)(nil), (*Expression_Cast_TypeStr)(nil), } file_spark_connect_expressions_proto_msgTypes[14].OneofWrappers = []interface{}{ (*Expression_Literal_Null)(nil), (*Expression_Literal_Binary)(nil), (*Expression_Literal_Boolean)(nil), (*Expression_Literal_Byte)(nil), (*Expression_Literal_Short)(nil), (*Expression_Literal_Integer)(nil), (*Expression_Literal_Long)(nil), (*Expression_Literal_Float)(nil), (*Expression_Literal_Double)(nil), (*Expression_Literal_Decimal_)(nil), (*Expression_Literal_String_)(nil), (*Expression_Literal_Date)(nil), (*Expression_Literal_Timestamp)(nil), (*Expression_Literal_TimestampNtz)(nil), (*Expression_Literal_CalendarInterval_)(nil), (*Expression_Literal_YearMonthInterval)(nil), (*Expression_Literal_DayTimeInterval)(nil), (*Expression_Literal_Array_)(nil), (*Expression_Literal_Map_)(nil), (*Expression_Literal_Struct_)(nil), (*Expression_Literal_SpecializedArray_)(nil), } file_spark_connect_expressions_proto_msgTypes[15].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[16].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[18].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[19].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[22].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[26].OneofWrappers = []interface{}{ (*Expression_Window_WindowFrame_FrameBoundary_CurrentRow)(nil), 
(*Expression_Window_WindowFrame_FrameBoundary_Unbounded)(nil), (*Expression_Window_WindowFrame_FrameBoundary_Value)(nil), } file_spark_connect_expressions_proto_msgTypes[27].OneofWrappers = []interface{}{} file_spark_connect_expressions_proto_msgTypes[32].OneofWrappers = []interface{}{ (*Expression_Literal_SpecializedArray_Bools)(nil), (*Expression_Literal_SpecializedArray_Ints)(nil), (*Expression_Literal_SpecializedArray_Longs)(nil), (*Expression_Literal_SpecializedArray_Floats)(nil), (*Expression_Literal_SpecializedArray_Doubles)(nil), (*Expression_Literal_SpecializedArray_Strings)(nil), } file_spark_connect_expressions_proto_msgTypes[34].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_expressions_proto_rawDesc, NumEnums: 6, NumMessages: 35, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_expressions_proto_goTypes, DependencyIndexes: file_spark_connect_expressions_proto_depIdxs, EnumInfos: file_spark_connect_expressions_proto_enumTypes, MessageInfos: file_spark_connect_expressions_proto_msgTypes, }.Build() File_spark_connect_expressions_proto = out.File file_spark_connect_expressions_proto_rawDesc = nil file_spark_connect_expressions_proto_goTypes = nil file_spark_connect_expressions_proto_depIdxs = nil } ================================================ FILE: internal/generated/ml.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/ml.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // Command for ML type MlCommand struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to Command: // // *MlCommand_Fit_ // *MlCommand_Fetch // *MlCommand_Delete_ // *MlCommand_Write_ // *MlCommand_Read_ // *MlCommand_Evaluate_ // *MlCommand_CleanCache_ // *MlCommand_GetCacheInfo_ Command isMlCommand_Command `protobuf_oneof:"command"` } func (x *MlCommand) Reset() { *x = MlCommand{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_ml_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *MlCommand) String() string { return protoimpl.X.MessageStringOf(x) } func (*MlCommand) ProtoMessage() {} func (x *MlCommand) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_ml_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return 
mi.MessageOf(x) } // Deprecated: Use MlCommand.ProtoReflect.Descriptor instead. func (*MlCommand) Descriptor() ([]byte, []int) { return file_spark_connect_ml_proto_rawDescGZIP(), []int{0} } func (m *MlCommand) GetCommand() isMlCommand_Command { if m != nil { return m.Command } return nil } func (x *MlCommand) GetFit() *MlCommand_Fit { if x, ok := x.GetCommand().(*MlCommand_Fit_); ok { return x.Fit } return nil } func (x *MlCommand) GetFetch() *Fetch { if x, ok := x.GetCommand().(*MlCommand_Fetch); ok { return x.Fetch } return nil } func (x *MlCommand) GetDelete() *MlCommand_Delete { if x, ok := x.GetCommand().(*MlCommand_Delete_); ok { return x.Delete } return nil } func (x *MlCommand) GetWrite() *MlCommand_Write { if x, ok := x.GetCommand().(*MlCommand_Write_); ok { return x.Write } return nil } func (x *MlCommand) GetRead() *MlCommand_Read { if x, ok := x.GetCommand().(*MlCommand_Read_); ok { return x.Read } return nil } func (x *MlCommand) GetEvaluate() *MlCommand_Evaluate { if x, ok := x.GetCommand().(*MlCommand_Evaluate_); ok { return x.Evaluate } return nil } func (x *MlCommand) GetCleanCache() *MlCommand_CleanCache { if x, ok := x.GetCommand().(*MlCommand_CleanCache_); ok { return x.CleanCache } return nil } func (x *MlCommand) GetGetCacheInfo() *MlCommand_GetCacheInfo { if x, ok := x.GetCommand().(*MlCommand_GetCacheInfo_); ok { return x.GetCacheInfo } return nil } type isMlCommand_Command interface { isMlCommand_Command() } type MlCommand_Fit_ struct { Fit *MlCommand_Fit `protobuf:"bytes,1,opt,name=fit,proto3,oneof"` } type MlCommand_Fetch struct { Fetch *Fetch `protobuf:"bytes,2,opt,name=fetch,proto3,oneof"` } type MlCommand_Delete_ struct { Delete *MlCommand_Delete `protobuf:"bytes,3,opt,name=delete,proto3,oneof"` } type MlCommand_Write_ struct { Write *MlCommand_Write `protobuf:"bytes,4,opt,name=write,proto3,oneof"` } type MlCommand_Read_ struct { Read *MlCommand_Read `protobuf:"bytes,5,opt,name=read,proto3,oneof"` } type MlCommand_Evaluate_ struct { 
Evaluate *MlCommand_Evaluate `protobuf:"bytes,6,opt,name=evaluate,proto3,oneof"` } type MlCommand_CleanCache_ struct { CleanCache *MlCommand_CleanCache `protobuf:"bytes,7,opt,name=clean_cache,json=cleanCache,proto3,oneof"` } type MlCommand_GetCacheInfo_ struct { GetCacheInfo *MlCommand_GetCacheInfo `protobuf:"bytes,8,opt,name=get_cache_info,json=getCacheInfo,proto3,oneof"` } func (*MlCommand_Fit_) isMlCommand_Command() {} func (*MlCommand_Fetch) isMlCommand_Command() {} func (*MlCommand_Delete_) isMlCommand_Command() {} func (*MlCommand_Write_) isMlCommand_Command() {} func (*MlCommand_Read_) isMlCommand_Command() {} func (*MlCommand_Evaluate_) isMlCommand_Command() {} func (*MlCommand_CleanCache_) isMlCommand_Command() {} func (*MlCommand_GetCacheInfo_) isMlCommand_Command() {} // The result of MlCommand type MlCommandResult struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to ResultType: // // *MlCommandResult_Param // *MlCommandResult_Summary // *MlCommandResult_OperatorInfo ResultType isMlCommandResult_ResultType `protobuf_oneof:"result_type"` } func (x *MlCommandResult) Reset() { *x = MlCommandResult{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_ml_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *MlCommandResult) String() string { return protoimpl.X.MessageStringOf(x) } func (*MlCommandResult) ProtoMessage() {} func (x *MlCommandResult) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_ml_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use MlCommandResult.ProtoReflect.Descriptor instead. 
func (*MlCommandResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_ml_proto_rawDescGZIP(), []int{1}
}

// GetResultType returns whichever result_type oneof wrapper is currently set,
// or nil when the message (or receiver) is nil.
func (m *MlCommandResult) GetResultType() isMlCommandResult_ResultType {
	if m != nil {
		return m.ResultType
	}
	return nil
}

// GetParam returns the param case of the result_type oneof, or nil when a
// different case (or none) is set.
func (x *MlCommandResult) GetParam() *Expression_Literal {
	if x, ok := x.GetResultType().(*MlCommandResult_Param); ok {
		return x.Param
	}
	return nil
}

// GetSummary returns the summary case of the result_type oneof, or "" when a
// different case (or none) is set.
func (x *MlCommandResult) GetSummary() string {
	if x, ok := x.GetResultType().(*MlCommandResult_Summary); ok {
		return x.Summary
	}
	return ""
}

// GetOperatorInfo returns the operator_info case of the result_type oneof, or
// nil when a different case (or none) is set.
func (x *MlCommandResult) GetOperatorInfo() *MlCommandResult_MlOperatorInfo {
	if x, ok := x.GetResultType().(*MlCommandResult_OperatorInfo); ok {
		return x.OperatorInfo
	}
	return nil
}

// isMlCommandResult_ResultType is the marker interface implemented by every
// result_type oneof wrapper type below.
type isMlCommandResult_ResultType interface {
	isMlCommandResult_ResultType()
}

type MlCommandResult_Param struct {
	// The result of the attribute
	Param *Expression_Literal `protobuf:"bytes,1,opt,name=param,proto3,oneof"`
}

type MlCommandResult_Summary struct {
	// Evaluate a Dataset in a model and return the cached ID of summary
	Summary string `protobuf:"bytes,2,opt,name=summary,proto3,oneof"`
}

type MlCommandResult_OperatorInfo struct {
	// Operator information
	OperatorInfo *MlCommandResult_MlOperatorInfo `protobuf:"bytes,3,opt,name=operator_info,json=operatorInfo,proto3,oneof"`
}

func (*MlCommandResult_Param) isMlCommandResult_ResultType() {}

func (*MlCommandResult_Summary) isMlCommandResult_ResultType() {}

func (*MlCommandResult_OperatorInfo) isMlCommandResult_ResultType() {}

// Command for estimator.fit(dataset)
type MlCommand_Fit struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Estimator information (its type should be OPERATOR_TYPE_ESTIMATOR)
	Estimator *MlOperator `protobuf:"bytes,1,opt,name=estimator,proto3" json:"estimator,omitempty"`
	// (Optional) parameters of the Estimator
	Params *MlParams `protobuf:"bytes,2,opt,name=params,proto3,oneof" json:"params,omitempty"`
	// (Required) the training dataset
	Dataset *Relation `protobuf:"bytes,3,opt,name=dataset,proto3" json:"dataset,omitempty"`
}

func (x *MlCommand_Fit) Reset() {
	*x = MlCommand_Fit{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_ml_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlCommand_Fit) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlCommand_Fit) ProtoMessage() {}

func (x *MlCommand_Fit) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_ml_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlCommand_Fit.ProtoReflect.Descriptor instead.
func (*MlCommand_Fit) Descriptor() ([]byte, []int) {
	return file_spark_connect_ml_proto_rawDescGZIP(), []int{0, 0}
}

func (x *MlCommand_Fit) GetEstimator() *MlOperator {
	if x != nil {
		return x.Estimator
	}
	return nil
}

func (x *MlCommand_Fit) GetParams() *MlParams {
	if x != nil {
		return x.Params
	}
	return nil
}

func (x *MlCommand_Fit) GetDataset() *Relation {
	if x != nil {
		return x.Dataset
	}
	return nil
}

// Command to delete the cached objects which could be a model
// or summary evaluated by a model
type MlCommand_Delete struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	ObjRefs []*ObjectRef `protobuf:"bytes,1,rep,name=obj_refs,json=objRefs,proto3" json:"obj_refs,omitempty"`
}

func (x *MlCommand_Delete) Reset() {
	*x = MlCommand_Delete{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_ml_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlCommand_Delete) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlCommand_Delete) ProtoMessage() {}

func (x *MlCommand_Delete) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_ml_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlCommand_Delete.ProtoReflect.Descriptor instead.
func (*MlCommand_Delete) Descriptor() ([]byte, []int) {
	return file_spark_connect_ml_proto_rawDescGZIP(), []int{0, 1}
}

func (x *MlCommand_Delete) GetObjRefs() []*ObjectRef {
	if x != nil {
		return x.ObjRefs
	}
	return nil
}

// Force to clean up all the ML cached objects
type MlCommand_CleanCache struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}

func (x *MlCommand_CleanCache) Reset() {
	*x = MlCommand_CleanCache{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_ml_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlCommand_CleanCache) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlCommand_CleanCache) ProtoMessage() {}

func (x *MlCommand_CleanCache) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_ml_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlCommand_CleanCache.ProtoReflect.Descriptor instead.
func (*MlCommand_CleanCache) Descriptor() ([]byte, []int) {
	return file_spark_connect_ml_proto_rawDescGZIP(), []int{0, 2}
}

// Get the information of all the ML cached objects
// NOTE: this message carries no fields; the request itself is the signal.
type MlCommand_GetCacheInfo struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}

func (x *MlCommand_GetCacheInfo) Reset() {
	*x = MlCommand_GetCacheInfo{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_ml_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlCommand_GetCacheInfo) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlCommand_GetCacheInfo) ProtoMessage() {}

func (x *MlCommand_GetCacheInfo) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_ml_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlCommand_GetCacheInfo.ProtoReflect.Descriptor instead.
func (*MlCommand_GetCacheInfo) Descriptor() ([]byte, []int) {
	return file_spark_connect_ml_proto_rawDescGZIP(), []int{0, 3}
}

// Command to write ML operator
type MlCommand_Write struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// It could be an estimator/evaluator or the cached model
	//
	// Types that are assignable to Type:
	//
	//	*MlCommand_Write_Operator
	//	*MlCommand_Write_ObjRef
	Type isMlCommand_Write_Type `protobuf_oneof:"type"`
	// (Optional) The parameters of operator which could be estimator/evaluator or a cached model
	Params *MlParams `protobuf:"bytes,3,opt,name=params,proto3,oneof" json:"params,omitempty"`
	// (Required) Save the ML instance to the path
	Path string `protobuf:"bytes,4,opt,name=path,proto3" json:"path,omitempty"`
	// (Optional) Overwrites if the output path already exists.
	ShouldOverwrite *bool `protobuf:"varint,5,opt,name=should_overwrite,json=shouldOverwrite,proto3,oneof" json:"should_overwrite,omitempty"`
	// (Optional) The options of the writer
	Options map[string]string `protobuf:"bytes,6,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *MlCommand_Write) Reset() {
	*x = MlCommand_Write{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_ml_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlCommand_Write) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlCommand_Write) ProtoMessage() {}

func (x *MlCommand_Write) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_ml_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlCommand_Write.ProtoReflect.Descriptor instead.
func (*MlCommand_Write) Descriptor() ([]byte, []int) {
	return file_spark_connect_ml_proto_rawDescGZIP(), []int{0, 4}
}

// GetType returns whichever type oneof wrapper is currently set, or nil when
// the message (or receiver) is nil.
func (m *MlCommand_Write) GetType() isMlCommand_Write_Type {
	if m != nil {
		return m.Type
	}
	return nil
}

// GetOperator returns the operator case of the type oneof, or nil when a
// different case (or none) is set.
func (x *MlCommand_Write) GetOperator() *MlOperator {
	if x, ok := x.GetType().(*MlCommand_Write_Operator); ok {
		return x.Operator
	}
	return nil
}

// GetObjRef returns the obj_ref case of the type oneof, or nil when a
// different case (or none) is set.
func (x *MlCommand_Write) GetObjRef() *ObjectRef {
	if x, ok := x.GetType().(*MlCommand_Write_ObjRef); ok {
		return x.ObjRef
	}
	return nil
}

func (x *MlCommand_Write) GetParams() *MlParams {
	if x != nil {
		return x.Params
	}
	return nil
}

func (x *MlCommand_Write) GetPath() string {
	if x != nil {
		return x.Path
	}
	return ""
}

// GetShouldOverwrite returns false when the optional field is unset.
func (x *MlCommand_Write) GetShouldOverwrite() bool {
	if x != nil && x.ShouldOverwrite != nil {
		return *x.ShouldOverwrite
	}
	return false
}

// GetOptions returns the writer options map; it may be nil when unset.
func (x *MlCommand_Write) GetOptions() map[string]string {
	if x != nil {
		return x.Options
	}
	return nil
}

// isMlCommand_Write_Type is the marker interface implemented by the type
// oneof wrapper types below.
type isMlCommand_Write_Type interface {
	isMlCommand_Write_Type()
}

type MlCommand_Write_Operator struct {
	// Estimator or evaluator
	Operator *MlOperator `protobuf:"bytes,1,opt,name=operator,proto3,oneof"`
}

type MlCommand_Write_ObjRef struct {
	// The cached model
	ObjRef *ObjectRef `protobuf:"bytes,2,opt,name=obj_ref,json=objRef,proto3,oneof"`
}

func (*MlCommand_Write_Operator) isMlCommand_Write_Type() {}

func (*MlCommand_Write_ObjRef) isMlCommand_Write_Type() {}

// Command to load ML operator.
type MlCommand_Read struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) ML operator information
	Operator *MlOperator `protobuf:"bytes,1,opt,name=operator,proto3" json:"operator,omitempty"`
	// (Required) Load the ML instance from the input path
	Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"`
}

func (x *MlCommand_Read) Reset() {
	*x = MlCommand_Read{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_ml_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlCommand_Read) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlCommand_Read) ProtoMessage() {}

func (x *MlCommand_Read) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_ml_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlCommand_Read.ProtoReflect.Descriptor instead.
func (*MlCommand_Read) Descriptor() ([]byte, []int) {
	return file_spark_connect_ml_proto_rawDescGZIP(), []int{0, 5}
}

func (x *MlCommand_Read) GetOperator() *MlOperator {
	if x != nil {
		return x.Operator
	}
	return nil
}

func (x *MlCommand_Read) GetPath() string {
	if x != nil {
		return x.Path
	}
	return ""
}

// Command for evaluator.evaluate(dataset)
type MlCommand_Evaluate struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Evaluator information (its type should be OPERATOR_TYPE_EVALUATOR)
	Evaluator *MlOperator `protobuf:"bytes,1,opt,name=evaluator,proto3" json:"evaluator,omitempty"`
	// (Optional) parameters of the Evaluator
	Params *MlParams `protobuf:"bytes,2,opt,name=params,proto3,oneof" json:"params,omitempty"`
	// (Required) the evaluating dataset
	Dataset *Relation `protobuf:"bytes,3,opt,name=dataset,proto3" json:"dataset,omitempty"`
}

func (x *MlCommand_Evaluate) Reset() {
	*x = MlCommand_Evaluate{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_ml_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlCommand_Evaluate) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlCommand_Evaluate) ProtoMessage() {}

func (x *MlCommand_Evaluate) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_ml_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlCommand_Evaluate.ProtoReflect.Descriptor instead.
func (*MlCommand_Evaluate) Descriptor() ([]byte, []int) { return file_spark_connect_ml_proto_rawDescGZIP(), []int{0, 6} } func (x *MlCommand_Evaluate) GetEvaluator() *MlOperator { if x != nil { return x.Evaluator } return nil } func (x *MlCommand_Evaluate) GetParams() *MlParams { if x != nil { return x.Params } return nil } func (x *MlCommand_Evaluate) GetDataset() *Relation { if x != nil { return x.Dataset } return nil } // Represents an operator info type MlCommandResult_MlOperatorInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to Type: // // *MlCommandResult_MlOperatorInfo_ObjRef // *MlCommandResult_MlOperatorInfo_Name Type isMlCommandResult_MlOperatorInfo_Type `protobuf_oneof:"type"` // (Optional) the 'uid' of a ML object // Note it is different from the 'id' of a cached object. Uid *string `protobuf:"bytes,3,opt,name=uid,proto3,oneof" json:"uid,omitempty"` // (Optional) parameters Params *MlParams `protobuf:"bytes,4,opt,name=params,proto3,oneof" json:"params,omitempty"` // (Optional) warning message generated during the ML command execution WarningMessage *string `protobuf:"bytes,5,opt,name=warning_message,json=warningMessage,proto3,oneof" json:"warning_message,omitempty"` } func (x *MlCommandResult_MlOperatorInfo) Reset() { *x = MlCommandResult_MlOperatorInfo{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_ml_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *MlCommandResult_MlOperatorInfo) String() string { return protoimpl.X.MessageStringOf(x) } func (*MlCommandResult_MlOperatorInfo) ProtoMessage() {} func (x *MlCommandResult_MlOperatorInfo) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_ml_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return 
mi.MessageOf(x) } // Deprecated: Use MlCommandResult_MlOperatorInfo.ProtoReflect.Descriptor instead. func (*MlCommandResult_MlOperatorInfo) Descriptor() ([]byte, []int) { return file_spark_connect_ml_proto_rawDescGZIP(), []int{1, 0} } func (m *MlCommandResult_MlOperatorInfo) GetType() isMlCommandResult_MlOperatorInfo_Type { if m != nil { return m.Type } return nil } func (x *MlCommandResult_MlOperatorInfo) GetObjRef() *ObjectRef { if x, ok := x.GetType().(*MlCommandResult_MlOperatorInfo_ObjRef); ok { return x.ObjRef } return nil } func (x *MlCommandResult_MlOperatorInfo) GetName() string { if x, ok := x.GetType().(*MlCommandResult_MlOperatorInfo_Name); ok { return x.Name } return "" } func (x *MlCommandResult_MlOperatorInfo) GetUid() string { if x != nil && x.Uid != nil { return *x.Uid } return "" } func (x *MlCommandResult_MlOperatorInfo) GetParams() *MlParams { if x != nil { return x.Params } return nil } func (x *MlCommandResult_MlOperatorInfo) GetWarningMessage() string { if x != nil && x.WarningMessage != nil { return *x.WarningMessage } return "" } type isMlCommandResult_MlOperatorInfo_Type interface { isMlCommandResult_MlOperatorInfo_Type() } type MlCommandResult_MlOperatorInfo_ObjRef struct { // The cached object which could be a model or summary evaluated by a model ObjRef *ObjectRef `protobuf:"bytes,1,opt,name=obj_ref,json=objRef,proto3,oneof"` } type MlCommandResult_MlOperatorInfo_Name struct { // Operator name Name string `protobuf:"bytes,2,opt,name=name,proto3,oneof"` } func (*MlCommandResult_MlOperatorInfo_ObjRef) isMlCommandResult_MlOperatorInfo_Type() {} func (*MlCommandResult_MlOperatorInfo_Name) isMlCommandResult_MlOperatorInfo_Type() {} var File_spark_connect_ml_proto protoreflect.FileDescriptor var file_spark_connect_ml_proto_rawDesc = []byte{ 0x0a, 0x16, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x6d, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 
0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x6d, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xb2, 0x0b, 0x0a, 0x09, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x30, 0x0a, 0x03, 0x66, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x46, 0x69, 0x74, 0x48, 0x00, 0x52, 0x03, 0x66, 0x69, 0x74, 0x12, 0x2c, 0x0a, 0x05, 0x66, 0x65, 0x74, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x66, 0x65, 0x74, 0x63, 0x68, 0x12, 0x39, 0x0a, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x36, 0x0a, 0x05, 0x77, 0x72, 0x69, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x77, 0x72, 0x69, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x04, 0x72, 0x65, 0x61, 0x64, 
0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x48, 0x00, 0x52, 0x04, 0x72, 0x65, 0x61, 0x64, 0x12, 0x3f, 0x0a, 0x08, 0x65, 0x76, 0x61, 0x6c, 0x75, 0x61, 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x45, 0x76, 0x61, 0x6c, 0x75, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, 0x08, 0x65, 0x76, 0x61, 0x6c, 0x75, 0x61, 0x74, 0x65, 0x12, 0x46, 0x0a, 0x0b, 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x4d, 0x0a, 0x0e, 0x67, 0x65, 0x74, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x00, 0x52, 0x0c, 0x67, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x1a, 0xb2, 0x01, 0x0a, 0x03, 0x46, 0x69, 0x74, 0x12, 0x37, 0x0a, 0x09, 0x65, 0x73, 0x74, 0x69, 0x6d, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x65, 0x73, 0x74, 0x69, 0x6d, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x34, 0x0a, 0x06, 0x70, 0x61, 0x72, 
0x61, 0x6d, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x48, 0x00, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x88, 0x01, 0x01, 0x12, 0x31, 0x0a, 0x07, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x1a, 0x3d, 0x0a, 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x08, 0x6f, 0x62, 0x6a, 0x5f, 0x72, 0x65, 0x66, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x66, 0x52, 0x07, 0x6f, 0x62, 0x6a, 0x52, 0x65, 0x66, 0x73, 0x1a, 0x0c, 0x0a, 0x0a, 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x1a, 0x0e, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x1a, 0x9a, 0x03, 0x0a, 0x05, 0x57, 0x72, 0x69, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x07, 0x6f, 0x62, 0x6a, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x66, 0x48, 0x00, 0x52, 0x06, 0x6f, 0x62, 0x6a, 0x52, 0x65, 0x66, 0x12, 0x34, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 
0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x48, 0x01, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x88, 0x01, 0x01, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x2e, 0x0a, 0x10, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x48, 0x02, 0x52, 0x0f, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x4f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x88, 0x01, 0x01, 0x12, 0x45, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x1a, 0x51, 0x0a, 0x04, 0x52, 0x65, 0x61, 0x64, 0x12, 0x35, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 
0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x1a, 0xb7, 0x01, 0x0a, 0x08, 0x45, 0x76, 0x61, 0x6c, 0x75, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x09, 0x65, 0x76, 0x61, 0x6c, 0x75, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x65, 0x76, 0x61, 0x6c, 0x75, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x34, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x48, 0x00, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x88, 0x01, 0x01, 0x12, 0x31, 0x0a, 0x07, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x42, 0x09, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x22, 0xd5, 0x03, 0x0a, 0x0f, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x39, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x1a, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 
0x61, 0x72, 0x79, 0x12, 0x54, 0x0a, 0x0d, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x00, 0x52, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x1a, 0x85, 0x02, 0x0a, 0x0e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x33, 0x0a, 0x07, 0x6f, 0x62, 0x6a, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x66, 0x48, 0x00, 0x52, 0x06, 0x6f, 0x62, 0x6a, 0x52, 0x65, 0x66, 0x12, 0x14, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x15, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x03, 0x75, 0x69, 0x64, 0x88, 0x01, 0x01, 0x12, 0x34, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x48, 0x02, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x88, 0x01, 0x01, 0x12, 0x2c, 0x0a, 0x0f, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, 0x0e, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x75, 0x69, 0x64, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 
0x42, 0x12, 0x0a, 0x10, 0x5f, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x42, 0x0d, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_ml_proto_rawDescOnce sync.Once file_spark_connect_ml_proto_rawDescData = file_spark_connect_ml_proto_rawDesc ) func file_spark_connect_ml_proto_rawDescGZIP() []byte { file_spark_connect_ml_proto_rawDescOnce.Do(func() { file_spark_connect_ml_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_ml_proto_rawDescData) }) return file_spark_connect_ml_proto_rawDescData } var file_spark_connect_ml_proto_msgTypes = make([]protoimpl.MessageInfo, 11) var file_spark_connect_ml_proto_goTypes = []interface{}{ (*MlCommand)(nil), // 0: spark.connect.MlCommand (*MlCommandResult)(nil), // 1: spark.connect.MlCommandResult (*MlCommand_Fit)(nil), // 2: spark.connect.MlCommand.Fit (*MlCommand_Delete)(nil), // 3: spark.connect.MlCommand.Delete (*MlCommand_CleanCache)(nil), // 4: spark.connect.MlCommand.CleanCache (*MlCommand_GetCacheInfo)(nil), // 5: spark.connect.MlCommand.GetCacheInfo (*MlCommand_Write)(nil), // 6: spark.connect.MlCommand.Write (*MlCommand_Read)(nil), // 7: spark.connect.MlCommand.Read (*MlCommand_Evaluate)(nil), // 8: spark.connect.MlCommand.Evaluate nil, // 9: spark.connect.MlCommand.Write.OptionsEntry (*MlCommandResult_MlOperatorInfo)(nil), // 10: spark.connect.MlCommandResult.MlOperatorInfo (*Fetch)(nil), // 11: spark.connect.Fetch (*Expression_Literal)(nil), // 12: spark.connect.Expression.Literal (*MlOperator)(nil), // 13: 
spark.connect.MlOperator (*MlParams)(nil), // 14: spark.connect.MlParams (*Relation)(nil), // 15: spark.connect.Relation (*ObjectRef)(nil), // 16: spark.connect.ObjectRef } var file_spark_connect_ml_proto_depIdxs = []int32{ 2, // 0: spark.connect.MlCommand.fit:type_name -> spark.connect.MlCommand.Fit 11, // 1: spark.connect.MlCommand.fetch:type_name -> spark.connect.Fetch 3, // 2: spark.connect.MlCommand.delete:type_name -> spark.connect.MlCommand.Delete 6, // 3: spark.connect.MlCommand.write:type_name -> spark.connect.MlCommand.Write 7, // 4: spark.connect.MlCommand.read:type_name -> spark.connect.MlCommand.Read 8, // 5: spark.connect.MlCommand.evaluate:type_name -> spark.connect.MlCommand.Evaluate 4, // 6: spark.connect.MlCommand.clean_cache:type_name -> spark.connect.MlCommand.CleanCache 5, // 7: spark.connect.MlCommand.get_cache_info:type_name -> spark.connect.MlCommand.GetCacheInfo 12, // 8: spark.connect.MlCommandResult.param:type_name -> spark.connect.Expression.Literal 10, // 9: spark.connect.MlCommandResult.operator_info:type_name -> spark.connect.MlCommandResult.MlOperatorInfo 13, // 10: spark.connect.MlCommand.Fit.estimator:type_name -> spark.connect.MlOperator 14, // 11: spark.connect.MlCommand.Fit.params:type_name -> spark.connect.MlParams 15, // 12: spark.connect.MlCommand.Fit.dataset:type_name -> spark.connect.Relation 16, // 13: spark.connect.MlCommand.Delete.obj_refs:type_name -> spark.connect.ObjectRef 13, // 14: spark.connect.MlCommand.Write.operator:type_name -> spark.connect.MlOperator 16, // 15: spark.connect.MlCommand.Write.obj_ref:type_name -> spark.connect.ObjectRef 14, // 16: spark.connect.MlCommand.Write.params:type_name -> spark.connect.MlParams 9, // 17: spark.connect.MlCommand.Write.options:type_name -> spark.connect.MlCommand.Write.OptionsEntry 13, // 18: spark.connect.MlCommand.Read.operator:type_name -> spark.connect.MlOperator 13, // 19: spark.connect.MlCommand.Evaluate.evaluator:type_name -> spark.connect.MlOperator 14, // 20: 
spark.connect.MlCommand.Evaluate.params:type_name -> spark.connect.MlParams 15, // 21: spark.connect.MlCommand.Evaluate.dataset:type_name -> spark.connect.Relation 16, // 22: spark.connect.MlCommandResult.MlOperatorInfo.obj_ref:type_name -> spark.connect.ObjectRef 14, // 23: spark.connect.MlCommandResult.MlOperatorInfo.params:type_name -> spark.connect.MlParams 24, // [24:24] is the sub-list for method output_type 24, // [24:24] is the sub-list for method input_type 24, // [24:24] is the sub-list for extension type_name 24, // [24:24] is the sub-list for extension extendee 0, // [0:24] is the sub-list for field type_name } func init() { file_spark_connect_ml_proto_init() } func file_spark_connect_ml_proto_init() { if File_spark_connect_ml_proto != nil { return } file_spark_connect_relations_proto_init() file_spark_connect_expressions_proto_init() file_spark_connect_ml_common_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_ml_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommandResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommand_Fit); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommand_Delete); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := 
v.(*MlCommand_CleanCache); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommand_GetCacheInfo); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommand_Write); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommand_Read); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommand_Evaluate); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlCommandResult_MlOperatorInfo); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_ml_proto_msgTypes[0].OneofWrappers = []interface{}{ (*MlCommand_Fit_)(nil), (*MlCommand_Fetch)(nil), (*MlCommand_Delete_)(nil), (*MlCommand_Write_)(nil), (*MlCommand_Read_)(nil), (*MlCommand_Evaluate_)(nil), (*MlCommand_CleanCache_)(nil), (*MlCommand_GetCacheInfo_)(nil), } file_spark_connect_ml_proto_msgTypes[1].OneofWrappers = []interface{}{ (*MlCommandResult_Param)(nil), (*MlCommandResult_Summary)(nil), (*MlCommandResult_OperatorInfo)(nil), } file_spark_connect_ml_proto_msgTypes[2].OneofWrappers = []interface{}{} file_spark_connect_ml_proto_msgTypes[6].OneofWrappers = []interface{}{ 
(*MlCommand_Write_Operator)(nil), (*MlCommand_Write_ObjRef)(nil), } file_spark_connect_ml_proto_msgTypes[8].OneofWrappers = []interface{}{} file_spark_connect_ml_proto_msgTypes[10].OneofWrappers = []interface{}{ (*MlCommandResult_MlOperatorInfo_ObjRef)(nil), (*MlCommandResult_MlOperatorInfo_Name)(nil), } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_ml_proto_rawDesc, NumEnums: 0, NumMessages: 11, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_ml_proto_goTypes, DependencyIndexes: file_spark_connect_ml_proto_depIdxs, MessageInfos: file_spark_connect_ml_proto_msgTypes, }.Build() File_spark_connect_ml_proto = out.File file_spark_connect_ml_proto_rawDesc = nil file_spark_connect_ml_proto_goTypes = nil file_spark_connect_ml_proto_depIdxs = nil } ================================================ FILE: internal/generated/ml_common.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions:
// 	protoc-gen-go v1.30.0
// 	protoc        (unknown)
// source: spark/connect/ml_common.proto

package generated

import (
	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
	reflect "reflect"
	sync "sync"
)

const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// MlOperator_OperatorType is the generated Go binding for the
// spark.connect.MlOperator.OperatorType proto enum. It tags an MlOperator
// message as an estimator, transformer, evaluator, or model.
type MlOperator_OperatorType int32

const (
	MlOperator_OPERATOR_TYPE_UNSPECIFIED MlOperator_OperatorType = 0
	// ML estimator
	MlOperator_OPERATOR_TYPE_ESTIMATOR MlOperator_OperatorType = 1
	// ML transformer (non-model)
	MlOperator_OPERATOR_TYPE_TRANSFORMER MlOperator_OperatorType = 2
	// ML evaluator
	MlOperator_OPERATOR_TYPE_EVALUATOR MlOperator_OperatorType = 3
	// ML model
	MlOperator_OPERATOR_TYPE_MODEL MlOperator_OperatorType = 4
)

// Enum value maps for MlOperator_OperatorType.
var (
	// MlOperator_OperatorType_name maps the enum's numeric value to its proto name.
	MlOperator_OperatorType_name = map[int32]string{
		0: "OPERATOR_TYPE_UNSPECIFIED",
		1: "OPERATOR_TYPE_ESTIMATOR",
		2: "OPERATOR_TYPE_TRANSFORMER",
		3: "OPERATOR_TYPE_EVALUATOR",
		4: "OPERATOR_TYPE_MODEL",
	}
	// MlOperator_OperatorType_value is the inverse mapping, proto name to numeric value.
	MlOperator_OperatorType_value = map[string]int32{
		"OPERATOR_TYPE_UNSPECIFIED": 0,
		"OPERATOR_TYPE_ESTIMATOR":   1,
		"OPERATOR_TYPE_TRANSFORMER": 2,
		"OPERATOR_TYPE_EVALUATOR":   3,
		"OPERATOR_TYPE_MODEL":       4,
	}
)

// Enum returns a pointer to a fresh copy of x; useful for populating optional
// enum fields in generated messages.
func (x MlOperator_OperatorType) Enum() *MlOperator_OperatorType {
	p := new(MlOperator_OperatorType)
	*p = x
	return p
}

// String returns the proto name of the enum value (e.g. "OPERATOR_TYPE_MODEL").
func (x MlOperator_OperatorType) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

// Descriptor returns the protoreflect descriptor for this enum type.
func (MlOperator_OperatorType) Descriptor() protoreflect.EnumDescriptor {
	return file_spark_connect_ml_common_proto_enumTypes[0].Descriptor()
}

// Type returns the protoreflect type information for this enum.
func (MlOperator_OperatorType) Type() protoreflect.EnumType {
	return &file_spark_connect_ml_common_proto_enumTypes[0]
}

// Number returns the enum's wire value.
func (x MlOperator_OperatorType) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use MlOperator_OperatorType.Descriptor instead.
func (MlOperator_OperatorType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_ml_common_proto_rawDescGZIP(), []int{1, 0} } // MlParams stores param settings for ML Estimator / Transformer / Evaluator type MlParams struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // User-supplied params Params map[string]*Expression_Literal `protobuf:"bytes,1,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *MlParams) Reset() { *x = MlParams{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_ml_common_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *MlParams) String() string { return protoimpl.X.MessageStringOf(x) } func (*MlParams) ProtoMessage() {} func (x *MlParams) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_ml_common_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use MlParams.ProtoReflect.Descriptor instead. func (*MlParams) Descriptor() ([]byte, []int) { return file_spark_connect_ml_common_proto_rawDescGZIP(), []int{0} } func (x *MlParams) GetParams() map[string]*Expression_Literal { if x != nil { return x.Params } return nil } // MLOperator represents the ML operators like (Estimator, Transformer or Evaluator) type MlOperator struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The qualified name of the ML operator. 
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // (Required) Unique id of the ML operator Uid string `protobuf:"bytes,2,opt,name=uid,proto3" json:"uid,omitempty"` // (Required) Represents what the ML operator is Type MlOperator_OperatorType `protobuf:"varint,3,opt,name=type,proto3,enum=spark.connect.MlOperator_OperatorType" json:"type,omitempty"` } func (x *MlOperator) Reset() { *x = MlOperator{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_ml_common_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *MlOperator) String() string { return protoimpl.X.MessageStringOf(x) } func (*MlOperator) ProtoMessage() {} func (x *MlOperator) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_ml_common_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use MlOperator.ProtoReflect.Descriptor instead. func (*MlOperator) Descriptor() ([]byte, []int) { return file_spark_connect_ml_common_proto_rawDescGZIP(), []int{1} } func (x *MlOperator) GetName() string { if x != nil { return x.Name } return "" } func (x *MlOperator) GetUid() string { if x != nil { return x.Uid } return "" } func (x *MlOperator) GetType() MlOperator_OperatorType { if x != nil { return x.Type } return MlOperator_OPERATOR_TYPE_UNSPECIFIED } // Represents a reference to the cached object which could be a model // or summary evaluated by a model type ObjectRef struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The ID is used to lookup the object on the server side. // Note it is different from the 'uid' of a ML object. 
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` } func (x *ObjectRef) Reset() { *x = ObjectRef{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_ml_common_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ObjectRef) String() string { return protoimpl.X.MessageStringOf(x) } func (*ObjectRef) ProtoMessage() {} func (x *ObjectRef) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_ml_common_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ObjectRef.ProtoReflect.Descriptor instead. func (*ObjectRef) Descriptor() ([]byte, []int) { return file_spark_connect_ml_common_proto_rawDescGZIP(), []int{2} } func (x *ObjectRef) GetId() string { if x != nil { return x.Id } return "" } var File_spark_connect_ml_common_proto protoreflect.FileDescriptor var file_spark_connect_ml_common_proto_rawDesc = []byte{ 0x0a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x6d, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x1f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xa5, 0x01, 0x0a, 0x08, 0x4d, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x3b, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 
0x73, 0x1a, 0x5c, 0x0a, 0x0b, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x37, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x90, 0x02, 0x0a, 0x0a, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x3a, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x9f, 0x01, 0x0a, 0x0c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1d, 0x0a, 0x19, 0x4f, 0x50, 0x45, 0x52, 0x41, 0x54, 0x4f, 0x52, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x4f, 0x50, 0x45, 0x52, 0x41, 0x54, 0x4f, 0x52, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x53, 0x54, 0x49, 0x4d, 0x41, 0x54, 0x4f, 0x52, 0x10, 0x01, 0x12, 0x1d, 0x0a, 0x19, 0x4f, 0x50, 0x45, 0x52, 0x41, 0x54, 0x4f, 0x52, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x46, 0x4f, 0x52, 0x4d, 0x45, 0x52, 0x10, 0x02, 0x12, 0x1b, 0x0a, 0x17, 0x4f, 0x50, 0x45, 0x52, 0x41, 0x54, 0x4f, 0x52, 0x5f, 0x54, 0x59, 0x50, 0x45, 
0x5f, 0x45, 0x56, 0x41, 0x4c, 0x55, 0x41, 0x54, 0x4f, 0x52, 0x10, 0x03, 0x12, 0x17, 0x0a, 0x13, 0x4f, 0x50, 0x45, 0x52, 0x41, 0x54, 0x4f, 0x52, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x4c, 0x10, 0x04, 0x22, 0x1b, 0x0a, 0x09, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x66, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_ml_common_proto_rawDescOnce sync.Once file_spark_connect_ml_common_proto_rawDescData = file_spark_connect_ml_common_proto_rawDesc ) func file_spark_connect_ml_common_proto_rawDescGZIP() []byte { file_spark_connect_ml_common_proto_rawDescOnce.Do(func() { file_spark_connect_ml_common_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_ml_common_proto_rawDescData) }) return file_spark_connect_ml_common_proto_rawDescData } var file_spark_connect_ml_common_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_spark_connect_ml_common_proto_msgTypes = make([]protoimpl.MessageInfo, 4) var file_spark_connect_ml_common_proto_goTypes = []interface{}{ (MlOperator_OperatorType)(0), // 0: spark.connect.MlOperator.OperatorType (*MlParams)(nil), // 1: spark.connect.MlParams (*MlOperator)(nil), // 2: spark.connect.MlOperator (*ObjectRef)(nil), // 3: spark.connect.ObjectRef nil, // 4: spark.connect.MlParams.ParamsEntry (*Expression_Literal)(nil), // 5: spark.connect.Expression.Literal } var file_spark_connect_ml_common_proto_depIdxs = []int32{ 4, // 0: spark.connect.MlParams.params:type_name -> spark.connect.MlParams.ParamsEntry 0, // 1: spark.connect.MlOperator.type:type_name 
-> spark.connect.MlOperator.OperatorType 5, // 2: spark.connect.MlParams.ParamsEntry.value:type_name -> spark.connect.Expression.Literal 3, // [3:3] is the sub-list for method output_type 3, // [3:3] is the sub-list for method input_type 3, // [3:3] is the sub-list for extension type_name 3, // [3:3] is the sub-list for extension extendee 0, // [0:3] is the sub-list for field type_name } func init() { file_spark_connect_ml_common_proto_init() } func file_spark_connect_ml_common_proto_init() { if File_spark_connect_ml_common_proto != nil { return } file_spark_connect_expressions_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_ml_common_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlParams); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_common_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlOperator); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_ml_common_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ObjectRef); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_ml_common_proto_rawDesc, NumEnums: 1, NumMessages: 4, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_ml_common_proto_goTypes, DependencyIndexes: file_spark_connect_ml_common_proto_depIdxs, EnumInfos: file_spark_connect_ml_common_proto_enumTypes, MessageInfos: file_spark_connect_ml_common_proto_msgTypes, }.Build() File_spark_connect_ml_common_proto = out.File file_spark_connect_ml_common_proto_rawDesc = nil file_spark_connect_ml_common_proto_goTypes = nil 
file_spark_connect_ml_common_proto_depIdxs = nil } ================================================ FILE: internal/generated/pipelines.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/pipelines.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // The type of dataset. type DatasetType int32 const ( // Safe default value. Should not be used. 
DatasetType_DATASET_TYPE_UNSPECIFIED DatasetType = 0 // A materialized view dataset which is published to the catalog DatasetType_MATERIALIZED_VIEW DatasetType = 1 // A table which is published to the catalog DatasetType_TABLE DatasetType = 2 // A view which is not published to the catalog DatasetType_TEMPORARY_VIEW DatasetType = 3 ) // Enum value maps for DatasetType. var ( DatasetType_name = map[int32]string{ 0: "DATASET_TYPE_UNSPECIFIED", 1: "MATERIALIZED_VIEW", 2: "TABLE", 3: "TEMPORARY_VIEW", } DatasetType_value = map[string]int32{ "DATASET_TYPE_UNSPECIFIED": 0, "MATERIALIZED_VIEW": 1, "TABLE": 2, "TEMPORARY_VIEW": 3, } ) func (x DatasetType) Enum() *DatasetType { p := new(DatasetType) *p = x return p } func (x DatasetType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (DatasetType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_pipelines_proto_enumTypes[0].Descriptor() } func (DatasetType) Type() protoreflect.EnumType { return &file_spark_connect_pipelines_proto_enumTypes[0] } func (x DatasetType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use DatasetType.Descriptor instead. func (DatasetType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0} } // Dispatch object for pipelines commands. See each individual command for documentation. 
type PipelineCommand struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to CommandType:
	//
	//	*PipelineCommand_CreateDataflowGraph_
	//	*PipelineCommand_DefineDataset_
	//	*PipelineCommand_DefineFlow_
	//	*PipelineCommand_DropDataflowGraph_
	//	*PipelineCommand_StartRun_
	//	*PipelineCommand_DefineSqlGraphElements
	CommandType isPipelineCommand_CommandType `protobuf_oneof:"command_type"`
}

// Reset zeroes the message and, under the unsafe fast path, re-attaches the
// cached message info so the value remains usable by the protobuf runtime.
func (x *PipelineCommand) Reset() {
	*x = PipelineCommand{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the protobuf text format.
func (x *PipelineCommand) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*PipelineCommand) ProtoMessage() {}

// ProtoReflect exposes the message through the protoreflect API, lazily
// populating the message info on first use when the unsafe path is enabled.
func (x *PipelineCommand) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[0]
	if protoimpl.UnsafeeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommand.ProtoReflect.Descriptor instead.
func (*PipelineCommand) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0}
}

// GetCommandType returns the populated oneof wrapper, or nil if unset.
func (m *PipelineCommand) GetCommandType() isPipelineCommand_CommandType {
	if m != nil {
		return m.CommandType
	}
	return nil
}

// Each getter below returns the corresponding oneof variant's payload, or nil
// when a different variant (or none) is set.

func (x *PipelineCommand) GetCreateDataflowGraph() *PipelineCommand_CreateDataflowGraph {
	if x, ok := x.GetCommandType().(*PipelineCommand_CreateDataflowGraph_); ok {
		return x.CreateDataflowGraph
	}
	return nil
}

func (x *PipelineCommand) GetDefineDataset() *PipelineCommand_DefineDataset {
	if x, ok := x.GetCommandType().(*PipelineCommand_DefineDataset_); ok {
		return x.DefineDataset
	}
	return nil
}

func (x *PipelineCommand) GetDefineFlow() *PipelineCommand_DefineFlow {
	if x, ok := x.GetCommandType().(*PipelineCommand_DefineFlow_); ok {
		return x.DefineFlow
	}
	return nil
}

func (x *PipelineCommand) GetDropDataflowGraph() *PipelineCommand_DropDataflowGraph {
	if x, ok := x.GetCommandType().(*PipelineCommand_DropDataflowGraph_); ok {
		return x.DropDataflowGraph
	}
	return nil
}

func (x *PipelineCommand) GetStartRun() *PipelineCommand_StartRun {
	if x, ok := x.GetCommandType().(*PipelineCommand_StartRun_); ok {
		return x.StartRun
	}
	return nil
}

func (x *PipelineCommand) GetDefineSqlGraphElements() *DefineSqlGraphElements {
	if x, ok := x.GetCommandType().(*PipelineCommand_DefineSqlGraphElements); ok {
		return x.DefineSqlGraphElements
	}
	return nil
}

// isPipelineCommand_CommandType is the sealed interface implemented by every
// command_type oneof wrapper.
type isPipelineCommand_CommandType interface {
	isPipelineCommand_CommandType()
}

type PipelineCommand_CreateDataflowGraph_ struct {
	CreateDataflowGraph *PipelineCommand_CreateDataflowGraph `protobuf:"bytes,1,opt,name=create_dataflow_graph,json=createDataflowGraph,proto3,oneof"`
}

type PipelineCommand_DefineDataset_ struct {
	DefineDataset *PipelineCommand_DefineDataset `protobuf:"bytes,2,opt,name=define_dataset,json=defineDataset,proto3,oneof"`
}

type PipelineCommand_DefineFlow_ struct {
	DefineFlow *PipelineCommand_DefineFlow `protobuf:"bytes,3,opt,name=define_flow,json=defineFlow,proto3,oneof"`
}

type PipelineCommand_DropDataflowGraph_ struct {
	DropDataflowGraph *PipelineCommand_DropDataflowGraph `protobuf:"bytes,4,opt,name=drop_dataflow_graph,json=dropDataflowGraph,proto3,oneof"`
}

type PipelineCommand_StartRun_ struct {
	StartRun *PipelineCommand_StartRun `protobuf:"bytes,5,opt,name=start_run,json=startRun,proto3,oneof"`
}

type PipelineCommand_DefineSqlGraphElements struct {
	DefineSqlGraphElements *DefineSqlGraphElements `protobuf:"bytes,6,opt,name=define_sql_graph_elements,json=defineSqlGraphElements,proto3,oneof"`
}

func (*PipelineCommand_CreateDataflowGraph_) isPipelineCommand_CommandType() {}

func (*PipelineCommand_DefineDataset_) isPipelineCommand_CommandType() {}

func (*PipelineCommand_DefineFlow_) isPipelineCommand_CommandType() {}

func (*PipelineCommand_DropDataflowGraph_) isPipelineCommand_CommandType() {}

func (*PipelineCommand_StartRun_) isPipelineCommand_CommandType() {}

func (*PipelineCommand_DefineSqlGraphElements) isPipelineCommand_CommandType() {}

// Parses the SQL file and registers all datasets and flows.
type DefineSqlGraphElements struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The graph to attach this dataset to.
	DataflowGraphId *string `protobuf:"bytes,1,opt,name=dataflow_graph_id,json=dataflowGraphId,proto3,oneof" json:"dataflow_graph_id,omitempty"`
	// The full path to the SQL file. Can be relative or absolute.
	SqlFilePath *string `protobuf:"bytes,2,opt,name=sql_file_path,json=sqlFilePath,proto3,oneof" json:"sql_file_path,omitempty"`
	// The contents of the SQL file.
	SqlText *string `protobuf:"bytes,3,opt,name=sql_text,json=sqlText,proto3,oneof" json:"sql_text,omitempty"`
}

func (x *DefineSqlGraphElements) Reset() {
	*x = DefineSqlGraphElements{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *DefineSqlGraphElements) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*DefineSqlGraphElements) ProtoMessage() {}

func (x *DefineSqlGraphElements) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use DefineSqlGraphElements.ProtoReflect.Descriptor instead.
func (*DefineSqlGraphElements) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{1}
}

// The optional-field getters below dereference the pointer field when present
// and fall back to the empty string otherwise.

func (x *DefineSqlGraphElements) GetDataflowGraphId() string {
	if x != nil && x.DataflowGraphId != nil {
		return *x.DataflowGraphId
	}
	return ""
}

func (x *DefineSqlGraphElements) GetSqlFilePath() string {
	if x != nil && x.SqlFilePath != nil {
		return *x.SqlFilePath
	}
	return ""
}

func (x *DefineSqlGraphElements) GetSqlText() string {
	if x != nil && x.SqlText != nil {
		return *x.SqlText
	}
	return ""
}

// Dispatch object for pipelines command results.
type PipelineCommandResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to ResultType:
	//
	//	*PipelineCommandResult_CreateDataflowGraphResult_
	ResultType isPipelineCommandResult_ResultType `protobuf_oneof:"result_type"`
}

// Reset zeroes the message and, under the unsafe fast path, re-attaches the
// cached message info so the value remains usable by the protobuf runtime.
func (x *PipelineCommandResult) Reset() {
	*x = PipelineCommandResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the protobuf text format.
func (x *PipelineCommandResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*PipelineCommandResult) ProtoMessage() {}

// ProtoReflect exposes the message through the protoreflect API, lazily
// populating the message info on first use when the unsafe path is enabled.
func (x *PipelineCommandResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommandResult.ProtoReflect.Descriptor instead.
func (*PipelineCommandResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{2}
}

// GetResultType returns the populated oneof wrapper, or nil if unset.
func (m *PipelineCommandResult) GetResultType() isPipelineCommandResult_ResultType {
	if m != nil {
		return m.ResultType
	}
	return nil
}

// GetCreateDataflowGraphResult returns the create_dataflow_graph_result
// variant's payload, or nil when a different variant (or none) is set.
func (x *PipelineCommandResult) GetCreateDataflowGraphResult() *PipelineCommandResult_CreateDataflowGraphResult {
	if x, ok := x.GetResultType().(*PipelineCommandResult_CreateDataflowGraphResult_); ok {
		return x.CreateDataflowGraphResult
	}
	return nil
}

// isPipelineCommandResult_ResultType is the sealed interface implemented by
// every result_type oneof wrapper.
type isPipelineCommandResult_ResultType interface {
	isPipelineCommandResult_ResultType()
}

type PipelineCommandResult_CreateDataflowGraphResult_ struct {
	CreateDataflowGraphResult *PipelineCommandResult_CreateDataflowGraphResult `protobuf:"bytes,1,opt,name=create_dataflow_graph_result,json=createDataflowGraphResult,proto3,oneof"`
}

func (*PipelineCommandResult_CreateDataflowGraphResult_) isPipelineCommandResult_ResultType() {}

// A response containing an event emitted during the run of a pipeline.
type PipelineEventResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Event *PipelineEvent `protobuf:"bytes,1,opt,name=event,proto3" json:"event,omitempty"`
}

func (x *PipelineEventResult) Reset() {
	*x = PipelineEventResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *PipelineEventResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*PipelineEventResult) ProtoMessage() {}

func (x *PipelineEventResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineEventResult.ProtoReflect.Descriptor instead.
// Descriptor returns the gzipped raw file descriptor and this message's
// index path within it.
func (*PipelineEventResult) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{3}
}

// GetEvent returns the event field, or nil on a nil receiver.
func (x *PipelineEventResult) GetEvent() *PipelineEvent {
	if x != nil {
		return x.Event
	}
	return nil
}

// An event emitted during the run of a graph.
type PipelineEvent struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The time of the event.
	Timestamp *string `protobuf:"bytes,1,opt,name=timestamp,proto3,oneof" json:"timestamp,omitempty"`
	// The message that should be displayed to users.
	Message *string `protobuf:"bytes,2,opt,name=message,proto3,oneof" json:"message,omitempty"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineEvent) Reset() {
	*x = PipelineEvent{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineEvent) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineEvent) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineEvent) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineEvent.ProtoReflect.Descriptor instead.
func (*PipelineEvent) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{4}
}

// GetTimestamp returns the timestamp field, or "" when unset.
func (x *PipelineEvent) GetTimestamp() string {
	if x != nil && x.Timestamp != nil {
		return *x.Timestamp
	}
	return ""
}

// GetMessage returns the message field, or "" when unset.
func (x *PipelineEvent) GetMessage() string {
	if x != nil && x.Message != nil {
		return *x.Message
	}
	return ""
}

// Request to create a new dataflow graph.
type PipelineCommand_CreateDataflowGraph struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The default catalog.
	DefaultCatalog *string `protobuf:"bytes,1,opt,name=default_catalog,json=defaultCatalog,proto3,oneof" json:"default_catalog,omitempty"`
	// The default database.
	DefaultDatabase *string `protobuf:"bytes,2,opt,name=default_database,json=defaultDatabase,proto3,oneof" json:"default_database,omitempty"`
	// SQL configurations for all flows in this graph.
	SqlConf map[string]string `protobuf:"bytes,5,rep,name=sql_conf,json=sqlConf,proto3" json:"sql_conf,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineCommand_CreateDataflowGraph) Reset() {
	*x = PipelineCommand_CreateDataflowGraph{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineCommand_CreateDataflowGraph) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineCommand_CreateDataflowGraph) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineCommand_CreateDataflowGraph) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommand_CreateDataflowGraph.ProtoReflect.Descriptor instead.
func (*PipelineCommand_CreateDataflowGraph) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0, 0}
}

// GetDefaultCatalog returns the default_catalog field, or "" when unset.
func (x *PipelineCommand_CreateDataflowGraph) GetDefaultCatalog() string {
	if x != nil && x.DefaultCatalog != nil {
		return *x.DefaultCatalog
	}
	return ""
}

// GetDefaultDatabase returns the default_database field, or "" when unset.
func (x *PipelineCommand_CreateDataflowGraph) GetDefaultDatabase() string {
	if x != nil && x.DefaultDatabase != nil {
		return *x.DefaultDatabase
	}
	return ""
}

// GetSqlConf returns the sql_conf map, or nil on a nil receiver.
func (x *PipelineCommand_CreateDataflowGraph) GetSqlConf() map[string]string {
	if x != nil {
		return x.SqlConf
	}
	return nil
}

// Drops the graph and stops any running attached flows.
type PipelineCommand_DropDataflowGraph struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The graph to drop.
	DataflowGraphId *string `protobuf:"bytes,1,opt,name=dataflow_graph_id,json=dataflowGraphId,proto3,oneof" json:"dataflow_graph_id,omitempty"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineCommand_DropDataflowGraph) Reset() {
	*x = PipelineCommand_DropDataflowGraph{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineCommand_DropDataflowGraph) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineCommand_DropDataflowGraph) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineCommand_DropDataflowGraph) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommand_DropDataflowGraph.ProtoReflect.Descriptor instead.
// Descriptor returns the gzipped raw file descriptor and this message's
// index path within it.
func (*PipelineCommand_DropDataflowGraph) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0, 1}
}

// GetDataflowGraphId returns the dataflow_graph_id field, or "" when unset.
func (x *PipelineCommand_DropDataflowGraph) GetDataflowGraphId() string {
	if x != nil && x.DataflowGraphId != nil {
		return *x.DataflowGraphId
	}
	return ""
}

// Request to define a dataset: a table, a materialized view, or a temporary view.
type PipelineCommand_DefineDataset struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The graph to attach this dataset to.
	DataflowGraphId *string `protobuf:"bytes,1,opt,name=dataflow_graph_id,json=dataflowGraphId,proto3,oneof" json:"dataflow_graph_id,omitempty"`
	// Name of the dataset. Can be partially or fully qualified.
	DatasetName *string `protobuf:"bytes,2,opt,name=dataset_name,json=datasetName,proto3,oneof" json:"dataset_name,omitempty"`
	// The type of the dataset.
	DatasetType *DatasetType `protobuf:"varint,3,opt,name=dataset_type,json=datasetType,proto3,enum=spark.connect.DatasetType,oneof" json:"dataset_type,omitempty"`
	// Optional comment for the dataset.
	Comment *string `protobuf:"bytes,4,opt,name=comment,proto3,oneof" json:"comment,omitempty"`
	// Optional table properties. Only applies to dataset_type == TABLE and dataset_type == MATERIALIZED_VIEW.
	TableProperties map[string]string `protobuf:"bytes,5,rep,name=table_properties,json=tableProperties,proto3" json:"table_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional partition columns for the dataset. Only applies to dataset_type == TABLE and
	// dataset_type == MATERIALIZED_VIEW.
	PartitionCols []string `protobuf:"bytes,6,rep,name=partition_cols,json=partitionCols,proto3" json:"partition_cols,omitempty"`
	// Schema for the dataset. If unset, this will be inferred from incoming flows.
	Schema *DataType `protobuf:"bytes,7,opt,name=schema,proto3,oneof" json:"schema,omitempty"`
	// The output table format of the dataset. Only applies to dataset_type == TABLE and
	// dataset_type == MATERIALIZED_VIEW.
	Format *string `protobuf:"bytes,8,opt,name=format,proto3,oneof" json:"format,omitempty"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineCommand_DefineDataset) Reset() {
	*x = PipelineCommand_DefineDataset{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineCommand_DefineDataset) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineCommand_DefineDataset) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineCommand_DefineDataset) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommand_DefineDataset.ProtoReflect.Descriptor instead.
// Descriptor returns the gzipped raw file descriptor and this message's
// index path within it.
func (*PipelineCommand_DefineDataset) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0, 2}
}

// GetDataflowGraphId returns the dataflow_graph_id field, or "" when unset.
func (x *PipelineCommand_DefineDataset) GetDataflowGraphId() string {
	if x != nil && x.DataflowGraphId != nil {
		return *x.DataflowGraphId
	}
	return ""
}

// GetDatasetName returns the dataset_name field, or "" when unset.
func (x *PipelineCommand_DefineDataset) GetDatasetName() string {
	if x != nil && x.DatasetName != nil {
		return *x.DatasetName
	}
	return ""
}

// GetDatasetType returns the dataset_type field, or the UNSPECIFIED enum
// value when unset.
func (x *PipelineCommand_DefineDataset) GetDatasetType() DatasetType {
	if x != nil && x.DatasetType != nil {
		return *x.DatasetType
	}
	return DatasetType_DATASET_TYPE_UNSPECIFIED
}

// GetComment returns the comment field, or "" when unset.
func (x *PipelineCommand_DefineDataset) GetComment() string {
	if x != nil && x.Comment != nil {
		return *x.Comment
	}
	return ""
}

// GetTableProperties returns the table_properties map, or nil on a nil receiver.
func (x *PipelineCommand_DefineDataset) GetTableProperties() map[string]string {
	if x != nil {
		return x.TableProperties
	}
	return nil
}

// GetPartitionCols returns the partition_cols field, or nil on a nil receiver.
func (x *PipelineCommand_DefineDataset) GetPartitionCols() []string {
	if x != nil {
		return x.PartitionCols
	}
	return nil
}

// GetSchema returns the schema field, or nil on a nil receiver.
func (x *PipelineCommand_DefineDataset) GetSchema() *DataType {
	if x != nil {
		return x.Schema
	}
	return nil
}

// GetFormat returns the format field, or "" when unset.
func (x *PipelineCommand_DefineDataset) GetFormat() string {
	if x != nil && x.Format != nil {
		return *x.Format
	}
	return ""
}

// Request to define a flow targeting a dataset.
type PipelineCommand_DefineFlow struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The graph to attach this flow to.
	DataflowGraphId *string `protobuf:"bytes,1,opt,name=dataflow_graph_id,json=dataflowGraphId,proto3,oneof" json:"dataflow_graph_id,omitempty"`
	// Name of the flow. For standalone flows, this must be a single-part name.
	FlowName *string `protobuf:"bytes,2,opt,name=flow_name,json=flowName,proto3,oneof" json:"flow_name,omitempty"`
	// Name of the dataset this flow writes to. Can be partially or fully qualified.
	TargetDatasetName *string `protobuf:"bytes,3,opt,name=target_dataset_name,json=targetDatasetName,proto3,oneof" json:"target_dataset_name,omitempty"`
	// An unresolved relation that defines the dataset's flow.
	Plan *Relation `protobuf:"bytes,4,opt,name=plan,proto3,oneof" json:"plan,omitempty"`
	// SQL configurations set when running this flow.
	SqlConf map[string]string `protobuf:"bytes,5,rep,name=sql_conf,json=sqlConf,proto3" json:"sql_conf,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// If true, this flow will only be run once per full refresh.
	Once *bool `protobuf:"varint,6,opt,name=once,proto3,oneof" json:"once,omitempty"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineCommand_DefineFlow) Reset() {
	*x = PipelineCommand_DefineFlow{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineCommand_DefineFlow) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineCommand_DefineFlow) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineCommand_DefineFlow) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommand_DefineFlow.ProtoReflect.Descriptor instead.
// Descriptor returns the gzipped raw file descriptor and this message's
// index path within it.
func (*PipelineCommand_DefineFlow) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0, 3}
}

// GetDataflowGraphId returns the dataflow_graph_id field, or "" when unset.
func (x *PipelineCommand_DefineFlow) GetDataflowGraphId() string {
	if x != nil && x.DataflowGraphId != nil {
		return *x.DataflowGraphId
	}
	return ""
}

// GetFlowName returns the flow_name field, or "" when unset.
func (x *PipelineCommand_DefineFlow) GetFlowName() string {
	if x != nil && x.FlowName != nil {
		return *x.FlowName
	}
	return ""
}

// GetTargetDatasetName returns the target_dataset_name field, or "" when unset.
func (x *PipelineCommand_DefineFlow) GetTargetDatasetName() string {
	if x != nil && x.TargetDatasetName != nil {
		return *x.TargetDatasetName
	}
	return ""
}

// GetPlan returns the plan field, or nil on a nil receiver.
func (x *PipelineCommand_DefineFlow) GetPlan() *Relation {
	if x != nil {
		return x.Plan
	}
	return nil
}

// GetSqlConf returns the sql_conf map, or nil on a nil receiver.
func (x *PipelineCommand_DefineFlow) GetSqlConf() map[string]string {
	if x != nil {
		return x.SqlConf
	}
	return nil
}

// GetOnce returns the once field, or false when unset.
func (x *PipelineCommand_DefineFlow) GetOnce() bool {
	if x != nil && x.Once != nil {
		return *x.Once
	}
	return false
}

// Resolves all datasets and flows and start a pipeline update. Should be called after all
// graph elements are registered.
type PipelineCommand_StartRun struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The graph to start.
	DataflowGraphId *string `protobuf:"bytes,1,opt,name=dataflow_graph_id,json=dataflowGraphId,proto3,oneof" json:"dataflow_graph_id,omitempty"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineCommand_StartRun) Reset() {
	*x = PipelineCommand_StartRun{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineCommand_StartRun) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineCommand_StartRun) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineCommand_StartRun) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommand_StartRun.ProtoReflect.Descriptor instead.
func (*PipelineCommand_StartRun) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0, 4}
}

// GetDataflowGraphId returns the dataflow_graph_id field, or "" when unset.
func (x *PipelineCommand_StartRun) GetDataflowGraphId() string {
	if x != nil && x.DataflowGraphId != nil {
		return *x.DataflowGraphId
	}
	return ""
}

// PipelineCommand_CreateDataflowGraph_Response carries the server-assigned
// ID of a newly created dataflow graph.
type PipelineCommand_CreateDataflowGraph_Response struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The ID of the created graph.
	DataflowGraphId *string `protobuf:"bytes,1,opt,name=dataflow_graph_id,json=dataflowGraphId,proto3,oneof" json:"dataflow_graph_id,omitempty"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineCommand_CreateDataflowGraph_Response) Reset() {
	*x = PipelineCommand_CreateDataflowGraph_Response{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[11]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineCommand_CreateDataflowGraph_Response) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineCommand_CreateDataflowGraph_Response) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineCommand_CreateDataflowGraph_Response) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[11]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommand_CreateDataflowGraph_Response.ProtoReflect.Descriptor instead.
func (*PipelineCommand_CreateDataflowGraph_Response) Descriptor() ([]byte, []int) {
	return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{0, 0, 1}
}

// GetDataflowGraphId returns the dataflow_graph_id field, or "" when unset.
func (x *PipelineCommand_CreateDataflowGraph_Response) GetDataflowGraphId() string {
	if x != nil && x.DataflowGraphId != nil {
		return *x.DataflowGraphId
	}
	return ""
}

// PipelineCommandResult_CreateDataflowGraphResult carries the server-assigned
// ID of a newly created dataflow graph inside a PipelineCommandResult.
type PipelineCommandResult_CreateDataflowGraphResult struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The ID of the created graph.
	DataflowGraphId *string `protobuf:"bytes,1,opt,name=dataflow_graph_id,json=dataflowGraphId,proto3,oneof" json:"dataflow_graph_id,omitempty"`
}

// Reset restores the message to its zero value; on the unsafe fast path it
// also re-registers the cached message info for reflection.
func (x *PipelineCommandResult_CreateDataflowGraphResult) Reset() {
	*x = PipelineCommandResult_CreateDataflowGraphResult{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_pipelines_proto_msgTypes[14]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message via the protoimpl text formatter.
func (x *PipelineCommandResult_CreateDataflowGraphResult) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*PipelineCommandResult_CreateDataflowGraphResult) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *PipelineCommandResult_CreateDataflowGraphResult) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_pipelines_proto_msgTypes[14]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PipelineCommandResult_CreateDataflowGraphResult.ProtoReflect.Descriptor instead.
func (*PipelineCommandResult_CreateDataflowGraphResult) Descriptor() ([]byte, []int) { return file_spark_connect_pipelines_proto_rawDescGZIP(), []int{2, 0} } func (x *PipelineCommandResult_CreateDataflowGraphResult) GetDataflowGraphId() string { if x != nil && x.DataflowGraphId != nil { return *x.DataflowGraphId } return "" } var File_spark_connect_pipelines_proto protoreflect.FileDescriptor var file_spark_connect_pipelines_proto_rawDesc = []byte{ 0x0a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x8c, 0x11, 0x0a, 0x0f, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x68, 0x0a, 0x15, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x48, 0x00, 0x52, 0x13, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x55, 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 
0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x12, 0x4c, 0x0a, 0x0b, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x46, 0x6c, 0x6f, 0x77, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x46, 0x6c, 0x6f, 0x77, 0x12, 0x62, 0x0a, 0x13, 0x64, 0x72, 0x6f, 0x70, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x44, 0x72, 0x6f, 0x70, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x48, 0x00, 0x52, 0x11, 0x64, 0x72, 0x6f, 0x70, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x46, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x75, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x48, 0x00, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x12, 0x62, 0x0a, 0x19, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x71, 0x6c, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x65, 0x6c, 0x65, 
0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x53, 0x71, 0x6c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x48, 0x00, 0x52, 0x16, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x53, 0x71, 0x6c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x87, 0x03, 0x0a, 0x13, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x2c, 0x0a, 0x0f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x88, 0x01, 0x01, 0x12, 0x2e, 0x0a, 0x10, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x88, 0x01, 0x01, 0x12, 0x5a, 0x0a, 0x08, 0x73, 0x71, 0x6c, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x2e, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x73, 0x71, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x1a, 0x3a, 0x0a, 0x0c, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x51, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x11, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x1a, 0x5a, 0x0a, 0x11, 0x44, 0x72, 0x6f, 0x70, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x2f, 0x0a, 0x11, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x1a, 0xd1, 0x04, 0x0a, 0x0d, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x12, 0x2f, 0x0a, 0x11, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x26, 0x0a, 0x0c, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 
0x88, 0x01, 0x01, 0x12, 0x42, 0x0a, 0x0c, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x48, 0x02, 0x52, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x12, 0x6c, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x41, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6c, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6c, 0x73, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x04, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x05, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x88, 0x01, 
0x01, 0x1a, 0x42, 0x0a, 0x14, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x1a, 0xbc, 0x03, 0x0a, 0x0a, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x46, 0x6c, 0x6f, 0x77, 0x12, 0x2f, 0x0a, 0x11, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x08, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x13, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x11, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x30, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 
0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x03, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x51, 0x0a, 0x08, 0x73, 0x71, 0x6c, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x2e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x46, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x73, 0x71, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x12, 0x17, 0x0a, 0x04, 0x6f, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x04, 0x52, 0x04, 0x6f, 0x6e, 0x63, 0x65, 0x88, 0x01, 0x01, 0x1a, 0x3a, 0x0a, 0x0c, 0x53, 0x71, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6f, 0x6e, 0x63, 0x65, 0x1a, 0x51, 0x0a, 0x08, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x12, 0x2f, 0x0a, 0x11, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x49, 0x64, 0x88, 0x01, 
0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc7, 0x01, 0x0a, 0x16, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x53, 0x71, 0x6c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x2f, 0x0a, 0x11, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x27, 0x0a, 0x0d, 0x73, 0x71, 0x6c, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0b, 0x73, 0x71, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x88, 0x01, 0x01, 0x12, 0x1e, 0x0a, 0x08, 0x73, 0x71, 0x6c, 0x5f, 0x74, 0x65, 0x78, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x07, 0x73, 0x71, 0x6c, 0x54, 0x65, 0x78, 0x74, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x73, 0x71, 0x6c, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x73, 0x71, 0x6c, 0x5f, 0x74, 0x65, 0x78, 0x74, 0x22, 0x8e, 0x02, 0x0a, 0x15, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x81, 0x01, 0x0a, 0x1c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 
0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x48, 0x00, 0x52, 0x19, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0x62, 0x0a, 0x19, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x2f, 0x0a, 0x11, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x47, 0x72, 0x61, 0x70, 0x68, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x69, 0x64, 0x42, 0x0d, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x49, 0x0a, 0x13, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x32, 0x0a, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x6b, 0x0a, 0x0d, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x21, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 
0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2a, 0x61, 0x0a, 0x0b, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1c, 0x0a, 0x18, 0x44, 0x41, 0x54, 0x41, 0x53, 0x45, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x15, 0x0a, 0x11, 0x4d, 0x41, 0x54, 0x45, 0x52, 0x49, 0x41, 0x4c, 0x49, 0x5a, 0x45, 0x44, 0x5f, 0x56, 0x49, 0x45, 0x57, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x02, 0x12, 0x12, 0x0a, 0x0e, 0x54, 0x45, 0x4d, 0x50, 0x4f, 0x52, 0x41, 0x52, 0x59, 0x5f, 0x56, 0x49, 0x45, 0x57, 0x10, 0x03, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_pipelines_proto_rawDescOnce sync.Once file_spark_connect_pipelines_proto_rawDescData = file_spark_connect_pipelines_proto_rawDesc ) func file_spark_connect_pipelines_proto_rawDescGZIP() []byte { file_spark_connect_pipelines_proto_rawDescOnce.Do(func() { file_spark_connect_pipelines_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_pipelines_proto_rawDescData) }) return file_spark_connect_pipelines_proto_rawDescData } var file_spark_connect_pipelines_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_spark_connect_pipelines_proto_msgTypes = make([]protoimpl.MessageInfo, 15) var file_spark_connect_pipelines_proto_goTypes = []interface{}{ (DatasetType)(0), // 0: spark.connect.DatasetType (*PipelineCommand)(nil), // 1: spark.connect.PipelineCommand 
(*DefineSqlGraphElements)(nil), // 2: spark.connect.DefineSqlGraphElements (*PipelineCommandResult)(nil), // 3: spark.connect.PipelineCommandResult (*PipelineEventResult)(nil), // 4: spark.connect.PipelineEventResult (*PipelineEvent)(nil), // 5: spark.connect.PipelineEvent (*PipelineCommand_CreateDataflowGraph)(nil), // 6: spark.connect.PipelineCommand.CreateDataflowGraph (*PipelineCommand_DropDataflowGraph)(nil), // 7: spark.connect.PipelineCommand.DropDataflowGraph (*PipelineCommand_DefineDataset)(nil), // 8: spark.connect.PipelineCommand.DefineDataset (*PipelineCommand_DefineFlow)(nil), // 9: spark.connect.PipelineCommand.DefineFlow (*PipelineCommand_StartRun)(nil), // 10: spark.connect.PipelineCommand.StartRun nil, // 11: spark.connect.PipelineCommand.CreateDataflowGraph.SqlConfEntry (*PipelineCommand_CreateDataflowGraph_Response)(nil), // 12: spark.connect.PipelineCommand.CreateDataflowGraph.Response nil, // 13: spark.connect.PipelineCommand.DefineDataset.TablePropertiesEntry nil, // 14: spark.connect.PipelineCommand.DefineFlow.SqlConfEntry (*PipelineCommandResult_CreateDataflowGraphResult)(nil), // 15: spark.connect.PipelineCommandResult.CreateDataflowGraphResult (*DataType)(nil), // 16: spark.connect.DataType (*Relation)(nil), // 17: spark.connect.Relation } var file_spark_connect_pipelines_proto_depIdxs = []int32{ 6, // 0: spark.connect.PipelineCommand.create_dataflow_graph:type_name -> spark.connect.PipelineCommand.CreateDataflowGraph 8, // 1: spark.connect.PipelineCommand.define_dataset:type_name -> spark.connect.PipelineCommand.DefineDataset 9, // 2: spark.connect.PipelineCommand.define_flow:type_name -> spark.connect.PipelineCommand.DefineFlow 7, // 3: spark.connect.PipelineCommand.drop_dataflow_graph:type_name -> spark.connect.PipelineCommand.DropDataflowGraph 10, // 4: spark.connect.PipelineCommand.start_run:type_name -> spark.connect.PipelineCommand.StartRun 2, // 5: spark.connect.PipelineCommand.define_sql_graph_elements:type_name -> 
spark.connect.DefineSqlGraphElements 15, // 6: spark.connect.PipelineCommandResult.create_dataflow_graph_result:type_name -> spark.connect.PipelineCommandResult.CreateDataflowGraphResult 5, // 7: spark.connect.PipelineEventResult.event:type_name -> spark.connect.PipelineEvent 11, // 8: spark.connect.PipelineCommand.CreateDataflowGraph.sql_conf:type_name -> spark.connect.PipelineCommand.CreateDataflowGraph.SqlConfEntry 0, // 9: spark.connect.PipelineCommand.DefineDataset.dataset_type:type_name -> spark.connect.DatasetType 13, // 10: spark.connect.PipelineCommand.DefineDataset.table_properties:type_name -> spark.connect.PipelineCommand.DefineDataset.TablePropertiesEntry 16, // 11: spark.connect.PipelineCommand.DefineDataset.schema:type_name -> spark.connect.DataType 17, // 12: spark.connect.PipelineCommand.DefineFlow.plan:type_name -> spark.connect.Relation 14, // 13: spark.connect.PipelineCommand.DefineFlow.sql_conf:type_name -> spark.connect.PipelineCommand.DefineFlow.SqlConfEntry 14, // [14:14] is the sub-list for method output_type 14, // [14:14] is the sub-list for method input_type 14, // [14:14] is the sub-list for extension type_name 14, // [14:14] is the sub-list for extension extendee 0, // [0:14] is the sub-list for field type_name } func init() { file_spark_connect_pipelines_proto_init() } func file_spark_connect_pipelines_proto_init() { if File_spark_connect_pipelines_proto != nil { return } file_spark_connect_relations_proto_init() file_spark_connect_types_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_pipelines_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommand); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DefineSqlGraphElements); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return 
&v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommandResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineEventResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineEvent); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommand_CreateDataflowGraph); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommand_DropDataflowGraph); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommand_DefineDataset); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommand_DefineFlow); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommand_StartRun); i { case 0: return &v.state 
case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommand_CreateDataflowGraph_Response); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_pipelines_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PipelineCommandResult_CreateDataflowGraphResult); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_pipelines_proto_msgTypes[0].OneofWrappers = []interface{}{ (*PipelineCommand_CreateDataflowGraph_)(nil), (*PipelineCommand_DefineDataset_)(nil), (*PipelineCommand_DefineFlow_)(nil), (*PipelineCommand_DropDataflowGraph_)(nil), (*PipelineCommand_StartRun_)(nil), (*PipelineCommand_DefineSqlGraphElements)(nil), } file_spark_connect_pipelines_proto_msgTypes[1].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[2].OneofWrappers = []interface{}{ (*PipelineCommandResult_CreateDataflowGraphResult_)(nil), } file_spark_connect_pipelines_proto_msgTypes[4].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[5].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[6].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[7].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[8].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[9].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[11].OneofWrappers = []interface{}{} file_spark_connect_pipelines_proto_msgTypes[14].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_pipelines_proto_rawDesc, 
NumEnums: 1, NumMessages: 15, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_pipelines_proto_goTypes, DependencyIndexes: file_spark_connect_pipelines_proto_depIdxs, EnumInfos: file_spark_connect_pipelines_proto_enumTypes, MessageInfos: file_spark_connect_pipelines_proto_msgTypes, }.Build() File_spark_connect_pipelines_proto = out.File file_spark_connect_pipelines_proto_rawDesc = nil file_spark_connect_pipelines_proto_goTypes = nil file_spark_connect_pipelines_proto_depIdxs = nil } ================================================ FILE: internal/generated/relations.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/relations.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" anypb "google.golang.org/protobuf/types/known/anypb" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. 
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) type Join_JoinType int32 const ( Join_JOIN_TYPE_UNSPECIFIED Join_JoinType = 0 Join_JOIN_TYPE_INNER Join_JoinType = 1 Join_JOIN_TYPE_FULL_OUTER Join_JoinType = 2 Join_JOIN_TYPE_LEFT_OUTER Join_JoinType = 3 Join_JOIN_TYPE_RIGHT_OUTER Join_JoinType = 4 Join_JOIN_TYPE_LEFT_ANTI Join_JoinType = 5 Join_JOIN_TYPE_LEFT_SEMI Join_JoinType = 6 Join_JOIN_TYPE_CROSS Join_JoinType = 7 ) // Enum value maps for Join_JoinType. var ( Join_JoinType_name = map[int32]string{ 0: "JOIN_TYPE_UNSPECIFIED", 1: "JOIN_TYPE_INNER", 2: "JOIN_TYPE_FULL_OUTER", 3: "JOIN_TYPE_LEFT_OUTER", 4: "JOIN_TYPE_RIGHT_OUTER", 5: "JOIN_TYPE_LEFT_ANTI", 6: "JOIN_TYPE_LEFT_SEMI", 7: "JOIN_TYPE_CROSS", } Join_JoinType_value = map[string]int32{ "JOIN_TYPE_UNSPECIFIED": 0, "JOIN_TYPE_INNER": 1, "JOIN_TYPE_FULL_OUTER": 2, "JOIN_TYPE_LEFT_OUTER": 3, "JOIN_TYPE_RIGHT_OUTER": 4, "JOIN_TYPE_LEFT_ANTI": 5, "JOIN_TYPE_LEFT_SEMI": 6, "JOIN_TYPE_CROSS": 7, } ) func (x Join_JoinType) Enum() *Join_JoinType { p := new(Join_JoinType) *p = x return p } func (x Join_JoinType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (Join_JoinType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_relations_proto_enumTypes[0].Descriptor() } func (Join_JoinType) Type() protoreflect.EnumType { return &file_spark_connect_relations_proto_enumTypes[0] } func (x Join_JoinType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use Join_JoinType.Descriptor instead. 
func (Join_JoinType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{10, 0} } type SetOperation_SetOpType int32 const ( SetOperation_SET_OP_TYPE_UNSPECIFIED SetOperation_SetOpType = 0 SetOperation_SET_OP_TYPE_INTERSECT SetOperation_SetOpType = 1 SetOperation_SET_OP_TYPE_UNION SetOperation_SetOpType = 2 SetOperation_SET_OP_TYPE_EXCEPT SetOperation_SetOpType = 3 ) // Enum value maps for SetOperation_SetOpType. var ( SetOperation_SetOpType_name = map[int32]string{ 0: "SET_OP_TYPE_UNSPECIFIED", 1: "SET_OP_TYPE_INTERSECT", 2: "SET_OP_TYPE_UNION", 3: "SET_OP_TYPE_EXCEPT", } SetOperation_SetOpType_value = map[string]int32{ "SET_OP_TYPE_UNSPECIFIED": 0, "SET_OP_TYPE_INTERSECT": 1, "SET_OP_TYPE_UNION": 2, "SET_OP_TYPE_EXCEPT": 3, } ) func (x SetOperation_SetOpType) Enum() *SetOperation_SetOpType { p := new(SetOperation_SetOpType) *p = x return p } func (x SetOperation_SetOpType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (SetOperation_SetOpType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_relations_proto_enumTypes[1].Descriptor() } func (SetOperation_SetOpType) Type() protoreflect.EnumType { return &file_spark_connect_relations_proto_enumTypes[1] } func (x SetOperation_SetOpType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use SetOperation_SetOpType.Descriptor instead. func (SetOperation_SetOpType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{11, 0} } type Aggregate_GroupType int32 const ( Aggregate_GROUP_TYPE_UNSPECIFIED Aggregate_GroupType = 0 Aggregate_GROUP_TYPE_GROUPBY Aggregate_GroupType = 1 Aggregate_GROUP_TYPE_ROLLUP Aggregate_GroupType = 2 Aggregate_GROUP_TYPE_CUBE Aggregate_GroupType = 3 Aggregate_GROUP_TYPE_PIVOT Aggregate_GroupType = 4 Aggregate_GROUP_TYPE_GROUPING_SETS Aggregate_GroupType = 5 ) // Enum value maps for Aggregate_GroupType. 
var ( Aggregate_GroupType_name = map[int32]string{ 0: "GROUP_TYPE_UNSPECIFIED", 1: "GROUP_TYPE_GROUPBY", 2: "GROUP_TYPE_ROLLUP", 3: "GROUP_TYPE_CUBE", 4: "GROUP_TYPE_PIVOT", 5: "GROUP_TYPE_GROUPING_SETS", } Aggregate_GroupType_value = map[string]int32{ "GROUP_TYPE_UNSPECIFIED": 0, "GROUP_TYPE_GROUPBY": 1, "GROUP_TYPE_ROLLUP": 2, "GROUP_TYPE_CUBE": 3, "GROUP_TYPE_PIVOT": 4, "GROUP_TYPE_GROUPING_SETS": 5, } ) func (x Aggregate_GroupType) Enum() *Aggregate_GroupType { p := new(Aggregate_GroupType) *p = x return p } func (x Aggregate_GroupType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (Aggregate_GroupType) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_relations_proto_enumTypes[2].Descriptor() } func (Aggregate_GroupType) Type() protoreflect.EnumType { return &file_spark_connect_relations_proto_enumTypes[2] } func (x Aggregate_GroupType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use Aggregate_GroupType.Descriptor instead. func (Aggregate_GroupType) EnumDescriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{15, 0} } type Parse_ParseFormat int32 const ( Parse_PARSE_FORMAT_UNSPECIFIED Parse_ParseFormat = 0 Parse_PARSE_FORMAT_CSV Parse_ParseFormat = 1 Parse_PARSE_FORMAT_JSON Parse_ParseFormat = 2 ) // Enum value maps for Parse_ParseFormat. 
var ( Parse_ParseFormat_name = map[int32]string{ 0: "PARSE_FORMAT_UNSPECIFIED", 1: "PARSE_FORMAT_CSV", 2: "PARSE_FORMAT_JSON", } Parse_ParseFormat_value = map[string]int32{ "PARSE_FORMAT_UNSPECIFIED": 0, "PARSE_FORMAT_CSV": 1, "PARSE_FORMAT_JSON": 2, } ) func (x Parse_ParseFormat) Enum() *Parse_ParseFormat { p := new(Parse_ParseFormat) *p = x return p } func (x Parse_ParseFormat) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (Parse_ParseFormat) Descriptor() protoreflect.EnumDescriptor { return file_spark_connect_relations_proto_enumTypes[3].Descriptor() } func (Parse_ParseFormat) Type() protoreflect.EnumType { return &file_spark_connect_relations_proto_enumTypes[3] } func (x Parse_ParseFormat) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use Parse_ParseFormat.Descriptor instead. func (Parse_ParseFormat) EnumDescriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{59, 0} } // The main [[Relation]] type. Fundamentally, a relation is a typed container // that has exactly one explicit relation type set. // // When adding new relation types, they have to be registered here. 
type Relation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Common *RelationCommon `protobuf:"bytes,1,opt,name=common,proto3" json:"common,omitempty"` // Types that are assignable to RelType: // // *Relation_Read // *Relation_Project // *Relation_Filter // *Relation_Join // *Relation_SetOp // *Relation_Sort // *Relation_Limit // *Relation_Aggregate // *Relation_Sql // *Relation_LocalRelation // *Relation_Sample // *Relation_Offset // *Relation_Deduplicate // *Relation_Range // *Relation_SubqueryAlias // *Relation_Repartition // *Relation_ToDf // *Relation_WithColumnsRenamed // *Relation_ShowString // *Relation_Drop // *Relation_Tail // *Relation_WithColumns // *Relation_Hint // *Relation_Unpivot // *Relation_ToSchema // *Relation_RepartitionByExpression // *Relation_MapPartitions // *Relation_CollectMetrics // *Relation_Parse // *Relation_GroupMap // *Relation_CoGroupMap // *Relation_WithWatermark // *Relation_ApplyInPandasWithState // *Relation_HtmlString // *Relation_CachedLocalRelation // *Relation_CachedRemoteRelation // *Relation_CommonInlineUserDefinedTableFunction // *Relation_AsOfJoin // *Relation_CommonInlineUserDefinedDataSource // *Relation_WithRelations // *Relation_Transpose // *Relation_UnresolvedTableValuedFunction // *Relation_LateralJoin // *Relation_FillNa // *Relation_DropNa // *Relation_Replace // *Relation_Summary // *Relation_Crosstab // *Relation_Describe // *Relation_Cov // *Relation_Corr // *Relation_ApproxQuantile // *Relation_FreqItems // *Relation_SampleBy // *Relation_Catalog // *Relation_MlRelation // *Relation_Extension // *Relation_Unknown RelType isRelation_RelType `protobuf_oneof:"rel_type"` } func (x *Relation) Reset() { *x = Relation{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Relation) String() string { return 
protoimpl.X.MessageStringOf(x) } func (*Relation) ProtoMessage() {} func (x *Relation) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Relation.ProtoReflect.Descriptor instead. func (*Relation) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{0} } func (x *Relation) GetCommon() *RelationCommon { if x != nil { return x.Common } return nil } func (m *Relation) GetRelType() isRelation_RelType { if m != nil { return m.RelType } return nil } func (x *Relation) GetRead() *Read { if x, ok := x.GetRelType().(*Relation_Read); ok { return x.Read } return nil } func (x *Relation) GetProject() *Project { if x, ok := x.GetRelType().(*Relation_Project); ok { return x.Project } return nil } func (x *Relation) GetFilter() *Filter { if x, ok := x.GetRelType().(*Relation_Filter); ok { return x.Filter } return nil } func (x *Relation) GetJoin() *Join { if x, ok := x.GetRelType().(*Relation_Join); ok { return x.Join } return nil } func (x *Relation) GetSetOp() *SetOperation { if x, ok := x.GetRelType().(*Relation_SetOp); ok { return x.SetOp } return nil } func (x *Relation) GetSort() *Sort { if x, ok := x.GetRelType().(*Relation_Sort); ok { return x.Sort } return nil } func (x *Relation) GetLimit() *Limit { if x, ok := x.GetRelType().(*Relation_Limit); ok { return x.Limit } return nil } func (x *Relation) GetAggregate() *Aggregate { if x, ok := x.GetRelType().(*Relation_Aggregate); ok { return x.Aggregate } return nil } func (x *Relation) GetSql() *SQL { if x, ok := x.GetRelType().(*Relation_Sql); ok { return x.Sql } return nil } func (x *Relation) GetLocalRelation() *LocalRelation { if x, ok := x.GetRelType().(*Relation_LocalRelation); ok { return x.LocalRelation } return nil } func (x 
*Relation) GetSample() *Sample { if x, ok := x.GetRelType().(*Relation_Sample); ok { return x.Sample } return nil } func (x *Relation) GetOffset() *Offset { if x, ok := x.GetRelType().(*Relation_Offset); ok { return x.Offset } return nil } func (x *Relation) GetDeduplicate() *Deduplicate { if x, ok := x.GetRelType().(*Relation_Deduplicate); ok { return x.Deduplicate } return nil } func (x *Relation) GetRange() *Range { if x, ok := x.GetRelType().(*Relation_Range); ok { return x.Range } return nil } func (x *Relation) GetSubqueryAlias() *SubqueryAlias { if x, ok := x.GetRelType().(*Relation_SubqueryAlias); ok { return x.SubqueryAlias } return nil } func (x *Relation) GetRepartition() *Repartition { if x, ok := x.GetRelType().(*Relation_Repartition); ok { return x.Repartition } return nil } func (x *Relation) GetToDf() *ToDF { if x, ok := x.GetRelType().(*Relation_ToDf); ok { return x.ToDf } return nil } func (x *Relation) GetWithColumnsRenamed() *WithColumnsRenamed { if x, ok := x.GetRelType().(*Relation_WithColumnsRenamed); ok { return x.WithColumnsRenamed } return nil } func (x *Relation) GetShowString() *ShowString { if x, ok := x.GetRelType().(*Relation_ShowString); ok { return x.ShowString } return nil } func (x *Relation) GetDrop() *Drop { if x, ok := x.GetRelType().(*Relation_Drop); ok { return x.Drop } return nil } func (x *Relation) GetTail() *Tail { if x, ok := x.GetRelType().(*Relation_Tail); ok { return x.Tail } return nil } func (x *Relation) GetWithColumns() *WithColumns { if x, ok := x.GetRelType().(*Relation_WithColumns); ok { return x.WithColumns } return nil } func (x *Relation) GetHint() *Hint { if x, ok := x.GetRelType().(*Relation_Hint); ok { return x.Hint } return nil } func (x *Relation) GetUnpivot() *Unpivot { if x, ok := x.GetRelType().(*Relation_Unpivot); ok { return x.Unpivot } return nil } func (x *Relation) GetToSchema() *ToSchema { if x, ok := x.GetRelType().(*Relation_ToSchema); ok { return x.ToSchema } return nil } func (x *Relation) 
GetRepartitionByExpression() *RepartitionByExpression { if x, ok := x.GetRelType().(*Relation_RepartitionByExpression); ok { return x.RepartitionByExpression } return nil } func (x *Relation) GetMapPartitions() *MapPartitions { if x, ok := x.GetRelType().(*Relation_MapPartitions); ok { return x.MapPartitions } return nil } func (x *Relation) GetCollectMetrics() *CollectMetrics { if x, ok := x.GetRelType().(*Relation_CollectMetrics); ok { return x.CollectMetrics } return nil } func (x *Relation) GetParse() *Parse { if x, ok := x.GetRelType().(*Relation_Parse); ok { return x.Parse } return nil } func (x *Relation) GetGroupMap() *GroupMap { if x, ok := x.GetRelType().(*Relation_GroupMap); ok { return x.GroupMap } return nil } func (x *Relation) GetCoGroupMap() *CoGroupMap { if x, ok := x.GetRelType().(*Relation_CoGroupMap); ok { return x.CoGroupMap } return nil } func (x *Relation) GetWithWatermark() *WithWatermark { if x, ok := x.GetRelType().(*Relation_WithWatermark); ok { return x.WithWatermark } return nil } func (x *Relation) GetApplyInPandasWithState() *ApplyInPandasWithState { if x, ok := x.GetRelType().(*Relation_ApplyInPandasWithState); ok { return x.ApplyInPandasWithState } return nil } func (x *Relation) GetHtmlString() *HtmlString { if x, ok := x.GetRelType().(*Relation_HtmlString); ok { return x.HtmlString } return nil } func (x *Relation) GetCachedLocalRelation() *CachedLocalRelation { if x, ok := x.GetRelType().(*Relation_CachedLocalRelation); ok { return x.CachedLocalRelation } return nil } func (x *Relation) GetCachedRemoteRelation() *CachedRemoteRelation { if x, ok := x.GetRelType().(*Relation_CachedRemoteRelation); ok { return x.CachedRemoteRelation } return nil } func (x *Relation) GetCommonInlineUserDefinedTableFunction() *CommonInlineUserDefinedTableFunction { if x, ok := x.GetRelType().(*Relation_CommonInlineUserDefinedTableFunction); ok { return x.CommonInlineUserDefinedTableFunction } return nil } func (x *Relation) GetAsOfJoin() *AsOfJoin { 
// NOTE(review): this file is generated by protoc-gen-go for
// spark/connect/relations.proto. Do not hand-edit logic here — regenerate
// instead. Comments in this chunk are review annotations only.
//
// (continuation of Relation.GetAsOfJoin, whose signature precedes this chunk)
	if x, ok := x.GetRelType().(*Relation_AsOfJoin); ok {
		return x.AsOfJoin
	}
	return nil
}

// The getters below follow the standard generated oneof-accessor pattern:
// each returns the wrapped message when the RelType oneof currently holds
// that variant, and nil otherwise (including on a nil receiver, since
// GetRelType is nil-safe).

func (x *Relation) GetCommonInlineUserDefinedDataSource() *CommonInlineUserDefinedDataSource {
	if x, ok := x.GetRelType().(*Relation_CommonInlineUserDefinedDataSource); ok {
		return x.CommonInlineUserDefinedDataSource
	}
	return nil
}

func (x *Relation) GetWithRelations() *WithRelations {
	if x, ok := x.GetRelType().(*Relation_WithRelations); ok {
		return x.WithRelations
	}
	return nil
}

func (x *Relation) GetTranspose() *Transpose {
	if x, ok := x.GetRelType().(*Relation_Transpose); ok {
		return x.Transpose
	}
	return nil
}

func (x *Relation) GetUnresolvedTableValuedFunction() *UnresolvedTableValuedFunction {
	if x, ok := x.GetRelType().(*Relation_UnresolvedTableValuedFunction); ok {
		return x.UnresolvedTableValuedFunction
	}
	return nil
}

func (x *Relation) GetLateralJoin() *LateralJoin {
	if x, ok := x.GetRelType().(*Relation_LateralJoin); ok {
		return x.LateralJoin
	}
	return nil
}

func (x *Relation) GetFillNa() *NAFill {
	if x, ok := x.GetRelType().(*Relation_FillNa); ok {
		return x.FillNa
	}
	return nil
}

func (x *Relation) GetDropNa() *NADrop {
	if x, ok := x.GetRelType().(*Relation_DropNa); ok {
		return x.DropNa
	}
	return nil
}

func (x *Relation) GetReplace() *NAReplace {
	if x, ok := x.GetRelType().(*Relation_Replace); ok {
		return x.Replace
	}
	return nil
}

func (x *Relation) GetSummary() *StatSummary {
	if x, ok := x.GetRelType().(*Relation_Summary); ok {
		return x.Summary
	}
	return nil
}

func (x *Relation) GetCrosstab() *StatCrosstab {
	if x, ok := x.GetRelType().(*Relation_Crosstab); ok {
		return x.Crosstab
	}
	return nil
}

func (x *Relation) GetDescribe() *StatDescribe {
	if x, ok := x.GetRelType().(*Relation_Describe); ok {
		return x.Describe
	}
	return nil
}

func (x *Relation) GetCov() *StatCov {
	if x, ok := x.GetRelType().(*Relation_Cov); ok {
		return x.Cov
	}
	return nil
}

func (x *Relation) GetCorr() *StatCorr {
	if x, ok := x.GetRelType().(*Relation_Corr); ok {
		return x.Corr
	}
	return nil
}

func (x *Relation) GetApproxQuantile() *StatApproxQuantile {
	if x, ok := x.GetRelType().(*Relation_ApproxQuantile); ok {
		return x.ApproxQuantile
	}
	return nil
}

func (x *Relation) GetFreqItems() *StatFreqItems {
	if x, ok := x.GetRelType().(*Relation_FreqItems); ok {
		return x.FreqItems
	}
	return nil
}

func (x *Relation) GetSampleBy() *StatSampleBy {
	if x, ok := x.GetRelType().(*Relation_SampleBy); ok {
		return x.SampleBy
	}
	return nil
}

func (x *Relation) GetCatalog() *Catalog {
	if x, ok := x.GetRelType().(*Relation_Catalog); ok {
		return x.Catalog
	}
	return nil
}

func (x *Relation) GetMlRelation() *MlRelation {
	if x, ok := x.GetRelType().(*Relation_MlRelation); ok {
		return x.MlRelation
	}
	return nil
}

func (x *Relation) GetExtension() *anypb.Any {
	if x, ok := x.GetRelType().(*Relation_Extension); ok {
		return x.Extension
	}
	return nil
}

func (x *Relation) GetUnknown() *Unknown {
	if x, ok := x.GetRelType().(*Relation_Unknown); ok {
		return x.Unknown
	}
	return nil
}

// isRelation_RelType is the sealed marker interface for the rel_type oneof;
// exactly the generated Relation_* wrapper types below implement it.
type isRelation_RelType interface {
	isRelation_RelType()
}

// One single-field wrapper struct per oneof variant. The protobuf tag carries
// the wire field number; numbers 2-44 are core relations, 90-92 NA functions,
// 100-107 stat functions, 200 catalog, 300 ML, 998/999 extension/unknown.

type Relation_Read struct {
	Read *Read `protobuf:"bytes,2,opt,name=read,proto3,oneof"`
}

type Relation_Project struct {
	Project *Project `protobuf:"bytes,3,opt,name=project,proto3,oneof"`
}

type Relation_Filter struct {
	Filter *Filter `protobuf:"bytes,4,opt,name=filter,proto3,oneof"`
}

type Relation_Join struct {
	Join *Join `protobuf:"bytes,5,opt,name=join,proto3,oneof"`
}

type Relation_SetOp struct {
	SetOp *SetOperation `protobuf:"bytes,6,opt,name=set_op,json=setOp,proto3,oneof"`
}

type Relation_Sort struct {
	Sort *Sort `protobuf:"bytes,7,opt,name=sort,proto3,oneof"`
}

type Relation_Limit struct {
	Limit *Limit `protobuf:"bytes,8,opt,name=limit,proto3,oneof"`
}

type Relation_Aggregate struct {
	Aggregate *Aggregate `protobuf:"bytes,9,opt,name=aggregate,proto3,oneof"`
}

type Relation_Sql struct {
	Sql *SQL `protobuf:"bytes,10,opt,name=sql,proto3,oneof"`
}

type Relation_LocalRelation struct {
	LocalRelation *LocalRelation `protobuf:"bytes,11,opt,name=local_relation,json=localRelation,proto3,oneof"`
}

type Relation_Sample struct {
	Sample *Sample `protobuf:"bytes,12,opt,name=sample,proto3,oneof"`
}

type Relation_Offset struct {
	Offset *Offset `protobuf:"bytes,13,opt,name=offset,proto3,oneof"`
}

type Relation_Deduplicate struct {
	Deduplicate *Deduplicate `protobuf:"bytes,14,opt,name=deduplicate,proto3,oneof"`
}

type Relation_Range struct {
	Range *Range `protobuf:"bytes,15,opt,name=range,proto3,oneof"`
}

type Relation_SubqueryAlias struct {
	SubqueryAlias *SubqueryAlias `protobuf:"bytes,16,opt,name=subquery_alias,json=subqueryAlias,proto3,oneof"`
}

type Relation_Repartition struct {
	Repartition *Repartition `protobuf:"bytes,17,opt,name=repartition,proto3,oneof"`
}

type Relation_ToDf struct {
	ToDf *ToDF `protobuf:"bytes,18,opt,name=to_df,json=toDf,proto3,oneof"`
}

type Relation_WithColumnsRenamed struct {
	WithColumnsRenamed *WithColumnsRenamed `protobuf:"bytes,19,opt,name=with_columns_renamed,json=withColumnsRenamed,proto3,oneof"`
}

type Relation_ShowString struct {
	ShowString *ShowString `protobuf:"bytes,20,opt,name=show_string,json=showString,proto3,oneof"`
}

type Relation_Drop struct {
	Drop *Drop `protobuf:"bytes,21,opt,name=drop,proto3,oneof"`
}

type Relation_Tail struct {
	Tail *Tail `protobuf:"bytes,22,opt,name=tail,proto3,oneof"`
}

type Relation_WithColumns struct {
	WithColumns *WithColumns `protobuf:"bytes,23,opt,name=with_columns,json=withColumns,proto3,oneof"`
}

type Relation_Hint struct {
	Hint *Hint `protobuf:"bytes,24,opt,name=hint,proto3,oneof"`
}

type Relation_Unpivot struct {
	Unpivot *Unpivot `protobuf:"bytes,25,opt,name=unpivot,proto3,oneof"`
}

type Relation_ToSchema struct {
	ToSchema *ToSchema `protobuf:"bytes,26,opt,name=to_schema,json=toSchema,proto3,oneof"`
}

type Relation_RepartitionByExpression struct {
	RepartitionByExpression *RepartitionByExpression `protobuf:"bytes,27,opt,name=repartition_by_expression,json=repartitionByExpression,proto3,oneof"`
}

type Relation_MapPartitions struct {
	MapPartitions *MapPartitions `protobuf:"bytes,28,opt,name=map_partitions,json=mapPartitions,proto3,oneof"`
}

type Relation_CollectMetrics struct {
	CollectMetrics *CollectMetrics `protobuf:"bytes,29,opt,name=collect_metrics,json=collectMetrics,proto3,oneof"`
}

type Relation_Parse struct {
	Parse *Parse `protobuf:"bytes,30,opt,name=parse,proto3,oneof"`
}

type Relation_GroupMap struct {
	GroupMap *GroupMap `protobuf:"bytes,31,opt,name=group_map,json=groupMap,proto3,oneof"`
}

type Relation_CoGroupMap struct {
	CoGroupMap *CoGroupMap `protobuf:"bytes,32,opt,name=co_group_map,json=coGroupMap,proto3,oneof"`
}

type Relation_WithWatermark struct {
	WithWatermark *WithWatermark `protobuf:"bytes,33,opt,name=with_watermark,json=withWatermark,proto3,oneof"`
}

type Relation_ApplyInPandasWithState struct {
	ApplyInPandasWithState *ApplyInPandasWithState `protobuf:"bytes,34,opt,name=apply_in_pandas_with_state,json=applyInPandasWithState,proto3,oneof"`
}

type Relation_HtmlString struct {
	HtmlString *HtmlString `protobuf:"bytes,35,opt,name=html_string,json=htmlString,proto3,oneof"`
}

type Relation_CachedLocalRelation struct {
	CachedLocalRelation *CachedLocalRelation `protobuf:"bytes,36,opt,name=cached_local_relation,json=cachedLocalRelation,proto3,oneof"`
}

type Relation_CachedRemoteRelation struct {
	CachedRemoteRelation *CachedRemoteRelation `protobuf:"bytes,37,opt,name=cached_remote_relation,json=cachedRemoteRelation,proto3,oneof"`
}

type Relation_CommonInlineUserDefinedTableFunction struct {
	CommonInlineUserDefinedTableFunction *CommonInlineUserDefinedTableFunction `protobuf:"bytes,38,opt,name=common_inline_user_defined_table_function,json=commonInlineUserDefinedTableFunction,proto3,oneof"`
}

type Relation_AsOfJoin struct {
	AsOfJoin *AsOfJoin `protobuf:"bytes,39,opt,name=as_of_join,json=asOfJoin,proto3,oneof"`
}

type Relation_CommonInlineUserDefinedDataSource struct {
	CommonInlineUserDefinedDataSource *CommonInlineUserDefinedDataSource `protobuf:"bytes,40,opt,name=common_inline_user_defined_data_source,json=commonInlineUserDefinedDataSource,proto3,oneof"`
}

type Relation_WithRelations struct {
	WithRelations *WithRelations `protobuf:"bytes,41,opt,name=with_relations,json=withRelations,proto3,oneof"`
}

type Relation_Transpose struct {
	Transpose *Transpose `protobuf:"bytes,42,opt,name=transpose,proto3,oneof"`
}

type Relation_UnresolvedTableValuedFunction struct {
	UnresolvedTableValuedFunction *UnresolvedTableValuedFunction `protobuf:"bytes,43,opt,name=unresolved_table_valued_function,json=unresolvedTableValuedFunction,proto3,oneof"`
}

type Relation_LateralJoin struct {
	LateralJoin *LateralJoin `protobuf:"bytes,44,opt,name=lateral_join,json=lateralJoin,proto3,oneof"`
}

type Relation_FillNa struct {
	// NA functions
	FillNa *NAFill `protobuf:"bytes,90,opt,name=fill_na,json=fillNa,proto3,oneof"`
}

type Relation_DropNa struct {
	DropNa *NADrop `protobuf:"bytes,91,opt,name=drop_na,json=dropNa,proto3,oneof"`
}

type Relation_Replace struct {
	Replace *NAReplace `protobuf:"bytes,92,opt,name=replace,proto3,oneof"`
}

type Relation_Summary struct {
	// stat functions
	Summary *StatSummary `protobuf:"bytes,100,opt,name=summary,proto3,oneof"`
}

type Relation_Crosstab struct {
	Crosstab *StatCrosstab `protobuf:"bytes,101,opt,name=crosstab,proto3,oneof"`
}

type Relation_Describe struct {
	Describe *StatDescribe `protobuf:"bytes,102,opt,name=describe,proto3,oneof"`
}

type Relation_Cov struct {
	Cov *StatCov `protobuf:"bytes,103,opt,name=cov,proto3,oneof"`
}

type Relation_Corr struct {
	Corr *StatCorr `protobuf:"bytes,104,opt,name=corr,proto3,oneof"`
}

type Relation_ApproxQuantile struct {
	ApproxQuantile *StatApproxQuantile `protobuf:"bytes,105,opt,name=approx_quantile,json=approxQuantile,proto3,oneof"`
}

type Relation_FreqItems struct {
	FreqItems *StatFreqItems `protobuf:"bytes,106,opt,name=freq_items,json=freqItems,proto3,oneof"`
}

type Relation_SampleBy struct {
	SampleBy *StatSampleBy `protobuf:"bytes,107,opt,name=sample_by,json=sampleBy,proto3,oneof"`
}

type Relation_Catalog struct {
	// Catalog API (experimental / unstable)
	Catalog *Catalog `protobuf:"bytes,200,opt,name=catalog,proto3,oneof"`
}

type Relation_MlRelation struct {
	// ML relation
	MlRelation *MlRelation `protobuf:"bytes,300,opt,name=ml_relation,json=mlRelation,proto3,oneof"`
}

type Relation_Extension struct {
	// This field is used to mark extensions to the protocol. When plugins generate arbitrary
	// relations they can add them here. During the planning the correct resolution is done.
	Extension *anypb.Any `protobuf:"bytes,998,opt,name=extension,proto3,oneof"`
}

type Relation_Unknown struct {
	Unknown *Unknown `protobuf:"bytes,999,opt,name=unknown,proto3,oneof"`
}

// Empty marker methods tying every wrapper type to the rel_type oneof.

func (*Relation_Read) isRelation_RelType() {}

func (*Relation_Project) isRelation_RelType() {}

func (*Relation_Filter) isRelation_RelType() {}

func (*Relation_Join) isRelation_RelType() {}

func (*Relation_SetOp) isRelation_RelType() {}

func (*Relation_Sort) isRelation_RelType() {}

func (*Relation_Limit) isRelation_RelType() {}

func (*Relation_Aggregate) isRelation_RelType() {}

func (*Relation_Sql) isRelation_RelType() {}

func (*Relation_LocalRelation) isRelation_RelType() {}

func (*Relation_Sample) isRelation_RelType() {}

func (*Relation_Offset) isRelation_RelType() {}

func (*Relation_Deduplicate) isRelation_RelType() {}

func (*Relation_Range) isRelation_RelType() {}

func (*Relation_SubqueryAlias) isRelation_RelType() {}

func (*Relation_Repartition) isRelation_RelType() {}

func (*Relation_ToDf) isRelation_RelType() {}

func (*Relation_WithColumnsRenamed) isRelation_RelType() {}

func (*Relation_ShowString) isRelation_RelType() {}

func (*Relation_Drop) isRelation_RelType() {}

func (*Relation_Tail) isRelation_RelType() {}

func (*Relation_WithColumns) isRelation_RelType() {}

func (*Relation_Hint) isRelation_RelType() {}

func (*Relation_Unpivot) isRelation_RelType() {}

func (*Relation_ToSchema) isRelation_RelType() {}

func (*Relation_RepartitionByExpression) isRelation_RelType() {}

func (*Relation_MapPartitions) isRelation_RelType() {}

func (*Relation_CollectMetrics) isRelation_RelType() {}

func (*Relation_Parse) isRelation_RelType() {}

func (*Relation_GroupMap) isRelation_RelType() {}

func (*Relation_CoGroupMap) isRelation_RelType() {}

func (*Relation_WithWatermark) isRelation_RelType() {}

func (*Relation_ApplyInPandasWithState) isRelation_RelType() {}

func (*Relation_HtmlString) isRelation_RelType() {}

func (*Relation_CachedLocalRelation) isRelation_RelType() {}

func (*Relation_CachedRemoteRelation) isRelation_RelType() {}

func (*Relation_CommonInlineUserDefinedTableFunction) isRelation_RelType() {}

func (*Relation_AsOfJoin) isRelation_RelType() {}

func (*Relation_CommonInlineUserDefinedDataSource) isRelation_RelType() {}

func (*Relation_WithRelations) isRelation_RelType() {}

func (*Relation_Transpose) isRelation_RelType() {}

func (*Relation_UnresolvedTableValuedFunction) isRelation_RelType() {}

func (*Relation_LateralJoin) isRelation_RelType() {}

func (*Relation_FillNa) isRelation_RelType() {}

func (*Relation_DropNa) isRelation_RelType() {}

func (*Relation_Replace) isRelation_RelType() {}

func (*Relation_Summary) isRelation_RelType() {}

func (*Relation_Crosstab) isRelation_RelType() {}

func (*Relation_Describe) isRelation_RelType() {}

func (*Relation_Cov) isRelation_RelType() {}

func (*Relation_Corr) isRelation_RelType() {}

func (*Relation_ApproxQuantile) isRelation_RelType() {}

func (*Relation_FreqItems) isRelation_RelType() {}

func (*Relation_SampleBy) isRelation_RelType() {}

func (*Relation_Catalog) isRelation_RelType() {}

func (*Relation_MlRelation) isRelation_RelType() {}

func (*Relation_Extension) isRelation_RelType() {}

func (*Relation_Unknown) isRelation_RelType() {}

// Relation to represent ML world
type MlRelation struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to MlType:
	//
	//	*MlRelation_Transform_
	//	*MlRelation_Fetch
	MlType isMlRelation_MlType `protobuf_oneof:"ml_type"`
}

func (x *MlRelation) Reset() {
	*x = MlRelation{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlRelation) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlRelation) ProtoMessage() {}

func (x *MlRelation) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlRelation.ProtoReflect.Descriptor instead.
func (*MlRelation) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{1}
}

func (m *MlRelation) GetMlType() isMlRelation_MlType {
	if m != nil {
		return m.MlType
	}
	return nil
}

func (x *MlRelation) GetTransform() *MlRelation_Transform {
	if x, ok := x.GetMlType().(*MlRelation_Transform_); ok {
		return x.Transform
	}
	return nil
}

func (x *MlRelation) GetFetch() *Fetch {
	if x, ok := x.GetMlType().(*MlRelation_Fetch); ok {
		return x.Fetch
	}
	return nil
}

type isMlRelation_MlType interface {
	isMlRelation_MlType()
}

type MlRelation_Transform_ struct {
	Transform *MlRelation_Transform `protobuf:"bytes,1,opt,name=transform,proto3,oneof"`
}

type MlRelation_Fetch struct {
	Fetch *Fetch `protobuf:"bytes,2,opt,name=fetch,proto3,oneof"`
}

func (*MlRelation_Transform_) isMlRelation_MlType() {}

func (*MlRelation_Fetch) isMlRelation_MlType() {}

// Message for fetching attribute from object on the server side.
// Fetch can be represented as a Relation or a ML command
// Command: model.coefficients, model.summary.weightedPrecision which
// returns the final literal result
// Relation: model.summary.roc which returns a DataFrame (Relation)
type Fetch struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) reference to the object on the server side
	ObjRef *ObjectRef `protobuf:"bytes,1,opt,name=obj_ref,json=objRef,proto3" json:"obj_ref,omitempty"`
	// (Required) the calling method chains
	Methods []*Fetch_Method `protobuf:"bytes,2,rep,name=methods,proto3" json:"methods,omitempty"`
}

func (x *Fetch) Reset() {
	*x = Fetch{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Fetch) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Fetch) ProtoMessage() {}

func (x *Fetch) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Fetch.ProtoReflect.Descriptor instead.
func (*Fetch) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{2}
}

func (x *Fetch) GetObjRef() *ObjectRef {
	if x != nil {
		return x.ObjRef
	}
	return nil
}

func (x *Fetch) GetMethods() []*Fetch_Method {
	if x != nil {
		return x.Methods
	}
	return nil
}

// Used for testing purposes only.
// NOTE(review): generated protobuf code (protoc-gen-go); comments here are
// review annotations only and will be lost on regeneration.
type Unknown struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}

func (x *Unknown) Reset() {
	*x = Unknown{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Unknown) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Unknown) ProtoMessage() {}

func (x *Unknown) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Unknown.ProtoReflect.Descriptor instead.
func (*Unknown) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{3}
}

// Common metadata of all relations.
type RelationCommon struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Shared relation metadata.
	//
	// Deprecated: Marked as deprecated in spark/connect/relations.proto.
	SourceInfo string `protobuf:"bytes,1,opt,name=source_info,json=sourceInfo,proto3" json:"source_info,omitempty"`
	// (Optional) A per-client globally unique id for a given connect plan.
	PlanId *int64 `protobuf:"varint,2,opt,name=plan_id,json=planId,proto3,oneof" json:"plan_id,omitempty"`
	// (Optional) Keep the information of the origin for this expression such as stacktrace.
	Origin *Origin `protobuf:"bytes,3,opt,name=origin,proto3" json:"origin,omitempty"`
}

func (x *RelationCommon) Reset() {
	*x = RelationCommon{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *RelationCommon) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*RelationCommon) ProtoMessage() {}

func (x *RelationCommon) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use RelationCommon.ProtoReflect.Descriptor instead.
func (*RelationCommon) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{4}
}

// Deprecated: Marked as deprecated in spark/connect/relations.proto.
func (x *RelationCommon) GetSourceInfo() string {
	if x != nil {
		return x.SourceInfo
	}
	return ""
}

// Optional proto3 field: returns the zero value when plan_id was never set.
func (x *RelationCommon) GetPlanId() int64 {
	if x != nil && x.PlanId != nil {
		return *x.PlanId
	}
	return 0
}

func (x *RelationCommon) GetOrigin() *Origin {
	if x != nil {
		return x.Origin
	}
	return nil
}

// Relation that uses a SQL query to generate the output.
type SQL struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The SQL query.
	Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"`
	// (Optional) A map of parameter names to literal expressions.
	//
	// Deprecated: Marked as deprecated in spark/connect/relations.proto.
	Args map[string]*Expression_Literal `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// (Optional) A sequence of literal expressions for positional parameters in the SQL query text.
	//
	// Deprecated: Marked as deprecated in spark/connect/relations.proto.
	PosArgs []*Expression_Literal `protobuf:"bytes,3,rep,name=pos_args,json=posArgs,proto3" json:"pos_args,omitempty"`
	// (Optional) A map of parameter names to expressions.
	// It cannot coexist with `pos_arguments`.
	NamedArguments map[string]*Expression `protobuf:"bytes,4,rep,name=named_arguments,json=namedArguments,proto3" json:"named_arguments,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// (Optional) A sequence of expressions for positional parameters in the SQL query text.
	// It cannot coexist with `named_arguments`.
	PosArguments []*Expression `protobuf:"bytes,5,rep,name=pos_arguments,json=posArguments,proto3" json:"pos_arguments,omitempty"`
}

func (x *SQL) Reset() {
	*x = SQL{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *SQL) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*SQL) ProtoMessage() {}

func (x *SQL) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use SQL.ProtoReflect.Descriptor instead.
func (*SQL) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{5}
}

func (x *SQL) GetQuery() string {
	if x != nil {
		return x.Query
	}
	return ""
}

// Deprecated: Marked as deprecated in spark/connect/relations.proto.
func (x *SQL) GetArgs() map[string]*Expression_Literal {
	if x != nil {
		return x.Args
	}
	return nil
}

// Deprecated: Marked as deprecated in spark/connect/relations.proto.
func (x *SQL) GetPosArgs() []*Expression_Literal {
	if x != nil {
		return x.PosArgs
	}
	return nil
}

func (x *SQL) GetNamedArguments() map[string]*Expression {
	if x != nil {
		return x.NamedArguments
	}
	return nil
}

func (x *SQL) GetPosArguments() []*Expression {
	if x != nil {
		return x.PosArguments
	}
	return nil
}

// Relation of type [[WithRelations]].
//
// This relation contains a root plan, and one or more references that are used by the root plan.
// There are two ways of referencing a relation, by name (through a subquery alias), or by plan_id
// (using RelationCommon.plan_id).
//
// This relation can be used to implement CTEs, describe DAGs, or to reduce tree depth.
type WithRelations struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Plan at the root of the query tree. This plan is expected to contain one or more
	// references. Those references get expanded later on by the engine.
	Root *Relation `protobuf:"bytes,1,opt,name=root,proto3" json:"root,omitempty"`
	// (Required) Plans referenced by the root plan. Relations in this list are also allowed to
	// contain references to other relations in this list, as long they do not form cycles.
	References []*Relation `protobuf:"bytes,2,rep,name=references,proto3" json:"references,omitempty"`
}

func (x *WithRelations) Reset() {
	*x = WithRelations{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *WithRelations) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*WithRelations) ProtoMessage() {}

func (x *WithRelations) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use WithRelations.ProtoReflect.Descriptor instead.
func (*WithRelations) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{6}
}

func (x *WithRelations) GetRoot() *Relation {
	if x != nil {
		return x.Root
	}
	return nil
}

func (x *WithRelations) GetReferences() []*Relation {
	if x != nil {
		return x.References
	}
	return nil
}

// Relation that reads from a file / table or other data source. Does not have additional
// inputs.
type Read struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to ReadType:
	//
	//	*Read_NamedTable_
	//	*Read_DataSource_
	ReadType isRead_ReadType `protobuf_oneof:"read_type"`
	// (Optional) Indicates if this is a streaming read.
	IsStreaming bool `protobuf:"varint,3,opt,name=is_streaming,json=isStreaming,proto3" json:"is_streaming,omitempty"`
}

func (x *Read) Reset() {
	*x = Read{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Read) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Read) ProtoMessage() {}

func (x *Read) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Read.ProtoReflect.Descriptor instead.
func (*Read) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{7}
}

func (m *Read) GetReadType() isRead_ReadType {
	if m != nil {
		return m.ReadType
	}
	return nil
}

func (x *Read) GetNamedTable() *Read_NamedTable {
	if x, ok := x.GetReadType().(*Read_NamedTable_); ok {
		return x.NamedTable
	}
	return nil
}

func (x *Read) GetDataSource() *Read_DataSource {
	if x, ok := x.GetReadType().(*Read_DataSource_); ok {
		return x.DataSource
	}
	return nil
}

func (x *Read) GetIsStreaming() bool {
	if x != nil {
		return x.IsStreaming
	}
	return false
}

type isRead_ReadType interface {
	isRead_ReadType()
}

type Read_NamedTable_ struct {
	NamedTable *Read_NamedTable `protobuf:"bytes,1,opt,name=named_table,json=namedTable,proto3,oneof"`
}

type Read_DataSource_ struct {
	DataSource *Read_DataSource `protobuf:"bytes,2,opt,name=data_source,json=dataSource,proto3,oneof"`
}

func (*Read_NamedTable_) isRead_ReadType() {}

func (*Read_DataSource_) isRead_ReadType() {}

// Projection of a bag of expressions for a given input relation.
//
// The input relation must be specified.
// The projected expression can be an arbitrary expression.
// NOTE(review): generated protobuf code (protoc-gen-go); comments here are
// review annotations only and will be lost on regeneration.
type Project struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Optional) Input relation is optional for Project.
	//
	// For example, `SELECT ABS(-1)` is valid plan without an input plan.
	Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"`
	// (Required) A Project requires at least one expression.
	Expressions []*Expression `protobuf:"bytes,3,rep,name=expressions,proto3" json:"expressions,omitempty"`
}

func (x *Project) Reset() {
	*x = Project{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Project) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Project) ProtoMessage() {}

func (x *Project) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Project.ProtoReflect.Descriptor instead.
func (*Project) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{8}
}

func (x *Project) GetInput() *Relation {
	if x != nil {
		return x.Input
	}
	return nil
}

func (x *Project) GetExpressions() []*Expression {
	if x != nil {
		return x.Expressions
	}
	return nil
}

// Relation that applies a boolean expression `condition` on each row of `input` to produce
// the output result.
type Filter struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Input relation for a Filter.
	Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"`
	// (Required) A Filter must have a condition expression.
	Condition *Expression `protobuf:"bytes,2,opt,name=condition,proto3" json:"condition,omitempty"`
}

func (x *Filter) Reset() {
	*x = Filter{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Filter) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Filter) ProtoMessage() {}

func (x *Filter) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Filter.ProtoReflect.Descriptor instead.
func (*Filter) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{9}
}

func (x *Filter) GetInput() *Relation {
	if x != nil {
		return x.Input
	}
	return nil
}

func (x *Filter) GetCondition() *Expression {
	if x != nil {
		return x.Condition
	}
	return nil
}

// Relation of type [[Join]].
//
// `left` and `right` must be present.
type Join struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Left input relation for a Join.
	Left *Relation `protobuf:"bytes,1,opt,name=left,proto3" json:"left,omitempty"`
	// (Required) Right input relation for a Join.
	Right *Relation `protobuf:"bytes,2,opt,name=right,proto3" json:"right,omitempty"`
	// (Optional) The join condition. Could be unset when `using_columns` is utilized.
	//
	// This field does not co-exist with using_columns.
	JoinCondition *Expression `protobuf:"bytes,3,opt,name=join_condition,json=joinCondition,proto3" json:"join_condition,omitempty"`
	// (Required) The join type.
	JoinType Join_JoinType `protobuf:"varint,4,opt,name=join_type,json=joinType,proto3,enum=spark.connect.Join_JoinType" json:"join_type,omitempty"`
	// Optional. using_columns provides a list of columns that should present on both sides of
	// the join inputs that this Join will join on. For example A JOIN B USING col_name is
	// equivalent to A JOIN B on A.col_name = B.col_name.
	//
	// This field does not co-exist with join_condition.
	UsingColumns []string `protobuf:"bytes,5,rep,name=using_columns,json=usingColumns,proto3" json:"using_columns,omitempty"`
	// (Optional) Only used by joinWith. Set the left and right join data types.
	JoinDataType *Join_JoinDataType `protobuf:"bytes,6,opt,name=join_data_type,json=joinDataType,proto3,oneof" json:"join_data_type,omitempty"`
}

func (x *Join) Reset() {
	*x = Join{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[10]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Join) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Join) ProtoMessage() {}

func (x *Join) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[10]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Join.ProtoReflect.Descriptor instead.
func (*Join) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{10}
}

func (x *Join) GetLeft() *Relation {
	if x != nil {
		return x.Left
	}
	return nil
}

func (x *Join) GetRight() *Relation {
	if x != nil {
		return x.Right
	}
	return nil
}

func (x *Join) GetJoinCondition() *Expression {
	if x != nil {
		return x.JoinCondition
	}
	return nil
}

func (x *Join) GetJoinType() Join_JoinType {
	if x != nil {
		return x.JoinType
	}
	return Join_JOIN_TYPE_UNSPECIFIED
}

func (x *Join) GetUsingColumns() []string {
	if x != nil {
		return x.UsingColumns
	}
	return nil
}

func (x *Join) GetJoinDataType() *Join_JoinDataType {
	if x != nil {
		return x.JoinDataType
	}
	return nil
}

// Relation of type [[SetOperation]]
type SetOperation struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Left input relation for a Set operation.
	LeftInput *Relation `protobuf:"bytes,1,opt,name=left_input,json=leftInput,proto3" json:"left_input,omitempty"`
	// (Required) Right input relation for a Set operation.
	RightInput *Relation `protobuf:"bytes,2,opt,name=right_input,json=rightInput,proto3" json:"right_input,omitempty"`
	// (Required) The Set operation type.
	SetOpType SetOperation_SetOpType `protobuf:"varint,3,opt,name=set_op_type,json=setOpType,proto3,enum=spark.connect.SetOperation_SetOpType" json:"set_op_type,omitempty"`
	// (Optional) If to remove duplicate rows.
	//
	// True to preserve all results.
	// False to remove duplicate rows.
	IsAll *bool `protobuf:"varint,4,opt,name=is_all,json=isAll,proto3,oneof" json:"is_all,omitempty"`
	// (Optional) If to perform the Set operation based on name resolution.
	//
	// Only UNION supports this option.
	ByName *bool `protobuf:"varint,5,opt,name=by_name,json=byName,proto3,oneof" json:"by_name,omitempty"`
	// (Optional) If to perform the Set operation and allow missing columns.
	//
	// Only UNION supports this option.
	AllowMissingColumns *bool `protobuf:"varint,6,opt,name=allow_missing_columns,json=allowMissingColumns,proto3,oneof" json:"allow_missing_columns,omitempty"`
}

func (x *SetOperation) Reset() {
	*x = SetOperation{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[11]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *SetOperation) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*SetOperation) ProtoMessage() {}

func (x *SetOperation) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[11]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use SetOperation.ProtoReflect.Descriptor instead.
func (*SetOperation) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{11}
}

func (x *SetOperation) GetLeftInput() *Relation {
	if x != nil {
		return x.LeftInput
	}
	return nil
}

func (x *SetOperation) GetRightInput() *Relation {
	if x != nil {
		return x.RightInput
	}
	return nil
}

func (x *SetOperation) GetSetOpType() SetOperation_SetOpType {
	if x != nil {
		return x.SetOpType
	}
	return SetOperation_SET_OP_TYPE_UNSPECIFIED
}

// Optional proto3 fields: each getter returns false when the field is unset.

func (x *SetOperation) GetIsAll() bool {
	if x != nil && x.IsAll != nil {
		return *x.IsAll
	}
	return false
}

func (x *SetOperation) GetByName() bool {
	if x != nil && x.ByName != nil {
		return *x.ByName
	}
	return false
}

func (x *SetOperation) GetAllowMissingColumns() bool {
	if x != nil && x.AllowMissingColumns != nil {
		return *x.AllowMissingColumns
	}
	return false
}

// Relation of type [[Limit]] that is used to `limit` rows from the input relation.
// (struct continues beyond this chunk)
type Limit struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Input relation for a Limit.
Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) the limit. Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` } func (x *Limit) Reset() { *x = Limit{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Limit) String() string { return protoimpl.X.MessageStringOf(x) } func (*Limit) ProtoMessage() {} func (x *Limit) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Limit.ProtoReflect.Descriptor instead. func (*Limit) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{12} } func (x *Limit) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Limit) GetLimit() int32 { if x != nil { return x.Limit } return 0 } // Relation of type [[Offset]] that is used to read rows staring from the `offset` on // the input relation. type Offset struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for an Offset. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) the limit. 
Offset int32 `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"` } func (x *Offset) Reset() { *x = Offset{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Offset) String() string { return protoimpl.X.MessageStringOf(x) } func (*Offset) ProtoMessage() {} func (x *Offset) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Offset.ProtoReflect.Descriptor instead. func (*Offset) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{13} } func (x *Offset) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Offset) GetOffset() int32 { if x != nil { return x.Offset } return 0 } // Relation of type [[Tail]] that is used to fetch `limit` rows from the last of the input relation. type Tail struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for an Tail. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) the limit. 
Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` } func (x *Tail) Reset() { *x = Tail{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Tail) String() string { return protoimpl.X.MessageStringOf(x) } func (*Tail) ProtoMessage() {} func (x *Tail) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Tail.ProtoReflect.Descriptor instead. func (*Tail) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{14} } func (x *Tail) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Tail) GetLimit() int32 { if x != nil { return x.Limit } return 0 } // Relation of type [[Aggregate]]. type Aggregate struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for a RelationalGroupedDataset. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) How the RelationalGroupedDataset was built. GroupType Aggregate_GroupType `protobuf:"varint,2,opt,name=group_type,json=groupType,proto3,enum=spark.connect.Aggregate_GroupType" json:"group_type,omitempty"` // (Required) Expressions for grouping keys GroupingExpressions []*Expression `protobuf:"bytes,3,rep,name=grouping_expressions,json=groupingExpressions,proto3" json:"grouping_expressions,omitempty"` // (Required) List of values that will be translated to columns in the output DataFrame. 
AggregateExpressions []*Expression `protobuf:"bytes,4,rep,name=aggregate_expressions,json=aggregateExpressions,proto3" json:"aggregate_expressions,omitempty"` // (Optional) Pivots a column of the current `DataFrame` and performs the specified aggregation. Pivot *Aggregate_Pivot `protobuf:"bytes,5,opt,name=pivot,proto3" json:"pivot,omitempty"` // (Optional) List of values that will be translated to columns in the output DataFrame. GroupingSets []*Aggregate_GroupingSets `protobuf:"bytes,6,rep,name=grouping_sets,json=groupingSets,proto3" json:"grouping_sets,omitempty"` } func (x *Aggregate) Reset() { *x = Aggregate{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Aggregate) String() string { return protoimpl.X.MessageStringOf(x) } func (*Aggregate) ProtoMessage() {} func (x *Aggregate) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Aggregate.ProtoReflect.Descriptor instead. 
func (*Aggregate) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{15} } func (x *Aggregate) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Aggregate) GetGroupType() Aggregate_GroupType { if x != nil { return x.GroupType } return Aggregate_GROUP_TYPE_UNSPECIFIED } func (x *Aggregate) GetGroupingExpressions() []*Expression { if x != nil { return x.GroupingExpressions } return nil } func (x *Aggregate) GetAggregateExpressions() []*Expression { if x != nil { return x.AggregateExpressions } return nil } func (x *Aggregate) GetPivot() *Aggregate_Pivot { if x != nil { return x.Pivot } return nil } func (x *Aggregate) GetGroupingSets() []*Aggregate_GroupingSets { if x != nil { return x.GroupingSets } return nil } // Relation of type [[Sort]]. type Sort struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for a Sort. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The ordering expressions Order []*Expression_SortOrder `protobuf:"bytes,2,rep,name=order,proto3" json:"order,omitempty"` // (Optional) if this is a global sort. 
IsGlobal *bool `protobuf:"varint,3,opt,name=is_global,json=isGlobal,proto3,oneof" json:"is_global,omitempty"` } func (x *Sort) Reset() { *x = Sort{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Sort) String() string { return protoimpl.X.MessageStringOf(x) } func (*Sort) ProtoMessage() {} func (x *Sort) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Sort.ProtoReflect.Descriptor instead. func (*Sort) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{16} } func (x *Sort) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Sort) GetOrder() []*Expression_SortOrder { if x != nil { return x.Order } return nil } func (x *Sort) GetIsGlobal() bool { if x != nil && x.IsGlobal != nil { return *x.IsGlobal } return false } // Drop specified columns. type Drop struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) columns to drop. Columns []*Expression `protobuf:"bytes,2,rep,name=columns,proto3" json:"columns,omitempty"` // (Optional) names of columns to drop. 
ColumnNames []string `protobuf:"bytes,3,rep,name=column_names,json=columnNames,proto3" json:"column_names,omitempty"` } func (x *Drop) Reset() { *x = Drop{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Drop) String() string { return protoimpl.X.MessageStringOf(x) } func (*Drop) ProtoMessage() {} func (x *Drop) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Drop.ProtoReflect.Descriptor instead. func (*Drop) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{17} } func (x *Drop) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Drop) GetColumns() []*Expression { if x != nil { return x.Columns } return nil } func (x *Drop) GetColumnNames() []string { if x != nil { return x.ColumnNames } return nil } // Relation of type [[Deduplicate]] which have duplicate rows removed, could consider either only // the subset of columns or all the columns. type Deduplicate struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for a Deduplicate. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) Deduplicate based on a list of column names. // // This field does not co-use with `all_columns_as_keys`. ColumnNames []string `protobuf:"bytes,2,rep,name=column_names,json=columnNames,proto3" json:"column_names,omitempty"` // (Optional) Deduplicate based on all the columns of the input relation. // // This field does not co-use with `column_names`. 
AllColumnsAsKeys *bool `protobuf:"varint,3,opt,name=all_columns_as_keys,json=allColumnsAsKeys,proto3,oneof" json:"all_columns_as_keys,omitempty"` // (Optional) Deduplicate within the time range of watermark. WithinWatermark *bool `protobuf:"varint,4,opt,name=within_watermark,json=withinWatermark,proto3,oneof" json:"within_watermark,omitempty"` } func (x *Deduplicate) Reset() { *x = Deduplicate{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Deduplicate) String() string { return protoimpl.X.MessageStringOf(x) } func (*Deduplicate) ProtoMessage() {} func (x *Deduplicate) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Deduplicate.ProtoReflect.Descriptor instead. func (*Deduplicate) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{18} } func (x *Deduplicate) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Deduplicate) GetColumnNames() []string { if x != nil { return x.ColumnNames } return nil } func (x *Deduplicate) GetAllColumnsAsKeys() bool { if x != nil && x.AllColumnsAsKeys != nil { return *x.AllColumnsAsKeys } return false } func (x *Deduplicate) GetWithinWatermark() bool { if x != nil && x.WithinWatermark != nil { return *x.WithinWatermark } return false } // A relation that does not need to be qualified by name. type LocalRelation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) Local collection data serialized into Arrow IPC streaming format which contains // the schema of the data. 
Data []byte `protobuf:"bytes,1,opt,name=data,proto3,oneof" json:"data,omitempty"` // (Optional) The schema of local data. // It should be either a DDL-formatted type string or a JSON string. // // The server side will update the column names and data types according to this schema. // If the 'data' is not provided, then this schema will be required. Schema *string `protobuf:"bytes,2,opt,name=schema,proto3,oneof" json:"schema,omitempty"` } func (x *LocalRelation) Reset() { *x = LocalRelation{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *LocalRelation) String() string { return protoimpl.X.MessageStringOf(x) } func (*LocalRelation) ProtoMessage() {} func (x *LocalRelation) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use LocalRelation.ProtoReflect.Descriptor instead. func (*LocalRelation) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{19} } func (x *LocalRelation) GetData() []byte { if x != nil { return x.Data } return nil } func (x *LocalRelation) GetSchema() string { if x != nil && x.Schema != nil { return *x.Schema } return "" } // A local relation that has been cached already. type CachedLocalRelation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) A sha-256 hash of the serialized local relation in proto, see LocalRelation. 
Hash string `protobuf:"bytes,3,opt,name=hash,proto3" json:"hash,omitempty"` } func (x *CachedLocalRelation) Reset() { *x = CachedLocalRelation{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CachedLocalRelation) String() string { return protoimpl.X.MessageStringOf(x) } func (*CachedLocalRelation) ProtoMessage() {} func (x *CachedLocalRelation) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CachedLocalRelation.ProtoReflect.Descriptor instead. func (*CachedLocalRelation) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{20} } func (x *CachedLocalRelation) GetHash() string { if x != nil { return x.Hash } return "" } // Represents a remote relation that has been cached on server. type CachedRemoteRelation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) ID of the remote related (assigned by the service). 
RelationId string `protobuf:"bytes,1,opt,name=relation_id,json=relationId,proto3" json:"relation_id,omitempty"` } func (x *CachedRemoteRelation) Reset() { *x = CachedRemoteRelation{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CachedRemoteRelation) String() string { return protoimpl.X.MessageStringOf(x) } func (*CachedRemoteRelation) ProtoMessage() {} func (x *CachedRemoteRelation) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CachedRemoteRelation.ProtoReflect.Descriptor instead. func (*CachedRemoteRelation) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{21} } func (x *CachedRemoteRelation) GetRelationId() string { if x != nil { return x.RelationId } return "" } // Relation of type [[Sample]] that samples a fraction of the dataset. type Sample struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for a Sample. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) lower bound. LowerBound float64 `protobuf:"fixed64,2,opt,name=lower_bound,json=lowerBound,proto3" json:"lower_bound,omitempty"` // (Required) upper bound. UpperBound float64 `protobuf:"fixed64,3,opt,name=upper_bound,json=upperBound,proto3" json:"upper_bound,omitempty"` // (Optional) Whether to sample with replacement. WithReplacement *bool `protobuf:"varint,4,opt,name=with_replacement,json=withReplacement,proto3,oneof" json:"with_replacement,omitempty"` // (Required) The random seed. 
// This field is required to avoid generating mutable dataframes (see SPARK-48184 for details), // however, still keep it 'optional' here for backward compatibility. Seed *int64 `protobuf:"varint,5,opt,name=seed,proto3,oneof" json:"seed,omitempty"` // (Required) Explicitly sort the underlying plan to make the ordering deterministic or cache it. // This flag is true when invoking `dataframe.randomSplit` to randomly splits DataFrame with the // provided weights. Otherwise, it is false. DeterministicOrder bool `protobuf:"varint,6,opt,name=deterministic_order,json=deterministicOrder,proto3" json:"deterministic_order,omitempty"` } func (x *Sample) Reset() { *x = Sample{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Sample) String() string { return protoimpl.X.MessageStringOf(x) } func (*Sample) ProtoMessage() {} func (x *Sample) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Sample.ProtoReflect.Descriptor instead. 
func (*Sample) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{22} } func (x *Sample) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Sample) GetLowerBound() float64 { if x != nil { return x.LowerBound } return 0 } func (x *Sample) GetUpperBound() float64 { if x != nil { return x.UpperBound } return 0 } func (x *Sample) GetWithReplacement() bool { if x != nil && x.WithReplacement != nil { return *x.WithReplacement } return false } func (x *Sample) GetSeed() int64 { if x != nil && x.Seed != nil { return *x.Seed } return 0 } func (x *Sample) GetDeterministicOrder() bool { if x != nil { return x.DeterministicOrder } return false } // Relation of type [[Range]] that generates a sequence of integers. type Range struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) Default value = 0 Start *int64 `protobuf:"varint,1,opt,name=start,proto3,oneof" json:"start,omitempty"` // (Required) End int64 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` // (Required) Step int64 `protobuf:"varint,3,opt,name=step,proto3" json:"step,omitempty"` // Optional. Default value is assigned by 1) SQL conf "spark.sql.leafNodeDefaultParallelism" if // it is set, or 2) spark default parallelism. 
NumPartitions *int32 `protobuf:"varint,4,opt,name=num_partitions,json=numPartitions,proto3,oneof" json:"num_partitions,omitempty"` } func (x *Range) Reset() { *x = Range{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Range) String() string { return protoimpl.X.MessageStringOf(x) } func (*Range) ProtoMessage() {} func (x *Range) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Range.ProtoReflect.Descriptor instead. func (*Range) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{23} } func (x *Range) GetStart() int64 { if x != nil && x.Start != nil { return *x.Start } return 0 } func (x *Range) GetEnd() int64 { if x != nil { return x.End } return 0 } func (x *Range) GetStep() int64 { if x != nil { return x.Step } return 0 } func (x *Range) GetNumPartitions() int32 { if x != nil && x.NumPartitions != nil { return *x.NumPartitions } return 0 } // Relation alias. type SubqueryAlias struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation of SubqueryAlias. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The alias. Alias string `protobuf:"bytes,2,opt,name=alias,proto3" json:"alias,omitempty"` // (Optional) Qualifier of the alias. 
Qualifier []string `protobuf:"bytes,3,rep,name=qualifier,proto3" json:"qualifier,omitempty"` } func (x *SubqueryAlias) Reset() { *x = SubqueryAlias{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *SubqueryAlias) String() string { return protoimpl.X.MessageStringOf(x) } func (*SubqueryAlias) ProtoMessage() {} func (x *SubqueryAlias) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SubqueryAlias.ProtoReflect.Descriptor instead. func (*SubqueryAlias) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{24} } func (x *SubqueryAlias) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *SubqueryAlias) GetAlias() string { if x != nil { return x.Alias } return "" } func (x *SubqueryAlias) GetQualifier() []string { if x != nil { return x.Qualifier } return nil } // Relation repartition. type Repartition struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation of Repartition. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Must be positive. NumPartitions int32 `protobuf:"varint,2,opt,name=num_partitions,json=numPartitions,proto3" json:"num_partitions,omitempty"` // (Optional) Default value is false. 
Shuffle *bool `protobuf:"varint,3,opt,name=shuffle,proto3,oneof" json:"shuffle,omitempty"` } func (x *Repartition) Reset() { *x = Repartition{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Repartition) String() string { return protoimpl.X.MessageStringOf(x) } func (*Repartition) ProtoMessage() {} func (x *Repartition) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Repartition.ProtoReflect.Descriptor instead. func (*Repartition) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{25} } func (x *Repartition) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Repartition) GetNumPartitions() int32 { if x != nil { return x.NumPartitions } return 0 } func (x *Repartition) GetShuffle() bool { if x != nil && x.Shuffle != nil { return *x.Shuffle } return false } // Compose the string representing rows for output. // It will invoke 'Dataset.showString' to compute the results. type ShowString struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Number of rows to show. NumRows int32 `protobuf:"varint,2,opt,name=num_rows,json=numRows,proto3" json:"num_rows,omitempty"` // (Required) If set to more than 0, truncates strings to // `truncate` characters and all cells will be aligned right. 
Truncate int32 `protobuf:"varint,3,opt,name=truncate,proto3" json:"truncate,omitempty"` // (Required) If set to true, prints output rows vertically (one line per column value). Vertical bool `protobuf:"varint,4,opt,name=vertical,proto3" json:"vertical,omitempty"` } func (x *ShowString) Reset() { *x = ShowString{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ShowString) String() string { return protoimpl.X.MessageStringOf(x) } func (*ShowString) ProtoMessage() {} func (x *ShowString) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ShowString.ProtoReflect.Descriptor instead. func (*ShowString) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{26} } func (x *ShowString) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *ShowString) GetNumRows() int32 { if x != nil { return x.NumRows } return 0 } func (x *ShowString) GetTruncate() int32 { if x != nil { return x.Truncate } return 0 } func (x *ShowString) GetVertical() bool { if x != nil { return x.Vertical } return false } // Compose the string representing rows for output. // It will invoke 'Dataset.htmlString' to compute the results. type HtmlString struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Number of rows to show. 
NumRows int32 `protobuf:"varint,2,opt,name=num_rows,json=numRows,proto3" json:"num_rows,omitempty"` // (Required) If set to more than 0, truncates strings to // `truncate` characters and all cells will be aligned right. Truncate int32 `protobuf:"varint,3,opt,name=truncate,proto3" json:"truncate,omitempty"` } func (x *HtmlString) Reset() { *x = HtmlString{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *HtmlString) String() string { return protoimpl.X.MessageStringOf(x) } func (*HtmlString) ProtoMessage() {} func (x *HtmlString) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[27] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use HtmlString.ProtoReflect.Descriptor instead. func (*HtmlString) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{27} } func (x *HtmlString) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *HtmlString) GetNumRows() int32 { if x != nil { return x.NumRows } return 0 } func (x *HtmlString) GetTruncate() int32 { if x != nil { return x.Truncate } return 0 } // Computes specified statistics for numeric and string columns. // It will invoke 'Dataset.summary' (same as 'StatFunctions.summary') // to compute the results. type StatSummary struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) Statistics from to be computed. // // Available statistics are: // // count // mean // stddev // min // max // arbitrary approximate percentiles specified as a percentage (e.g. 
75%) // count_distinct // approx_count_distinct // // If no statistics are given, this function computes 'count', 'mean', 'stddev', 'min', // 'approximate quartiles' (percentiles at 25%, 50%, and 75%), and 'max'. Statistics []string `protobuf:"bytes,2,rep,name=statistics,proto3" json:"statistics,omitempty"` } func (x *StatSummary) Reset() { *x = StatSummary{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[28] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StatSummary) String() string { return protoimpl.X.MessageStringOf(x) } func (*StatSummary) ProtoMessage() {} func (x *StatSummary) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[28] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StatSummary.ProtoReflect.Descriptor instead. func (*StatSummary) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{28} } func (x *StatSummary) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *StatSummary) GetStatistics() []string { if x != nil { return x.Statistics } return nil } // Computes basic statistics for numeric and string columns, including count, mean, stddev, min, // and max. If no columns are given, this function computes statistics for all numerical or // string columns. type StatDescribe struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) Columns to compute statistics on. 
Cols []string `protobuf:"bytes,2,rep,name=cols,proto3" json:"cols,omitempty"` } func (x *StatDescribe) Reset() { *x = StatDescribe{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[29] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StatDescribe) String() string { return protoimpl.X.MessageStringOf(x) } func (*StatDescribe) ProtoMessage() {} func (x *StatDescribe) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[29] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StatDescribe.ProtoReflect.Descriptor instead. func (*StatDescribe) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{29} } func (x *StatDescribe) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *StatDescribe) GetCols() []string { if x != nil { return x.Cols } return nil } // Computes a pair-wise frequency table of the given columns. Also known as a contingency table. // It will invoke 'Dataset.stat.crosstab' (same as 'StatFunctions.crossTabulate') // to compute the results. type StatCrosstab struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The name of the first column. // // Distinct items will make the first item of each row. Col1 string `protobuf:"bytes,2,opt,name=col1,proto3" json:"col1,omitempty"` // (Required) The name of the second column. // // Distinct items will make the column names of the DataFrame. 
Col2 string `protobuf:"bytes,3,opt,name=col2,proto3" json:"col2,omitempty"` } func (x *StatCrosstab) Reset() { *x = StatCrosstab{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StatCrosstab) String() string { return protoimpl.X.MessageStringOf(x) } func (*StatCrosstab) ProtoMessage() {} func (x *StatCrosstab) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[30] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StatCrosstab.ProtoReflect.Descriptor instead. func (*StatCrosstab) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{30} } func (x *StatCrosstab) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *StatCrosstab) GetCol1() string { if x != nil { return x.Col1 } return "" } func (x *StatCrosstab) GetCol2() string { if x != nil { return x.Col2 } return "" } // Calculate the sample covariance of two numerical columns of a DataFrame. // It will invoke 'Dataset.stat.cov' (same as 'StatFunctions.calculateCov') to compute the results. type StatCov struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The name of the first column. Col1 string `protobuf:"bytes,2,opt,name=col1,proto3" json:"col1,omitempty"` // (Required) The name of the second column. 
Col2 string `protobuf:"bytes,3,opt,name=col2,proto3" json:"col2,omitempty"` } func (x *StatCov) Reset() { *x = StatCov{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[31] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StatCov) String() string { return protoimpl.X.MessageStringOf(x) } func (*StatCov) ProtoMessage() {} func (x *StatCov) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[31] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StatCov.ProtoReflect.Descriptor instead. func (*StatCov) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{31} } func (x *StatCov) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *StatCov) GetCol1() string { if x != nil { return x.Col1 } return "" } func (x *StatCov) GetCol2() string { if x != nil { return x.Col2 } return "" } // Calculates the correlation of two columns of a DataFrame. Currently only supports the Pearson // Correlation Coefficient. It will invoke 'Dataset.stat.corr' (same as // 'StatFunctions.pearsonCorrelation') to compute the results. type StatCorr struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The name of the first column. Col1 string `protobuf:"bytes,2,opt,name=col1,proto3" json:"col1,omitempty"` // (Required) The name of the second column. Col2 string `protobuf:"bytes,3,opt,name=col2,proto3" json:"col2,omitempty"` // (Optional) Default value is 'pearson'. // // Currently only supports the Pearson Correlation Coefficient. 
Method *string `protobuf:"bytes,4,opt,name=method,proto3,oneof" json:"method,omitempty"` } func (x *StatCorr) Reset() { *x = StatCorr{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[32] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StatCorr) String() string { return protoimpl.X.MessageStringOf(x) } func (*StatCorr) ProtoMessage() {} func (x *StatCorr) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[32] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StatCorr.ProtoReflect.Descriptor instead. func (*StatCorr) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{32} } func (x *StatCorr) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *StatCorr) GetCol1() string { if x != nil { return x.Col1 } return "" } func (x *StatCorr) GetCol2() string { if x != nil { return x.Col2 } return "" } func (x *StatCorr) GetMethod() string { if x != nil && x.Method != nil { return *x.Method } return "" } // Calculates the approximate quantiles of numerical columns of a DataFrame. // It will invoke 'Dataset.stat.approxQuantile' (same as 'StatFunctions.approxQuantile') // to compute the results. type StatApproxQuantile struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The names of the numerical columns. Cols []string `protobuf:"bytes,2,rep,name=cols,proto3" json:"cols,omitempty"` // (Required) A list of quantile probabilities. // // Each number must belong to [0, 1]. // For example 0 is the minimum, 0.5 is the median, 1 is the maximum. 
Probabilities []float64 `protobuf:"fixed64,3,rep,packed,name=probabilities,proto3" json:"probabilities,omitempty"` // (Required) The relative target precision to achieve (greater than or equal to 0). // // If set to zero, the exact quantiles are computed, which could be very expensive. // Note that values greater than 1 are accepted but give the same result as 1. RelativeError float64 `protobuf:"fixed64,4,opt,name=relative_error,json=relativeError,proto3" json:"relative_error,omitempty"` } func (x *StatApproxQuantile) Reset() { *x = StatApproxQuantile{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[33] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StatApproxQuantile) String() string { return protoimpl.X.MessageStringOf(x) } func (*StatApproxQuantile) ProtoMessage() {} func (x *StatApproxQuantile) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[33] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StatApproxQuantile.ProtoReflect.Descriptor instead. func (*StatApproxQuantile) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{33} } func (x *StatApproxQuantile) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *StatApproxQuantile) GetCols() []string { if x != nil { return x.Cols } return nil } func (x *StatApproxQuantile) GetProbabilities() []float64 { if x != nil { return x.Probabilities } return nil } func (x *StatApproxQuantile) GetRelativeError() float64 { if x != nil { return x.RelativeError } return 0 } // Finding frequent items for columns, possibly with false positives. // It will invoke 'Dataset.stat.freqItems' (same as 'StatFunctions.freqItems') // to compute the results. 
// StatFreqItems is the protoc-generated message for the Spark Connect
// 'StatFreqItems' relation. Generated code — do not hand-edit; change
// spark/connect/relations.proto and regenerate instead.
type StatFreqItems struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The input relation.
	Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"`
	// (Required) The names of the columns to search frequent items in.
	Cols []string `protobuf:"bytes,2,rep,name=cols,proto3" json:"cols,omitempty"`
	// (Optional) The minimum frequency for an item to be considered `frequent`.
	// Should be greater than 1e-4.
	Support *float64 `protobuf:"fixed64,3,opt,name=support,proto3,oneof" json:"support,omitempty"`
}

// Reset restores the message to its zero value and, in the unsafe
// (fast-path) protoimpl mode, re-registers the cached message info.
func (x *StatFreqItems) Reset() {
	*x = StatFreqItems{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[34]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a compact human-readable rendering of the message.
func (x *StatFreqItems) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *StatFreqItems as a protobuf message.
func (*StatFreqItems) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily
// storing the message info on first use when the unsafe mode is enabled.
func (x *StatFreqItems) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[34]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StatFreqItems.ProtoReflect.Descriptor instead.
func (*StatFreqItems) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{34}
}

// GetInput returns the input relation; nil-receiver safe.
func (x *StatFreqItems) GetInput() *Relation {
	if x != nil {
		return x.Input
	}
	return nil
}

// GetCols returns the column names; nil-receiver safe.
func (x *StatFreqItems) GetCols() []string {
	if x != nil {
		return x.Cols
	}
	return nil
}

// GetSupport returns the minimum frequency, or 0 when the optional
// field is unset; nil-receiver safe.
func (x *StatFreqItems) GetSupport() float64 {
	if x != nil && x.Support != nil {
		return *x.Support
	}
	return 0
}

// Returns a stratified sample without replacement based on the fraction
// given on each stratum.
// It will invoke 'Dataset.stat.freqItems' (same as 'StatFunctions.freqItems')
// to compute the results.
// NOTE(review): the invoked-method names above look copy-pasted from
// StatFreqItems — this relation presumably maps to 'Dataset.stat.sampleBy'
// ('StatFunctions.sampleByKey'); confirm and fix in
// spark/connect/relations.proto, not in this generated file.
// StatSampleBy is the protoc-generated message for the Spark Connect
// 'StatSampleBy' relation. Generated code — do not hand-edit; change
// spark/connect/relations.proto and regenerate instead.
type StatSampleBy struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The input relation.
	Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"`
	// (Required) The column that defines strata.
	Col *Expression `protobuf:"bytes,2,opt,name=col,proto3" json:"col,omitempty"`
	// (Required) Sampling fraction for each stratum.
	//
	// If a stratum is not specified, we treat its fraction as zero.
	Fractions []*StatSampleBy_Fraction `protobuf:"bytes,3,rep,name=fractions,proto3" json:"fractions,omitempty"`
	// (Required) The random seed.
	// This field is required to avoid generating mutable dataframes (see SPARK-48184 for details),
	// however, still keep it 'optional' here for backward compatibility.
	// NOTE(review): field number 4 is skipped (tags jump 3 -> 5) —
	// presumably reserved/retired in the .proto; confirm there.
	Seed *int64 `protobuf:"varint,5,opt,name=seed,proto3,oneof" json:"seed,omitempty"`
}

// Reset restores the message to its zero value and, in the unsafe
// (fast-path) protoimpl mode, re-registers the cached message info.
func (x *StatSampleBy) Reset() {
	*x = StatSampleBy{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[35]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a compact human-readable rendering of the message.
func (x *StatSampleBy) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *StatSampleBy as a protobuf message.
func (*StatSampleBy) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily
// storing the message info on first use when the unsafe mode is enabled.
func (x *StatSampleBy) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[35]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use StatSampleBy.ProtoReflect.Descriptor instead.
func (*StatSampleBy) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{35} } func (x *StatSampleBy) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *StatSampleBy) GetCol() *Expression { if x != nil { return x.Col } return nil } func (x *StatSampleBy) GetFractions() []*StatSampleBy_Fraction { if x != nil { return x.Fractions } return nil } func (x *StatSampleBy) GetSeed() int64 { if x != nil && x.Seed != nil { return *x.Seed } return 0 } // Replaces null values. // It will invoke 'Dataset.na.fill' (same as 'DataFrameNaFunctions.fill') to compute the results. // Following 3 parameter combinations are supported: // // 1, 'values' only contains 1 item, 'cols' is empty: // replaces null values in all type-compatible columns. // 2, 'values' only contains 1 item, 'cols' is not empty: // replaces null values in specified columns. // 3, 'values' contains more than 1 items, then 'cols' is required to have the same length: // replaces each specified column with corresponding value. type NAFill struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) Optional list of column names to consider. Cols []string `protobuf:"bytes,2,rep,name=cols,proto3" json:"cols,omitempty"` // (Required) Values to replace null values with. // // Should contain at least 1 item. 
// Only 4 data types are supported now: bool, long, double, string Values []*Expression_Literal `protobuf:"bytes,3,rep,name=values,proto3" json:"values,omitempty"` } func (x *NAFill) Reset() { *x = NAFill{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[36] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *NAFill) String() string { return protoimpl.X.MessageStringOf(x) } func (*NAFill) ProtoMessage() {} func (x *NAFill) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[36] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use NAFill.ProtoReflect.Descriptor instead. func (*NAFill) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{36} } func (x *NAFill) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *NAFill) GetCols() []string { if x != nil { return x.Cols } return nil } func (x *NAFill) GetValues() []*Expression_Literal { if x != nil { return x.Values } return nil } // Drop rows containing null values. // It will invoke 'Dataset.na.drop' (same as 'DataFrameNaFunctions.drop') to compute the results. type NADrop struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) Optional list of column names to consider. // // When it is empty, all the columns in the input relation will be considered. Cols []string `protobuf:"bytes,2,rep,name=cols,proto3" json:"cols,omitempty"` // (Optional) The minimum number of non-null and non-NaN values required to keep. 
// // When not set, it is equivalent to the number of considered columns, which means // a row will be kept only if all columns are non-null. // // 'how' options ('all', 'any') can be easily converted to this field: // - 'all' -> set 'min_non_nulls' 1; // - 'any' -> keep 'min_non_nulls' unset; MinNonNulls *int32 `protobuf:"varint,3,opt,name=min_non_nulls,json=minNonNulls,proto3,oneof" json:"min_non_nulls,omitempty"` } func (x *NADrop) Reset() { *x = NADrop{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[37] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *NADrop) String() string { return protoimpl.X.MessageStringOf(x) } func (*NADrop) ProtoMessage() {} func (x *NADrop) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[37] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use NADrop.ProtoReflect.Descriptor instead. func (*NADrop) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{37} } func (x *NADrop) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *NADrop) GetCols() []string { if x != nil { return x.Cols } return nil } func (x *NADrop) GetMinNonNulls() int32 { if x != nil && x.MinNonNulls != nil { return *x.MinNonNulls } return 0 } // Replaces old values with the corresponding values. // It will invoke 'Dataset.na.replace' (same as 'DataFrameNaFunctions.replace') // to compute the results. type NAReplace struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) List of column names to consider. 
// // When it is empty, all the type-compatible columns in the input relation will be considered. Cols []string `protobuf:"bytes,2,rep,name=cols,proto3" json:"cols,omitempty"` // (Optional) The value replacement mapping. Replacements []*NAReplace_Replacement `protobuf:"bytes,3,rep,name=replacements,proto3" json:"replacements,omitempty"` } func (x *NAReplace) Reset() { *x = NAReplace{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[38] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *NAReplace) String() string { return protoimpl.X.MessageStringOf(x) } func (*NAReplace) ProtoMessage() {} func (x *NAReplace) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[38] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use NAReplace.ProtoReflect.Descriptor instead. func (*NAReplace) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{38} } func (x *NAReplace) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *NAReplace) GetCols() []string { if x != nil { return x.Cols } return nil } func (x *NAReplace) GetReplacements() []*NAReplace_Replacement { if x != nil { return x.Replacements } return nil } // Rename columns on the input relation by the same length of names. type ToDF struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation of RenameColumnsBySameLengthNames. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) // // The number of columns of the input relation must be equal to the length // of this field. If this is not true, an exception will be returned. 
ColumnNames []string `protobuf:"bytes,2,rep,name=column_names,json=columnNames,proto3" json:"column_names,omitempty"` } func (x *ToDF) Reset() { *x = ToDF{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[39] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ToDF) String() string { return protoimpl.X.MessageStringOf(x) } func (*ToDF) ProtoMessage() {} func (x *ToDF) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[39] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ToDF.ProtoReflect.Descriptor instead. func (*ToDF) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{39} } func (x *ToDF) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *ToDF) GetColumnNames() []string { if x != nil { return x.ColumnNames } return nil } // Rename columns on the input relation by a map with name to name mapping. type WithColumnsRenamed struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) // // Renaming column names of input relation from A to B where A is the map key // and B is the map value. This is a no-op if schema doesn't contain any A. It // does not require that all input relation column names to present as keys. // duplicated B are not allowed. // // Deprecated: Marked as deprecated in spark/connect/relations.proto. 
RenameColumnsMap map[string]string `protobuf:"bytes,2,rep,name=rename_columns_map,json=renameColumnsMap,proto3" json:"rename_columns_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` Renames []*WithColumnsRenamed_Rename `protobuf:"bytes,3,rep,name=renames,proto3" json:"renames,omitempty"` } func (x *WithColumnsRenamed) Reset() { *x = WithColumnsRenamed{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[40] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WithColumnsRenamed) String() string { return protoimpl.X.MessageStringOf(x) } func (*WithColumnsRenamed) ProtoMessage() {} func (x *WithColumnsRenamed) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[40] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WithColumnsRenamed.ProtoReflect.Descriptor instead. func (*WithColumnsRenamed) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{40} } func (x *WithColumnsRenamed) GetInput() *Relation { if x != nil { return x.Input } return nil } // Deprecated: Marked as deprecated in spark/connect/relations.proto. func (x *WithColumnsRenamed) GetRenameColumnsMap() map[string]string { if x != nil { return x.RenameColumnsMap } return nil } func (x *WithColumnsRenamed) GetRenames() []*WithColumnsRenamed_Rename { if x != nil { return x.Renames } return nil } // Adding columns or replacing the existing columns that have the same names. type WithColumns struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. 
Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) // // Given a column name, apply the corresponding expression on the column. If column // name exists in the input relation, then replace the column. If the column name // does not exist in the input relation, then adds it as a new column. // // Only one name part is expected from each Expression.Alias. // // An exception is thrown when duplicated names are present in the mapping. Aliases []*Expression_Alias `protobuf:"bytes,2,rep,name=aliases,proto3" json:"aliases,omitempty"` } func (x *WithColumns) Reset() { *x = WithColumns{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[41] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WithColumns) String() string { return protoimpl.X.MessageStringOf(x) } func (*WithColumns) ProtoMessage() {} func (x *WithColumns) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[41] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WithColumns.ProtoReflect.Descriptor instead. func (*WithColumns) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{41} } func (x *WithColumns) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *WithColumns) GetAliases() []*Expression_Alias { if x != nil { return x.Aliases } return nil } type WithWatermark struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Name of the column containing event time. 
EventTime string `protobuf:"bytes,2,opt,name=event_time,json=eventTime,proto3" json:"event_time,omitempty"` // (Required) DelayThreshold string `protobuf:"bytes,3,opt,name=delay_threshold,json=delayThreshold,proto3" json:"delay_threshold,omitempty"` } func (x *WithWatermark) Reset() { *x = WithWatermark{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[42] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WithWatermark) String() string { return protoimpl.X.MessageStringOf(x) } func (*WithWatermark) ProtoMessage() {} func (x *WithWatermark) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[42] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WithWatermark.ProtoReflect.Descriptor instead. func (*WithWatermark) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{42} } func (x *WithWatermark) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *WithWatermark) GetEventTime() string { if x != nil { return x.EventTime } return "" } func (x *WithWatermark) GetDelayThreshold() string { if x != nil { return x.DelayThreshold } return "" } // Specify a hint over a relation. Hint should have a name and optional parameters. type Hint struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Hint name. // // Supported Join hints include BROADCAST, MERGE, SHUFFLE_HASH, SHUFFLE_REPLICATE_NL. // // Supported partitioning hints include COALESCE, REPARTITION, REPARTITION_BY_RANGE. 
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` // (Optional) Hint parameters. Parameters []*Expression `protobuf:"bytes,3,rep,name=parameters,proto3" json:"parameters,omitempty"` } func (x *Hint) Reset() { *x = Hint{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[43] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Hint) String() string { return protoimpl.X.MessageStringOf(x) } func (*Hint) ProtoMessage() {} func (x *Hint) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[43] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Hint.ProtoReflect.Descriptor instead. func (*Hint) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{43} } func (x *Hint) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Hint) GetName() string { if x != nil { return x.Name } return "" } func (x *Hint) GetParameters() []*Expression { if x != nil { return x.Parameters } return nil } // Unpivot a DataFrame from wide format to long format, optionally leaving identifier columns set. type Unpivot struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Id columns. Ids []*Expression `protobuf:"bytes,2,rep,name=ids,proto3" json:"ids,omitempty"` // (Optional) Value columns to unpivot. Values *Unpivot_Values `protobuf:"bytes,3,opt,name=values,proto3,oneof" json:"values,omitempty"` // (Required) Name of the variable column. 
VariableColumnName string `protobuf:"bytes,4,opt,name=variable_column_name,json=variableColumnName,proto3" json:"variable_column_name,omitempty"` // (Required) Name of the value column. ValueColumnName string `protobuf:"bytes,5,opt,name=value_column_name,json=valueColumnName,proto3" json:"value_column_name,omitempty"` } func (x *Unpivot) Reset() { *x = Unpivot{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[44] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Unpivot) String() string { return protoimpl.X.MessageStringOf(x) } func (*Unpivot) ProtoMessage() {} func (x *Unpivot) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[44] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Unpivot.ProtoReflect.Descriptor instead. func (*Unpivot) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{44} } func (x *Unpivot) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Unpivot) GetIds() []*Expression { if x != nil { return x.Ids } return nil } func (x *Unpivot) GetValues() *Unpivot_Values { if x != nil { return x.Values } return nil } func (x *Unpivot) GetVariableColumnName() string { if x != nil { return x.VariableColumnName } return "" } func (x *Unpivot) GetValueColumnName() string { if x != nil { return x.ValueColumnName } return "" } // Transpose a DataFrame, switching rows to columns. // Transforms the DataFrame such that the values in the specified index column // become the new columns of the DataFrame. type Transpose struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. 
Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Optional) A list of columns that will be treated as the indices. // Only single column is supported now. IndexColumns []*Expression `protobuf:"bytes,2,rep,name=index_columns,json=indexColumns,proto3" json:"index_columns,omitempty"` } func (x *Transpose) Reset() { *x = Transpose{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[45] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Transpose) String() string { return protoimpl.X.MessageStringOf(x) } func (*Transpose) ProtoMessage() {} func (x *Transpose) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[45] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Transpose.ProtoReflect.Descriptor instead. func (*Transpose) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{45} } func (x *Transpose) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *Transpose) GetIndexColumns() []*Expression { if x != nil { return x.IndexColumns } return nil } type UnresolvedTableValuedFunction struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) name (or unparsed name for user defined function) for the unresolved function. FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` // (Optional) Function arguments. Empty arguments are allowed. 
Arguments []*Expression `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"` } func (x *UnresolvedTableValuedFunction) Reset() { *x = UnresolvedTableValuedFunction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[46] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *UnresolvedTableValuedFunction) String() string { return protoimpl.X.MessageStringOf(x) } func (*UnresolvedTableValuedFunction) ProtoMessage() {} func (x *UnresolvedTableValuedFunction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[46] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use UnresolvedTableValuedFunction.ProtoReflect.Descriptor instead. func (*UnresolvedTableValuedFunction) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{46} } func (x *UnresolvedTableValuedFunction) GetFunctionName() string { if x != nil { return x.FunctionName } return "" } func (x *UnresolvedTableValuedFunction) GetArguments() []*Expression { if x != nil { return x.Arguments } return nil } type ToSchema struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The user provided schema. // // The Sever side will update the dataframe with this schema. 
Schema *DataType `protobuf:"bytes,2,opt,name=schema,proto3" json:"schema,omitempty"` } func (x *ToSchema) Reset() { *x = ToSchema{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[47] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ToSchema) String() string { return protoimpl.X.MessageStringOf(x) } func (*ToSchema) ProtoMessage() {} func (x *ToSchema) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[47] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ToSchema.ProtoReflect.Descriptor instead. func (*ToSchema) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{47} } func (x *ToSchema) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *ToSchema) GetSchema() *DataType { if x != nil { return x.Schema } return nil } type RepartitionByExpression struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The input relation. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) The partitioning expressions. PartitionExprs []*Expression `protobuf:"bytes,2,rep,name=partition_exprs,json=partitionExprs,proto3" json:"partition_exprs,omitempty"` // (Optional) number of partitions, must be positive. 
NumPartitions *int32 `protobuf:"varint,3,opt,name=num_partitions,json=numPartitions,proto3,oneof" json:"num_partitions,omitempty"` } func (x *RepartitionByExpression) Reset() { *x = RepartitionByExpression{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[48] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *RepartitionByExpression) String() string { return protoimpl.X.MessageStringOf(x) } func (*RepartitionByExpression) ProtoMessage() {} func (x *RepartitionByExpression) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[48] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use RepartitionByExpression.ProtoReflect.Descriptor instead. func (*RepartitionByExpression) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{48} } func (x *RepartitionByExpression) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *RepartitionByExpression) GetPartitionExprs() []*Expression { if x != nil { return x.PartitionExprs } return nil } func (x *RepartitionByExpression) GetNumPartitions() int32 { if x != nil && x.NumPartitions != nil { return *x.NumPartitions } return 0 } type MapPartitions struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for a mapPartitions-equivalent API: mapInPandas, mapInArrow. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Input user-defined function. Func *CommonInlineUserDefinedFunction `protobuf:"bytes,2,opt,name=func,proto3" json:"func,omitempty"` // (Optional) Whether to use barrier mode execution or not. 
IsBarrier *bool `protobuf:"varint,3,opt,name=is_barrier,json=isBarrier,proto3,oneof" json:"is_barrier,omitempty"` // (Optional) ResourceProfile id used for the stage level scheduling. ProfileId *int32 `protobuf:"varint,4,opt,name=profile_id,json=profileId,proto3,oneof" json:"profile_id,omitempty"` } func (x *MapPartitions) Reset() { *x = MapPartitions{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[49] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *MapPartitions) String() string { return protoimpl.X.MessageStringOf(x) } func (*MapPartitions) ProtoMessage() {} func (x *MapPartitions) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[49] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use MapPartitions.ProtoReflect.Descriptor instead. func (*MapPartitions) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{49} } func (x *MapPartitions) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *MapPartitions) GetFunc() *CommonInlineUserDefinedFunction { if x != nil { return x.Func } return nil } func (x *MapPartitions) GetIsBarrier() bool { if x != nil && x.IsBarrier != nil { return *x.IsBarrier } return false } func (x *MapPartitions) GetProfileId() int32 { if x != nil && x.ProfileId != nil { return *x.ProfileId } return 0 } type GroupMap struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for Group Map API: apply, applyInPandas. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Expressions for grouping keys. 
GroupingExpressions []*Expression `protobuf:"bytes,2,rep,name=grouping_expressions,json=groupingExpressions,proto3" json:"grouping_expressions,omitempty"` // (Required) Input user-defined function. Func *CommonInlineUserDefinedFunction `protobuf:"bytes,3,opt,name=func,proto3" json:"func,omitempty"` // (Optional) Expressions for sorting. Only used by Scala Sorted Group Map API. SortingExpressions []*Expression `protobuf:"bytes,4,rep,name=sorting_expressions,json=sortingExpressions,proto3" json:"sorting_expressions,omitempty"` // Below fields are only used by (Flat)MapGroupsWithState // (Optional) Input relation for initial State. InitialInput *Relation `protobuf:"bytes,5,opt,name=initial_input,json=initialInput,proto3" json:"initial_input,omitempty"` // (Optional) Expressions for grouping keys of the initial state input relation. InitialGroupingExpressions []*Expression `protobuf:"bytes,6,rep,name=initial_grouping_expressions,json=initialGroupingExpressions,proto3" json:"initial_grouping_expressions,omitempty"` // (Optional) True if MapGroupsWithState, false if FlatMapGroupsWithState. IsMapGroupsWithState *bool `protobuf:"varint,7,opt,name=is_map_groups_with_state,json=isMapGroupsWithState,proto3,oneof" json:"is_map_groups_with_state,omitempty"` // (Optional) The output mode of the function. OutputMode *string `protobuf:"bytes,8,opt,name=output_mode,json=outputMode,proto3,oneof" json:"output_mode,omitempty"` // (Optional) Timeout configuration for groups that do not receive data for a while. TimeoutConf *string `protobuf:"bytes,9,opt,name=timeout_conf,json=timeoutConf,proto3,oneof" json:"timeout_conf,omitempty"` // (Optional) The schema for the grouped state. StateSchema *DataType `protobuf:"bytes,10,opt,name=state_schema,json=stateSchema,proto3,oneof" json:"state_schema,omitempty"` // Below fields are used by TransformWithState and TransformWithStateInPandas // (Optional) TransformWithState related parameters. 
TransformWithStateInfo *TransformWithStateInfo `protobuf:"bytes,11,opt,name=transform_with_state_info,json=transformWithStateInfo,proto3,oneof" json:"transform_with_state_info,omitempty"` } func (x *GroupMap) Reset() { *x = GroupMap{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[50] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *GroupMap) String() string { return protoimpl.X.MessageStringOf(x) } func (*GroupMap) ProtoMessage() {} func (x *GroupMap) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[50] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use GroupMap.ProtoReflect.Descriptor instead. func (*GroupMap) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{50} } func (x *GroupMap) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *GroupMap) GetGroupingExpressions() []*Expression { if x != nil { return x.GroupingExpressions } return nil } func (x *GroupMap) GetFunc() *CommonInlineUserDefinedFunction { if x != nil { return x.Func } return nil } func (x *GroupMap) GetSortingExpressions() []*Expression { if x != nil { return x.SortingExpressions } return nil } func (x *GroupMap) GetInitialInput() *Relation { if x != nil { return x.InitialInput } return nil } func (x *GroupMap) GetInitialGroupingExpressions() []*Expression { if x != nil { return x.InitialGroupingExpressions } return nil } func (x *GroupMap) GetIsMapGroupsWithState() bool { if x != nil && x.IsMapGroupsWithState != nil { return *x.IsMapGroupsWithState } return false } func (x *GroupMap) GetOutputMode() string { if x != nil && x.OutputMode != nil { return *x.OutputMode } return "" } func (x *GroupMap) GetTimeoutConf() string { if x != nil && x.TimeoutConf != nil 
{ return *x.TimeoutConf } return "" } func (x *GroupMap) GetStateSchema() *DataType { if x != nil { return x.StateSchema } return nil } func (x *GroupMap) GetTransformWithStateInfo() *TransformWithStateInfo { if x != nil { return x.TransformWithStateInfo } return nil } // Additional input parameters used for TransformWithState operator. type TransformWithStateInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Time mode string for transformWithState. TimeMode string `protobuf:"bytes,1,opt,name=time_mode,json=timeMode,proto3" json:"time_mode,omitempty"` // (Optional) Event time column name. EventTimeColumnName *string `protobuf:"bytes,2,opt,name=event_time_column_name,json=eventTimeColumnName,proto3,oneof" json:"event_time_column_name,omitempty"` // (Optional) Schema for the output DataFrame. // Only required used for TransformWithStateInPandas. OutputSchema *DataType `protobuf:"bytes,3,opt,name=output_schema,json=outputSchema,proto3,oneof" json:"output_schema,omitempty"` } func (x *TransformWithStateInfo) Reset() { *x = TransformWithStateInfo{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[51] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *TransformWithStateInfo) String() string { return protoimpl.X.MessageStringOf(x) } func (*TransformWithStateInfo) ProtoMessage() {} func (x *TransformWithStateInfo) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[51] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TransformWithStateInfo.ProtoReflect.Descriptor instead. 
func (*TransformWithStateInfo) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{51} } func (x *TransformWithStateInfo) GetTimeMode() string { if x != nil { return x.TimeMode } return "" } func (x *TransformWithStateInfo) GetEventTimeColumnName() string { if x != nil && x.EventTimeColumnName != nil { return *x.EventTimeColumnName } return "" } func (x *TransformWithStateInfo) GetOutputSchema() *DataType { if x != nil { return x.OutputSchema } return nil } type CoGroupMap struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) One input relation for CoGroup Map API - applyInPandas. Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // Expressions for grouping keys of the first input relation. InputGroupingExpressions []*Expression `protobuf:"bytes,2,rep,name=input_grouping_expressions,json=inputGroupingExpressions,proto3" json:"input_grouping_expressions,omitempty"` // (Required) The other input relation. Other *Relation `protobuf:"bytes,3,opt,name=other,proto3" json:"other,omitempty"` // Expressions for grouping keys of the other input relation. OtherGroupingExpressions []*Expression `protobuf:"bytes,4,rep,name=other_grouping_expressions,json=otherGroupingExpressions,proto3" json:"other_grouping_expressions,omitempty"` // (Required) Input user-defined function. Func *CommonInlineUserDefinedFunction `protobuf:"bytes,5,opt,name=func,proto3" json:"func,omitempty"` // (Optional) Expressions for sorting. Only used by Scala Sorted CoGroup Map API. InputSortingExpressions []*Expression `protobuf:"bytes,6,rep,name=input_sorting_expressions,json=inputSortingExpressions,proto3" json:"input_sorting_expressions,omitempty"` // (Optional) Expressions for sorting. Only used by Scala Sorted CoGroup Map API. 
OtherSortingExpressions []*Expression `protobuf:"bytes,7,rep,name=other_sorting_expressions,json=otherSortingExpressions,proto3" json:"other_sorting_expressions,omitempty"` } func (x *CoGroupMap) Reset() { *x = CoGroupMap{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[52] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CoGroupMap) String() string { return protoimpl.X.MessageStringOf(x) } func (*CoGroupMap) ProtoMessage() {} func (x *CoGroupMap) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[52] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CoGroupMap.ProtoReflect.Descriptor instead. func (*CoGroupMap) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{52} } func (x *CoGroupMap) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *CoGroupMap) GetInputGroupingExpressions() []*Expression { if x != nil { return x.InputGroupingExpressions } return nil } func (x *CoGroupMap) GetOther() *Relation { if x != nil { return x.Other } return nil } func (x *CoGroupMap) GetOtherGroupingExpressions() []*Expression { if x != nil { return x.OtherGroupingExpressions } return nil } func (x *CoGroupMap) GetFunc() *CommonInlineUserDefinedFunction { if x != nil { return x.Func } return nil } func (x *CoGroupMap) GetInputSortingExpressions() []*Expression { if x != nil { return x.InputSortingExpressions } return nil } func (x *CoGroupMap) GetOtherSortingExpressions() []*Expression { if x != nil { return x.OtherSortingExpressions } return nil } type ApplyInPandasWithState struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Input relation for applyInPandasWithState. 
Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` // (Required) Expressions for grouping keys. GroupingExpressions []*Expression `protobuf:"bytes,2,rep,name=grouping_expressions,json=groupingExpressions,proto3" json:"grouping_expressions,omitempty"` // (Required) Input user-defined function. Func *CommonInlineUserDefinedFunction `protobuf:"bytes,3,opt,name=func,proto3" json:"func,omitempty"` // (Required) Schema for the output DataFrame. OutputSchema string `protobuf:"bytes,4,opt,name=output_schema,json=outputSchema,proto3" json:"output_schema,omitempty"` // (Required) Schema for the state. StateSchema string `protobuf:"bytes,5,opt,name=state_schema,json=stateSchema,proto3" json:"state_schema,omitempty"` // (Required) The output mode of the function. OutputMode string `protobuf:"bytes,6,opt,name=output_mode,json=outputMode,proto3" json:"output_mode,omitempty"` // (Required) Timeout configuration for groups that do not receive data for a while. TimeoutConf string `protobuf:"bytes,7,opt,name=timeout_conf,json=timeoutConf,proto3" json:"timeout_conf,omitempty"` } func (x *ApplyInPandasWithState) Reset() { *x = ApplyInPandasWithState{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[53] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *ApplyInPandasWithState) String() string { return protoimpl.X.MessageStringOf(x) } func (*ApplyInPandasWithState) ProtoMessage() {} func (x *ApplyInPandasWithState) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[53] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ApplyInPandasWithState.ProtoReflect.Descriptor instead. 
func (*ApplyInPandasWithState) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{53} } func (x *ApplyInPandasWithState) GetInput() *Relation { if x != nil { return x.Input } return nil } func (x *ApplyInPandasWithState) GetGroupingExpressions() []*Expression { if x != nil { return x.GroupingExpressions } return nil } func (x *ApplyInPandasWithState) GetFunc() *CommonInlineUserDefinedFunction { if x != nil { return x.Func } return nil } func (x *ApplyInPandasWithState) GetOutputSchema() string { if x != nil { return x.OutputSchema } return "" } func (x *ApplyInPandasWithState) GetStateSchema() string { if x != nil { return x.StateSchema } return "" } func (x *ApplyInPandasWithState) GetOutputMode() string { if x != nil { return x.OutputMode } return "" } func (x *ApplyInPandasWithState) GetTimeoutConf() string { if x != nil { return x.TimeoutConf } return "" } type CommonInlineUserDefinedTableFunction struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Name of the user-defined table function. FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` // (Optional) Whether the user-defined table function is deterministic. Deterministic bool `protobuf:"varint,2,opt,name=deterministic,proto3" json:"deterministic,omitempty"` // (Optional) Function input arguments. Empty arguments are allowed. Arguments []*Expression `protobuf:"bytes,3,rep,name=arguments,proto3" json:"arguments,omitempty"` // (Required) Type of the user-defined table function. 
// // Types that are assignable to Function: // // *CommonInlineUserDefinedTableFunction_PythonUdtf Function isCommonInlineUserDefinedTableFunction_Function `protobuf_oneof:"function"` } func (x *CommonInlineUserDefinedTableFunction) Reset() { *x = CommonInlineUserDefinedTableFunction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[54] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CommonInlineUserDefinedTableFunction) String() string { return protoimpl.X.MessageStringOf(x) } func (*CommonInlineUserDefinedTableFunction) ProtoMessage() {} func (x *CommonInlineUserDefinedTableFunction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[54] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CommonInlineUserDefinedTableFunction.ProtoReflect.Descriptor instead. 
func (*CommonInlineUserDefinedTableFunction) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{54} } func (x *CommonInlineUserDefinedTableFunction) GetFunctionName() string { if x != nil { return x.FunctionName } return "" } func (x *CommonInlineUserDefinedTableFunction) GetDeterministic() bool { if x != nil { return x.Deterministic } return false } func (x *CommonInlineUserDefinedTableFunction) GetArguments() []*Expression { if x != nil { return x.Arguments } return nil } func (m *CommonInlineUserDefinedTableFunction) GetFunction() isCommonInlineUserDefinedTableFunction_Function { if m != nil { return m.Function } return nil } func (x *CommonInlineUserDefinedTableFunction) GetPythonUdtf() *PythonUDTF { if x, ok := x.GetFunction().(*CommonInlineUserDefinedTableFunction_PythonUdtf); ok { return x.PythonUdtf } return nil } type isCommonInlineUserDefinedTableFunction_Function interface { isCommonInlineUserDefinedTableFunction_Function() } type CommonInlineUserDefinedTableFunction_PythonUdtf struct { PythonUdtf *PythonUDTF `protobuf:"bytes,4,opt,name=python_udtf,json=pythonUdtf,proto3,oneof"` } func (*CommonInlineUserDefinedTableFunction_PythonUdtf) isCommonInlineUserDefinedTableFunction_Function() { } type PythonUDTF struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Optional) Return type of the Python UDTF. ReturnType *DataType `protobuf:"bytes,1,opt,name=return_type,json=returnType,proto3,oneof" json:"return_type,omitempty"` // (Required) EvalType of the Python UDTF. EvalType int32 `protobuf:"varint,2,opt,name=eval_type,json=evalType,proto3" json:"eval_type,omitempty"` // (Required) The encoded commands of the Python UDTF. Command []byte `protobuf:"bytes,3,opt,name=command,proto3" json:"command,omitempty"` // (Required) Python version being used in the client. 
PythonVer string `protobuf:"bytes,4,opt,name=python_ver,json=pythonVer,proto3" json:"python_ver,omitempty"` } func (x *PythonUDTF) Reset() { *x = PythonUDTF{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[55] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *PythonUDTF) String() string { return protoimpl.X.MessageStringOf(x) } func (*PythonUDTF) ProtoMessage() {} func (x *PythonUDTF) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[55] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use PythonUDTF.ProtoReflect.Descriptor instead. func (*PythonUDTF) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{55} } func (x *PythonUDTF) GetReturnType() *DataType { if x != nil { return x.ReturnType } return nil } func (x *PythonUDTF) GetEvalType() int32 { if x != nil { return x.EvalType } return 0 } func (x *PythonUDTF) GetCommand() []byte { if x != nil { return x.Command } return nil } func (x *PythonUDTF) GetPythonVer() string { if x != nil { return x.PythonVer } return "" } type CommonInlineUserDefinedDataSource struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Name of the data source. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // (Required) The data source type. 
// // Types that are assignable to DataSource: // // *CommonInlineUserDefinedDataSource_PythonDataSource DataSource isCommonInlineUserDefinedDataSource_DataSource `protobuf_oneof:"data_source"` } func (x *CommonInlineUserDefinedDataSource) Reset() { *x = CommonInlineUserDefinedDataSource{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[56] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *CommonInlineUserDefinedDataSource) String() string { return protoimpl.X.MessageStringOf(x) } func (*CommonInlineUserDefinedDataSource) ProtoMessage() {} func (x *CommonInlineUserDefinedDataSource) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[56] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CommonInlineUserDefinedDataSource.ProtoReflect.Descriptor instead. 
// NOTE(review): machine-generated protobuf bindings (protoc-gen-go). Do not
// hand-edit logic here — regenerate from spark/connect/relations.proto. The
// msgTypes indices and rawDescGZIP paths below must match the file descriptor.

// Deprecated: Use CommonInlineUserDefinedDataSource.ProtoReflect.Descriptor instead.
func (*CommonInlineUserDefinedDataSource) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{56}
}

// GetName returns the data source name. Like all generated getters it is safe
// to call on a nil receiver and returns the zero value in that case.
func (x *CommonInlineUserDefinedDataSource) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

// GetDataSource returns the raw oneof wrapper, or nil when unset.
func (m *CommonInlineUserDefinedDataSource) GetDataSource() isCommonInlineUserDefinedDataSource_DataSource {
	if m != nil {
		return m.DataSource
	}
	return nil
}

// GetPythonDataSource returns the oneof payload only when the
// python_data_source case is set; otherwise nil.
func (x *CommonInlineUserDefinedDataSource) GetPythonDataSource() *PythonDataSource {
	if x, ok := x.GetDataSource().(*CommonInlineUserDefinedDataSource_PythonDataSource); ok {
		return x.PythonDataSource
	}
	return nil
}

type isCommonInlineUserDefinedDataSource_DataSource interface {
	isCommonInlineUserDefinedDataSource_DataSource()
}

type CommonInlineUserDefinedDataSource_PythonDataSource struct {
	PythonDataSource *PythonDataSource `protobuf:"bytes,2,opt,name=python_data_source,json=pythonDataSource,proto3,oneof"`
}

func (*CommonInlineUserDefinedDataSource_PythonDataSource) isCommonInlineUserDefinedDataSource_DataSource() {
}

type PythonDataSource struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The encoded commands of the Python data source.
	Command []byte `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
	// (Required) Python version being used in the client.
	PythonVer string `protobuf:"bytes,2,opt,name=python_ver,json=pythonVer,proto3" json:"python_ver,omitempty"`
}

func (x *PythonDataSource) Reset() {
	*x = PythonDataSource{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[57]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *PythonDataSource) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*PythonDataSource) ProtoMessage() {}

// ProtoReflect lazily attaches the message type info on first use when the
// unsafe fast path is enabled.
func (x *PythonDataSource) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[57]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PythonDataSource.ProtoReflect.Descriptor instead.
func (*PythonDataSource) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{57}
}

func (x *PythonDataSource) GetCommand() []byte {
	if x != nil {
		return x.Command
	}
	return nil
}

func (x *PythonDataSource) GetPythonVer() string {
	if x != nil {
		return x.PythonVer
	}
	return ""
}

// Collect arbitrary (named) metrics from a dataset.
type CollectMetrics struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The input relation.
	Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"`
	// (Required) Name of the metrics.
	Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	// (Required) The metric sequence.
	Metrics []*Expression `protobuf:"bytes,3,rep,name=metrics,proto3" json:"metrics,omitempty"`
}

func (x *CollectMetrics) Reset() {
	*x = CollectMetrics{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[58]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *CollectMetrics) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*CollectMetrics) ProtoMessage() {}

func (x *CollectMetrics) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[58]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CollectMetrics.ProtoReflect.Descriptor instead.
func (*CollectMetrics) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{58}
}

func (x *CollectMetrics) GetInput() *Relation {
	if x != nil {
		return x.Input
	}
	return nil
}

func (x *CollectMetrics) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *CollectMetrics) GetMetrics() []*Expression {
	if x != nil {
		return x.Metrics
	}
	return nil
}

type Parse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Input relation to Parse. The input is expected to have single text column.
	Input *Relation `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"`
	// (Required) The expected format of the text.
	Format Parse_ParseFormat `protobuf:"varint,2,opt,name=format,proto3,enum=spark.connect.Parse_ParseFormat" json:"format,omitempty"`
	// (Optional) DataType representing the schema. If not set, Spark will infer the schema.
	Schema *DataType `protobuf:"bytes,3,opt,name=schema,proto3,oneof" json:"schema,omitempty"`
	// Options for the csv/json parser. The map key is case insensitive.
	Options map[string]string `protobuf:"bytes,4,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *Parse) Reset() {
	*x = Parse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[59]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Parse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Parse) ProtoMessage() {}

func (x *Parse) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[59]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Parse.ProtoReflect.Descriptor instead.
func (*Parse) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{59}
}

func (x *Parse) GetInput() *Relation {
	if x != nil {
		return x.Input
	}
	return nil
}

func (x *Parse) GetFormat() Parse_ParseFormat {
	if x != nil {
		return x.Format
	}
	return Parse_PARSE_FORMAT_UNSPECIFIED
}

func (x *Parse) GetSchema() *DataType {
	if x != nil {
		return x.Schema
	}
	return nil
}

func (x *Parse) GetOptions() map[string]string {
	if x != nil {
		return x.Options
	}
	return nil
}

// Relation of type [[AsOfJoin]].
//
// `left` and `right` must be present.
type AsOfJoin struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Left input relation for a Join.
	Left *Relation `protobuf:"bytes,1,opt,name=left,proto3" json:"left,omitempty"`
	// (Required) Right input relation for a Join.
	Right *Relation `protobuf:"bytes,2,opt,name=right,proto3" json:"right,omitempty"`
	// (Required) Field to join on in left DataFrame
	LeftAsOf *Expression `protobuf:"bytes,3,opt,name=left_as_of,json=leftAsOf,proto3" json:"left_as_of,omitempty"`
	// (Required) Field to join on in right DataFrame
	RightAsOf *Expression `protobuf:"bytes,4,opt,name=right_as_of,json=rightAsOf,proto3" json:"right_as_of,omitempty"`
	// (Optional) The join condition. Could be unset when `using_columns` is utilized.
	//
	// This field does not co-exist with using_columns.
	JoinExpr *Expression `protobuf:"bytes,5,opt,name=join_expr,json=joinExpr,proto3" json:"join_expr,omitempty"`
	// Optional. using_columns provides a list of columns that should present on both sides of
	// the join inputs that this Join will join on. For example A JOIN B USING col_name is
	// equivalent to A JOIN B on A.col_name = B.col_name.
	//
	// This field does not co-exist with join_condition.
	UsingColumns []string `protobuf:"bytes,6,rep,name=using_columns,json=usingColumns,proto3" json:"using_columns,omitempty"`
	// (Required) The join type.
	JoinType string `protobuf:"bytes,7,opt,name=join_type,json=joinType,proto3" json:"join_type,omitempty"`
	// (Optional) The asof tolerance within this range.
	Tolerance *Expression `protobuf:"bytes,8,opt,name=tolerance,proto3" json:"tolerance,omitempty"`
	// (Required) Whether allow matching with the same value or not.
	AllowExactMatches bool `protobuf:"varint,9,opt,name=allow_exact_matches,json=allowExactMatches,proto3" json:"allow_exact_matches,omitempty"`
	// (Required) Whether to search for prior, subsequent, or closest matches.
	Direction string `protobuf:"bytes,10,opt,name=direction,proto3" json:"direction,omitempty"`
}

func (x *AsOfJoin) Reset() {
	*x = AsOfJoin{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[60]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *AsOfJoin) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*AsOfJoin) ProtoMessage() {}

func (x *AsOfJoin) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[60]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use AsOfJoin.ProtoReflect.Descriptor instead.
func (*AsOfJoin) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{60}
}

func (x *AsOfJoin) GetLeft() *Relation {
	if x != nil {
		return x.Left
	}
	return nil
}

func (x *AsOfJoin) GetRight() *Relation {
	if x != nil {
		return x.Right
	}
	return nil
}

func (x *AsOfJoin) GetLeftAsOf() *Expression {
	if x != nil {
		return x.LeftAsOf
	}
	return nil
}

func (x *AsOfJoin) GetRightAsOf() *Expression {
	if x != nil {
		return x.RightAsOf
	}
	return nil
}

func (x *AsOfJoin) GetJoinExpr() *Expression {
	if x != nil {
		return x.JoinExpr
	}
	return nil
}

func (x *AsOfJoin) GetUsingColumns() []string {
	if x != nil {
		return x.UsingColumns
	}
	return nil
}

func (x *AsOfJoin) GetJoinType() string {
	if x != nil {
		return x.JoinType
	}
	return ""
}

func (x *AsOfJoin) GetTolerance() *Expression {
	if x != nil {
		return x.Tolerance
	}
	return nil
}

func (x *AsOfJoin) GetAllowExactMatches() bool {
	if x != nil {
		return x.AllowExactMatches
	}
	return false
}

func (x *AsOfJoin) GetDirection() string {
	if x != nil {
		return x.Direction
	}
	return ""
}

// Relation of type [[LateralJoin]].
//
// `left` and `right` must be present.
// NOTE(review): machine-generated protobuf bindings (protoc-gen-go). Do not
// hand-edit logic here — regenerate from spark/connect/relations.proto.
type LateralJoin struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Left input relation for a Join.
	Left *Relation `protobuf:"bytes,1,opt,name=left,proto3" json:"left,omitempty"`
	// (Required) Right input relation for a Join.
	Right *Relation `protobuf:"bytes,2,opt,name=right,proto3" json:"right,omitempty"`
	// (Optional) The join condition.
	JoinCondition *Expression `protobuf:"bytes,3,opt,name=join_condition,json=joinCondition,proto3" json:"join_condition,omitempty"`
	// (Required) The join type.
	JoinType Join_JoinType `protobuf:"varint,4,opt,name=join_type,json=joinType,proto3,enum=spark.connect.Join_JoinType" json:"join_type,omitempty"`
}

func (x *LateralJoin) Reset() {
	*x = LateralJoin{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[61]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *LateralJoin) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*LateralJoin) ProtoMessage() {}

// ProtoReflect lazily attaches the message type info on first use when the
// unsafe fast path is enabled.
func (x *LateralJoin) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[61]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use LateralJoin.ProtoReflect.Descriptor instead.
func (*LateralJoin) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{61}
}

// Generated getters below are nil-receiver safe and return the zero value
// when the receiver or field is unset.
func (x *LateralJoin) GetLeft() *Relation {
	if x != nil {
		return x.Left
	}
	return nil
}

func (x *LateralJoin) GetRight() *Relation {
	if x != nil {
		return x.Right
	}
	return nil
}

func (x *LateralJoin) GetJoinCondition() *Expression {
	if x != nil {
		return x.JoinCondition
	}
	return nil
}

func (x *LateralJoin) GetJoinType() Join_JoinType {
	if x != nil {
		return x.JoinType
	}
	return Join_JOIN_TYPE_UNSPECIFIED
}

// Relation to represent transform(input) of the operator
// which could be a cached model or a new transformer
type MlRelation_Transform struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to Operator:
	//
	//	*MlRelation_Transform_ObjRef
	//	*MlRelation_Transform_Transformer
	Operator isMlRelation_Transform_Operator `protobuf_oneof:"operator"`
	// the input dataframe
	Input *Relation `protobuf:"bytes,3,opt,name=input,proto3" json:"input,omitempty"`
	// the operator specific parameters
	Params *MlParams `protobuf:"bytes,4,opt,name=params,proto3" json:"params,omitempty"`
}

func (x *MlRelation_Transform) Reset() {
	*x = MlRelation_Transform{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[62]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *MlRelation_Transform) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*MlRelation_Transform) ProtoMessage() {}

func (x *MlRelation_Transform) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[62]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MlRelation_Transform.ProtoReflect.Descriptor instead.
func (*MlRelation_Transform) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{1, 0}
}

// GetOperator returns the raw oneof wrapper, or nil when unset.
func (m *MlRelation_Transform) GetOperator() isMlRelation_Transform_Operator {
	if m != nil {
		return m.Operator
	}
	return nil
}

// GetObjRef returns the oneof payload only when the obj_ref case is set.
func (x *MlRelation_Transform) GetObjRef() *ObjectRef {
	if x, ok := x.GetOperator().(*MlRelation_Transform_ObjRef); ok {
		return x.ObjRef
	}
	return nil
}

// GetTransformer returns the oneof payload only when the transformer case is set.
func (x *MlRelation_Transform) GetTransformer() *MlOperator {
	if x, ok := x.GetOperator().(*MlRelation_Transform_Transformer); ok {
		return x.Transformer
	}
	return nil
}

func (x *MlRelation_Transform) GetInput() *Relation {
	if x != nil {
		return x.Input
	}
	return nil
}

func (x *MlRelation_Transform) GetParams() *MlParams {
	if x != nil {
		return x.Params
	}
	return nil
}

type isMlRelation_Transform_Operator interface {
	isMlRelation_Transform_Operator()
}

type MlRelation_Transform_ObjRef struct {
	// Object reference
	ObjRef *ObjectRef `protobuf:"bytes,1,opt,name=obj_ref,json=objRef,proto3,oneof"`
}

type MlRelation_Transform_Transformer struct {
	// Could be an ML transformer like VectorAssembler
	Transformer *MlOperator `protobuf:"bytes,2,opt,name=transformer,proto3,oneof"`
}

func (*MlRelation_Transform_ObjRef) isMlRelation_Transform_Operator() {}

func (*MlRelation_Transform_Transformer) isMlRelation_Transform_Operator() {}

// Represents a method with inclusion of method name and its arguments
type Fetch_Method struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) the method name
	Method string `protobuf:"bytes,1,opt,name=method,proto3" json:"method,omitempty"`
	// (Optional) the arguments of the method
	Args []*Fetch_Method_Args `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"`
}

func (x *Fetch_Method) Reset() {
	*x = Fetch_Method{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[63]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}
// NOTE(review): machine-generated protobuf bindings (protoc-gen-go). Do not
// hand-edit logic here — regenerate from spark/connect/relations.proto.
func (x *Fetch_Method) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Fetch_Method) ProtoMessage() {}

// ProtoReflect lazily attaches the message type info on first use when the
// unsafe fast path is enabled.
func (x *Fetch_Method) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[63]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Fetch_Method.ProtoReflect.Descriptor instead.
func (*Fetch_Method) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{2, 0}
}

// GetMethod returns the method name; nil-receiver safe.
func (x *Fetch_Method) GetMethod() string {
	if x != nil {
		return x.Method
	}
	return ""
}

func (x *Fetch_Method) GetArgs() []*Fetch_Method_Args {
	if x != nil {
		return x.Args
	}
	return nil
}

type Fetch_Method_Args struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to ArgsType:
	//
	//	*Fetch_Method_Args_Param
	//	*Fetch_Method_Args_Input
	ArgsType isFetch_Method_Args_ArgsType `protobuf_oneof:"args_type"`
}

func (x *Fetch_Method_Args) Reset() {
	*x = Fetch_Method_Args{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[64]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Fetch_Method_Args) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Fetch_Method_Args) ProtoMessage() {}

func (x *Fetch_Method_Args) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[64]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Fetch_Method_Args.ProtoReflect.Descriptor instead.
// NOTE(review): machine-generated protobuf bindings (protoc-gen-go). Do not
// hand-edit logic here — regenerate from spark/connect/relations.proto.
func (*Fetch_Method_Args) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{2, 0, 0}
}

// GetArgsType returns the raw oneof wrapper, or nil when unset.
func (m *Fetch_Method_Args) GetArgsType() isFetch_Method_Args_ArgsType {
	if m != nil {
		return m.ArgsType
	}
	return nil
}

// GetParam returns the oneof payload only when the param case is set.
func (x *Fetch_Method_Args) GetParam() *Expression_Literal {
	if x, ok := x.GetArgsType().(*Fetch_Method_Args_Param); ok {
		return x.Param
	}
	return nil
}

// GetInput returns the oneof payload only when the input case is set.
func (x *Fetch_Method_Args) GetInput() *Relation {
	if x, ok := x.GetArgsType().(*Fetch_Method_Args_Input); ok {
		return x.Input
	}
	return nil
}

type isFetch_Method_Args_ArgsType interface {
	isFetch_Method_Args_ArgsType()
}

type Fetch_Method_Args_Param struct {
	Param *Expression_Literal `protobuf:"bytes,1,opt,name=param,proto3,oneof"`
}

type Fetch_Method_Args_Input struct {
	Input *Relation `protobuf:"bytes,2,opt,name=input,proto3,oneof"`
}

func (*Fetch_Method_Args_Param) isFetch_Method_Args_ArgsType() {}

func (*Fetch_Method_Args_Input) isFetch_Method_Args_ArgsType() {}

type Read_NamedTable struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) Unparsed identifier for the table.
	UnparsedIdentifier string `protobuf:"bytes,1,opt,name=unparsed_identifier,json=unparsedIdentifier,proto3" json:"unparsed_identifier,omitempty"`
	// Options for the named table. The map key is case insensitive.
	Options map[string]string `protobuf:"bytes,2,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *Read_NamedTable) Reset() {
	*x = Read_NamedTable{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[67]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Read_NamedTable) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Read_NamedTable) ProtoMessage() {}

// ProtoReflect lazily attaches the message type info on first use when the
// unsafe fast path is enabled.
func (x *Read_NamedTable) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[67]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Read_NamedTable.ProtoReflect.Descriptor instead.
func (*Read_NamedTable) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{7, 0}
}

func (x *Read_NamedTable) GetUnparsedIdentifier() string {
	if x != nil {
		return x.UnparsedIdentifier
	}
	return ""
}

func (x *Read_NamedTable) GetOptions() map[string]string {
	if x != nil {
		return x.Options
	}
	return nil
}

type Read_DataSource struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Optional) Supported formats include: parquet, orc, text, json, parquet, csv, avro.
	//
	// If not set, the value from SQL conf 'spark.sql.sources.default' will be used.
	Format *string `protobuf:"bytes,1,opt,name=format,proto3,oneof" json:"format,omitempty"`
	// (Optional) If not set, Spark will infer the schema.
	//
	// This schema string should be either DDL-formatted or JSON-formatted.
	Schema *string `protobuf:"bytes,2,opt,name=schema,proto3,oneof" json:"schema,omitempty"`
	// Options for the data source. The context of this map varies based on the
	// data source format. This options could be empty for valid data source format.
	// The map key is case insensitive.
	Options map[string]string `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// (Optional) A list of path for file-system backed data sources.
	Paths []string `protobuf:"bytes,4,rep,name=paths,proto3" json:"paths,omitempty"`
	// (Optional) Condition in the where clause for each partition.
	//
	// This is only supported by the JDBC data source.
	Predicates []string `protobuf:"bytes,5,rep,name=predicates,proto3" json:"predicates,omitempty"`
}

func (x *Read_DataSource) Reset() {
	*x = Read_DataSource{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[68]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Read_DataSource) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Read_DataSource) ProtoMessage() {}

func (x *Read_DataSource) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[68]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Read_DataSource.ProtoReflect.Descriptor instead.
// NOTE(review): machine-generated protobuf bindings (protoc-gen-go). Do not
// hand-edit logic here — regenerate from spark/connect/relations.proto.
func (*Read_DataSource) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{7, 1}
}

// GetFormat dereferences the optional format field, returning "" when the
// receiver or the field pointer is nil.
func (x *Read_DataSource) GetFormat() string {
	if x != nil && x.Format != nil {
		return *x.Format
	}
	return ""
}

// GetSchema dereferences the optional schema field, returning "" when the
// receiver or the field pointer is nil.
func (x *Read_DataSource) GetSchema() string {
	if x != nil && x.Schema != nil {
		return *x.Schema
	}
	return ""
}

func (x *Read_DataSource) GetOptions() map[string]string {
	if x != nil {
		return x.Options
	}
	return nil
}

func (x *Read_DataSource) GetPaths() []string {
	if x != nil {
		return x.Paths
	}
	return nil
}

func (x *Read_DataSource) GetPredicates() []string {
	if x != nil {
		return x.Predicates
	}
	return nil
}

type Join_JoinDataType struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// If the left data type is a struct.
	IsLeftStruct bool `protobuf:"varint,1,opt,name=is_left_struct,json=isLeftStruct,proto3" json:"is_left_struct,omitempty"`
	// If the right data type is a struct.
	IsRightStruct bool `protobuf:"varint,2,opt,name=is_right_struct,json=isRightStruct,proto3" json:"is_right_struct,omitempty"`
}

func (x *Join_JoinDataType) Reset() {
	*x = Join_JoinDataType{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[71]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Join_JoinDataType) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Join_JoinDataType) ProtoMessage() {}

// ProtoReflect lazily attaches the message type info on first use when the
// unsafe fast path is enabled.
func (x *Join_JoinDataType) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[71]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Join_JoinDataType.ProtoReflect.Descriptor instead.
func (*Join_JoinDataType) Descriptor() ([]byte, []int) {
	return file_spark_connect_relations_proto_rawDescGZIP(), []int{10, 0}
}

func (x *Join_JoinDataType) GetIsLeftStruct() bool {
	if x != nil {
		return x.IsLeftStruct
	}
	return false
}

func (x *Join_JoinDataType) GetIsRightStruct() bool {
	if x != nil {
		return x.IsRightStruct
	}
	return false
}

type Aggregate_Pivot struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// (Required) The column to pivot
	Col *Expression `protobuf:"bytes,1,opt,name=col,proto3" json:"col,omitempty"`
	// (Optional) List of values that will be translated to columns in the output DataFrame.
	//
	// Note that if it is empty, the server side will immediately trigger a job to collect
	// the distinct values of the column.
	Values []*Expression_Literal `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"`
}

func (x *Aggregate_Pivot) Reset() {
	*x = Aggregate_Pivot{}
	if protoimpl.UnsafeEnabled {
		mi := &file_spark_connect_relations_proto_msgTypes[72]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Aggregate_Pivot) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Aggregate_Pivot) ProtoMessage() {}

func (x *Aggregate_Pivot) ProtoReflect() protoreflect.Message {
	mi := &file_spark_connect_relations_proto_msgTypes[72]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Aggregate_Pivot.ProtoReflect.Descriptor instead.
func (*Aggregate_Pivot) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{15, 0} } func (x *Aggregate_Pivot) GetCol() *Expression { if x != nil { return x.Col } return nil } func (x *Aggregate_Pivot) GetValues() []*Expression_Literal { if x != nil { return x.Values } return nil } type Aggregate_GroupingSets struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) Individual grouping set GroupingSet []*Expression `protobuf:"bytes,1,rep,name=grouping_set,json=groupingSet,proto3" json:"grouping_set,omitempty"` } func (x *Aggregate_GroupingSets) Reset() { *x = Aggregate_GroupingSets{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[73] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Aggregate_GroupingSets) String() string { return protoimpl.X.MessageStringOf(x) } func (*Aggregate_GroupingSets) ProtoMessage() {} func (x *Aggregate_GroupingSets) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[73] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Aggregate_GroupingSets.ProtoReflect.Descriptor instead. func (*Aggregate_GroupingSets) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{15, 1} } func (x *Aggregate_GroupingSets) GetGroupingSet() []*Expression { if x != nil { return x.GroupingSet } return nil } type StatSampleBy_Fraction struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The stratum. Stratum *Expression_Literal `protobuf:"bytes,1,opt,name=stratum,proto3" json:"stratum,omitempty"` // (Required) The fraction value. Must be in [0, 1]. 
Fraction float64 `protobuf:"fixed64,2,opt,name=fraction,proto3" json:"fraction,omitempty"` } func (x *StatSampleBy_Fraction) Reset() { *x = StatSampleBy_Fraction{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[74] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *StatSampleBy_Fraction) String() string { return protoimpl.X.MessageStringOf(x) } func (*StatSampleBy_Fraction) ProtoMessage() {} func (x *StatSampleBy_Fraction) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[74] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use StatSampleBy_Fraction.ProtoReflect.Descriptor instead. func (*StatSampleBy_Fraction) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{35, 0} } func (x *StatSampleBy_Fraction) GetStratum() *Expression_Literal { if x != nil { return x.Stratum } return nil } func (x *StatSampleBy_Fraction) GetFraction() float64 { if x != nil { return x.Fraction } return 0 } type NAReplace_Replacement struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The old value. // // Only 4 data types are supported now: null, bool, double, string. OldValue *Expression_Literal `protobuf:"bytes,1,opt,name=old_value,json=oldValue,proto3" json:"old_value,omitempty"` // (Required) The new value. // // Should be of the same data type with the old value. 
NewValue *Expression_Literal `protobuf:"bytes,2,opt,name=new_value,json=newValue,proto3" json:"new_value,omitempty"` } func (x *NAReplace_Replacement) Reset() { *x = NAReplace_Replacement{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[75] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *NAReplace_Replacement) String() string { return protoimpl.X.MessageStringOf(x) } func (*NAReplace_Replacement) ProtoMessage() {} func (x *NAReplace_Replacement) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[75] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use NAReplace_Replacement.ProtoReflect.Descriptor instead. func (*NAReplace_Replacement) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{38, 0} } func (x *NAReplace_Replacement) GetOldValue() *Expression_Literal { if x != nil { return x.OldValue } return nil } func (x *NAReplace_Replacement) GetNewValue() *Expression_Literal { if x != nil { return x.NewValue } return nil } type WithColumnsRenamed_Rename struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The existing column name. ColName string `protobuf:"bytes,1,opt,name=col_name,json=colName,proto3" json:"col_name,omitempty"` // (Required) The new column name. 
NewColName string `protobuf:"bytes,2,opt,name=new_col_name,json=newColName,proto3" json:"new_col_name,omitempty"` } func (x *WithColumnsRenamed_Rename) Reset() { *x = WithColumnsRenamed_Rename{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[77] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *WithColumnsRenamed_Rename) String() string { return protoimpl.X.MessageStringOf(x) } func (*WithColumnsRenamed_Rename) ProtoMessage() {} func (x *WithColumnsRenamed_Rename) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[77] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use WithColumnsRenamed_Rename.ProtoReflect.Descriptor instead. func (*WithColumnsRenamed_Rename) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{40, 1} } func (x *WithColumnsRenamed_Rename) GetColName() string { if x != nil { return x.ColName } return "" } func (x *WithColumnsRenamed_Rename) GetNewColName() string { if x != nil { return x.NewColName } return "" } type Unpivot_Values struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Values []*Expression `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` } func (x *Unpivot_Values) Reset() { *x = Unpivot_Values{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_relations_proto_msgTypes[78] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Unpivot_Values) String() string { return protoimpl.X.MessageStringOf(x) } func (*Unpivot_Values) ProtoMessage() {} func (x *Unpivot_Values) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_relations_proto_msgTypes[78] if protoimpl.UnsafeEnabled && x != nil { ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Unpivot_Values.ProtoReflect.Descriptor instead. func (*Unpivot_Values) Descriptor() ([]byte, []int) { return file_spark_connect_relations_proto_rawDescGZIP(), []int{44, 0} } func (x *Unpivot_Values) GetValues() []*Expression { if x != nil { return x.Values } return nil } var File_spark_connect_relations_proto protoreflect.FileDescriptor var file_spark_connect_relations_proto_rawDesc = []byte{ 0x0a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1a, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x6d, 0x6c, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x9c, 0x1d, 0x0a, 0x08, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x06, 0x63, 0x6f, 
0x6d, 0x6d, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x52, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x12, 0x29, 0x0a, 0x04, 0x72, 0x65, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x48, 0x00, 0x52, 0x04, 0x72, 0x65, 0x61, 0x64, 0x12, 0x32, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x00, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x29, 0x0a, 0x04, 0x6a, 0x6f, 0x69, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4a, 0x6f, 0x69, 0x6e, 0x48, 0x00, 0x52, 0x04, 0x6a, 0x6f, 0x69, 0x6e, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x65, 0x74, 0x5f, 0x6f, 0x70, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x05, 0x73, 0x65, 0x74, 0x4f, 0x70, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x04, 0x73, 0x6f, 0x72, 0x74, 
0x12, 0x2c, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x48, 0x00, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x38, 0x0a, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x26, 0x0a, 0x03, 0x73, 0x71, 0x6c, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x51, 0x4c, 0x48, 0x00, 0x52, 0x03, 0x73, 0x71, 0x6c, 0x12, 0x45, 0x0a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0d, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x06, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x48, 0x00, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x3e, 0x0a, 0x0b, 0x64, 0x65, 0x64, 0x75, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 
0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x65, 0x64, 0x75, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x64, 0x65, 0x64, 0x75, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x12, 0x2c, 0x0a, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x45, 0x0a, 0x0e, 0x73, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x73, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x12, 0x3e, 0x0a, 0x0b, 0x72, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0b, 0x72, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2a, 0x0a, 0x05, 0x74, 0x6f, 0x5f, 0x64, 0x66, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x6f, 0x44, 0x46, 0x48, 0x00, 0x52, 0x04, 0x74, 0x6f, 0x44, 0x66, 0x12, 0x55, 0x0a, 0x14, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x5f, 0x72, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 
0x64, 0x48, 0x00, 0x52, 0x12, 0x77, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x73, 0x68, 0x6f, 0x77, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x68, 0x6f, 0x77, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x68, 0x6f, 0x77, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x29, 0x0a, 0x04, 0x64, 0x72, 0x6f, 0x70, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x72, 0x6f, 0x70, 0x48, 0x00, 0x52, 0x04, 0x64, 0x72, 0x6f, 0x70, 0x12, 0x29, 0x0a, 0x04, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x16, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x61, 0x69, 0x6c, 0x48, 0x00, 0x52, 0x04, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x3f, 0x0a, 0x0c, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0b, 0x77, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x68, 0x69, 0x6e, 0x74, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x48, 0x69, 0x6e, 0x74, 0x48, 0x00, 0x52, 0x04, 0x68, 0x69, 0x6e, 0x74, 0x12, 0x32, 0x0a, 0x07, 0x75, 0x6e, 0x70, 0x69, 0x76, 0x6f, 0x74, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x6e, 0x70, 0x69, 0x76, 0x6f, 0x74, 0x48, 0x00, 0x52, 0x07, 0x75, 0x6e, 0x70, 0x69, 0x76, 0x6f, 0x74, 0x12, 0x36, 
0x0a, 0x09, 0x74, 0x6f, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x6f, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x48, 0x00, 0x52, 0x08, 0x74, 0x6f, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x64, 0x0a, 0x19, 0x72, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x79, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x17, 0x72, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x45, 0x0a, 0x0e, 0x6d, 0x61, 0x70, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x1c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x61, 0x70, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x6d, 0x61, 0x70, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x48, 0x0a, 0x0f, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x1d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x2c, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 
0x50, 0x61, 0x72, 0x73, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x09, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x1f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x08, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x4d, 0x61, 0x70, 0x12, 0x3d, 0x0a, 0x0c, 0x63, 0x6f, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x20, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6f, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4d, 0x61, 0x70, 0x12, 0x45, 0x0a, 0x0e, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x77, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x18, 0x21, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x69, 0x74, 0x68, 0x57, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x48, 0x00, 0x52, 0x0d, 0x77, 0x69, 0x74, 0x68, 0x57, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x12, 0x63, 0x0a, 0x1a, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x5f, 0x69, 0x6e, 0x5f, 0x70, 0x61, 0x6e, 0x64, 0x61, 0x73, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x22, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x49, 0x6e, 0x50, 0x61, 0x6e, 0x64, 0x61, 0x73, 0x57, 0x69, 0x74, 0x68, 0x53, 0x74, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, 0x16, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x49, 0x6e, 0x50, 0x61, 0x6e, 0x64, 0x61, 0x73, 0x57, 0x69, 0x74, 0x68, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0b, 0x68, 0x74, 0x6d, 0x6c, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x23, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 
0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x48, 0x74, 0x6d, 0x6c, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x0a, 0x68, 0x74, 0x6d, 0x6c, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x58, 0x0a, 0x15, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x24, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x13, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x25, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x14, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x8e, 0x01, 0x0a, 0x29, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x26, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x24, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 
0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x37, 0x0a, 0x0a, 0x61, 0x73, 0x5f, 0x6f, 0x66, 0x5f, 0x6a, 0x6f, 0x69, 0x6e, 0x18, 0x27, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x73, 0x4f, 0x66, 0x4a, 0x6f, 0x69, 0x6e, 0x48, 0x00, 0x52, 0x08, 0x61, 0x73, 0x4f, 0x66, 0x4a, 0x6f, 0x69, 0x6e, 0x12, 0x85, 0x01, 0x0a, 0x26, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x28, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x48, 0x00, 0x52, 0x21, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0e, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x29, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x69, 0x74, 0x68, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x77, 0x69, 0x74, 0x68, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x18, 0x2a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x72, 0x61, 0x6e, 
0x73, 0x70, 0x6f, 0x73, 0x65, 0x48, 0x00, 0x52, 0x09, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x12, 0x77, 0x0a, 0x20, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x64, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x2b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x1d, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x0c, 0x6c, 0x61, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x5f, 0x6a, 0x6f, 0x69, 0x6e, 0x18, 0x2c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4a, 0x6f, 0x69, 0x6e, 0x48, 0x00, 0x52, 0x0b, 0x6c, 0x61, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4a, 0x6f, 0x69, 0x6e, 0x12, 0x30, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x6c, 0x5f, 0x6e, 0x61, 0x18, 0x5a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4e, 0x41, 0x46, 0x69, 0x6c, 0x6c, 0x48, 0x00, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x6c, 0x4e, 0x61, 0x12, 0x30, 0x0a, 0x07, 0x64, 0x72, 0x6f, 0x70, 0x5f, 0x6e, 0x61, 0x18, 0x5b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4e, 0x41, 0x44, 0x72, 0x6f, 0x70, 0x48, 0x00, 0x52, 0x06, 0x64, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x12, 0x34, 0x0a, 0x07, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x18, 0x5c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 
0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4e, 0x41, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x48, 0x00, 0x52, 0x07, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x12, 0x36, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x64, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x48, 0x00, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x39, 0x0a, 0x08, 0x63, 0x72, 0x6f, 0x73, 0x73, 0x74, 0x61, 0x62, 0x18, 0x65, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x74, 0x61, 0x62, 0x48, 0x00, 0x52, 0x08, 0x63, 0x72, 0x6f, 0x73, 0x73, 0x74, 0x61, 0x62, 0x12, 0x39, 0x0a, 0x08, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x18, 0x66, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x48, 0x00, 0x52, 0x08, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x12, 0x2a, 0x0a, 0x03, 0x63, 0x6f, 0x76, 0x18, 0x67, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x43, 0x6f, 0x76, 0x48, 0x00, 0x52, 0x03, 0x63, 0x6f, 0x76, 0x12, 0x2d, 0x0a, 0x04, 0x63, 0x6f, 0x72, 0x72, 0x18, 0x68, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x43, 0x6f, 0x72, 0x72, 0x48, 0x00, 0x52, 0x04, 0x63, 0x6f, 0x72, 0x72, 0x12, 0x4c, 0x0a, 0x0f, 0x61, 0x70, 0x70, 0x72, 0x6f, 0x78, 0x5f, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x18, 0x69, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 
0x74, 0x61, 0x74, 0x41, 0x70, 0x70, 0x72, 0x6f, 0x78, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0e, 0x61, 0x70, 0x70, 0x72, 0x6f, 0x78, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x12, 0x3d, 0x0a, 0x0a, 0x66, 0x72, 0x65, 0x71, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x6a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x46, 0x72, 0x65, 0x71, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x48, 0x00, 0x52, 0x09, 0x66, 0x72, 0x65, 0x71, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x3a, 0x0a, 0x09, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x62, 0x79, 0x18, 0x6b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x42, 0x79, 0x48, 0x00, 0x52, 0x08, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x42, 0x79, 0x12, 0x33, 0x0a, 0x07, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x18, 0xc8, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x07, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12, 0x3d, 0x0a, 0x0b, 0x6d, 0x6c, 0x5f, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xac, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0a, 0x6d, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0xe6, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x33, 0x0a, 0x07, 
0x75, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x48, 0x00, 0x52, 0x07, 0x75, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xf8, 0x02, 0x0a, 0x0a, 0x4d, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, 0x0a, 0x09, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x48, 0x00, 0x52, 0x09, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x12, 0x2c, 0x0a, 0x05, 0x66, 0x65, 0x74, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x66, 0x65, 0x74, 0x63, 0x68, 0x1a, 0xeb, 0x01, 0x0a, 0x09, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x12, 0x33, 0x0a, 0x07, 0x6f, 0x62, 0x6a, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x66, 0x48, 0x00, 0x52, 0x06, 0x6f, 0x62, 0x6a, 0x52, 0x65, 0x66, 0x12, 0x3d, 0x0a, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x03, 0x20, 
0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4d, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x42, 0x0a, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xcb, 0x02, 0x0a, 0x05, 0x46, 0x65, 0x74, 0x63, 0x68, 0x12, 0x31, 0x0a, 0x07, 0x6f, 0x62, 0x6a, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x66, 0x52, 0x06, 0x6f, 0x62, 0x6a, 0x52, 0x65, 0x66, 0x12, 0x35, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x2e, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x73, 0x1a, 0xd7, 0x01, 0x0a, 0x06, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, 0x34, 0x0a, 0x04, 0x61, 0x72, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x2e, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x2e, 0x41, 0x72, 0x67, 0x73, 0x52, 0x04, 0x61, 0x72, 0x67, 0x73, 0x1a, 0x7f, 0x0a, 0x04, 0x41, 0x72, 0x67, 0x73, 0x12, 0x39, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x2f, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x42, 0x0b, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x73, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x09, 0x0a, 0x07, 0x55, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x22, 0x8e, 0x01, 0x0a, 0x0e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x1c, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x06, 0x70, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x2d, 0x0a, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x22, 0xde, 0x03, 0x0a, 0x03, 0x53, 0x51, 0x4c, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, 0x34, 0x0a, 0x04, 0x61, 0x72, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x51, 0x4c, 0x2e, 0x41, 0x72, 0x67, 0x73, 
0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, 0x61, 0x72, 0x67, 0x73, 0x12, 0x40, 0x0a, 0x08, 0x70, 0x6f, 0x73, 0x5f, 0x61, 0x72, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x70, 0x6f, 0x73, 0x41, 0x72, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0f, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x51, 0x4c, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x3e, 0x0a, 0x0d, 0x70, 0x6f, 0x73, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x70, 0x6f, 0x73, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x5a, 0x0a, 0x09, 0x41, 0x72, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x37, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5c, 0x0a, 0x13, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 
0x6e, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2f, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x75, 0x0a, 0x0d, 0x57, 0x69, 0x74, 0x68, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2b, 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x12, 0x37, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0x97, 0x05, 0x0a, 0x04, 0x52, 0x65, 0x61, 0x64, 0x12, 0x41, 0x0a, 0x0b, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x41, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 
0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x73, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x69, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x1a, 0xc0, 0x01, 0x0a, 0x0a, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x2f, 0x0a, 0x13, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x45, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x95, 0x02, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1b, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x12, 0x45, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x70, 
0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x70, 0x61, 0x74, 0x68, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x42, 0x0b, 0x0a, 0x09, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x75, 0x0a, 0x07, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x3b, 0x0a, 0x0b, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x70, 0x0a, 0x06, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 
0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x37, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x95, 0x05, 0x0a, 0x04, 0x4a, 0x6f, 0x69, 0x6e, 0x12, 0x2b, 0x0a, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x12, 0x40, 0x0a, 0x0e, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x6a, 0x6f, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x39, 0x0a, 0x09, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4a, 0x6f, 0x69, 0x6e, 0x2e, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x75, 0x73, 0x69, 0x6e, 0x67, 
0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x4b, 0x0a, 0x0e, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4a, 0x6f, 0x69, 0x6e, 0x2e, 0x4a, 0x6f, 0x69, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x00, 0x52, 0x0c, 0x6a, 0x6f, 0x69, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x1a, 0x5c, 0x0a, 0x0c, 0x4a, 0x6f, 0x69, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x69, 0x73, 0x5f, 0x6c, 0x65, 0x66, 0x74, 0x5f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x69, 0x73, 0x4c, 0x65, 0x66, 0x74, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x69, 0x73, 0x5f, 0x72, 0x69, 0x67, 0x68, 0x74, 0x5f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, 0x73, 0x52, 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x22, 0xd0, 0x01, 0x0a, 0x08, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x4e, 0x45, 0x52, 0x10, 0x01, 0x12, 0x18, 0x0a, 0x14, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x55, 0x4c, 0x4c, 0x5f, 0x4f, 0x55, 0x54, 0x45, 0x52, 0x10, 0x02, 0x12, 0x18, 0x0a, 0x14, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4c, 0x45, 0x46, 0x54, 0x5f, 0x4f, 0x55, 0x54, 0x45, 0x52, 0x10, 0x03, 0x12, 0x19, 0x0a, 0x15, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x52, 0x49, 0x47, 0x48, 0x54, 0x5f, 0x4f, 
0x55, 0x54, 0x45, 0x52, 0x10, 0x04, 0x12, 0x17, 0x0a, 0x13, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4c, 0x45, 0x46, 0x54, 0x5f, 0x41, 0x4e, 0x54, 0x49, 0x10, 0x05, 0x12, 0x17, 0x0a, 0x13, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4c, 0x45, 0x46, 0x54, 0x5f, 0x53, 0x45, 0x4d, 0x49, 0x10, 0x06, 0x12, 0x13, 0x0a, 0x0f, 0x4a, 0x4f, 0x49, 0x4e, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x52, 0x4f, 0x53, 0x53, 0x10, 0x07, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xdf, 0x03, 0x0a, 0x0c, 0x53, 0x65, 0x74, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x0a, 0x6c, 0x65, 0x66, 0x74, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x6c, 0x65, 0x66, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x38, 0x0a, 0x0b, 0x72, 0x69, 0x67, 0x68, 0x74, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x72, 0x69, 0x67, 0x68, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x45, 0x0a, 0x0b, 0x73, 0x65, 0x74, 0x5f, 0x6f, 0x70, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x74, 0x4f, 0x70, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x73, 0x65, 0x74, 0x4f, 0x70, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x06, 0x69, 0x73, 0x5f, 0x61, 0x6c, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x05, 0x69, 0x73, 0x41, 0x6c, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x1c, 0x0a, 0x07, 0x62, 0x79, 0x5f, 
0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x48, 0x01, 0x52, 0x06, 0x62, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x37, 0x0a, 0x15, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x02, 0x52, 0x13, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x4d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x22, 0x72, 0x0a, 0x09, 0x53, 0x65, 0x74, 0x4f, 0x70, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x45, 0x54, 0x5f, 0x4f, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x19, 0x0a, 0x15, 0x53, 0x45, 0x54, 0x5f, 0x4f, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x53, 0x45, 0x43, 0x54, 0x10, 0x01, 0x12, 0x15, 0x0a, 0x11, 0x53, 0x45, 0x54, 0x5f, 0x4f, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x45, 0x54, 0x5f, 0x4f, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x58, 0x43, 0x45, 0x50, 0x54, 0x10, 0x03, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x69, 0x73, 0x5f, 0x61, 0x6c, 0x6c, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x62, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x18, 0x0a, 0x16, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x4c, 0x0a, 0x05, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x4f, 0x0a, 0x06, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 
0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x22, 0x4b, 0x0a, 0x04, 0x54, 0x61, 0x69, 0x6c, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0xfe, 0x05, 0x0a, 0x09, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x41, 0x0a, 0x0a, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x54, 0x79, 0x70, 0x65, 0x12, 0x4c, 0x0a, 0x14, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 
0x52, 0x13, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4e, 0x0a, 0x15, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x14, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x34, 0x0a, 0x05, 0x70, 0x69, 0x76, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x2e, 0x50, 0x69, 0x76, 0x6f, 0x74, 0x52, 0x05, 0x70, 0x69, 0x76, 0x6f, 0x74, 0x12, 0x4a, 0x0a, 0x0d, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x65, 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x74, 0x73, 0x52, 0x0c, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x74, 0x73, 0x1a, 0x6f, 0x0a, 0x05, 0x50, 0x69, 0x76, 0x6f, 0x74, 0x12, 0x2b, 0x0a, 0x03, 0x63, 0x6f, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x63, 0x6f, 0x6c, 0x12, 0x39, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 
0x72, 0x61, 0x6c, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x4c, 0x0a, 0x0c, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x74, 0x73, 0x12, 0x3c, 0x0a, 0x0c, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x74, 0x22, 0x9f, 0x01, 0x0a, 0x09, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x16, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x16, 0x0a, 0x12, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x42, 0x59, 0x10, 0x01, 0x12, 0x15, 0x0a, 0x11, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x52, 0x4f, 0x4c, 0x4c, 0x55, 0x50, 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x55, 0x42, 0x45, 0x10, 0x03, 0x12, 0x14, 0x0a, 0x10, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x49, 0x56, 0x4f, 0x54, 0x10, 0x04, 0x12, 0x1c, 0x0a, 0x18, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x49, 0x4e, 0x47, 0x5f, 0x53, 0x45, 0x54, 0x53, 0x10, 0x05, 0x22, 0xa0, 0x01, 0x0a, 0x04, 0x53, 0x6f, 0x72, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x39, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 
0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x20, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, 0x69, 0x73, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x69, 0x73, 0x5f, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x22, 0x8d, 0x01, 0x0a, 0x04, 0x44, 0x72, 0x6f, 0x70, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x33, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x22, 0xf0, 0x01, 0x0a, 0x0b, 0x44, 0x65, 0x64, 0x75, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x13, 0x61, 0x6c, 0x6c, 0x5f, 0x63, 0x6f, 
0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x5f, 0x61, 0x73, 0x5f, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x10, 0x61, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x41, 0x73, 0x4b, 0x65, 0x79, 0x73, 0x88, 0x01, 0x01, 0x12, 0x2e, 0x0a, 0x10, 0x77, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x5f, 0x77, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x48, 0x01, 0x52, 0x0f, 0x77, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x57, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x88, 0x01, 0x01, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, 0x6c, 0x6c, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x5f, 0x61, 0x73, 0x5f, 0x6b, 0x65, 0x79, 0x73, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x5f, 0x77, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x22, 0x59, 0x0a, 0x0d, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x17, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0x48, 0x0a, 0x13, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x4a, 0x04, 0x08, 0x01, 0x10, 0x02, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x52, 0x09, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0x37, 0x0a, 0x14, 0x43, 0x61, 0x63, 0x68, 0x65, 0x64, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x6c, 0x61, 0x74, 
0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0x91, 0x02, 0x0a, 0x06, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x5f, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x70, 0x65, 0x72, 0x5f, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x75, 0x70, 0x70, 0x65, 0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x2e, 0x0a, 0x10, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0f, 0x77, 0x69, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x12, 0x17, 0x0a, 0x04, 0x73, 0x65, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x48, 0x01, 0x52, 0x04, 0x73, 0x65, 0x65, 0x64, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x13, 0x64, 0x65, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x69, 0x63, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x12, 0x64, 0x65, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x69, 0x63, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x73, 0x65, 0x65, 0x64, 0x22, 0x91, 0x01, 0x0a, 0x05, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 
0x74, 0x88, 0x01, 0x01, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x74, 0x65, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12, 0x2a, 0x0a, 0x0e, 0x6e, 0x75, 0x6d, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x0d, 0x6e, 0x75, 0x6d, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x72, 0x0a, 0x0d, 0x53, 0x75, 0x62, 0x71, 0x75, 0x65, 0x72, 0x79, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x71, 0x75, 0x61, 0x6c, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x71, 0x75, 0x61, 0x6c, 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, 0x8e, 0x01, 0x0a, 0x0b, 0x52, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x6e, 0x75, 0x6d, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x6e, 0x75, 0x6d, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 
0x12, 0x1d, 0x0a, 0x07, 0x73, 0x68, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x07, 0x73, 0x68, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x73, 0x68, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x22, 0x8e, 0x01, 0x0a, 0x0a, 0x53, 0x68, 0x6f, 0x77, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x75, 0x6d, 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x6e, 0x75, 0x6d, 0x52, 0x6f, 0x77, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x76, 0x65, 0x72, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x76, 0x65, 0x72, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x22, 0x72, 0x0a, 0x0a, 0x48, 0x74, 0x6d, 0x6c, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x75, 0x6d, 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x6e, 0x75, 0x6d, 0x52, 0x6f, 0x77, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x22, 0x5c, 0x0a, 0x0b, 0x53, 0x74, 0x61, 0x74, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 
0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x51, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x74, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x22, 0x65, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x74, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x74, 0x61, 0x62, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x31, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x31, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x32, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x32, 0x22, 0x60, 0x0a, 0x07, 0x53, 0x74, 0x61, 0x74, 0x43, 0x6f, 0x76, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x31, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x31, 0x12, 0x12, 0x0a, 0x04, 
0x63, 0x6f, 0x6c, 0x32, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x32, 0x22, 0x89, 0x01, 0x0a, 0x08, 0x53, 0x74, 0x61, 0x74, 0x43, 0x6f, 0x72, 0x72, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x31, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x31, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x32, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x32, 0x12, 0x1b, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x22, 0xa4, 0x01, 0x0a, 0x12, 0x53, 0x74, 0x61, 0x74, 0x41, 0x70, 0x70, 0x72, 0x6f, 0x78, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0d, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x7d, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x74, 0x46, 0x72, 
0x65, 0x71, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x07, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x22, 0xb5, 0x02, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x74, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x42, 0x79, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x2b, 0x0a, 0x03, 0x63, 0x6f, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x63, 0x6f, 0x6c, 0x12, 0x42, 0x0a, 0x09, 0x66, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x42, 0x79, 0x2e, 0x46, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x66, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x17, 0x0a, 0x04, 0x73, 0x65, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x04, 0x73, 0x65, 0x65, 0x64, 0x88, 0x01, 0x01, 0x1a, 0x63, 0x0a, 0x08, 0x46, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3b, 
0x0a, 0x07, 0x73, 0x74, 0x72, 0x61, 0x74, 0x75, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x07, 0x73, 0x74, 0x72, 0x61, 0x74, 0x75, 0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x66, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x73, 0x65, 0x65, 0x64, 0x22, 0x86, 0x01, 0x0a, 0x06, 0x4e, 0x41, 0x46, 0x69, 0x6c, 0x6c, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x12, 0x39, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x86, 0x01, 0x0a, 0x06, 0x4e, 0x41, 0x44, 0x72, 0x6f, 0x70, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x12, 0x27, 0x0a, 0x0d, 0x6d, 0x69, 0x6e, 0x5f, 0x6e, 0x6f, 0x6e, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 
0x05, 0x48, 0x00, 0x52, 0x0b, 0x6d, 0x69, 0x6e, 0x4e, 0x6f, 0x6e, 0x4e, 0x75, 0x6c, 0x6c, 0x73, 0x88, 0x01, 0x01, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x6d, 0x69, 0x6e, 0x5f, 0x6e, 0x6f, 0x6e, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x73, 0x22, 0xa8, 0x02, 0x0a, 0x09, 0x4e, 0x41, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x6c, 0x73, 0x12, 0x48, 0x0a, 0x0c, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4e, 0x41, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0c, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x8d, 0x01, 0x0a, 0x0b, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x3e, 0x0a, 0x09, 0x6f, 0x6c, 0x64, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x08, 0x6f, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x3e, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x08, 0x6e, 0x65, 0x77, 0x56, 0x61, 
0x6c, 0x75, 0x65, 0x22, 0x58, 0x0a, 0x04, 0x54, 0x6f, 0x44, 0x46, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x22, 0xfe, 0x02, 0x0a, 0x12, 0x57, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x69, 0x0a, 0x12, 0x72, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x2e, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x10, 0x72, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x12, 0x42, 0x0a, 0x07, 0x72, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x2e, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x07, 0x72, 0x65, 
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x1a, 0x43, 0x0a, 0x15, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x45, 0x0a, 0x06, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x63, 0x6f, 0x6c, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0c, 0x6e, 0x65, 0x77, 0x5f, 0x63, 0x6f, 0x6c, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6e, 0x65, 0x77, 0x43, 0x6f, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x77, 0x0a, 0x0b, 0x57, 0x69, 0x74, 0x68, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x39, 0x0a, 0x07, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x52, 0x07, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x65, 0x73, 0x22, 0x86, 0x01, 0x0a, 0x0d, 0x57, 0x69, 0x74, 0x68, 0x57, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x1d, 
0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x64, 0x65, 0x6c, 0x61, 0x79, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x64, 0x65, 0x6c, 0x61, 0x79, 0x54, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0x84, 0x01, 0x0a, 0x04, 0x48, 0x69, 0x6e, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x22, 0xc7, 0x02, 0x0a, 0x07, 0x55, 0x6e, 0x70, 0x69, 0x76, 0x6f, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x2b, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x3a, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 
0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x55, 0x6e, 0x70, 0x69, 0x76, 0x6f, 0x74, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x48, 0x00, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x88, 0x01, 0x01, 0x12, 0x30, 0x0a, 0x14, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x3b, 0x0a, 0x06, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x31, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x7a, 0x0a, 0x09, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x3e, 0x0a, 0x0d, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x7d, 0x0a, 0x1d, 0x55, 0x6e, 0x72, 
0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x6a, 0x0a, 0x08, 0x54, 0x6f, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0xcb, 0x01, 0x0a, 0x17, 0x52, 0x65, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x79, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x42, 0x0a, 0x0f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 
0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x73, 0x12, 0x2a, 0x0a, 0x0e, 0x6e, 0x75, 0x6d, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x0d, 0x6e, 0x75, 0x6d, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xe8, 0x01, 0x0a, 0x0d, 0x4d, 0x61, 0x70, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x42, 0x0a, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x12, 0x22, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x62, 0x61, 0x72, 0x72, 0x69, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x69, 0x73, 0x42, 0x61, 0x72, 0x72, 0x69, 0x65, 0x72, 0x88, 0x01, 0x01, 0x12, 0x22, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x69, 0x73, 0x5f, 0x62, 0x61, 0x72, 0x72, 0x69, 0x65, 0x72, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 
0x5f, 0x69, 0x64, 0x22, 0xd2, 0x06, 0x0a, 0x08, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4d, 0x61, 0x70, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x4c, 0x0a, 0x14, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x12, 0x4a, 0x0a, 0x13, 0x73, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x12, 0x73, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3c, 0x0a, 0x0d, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 
0x52, 0x0c, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x5b, 0x0a, 0x1c, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x1a, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3b, 0x0a, 0x18, 0x69, 0x73, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x14, 0x69, 0x73, 0x4d, 0x61, 0x70, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x57, 0x69, 0x74, 0x68, 0x53, 0x74, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x88, 0x01, 0x01, 0x12, 0x26, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x88, 0x01, 0x01, 0x12, 0x3f, 0x0a, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x03, 0x52, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x12, 0x65, 0x0a, 0x19, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x74, 0x61, 
0x74, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x57, 0x69, 0x74, 0x68, 0x53, 0x74, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x04, 0x52, 0x16, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x57, 0x69, 0x74, 0x68, 0x53, 0x74, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x88, 0x01, 0x01, 0x42, 0x1b, 0x0a, 0x19, 0x5f, 0x69, 0x73, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x42, 0x1c, 0x0a, 0x1a, 0x5f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x22, 0xdf, 0x01, 0x0a, 0x16, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x57, 0x69, 0x74, 0x68, 0x53, 0x74, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x69, 0x6d, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x38, 0x0a, 0x16, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x13, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x41, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 
0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x01, 0x52, 0x0c, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x42, 0x19, 0x0a, 0x17, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0x8e, 0x04, 0x0a, 0x0a, 0x43, 0x6f, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4d, 0x61, 0x70, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x57, 0x0a, 0x1a, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x18, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x12, 0x57, 0x0a, 0x1a, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 
0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x18, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x12, 0x55, 0x0a, 0x19, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x17, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x53, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x55, 0x0a, 0x19, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x5f, 0x73, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x17, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x53, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xe5, 0x02, 0x0a, 0x16, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x49, 0x6e, 0x50, 0x61, 0x6e, 0x64, 0x61, 0x73, 0x57, 0x69, 0x74, 0x68, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 
0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x4c, 0x0a, 0x14, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x66, 0x75, 0x6e, 0x63, 0x12, 0x23, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x22, 0xf4, 0x01, 0x0a, 0x24, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 
0x64, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x64, 0x65, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x69, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x64, 0x65, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x69, 0x63, 0x12, 0x37, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x3c, 0x0a, 0x0b, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x75, 0x64, 0x74, 0x66, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x44, 0x54, 0x46, 0x48, 0x00, 0x52, 0x0a, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x64, 0x74, 0x66, 0x42, 0x0a, 0x0a, 0x08, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xb1, 0x01, 0x0a, 0x0a, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x55, 0x44, 0x54, 0x46, 0x12, 0x3d, 0x0a, 0x0b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x09, 0x65, 0x76, 0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x65, 0x76, 0x61, 0x6c, 0x54, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 
0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x76, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x56, 0x65, 0x72, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x97, 0x01, 0x0a, 0x21, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x49, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x4f, 0x0a, 0x12, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x48, 0x00, 0x52, 0x10, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x0d, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x4b, 0x0a, 0x10, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x76, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x56, 0x65, 0x72, 0x22, 0x88, 0x01, 0x0a, 0x0e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 
0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x22, 0x84, 0x03, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x38, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x00, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x88, 0x01, 0x01, 0x12, 0x3b, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 
0x73, 0x1a, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x58, 0x0a, 0x0b, 0x50, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x1c, 0x0a, 0x18, 0x50, 0x41, 0x52, 0x53, 0x45, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x50, 0x41, 0x52, 0x53, 0x45, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x43, 0x53, 0x56, 0x10, 0x01, 0x12, 0x15, 0x0a, 0x11, 0x50, 0x41, 0x52, 0x53, 0x45, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x4a, 0x53, 0x4f, 0x4e, 0x10, 0x02, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0xdb, 0x03, 0x0a, 0x08, 0x41, 0x73, 0x4f, 0x66, 0x4a, 0x6f, 0x69, 0x6e, 0x12, 0x2b, 0x0a, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x12, 0x37, 0x0a, 0x0a, 0x6c, 0x65, 0x66, 0x74, 0x5f, 0x61, 0x73, 0x5f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x6c, 0x65, 0x66, 0x74, 0x41, 0x73, 0x4f, 0x66, 0x12, 0x39, 0x0a, 0x0b, 0x72, 0x69, 0x67, 0x68, 0x74, 
0x5f, 0x61, 0x73, 0x5f, 0x6f, 0x66, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x72, 0x69, 0x67, 0x68, 0x74, 0x41, 0x73, 0x4f, 0x66, 0x12, 0x36, 0x0a, 0x09, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x6a, 0x6f, 0x69, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x12, 0x23, 0x0a, 0x0d, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x37, 0x0a, 0x09, 0x74, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x74, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x2e, 0x0a, 0x13, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x65, 0x78, 0x61, 0x63, 0x74, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x61, 0x63, 0x74, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xe6, 0x01, 0x0a, 0x0b, 0x4c, 0x61, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4a, 0x6f, 0x69, 0x6e, 0x12, 0x2b, 0x0a, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x18, 0x01, 
0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x12, 0x40, 0x0a, 0x0e, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x6a, 0x6f, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x39, 0x0a, 0x09, 0x6a, 0x6f, 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x4a, 0x6f, 0x69, 0x6e, 0x2e, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_relations_proto_rawDescOnce sync.Once file_spark_connect_relations_proto_rawDescData = file_spark_connect_relations_proto_rawDesc ) func file_spark_connect_relations_proto_rawDescGZIP() []byte { file_spark_connect_relations_proto_rawDescOnce.Do(func() { file_spark_connect_relations_proto_rawDescData = 
protoimpl.X.CompressGZIP(file_spark_connect_relations_proto_rawDescData) }) return file_spark_connect_relations_proto_rawDescData } var file_spark_connect_relations_proto_enumTypes = make([]protoimpl.EnumInfo, 4) var file_spark_connect_relations_proto_msgTypes = make([]protoimpl.MessageInfo, 80) var file_spark_connect_relations_proto_goTypes = []interface{}{ (Join_JoinType)(0), // 0: spark.connect.Join.JoinType (SetOperation_SetOpType)(0), // 1: spark.connect.SetOperation.SetOpType (Aggregate_GroupType)(0), // 2: spark.connect.Aggregate.GroupType (Parse_ParseFormat)(0), // 3: spark.connect.Parse.ParseFormat (*Relation)(nil), // 4: spark.connect.Relation (*MlRelation)(nil), // 5: spark.connect.MlRelation (*Fetch)(nil), // 6: spark.connect.Fetch (*Unknown)(nil), // 7: spark.connect.Unknown (*RelationCommon)(nil), // 8: spark.connect.RelationCommon (*SQL)(nil), // 9: spark.connect.SQL (*WithRelations)(nil), // 10: spark.connect.WithRelations (*Read)(nil), // 11: spark.connect.Read (*Project)(nil), // 12: spark.connect.Project (*Filter)(nil), // 13: spark.connect.Filter (*Join)(nil), // 14: spark.connect.Join (*SetOperation)(nil), // 15: spark.connect.SetOperation (*Limit)(nil), // 16: spark.connect.Limit (*Offset)(nil), // 17: spark.connect.Offset (*Tail)(nil), // 18: spark.connect.Tail (*Aggregate)(nil), // 19: spark.connect.Aggregate (*Sort)(nil), // 20: spark.connect.Sort (*Drop)(nil), // 21: spark.connect.Drop (*Deduplicate)(nil), // 22: spark.connect.Deduplicate (*LocalRelation)(nil), // 23: spark.connect.LocalRelation (*CachedLocalRelation)(nil), // 24: spark.connect.CachedLocalRelation (*CachedRemoteRelation)(nil), // 25: spark.connect.CachedRemoteRelation (*Sample)(nil), // 26: spark.connect.Sample (*Range)(nil), // 27: spark.connect.Range (*SubqueryAlias)(nil), // 28: spark.connect.SubqueryAlias (*Repartition)(nil), // 29: spark.connect.Repartition (*ShowString)(nil), // 30: spark.connect.ShowString (*HtmlString)(nil), // 31: spark.connect.HtmlString 
(*StatSummary)(nil), // 32: spark.connect.StatSummary (*StatDescribe)(nil), // 33: spark.connect.StatDescribe (*StatCrosstab)(nil), // 34: spark.connect.StatCrosstab (*StatCov)(nil), // 35: spark.connect.StatCov (*StatCorr)(nil), // 36: spark.connect.StatCorr (*StatApproxQuantile)(nil), // 37: spark.connect.StatApproxQuantile (*StatFreqItems)(nil), // 38: spark.connect.StatFreqItems (*StatSampleBy)(nil), // 39: spark.connect.StatSampleBy (*NAFill)(nil), // 40: spark.connect.NAFill (*NADrop)(nil), // 41: spark.connect.NADrop (*NAReplace)(nil), // 42: spark.connect.NAReplace (*ToDF)(nil), // 43: spark.connect.ToDF (*WithColumnsRenamed)(nil), // 44: spark.connect.WithColumnsRenamed (*WithColumns)(nil), // 45: spark.connect.WithColumns (*WithWatermark)(nil), // 46: spark.connect.WithWatermark (*Hint)(nil), // 47: spark.connect.Hint (*Unpivot)(nil), // 48: spark.connect.Unpivot (*Transpose)(nil), // 49: spark.connect.Transpose (*UnresolvedTableValuedFunction)(nil), // 50: spark.connect.UnresolvedTableValuedFunction (*ToSchema)(nil), // 51: spark.connect.ToSchema (*RepartitionByExpression)(nil), // 52: spark.connect.RepartitionByExpression (*MapPartitions)(nil), // 53: spark.connect.MapPartitions (*GroupMap)(nil), // 54: spark.connect.GroupMap (*TransformWithStateInfo)(nil), // 55: spark.connect.TransformWithStateInfo (*CoGroupMap)(nil), // 56: spark.connect.CoGroupMap (*ApplyInPandasWithState)(nil), // 57: spark.connect.ApplyInPandasWithState (*CommonInlineUserDefinedTableFunction)(nil), // 58: spark.connect.CommonInlineUserDefinedTableFunction (*PythonUDTF)(nil), // 59: spark.connect.PythonUDTF (*CommonInlineUserDefinedDataSource)(nil), // 60: spark.connect.CommonInlineUserDefinedDataSource (*PythonDataSource)(nil), // 61: spark.connect.PythonDataSource (*CollectMetrics)(nil), // 62: spark.connect.CollectMetrics (*Parse)(nil), // 63: spark.connect.Parse (*AsOfJoin)(nil), // 64: spark.connect.AsOfJoin (*LateralJoin)(nil), // 65: spark.connect.LateralJoin 
(*MlRelation_Transform)(nil), // 66: spark.connect.MlRelation.Transform (*Fetch_Method)(nil), // 67: spark.connect.Fetch.Method (*Fetch_Method_Args)(nil), // 68: spark.connect.Fetch.Method.Args nil, // 69: spark.connect.SQL.ArgsEntry nil, // 70: spark.connect.SQL.NamedArgumentsEntry (*Read_NamedTable)(nil), // 71: spark.connect.Read.NamedTable (*Read_DataSource)(nil), // 72: spark.connect.Read.DataSource nil, // 73: spark.connect.Read.NamedTable.OptionsEntry nil, // 74: spark.connect.Read.DataSource.OptionsEntry (*Join_JoinDataType)(nil), // 75: spark.connect.Join.JoinDataType (*Aggregate_Pivot)(nil), // 76: spark.connect.Aggregate.Pivot (*Aggregate_GroupingSets)(nil), // 77: spark.connect.Aggregate.GroupingSets (*StatSampleBy_Fraction)(nil), // 78: spark.connect.StatSampleBy.Fraction (*NAReplace_Replacement)(nil), // 79: spark.connect.NAReplace.Replacement nil, // 80: spark.connect.WithColumnsRenamed.RenameColumnsMapEntry (*WithColumnsRenamed_Rename)(nil), // 81: spark.connect.WithColumnsRenamed.Rename (*Unpivot_Values)(nil), // 82: spark.connect.Unpivot.Values nil, // 83: spark.connect.Parse.OptionsEntry (*Catalog)(nil), // 84: spark.connect.Catalog (*anypb.Any)(nil), // 85: google.protobuf.Any (*ObjectRef)(nil), // 86: spark.connect.ObjectRef (*Origin)(nil), // 87: spark.connect.Origin (*Expression_Literal)(nil), // 88: spark.connect.Expression.Literal (*Expression)(nil), // 89: spark.connect.Expression (*Expression_SortOrder)(nil), // 90: spark.connect.Expression.SortOrder (*Expression_Alias)(nil), // 91: spark.connect.Expression.Alias (*DataType)(nil), // 92: spark.connect.DataType (*CommonInlineUserDefinedFunction)(nil), // 93: spark.connect.CommonInlineUserDefinedFunction (*MlOperator)(nil), // 94: spark.connect.MlOperator (*MlParams)(nil), // 95: spark.connect.MlParams } var file_spark_connect_relations_proto_depIdxs = []int32{ 8, // 0: spark.connect.Relation.common:type_name -> spark.connect.RelationCommon 11, // 1: spark.connect.Relation.read:type_name -> 
spark.connect.Read 12, // 2: spark.connect.Relation.project:type_name -> spark.connect.Project 13, // 3: spark.connect.Relation.filter:type_name -> spark.connect.Filter 14, // 4: spark.connect.Relation.join:type_name -> spark.connect.Join 15, // 5: spark.connect.Relation.set_op:type_name -> spark.connect.SetOperation 20, // 6: spark.connect.Relation.sort:type_name -> spark.connect.Sort 16, // 7: spark.connect.Relation.limit:type_name -> spark.connect.Limit 19, // 8: spark.connect.Relation.aggregate:type_name -> spark.connect.Aggregate 9, // 9: spark.connect.Relation.sql:type_name -> spark.connect.SQL 23, // 10: spark.connect.Relation.local_relation:type_name -> spark.connect.LocalRelation 26, // 11: spark.connect.Relation.sample:type_name -> spark.connect.Sample 17, // 12: spark.connect.Relation.offset:type_name -> spark.connect.Offset 22, // 13: spark.connect.Relation.deduplicate:type_name -> spark.connect.Deduplicate 27, // 14: spark.connect.Relation.range:type_name -> spark.connect.Range 28, // 15: spark.connect.Relation.subquery_alias:type_name -> spark.connect.SubqueryAlias 29, // 16: spark.connect.Relation.repartition:type_name -> spark.connect.Repartition 43, // 17: spark.connect.Relation.to_df:type_name -> spark.connect.ToDF 44, // 18: spark.connect.Relation.with_columns_renamed:type_name -> spark.connect.WithColumnsRenamed 30, // 19: spark.connect.Relation.show_string:type_name -> spark.connect.ShowString 21, // 20: spark.connect.Relation.drop:type_name -> spark.connect.Drop 18, // 21: spark.connect.Relation.tail:type_name -> spark.connect.Tail 45, // 22: spark.connect.Relation.with_columns:type_name -> spark.connect.WithColumns 47, // 23: spark.connect.Relation.hint:type_name -> spark.connect.Hint 48, // 24: spark.connect.Relation.unpivot:type_name -> spark.connect.Unpivot 51, // 25: spark.connect.Relation.to_schema:type_name -> spark.connect.ToSchema 52, // 26: spark.connect.Relation.repartition_by_expression:type_name -> 
spark.connect.RepartitionByExpression 53, // 27: spark.connect.Relation.map_partitions:type_name -> spark.connect.MapPartitions 62, // 28: spark.connect.Relation.collect_metrics:type_name -> spark.connect.CollectMetrics 63, // 29: spark.connect.Relation.parse:type_name -> spark.connect.Parse 54, // 30: spark.connect.Relation.group_map:type_name -> spark.connect.GroupMap 56, // 31: spark.connect.Relation.co_group_map:type_name -> spark.connect.CoGroupMap 46, // 32: spark.connect.Relation.with_watermark:type_name -> spark.connect.WithWatermark 57, // 33: spark.connect.Relation.apply_in_pandas_with_state:type_name -> spark.connect.ApplyInPandasWithState 31, // 34: spark.connect.Relation.html_string:type_name -> spark.connect.HtmlString 24, // 35: spark.connect.Relation.cached_local_relation:type_name -> spark.connect.CachedLocalRelation 25, // 36: spark.connect.Relation.cached_remote_relation:type_name -> spark.connect.CachedRemoteRelation 58, // 37: spark.connect.Relation.common_inline_user_defined_table_function:type_name -> spark.connect.CommonInlineUserDefinedTableFunction 64, // 38: spark.connect.Relation.as_of_join:type_name -> spark.connect.AsOfJoin 60, // 39: spark.connect.Relation.common_inline_user_defined_data_source:type_name -> spark.connect.CommonInlineUserDefinedDataSource 10, // 40: spark.connect.Relation.with_relations:type_name -> spark.connect.WithRelations 49, // 41: spark.connect.Relation.transpose:type_name -> spark.connect.Transpose 50, // 42: spark.connect.Relation.unresolved_table_valued_function:type_name -> spark.connect.UnresolvedTableValuedFunction 65, // 43: spark.connect.Relation.lateral_join:type_name -> spark.connect.LateralJoin 40, // 44: spark.connect.Relation.fill_na:type_name -> spark.connect.NAFill 41, // 45: spark.connect.Relation.drop_na:type_name -> spark.connect.NADrop 42, // 46: spark.connect.Relation.replace:type_name -> spark.connect.NAReplace 32, // 47: spark.connect.Relation.summary:type_name -> spark.connect.StatSummary 
34, // 48: spark.connect.Relation.crosstab:type_name -> spark.connect.StatCrosstab 33, // 49: spark.connect.Relation.describe:type_name -> spark.connect.StatDescribe 35, // 50: spark.connect.Relation.cov:type_name -> spark.connect.StatCov 36, // 51: spark.connect.Relation.corr:type_name -> spark.connect.StatCorr 37, // 52: spark.connect.Relation.approx_quantile:type_name -> spark.connect.StatApproxQuantile 38, // 53: spark.connect.Relation.freq_items:type_name -> spark.connect.StatFreqItems 39, // 54: spark.connect.Relation.sample_by:type_name -> spark.connect.StatSampleBy 84, // 55: spark.connect.Relation.catalog:type_name -> spark.connect.Catalog 5, // 56: spark.connect.Relation.ml_relation:type_name -> spark.connect.MlRelation 85, // 57: spark.connect.Relation.extension:type_name -> google.protobuf.Any 7, // 58: spark.connect.Relation.unknown:type_name -> spark.connect.Unknown 66, // 59: spark.connect.MlRelation.transform:type_name -> spark.connect.MlRelation.Transform 6, // 60: spark.connect.MlRelation.fetch:type_name -> spark.connect.Fetch 86, // 61: spark.connect.Fetch.obj_ref:type_name -> spark.connect.ObjectRef 67, // 62: spark.connect.Fetch.methods:type_name -> spark.connect.Fetch.Method 87, // 63: spark.connect.RelationCommon.origin:type_name -> spark.connect.Origin 69, // 64: spark.connect.SQL.args:type_name -> spark.connect.SQL.ArgsEntry 88, // 65: spark.connect.SQL.pos_args:type_name -> spark.connect.Expression.Literal 70, // 66: spark.connect.SQL.named_arguments:type_name -> spark.connect.SQL.NamedArgumentsEntry 89, // 67: spark.connect.SQL.pos_arguments:type_name -> spark.connect.Expression 4, // 68: spark.connect.WithRelations.root:type_name -> spark.connect.Relation 4, // 69: spark.connect.WithRelations.references:type_name -> spark.connect.Relation 71, // 70: spark.connect.Read.named_table:type_name -> spark.connect.Read.NamedTable 72, // 71: spark.connect.Read.data_source:type_name -> spark.connect.Read.DataSource 4, // 72: 
spark.connect.Project.input:type_name -> spark.connect.Relation 89, // 73: spark.connect.Project.expressions:type_name -> spark.connect.Expression 4, // 74: spark.connect.Filter.input:type_name -> spark.connect.Relation 89, // 75: spark.connect.Filter.condition:type_name -> spark.connect.Expression 4, // 76: spark.connect.Join.left:type_name -> spark.connect.Relation 4, // 77: spark.connect.Join.right:type_name -> spark.connect.Relation 89, // 78: spark.connect.Join.join_condition:type_name -> spark.connect.Expression 0, // 79: spark.connect.Join.join_type:type_name -> spark.connect.Join.JoinType 75, // 80: spark.connect.Join.join_data_type:type_name -> spark.connect.Join.JoinDataType 4, // 81: spark.connect.SetOperation.left_input:type_name -> spark.connect.Relation 4, // 82: spark.connect.SetOperation.right_input:type_name -> spark.connect.Relation 1, // 83: spark.connect.SetOperation.set_op_type:type_name -> spark.connect.SetOperation.SetOpType 4, // 84: spark.connect.Limit.input:type_name -> spark.connect.Relation 4, // 85: spark.connect.Offset.input:type_name -> spark.connect.Relation 4, // 86: spark.connect.Tail.input:type_name -> spark.connect.Relation 4, // 87: spark.connect.Aggregate.input:type_name -> spark.connect.Relation 2, // 88: spark.connect.Aggregate.group_type:type_name -> spark.connect.Aggregate.GroupType 89, // 89: spark.connect.Aggregate.grouping_expressions:type_name -> spark.connect.Expression 89, // 90: spark.connect.Aggregate.aggregate_expressions:type_name -> spark.connect.Expression 76, // 91: spark.connect.Aggregate.pivot:type_name -> spark.connect.Aggregate.Pivot 77, // 92: spark.connect.Aggregate.grouping_sets:type_name -> spark.connect.Aggregate.GroupingSets 4, // 93: spark.connect.Sort.input:type_name -> spark.connect.Relation 90, // 94: spark.connect.Sort.order:type_name -> spark.connect.Expression.SortOrder 4, // 95: spark.connect.Drop.input:type_name -> spark.connect.Relation 89, // 96: spark.connect.Drop.columns:type_name -> 
spark.connect.Expression 4, // 97: spark.connect.Deduplicate.input:type_name -> spark.connect.Relation 4, // 98: spark.connect.Sample.input:type_name -> spark.connect.Relation 4, // 99: spark.connect.SubqueryAlias.input:type_name -> spark.connect.Relation 4, // 100: spark.connect.Repartition.input:type_name -> spark.connect.Relation 4, // 101: spark.connect.ShowString.input:type_name -> spark.connect.Relation 4, // 102: spark.connect.HtmlString.input:type_name -> spark.connect.Relation 4, // 103: spark.connect.StatSummary.input:type_name -> spark.connect.Relation 4, // 104: spark.connect.StatDescribe.input:type_name -> spark.connect.Relation 4, // 105: spark.connect.StatCrosstab.input:type_name -> spark.connect.Relation 4, // 106: spark.connect.StatCov.input:type_name -> spark.connect.Relation 4, // 107: spark.connect.StatCorr.input:type_name -> spark.connect.Relation 4, // 108: spark.connect.StatApproxQuantile.input:type_name -> spark.connect.Relation 4, // 109: spark.connect.StatFreqItems.input:type_name -> spark.connect.Relation 4, // 110: spark.connect.StatSampleBy.input:type_name -> spark.connect.Relation 89, // 111: spark.connect.StatSampleBy.col:type_name -> spark.connect.Expression 78, // 112: spark.connect.StatSampleBy.fractions:type_name -> spark.connect.StatSampleBy.Fraction 4, // 113: spark.connect.NAFill.input:type_name -> spark.connect.Relation 88, // 114: spark.connect.NAFill.values:type_name -> spark.connect.Expression.Literal 4, // 115: spark.connect.NADrop.input:type_name -> spark.connect.Relation 4, // 116: spark.connect.NAReplace.input:type_name -> spark.connect.Relation 79, // 117: spark.connect.NAReplace.replacements:type_name -> spark.connect.NAReplace.Replacement 4, // 118: spark.connect.ToDF.input:type_name -> spark.connect.Relation 4, // 119: spark.connect.WithColumnsRenamed.input:type_name -> spark.connect.Relation 80, // 120: spark.connect.WithColumnsRenamed.rename_columns_map:type_name -> 
spark.connect.WithColumnsRenamed.RenameColumnsMapEntry 81, // 121: spark.connect.WithColumnsRenamed.renames:type_name -> spark.connect.WithColumnsRenamed.Rename 4, // 122: spark.connect.WithColumns.input:type_name -> spark.connect.Relation 91, // 123: spark.connect.WithColumns.aliases:type_name -> spark.connect.Expression.Alias 4, // 124: spark.connect.WithWatermark.input:type_name -> spark.connect.Relation 4, // 125: spark.connect.Hint.input:type_name -> spark.connect.Relation 89, // 126: spark.connect.Hint.parameters:type_name -> spark.connect.Expression 4, // 127: spark.connect.Unpivot.input:type_name -> spark.connect.Relation 89, // 128: spark.connect.Unpivot.ids:type_name -> spark.connect.Expression 82, // 129: spark.connect.Unpivot.values:type_name -> spark.connect.Unpivot.Values 4, // 130: spark.connect.Transpose.input:type_name -> spark.connect.Relation 89, // 131: spark.connect.Transpose.index_columns:type_name -> spark.connect.Expression 89, // 132: spark.connect.UnresolvedTableValuedFunction.arguments:type_name -> spark.connect.Expression 4, // 133: spark.connect.ToSchema.input:type_name -> spark.connect.Relation 92, // 134: spark.connect.ToSchema.schema:type_name -> spark.connect.DataType 4, // 135: spark.connect.RepartitionByExpression.input:type_name -> spark.connect.Relation 89, // 136: spark.connect.RepartitionByExpression.partition_exprs:type_name -> spark.connect.Expression 4, // 137: spark.connect.MapPartitions.input:type_name -> spark.connect.Relation 93, // 138: spark.connect.MapPartitions.func:type_name -> spark.connect.CommonInlineUserDefinedFunction 4, // 139: spark.connect.GroupMap.input:type_name -> spark.connect.Relation 89, // 140: spark.connect.GroupMap.grouping_expressions:type_name -> spark.connect.Expression 93, // 141: spark.connect.GroupMap.func:type_name -> spark.connect.CommonInlineUserDefinedFunction 89, // 142: spark.connect.GroupMap.sorting_expressions:type_name -> spark.connect.Expression 4, // 143: 
spark.connect.GroupMap.initial_input:type_name -> spark.connect.Relation 89, // 144: spark.connect.GroupMap.initial_grouping_expressions:type_name -> spark.connect.Expression 92, // 145: spark.connect.GroupMap.state_schema:type_name -> spark.connect.DataType 55, // 146: spark.connect.GroupMap.transform_with_state_info:type_name -> spark.connect.TransformWithStateInfo 92, // 147: spark.connect.TransformWithStateInfo.output_schema:type_name -> spark.connect.DataType 4, // 148: spark.connect.CoGroupMap.input:type_name -> spark.connect.Relation 89, // 149: spark.connect.CoGroupMap.input_grouping_expressions:type_name -> spark.connect.Expression 4, // 150: spark.connect.CoGroupMap.other:type_name -> spark.connect.Relation 89, // 151: spark.connect.CoGroupMap.other_grouping_expressions:type_name -> spark.connect.Expression 93, // 152: spark.connect.CoGroupMap.func:type_name -> spark.connect.CommonInlineUserDefinedFunction 89, // 153: spark.connect.CoGroupMap.input_sorting_expressions:type_name -> spark.connect.Expression 89, // 154: spark.connect.CoGroupMap.other_sorting_expressions:type_name -> spark.connect.Expression 4, // 155: spark.connect.ApplyInPandasWithState.input:type_name -> spark.connect.Relation 89, // 156: spark.connect.ApplyInPandasWithState.grouping_expressions:type_name -> spark.connect.Expression 93, // 157: spark.connect.ApplyInPandasWithState.func:type_name -> spark.connect.CommonInlineUserDefinedFunction 89, // 158: spark.connect.CommonInlineUserDefinedTableFunction.arguments:type_name -> spark.connect.Expression 59, // 159: spark.connect.CommonInlineUserDefinedTableFunction.python_udtf:type_name -> spark.connect.PythonUDTF 92, // 160: spark.connect.PythonUDTF.return_type:type_name -> spark.connect.DataType 61, // 161: spark.connect.CommonInlineUserDefinedDataSource.python_data_source:type_name -> spark.connect.PythonDataSource 4, // 162: spark.connect.CollectMetrics.input:type_name -> spark.connect.Relation 89, // 163: 
spark.connect.CollectMetrics.metrics:type_name -> spark.connect.Expression 4, // 164: spark.connect.Parse.input:type_name -> spark.connect.Relation 3, // 165: spark.connect.Parse.format:type_name -> spark.connect.Parse.ParseFormat 92, // 166: spark.connect.Parse.schema:type_name -> spark.connect.DataType 83, // 167: spark.connect.Parse.options:type_name -> spark.connect.Parse.OptionsEntry 4, // 168: spark.connect.AsOfJoin.left:type_name -> spark.connect.Relation 4, // 169: spark.connect.AsOfJoin.right:type_name -> spark.connect.Relation 89, // 170: spark.connect.AsOfJoin.left_as_of:type_name -> spark.connect.Expression 89, // 171: spark.connect.AsOfJoin.right_as_of:type_name -> spark.connect.Expression 89, // 172: spark.connect.AsOfJoin.join_expr:type_name -> spark.connect.Expression 89, // 173: spark.connect.AsOfJoin.tolerance:type_name -> spark.connect.Expression 4, // 174: spark.connect.LateralJoin.left:type_name -> spark.connect.Relation 4, // 175: spark.connect.LateralJoin.right:type_name -> spark.connect.Relation 89, // 176: spark.connect.LateralJoin.join_condition:type_name -> spark.connect.Expression 0, // 177: spark.connect.LateralJoin.join_type:type_name -> spark.connect.Join.JoinType 86, // 178: spark.connect.MlRelation.Transform.obj_ref:type_name -> spark.connect.ObjectRef 94, // 179: spark.connect.MlRelation.Transform.transformer:type_name -> spark.connect.MlOperator 4, // 180: spark.connect.MlRelation.Transform.input:type_name -> spark.connect.Relation 95, // 181: spark.connect.MlRelation.Transform.params:type_name -> spark.connect.MlParams 68, // 182: spark.connect.Fetch.Method.args:type_name -> spark.connect.Fetch.Method.Args 88, // 183: spark.connect.Fetch.Method.Args.param:type_name -> spark.connect.Expression.Literal 4, // 184: spark.connect.Fetch.Method.Args.input:type_name -> spark.connect.Relation 88, // 185: spark.connect.SQL.ArgsEntry.value:type_name -> spark.connect.Expression.Literal 89, // 186: 
spark.connect.SQL.NamedArgumentsEntry.value:type_name -> spark.connect.Expression 73, // 187: spark.connect.Read.NamedTable.options:type_name -> spark.connect.Read.NamedTable.OptionsEntry 74, // 188: spark.connect.Read.DataSource.options:type_name -> spark.connect.Read.DataSource.OptionsEntry 89, // 189: spark.connect.Aggregate.Pivot.col:type_name -> spark.connect.Expression 88, // 190: spark.connect.Aggregate.Pivot.values:type_name -> spark.connect.Expression.Literal 89, // 191: spark.connect.Aggregate.GroupingSets.grouping_set:type_name -> spark.connect.Expression 88, // 192: spark.connect.StatSampleBy.Fraction.stratum:type_name -> spark.connect.Expression.Literal 88, // 193: spark.connect.NAReplace.Replacement.old_value:type_name -> spark.connect.Expression.Literal 88, // 194: spark.connect.NAReplace.Replacement.new_value:type_name -> spark.connect.Expression.Literal 89, // 195: spark.connect.Unpivot.Values.values:type_name -> spark.connect.Expression 196, // [196:196] is the sub-list for method output_type 196, // [196:196] is the sub-list for method input_type 196, // [196:196] is the sub-list for extension type_name 196, // [196:196] is the sub-list for extension extendee 0, // [0:196] is the sub-list for field type_name } func init() { file_spark_connect_relations_proto_init() } func file_spark_connect_relations_proto_init() { if File_spark_connect_relations_proto != nil { return } file_spark_connect_expressions_proto_init() file_spark_connect_types_proto_init() file_spark_connect_catalog_proto_init() file_spark_connect_common_proto_init() file_spark_connect_ml_common_proto_init() if !protoimpl.UnsafeEnabled { file_spark_connect_relations_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Relation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := 
v.(*MlRelation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Fetch); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Unknown); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RelationCommon); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SQL); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WithRelations); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Read); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Project); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Filter); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return 
&v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Join); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SetOperation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Limit); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Offset); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Tail); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Aggregate); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Sort); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Drop); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[18].Exporter = 
func(v interface{}, i int) interface{} { switch v := v.(*Deduplicate); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*LocalRelation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CachedLocalRelation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CachedRemoteRelation); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Sample); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Range); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SubqueryAlias); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Repartition); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { switch v := 
v.(*ShowString); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*HtmlString); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatSummary); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatDescribe); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatCrosstab); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatCov); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatCorr); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatApproxQuantile); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatFreqItems); i { case 0: return &v.state case 1: return 
&v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatSampleBy); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NAFill); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NADrop); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NAReplace); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ToDF); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WithColumnsRenamed); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WithColumns); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WithWatermark); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_relations_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Hint); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Unpivot); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[45].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Transpose); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*UnresolvedTableValuedFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[47].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ToSchema); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RepartitionByExpression); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MapPartitions); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GroupMap); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_relations_proto_msgTypes[51].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*TransformWithStateInfo); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[52].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CoGroupMap); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ApplyInPandasWithState); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[54].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CommonInlineUserDefinedTableFunction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[55].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PythonUDTF); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[56].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CommonInlineUserDefinedDataSource); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[57].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PythonDataSource); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[58].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CollectMetrics); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: 
return nil } } file_spark_connect_relations_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Parse); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[60].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*AsOfJoin); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[61].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*LateralJoin); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[62].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MlRelation_Transform); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[63].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Fetch_Method); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[64].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Fetch_Method_Args); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[67].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Read_NamedTable); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[68].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Read_DataSource); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_relations_proto_msgTypes[71].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Join_JoinDataType); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[72].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Aggregate_Pivot); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[73].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Aggregate_GroupingSets); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[74].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*StatSampleBy_Fraction); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[75].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NAReplace_Replacement); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[77].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WithColumnsRenamed_Rename); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_relations_proto_msgTypes[78].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Unpivot_Values); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_relations_proto_msgTypes[0].OneofWrappers = []interface{}{ (*Relation_Read)(nil), (*Relation_Project)(nil), (*Relation_Filter)(nil), (*Relation_Join)(nil), (*Relation_SetOp)(nil), (*Relation_Sort)(nil), (*Relation_Limit)(nil), 
(*Relation_Aggregate)(nil), (*Relation_Sql)(nil), (*Relation_LocalRelation)(nil), (*Relation_Sample)(nil), (*Relation_Offset)(nil), (*Relation_Deduplicate)(nil), (*Relation_Range)(nil), (*Relation_SubqueryAlias)(nil), (*Relation_Repartition)(nil), (*Relation_ToDf)(nil), (*Relation_WithColumnsRenamed)(nil), (*Relation_ShowString)(nil), (*Relation_Drop)(nil), (*Relation_Tail)(nil), (*Relation_WithColumns)(nil), (*Relation_Hint)(nil), (*Relation_Unpivot)(nil), (*Relation_ToSchema)(nil), (*Relation_RepartitionByExpression)(nil), (*Relation_MapPartitions)(nil), (*Relation_CollectMetrics)(nil), (*Relation_Parse)(nil), (*Relation_GroupMap)(nil), (*Relation_CoGroupMap)(nil), (*Relation_WithWatermark)(nil), (*Relation_ApplyInPandasWithState)(nil), (*Relation_HtmlString)(nil), (*Relation_CachedLocalRelation)(nil), (*Relation_CachedRemoteRelation)(nil), (*Relation_CommonInlineUserDefinedTableFunction)(nil), (*Relation_AsOfJoin)(nil), (*Relation_CommonInlineUserDefinedDataSource)(nil), (*Relation_WithRelations)(nil), (*Relation_Transpose)(nil), (*Relation_UnresolvedTableValuedFunction)(nil), (*Relation_LateralJoin)(nil), (*Relation_FillNa)(nil), (*Relation_DropNa)(nil), (*Relation_Replace)(nil), (*Relation_Summary)(nil), (*Relation_Crosstab)(nil), (*Relation_Describe)(nil), (*Relation_Cov)(nil), (*Relation_Corr)(nil), (*Relation_ApproxQuantile)(nil), (*Relation_FreqItems)(nil), (*Relation_SampleBy)(nil), (*Relation_Catalog)(nil), (*Relation_MlRelation)(nil), (*Relation_Extension)(nil), (*Relation_Unknown)(nil), } file_spark_connect_relations_proto_msgTypes[1].OneofWrappers = []interface{}{ (*MlRelation_Transform_)(nil), (*MlRelation_Fetch)(nil), } file_spark_connect_relations_proto_msgTypes[4].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[7].OneofWrappers = []interface{}{ (*Read_NamedTable_)(nil), (*Read_DataSource_)(nil), } file_spark_connect_relations_proto_msgTypes[10].OneofWrappers = []interface{}{} 
file_spark_connect_relations_proto_msgTypes[11].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[16].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[18].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[19].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[22].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[23].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[25].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[32].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[34].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[35].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[37].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[44].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[48].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[49].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[50].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[51].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[54].OneofWrappers = []interface{}{ (*CommonInlineUserDefinedTableFunction_PythonUdtf)(nil), } file_spark_connect_relations_proto_msgTypes[55].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[56].OneofWrappers = []interface{}{ (*CommonInlineUserDefinedDataSource_PythonDataSource)(nil), } file_spark_connect_relations_proto_msgTypes[59].OneofWrappers = []interface{}{} file_spark_connect_relations_proto_msgTypes[62].OneofWrappers = []interface{}{ (*MlRelation_Transform_ObjRef)(nil), (*MlRelation_Transform_Transformer)(nil), } file_spark_connect_relations_proto_msgTypes[64].OneofWrappers = []interface{}{ (*Fetch_Method_Args_Param)(nil), 
(*Fetch_Method_Args_Input)(nil), } file_spark_connect_relations_proto_msgTypes[68].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_relations_proto_rawDesc, NumEnums: 4, NumMessages: 80, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_relations_proto_goTypes, DependencyIndexes: file_spark_connect_relations_proto_depIdxs, EnumInfos: file_spark_connect_relations_proto_enumTypes, MessageInfos: file_spark_connect_relations_proto_msgTypes, }.Build() File_spark_connect_relations_proto = out.File file_spark_connect_relations_proto_rawDesc = nil file_spark_connect_relations_proto_goTypes = nil file_spark_connect_relations_proto_depIdxs = nil } ================================================ FILE: internal/generated/types.pb.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: // protoc-gen-go v1.30.0 // protoc (unknown) // source: spark/connect/types.proto package generated import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // This message describes the logical [[DataType]] of something. It does not carry the value // itself but only describes it. type DataType struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Types that are assignable to Kind: // // *DataType_Null // *DataType_Binary_ // *DataType_Boolean_ // *DataType_Byte_ // *DataType_Short_ // *DataType_Integer_ // *DataType_Long_ // *DataType_Float_ // *DataType_Double_ // *DataType_Decimal_ // *DataType_String_ // *DataType_Char_ // *DataType_VarChar_ // *DataType_Date_ // *DataType_Timestamp_ // *DataType_TimestampNtz // *DataType_CalendarInterval_ // *DataType_YearMonthInterval_ // *DataType_DayTimeInterval_ // *DataType_Array_ // *DataType_Struct_ // *DataType_Map_ // *DataType_Variant_ // *DataType_Udt // *DataType_Unparsed_ Kind isDataType_Kind `protobuf_oneof:"kind"` } func (x *DataType) Reset() { *x = DataType{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType) ProtoMessage() {} func (x *DataType) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } 
return mi.MessageOf(x) } // Deprecated: Use DataType.ProtoReflect.Descriptor instead. func (*DataType) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0} } func (m *DataType) GetKind() isDataType_Kind { if m != nil { return m.Kind } return nil } func (x *DataType) GetNull() *DataType_NULL { if x, ok := x.GetKind().(*DataType_Null); ok { return x.Null } return nil } func (x *DataType) GetBinary() *DataType_Binary { if x, ok := x.GetKind().(*DataType_Binary_); ok { return x.Binary } return nil } func (x *DataType) GetBoolean() *DataType_Boolean { if x, ok := x.GetKind().(*DataType_Boolean_); ok { return x.Boolean } return nil } func (x *DataType) GetByte() *DataType_Byte { if x, ok := x.GetKind().(*DataType_Byte_); ok { return x.Byte } return nil } func (x *DataType) GetShort() *DataType_Short { if x, ok := x.GetKind().(*DataType_Short_); ok { return x.Short } return nil } func (x *DataType) GetInteger() *DataType_Integer { if x, ok := x.GetKind().(*DataType_Integer_); ok { return x.Integer } return nil } func (x *DataType) GetLong() *DataType_Long { if x, ok := x.GetKind().(*DataType_Long_); ok { return x.Long } return nil } func (x *DataType) GetFloat() *DataType_Float { if x, ok := x.GetKind().(*DataType_Float_); ok { return x.Float } return nil } func (x *DataType) GetDouble() *DataType_Double { if x, ok := x.GetKind().(*DataType_Double_); ok { return x.Double } return nil } func (x *DataType) GetDecimal() *DataType_Decimal { if x, ok := x.GetKind().(*DataType_Decimal_); ok { return x.Decimal } return nil } func (x *DataType) GetString_() *DataType_String { if x, ok := x.GetKind().(*DataType_String_); ok { return x.String_ } return nil } func (x *DataType) GetChar() *DataType_Char { if x, ok := x.GetKind().(*DataType_Char_); ok { return x.Char } return nil } func (x *DataType) GetVarChar() *DataType_VarChar { if x, ok := x.GetKind().(*DataType_VarChar_); ok { return x.VarChar } return nil } func (x *DataType) GetDate() 
*DataType_Date { if x, ok := x.GetKind().(*DataType_Date_); ok { return x.Date } return nil } func (x *DataType) GetTimestamp() *DataType_Timestamp { if x, ok := x.GetKind().(*DataType_Timestamp_); ok { return x.Timestamp } return nil } func (x *DataType) GetTimestampNtz() *DataType_TimestampNTZ { if x, ok := x.GetKind().(*DataType_TimestampNtz); ok { return x.TimestampNtz } return nil } func (x *DataType) GetCalendarInterval() *DataType_CalendarInterval { if x, ok := x.GetKind().(*DataType_CalendarInterval_); ok { return x.CalendarInterval } return nil } func (x *DataType) GetYearMonthInterval() *DataType_YearMonthInterval { if x, ok := x.GetKind().(*DataType_YearMonthInterval_); ok { return x.YearMonthInterval } return nil } func (x *DataType) GetDayTimeInterval() *DataType_DayTimeInterval { if x, ok := x.GetKind().(*DataType_DayTimeInterval_); ok { return x.DayTimeInterval } return nil } func (x *DataType) GetArray() *DataType_Array { if x, ok := x.GetKind().(*DataType_Array_); ok { return x.Array } return nil } func (x *DataType) GetStruct() *DataType_Struct { if x, ok := x.GetKind().(*DataType_Struct_); ok { return x.Struct } return nil } func (x *DataType) GetMap() *DataType_Map { if x, ok := x.GetKind().(*DataType_Map_); ok { return x.Map } return nil } func (x *DataType) GetVariant() *DataType_Variant { if x, ok := x.GetKind().(*DataType_Variant_); ok { return x.Variant } return nil } func (x *DataType) GetUdt() *DataType_UDT { if x, ok := x.GetKind().(*DataType_Udt); ok { return x.Udt } return nil } func (x *DataType) GetUnparsed() *DataType_Unparsed { if x, ok := x.GetKind().(*DataType_Unparsed_); ok { return x.Unparsed } return nil } type isDataType_Kind interface { isDataType_Kind() } type DataType_Null struct { Null *DataType_NULL `protobuf:"bytes,1,opt,name=null,proto3,oneof"` } type DataType_Binary_ struct { Binary *DataType_Binary `protobuf:"bytes,2,opt,name=binary,proto3,oneof"` } type DataType_Boolean_ struct { Boolean *DataType_Boolean 
`protobuf:"bytes,3,opt,name=boolean,proto3,oneof"` } type DataType_Byte_ struct { // Numeric types Byte *DataType_Byte `protobuf:"bytes,4,opt,name=byte,proto3,oneof"` } type DataType_Short_ struct { Short *DataType_Short `protobuf:"bytes,5,opt,name=short,proto3,oneof"` } type DataType_Integer_ struct { Integer *DataType_Integer `protobuf:"bytes,6,opt,name=integer,proto3,oneof"` } type DataType_Long_ struct { Long *DataType_Long `protobuf:"bytes,7,opt,name=long,proto3,oneof"` } type DataType_Float_ struct { Float *DataType_Float `protobuf:"bytes,8,opt,name=float,proto3,oneof"` } type DataType_Double_ struct { Double *DataType_Double `protobuf:"bytes,9,opt,name=double,proto3,oneof"` } type DataType_Decimal_ struct { Decimal *DataType_Decimal `protobuf:"bytes,10,opt,name=decimal,proto3,oneof"` } type DataType_String_ struct { // String types String_ *DataType_String `protobuf:"bytes,11,opt,name=string,proto3,oneof"` } type DataType_Char_ struct { Char *DataType_Char `protobuf:"bytes,12,opt,name=char,proto3,oneof"` } type DataType_VarChar_ struct { VarChar *DataType_VarChar `protobuf:"bytes,13,opt,name=var_char,json=varChar,proto3,oneof"` } type DataType_Date_ struct { // Datatime types Date *DataType_Date `protobuf:"bytes,14,opt,name=date,proto3,oneof"` } type DataType_Timestamp_ struct { Timestamp *DataType_Timestamp `protobuf:"bytes,15,opt,name=timestamp,proto3,oneof"` } type DataType_TimestampNtz struct { TimestampNtz *DataType_TimestampNTZ `protobuf:"bytes,16,opt,name=timestamp_ntz,json=timestampNtz,proto3,oneof"` } type DataType_CalendarInterval_ struct { // Interval types CalendarInterval *DataType_CalendarInterval `protobuf:"bytes,17,opt,name=calendar_interval,json=calendarInterval,proto3,oneof"` } type DataType_YearMonthInterval_ struct { YearMonthInterval *DataType_YearMonthInterval `protobuf:"bytes,18,opt,name=year_month_interval,json=yearMonthInterval,proto3,oneof"` } type DataType_DayTimeInterval_ struct { DayTimeInterval *DataType_DayTimeInterval 
`protobuf:"bytes,19,opt,name=day_time_interval,json=dayTimeInterval,proto3,oneof"` } type DataType_Array_ struct { // Complex types Array *DataType_Array `protobuf:"bytes,20,opt,name=array,proto3,oneof"` } type DataType_Struct_ struct { Struct *DataType_Struct `protobuf:"bytes,21,opt,name=struct,proto3,oneof"` } type DataType_Map_ struct { Map *DataType_Map `protobuf:"bytes,22,opt,name=map,proto3,oneof"` } type DataType_Variant_ struct { Variant *DataType_Variant `protobuf:"bytes,25,opt,name=variant,proto3,oneof"` } type DataType_Udt struct { // UserDefinedType Udt *DataType_UDT `protobuf:"bytes,23,opt,name=udt,proto3,oneof"` } type DataType_Unparsed_ struct { // UnparsedDataType Unparsed *DataType_Unparsed `protobuf:"bytes,24,opt,name=unparsed,proto3,oneof"` } func (*DataType_Null) isDataType_Kind() {} func (*DataType_Binary_) isDataType_Kind() {} func (*DataType_Boolean_) isDataType_Kind() {} func (*DataType_Byte_) isDataType_Kind() {} func (*DataType_Short_) isDataType_Kind() {} func (*DataType_Integer_) isDataType_Kind() {} func (*DataType_Long_) isDataType_Kind() {} func (*DataType_Float_) isDataType_Kind() {} func (*DataType_Double_) isDataType_Kind() {} func (*DataType_Decimal_) isDataType_Kind() {} func (*DataType_String_) isDataType_Kind() {} func (*DataType_Char_) isDataType_Kind() {} func (*DataType_VarChar_) isDataType_Kind() {} func (*DataType_Date_) isDataType_Kind() {} func (*DataType_Timestamp_) isDataType_Kind() {} func (*DataType_TimestampNtz) isDataType_Kind() {} func (*DataType_CalendarInterval_) isDataType_Kind() {} func (*DataType_YearMonthInterval_) isDataType_Kind() {} func (*DataType_DayTimeInterval_) isDataType_Kind() {} func (*DataType_Array_) isDataType_Kind() {} func (*DataType_Struct_) isDataType_Kind() {} func (*DataType_Map_) isDataType_Kind() {} func (*DataType_Variant_) isDataType_Kind() {} func (*DataType_Udt) isDataType_Kind() {} func (*DataType_Unparsed_) isDataType_Kind() {} type DataType_Boolean struct { state 
protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Boolean) Reset() { *x = DataType_Boolean{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Boolean) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Boolean) ProtoMessage() {} func (x *DataType_Boolean) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Boolean.ProtoReflect.Descriptor instead. func (*DataType_Boolean) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 0} } func (x *DataType_Boolean) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Byte struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Byte) Reset() { *x = DataType_Byte{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Byte) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Byte) ProtoMessage() {} func (x *DataType_Byte) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[2] if protoimpl.UnsafeEnabled && 
x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Byte.ProtoReflect.Descriptor instead. func (*DataType_Byte) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 1} } func (x *DataType_Byte) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Short struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Short) Reset() { *x = DataType_Short{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Short) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Short) ProtoMessage() {} func (x *DataType_Short) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Short.ProtoReflect.Descriptor instead. 
func (*DataType_Short) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 2} } func (x *DataType_Short) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Integer struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Integer) Reset() { *x = DataType_Integer{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Integer) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Integer) ProtoMessage() {} func (x *DataType_Integer) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Integer.ProtoReflect.Descriptor instead. 
func (*DataType_Integer) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 3} } func (x *DataType_Integer) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Long struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Long) Reset() { *x = DataType_Long{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Long) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Long) ProtoMessage() {} func (x *DataType_Long) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Long.ProtoReflect.Descriptor instead. 
func (*DataType_Long) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 4} } func (x *DataType_Long) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Float struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Float) Reset() { *x = DataType_Float{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Float) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Float) ProtoMessage() {} func (x *DataType_Float) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Float.ProtoReflect.Descriptor instead. 
func (*DataType_Float) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 5} } func (x *DataType_Float) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Double struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Double) Reset() { *x = DataType_Double{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Double) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Double) ProtoMessage() {} func (x *DataType_Double) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Double.ProtoReflect.Descriptor instead. 
func (*DataType_Double) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 6} } func (x *DataType_Double) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_String struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` Collation string `protobuf:"bytes,2,opt,name=collation,proto3" json:"collation,omitempty"` } func (x *DataType_String) Reset() { *x = DataType_String{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_String) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_String) ProtoMessage() {} func (x *DataType_String) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_String.ProtoReflect.Descriptor instead. 
func (*DataType_String) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 7} } func (x *DataType_String) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } func (x *DataType_String) GetCollation() string { if x != nil { return x.Collation } return "" } type DataType_Binary struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Binary) Reset() { *x = DataType_Binary{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Binary) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Binary) ProtoMessage() {} func (x *DataType_Binary) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Binary.ProtoReflect.Descriptor instead. 
func (*DataType_Binary) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 8} } func (x *DataType_Binary) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_NULL struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_NULL) Reset() { *x = DataType_NULL{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_NULL) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_NULL) ProtoMessage() {} func (x *DataType_NULL) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_NULL.ProtoReflect.Descriptor instead. 
func (*DataType_NULL) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 9} } func (x *DataType_NULL) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Timestamp struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Timestamp) Reset() { *x = DataType_Timestamp{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Timestamp) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Timestamp) ProtoMessage() {} func (x *DataType_Timestamp) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Timestamp.ProtoReflect.Descriptor instead. 
func (*DataType_Timestamp) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 10} } func (x *DataType_Timestamp) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Date struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Date) Reset() { *x = DataType_Date{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Date) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Date) ProtoMessage() {} func (x *DataType_Date) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Date.ProtoReflect.Descriptor instead. 
func (*DataType_Date) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 11} } func (x *DataType_Date) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_TimestampNTZ struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_TimestampNTZ) Reset() { *x = DataType_TimestampNTZ{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_TimestampNTZ) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_TimestampNTZ) ProtoMessage() {} func (x *DataType_TimestampNTZ) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_TimestampNTZ.ProtoReflect.Descriptor instead. 
func (*DataType_TimestampNTZ) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 12} } func (x *DataType_TimestampNTZ) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_CalendarInterval struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_CalendarInterval) Reset() { *x = DataType_CalendarInterval{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_CalendarInterval) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_CalendarInterval) ProtoMessage() {} func (x *DataType_CalendarInterval) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_CalendarInterval.ProtoReflect.Descriptor instead. 
func (*DataType_CalendarInterval) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 13} } func (x *DataType_CalendarInterval) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_YearMonthInterval struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields StartField *int32 `protobuf:"varint,1,opt,name=start_field,json=startField,proto3,oneof" json:"start_field,omitempty"` EndField *int32 `protobuf:"varint,2,opt,name=end_field,json=endField,proto3,oneof" json:"end_field,omitempty"` TypeVariationReference uint32 `protobuf:"varint,3,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_YearMonthInterval) Reset() { *x = DataType_YearMonthInterval{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_YearMonthInterval) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_YearMonthInterval) ProtoMessage() {} func (x *DataType_YearMonthInterval) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_YearMonthInterval.ProtoReflect.Descriptor instead. 
func (*DataType_YearMonthInterval) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 14} } func (x *DataType_YearMonthInterval) GetStartField() int32 { if x != nil && x.StartField != nil { return *x.StartField } return 0 } func (x *DataType_YearMonthInterval) GetEndField() int32 { if x != nil && x.EndField != nil { return *x.EndField } return 0 } func (x *DataType_YearMonthInterval) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_DayTimeInterval struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields StartField *int32 `protobuf:"varint,1,opt,name=start_field,json=startField,proto3,oneof" json:"start_field,omitempty"` EndField *int32 `protobuf:"varint,2,opt,name=end_field,json=endField,proto3,oneof" json:"end_field,omitempty"` TypeVariationReference uint32 `protobuf:"varint,3,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_DayTimeInterval) Reset() { *x = DataType_DayTimeInterval{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_DayTimeInterval) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_DayTimeInterval) ProtoMessage() {} func (x *DataType_DayTimeInterval) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_DayTimeInterval.ProtoReflect.Descriptor instead. 
func (*DataType_DayTimeInterval) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 15} } func (x *DataType_DayTimeInterval) GetStartField() int32 { if x != nil && x.StartField != nil { return *x.StartField } return 0 } func (x *DataType_DayTimeInterval) GetEndField() int32 { if x != nil && x.EndField != nil { return *x.EndField } return 0 } func (x *DataType_DayTimeInterval) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } // Start compound types. type DataType_Char struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Length int32 `protobuf:"varint,1,opt,name=length,proto3" json:"length,omitempty"` TypeVariationReference uint32 `protobuf:"varint,2,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Char) Reset() { *x = DataType_Char{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Char) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Char) ProtoMessage() {} func (x *DataType_Char) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Char.ProtoReflect.Descriptor instead. 
func (*DataType_Char) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 16} } func (x *DataType_Char) GetLength() int32 { if x != nil { return x.Length } return 0 } func (x *DataType_Char) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_VarChar struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Length int32 `protobuf:"varint,1,opt,name=length,proto3" json:"length,omitempty"` TypeVariationReference uint32 `protobuf:"varint,2,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_VarChar) Reset() { *x = DataType_VarChar{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_VarChar) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_VarChar) ProtoMessage() {} func (x *DataType_VarChar) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_VarChar.ProtoReflect.Descriptor instead. 
func (*DataType_VarChar) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 17} } func (x *DataType_VarChar) GetLength() int32 { if x != nil { return x.Length } return 0 } func (x *DataType_VarChar) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Decimal struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Scale *int32 `protobuf:"varint,1,opt,name=scale,proto3,oneof" json:"scale,omitempty"` Precision *int32 `protobuf:"varint,2,opt,name=precision,proto3,oneof" json:"precision,omitempty"` TypeVariationReference uint32 `protobuf:"varint,3,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Decimal) Reset() { *x = DataType_Decimal{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Decimal) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Decimal) ProtoMessage() {} func (x *DataType_Decimal) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Decimal.ProtoReflect.Descriptor instead. 
func (*DataType_Decimal) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 18} } func (x *DataType_Decimal) GetScale() int32 { if x != nil && x.Scale != nil { return *x.Scale } return 0 } func (x *DataType_Decimal) GetPrecision() int32 { if x != nil && x.Precision != nil { return *x.Precision } return 0 } func (x *DataType_Decimal) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_StructField struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` DataType *DataType `protobuf:"bytes,2,opt,name=data_type,json=dataType,proto3" json:"data_type,omitempty"` Nullable bool `protobuf:"varint,3,opt,name=nullable,proto3" json:"nullable,omitempty"` Metadata *string `protobuf:"bytes,4,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` } func (x *DataType_StructField) Reset() { *x = DataType_StructField{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_StructField) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_StructField) ProtoMessage() {} func (x *DataType_StructField) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_StructField.ProtoReflect.Descriptor instead. 
func (*DataType_StructField) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 19} } func (x *DataType_StructField) GetName() string { if x != nil { return x.Name } return "" } func (x *DataType_StructField) GetDataType() *DataType { if x != nil { return x.DataType } return nil } func (x *DataType_StructField) GetNullable() bool { if x != nil { return x.Nullable } return false } func (x *DataType_StructField) GetMetadata() string { if x != nil && x.Metadata != nil { return *x.Metadata } return "" } type DataType_Struct struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Fields []*DataType_StructField `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` TypeVariationReference uint32 `protobuf:"varint,2,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Struct) Reset() { *x = DataType_Struct{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Struct) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Struct) ProtoMessage() {} func (x *DataType_Struct) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Struct.ProtoReflect.Descriptor instead. 
func (*DataType_Struct) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 20} } func (x *DataType_Struct) GetFields() []*DataType_StructField { if x != nil { return x.Fields } return nil } func (x *DataType_Struct) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Array struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields ElementType *DataType `protobuf:"bytes,1,opt,name=element_type,json=elementType,proto3" json:"element_type,omitempty"` ContainsNull bool `protobuf:"varint,2,opt,name=contains_null,json=containsNull,proto3" json:"contains_null,omitempty"` TypeVariationReference uint32 `protobuf:"varint,3,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Array) Reset() { *x = DataType_Array{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Array) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Array) ProtoMessage() {} func (x *DataType_Array) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Array.ProtoReflect.Descriptor instead. 
func (*DataType_Array) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 21} } func (x *DataType_Array) GetElementType() *DataType { if x != nil { return x.ElementType } return nil } func (x *DataType_Array) GetContainsNull() bool { if x != nil { return x.ContainsNull } return false } func (x *DataType_Array) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Map struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields KeyType *DataType `protobuf:"bytes,1,opt,name=key_type,json=keyType,proto3" json:"key_type,omitempty"` ValueType *DataType `protobuf:"bytes,2,opt,name=value_type,json=valueType,proto3" json:"value_type,omitempty"` ValueContainsNull bool `protobuf:"varint,3,opt,name=value_contains_null,json=valueContainsNull,proto3" json:"value_contains_null,omitempty"` TypeVariationReference uint32 `protobuf:"varint,4,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Map) Reset() { *x = DataType_Map{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Map) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Map) ProtoMessage() {} func (x *DataType_Map) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Map.ProtoReflect.Descriptor instead. 
func (*DataType_Map) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 22} } func (x *DataType_Map) GetKeyType() *DataType { if x != nil { return x.KeyType } return nil } func (x *DataType_Map) GetValueType() *DataType { if x != nil { return x.ValueType } return nil } func (x *DataType_Map) GetValueContainsNull() bool { if x != nil { return x.ValueContainsNull } return false } func (x *DataType_Map) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_Variant struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields TypeVariationReference uint32 `protobuf:"varint,1,opt,name=type_variation_reference,json=typeVariationReference,proto3" json:"type_variation_reference,omitempty"` } func (x *DataType_Variant) Reset() { *x = DataType_Variant{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Variant) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Variant) ProtoMessage() {} func (x *DataType_Variant) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Variant.ProtoReflect.Descriptor instead. 
func (*DataType_Variant) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 23} } func (x *DataType_Variant) GetTypeVariationReference() uint32 { if x != nil { return x.TypeVariationReference } return 0 } type DataType_UDT struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` // Required for Scala/Java UDT JvmClass *string `protobuf:"bytes,2,opt,name=jvm_class,json=jvmClass,proto3,oneof" json:"jvm_class,omitempty"` // Required for Python UDT PythonClass *string `protobuf:"bytes,3,opt,name=python_class,json=pythonClass,proto3,oneof" json:"python_class,omitempty"` // Required for Python UDT SerializedPythonClass *string `protobuf:"bytes,4,opt,name=serialized_python_class,json=serializedPythonClass,proto3,oneof" json:"serialized_python_class,omitempty"` // Required for Python UDT SqlType *DataType `protobuf:"bytes,5,opt,name=sql_type,json=sqlType,proto3,oneof" json:"sql_type,omitempty"` } func (x *DataType_UDT) Reset() { *x = DataType_UDT{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_UDT) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_UDT) ProtoMessage() {} func (x *DataType_UDT) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_UDT.ProtoReflect.Descriptor instead. 
func (*DataType_UDT) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 24} } func (x *DataType_UDT) GetType() string { if x != nil { return x.Type } return "" } func (x *DataType_UDT) GetJvmClass() string { if x != nil && x.JvmClass != nil { return *x.JvmClass } return "" } func (x *DataType_UDT) GetPythonClass() string { if x != nil && x.PythonClass != nil { return *x.PythonClass } return "" } func (x *DataType_UDT) GetSerializedPythonClass() string { if x != nil && x.SerializedPythonClass != nil { return *x.SerializedPythonClass } return "" } func (x *DataType_UDT) GetSqlType() *DataType { if x != nil { return x.SqlType } return nil } type DataType_Unparsed struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // (Required) The unparsed data type string DataTypeString string `protobuf:"bytes,1,opt,name=data_type_string,json=dataTypeString,proto3" json:"data_type_string,omitempty"` } func (x *DataType_Unparsed) Reset() { *x = DataType_Unparsed{} if protoimpl.UnsafeEnabled { mi := &file_spark_connect_types_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *DataType_Unparsed) String() string { return protoimpl.X.MessageStringOf(x) } func (*DataType_Unparsed) ProtoMessage() {} func (x *DataType_Unparsed) ProtoReflect() protoreflect.Message { mi := &file_spark_connect_types_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DataType_Unparsed.ProtoReflect.Descriptor instead. 
func (*DataType_Unparsed) Descriptor() ([]byte, []int) { return file_spark_connect_types_proto_rawDescGZIP(), []int{0, 25} } func (x *DataType_Unparsed) GetDataTypeString() string { if x != nil { return x.DataTypeString } return "" } var File_spark_connect_types_proto protoreflect.FileDescriptor var file_spark_connect_types_proto_rawDesc = []byte{ 0x0a, 0x19, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x22, 0xf9, 0x21, 0x0a, 0x08, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x32, 0x0a, 0x04, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x55, 0x4c, 0x4c, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x75, 0x6c, 0x6c, 0x12, 0x38, 0x0a, 0x06, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x42, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x48, 0x00, 0x52, 0x06, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x12, 0x3b, 0x0a, 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x48, 0x00, 0x52, 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x04, 0x62, 0x79, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x42, 0x79, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x62, 
0x79, 0x74, 0x65, 0x12, 0x35, 0x0a, 0x05, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x68, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x05, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x12, 0x3b, 0x0a, 0x07, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x48, 0x00, 0x52, 0x07, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x12, 0x32, 0x0a, 0x04, 0x6c, 0x6f, 0x6e, 0x67, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x4c, 0x6f, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x04, 0x6c, 0x6f, 0x6e, 0x67, 0x12, 0x35, 0x0a, 0x05, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x48, 0x00, 0x52, 0x05, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x38, 0x0a, 0x06, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x06, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x12, 0x3b, 0x0a, 0x07, 0x64, 0x65, 0x63, 0x69, 0x6d, 0x61, 0x6c, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x44, 0x65, 0x63, 0x69, 0x6d, 0x61, 
0x6c, 0x48, 0x00, 0x52, 0x07, 0x64, 0x65, 0x63, 0x69, 0x6d, 0x61, 0x6c, 0x12, 0x38, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x32, 0x0a, 0x04, 0x63, 0x68, 0x61, 0x72, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x43, 0x68, 0x61, 0x72, 0x48, 0x00, 0x52, 0x04, 0x63, 0x68, 0x61, 0x72, 0x12, 0x3c, 0x0a, 0x08, 0x76, 0x61, 0x72, 0x5f, 0x63, 0x68, 0x61, 0x72, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x56, 0x61, 0x72, 0x43, 0x68, 0x61, 0x72, 0x48, 0x00, 0x52, 0x07, 0x76, 0x61, 0x72, 0x43, 0x68, 0x61, 0x72, 0x12, 0x32, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x64, 0x61, 0x74, 0x65, 0x12, 0x41, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x4b, 0x0a, 0x0d, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6e, 0x74, 0x7a, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x70, 0x61, 0x72, 
0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4e, 0x54, 0x5a, 0x48, 0x00, 0x52, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4e, 0x74, 0x7a, 0x12, 0x57, 0x0a, 0x11, 0x63, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x10, 0x63, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x5b, 0x0a, 0x13, 0x79, 0x65, 0x61, 0x72, 0x5f, 0x6d, 0x6f, 0x6e, 0x74, 0x68, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x59, 0x65, 0x61, 0x72, 0x4d, 0x6f, 0x6e, 0x74, 0x68, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x11, 0x79, 0x65, 0x61, 0x72, 0x4d, 0x6f, 0x6e, 0x74, 0x68, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x55, 0x0a, 0x11, 0x64, 0x61, 0x79, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x44, 0x61, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x48, 0x00, 0x52, 0x0f, 0x64, 0x61, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x35, 0x0a, 0x05, 0x61, 0x72, 0x72, 0x61, 0x79, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 
0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x41, 0x72, 0x72, 0x61, 0x79, 0x48, 0x00, 0x52, 0x05, 0x61, 0x72, 0x72, 0x61, 0x79, 0x12, 0x38, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x12, 0x2f, 0x0a, 0x03, 0x6d, 0x61, 0x70, 0x18, 0x16, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x03, 0x6d, 0x61, 0x70, 0x12, 0x3b, 0x0a, 0x07, 0x76, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74, 0x48, 0x00, 0x52, 0x07, 0x76, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74, 0x12, 0x2f, 0x0a, 0x03, 0x75, 0x64, 0x74, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x44, 0x54, 0x48, 0x00, 0x52, 0x03, 0x75, 0x64, 0x74, 0x12, 0x3e, 0x0a, 0x08, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x48, 0x00, 0x52, 0x08, 0x75, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x1a, 0x43, 0x0a, 0x07, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x12, 0x38, 0x0a, 
0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x40, 0x0a, 0x04, 0x42, 0x79, 0x74, 0x65, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x41, 0x0a, 0x05, 0x53, 0x68, 0x6f, 0x72, 0x74, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x43, 0x0a, 0x07, 0x49, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x40, 0x0a, 0x04, 0x4c, 0x6f, 0x6e, 0x67, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x41, 0x0a, 0x05, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 
0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x42, 0x0a, 0x06, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x60, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6f, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x42, 0x0a, 0x06, 0x42, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x40, 0x0a, 0x04, 0x4e, 0x55, 0x4c, 0x4c, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 
0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x45, 0x0a, 0x09, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x40, 0x0a, 0x04, 0x44, 0x61, 0x74, 0x65, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x48, 0x0a, 0x0c, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4e, 0x54, 0x5a, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x4c, 0x0a, 0x10, 0x43, 0x61, 0x6c, 0x65, 0x6e, 0x64, 0x61, 0x72, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0xb3, 0x01, 0x0a, 0x11, 0x59, 0x65, 0x61, 0x72, 0x4d, 0x6f, 0x6e, 0x74, 0x68, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 
0x61, 0x6c, 0x12, 0x24, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x65, 0x6e, 0x64, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x88, 0x01, 0x01, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x65, 0x6e, 0x64, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x1a, 0xb1, 0x01, 0x0a, 0x0f, 0x44, 0x61, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x24, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x65, 0x6e, 0x64, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x88, 0x01, 0x01, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 
0x65, 0x6e, 0x64, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x1a, 0x58, 0x0a, 0x04, 0x43, 0x68, 0x61, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x5b, 0x0a, 0x07, 0x56, 0x61, 0x72, 0x43, 0x68, 0x61, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x99, 0x01, 0x0a, 0x07, 0x44, 0x65, 0x63, 0x69, 0x6d, 0x61, 0x6c, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x05, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x88, 0x01, 0x01, 0x12, 0x21, 0x0a, 0x09, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x09, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x63, 0x61, 0x6c, 
0x65, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0xa1, 0x01, 0x0a, 0x0b, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x6e, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6e, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x1f, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x7f, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x12, 0x3b, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0xa2, 0x01, 0x0a, 0x05, 0x41, 0x72, 0x72, 0x61, 0x79, 0x12, 0x3a, 0x0a, 0x0c, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 
0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0b, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0xdb, 0x01, 0x0a, 0x03, 0x4d, 0x61, 0x70, 0x12, 0x32, 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x36, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x2e, 0x0a, 0x13, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 
0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0x43, 0x0a, 0x07, 0x56, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74, 0x12, 0x38, 0x0a, 0x18, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x74, 0x79, 0x70, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x1a, 0xa1, 0x02, 0x0a, 0x03, 0x55, 0x44, 0x54, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x09, 0x6a, 0x76, 0x6d, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x6a, 0x76, 0x6d, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x88, 0x01, 0x01, 0x12, 0x26, 0x0a, 0x0c, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0b, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x88, 0x01, 0x01, 0x12, 0x3b, 0x0a, 0x17, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x15, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x88, 0x01, 0x01, 0x12, 0x37, 0x0a, 0x08, 0x73, 0x71, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x48, 0x03, 0x52, 0x07, 0x73, 0x71, 0x6c, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x6a, 0x76, 0x6d, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x42, 
0x0f, 0x0a, 0x0d, 0x5f, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x42, 0x1a, 0x0a, 0x18, 0x5f, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x73, 0x71, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x1a, 0x34, 0x0a, 0x08, 0x55, 0x6e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x64, 0x12, 0x28, 0x0a, 0x10, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x64, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x42, 0x36, 0x0a, 0x1e, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x73, 0x70, 0x61, 0x72, 0x6b, 0x2e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x12, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_spark_connect_types_proto_rawDescOnce sync.Once file_spark_connect_types_proto_rawDescData = file_spark_connect_types_proto_rawDesc ) func file_spark_connect_types_proto_rawDescGZIP() []byte { file_spark_connect_types_proto_rawDescOnce.Do(func() { file_spark_connect_types_proto_rawDescData = protoimpl.X.CompressGZIP(file_spark_connect_types_proto_rawDescData) }) return file_spark_connect_types_proto_rawDescData } var file_spark_connect_types_proto_msgTypes = make([]protoimpl.MessageInfo, 27) var file_spark_connect_types_proto_goTypes = []interface{}{ (*DataType)(nil), // 0: spark.connect.DataType (*DataType_Boolean)(nil), // 1: spark.connect.DataType.Boolean (*DataType_Byte)(nil), // 2: spark.connect.DataType.Byte (*DataType_Short)(nil), // 3: spark.connect.DataType.Short (*DataType_Integer)(nil), // 4: spark.connect.DataType.Integer (*DataType_Long)(nil), // 5: 
spark.connect.DataType.Long (*DataType_Float)(nil), // 6: spark.connect.DataType.Float (*DataType_Double)(nil), // 7: spark.connect.DataType.Double (*DataType_String)(nil), // 8: spark.connect.DataType.String (*DataType_Binary)(nil), // 9: spark.connect.DataType.Binary (*DataType_NULL)(nil), // 10: spark.connect.DataType.NULL (*DataType_Timestamp)(nil), // 11: spark.connect.DataType.Timestamp (*DataType_Date)(nil), // 12: spark.connect.DataType.Date (*DataType_TimestampNTZ)(nil), // 13: spark.connect.DataType.TimestampNTZ (*DataType_CalendarInterval)(nil), // 14: spark.connect.DataType.CalendarInterval (*DataType_YearMonthInterval)(nil), // 15: spark.connect.DataType.YearMonthInterval (*DataType_DayTimeInterval)(nil), // 16: spark.connect.DataType.DayTimeInterval (*DataType_Char)(nil), // 17: spark.connect.DataType.Char (*DataType_VarChar)(nil), // 18: spark.connect.DataType.VarChar (*DataType_Decimal)(nil), // 19: spark.connect.DataType.Decimal (*DataType_StructField)(nil), // 20: spark.connect.DataType.StructField (*DataType_Struct)(nil), // 21: spark.connect.DataType.Struct (*DataType_Array)(nil), // 22: spark.connect.DataType.Array (*DataType_Map)(nil), // 23: spark.connect.DataType.Map (*DataType_Variant)(nil), // 24: spark.connect.DataType.Variant (*DataType_UDT)(nil), // 25: spark.connect.DataType.UDT (*DataType_Unparsed)(nil), // 26: spark.connect.DataType.Unparsed } var file_spark_connect_types_proto_depIdxs = []int32{ 10, // 0: spark.connect.DataType.null:type_name -> spark.connect.DataType.NULL 9, // 1: spark.connect.DataType.binary:type_name -> spark.connect.DataType.Binary 1, // 2: spark.connect.DataType.boolean:type_name -> spark.connect.DataType.Boolean 2, // 3: spark.connect.DataType.byte:type_name -> spark.connect.DataType.Byte 3, // 4: spark.connect.DataType.short:type_name -> spark.connect.DataType.Short 4, // 5: spark.connect.DataType.integer:type_name -> spark.connect.DataType.Integer 5, // 6: spark.connect.DataType.long:type_name -> 
spark.connect.DataType.Long 6, // 7: spark.connect.DataType.float:type_name -> spark.connect.DataType.Float 7, // 8: spark.connect.DataType.double:type_name -> spark.connect.DataType.Double 19, // 9: spark.connect.DataType.decimal:type_name -> spark.connect.DataType.Decimal 8, // 10: spark.connect.DataType.string:type_name -> spark.connect.DataType.String 17, // 11: spark.connect.DataType.char:type_name -> spark.connect.DataType.Char 18, // 12: spark.connect.DataType.var_char:type_name -> spark.connect.DataType.VarChar 12, // 13: spark.connect.DataType.date:type_name -> spark.connect.DataType.Date 11, // 14: spark.connect.DataType.timestamp:type_name -> spark.connect.DataType.Timestamp 13, // 15: spark.connect.DataType.timestamp_ntz:type_name -> spark.connect.DataType.TimestampNTZ 14, // 16: spark.connect.DataType.calendar_interval:type_name -> spark.connect.DataType.CalendarInterval 15, // 17: spark.connect.DataType.year_month_interval:type_name -> spark.connect.DataType.YearMonthInterval 16, // 18: spark.connect.DataType.day_time_interval:type_name -> spark.connect.DataType.DayTimeInterval 22, // 19: spark.connect.DataType.array:type_name -> spark.connect.DataType.Array 21, // 20: spark.connect.DataType.struct:type_name -> spark.connect.DataType.Struct 23, // 21: spark.connect.DataType.map:type_name -> spark.connect.DataType.Map 24, // 22: spark.connect.DataType.variant:type_name -> spark.connect.DataType.Variant 25, // 23: spark.connect.DataType.udt:type_name -> spark.connect.DataType.UDT 26, // 24: spark.connect.DataType.unparsed:type_name -> spark.connect.DataType.Unparsed 0, // 25: spark.connect.DataType.StructField.data_type:type_name -> spark.connect.DataType 20, // 26: spark.connect.DataType.Struct.fields:type_name -> spark.connect.DataType.StructField 0, // 27: spark.connect.DataType.Array.element_type:type_name -> spark.connect.DataType 0, // 28: spark.connect.DataType.Map.key_type:type_name -> spark.connect.DataType 0, // 29: 
spark.connect.DataType.Map.value_type:type_name -> spark.connect.DataType 0, // 30: spark.connect.DataType.UDT.sql_type:type_name -> spark.connect.DataType 31, // [31:31] is the sub-list for method output_type 31, // [31:31] is the sub-list for method input_type 31, // [31:31] is the sub-list for extension type_name 31, // [31:31] is the sub-list for extension extendee 0, // [0:31] is the sub-list for field type_name } func init() { file_spark_connect_types_proto_init() } func file_spark_connect_types_proto_init() { if File_spark_connect_types_proto != nil { return } if !protoimpl.UnsafeEnabled { file_spark_connect_types_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Boolean); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Byte); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Short); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Integer); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Long); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return 
&v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Float); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Double); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_String); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Binary); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_NULL); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Timestamp); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Date); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_TimestampNTZ); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_types_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_CalendarInterval); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_YearMonthInterval); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_DayTimeInterval); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Char); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_VarChar); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Decimal); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_StructField); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Struct); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } 
file_spark_connect_types_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Array); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Map); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Variant); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_UDT); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_spark_connect_types_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DataType_Unparsed); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } file_spark_connect_types_proto_msgTypes[0].OneofWrappers = []interface{}{ (*DataType_Null)(nil), (*DataType_Binary_)(nil), (*DataType_Boolean_)(nil), (*DataType_Byte_)(nil), (*DataType_Short_)(nil), (*DataType_Integer_)(nil), (*DataType_Long_)(nil), (*DataType_Float_)(nil), (*DataType_Double_)(nil), (*DataType_Decimal_)(nil), (*DataType_String_)(nil), (*DataType_Char_)(nil), (*DataType_VarChar_)(nil), (*DataType_Date_)(nil), (*DataType_Timestamp_)(nil), (*DataType_TimestampNtz)(nil), (*DataType_CalendarInterval_)(nil), (*DataType_YearMonthInterval_)(nil), (*DataType_DayTimeInterval_)(nil), (*DataType_Array_)(nil), (*DataType_Struct_)(nil), (*DataType_Map_)(nil), (*DataType_Variant_)(nil), (*DataType_Udt)(nil), (*DataType_Unparsed_)(nil), } 
file_spark_connect_types_proto_msgTypes[15].OneofWrappers = []interface{}{} file_spark_connect_types_proto_msgTypes[16].OneofWrappers = []interface{}{} file_spark_connect_types_proto_msgTypes[19].OneofWrappers = []interface{}{} file_spark_connect_types_proto_msgTypes[20].OneofWrappers = []interface{}{} file_spark_connect_types_proto_msgTypes[25].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_spark_connect_types_proto_rawDesc, NumEnums: 0, NumMessages: 27, NumExtensions: 0, NumServices: 0, }, GoTypes: file_spark_connect_types_proto_goTypes, DependencyIndexes: file_spark_connect_types_proto_depIdxs, MessageInfos: file_spark_connect_types_proto_msgTypes, }.Build() File_spark_connect_types_proto = out.File file_spark_connect_types_proto_rawDesc = nil file_spark_connect_types_proto_goTypes = nil file_spark_connect_types_proto_depIdxs = nil } ================================================ FILE: internal/tests/integration/dataframe_test.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package integration import ( "context" "fmt" "os" "testing" "github.com/apache/spark-connect-go/spark/sql/utils" "github.com/apache/spark-connect-go/spark/sql/types" "github.com/apache/spark-connect-go/spark/sql/column" "github.com/apache/spark-connect-go/spark/sql/functions" "github.com/apache/spark-connect-go/spark/sql" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestDataFrame_Select(t *testing.T) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) assert.NoError(t, err) df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) df, err = df.Select(ctx, functions.StringLit("1"), functions.StringLit("2")) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 100, len(res)) rowZero := res[0] assert.Equal(t, 2, rowZero.Len()) df, err = spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) _, err = df.Select(ctx, column.OfDF(df, "id2")) assert.Error(t, err) } func TestDataFrame_SelectExpr(t *testing.T) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) assert.NoError(t, err) df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) df, err = df.SelectExpr(ctx, "1", "2", "spark_partition_id()") assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 100, len(res)) row_zero := res[0] assert.Equal(t, 3, row_zero.Len()) } func TestDataFrame_Alias(t *testing.T) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) assert.NoError(t, err) df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) df = df.Alias(ctx, "df") res, er := df.Collect(ctx) assert.NoError(t, er) assert.Equal(t, 100, len(res)) } func TestDataFrame_CrossJoin(t *testing.T) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) 
assert.NoError(t, err) df1, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df2, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df := df1.CrossJoin(ctx, df2) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 100, len(res)) assert.Equal(t, 2, res[0].Len()) } func TestDataFrame_GroupBy(t *testing.T) { ctx, spark := connect() src, _ := spark.Sql(ctx, "select 'a' as a, 1 as b from range(10)") df, _ := src.GroupBy(functions.Col("a")).Agg(ctx, functions.Sum(functions.Col("b"))) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(res)) df, err = src.GroupBy(functions.Col("a")).Count(ctx) assert.NoError(t, err) res, err = df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(res)) assert.Equal(t, "a", res[0].At(0)) assert.Equal(t, int64(10), res[0].At(1)) } func TestDataFrame_Count(t *testing.T) { ctx, spark := connect() src, _ := spark.Sql(ctx, "select 'a' as a, 1 as b from range(10)") res, err := src.Count(ctx) assert.NoError(t, err) assert.Equal(t, int64(10), res) } func TestDataFrame_OfDFWithRegex(t *testing.T) { ctx, spark := connect() src, _ := spark.Sql(ctx, "select 'a' as myColumnName, 1 as b from range(10)") col := column.OfDFWithRegex(src, "`.*(Column).*`") res, err := src.Select(ctx, col) assert.NoError(t, err) schema, err := res.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(schema.Fields)) } func TestSparkSession_CreateDataFrame(t *testing.T) { ctx, spark := connect() tbl := createArrowTable() defer tbl.Release() df, err := spark.CreateDataFrameFromArrow(ctx, tbl) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 3, len(res)) } func TestSparkSession_CreateDataFrameWithSchema(t *testing.T) { ctx, spark := connect() data := [][]any{ {1, 1.1, "a"}, {2, 2.2, "b"}, } schema := types.StructOf( types.NewStructField("f1-i32", types.INTEGER), types.NewStructField("f2-f64", types.DOUBLE), types.NewStructField("f3-string", 
types.STRING)) df, err := spark.CreateDataFrame(ctx, data, schema) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 2, len(res)) assert.Equal(t, 3, res[0].Len()) assert.Equal(t, int32(1), res[0].At(0)) assert.Equal(t, 1.1, res[0].At(1)) assert.Equal(t, "a", res[0].At(2)) } func TestDataFrame_Corr(t *testing.T) { ctx, spark := connect() data := [][]any{ {1, 12}, {10, 1}, {19, 8}, } schema := types.StructOf( types.NewStructField("c1", types.INTEGER), types.NewStructField("c2", types.INTEGER), ) df, err := spark.CreateDataFrame(ctx, data, schema) assert.NoError(t, err) res, err := df.Corr(ctx, "c1", "c2") assert.NoError(t, err) assert.Equal(t, -0.3592106040535498, res) res2, err := df.Stat().Corr(ctx, "c1", "c2") assert.NoError(t, err) assert.Equal(t, res, res2) } func TestDataFrame_Cov(t *testing.T) { ctx, spark := connect() data := [][]any{ {1, 12}, {10, 1}, {19, 8}, } schema := types.StructOf( types.NewStructField("c1", types.INTEGER), types.NewStructField("c2", types.INTEGER), ) df, err := spark.CreateDataFrame(ctx, data, schema) assert.NoError(t, err) res, err := df.Cov(ctx, "c1", "c2") assert.NoError(t, err) assert.Equal(t, -18.0, res) res2, err := df.Stat().Cov(ctx, "c1", "c2") assert.NoError(t, err) assert.Equal(t, res, res2) } func TestDataFrame_WithColumn(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df, err = df.WithColumn(ctx, "newCol", functions.IntLit(1)) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 10, len(res)) // Check the values of the new column for _, row := range res { assert.Equal(t, 2, row.Len()) assert.Equal(t, int64(1), row.At(1)) } } func TestDataFrame_WithColumns(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df, err = df.WithColumns(ctx, column.WithAlias("newCol1", functions.IntLit(1)), column.WithAlias("newCol2", 
functions.IntLit(2))) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 10, len(res)) // Check the values of the new columns for _, row := range res { assert.Equal(t, 3, row.Len()) assert.Equal(t, int64(1), row.At(1)) assert.Equal(t, int64(2), row.At(2)) } } func TestDataFrame_WithMetadata(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df, err = df.WithMetadata(ctx, map[string]string{"id": "value"}) assert.NoError(t, err) _, err = df.Schema(ctx) assert.Error(t, err, "Expecting malformed metadata") df, err = spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df, err = df.WithMetadata(ctx, map[string]string{"id": "{\"kk\": \"value\"}"}) assert.NoError(t, err) schema, err := df.Schema(ctx) assert.NoError(t, err) fields := schema.Fields[0] assert.Equal(t, "id", fields.Name) assert.Equal(t, "{\"kk\":\"value\"}", *fields.Metadata) } func TestDataFrame_WithColumnRenamed(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df, err = df.WithColumnRenamed(ctx, "id", "newId") assert.NoError(t, err) // Check the schema of the new dataframe schema, err := df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(schema.Fields)) assert.Equal(t, "newId", schema.Fields[0].Name) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 10, len(res)) // Check the values of the new column for i, row := range res { assert.Equal(t, 1, row.Len()) assert.Equal(t, int64(i), row.At(0)) } // Test that renaming a non-existing column does not change anything. df, _ = spark.Sql(ctx, "select * from range(10)") df, err = df.WithColumnRenamed(ctx, "nonExisting", "newId") assert.NoError(t, err) schema, err = df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(schema.Fields)) assert.Equal(t, "id", schema.Fields[0].Name) // Test that single column renaming works as well. 
df, _ = spark.Sql(ctx, "select * from range(10)") df, err = df.WithColumnRenamed(ctx, "id", "newId") assert.NoError(t, err) schema, err = df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(schema.Fields)) assert.Equal(t, "newId", schema.Fields[0].Name) } func TestDataFrame_WithWatermark(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select current_timestamp() as this_time from range(10)") assert.NoError(t, err) df, err = df.WithWatermark(ctx, "this_time", "1 minute") assert.NoError(t, err) schema, err := df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(schema.Fields)) assert.Equal(t, "this_time", schema.Fields[0].Name) } func TestDataFrame_Where(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df, err = df.Where(ctx, "id = 0") assert.NoError(t, err) res, err := df.Count(ctx) assert.NoError(t, err) assert.Equal(t, int64(1), res) } func TestDataFrame_Drop(t *testing.T) { ctx, spark := connect() src, err := spark.Sql(ctx, "select 1 as id, 2 as other from range(10)") assert.NoError(t, err) df, err := src.DropByName(ctx, "id") assert.NoError(t, err) schema, err := df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(schema.Fields)) assert.Equal(t, "other", schema.Fields[0].Name) df, err = src.Drop(ctx, column.OfDF(src, "other")) assert.NoError(t, err) schema, err = df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(schema.Fields)) assert.Equal(t, "id", schema.Fields[0].Name) } func TestDataFrame_DropDuplicates(t *testing.T) { ctx, spark := connect() src, err := spark.Sql(ctx, "select 1 as id, 2 as other from range(10)") assert.NoError(t, err) df, err := src.DropDuplicates(ctx) assert.NoError(t, err) res, err := df.Count(ctx) assert.NoError(t, err) assert.Equal(t, int64(1), res) // Create a dataframe with duplicate rows data := [][]any{ {"Alice", 5, 80}, {"Alice", 5, 80}, {"Alice", 10, 80}, } schema := types.StructOf( types.NewStructField("name", 
types.STRING), types.NewStructField("age", types.INTEGER), types.NewStructField("height", types.INTEGER), ) df, err = spark.CreateDataFrame(ctx, data, schema) assert.NoError(t, err) // Check the schema of the dataframe schema, err = df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, 3, len(schema.Fields)) assert.Equal(t, "name", schema.Fields[0].Name) assert.Equal(t, "age", schema.Fields[1].Name) assert.Equal(t, "height", schema.Fields[2].Name) df, err = df.DropDuplicates(ctx) assert.NoError(t, err) res, err = df.Count(ctx) assert.NoError(t, err) assert.Equal(t, int64(2), res) // Check the two ages rows, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 2, len(rows)) assert.Equal(t, "Alice", rows[0].At(0)) assert.Equal(t, int32(5), rows[0].At(1)) assert.Equal(t, "Alice", rows[1].At(0)) assert.Equal(t, int32(10), rows[1].At(1)) // Test drop duplicates with column names df, err = df.DropDuplicates(ctx, "name") assert.NoError(t, err) res, err = df.Count(ctx) assert.NoError(t, err) assert.Equal(t, int64(1), res) } func TestDataFrame_Explain(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) res, err := df.Explain(ctx, utils.ExplainModeSimple) assert.NoError(t, err) assert.Contains(t, res, "Physical Plan") res, err = df.Explain(ctx, utils.ExplainModeExtended) assert.NoError(t, err) assert.Contains(t, res, "Physical Plan") res, err = df.Explain(ctx, utils.ExplainModeCodegen) assert.NoError(t, err) assert.Contains(t, res, "WholeStageCodegen") res, err = df.Explain(ctx, utils.ExplainModeCost) assert.NoError(t, err) assert.Contains(t, res, "Physical Plan") res, err = df.Explain(ctx, utils.ExplainModeFormatted) assert.NoError(t, err) assert.Contains(t, res, "Physical Plan") } func TestDataFrame_CachingAndPersistence(t *testing.T) { ctx, spark := connect() levels := []utils.StorageLevel{ utils.StorageLevelDiskOnly, utils.StorageLevelDiskOnly2, utils.StorageLevelDiskOnly3, 
utils.StorageLevelMemoryAndDisk, utils.StorageLevelMemoryAndDisk2, utils.StorageLevelMemoryOnly, utils.StorageLevelMemoryOnly2, utils.StorageLevelMemoyAndDiskDeser, utils.StorageLevelOffHeap, } for _, lvl := range levels { df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) err = df.Persist(ctx, lvl) assert.NoError(t, err) l, err := df.GetStorageLevel(ctx) assert.NoError(t, err) assert.Contains(t, []utils.StorageLevel{lvl, utils.StorageLevelMemoryOnly}, *l) err = df.Unpersist(ctx) assert.NoError(t, err) } df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) err = df.Cache(ctx) assert.NoError(t, err) l, err := df.GetStorageLevel(ctx) assert.NoError(t, err) assert.Equal(t, utils.StorageLevelMemoryOnly, *l, "%v != %v", utils.StorageLevelMemoryOnly, *l) } func TestDataFrame_SetOps(t *testing.T) { ctx, spark := connect() df1, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df2, err := spark.Sql(ctx, "select * from range(5)") assert.NoError(t, err) df := df1.Union(ctx, df2) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 15, len(res)) df = df1.Intersect(ctx, df2) res, err = df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 5, len(res)) df = df1.ExceptAll(ctx, df2) res, err = df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 5, len(res)) } func TestDataFrame_ToArrow(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) tbl, err := df.ToArrow(ctx) assert.NoError(t, err) assert.NotNil(t, tbl) } func TestDataFrame_LimitVersions(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df = df.Limit(ctx, int32(5)) assert.NoError(t, err) rows, err := df.Collect(ctx) assert.NoError(t, err) assert.Len(t, rows, 5) rows, err = df.Tail(ctx, int32(3)) assert.NoError(t, err) assert.Len(t, rows, 3) rows, err = df.Head(ctx, int32(3)) assert.NoError(t, err) assert.Len(t, 
rows, 3) rows, err = df.Take(ctx, int32(3)) assert.NoError(t, err) assert.Len(t, rows, 3) } func TestDataFrame_Sort(t *testing.T) { ctx, spark := connect() src, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df, err := src.Sort(ctx, functions.Col("id").Desc()) assert.NoError(t, err) res, err := df.Head(ctx, 1) assert.NoError(t, err) assert.Equal(t, int64(9), res[0].At(0)) df, err = src.Sort(ctx, functions.Col("id").Asc()) assert.NoError(t, err) res, err = df.Head(ctx, 1) assert.NoError(t, err) assert.Equal(t, int64(0), res[0].At(0)) } func TestDataFrame_Join(t *testing.T) { ctx, spark := connect() df1, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df2, err := spark.Sql(ctx, "select * from range(5)") assert.NoError(t, err) df, err := df1.Join(ctx, df2, column.OfDF(df1, "id").Eq(column.OfDF(df2, "id")), utils.JoinTypeInner) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 5, len(res)) } func TestDataFrame_RandomSplits(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(1000)") assert.NoError(t, err) dfs, err := df.RandomSplit(ctx, []float64{0.3, 0.7}) assert.NoError(t, err) assert.Len(t, dfs, 2) c1, err := dfs[0].Count(ctx) assert.NoError(t, err) c2, err := dfs[1].Count(ctx) assert.NoError(t, err) assert.Less(t, c1, c2) } func TestDataFrame_Describe(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) res, err := df.Describe(ctx, "id").Collect(ctx) assert.NoError(t, err) assert.Len(t, res, 5) } func TestDataFrame_Summary(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select id, 'a' as col, 2 as other from range(10)") assert.NoError(t, err) res, err := df.Summary(ctx, "count", "stddev").Collect(ctx) assert.NoError(t, err) assert.Len(t, res, 2) assert.Equal(t, "count", res[0].At(0)) assert.Equal(t, 4, res[0].Len()) } func TestDataFrame_Pivot(t *testing.T) { ctx, 
spark := connect() data := [][]any{ {"dotNET", 2012, 10000}, {"Java", 2012, 20000}, {"dotNET", 2012, 5000}, {"dotNET", 2013, 48000}, {"Java", 2013, 30000}, } schema := types.StructOf( types.NewStructField("course", types.STRING), types.NewStructField("year", types.INTEGER), types.NewStructField("earnings", types.INTEGER)) df, err := spark.CreateDataFrame(ctx, data, schema) assert.NoError(t, err) gd := df.GroupBy(functions.Col("year")) gd, err = gd.Pivot(ctx, "course", []types.LiteralType{types.String("Java"), types.String("dotNET")}) assert.NoError(t, err) df, err = gd.Sum(ctx, "earnings") assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Len(t, res, 2) } func TestDataFrame_Offset(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df = df.Offset(ctx, int32(5)) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Len(t, res, 5) } func TestDataFrame_IsEmpty(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) empty, err := df.IsEmpty(ctx) assert.NoError(t, err) assert.False(t, empty) df, err = spark.Sql(ctx, "select * from range(0)") assert.NoError(t, err) empty, err = df.IsEmpty(ctx) assert.NoError(t, err) assert.True(t, empty) } func TestDataFrame_First(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) row, err := df.First(ctx) assert.NoError(t, err) assert.Equal(t, int64(0), row.At(0)) } func TestDataFrame_Distinct(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df = df.Distinct(ctx) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Len(t, res, 10) } func TestDataFrame_CrossTab(t *testing.T) { ctx, spark := connect() data := [][]any{{1, 11}, {1, 11}, {3, 10}, {4, 8}, {4, 8}} schema := types.StructOf( 
types.NewStructField("c1", types.INTEGER), types.NewStructField("c2", types.INTEGER), ) df, err := spark.CreateDataFrame(ctx, data, schema) assert.NoError(t, err) df = df.CrossTab(ctx, "c1", "c2") df, err = df.Sort(ctx, column.OfDF(df, "c1_c2").Asc()) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Len(t, res, 3) assert.Equal(t, "1", res[0].At(0)) assert.Equal(t, int64(0), res[0].At(1)) assert.Equal(t, int64(2), res[0].At(2)) assert.Equal(t, int64(0), res[0].At(3)) df, err = spark.CreateDataFrame(ctx, data, schema) assert.NoError(t, err) df = df.Stat().CrossTab(ctx, "c1", "c2") df, err = df.Sort(ctx, column.OfDF(df, "c1_c2").Asc()) assert.NoError(t, err) res2, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, res, res2) } func TestDataFrame_SameSemantics(t *testing.T) { ctx, spark := connect() df1, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) df2, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) res, _ := df1.SameSemantics(ctx, df2) assert.True(t, res) } func TestDataFrame_SemanticHash(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(10)") assert.NoError(t, err) hash, err := df.SemanticHash(ctx) assert.NoError(t, err) assert.NotEmpty(t, hash) } func TestDataFrame_FreqItems(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select id % 4 as id from range(100)") assert.NoError(t, err) res, err := df.FreqItems(ctx, "id").Collect(ctx) assert.NoErrorf(t, err, "%+v", err) assert.Len(t, res, 1) res2, err := df.Stat().FreqItems(ctx, "id").Collect(ctx) assert.NoError(t, err) assert.Equal(t, res, res2) } func TestDataFrame_Config_GetAll(t *testing.T) { ctx, spark := connect() result, err := spark.Config().GetAll(ctx) assert.NoError(t, err) assert.Equal(t, "driver", result["spark.executor.id"]) } func TestDataFrame_Config_Get(t *testing.T) { ctx, spark := connect() result, err := spark.Config().Get(ctx, "spark.executor.id") 
assert.NoError(t, err) assert.Equal(t, "driver", result) } func TestDataFrame_Config_GetWithDefault(t *testing.T) { ctx, spark := connect() result, err := spark.Config().GetWithDefault(ctx, "spark.whatever", "whatever_not_set") assert.NoError(t, err) assert.Equal(t, "whatever_not_set", result) } func TestDataFrame_Config_Set(t *testing.T) { ctx, spark := connect() err := spark.Config().Set(ctx, "spark.whatever", "whatever_set") assert.NoError(t, err) } func TestDataFrame_Config_IsModifiable(t *testing.T) { ctx, spark := connect() result, err := spark.Config().IsModifiable(ctx, "spark.executor.id") assert.NoError(t, err) assert.Equal(t, false, result) } func TestDataFrame_Config_Unset(t *testing.T) { ctx, spark := connect() err := spark.Config().Set(ctx, "spark.whatever", "whatever_set") assert.NoError(t, err) err = spark.Config().Unset(ctx, "spark.whatever") assert.NoError(t, err) } func TestDataFrame_Config_e2e_test(t *testing.T) { ctx, spark := connect() // add keys that we know is "modifiable" key := "spark.sql.ansi.enabled" result, err := spark.Config().IsModifiable(ctx, key) assert.NoError(t, err) assert.Equal(t, true, result) _, err = spark.Config().Get(ctx, key) assert.NoError(t, err) err = spark.Config().Set(ctx, "spark.sql.ansi.enabled", "true") assert.NoError(t, err) m, err := spark.Config().Get(ctx, "spark.sql.ansi.enabled") assert.NoError(t, err) assert.Equal(t, "true", m) } func TestDataFrame_WithOption(t *testing.T) { ctx, spark := connect() file, err := os.CreateTemp("", "example") defer os.Remove(file.Name()) assert.NoError(t, err) defer file.Close() _, err = file.WriteString("id#name,name\n") assert.NoError(t, err) for i := 0; i < 10; i++ { _, err = fmt.Fprintf(file, "%d#alice,alice\n", i) assert.NoError(t, err) } df, err := spark.Read().Format("csv"). Option("header", "true"). Option("quote", "\""). Option("sep", "#"). Option("escapeQuotes", "true"). // Option("skipLines", "5"). //TODO: this needs more insight Option("inferSchema", "false"). 
Load(file.Name()) assert.NoError(t, err) c, err := df.Count(ctx) assert.NoError(t, err) assert.Equal(t, int64(10), c) } func TestDataFrame_Sample(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) testCases := []struct { name string fraction float64 }{ { name: "Default behavior", fraction: 0.1, }, { name: "Large fraction", fraction: 0.9, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { sampledDF, err := df.Sample(ctx, tc.fraction) assert.NoError(t, err) count, err := sampledDF.Count(ctx) assert.NoError(t, err) expectedSize := int(100 * tc.fraction) assert.InDelta(t, expectedSize, count, float64(expectedSize), 10) rows, err := sampledDF.Collect(ctx) assert.NoError(t, err) // If sampling without replacement, check for duplicates seen := make(map[int64]bool) for _, row := range rows { value := row.At(0).(int64) if seen[value] { t.Fatal("Found duplicate value when sampling without replacement") } seen[value] = true } }) } } func TestDataFrame_SampleWithReplacement(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) testCases := []struct { name string withReplacement bool fraction float64 }{ { name: "With replacement", withReplacement: true, fraction: 0.1, }, { name: "Without replacement", withReplacement: false, fraction: 0.1, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { sampledDF, err := df.SampleWithReplacement(ctx, tc.withReplacement, tc.fraction) assert.NoError(t, err) count, err := sampledDF.Count(ctx) assert.NoError(t, err) expectedSize := int(100 * tc.fraction) assert.InDelta(t, expectedSize, count, float64(expectedSize), 10) rows, err := sampledDF.Collect(ctx) assert.NoError(t, err) // If sampling without replacement, check for duplicates if tc.withReplacement == false { seen := make(map[int64]bool) for _, row := range rows { value := row.At(0).(int64) if seen[value] { t.Fatal("Found 
duplicate value when sampling without replacement") } seen[value] = true } } }) } } func TestDataFrame_SampleSeed(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) fraction := 0.1 seed := int64(17) sampledDF, err := df.SampleWithSeed(ctx, fraction, seed) assert.NoError(t, err) count, err := sampledDF.Count(ctx) assert.NoError(t, err) expectedSize := int(100 * fraction) assert.InDelta(t, expectedSize, count, float64(expectedSize), 10) rows, err := sampledDF.Collect(ctx) assert.NoError(t, err) // If sampling without replacement, check for duplicates seen := make(map[int64]bool) for _, row := range rows { value := row.At(0).(int64) if seen[value] { t.Fatal("Found duplicate value when sampling without replacement") } seen[value] = true } // same seed should return same output sampledDFRepeat, err := df.SampleWithSeed(ctx, fraction, seed) assert.NoError(t, err) count2, err := sampledDFRepeat.Count(ctx) assert.NoError(t, err) assert.Equal(t, count, count2) rows2, err := sampledDFRepeat.Collect(ctx) assert.NoError(t, err) assert.Equal(t, rows, rows2) } func TestDataFrame_SampleWithReplacementSeed(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) fraction := 0.1 seed := int64(17) sampledDF, err := df.SampleWithReplacementAndSeed(ctx, true, fraction, seed) assert.NoError(t, err) count, err := sampledDF.Count(ctx) assert.NoError(t, err) expectedSize := int(100 * fraction) assert.InDelta(t, expectedSize, count, float64(expectedSize), 10) rows, err := sampledDF.Collect(ctx) assert.NoError(t, err) // same seed should return same output sampledDFRepeat, err := df.SampleWithReplacementAndSeed(ctx, true, fraction, seed) assert.NoError(t, err) count2, err := sampledDFRepeat.Count(ctx) assert.NoError(t, err) assert.Equal(t, count, count2) rows2, err := sampledDFRepeat.Collect(ctx) assert.NoError(t, err) assert.Equal(t, rows, rows2) } func 
TestDataFrame_Unpivot(t *testing.T) {
	ctx, spark := connect()
	data := [][]any{{1, 11, 1.1}, {2, 12, 1.2}}
	schema := types.StructOf(
		types.NewStructField("id", types.INTEGER),
		types.NewStructField("int", types.INTEGER),
		types.NewStructField("double", types.DOUBLE),
	)
	df, err := spark.CreateDataFrame(ctx, data, schema)
	assert.NoError(t, err)
	// Unpivoting 2 value columns over 2 rows yields 2 x 2 = 4 rows.
	udf, err := df.Unpivot(ctx, []column.Convertible{functions.Col("id")},
		[]column.Convertible{functions.Col("int"), functions.Col("double")}, "type", "value")
	assert.NoError(t, err)
	cnt, err := udf.Count(ctx)
	assert.NoError(t, err)
	assert.Equal(t, int64(4), cnt)
}

// TestDataFrame_Replace checks value replacement across all columns and
// replacement with a typed nil literal.
func TestDataFrame_Replace(t *testing.T) {
	ctx, spark := connect()
	data := [][]any{
		{10, 80, "Alice"},
		{5, nil, "Bob"},
		{nil, 10, "Tom"},
		{nil, nil, nil},
	}
	schema := types.StructOf(
		types.NewStructField("age", types.INTEGER),
		types.NewStructField("height", types.INTEGER),
		types.NewStructField("name", types.STRING),
	)
	df, err := spark.CreateDataFrame(ctx, data, schema)
	assert.NoError(t, err)
	res, err := df.Replace(ctx,
		[]types.PrimitiveTypeLiteral{types.Int32(10)},
		[]types.PrimitiveTypeLiteral{types.Int32(20)},
	)
	assert.NoError(t, err)
	cnt, err := res.Count(ctx)
	assert.NoError(t, err)
	assert.Equal(t, int64(4), cnt)
	rows, err := res.Collect(ctx)
	assert.NoError(t, err)
	// Replacement applies to every column: age in row 0 and height in row 2.
	assert.Equal(t, int32(20), rows[0].At(0))
	assert.Equal(t, int32(20), rows[2].At(1))
	// Replacing with a typed nil literal must null out the matched values.
	res, err = df.Replace(ctx,
		[]types.PrimitiveTypeLiteral{types.Int32(10)},
		[]types.PrimitiveTypeLiteral{types.Int32Nil},
	)
	assert.NoError(t, err)
	rows, err = res.Collect(ctx)
	assert.NoError(t, err)
	assert.Nil(t, rows[0].At(0))
}

// TestDataFrame_ReplaceWithColumn checks that replacement can be restricted
// to a subset of columns.
func TestDataFrame_ReplaceWithColumn(t *testing.T) {
	ctx, spark := connect()
	data := [][]any{
		{10, 80, "Alice"},
		{5, nil, "Bob"},
		{nil, 10, "Tom"},
		{nil, nil, nil},
	}
	schema := types.StructOf(
		types.NewStructField("age", types.INTEGER),
		types.NewStructField("height", types.INTEGER),
		types.NewStructField("name", types.STRING),
	)
	df, err := spark.CreateDataFrame(ctx, data, schema)
	assert.NoError(t, err)
	res, err := df.Replace(ctx,
		[]types.PrimitiveTypeLiteral{types.Int32(10)},
		[]types.PrimitiveTypeLiteral{types.Int32(20)}, "age")
	assert.NoError(t, err)
	rows, err := res.Collect(ctx)
	assert.NoError(t, err)
	// Should only replace the age column but not the height column
	assert.Equal(t, int32(20), rows[0].At(0))
	assert.Equal(t, int32(10), rows[2].At(1))
}

// TestDataFrame_FillNa exercises null filling for all columns, for selected
// columns, and with a per-column value map.
func TestDataFrame_FillNa(t *testing.T) {
	ctx, spark := connect()
	file, err := os.CreateTemp("", "fillna")
	// Fail fast before touching file: if CreateTemp errored, file is nil and
	// calling file.Name() in a deferred cleanup would panic.
	require.NoError(t, err)
	defer os.Remove(file.Name())
	defer file.Close()
	_, err = file.WriteString(`{"id":1,"int":null, "int2": 1}
{"id":null,"int":12, "int2": null}
`)
	assert.NoError(t, err)
	df, err := spark.Read().Format("json").
		Option("inferSchema", "true").
		Load(file.Name())
	assert.NoError(t, err)
	// all columns
	filled, err := df.FillNa(ctx, types.Int64(10))
	assert.NoError(t, err)
	sorted, err := filled.Sort(ctx, functions.Col("id").Asc())
	assert.NoError(t, err)
	res, err := sorted.Collect(ctx)
	assert.NoError(t, err)
	require.Equal(t, 2, len(res))
	assert.Equal(t, []any{int64(1), int64(10), int64(1)}, res[0].Values())
	assert.Equal(t, []any{int64(10), int64(12), int64(10)}, res[1].Values())
	// specific columns
	filled, err = df.FillNa(ctx, types.Int64(10), "int", "int2")
	assert.NoError(t, err)
	sorted, err = filled.Sort(ctx, functions.Col("id").Asc())
	assert.NoError(t, err)
	res, err = sorted.Collect(ctx)
	assert.NoError(t, err)
	require.Equal(t, 2, len(res))
	assert.Equal(t, []any{nil, int64(12), int64(10)}, res[0].Values())
	assert.Equal(t, []any{int64(1), int64(10), int64(1)}, res[1].Values())
	// specific columns with map
	filled, err = df.FillNaWithValues(ctx, map[string]types.PrimitiveTypeLiteral{
		"int":  types.Int64(10),
		"int2": types.Int64(20),
	})
	assert.NoError(t, err)
	sorted, err = filled.Sort(ctx, functions.Col("id").Asc())
	assert.NoError(t, err)
	res, err = sorted.Collect(ctx)
	assert.NoError(t, err)
	require.Equal(t, 2, len(res))
	assert.Equal(t, []any{nil, int64(12), int64(20)}, res[0].Values())
	assert.Equal(t,
[]any{int64(1), int64(10), int64(1)}, res[1].Values())
}

// TestDataFrame_ApproxQuantile verifies approximate quantile computation on
// both a generated range and a small in-memory dataset.
func TestDataFrame_ApproxQuantile(t *testing.T) {
	ctx, spark := connect()
	df, err := spark.Sql(ctx, "select id, 1 as id2 from range(100)")
	assert.NoError(t, err)
	res, err := df.ApproxQuantile(ctx, []float64{0.5}, 0.1, "id")
	assert.NoError(t, err)
	assert.Len(t, res, 1)

	data := [][]any{
		{"bob", "Developer", 125000, 1},
		{"mark", "Developer", 108000, 2},
		{"carl", "Tester", 70000, 2},
		{"peter", "Developer", 185000, 2},
		{"jon", "Tester", 65000, 1},
		{"roman", "Tester", 82000, 2},
		{"simon", "Developer", 98000, 1},
		{"eric", "Developer", 144000, 2},
		{"carlos", "Tester", 75000, 1},
		{"henry", "Developer", 110000, 1},
	}
	schema := types.StructOf(
		types.NewStructField("Name", types.STRING),
		types.NewStructField("Role", types.STRING),
		types.NewStructField("Salary", types.LONG),
		types.NewStructField("Performance", types.LONG),
	)
	df, err = spark.CreateDataFrame(ctx, data, schema)
	assert.NoError(t, err)
	med, err := df.ApproxQuantile(ctx, []float64{0.5}, 0.25, "Salary")
	assert.NoError(t, err)
	assert.Len(t, med, 1)
	assert.GreaterOrEqual(t, med[0][0], 75000.0)
	// The same functionality is reachable via the stat-functions accessor.
	_, err = df.Stat().ApproxQuantile(ctx, []float64{0.5}, 0.25, "Salary")
	assert.NoError(t, err)
}

// TestDataFrame_DFNaFunctions exercises the Na() accessor: Drop, DropAll,
// Fill and Replace.
func TestDataFrame_DFNaFunctions(t *testing.T) {
	ctx, spark := connect()
	data := [][]any{
		{10, 80.5, "Alice", true},
		{5, nil, "Bob", nil},
		{nil, nil, "Tom", nil},
		{nil, nil, nil, nil},
	}
	schema := types.StructOf(
		types.NewStructField("age", types.INTEGER),
		types.NewStructField("height", types.DOUBLE),
		types.NewStructField("name", types.STRING),
		types.NewStructField("bool", types.BOOLEAN),
	)
	df, err := spark.CreateDataFrame(ctx, data, schema)
	assert.NoError(t, err)

	// Drop keeps only rows with no nulls at all.
	res, err := df.Na().Drop(ctx)
	assert.NoError(t, err)
	rows, err := res.Collect(ctx)
	assert.NoError(t, err)
	assert.Len(t, rows, 1)
	assert.Equal(t, rows[0].At(2), "Alice")

	// DropAll removes only the rows where every column is null.
	res, err = df.Na().DropAll(ctx)
	assert.NoError(t, err)
	rows, err = res.Collect(ctx)
	assert.NoError(t, err)
	assert.Len(t, rows, 3)

	// Fill must only use long types
	res, err = df.Na().Fill(ctx, types.Int64(50))
	assert.NoError(t, err)
	rows, err = res.Collect(ctx)
	assert.NoError(t, err)
	assert.Len(t, rows, 4)
	assert.Equal(t, int32(50), rows[2].At(0))
	assert.Equal(t, int32(50), rows[3].At(0))
	assert.Equal(t, float64(50), rows[2].At(1))
	assert.Equal(t, float64(50), rows[3].At(1))

	res, err = df.Na().Replace(ctx,
		[]types.PrimitiveTypeLiteral{types.String("Alice")},
		[]types.PrimitiveTypeLiteral{
			types.String("Bob"),
		})
	assert.NoError(t, err)
	rows, err = res.Collect(ctx)
	assert.NoError(t, err)
	assert.Len(t, rows, 4)
	assert.Equal(t, "Bob", rows[0].At(2))
}

// TestDataFrame_RangeIter iterates all rows via the All iterator and checks
// that execution errors surface through the iterator.
func TestDataFrame_RangeIter(t *testing.T) {
	ctx, spark := connect()
	df, err := spark.Sql(ctx, "select * from range(10)")
	assert.NoError(t, err)
	seen := 0
	for row, err := range df.All(ctx) {
		assert.NoError(t, err)
		assert.NotNil(t, row)
		seen++
	}
	assert.Equal(t, 10, seen)
	// Check that errors are properly propagated
	df, err = spark.Sql(ctx, "select if(id = 5, raise_error('handle'), false) from range(10)")
	assert.NoError(t, err)
	for _, err := range df.All(ctx) {
		// The error is immediately thrown:
		assert.Error(t, err)
	}
}

// TestDataFrame_PrintSchema only asserts that printing the schema succeeds.
func TestDataFrame_PrintSchema(t *testing.T) {
	ctx, spark := connect()
	df, err := spark.Sql(ctx, "select * from range(10)")
	assert.NoError(t, err)
	err = df.PrintSchema(ctx)
	assert.NoError(t, err)
}

// TestDataFrame_SchemaTreeString checks the textual schema tree for complex
// (map/array/nested map) column types.
func TestDataFrame_SchemaTreeString(t *testing.T) {
	ctx, spark := connect()
	df, err := spark.Sql(ctx, "select map('a', 1) as first, array(1,2,3) as second, map('a', map('b', 2)) as third")
	assert.NoError(t, err)
	schema, err := df.Schema(ctx)
	assert.NoError(t, err)
	tree := schema.TreeString()
	assert.Contains(t, tree, "|-- first: map")
	assert.Contains(t, tree, "|-- second: array")
	assert.Contains(t, tree, "|-- third: map")
}

================================================
FILE: internal/tests/integration/functions_test.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one
or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package integration import ( "context" "testing" "github.com/apache/spark-connect-go/spark/sql/types" "github.com/apache/spark-connect-go/spark/sql/functions" "github.com/apache/spark-connect-go/spark/sql" "github.com/stretchr/testify/assert" ) func TestIntegration_BuiltinFunctions(t *testing.T) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) if err != nil { t.Fatal(err) } df, _ := spark.Sql(ctx, "select '[2]' as a from range(10)") df, _ = df.Filter(ctx, functions.JsonArrayLength(functions.Col("a")).Eq(functions.IntLit(1))) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 10, len(res)) } func TestAggregationFunctions_Agg(t *testing.T) { ctx, spark := connect() df, err := spark.Sql(ctx, "select id, 1, 2, 3 from range(100)") assert.NoError(t, err) res, err := df.Agg(ctx, functions.Count(functions.Col("id"))) assert.NoError(t, err) cnt, err := res.Count(ctx) assert.NoError(t, err) assert.Equal(t, int64(1), cnt) res, err = df.AggWithMap(ctx, map[string]string{"id": "sum"}) assert.NoError(t, err) rows, err := res.Collect(ctx) assert.NoError(t, err) assert.Len(t, rows, 1) assert.Equal(t, int64(4950), rows[0].At(0)) } func TestIntegration_ColumnGetItem(t *testing.T) { ctx := 
context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) if err != nil { t.Fatal(err) } df, _ := spark.Sql(ctx, "select sequence(1,10) as s") df, err = df.Select(ctx, functions.Col("s").GetItem(types.Int64(2))) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, int32(3), res[0].Values()[0]) } ================================================ FILE: internal/tests/integration/helper.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package integration import ( "context" "github.com/apache/arrow-go/v18/arrow" "github.com/apache/arrow-go/v18/arrow/array" "github.com/apache/arrow-go/v18/arrow/memory" "github.com/apache/spark-connect-go/spark/sql" ) func connect() (context.Context, sql.SparkSession) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) if err != nil { panic(err) } return ctx, spark } func createArrowTable() arrow.Table { pool := memory.NewGoAllocator() schema := arrow.NewSchema( []arrow.Field{ {Name: "f1-i32", Type: arrow.PrimitiveTypes.Int32}, {Name: "f2-f64", Type: arrow.PrimitiveTypes.Float64}, {Name: "f3-string", Type: &arrow.StringType{}}, }, nil, ) b := array.NewRecordBuilder(pool, schema) defer b.Release() b.Field(0).(*array.Int32Builder).AppendValues([]int32{1, 2, 3}, nil) b.Field(1).(*array.Float64Builder).AppendValues([]float64{1.1, 2.2, 3.3}, nil) b.Field(2).(*array.StringBuilder).AppendValues([]string{"a", "b", "c"}, nil) rec1 := b.NewRecord() // Do not release the table tbl := array.NewTableFromRecords(schema, []arrow.Record{rec1}) return tbl } ================================================ FILE: internal/tests/integration/spark_runner.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. package integration import ( "fmt" "io" "net" "os" "os/exec" "time" "github.com/apache/spark-connect-go/spark/sparkerrors" ) func StartSparkConnect() (int64, error) { sparkHome := os.Getenv("SPARK_HOME") if sparkHome == "" { return -1, sparkerrors.WithString(sparkerrors.TestSetupError, "SPARK_HOME not set") } fmt.Printf("Starting Spark Connect Server in: %v\n", os.Getenv("SPARK_HOME")) cmd := exec.Command("./sbin/start-connect-server.sh", "--conf", "spark.log.structuredLogging.enabled=false") cmd.Dir = sparkHome baseEnv := os.Environ() baseEnv = append(baseEnv, "SPARK_NO_DAEMONIZE=1") cmd.Env = baseEnv stdout, _ := cmd.StdoutPipe() if err := cmd.Start(); err != nil { return -1, sparkerrors.WithType(sparkerrors.TestSetupError, err) } timeout := time.After(60 * time.Second) tick := time.NewTicker(1 * time.Second) for { select { case <-timeout: out, _ := io.ReadAll(stdout) fmt.Printf("Output: %v\n", string(out)) return -1, sparkerrors.WithString(sparkerrors.TestSetupError, "timeout waiting for Spark Connect to start") case <-tick.C: if cmd.ProcessState != nil && cmd.ProcessState.Exited() { return -1, sparkerrors.WithString(sparkerrors.TestSetupError, "Spark Connect exited") } conn, err := net.Dial("tcp", "localhost:15002") if err == nil { conn.Close() return int64(cmd.Process.Pid), nil } } } } func StopSparkConnect(pid int64) error { return nil } ================================================ FILE: internal/tests/integration/sql_test.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package integration import ( "context" "fmt" "log" "os" "testing" "github.com/apache/spark-connect-go/spark/sql/column" "github.com/apache/spark-connect-go/spark/sql/functions" "github.com/apache/spark-connect-go/spark/sql/types" "github.com/apache/spark-connect-go/spark/sql" "github.com/stretchr/testify/assert" ) func TestIntegration_RunSQLCommand(t *testing.T) { // Run SQL command ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) if err != nil { t.Fatal(err) } df, err := spark.Sql(ctx, "select * from range(100)") assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 100, len(res)) df, err = df.Filter(ctx, column.OfDF(df, "id").Lt(functions.IntLit(10))) assert.NoError(t, err) res, err = df.Collect(ctx) assert.NoErrorf(t, err, "Must be able to collect the rows.") assert.Equal(t, 10, len(res)) } func TestIntegration_Schema(t *testing.T) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) assert.NoError(t, err) df, err := spark.Sql(ctx, "select * from range(1)") assert.NoError(t, err) schema, err := df.Schema(ctx) assert.NoError(t, err) assert.Len(t, schema.Fields, 1) assert.Equal(t, "id", schema.Fields[0].Name) assert.Equal(t, types.LongType{}, schema.Fields[0].DataType) } func TestIntegration_StructConversion(t *testing.T) { ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote("sc://localhost").Build(ctx) if err != nil { t.Fatal(err) } query := ` select named_struct( 'a', 1, 'b', 2, 'c', cast(10.32 as 
double), 'd', array(1, 2, 3, 4) ) struct_col ` df, err := spark.Sql(ctx, query) assert.NoError(t, err) res, err := df.Collect(ctx) assert.NoError(t, err) assert.Equal(t, 1, len(res)) columnData := res[0].Values()[0] assert.NotNil(t, columnData) structDataMap, ok := columnData.(map[string]any) assert.True(t, ok) assert.Contains(t, structDataMap, "a") assert.Contains(t, structDataMap, "b") assert.Contains(t, structDataMap, "c") assert.Contains(t, structDataMap, "d") assert.Equal(t, int32(1), structDataMap["a"]) assert.Equal(t, int32(2), structDataMap["b"]) assert.Equal(t, float64(10.32), structDataMap["c"]) arrayData := []any{int32(1), int32(2), int32(3), int32(4)} assert.Equal(t, arrayData, structDataMap["d"]) schema, err := df.Schema(ctx) assert.NoError(t, err) assert.Equal(t, "struct_col", schema.Fields[0].Name) } func TestMain(m *testing.M) { envShouldStartService := os.Getenv("START_SPARK_CONNECT_SERVICE") shouldStartService := envShouldStartService == "" || envShouldStartService == "1" pid := int64(-1) var err error if shouldStartService { fmt.Println("Starting Spark Connect service...") pid, err = StartSparkConnect() if err != nil { log.Fatal(err) } } code := m.Run() if shouldStartService { if err = StopSparkConnect(pid); err != nil { log.Fatal(err) } } os.Exit(code) } ================================================ FILE: java/.gitignore ================================================ project target ================================================ FILE: java/README.md ================================================ # Sample Spark-Submit Wrapper This directory provides a simple wrapper library that can be used to submit a Spark Connect Go application to a Spark Cluster. ## Wrapper Library The wrapper library expects to variable input values: 1. The path to the binary file that contains the Spark Connect Go application. This path is specified via the Spark conf property `spark.golang.binary`. 2. 
The actual binary has to be submitted as part of the application using the `--files` parameter to the `spark-submit` script. Building the libary can be done using: ```bash sbt package ``` ## Run Script The `run.sh` script is a simple script that can be used to submit a Spark Connect Go application to a Spark Cluster. The script can be called as follows: ```bash export SPARK_HOME=/path/to/spark ./run.sh ../cmd/spark-connect-example-spark-session/spark-connect-example-spark-session ``` When this is called from the current directory and with the Spark Connect Golang client build, it will submit the example application to the Spark Cluster. The `run.sh` script can be modified according to your needs. ================================================ FILE: java/build.sbt ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
ThisBuild / version := "0.1.0-SNAPSHOT" ThisBuild / scalaVersion := "2.13.16" lazy val root = (project in file(".")) .settings( name := "SparkConnectGoRunner" ) libraryDependencies += "org.apache.spark" %% "spark-sql-api" % "4.0.0" libraryDependencies += "org.apache.spark" %% "spark-sql" % "4.0.0" libraryDependencies += "org.apache.spark" %% "spark-core" % "4.0.0" libraryDependencies += "org.apache.spark" %% "spark-connect-common" % "4.0.0" libraryDependencies += "org.apache.spark" %% "spark-connect" % "4.0.0" ================================================ FILE: java/run.sh ================================================ #!/bin/bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e SCALA_VERSION=2.13 SPARK_VERSION=4.0.0 if [ -z "$SPARK_HOME" ]; then echo "SPARK_HOME must be set to run this script." exit 1 fi BINARY_PATH=$1 if [ -z "$BINARY_PATH" ]; then echo "Usage: $0 " exit 1 fi # Check if the binary exists. if [ ! -f "$BINARY_PATH" ]; then echo "Binary not found: $BINARY_PATH, make sure the path is valid." exit 1 fi # Get the absolute path of the binary. BINARY_PATH=$(realpath $BINARY_PATH) BINARY_NAME=$(basename $BINARY_PATH) # Call the spark-submit script. 
$SPARK_HOME/bin/spark-submit \ --files $BINARY_PATH \ --conf spark.golang.binary=$BINARY_NAME \ --class org.apache.spark.golang.Runner \ --packages org.apache.spark:spark-connect_$SCALA_VERSION:$SPARK_VERSION \ target/scala-$SCALA_VERSION/sparkconnectgorunner_$SCALA_VERSION-0.1.0-SNAPSHOT.jar ================================================ FILE: java/src/main/scala/org/apache/spark/golang/Runner.scala ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.golang import scala.sys.process._ import org.apache.spark.{SparkContext, SparkFiles} import org.apache.spark.internal.Logging import org.apache.spark.sql.SparkSession import org.apache.spark.sql.connect.service.SparkConnectService /** * This is the main entry point for the Spark Connect Go runner. * * To run any Go code on your Spark cluster using spark-submit, you can use * this very simple wrapper to do so. To */ object Runner extends Logging { def main(args: Array[String]): Unit = { // Instantiate a new Spark Context. val ctx = SparkContext.getOrCreate() // Start the SparkConnect service which will listen for incoming requests. 
SparkConnectService.start(ctx) // Create a new Spark Session to fetch the port configuration that the service // listens on. val spark = SparkSession.builder().getOrCreate() val port = spark.conf.get("spark.connect.grpc.binding.port").toInt // Fetch the binary of the program to be executed. val bin = spark.conf.get("spark.golang.binary") // Fetch the local path of the binary. val path = SparkFiles.get(bin) val process = Process(path, None, "SPARK_REMOTE" -> s"sc://localhost:$port") process.! logWarning("Stopping Spark Connect service") SparkConnectService.stop() ctx.stop() } } ================================================ FILE: merge_connect_go_pr.py ================================================ #!/usr/bin/env python3 # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Utility for creating well-formed pull request merges and pushing them to Apache # Spark. # usage: ./merge_connect_go_pr.py (see config env vars below) # # This utility assumes you already have a local Spark git folder and that you # have added remotes corresponding to both (i) the github apache Spark # mirror and (ii) the apache git repo. 
import json
import os
import re
import subprocess
import sys
import traceback

import requests
from urllib.request import urlopen
from urllib.request import Request
from urllib.error import HTTPError

# The jira library is optional: JIRA integration is skipped gracefully when it
# is not installed (see initialize_jira() / main()).
try:
    import jira.client

    JIRA_IMPORTED = True
except ImportError:
    JIRA_IMPORTED = False

# Location of your Spark git development area
SPARK_CONNECT_GO_HOME = os.environ.get("SPARK_CONNECT_GO_HOME", os.getcwd())
# Remote name which points to the GitHub site
PR_REMOTE_NAME = os.environ.get("PR_REMOTE_NAME", "apache-github")
# Remote name which points to Apache git
PUSH_REMOTE_NAME = os.environ.get("PUSH_REMOTE_NAME", "apache")
# ASF JIRA username
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", "")
# ASF JIRA password
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", "")
# ASF JIRA personal access token (preferred over username/password)
JIRA_ACCESS_TOKEN = os.environ.get("JIRA_ACCESS_TOKEN")

# OAuth key used for issuing requests against the GitHub API. If this is not defined, then requests
# will be unauthenticated. You should only need to configure this if you find yourself regularly
# exceeding your IP's unauthenticated request rate limit. You can create an OAuth key at
# https://github.com/settings/tokens. This script only requires the "public_repo" scope.
GITHUB_OAUTH_KEY = os.environ.get("GITHUB_OAUTH_KEY") GITHUB_BASE = "https://github.com/apache/spark-connect-go/pull" GITHUB_API_BASE = "https://api.github.com/repos/apache/spark-connect-go" JIRA_BASE = "https://issues.apache.org/jira/browse" JIRA_API_BASE = "https://issues.apache.org/jira" # Prefix added to temporary branches BRANCH_PREFIX = "PR_TOOL" asf_jira = None def get_json(url): try: request = Request(url) if GITHUB_OAUTH_KEY: request.add_header("Authorization", "token %s" % GITHUB_OAUTH_KEY) return json.load(urlopen(request)) except HTTPError as e: if "X-RateLimit-Remaining" in e.headers and e.headers["X-RateLimit-Remaining"] == "0": print( "Exceeded the GitHub API rate limit; see the instructions in " + "dev/merge_connect_go_pr.py to configure an OAuth token for making authenticated " + "GitHub requests." ) else: print("Unable to fetch URL, exiting: %s" % url) sys.exit(-1) def get_pull_request(pr_num): headers = { "Authorization": f"token {GITHUB_OAUTH_KEY}", "Accept": "application/vnd.github.v3+json", } response = requests.get( f"{GITHUB_API_BASE}/pulls/{pr_num}", headers=headers, ) if response.status_code == 200: return response.json() else: error_message = (f"Failed to get pull request #{pr_num}. 
" f"Status code: {response.status_code}") error_message += f"\nResponse: {response.text}" fail(error_message) def fail(msg): print(msg) clean_up() sys.exit(-1) def run_cmd(cmd): print(cmd) if isinstance(cmd, list): return subprocess.check_output(cmd).decode("utf-8") else: return subprocess.check_output(cmd.split(" ")).decode("utf-8") def continue_maybe(prompt): result = input("\n%s (y/n): " % prompt) if result.lower() != "y": fail("Okay, exiting") def clean_up(): if "original_head" in globals(): print("Restoring head pointer to %s" % original_head) run_cmd("git checkout %s" % original_head) branches = run_cmd("git branch").replace(" ", "").split("\n") for branch in list(filter(lambda x: x.startswith(BRANCH_PREFIX), branches)): print("Deleting local branch %s" % branch) run_cmd("git branch -D %s" % branch) # merge the requested PR and return the merge hash def merge_pr(pr_num, target_ref, title, body, pr_repo_desc): pr_branch_name = "%s_MERGE_PR_%s" % (BRANCH_PREFIX, pr_num) target_branch_name = "%s_MERGE_PR_%s_%s" % (BRANCH_PREFIX, pr_num, target_ref.upper()) run_cmd("git fetch %s pull/%s/head:%s" % (PR_REMOTE_NAME, pr_num, pr_branch_name)) run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, target_ref, target_branch_name)) run_cmd("git checkout %s" % target_branch_name) # Get all the data from the pull request. pr = get_pull_request(pr_num) # Check if the PR is mergeable and still open: if not pr["mergeable"]: fail(f"Pull request #{pr_num} is not mergeable in its current form.") # Check if the PR is still open. 
if pr["state"] != "open": fail(f"Pull request #{pr_num} is not open.") if pr["merged"]: fail(f"Pull request #{pr_num} has already been merged.") if pr["draft"]: fail(f"Pull request #{pr_num} is a draft.") # First commit author should be considered as the primary author when the rank is the same commit_authors = run_cmd( ["git", "log", "%s..%s" % (target_branch_name, pr_branch_name), "--pretty=format:%an <%ae>", "--reverse"] ).split("\n") distinct_authors = sorted( list(dict.fromkeys(commit_authors)), key=lambda x: commit_authors.count(x), reverse=True ) primary_author = input( 'Enter primary author in the format of "name " [%s]: ' % distinct_authors[0] ) if primary_author == "": primary_author = distinct_authors[0] else: # When primary author is specified manually, de-dup it from author list and # put it at the head of author list. distinct_authors = list(filter(lambda x: x != primary_author, distinct_authors)) distinct_authors.insert(0, primary_author) merge_message = "" if body is not None: # We remove @ symbols from the body to avoid triggering e-mails # to people every time someone creates a public fork of Spark. merge_message += body.replace("@", "") committer_name = run_cmd("git config --get user.name").strip() committer_email = run_cmd("git config --get user.email").strip() # The string "Closes #%s" string is required for GitHub to correctly close the PR merge_message += "\n\n" merge_message += "Closes #%s from %s." % (pr_num, pr_repo_desc) authors = "Authored-by:" if len(distinct_authors) == 1 else "Lead-authored-by:" authors += " %s" % (distinct_authors.pop(0)) if len(distinct_authors) > 0: authors += "\n" + "\n".join(["Co-authored-by: %s" % a for a in distinct_authors]) authors += "\n" + "Signed-off-by: %s <%s>" % (committer_name, committer_email) merge_message += "\n\n" merge_message += authors # Merge the Pull Request using the commit message and title and squash it. 
headers = { "Authorization": f"token {GITHUB_OAUTH_KEY}", "Accept": "application/vnd.github.v3+json", } data = { "commit_title": title, "commit_message": merge_message, # Must be squash, always. "merge_method": "squash", } continue_maybe("Collected all data. Ready to merge PR?") # Run the request to merge the PR. response = requests.put( f"{GITHUB_API_BASE}/pulls/{pr_num}/merge", headers=headers, json=data ) if response.status_code == 200: merge_response_json = response.json() merge_commit_sha = merge_response_json.get("sha") print(f"Pull request #{pr_num} merged. Sha: #{merge_commit_sha}") clean_up() return merge_commit_sha else: error_message = f"Failed to merge pull request #{pr_num}. Status code: {response.status_code}" error_message += f"\nResponse: {response.text}" clean_up() fail(error_message) def cherry_pick(pr_num, merge_hash, default_branch): pick_ref = input("Enter a branch name [%s]: " % default_branch) if pick_ref == "": pick_ref = default_branch pick_branch_name = "%s_PICK_PR_%s_%s" % (BRANCH_PREFIX, pr_num, pick_ref.upper()) run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, pick_ref, pick_branch_name)) run_cmd("git checkout %s" % pick_branch_name) try: run_cmd("git cherry-pick -sx %s" % merge_hash) except Exception as e: msg = "Error cherry-picking: %s\nWould you like to manually fix-up this merge?" % e continue_maybe(msg) msg = "Okay, please fix any conflicts and finish the cherry-pick. Finished?" continue_maybe(msg) continue_maybe( "Pick complete (local ref %s). Push to %s?" % (pick_branch_name, PUSH_REMOTE_NAME) ) try: run_cmd("git push %s %s:%s" % (PUSH_REMOTE_NAME, pick_branch_name, pick_ref)) except Exception as e: clean_up() fail("Exception while pushing: %s" % e) pick_hash = run_cmd("git rev-parse %s" % pick_branch_name)[:8] clean_up() print("Pull request #%s picked into %s!" 
% (pr_num, pick_ref)) print("Pick hash: %s" % pick_hash) return pick_ref def fix_version_from_branch(branch, versions): # Note: Assumes this is a sorted (newest->oldest) list of un-released versions if branch == "master": return versions[0] else: branch_ver = branch.replace("branch-", "") return list(filter(lambda x: x.name.startswith(branch_ver), versions))[-1] def resolve_jira_issue(merge_branches, comment, default_jira_id=""): global asf_jira jira_id = input("Enter a JIRA id [%s]: " % default_jira_id) if jira_id == "": jira_id = default_jira_id try: issue = asf_jira.issue(jira_id) except Exception as e: fail("ASF JIRA could not find %s\n%s" % (jira_id, e)) cur_status = issue.fields.status.name cur_summary = issue.fields.summary cur_assignee = issue.fields.assignee if cur_assignee is None: cur_assignee = choose_jira_assignee(issue, asf_jira) # Check again, we might not have chosen an assignee if cur_assignee is None: cur_assignee = "NOT ASSIGNED!!!" else: cur_assignee = cur_assignee.displayName if cur_status == "Resolved" or cur_status == "Closed": fail("JIRA issue %s already has status '%s'" % (jira_id, cur_status)) print("=== JIRA %s ===" % jira_id) print( "summary\t\t%s\nassignee\t%s\nstatus\t\t%s\nurl\t\t%s/%s\n" % (cur_summary, cur_assignee, cur_status, JIRA_BASE, jira_id) ) versions = asf_jira.project_versions("SPARK") versions = sorted(versions, key=lambda x: x.name, reverse=True) versions = list(filter(lambda x: x.raw["released"] is False, versions)) # Consider only x.y.z versions versions = list(filter(lambda x: re.match(r"\d+\.\d+\.\d+", x.name), versions)) default_fix_versions = list( map(lambda x: fix_version_from_branch(x, versions).name, merge_branches) ) for v in default_fix_versions: # Handles the case where we have forked a release branch but not yet made the release. # In this case, if the PR is committed to the master branch and the release branch, we # only consider the release branch to be the fix version. E.g. 
it is not valid to have # both 1.1.0 and 1.0.0 as fix versions. (major, minor, patch) = v.split(".") if patch == "0": previous = "%s.%s.%s" % (major, int(minor) - 1, 0) if previous in default_fix_versions: default_fix_versions = list(filter(lambda x: x != v, default_fix_versions)) default_fix_versions = ",".join(default_fix_versions) available_versions = set(list(map(lambda v: v.name, versions))) while True: try: fix_versions = input( "Enter comma-separated fix version(s) [%s]: " % default_fix_versions ) if fix_versions == "": fix_versions = default_fix_versions fix_versions = fix_versions.replace(" ", "").split(",") if set(fix_versions).issubset(available_versions): break else: print( "Specified version(s) [%s] not found in the available versions, try " "again (or leave blank and fix manually)." % (", ".join(fix_versions)) ) except KeyboardInterrupt: raise except BaseException: traceback.print_exc() print("Error setting fix version(s), try again (or leave blank and fix manually)") def get_version_json(version_str): return list(filter(lambda v: v.name == version_str, versions))[0].raw jira_fix_versions = list(map(lambda v: get_version_json(v), fix_versions)) resolve = list(filter(lambda a: a["name"] == "Resolve Issue", asf_jira.transitions(jira_id)))[0] resolution = list(filter(lambda r: r.raw["name"] == "Fixed", asf_jira.resolutions()))[0] asf_jira.transition_issue( jira_id, resolve["id"], fixVersions=jira_fix_versions, comment=comment, resolution={"id": resolution.raw["id"]}, ) print("Successfully resolved %s with fixVersions=%s!" 
% (jira_id, fix_versions)) def choose_jira_assignee(issue, asf_jira): """ Prompt the user to choose who to assign the issue to in jira, given a list of candidates, including the original reporter and all commentors """ while True: try: reporter = issue.fields.reporter commentors = list(map(lambda x: x.author, issue.fields.comment.comments)) candidates = set(commentors) candidates.add(reporter) candidates = list(candidates) print("JIRA is unassigned, choose assignee") for idx, author in enumerate(candidates): if author.key == "apachespark": continue annotations = ["Reporter"] if author == reporter else [] if author in commentors: annotations.append("Commentor") print("[%d] %s (%s)" % (idx, author.displayName, ",".join(annotations))) raw_assignee = input( "Enter number of user, or userid, to assign to (blank to leave unassigned):" ) if raw_assignee == "": return None else: try: id = int(raw_assignee) assignee = candidates[id] except BaseException: # assume it's a user id, and try to assign (might fail, we just prompt again) assignee = asf_jira.user(raw_assignee) asf_jira.assign_issue(issue.key, assignee.name) return assignee except KeyboardInterrupt: raise except BaseException: traceback.print_exc() print("Error assigning JIRA, try again (or leave blank and fix manually)") def resolve_jira_issues(title, merge_branches, comment): jira_ids = re.findall("SPARK-[0-9]{4,5}", title) if len(jira_ids) == 0: resolve_jira_issue(merge_branches, comment) for jira_id in jira_ids: resolve_jira_issue(merge_branches, comment, jira_id) def standardize_jira_ref(text): """ Standardize the [SPARK-XXXXX] [MODULE] prefix Converts "[SPARK-XXX][mllib] Issue", "[MLLib] SPARK-XXX. Issue" or "SPARK XXX [MLLIB]: Issue" to "[SPARK-XXX][MLLIB] Issue" >>> standardize_jira_ref( ... "[SPARK-5821] [SQL] ParquetRelation2 CTAS should check if delete is successful") '[SPARK-5821][SQL] ParquetRelation2 CTAS should check if delete is successful' >>> standardize_jira_ref( ... 
"[SPARK-4123][Project Infra][WIP]: Show new dependencies added in pull requests") '[SPARK-4123][PROJECT INFRA][WIP] Show new dependencies added in pull requests' >>> standardize_jira_ref("[MLlib] Spark 5954: Top by key") '[SPARK-5954][MLLIB] Top by key' >>> standardize_jira_ref("[SPARK-979] a LRU scheduler for load balancing in TaskSchedulerImpl") '[SPARK-979] a LRU scheduler for load balancing in TaskSchedulerImpl' >>> standardize_jira_ref( ... "SPARK-1094 Support MiMa for reporting binary compatibility across versions.") '[SPARK-1094] Support MiMa for reporting binary compatibility across versions.' >>> standardize_jira_ref("[WIP] [SPARK-1146] Vagrant support for Spark") '[SPARK-1146][WIP] Vagrant support for Spark' >>> standardize_jira_ref( ... "SPARK-1032. If Yarn app fails before registering, app master stays aroun...") '[SPARK-1032] If Yarn app fails before registering, app master stays aroun...' >>> standardize_jira_ref( ... "[SPARK-6250][SPARK-6146][SPARK-5911][SQL] Types are now reserved words in DDL parser.") '[SPARK-6250][SPARK-6146][SPARK-5911][SQL] Types are now reserved words in DDL parser.' 
>>> standardize_jira_ref("Additional information for users building from source code") 'Additional information for users building from source code' """ jira_refs = [] components = [] # If the string is compliant, no need to process any further if re.search(r"^\[SPARK-[0-9]{3,6}\](\[[A-Z0-9_\s,]+\] )+\S+", text): return text # Extract JIRA ref(s): pattern = re.compile(r"(SPARK[-\s]*[0-9]{3,6})+", re.IGNORECASE) for ref in pattern.findall(text): # Add brackets, replace spaces with a dash, & convert to uppercase jira_refs.append("[" + re.sub(r"\s+", "-", ref.upper()) + "]") text = text.replace(ref, "") # Extract spark component(s): # Look for alphanumeric chars, spaces, dashes, periods, and/or commas pattern = re.compile(r"(\[[\w\s,.-]+\])", re.IGNORECASE) for component in pattern.findall(text): components.append(component.upper()) text = text.replace(component, "") # Cleanup any remaining symbols: pattern = re.compile(r"^\W+(.*)", re.IGNORECASE) if pattern.search(text) is not None: text = pattern.search(text).groups()[0] # Assemble full text (JIRA ref(s), module(s), remaining text) clean_text = "".join(jira_refs).strip() + "".join(components).strip() + " " + text.strip() # Replace multiple spaces with a single space, e.g. if no jira refs and/or components were # included clean_text = re.sub(r"\s+", " ", clean_text.strip()) return clean_text def get_current_ref(): ref = run_cmd("git rev-parse --abbrev-ref HEAD").strip() if ref == "HEAD": # The current ref is a detached HEAD, so grab its SHA. return run_cmd("git rev-parse HEAD").strip() else: return ref def initialize_jira(): global asf_jira jira_server = {"server": JIRA_API_BASE} if not JIRA_IMPORTED: print_error("ERROR finding jira library. 
Run 'pip3 install jira' to install.") continue_maybe("Continue without jira?") elif JIRA_ACCESS_TOKEN: client = jira.client.JIRA(jira_server, token_auth=JIRA_ACCESS_TOKEN) try: # Eagerly check if the token is valid to align with the behavior of username/password # authn client.current_user() asf_jira = client except Exception as e: if e.__class__.__name__ == "JIRAError" and getattr(e, "status_code", None) == 401: msg = ( "ASF JIRA could not authenticate with the invalid or expired token '%s'" % JIRA_ACCESS_TOKEN ) fail(msg) else: raise e elif JIRA_USERNAME and JIRA_PASSWORD: print("You can use JIRA_ACCESS_TOKEN instead of JIRA_USERNAME/JIRA_PASSWORD.") print("Visit https://issues.apache.org/jira/secure/ViewProfile.jspa ") print("and click 'Personal Access Tokens' menu to manage your own tokens.") asf_jira = jira.client.JIRA(jira_server, basic_auth=(JIRA_USERNAME, JIRA_PASSWORD)) else: print("Neither JIRA_ACCESS_TOKEN nor JIRA_USERNAME/JIRA_PASSWORD are set.") continue_maybe("Continue without jira?") def main(): global original_head global asf_jira initialize_jira() os.chdir(SPARK_CONNECT_GO_HOME) original_head = get_current_ref() branches = get_json("%s/branches" % GITHUB_API_BASE) branch_names = list(filter(lambda x: x.startswith("branch-"), [x["name"] for x in branches])) # Assumes branch names can be sorted lexicographically if len(branch_names) == 0: # Remove this when we have a branch. It fails now because we don't have branch-*. latest_branch = "master" else: latest_branch = sorted(branch_names, reverse=True)[0] pr_num = input("Which pull request would you like to merge? (e.g. 34): ") pr = get_json("%s/pulls/%s" % (GITHUB_API_BASE, pr_num)) pr_events = get_json("%s/issues/%s/events" % (GITHUB_API_BASE, pr_num)) url = pr["url"] # Warn if the PR is WIP if "[WIP]" in pr["title"]: msg = "The PR title has `[WIP]`:\n%s\nContinue?" 
% pr["title"] continue_maybe(msg) # Decide whether to use the modified title or not modified_title = standardize_jira_ref(pr["title"]).rstrip(".") if modified_title != pr["title"]: print("I've re-written the title as follows to match the standard format:") print("Original: %s" % pr["title"]) print("Modified: %s" % modified_title) result = input("Would you like to use the modified title? (y/n): ") if result.lower() == "y": title = modified_title print("Using modified title:") else: title = pr["title"] print("Using original title:") print(title) else: title = pr["title"] body = pr["body"] if body is None: body = "" modified_body = re.sub(re.compile(r"\n?", re.DOTALL), "", body).lstrip() if modified_body != body: print("=" * 80) print(modified_body) print("=" * 80) print("I've removed the comments from PR template like the above:") result = input("Would you like to use the modified body? (y/n): ") if result.lower() == "y": body = modified_body print("Using modified body:") else: print("Using original body:") print("=" * 80) print(body) print("=" * 80) target_ref = pr["base"]["ref"] user_login = pr["user"]["login"] base_ref = pr["head"]["ref"] pr_repo_desc = "%s/%s" % (user_login, base_ref) # Merged pull requests don't appear as merged in the GitHub API; # Instead, they're closed by asfgit. merge_commits = [ e for e in pr_events if e["actor"]["login"] == "asfgit" and e["event"] == "closed" ] if merge_commits: merge_hash = merge_commits[0]["commit_id"] message = get_json("%s/commits/%s" % (GITHUB_API_BASE, merge_hash))["commit"]["message"] print("Pull request %s has already been merged, assuming you want to backport" % pr_num) commit_is_downloaded = ( run_cmd(["git", "rev-parse", "--quiet", "--verify", "%s^{commit}" % merge_hash]).strip() != "" ) if not commit_is_downloaded: fail("Couldn't find any merge commit for #%s, you may need to update HEAD." 
% pr_num) print("Found commit %s:\n%s" % (merge_hash, message)) cherry_pick(pr_num, merge_hash, latest_branch) sys.exit(0) if not bool(pr["mergeable"]): msg = ( "Pull request %s is not mergeable in its current form.\n" % pr_num + "Continue? (experts only!)" ) continue_maybe(msg) print("\n=== Pull Request #%s ===" % pr_num) print("title\t%s\nsource\t%s\ntarget\t%s\nurl\t%s" % (title, pr_repo_desc, target_ref, url)) continue_maybe("Proceed with merging pull request #%s?" % pr_num) merged_refs = [target_ref] merge_hash = merge_pr(pr_num, target_ref, title, body, pr_repo_desc) pick_prompt = "Would you like to pick %s into another branch?" % merge_hash while input("\n%s (y/n): " % pick_prompt).lower() == "y": merged_refs = merged_refs + [cherry_pick(pr_num, merge_hash, latest_branch)] if JIRA_IMPORTED: if asf_jira is not None: continue_maybe("Would you like to update an associated JIRA?") jira_comment = "Issue resolved by pull request %s\n[%s/%s]" % ( pr_num, GITHUB_BASE, pr_num, ) resolve_jira_issues(title, merged_refs, jira_comment) else: print("JIRA_USERNAME and JIRA_PASSWORD not set") print("Exiting without trying to close the associated JIRA.") else: print("Could not find jira-python library. Run 'sudo pip3 install jira' to install.") print("Exiting without trying to close the associated JIRA.") if __name__ == "__main__": import doctest (failure_count, test_count) = doctest.testmod() if failure_count: sys.exit(-1) try: main() except BaseException: clean_up() raise ================================================ FILE: quick-start.md ================================================ # Quick Start Guide to Write Spark Connect Client Application in Go ## Add Reference to `spark-connect-go` Library In your Go project `go.mod` file, add `spark-connect-go` library: ``` require ( github.com/apache/spark-connect-go master ) ``` In your Go project, run `go mod tidy` to download the library on your local machine. 
## Write Spark Connect Client Application Create `main.go` file with following code: ```go package main import ( "context" "flag" "fmt" "log" "github.com/apache/spark-connect-go/spark/sql" ) var ( remote = flag.String("remote", "sc://localhost:15002", "the remote address of Spark Connect server to connect to") filedir = flag.String("filedir", "/tmp", "the directory to save the files") ) func main() { flag.Parse() ctx := context.Background() spark, err := sql.NewSessionBuilder().Remote(*remote).Build(ctx) if err != nil { log.Fatalf("Failed: %s", err) } defer spark.Stop() df, err := spark.Sql(ctx, "select 'apple' as word, 123 as count union all select 'orange' as word, 456 as count") if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("DataFrame from sql: select 'apple' as word, 123 as count union all select 'orange' as word, 456 as count") err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } schema, err := df.Schema(ctx) if err != nil { log.Fatalf("Failed: %s", err) } for _, f := range schema.Fields { log.Printf("Field in dataframe schema: %s - %s", f.Name, f.DataType.TypeName()) } rows, err := df.Collect(ctx) if err != nil { log.Fatalf("Failed: %s", err) } schema, err = rows[0].Schema() if err != nil { log.Fatalf("Failed: %s", err) } for _, f := range schema.Fields { log.Printf("Field in row: %s - %s", f.Name, f.DataType.TypeName()) } for _, row := range rows { log.Printf("Row: %v", row) } err = df.Writer().Mode("overwrite"). Format("parquet"). Save(ctx, fmt.Sprintf("file://%s/spark-connect-write-example-output.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } df, err = spark.Read().Format("parquet"). 
Load(fmt.Sprintf("file://%s/spark-connect-write-example-output.parquet", *filedir)) if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("DataFrame from reading parquet") err = df.Show(ctx, 100, false) if err != nil { log.Fatalf("Failed: %s", err) } err = df.CreateTempView(ctx, "view1", true, false) if err != nil { log.Fatalf("Failed: %s", err) } df, err = spark.Sql(ctx, "select count, word from view1 order by count") if err != nil { log.Fatalf("Failed: %s", err) } log.Printf("DataFrame from sql: select count, word from view1 order by count") df.Show(ctx, 100, false) } ``` ## Start Spark Connect Server (Driver) Download a Spark distribution (4.0.0+), unzip the folder, run command: ``` sbin/start-connect-server.sh ``` ## Run Spark Connect Client Application ``` go run main.go --filedir YOUR_TMP_DIR ``` You will see the client application connects to the Spark Connect server and prints out the output from your application. ================================================ FILE: spark/client/base/base.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package base import ( "context" "github.com/apache/spark-connect-go/spark/sql/utils" "github.com/apache/arrow-go/v18/arrow" "github.com/apache/spark-connect-go/internal/generated" "github.com/apache/spark-connect-go/spark/sql/types" ) type SparkConnectRPCClient generated.SparkConnectServiceClient // SparkConnectClient is the interface for executing a plan in Spark. // // This interface does not deal with the public Spark API abstractions but roughly deals on the // RPC API level and the necessary translation of Arrow to Row objects. type SparkConnectClient interface { ExecutePlan(ctx context.Context, plan *generated.Plan) (ExecuteResponseStream, error) ExecuteCommand(ctx context.Context, plan *generated.Plan) (arrow.Table, *types.StructType, map[string]any, error) AnalyzePlan(ctx context.Context, plan *generated.Plan) (*generated.AnalyzePlanResponse, error) Explain(ctx context.Context, plan *generated.Plan, explainMode utils.ExplainMode) (*generated.AnalyzePlanResponse, error) Persist(ctx context.Context, plan *generated.Plan, storageLevel utils.StorageLevel) error Unpersist(ctx context.Context, plan *generated.Plan) error GetStorageLevel(ctx context.Context, plan *generated.Plan) (*utils.StorageLevel, error) SparkVersion(ctx context.Context) (string, error) DDLParse(ctx context.Context, sql string) (*types.StructType, error) SameSemantics(ctx context.Context, plan1 *generated.Plan, plan2 *generated.Plan) (bool, error) SemanticHash(ctx context.Context, plan *generated.Plan) (int32, error) Config(ctx context.Context, configRequest *generated.ConfigRequest_Operation) (*generated.ConfigResponse, error) } type ExecuteResponseStream interface { ToTable() (*types.StructType, arrow.Table, error) Properties() map[string]any } ================================================ FILE: spark/client/channel/channel.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. 
See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package channel import ( "context" "crypto/tls" "crypto/x509" "errors" "fmt" "net" "net/url" "os" "runtime" "strconv" "strings" "github.com/apache/spark-connect-go/spark" "github.com/google/uuid" "google.golang.org/grpc/credentials/insecure" "github.com/apache/spark-connect-go/spark/sparkerrors" "golang.org/x/oauth2" "google.golang.org/grpc" "google.golang.org/grpc/credentials" "google.golang.org/grpc/credentials/oauth" ) // Builder is the interface that is used to implement different patterns that // create the GRPC connection. // // This allows other consumers to plugin custom authentication and authorization // handlers without having to extend directly the Spark Connect code. type Builder interface { // Build creates the grpc.ClientConn according to the configuration of the builder. // Implementations are free to provide additional paramters in their implementation // and simply must satisfy this minimal set of requirements. Build(ctx context.Context) (*grpc.ClientConn, error) // User identifies the username passed as part of the Spark Connect requests. User() string // Headers refers to the request metadata that is passed for every request from the // client to the server. Headers() map[string]string // SessionId identifies the client side session identifier. 
// This value must be a UUID formatted
// as a string.
SessionId() string
// UserAgent identifies the user agent string that is passed as part of the request. It contains
// information about the operating system, Go version etc.
UserAgent() string
}

// BaseBuilder is used to parse the different parameters of the connection
// string according to the specification documented here:
//
// https://github.com/apache/spark/blob/master/connector/connect/docs/client-connection-string.md
type BaseBuilder struct {
    host      string
    port      int
    token     string
    user      string
    headers   map[string]string
    sessionId string
    userAgent string
}

// Host returns the hostname parsed from the connection string.
func (cb *BaseBuilder) Host() string {
    return cb.host
}

// Port returns the remote port (defaults to 15002 when not given).
func (cb *BaseBuilder) Port() int {
    return cb.port
}

// Token returns the bearer token, or the empty string for insecure connections.
func (cb *BaseBuilder) Token() string {
    return cb.token
}

// User returns the user ID passed as part of the user context.
func (cb *BaseBuilder) User() string {
    return cb.user
}

// Headers returns the extra metadata headers parsed from the connection string.
func (cb *BaseBuilder) Headers() map[string]string {
    return cb.headers
}

// SessionId returns the session identifier (a UUID string).
func (cb *BaseBuilder) SessionId() string {
    return cb.sessionId
}

// UserAgent returns the user agent string sent with every request.
func (cb *BaseBuilder) UserAgent() string {
    return cb.userAgent
}

// Build finalizes the creation of the grpc.ClientConn by creating a GRPC channel
// with the necessary options extracted from the connection string. For
// TLS connections, this function will load the system certificates.
// A non-empty token switches the channel to TLS with per-RPC OAuth credentials;
// otherwise the connection is plain-text.
func (cb *BaseBuilder) Build(ctx context.Context) (*grpc.ClientConn, error) {
    var opts []grpc.DialOption
    opts = append(opts, grpc.WithAuthority(cb.host))
    if cb.token == "" {
        // No token configured: insecure (plain-text) transport.
        opts = append(opts, grpc.WithTransportCredentials(insecure.NewCredentials()))
    } else {
        // Note: On the Windows platform, use of x509.SystemCertPool() requires
        // go version 1.18 or higher.
        systemRoots, err := x509.SystemCertPool()
        if err != nil {
            return nil, err
        }
        cred := credentials.NewTLS(&tls.Config{
            RootCAs: systemRoots,
        })
        opts = append(opts, grpc.WithTransportCredentials(cred))
        // Attach the bearer token as per-RPC OAuth2 credentials.
        ts := oauth2.StaticTokenSource(&oauth2.Token{
            AccessToken: cb.token,
            TokenType:   "bearer",
        })
        opts = append(opts, grpc.WithPerRPCCredentials(oauth.TokenSource{TokenSource: ts}))
    }
    remote := fmt.Sprintf("%v:%v", cb.host, cb.port)
    conn, err := grpc.NewClient(remote, opts...)
    if err != nil {
        return nil, sparkerrors.WithType(fmt.Errorf("failed to connect to remote %s: %w", remote, err), sparkerrors.ConnectionError)
    }
    return conn, nil
}

// NewBuilder creates a new instance of the BaseBuilder. This constructor effectively
// parses the connection string and extracts the relevant parameters directly.
//
// The following parameters to the connection string are reserved: user_id, session_id, use_ssl,
// and token. These parameters are not allowed to be injected as headers.
func NewBuilder(connection string) (*BaseBuilder, error) {
    u, err := url.Parse(connection)
    if err != nil {
        return nil, err
    }
    if u.Hostname() == "" {
        return nil, sparkerrors.WithType(errors.New("URL must contain a hostname"), sparkerrors.InvalidInputError)
    }
    if u.Scheme != "sc" {
        return nil, sparkerrors.WithType(errors.New("URL schema must be set to `sc`"), sparkerrors.InvalidInputError)
    }
    port := 15002
    host := u.Host
    // Check if the host part of the URL contains a port and extract.
    if strings.Contains(u.Host, ":") {
        // We can ignore the error here already since the url parsing
        // raises the error about invalid port.
        hostStr, portStr, _ := net.SplitHostPort(u.Host)
        host = hostStr
        if len(portStr) != 0 {
            port, err = strconv.Atoi(portStr)
            if err != nil {
                return nil, err
            }
        }
    }
    // Validate that the URL path is empty or follows the right format.
    if u.Path != "" && !strings.HasPrefix(u.Path, "/;") {
        return nil, sparkerrors.WithType(
            fmt.Errorf("the URL path (%v) must be empty or have a proper parameter syntax", u.Path),
            sparkerrors.InvalidInputError)
    }
    cb := &BaseBuilder{
        host:      host,
        port:      port,
        headers:   map[string]string{},
        sessionId: uuid.NewString(),
        userAgent: "",
    }
    // The path carries `;key=value` parameters; entries without exactly one `=`
    // (including the leading "/" element) are silently skipped.
    elements := strings.Split(u.Path, ";")
    for _, e := range elements {
        props := strings.Split(e, "=")
        if len(props) == 2 {
            switch props[0] {
            case "token":
                cb.token = props[1]
            case "user_id":
                cb.user = props[1]
            case "session_id":
                cb.sessionId = props[1]
            case "user_agent":
                cb.userAgent = props[1]
            default:
                // Any non-reserved parameter becomes a metadata header.
                cb.headers[props[0]] = props[1]
            }
        }
    }
    // Set default user ID if not set.
    if cb.user == "" {
        cb.user = os.Getenv("USER")
        if cb.user == "" {
            cb.user = "na"
        }
    }
    // Update the user agent if it is not set or set to a custom value.
    // Precedence: explicit user_agent parameter > SPARK_CONNECT_USER_AGENT env var > built-in default.
    val := os.Getenv("SPARK_CONNECT_USER_AGENT")
    if cb.userAgent == "" && val != "" {
        cb.userAgent = os.Getenv("SPARK_CONNECT_USER_AGENT")
    } else if cb.userAgent == "" {
        cb.userAgent = "_SPARK_CONNECT_GO"
    }
    // In addition, to the specified user agent, we need to append information about the
    // host encoded as user agent components.
    cb.userAgent = fmt.Sprintf("%s spark/%s os/%s go/%s", cb.userAgent,
        spark.Version(), runtime.GOOS, runtime.Version())
    return cb, nil
}


================================================
FILE: spark/client/channel/channel_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package channel_test

import (
    "context"
    "strings"
    "testing"

    "github.com/google/uuid"

    "github.com/apache/spark-connect-go/spark/client/channel"
    "github.com/apache/spark-connect-go/spark/sparkerrors"
    "github.com/stretchr/testify/assert"
)

// goodChannelURL exercises user_id, token and a custom header in one string.
const goodChannelURL = "sc://host:15002/;user_id=a;token=b;x-other-header=c"

// TestBasicChannelBuilder checks that a valid connection string yields a builder.
func TestBasicChannelBuilder(t *testing.T) {
    cb, _ := channel.NewBuilder(goodChannelURL)
    if cb == nil {
        t.Error("ChannelBuilder must not be null")
    }
}

// TestBasicChannelParsing covers scheme validation, default and explicit ports,
// path validation, and extraction of all reserved/non-reserved parameters.
func TestBasicChannelParsing(t *testing.T) {
    _, err := channel.NewBuilder("abc://asdada:1333")
    assert.False(t, strings.Contains(err.Error(), "scheme"), "Channel build should fail with wrong scheme")

    _, err = channel.NewBuilder("sc://:1333")
    assert.False(t, strings.Contains(err.Error(), "scheme"), "Should not have an error for a proper URL")

    cb, err := channel.NewBuilder("sc://empty")
    assert.Nilf(t, err, "Valid path should not fail: %v", err)
    assert.Equalf(t, 15002, cb.Port(), "Default port must be set, but got %v", cb.Port)

    _, err = channel.NewBuilder("sc://empty:port")
    assert.NotNilf(t, err, "Port must be a valid integer %v", err)

    _, err = channel.NewBuilder("sc://empty:9999999999999")
    assert.Nilf(t, err, "Port must be a valid number %v", err)

    _, err = channel.NewBuilder("sc://abcd/this")
    assert.True(t, strings.Contains(err.Error(), "URL path"), "URL path elements are not allowed")
    assert.ErrorIs(t, err, sparkerrors.InvalidInputError)

    cb, err = channel.NewBuilder(goodChannelURL)
    assert.Nilf(t, err, "Should not have an error for a proper URL")
    assert.Equal(t, "host", cb.Host())
    assert.Equal(t, 15002, cb.Port())
    assert.Len(t, cb.Headers(), 1)
    assert.Equal(t, "c", cb.Headers()["x-other-header"])
    assert.Equal(t, "a", cb.User())
    assert.Equal(t, "b", cb.Token())

    cb, err = channel.NewBuilder("sc://localhost:443/;token=token;user_id=user_id;cluster_id=a;session_id=session")
    assert.NoError(t, err)
    assert.Equal(t, 443, cb.Port())
    assert.Equal(t, "localhost", cb.Host())
    assert.Equal(t, "token", cb.Token())
    assert.Equal(t, "user_id", cb.User())
    assert.Equal(t, "session", cb.SessionId())
}

// TestChannelBuildConnect checks that Build succeeds for both insecure and
// token-based configurations (grpc.NewClient does not dial eagerly).
func TestChannelBuildConnect(t *testing.T) {
    ctx := context.Background()
    cb, err := channel.NewBuilder("sc://localhost")
    assert.NoError(t, err)
    // The auto-generated session ID must be a parseable UUID.
    id := cb.SessionId()
    _, err = uuid.Parse(id)
    assert.NoError(t, err)
    assert.NoError(t, err, "Should not have an error for a proper URL.")
    conn, err := cb.Build(ctx)
    assert.Nil(t, err, "no error for proper connection")
    assert.NotNil(t, conn)

    cb, err = channel.NewBuilder("sc://localhost:443/;token=abcd;user_id=a")
    assert.Nil(t, err, "Should not have an error for a proper URL.")
    conn, err = cb.Build(ctx)
    assert.Nil(t, err, "no error for proper connection")
    assert.NotNil(t, conn)
}

// TestChannelBulder_UserAgent checks the default and custom user agent strings,
// both of which must carry the spark/os/go version components.
func TestChannelBulder_UserAgent(t *testing.T) {
    cb, err := channel.NewBuilder("sc://localhost")
    assert.NoError(t, err)
    assert.True(t, strings.Contains(cb.UserAgent(), "_SPARK_CONNECT_GO"))
    assert.True(t, strings.Contains(cb.UserAgent(), "go/"))
    assert.True(t, strings.Contains(cb.UserAgent(), "spark/"))
    assert.True(t, strings.Contains(cb.UserAgent(), "os/"))

    cb, err = channel.NewBuilder("sc://localhost/;user_agent=custom")
    assert.NoError(t, err)
    assert.True(t, strings.Contains(cb.UserAgent(), "custom"))
    assert.True(t, strings.Contains(cb.UserAgent(), "go/"))
    assert.True(t, strings.Contains(cb.UserAgent(), "spark/"))
    assert.True(t, strings.Contains(cb.UserAgent(), "os/"))
}


================================================
FILE: spark/client/channel/compat.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package channel

// ChannelBuilder re-exports BaseBuilder as its previous name for compatibility.
//
// Deprecated: use BaseBuilder instead.
type ChannelBuilder = BaseBuilder


================================================
FILE: spark/client/client.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package client

import (
    "context"
    "errors"
    "fmt"
    "io"

    "github.com/apache/spark-connect-go/spark/sql/utils"

    "google.golang.org/grpc"
    "google.golang.org/grpc/metadata"

    "github.com/apache/spark-connect-go/spark/client/base"
    "github.com/apache/spark-connect-go/spark/mocks"

    "github.com/apache/spark-connect-go/spark/client/options"

    "github.com/google/uuid"

    "github.com/apache/arrow-go/v18/arrow"
    "github.com/apache/arrow-go/v18/arrow/array"

    "github.com/apache/spark-connect-go/spark/sql/types"

    proto "github.com/apache/spark-connect-go/internal/generated"
    "github.com/apache/spark-connect-go/spark/sparkerrors"
)

// sparkConnectClientImpl is the default implementation of base.SparkConnectClient.
// It bundles the (possibly retriable) RPC client with the per-session state that
// every request needs: outgoing metadata, session ID and client options.
type sparkConnectClientImpl struct {
    client    base.SparkConnectRPCClient
    metadata  metadata.MD
    sessionId string
    opts      options.SparkClientOptions
}

// newExecutePlanRequest builds an ExecutePlanRequest for the given plan with a
// fresh operation ID, the configured user context and the reattach option.
func (s *sparkConnectClientImpl) newExecutePlanRequest(plan *proto.Plan) *proto.ExecutePlanRequest {
    // Every new execution needs to get a new operation ID.
    operationId := uuid.NewString()
    return &proto.ExecutePlanRequest{
        SessionId: s.sessionId,
        Plan:      plan,
        UserContext: &proto.UserContext{
            UserId: s.opts.UserId,
        },
        ClientType: &s.opts.UserAgent,
        // Operation ID is needed for being able to reattach.
        OperationId: &operationId,
        RequestOptions: []*proto.ExecutePlanRequest_RequestOption{
            {
                RequestOption: &proto.ExecutePlanRequest_RequestOption_ReattachOptions{
                    ReattachOptions: &proto.ReattachOptions{
                        Reattachable: s.opts.ReattachExecution,
                    },
                },
            },
        },
    }
}

// ExecuteCommand executes a command plan and drains the full response stream,
// returning the result table, its schema and any response properties
// (e.g. "sql_command_result"). The plan must wrap a Command.
func (s *sparkConnectClientImpl) ExecuteCommand(ctx context.Context, plan *proto.Plan) (arrow.Table, *types.StructType, map[string]any, error) {
    request := s.newExecutePlanRequest(plan)

    // Check that the supplied plan is actually a command.
    if plan.GetCommand() == nil {
        return nil, nil, nil, sparkerrors.WithType(
            fmt.Errorf("the supplied plan does not contain a command"), sparkerrors.ExecutionError)
    }

    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    c, err := s.client.ExecutePlan(ctx, request)
    if err != nil {
        return nil, nil, nil, sparkerrors.WithType(
            fmt.Errorf("failed to call ExecutePlan in session %s: %w", s.sessionId, err), sparkerrors.ExecutionError)
    }

    respHandler := NewExecuteResponseStream(c, s.sessionId, *request.OperationId, s.opts)
    schema, table, err := respHandler.ToTable()
    if err != nil {
        return nil, nil, nil, err
    }
    return table, schema, respHandler.Properties(), nil
}

// ExecutePlan submits a plan for execution and returns the (lazy) response
// stream without consuming it.
func (s *sparkConnectClientImpl) ExecutePlan(ctx context.Context, plan *proto.Plan) (base.ExecuteResponseStream, error) {
    request := s.newExecutePlanRequest(plan)
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    c, err := s.client.ExecutePlan(ctx, request)
    if err != nil {
        return nil, sparkerrors.WithType(fmt.Errorf(
            "failed to call ExecutePlan in session %s: %w", s.sessionId, err), sparkerrors.ExecutionError)
    }
    return NewExecuteResponseStream(c, s.sessionId, *request.OperationId, s.opts), nil
}

// Creates a new AnalyzePlanRequest with the necessary metadata.
func (s *sparkConnectClientImpl) newAnalyzePlanStub() proto.AnalyzePlanRequest {
    return proto.AnalyzePlanRequest{
        SessionId: s.sessionId,
        UserContext: &proto.UserContext{
            UserId: s.opts.UserId,
        },
        ClientType: &s.opts.UserAgent,
    }
}

// AnalyzePlan asks the server for the schema of the given plan.
func (s *sparkConnectClientImpl) AnalyzePlan(ctx context.Context, plan *proto.Plan) (*proto.AnalyzePlanResponse, error) {
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_Schema_{
        Schema: &proto.AnalyzePlanRequest_Schema{
            Plan: plan,
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    response, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return nil, sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return response, nil
}

// Explain returns the server-side explain output of the plan in the requested
// mode (simple, extended, cost, formatted, codegen).
func (s *sparkConnectClientImpl) Explain(ctx context.Context,
    plan *proto.Plan, explainMode utils.ExplainMode,
) (*proto.AnalyzePlanResponse, error) {
    // Map the public explain mode onto the proto enum.
    var mode proto.AnalyzePlanRequest_Explain_ExplainMode
    switch explainMode {
    case utils.ExplainModeExtended:
        mode = proto.AnalyzePlanRequest_Explain_EXPLAIN_MODE_EXTENDED
    case utils.ExplainModeSimple:
        mode = proto.AnalyzePlanRequest_Explain_EXPLAIN_MODE_SIMPLE
    case utils.ExplainModeCost:
        mode = proto.AnalyzePlanRequest_Explain_EXPLAIN_MODE_COST
    case utils.ExplainModeFormatted:
        mode = proto.AnalyzePlanRequest_Explain_EXPLAIN_MODE_FORMATTED
    case utils.ExplainModeCodegen:
        mode = proto.AnalyzePlanRequest_Explain_EXPLAIN_MODE_CODEGEN
    default:
        return nil, sparkerrors.WithType(fmt.Errorf("unsupported explain mode %v",
            explainMode), sparkerrors.InvalidArgumentError)
    }
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_Explain_{
        Explain: &proto.AnalyzePlanRequest_Explain{
            Plan:        plan,
            ExplainMode: mode,
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    response, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return nil, sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return response, nil
}

// Persist caches the relation of the plan at the given storage level.
func (s *sparkConnectClientImpl) Persist(ctx context.Context, plan *proto.Plan, storageLevel utils.StorageLevel) error {
    protoLevel := utils.ToProtoStorageLevel(storageLevel)
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_Persist_{
        Persist: &proto.AnalyzePlanRequest_Persist{
            Relation:     plan.GetRoot(),
            StorageLevel: protoLevel,
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    _, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return nil
}

// Unpersist removes the relation of the plan from the cache.
func (s *sparkConnectClientImpl) Unpersist(ctx context.Context, plan *proto.Plan) error {
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_Unpersist_{
        Unpersist: &proto.AnalyzePlanRequest_Unpersist{
            Relation: plan.GetRoot(),
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    _, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return nil
}

// GetStorageLevel returns the storage level the plan's relation is cached at.
func (s *sparkConnectClientImpl) GetStorageLevel(ctx context.Context, plan *proto.Plan) (*utils.StorageLevel, error) {
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_GetStorageLevel_{
        GetStorageLevel: &proto.AnalyzePlanRequest_GetStorageLevel{
            Relation: plan.GetRoot(),
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    response, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return nil, sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    level := response.GetGetStorageLevel().StorageLevel
    res := utils.FromProtoStorageLevel(level)
    return &res, nil
}

// SparkVersion returns the version string of the connected Spark server.
func (s *sparkConnectClientImpl) SparkVersion(ctx context.Context) (string, error) {
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_SparkVersion_{
        SparkVersion: &proto.AnalyzePlanRequest_SparkVersion{},
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    response, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return "", sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return response.GetSparkVersion().Version, nil
}

// DDLParse parses a DDL string (e.g. "a INT, b STRING") into a StructType.
func (s *sparkConnectClientImpl) DDLParse(ctx context.Context, sql string) (*types.StructType, error) {
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_DdlParse{
        DdlParse: &proto.AnalyzePlanRequest_DDLParse{
            DdlString: sql,
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    response, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return nil, sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return types.ConvertProtoDataTypeToStructType(response.GetDdlParse().Parsed)
}

// SameSemantics reports whether two plans are semantically equivalent.
func (s *sparkConnectClientImpl) SameSemantics(ctx context.Context, plan1 *proto.Plan, plan2 *proto.Plan) (bool, error) {
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_SameSemantics_{
        SameSemantics: &proto.AnalyzePlanRequest_SameSemantics{
            TargetPlan: plan1,
            OtherPlan:  plan2,
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    response, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return false, sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return response.GetSameSemantics().GetResult(), nil
}

// SemanticHash returns the server-computed semantic hash of the plan.
func (s *sparkConnectClientImpl) SemanticHash(ctx context.Context, plan *proto.Plan) (int32, error) {
    request := s.newAnalyzePlanStub()
    request.Analyze = &proto.AnalyzePlanRequest_SemanticHash_{
        SemanticHash: &proto.AnalyzePlanRequest_SemanticHash{
            Plan: plan,
        },
    }
    // Append the other items to the request.
    ctx = metadata.NewOutgoingContext(ctx, s.metadata)
    response, err := s.client.AnalyzePlan(ctx, &request)
    if se := sparkerrors.FromRPCError(err); se != nil {
        return 0, sparkerrors.WithType(se, sparkerrors.ExecutionError)
    }
    return response.GetSemanticHash().GetResult(), nil
}

// Config performs a runtime-config operation (get/set/unset/...) on the server.
// NOTE(review): unlike the Analyze/Execute methods, this call does not attach
// s.metadata to the outgoing context — confirm whether that is intentional.
func (s *sparkConnectClientImpl) Config(ctx context.Context,
    operation *proto.ConfigRequest_Operation,
) (*proto.ConfigResponse, error) {
    request := &proto.ConfigRequest{
        Operation: operation,
        UserContext: &proto.UserContext{
            UserId: s.opts.UserId,
        },
        ClientType: &s.opts.UserAgent,
    }
    request.SessionId = s.sessionId
    resp, err := s.client.Config(ctx, request)
    if err != nil {
        return nil, err
    }
    return resp, nil
}

// NewSparkExecutor creates a SparkConnectClient on top of the given connection.
// With ReattachExecution enabled it wraps the RPC client in the retriable variant.
func NewSparkExecutor(conn *grpc.ClientConn, md metadata.MD, sessionId string, opts options.SparkClientOptions) base.SparkConnectClient {
    var client base.SparkConnectRPCClient
    if opts.ReattachExecution {
        client = NewRetriableSparkConnectClient(conn, sessionId, opts)
    } else {
        client = proto.NewSparkConnectServiceClient(conn)
    }
    return &sparkConnectClientImpl{
        client:    client,
        metadata:  md,
        sessionId: sessionId,
        opts:      opts,
    }
}

// NewSparkExecutorFromClient creates a new SparkConnectClient from an existing client and is mostly
// used in testing.
func NewSparkExecutorFromClient(client base.SparkConnectRPCClient, md metadata.MD, sessionId string) base.SparkConnectClient {
    return &sparkConnectClientImpl{
        client:    client,
        metadata:  md,
        sessionId: sessionId,
        opts:      options.DefaultSparkClientOptions,
    }
}

// ExecutePlanClient is the wrapper around the result of the execution of a query plan using
// Spark Connect.
type ExecutePlanClient struct {
    // The GRPC stream to read the response messages.
    responseStream proto.SparkConnectService_ExecutePlanClient
    // The schema of the result of the operation.
    schema *types.StructType
    // The sessionId is used to verify the server side session.
    sessionId string
    // done tracks whether the server signaled ResultComplete.
    done bool
    // properties collects auxiliary response payloads keyed by name.
    properties map[string]any
    opts       options.SparkClientOptions
}

// Properties returns the auxiliary payloads collected while draining the
// response stream (e.g. the "sql_command_result" relation).
func (c *ExecutePlanClient) Properties() map[string]any {
    return c.properties
}

// ToTable converts the result of the execution of a query plan to an Arrow Table.
// It drains the response stream, validating the server-side session ID on every
// message, capturing the schema when present and accumulating Arrow batches.
func (c *ExecutePlanClient) ToTable() (*types.StructType, arrow.Table, error) {
    var recordBatches []arrow.Record
    var arrowSchema *arrow.Schema
    recordBatches = make([]arrow.Record, 0)

    // Explicitly needed when tracking re-attachble execution.
    c.done = false

    for {
        resp, err := c.responseStream.Recv()
        // EOF is received when the last message has been processed and the stream
        // finished normally.
        if errors.Is(err, io.EOF) {
            break
        }
        // If the error was not EOF, there might be another error.
        if se := sparkerrors.FromRPCError(err); se != nil {
            return nil, nil, sparkerrors.WithType(se, sparkerrors.ExecutionError)
        }

        // Process the message

        // Check that the server returned the session ID that we were expecting
        // and that it has not changed.
        if resp.GetSessionId() != c.sessionId {
            return c.schema, nil, sparkerrors.WithType(&sparkerrors.InvalidServerSideSessionDetailsError{
                OwnSessionId:      c.sessionId,
                ReceivedSessionId: resp.GetSessionId(),
            }, sparkerrors.InvalidServerSideSessionError)
        }

        // Check if the response has already the schema set and if yes, convert
        // the proto DataType to a StructType.
        if resp.Schema != nil {
            c.schema, err = types.ConvertProtoDataTypeToStructType(resp.Schema)
            if err != nil {
                return nil, nil, sparkerrors.WithType(err, sparkerrors.ExecutionError)
            }
        }

        switch x := resp.ResponseType.(type) {
        case *proto.ExecutePlanResponse_SqlCommandResult_:
            if val := x.SqlCommandResult.GetRelation(); val != nil {
                c.properties["sql_command_result"] = val
            }
        case *proto.ExecutePlanResponse_ArrowBatch_:
            // Decode the Arrow IPC payload into a record and retain it until the
            // table is assembled below.
            record, err := types.ReadArrowBatchToRecord(x.ArrowBatch.Data, c.schema)
            if err != nil {
                return nil, nil, err
            }
            arrowSchema = record.Schema()
            record.Retain()
            recordBatches = append(recordBatches, record)
        case *proto.ExecutePlanResponse_ResultComplete_:
            c.done = true
        default:
            // Explicitly ignore messages that we cannot process at the moment.
        }
    }

    // Check that the result is logically complete. The result might not be complete
    // because after 2 minutes the server will interrupt the connection, and we have to
    // send a ReAttach execute request.
    if c.opts.ReattachExecution && !c.done {
        return nil, nil, sparkerrors.WithType(fmt.Errorf("the result is not complete"), sparkerrors.ExecutionError)
    }

    // Return the schema and table.
    if arrowSchema == nil {
        return c.schema, nil, nil
    } else {
        return c.schema, array.NewTableFromRecords(arrowSchema, recordBatches), nil
    }
}

// NewExecuteResponseStream wraps a raw ExecutePlan response stream.
// NOTE(review): operationId is accepted but not retained anywhere — confirm
// whether it should be stored for re-attach handling.
func NewExecuteResponseStream(
    responseClient proto.SparkConnectService_ExecutePlanClient,
    sessionId string,
    operationId string,
    opts options.SparkClientOptions,
) base.ExecuteResponseStream {
    return &ExecutePlanClient{
        responseStream: responseClient,
        sessionId:      sessionId,
        done:           false,
        properties:     make(map[string]any),
        opts:           opts,
    }
}

// NewTestConnectClientFromResponses builds a test client whose stream replays
// the given canned responses.
func NewTestConnectClientFromResponses(sessionId string, r ...*mocks.MockResponse) base.SparkConnectClient {
    protoClient := mocks.NewProtoClientMock(r...)
    stream := NewExecuteResponseStream(protoClient, sessionId, uuid.NewString(), options.DefaultSparkClientOptions)
    return &mocks.TestExecutor{
        Client: stream,
    }
}

// NewTestConnectClientWithImmediateError builds a test client that fails
// immediately with the given error.
func NewTestConnectClientWithImmediateError(sessionId string, err error) base.SparkConnectClient {
    stream := NewExecuteResponseStream(nil, sessionId, uuid.NewString(), options.DefaultSparkClientOptions)
    return &mocks.TestExecutor{
        Client: stream,
        Err:    err,
    }
}


================================================
FILE: spark/client/client_test.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package client_test

import (
    "context"
    "testing"

    "github.com/google/uuid"

    proto "github.com/apache/spark-connect-go/internal/generated"
    "github.com/apache/spark-connect-go/spark/client"
    "github.com/apache/spark-connect-go/spark/client/testutils"
    "github.com/apache/spark-connect-go/spark/mocks"
    "github.com/apache/spark-connect-go/spark/sparkerrors"
    "github.com/stretchr/testify/assert"
)

// TestAnalyzePlanCallsAnalyzePlanOnClient checks the happy path of AnalyzePlan.
func TestAnalyzePlanCallsAnalyzePlanOnClient(t *testing.T) {
    ctx := context.Background()
    response := &proto.AnalyzePlanResponse{}
    c := client.NewSparkExecutorFromClient(
        testutils.NewConnectServiceClientMock(nil, response, nil, nil), nil, mocks.MockSessionId)
    resp, err := c.AnalyzePlan(ctx, &proto.Plan{})
    assert.NoError(t, err)
    assert.NotNil(t, resp)
}

// TestAnalyzePlanFailsIfClientFails checks that RPC errors are surfaced.
func TestAnalyzePlanFailsIfClientFails(t *testing.T) {
    ctx := context.Background()
    c := client.NewSparkExecutorFromClient(
        testutils.NewConnectServiceClientMock(nil, nil, assert.AnError, nil), nil, mocks.MockSessionId)
    resp, err := c.AnalyzePlan(ctx, &proto.Plan{})
    assert.Nil(t, resp)
    assert.Error(t, err)
}

// TestExecutePlanCallsExecutePlanOnClient checks the happy path of ExecutePlan.
func TestExecutePlanCallsExecutePlanOnClient(t *testing.T) {
    ctx := context.Background()

    plan := &proto.Plan{}

    // Generate a mock client
    responseStream := mocks.NewProtoClientMock(&mocks.ExecutePlanResponseDone)
    c := client.NewSparkExecutorFromClient(
        testutils.NewConnectServiceClientMock(responseStream, nil, nil, t), nil, mocks.MockSessionId)
    resp, err := c.ExecutePlan(ctx, plan)
    assert.NoError(t, err)
    assert.NotNil(t, resp)
}

// TestExecutePlanCallsExecuteCommandOnClient checks that ExecuteCommand rejects
// plans without a command and succeeds for a proper SQL command plan.
func TestExecutePlanCallsExecuteCommandOnClient(t *testing.T) {
    ctx := context.Background()

    plan := &proto.Plan{}

    // Generate a mock client
    responseStream := mocks.NewProtoClientMock(&mocks.ExecutePlanResponseDone, &mocks.ExecutePlanResponseEOF)

    // Check that the execution fails if no command is supplied.
    c := client.NewSparkExecutorFromClient(
        testutils.NewConnectServiceClientMock(responseStream, nil, nil, t), nil, mocks.MockSessionId)
    _, _, _, err := c.ExecuteCommand(ctx, plan)
    assert.ErrorIs(t, err, sparkerrors.ExecutionError)

    // Generate a command and the execution should succeed.
    sqlCommand := mocks.NewSqlCommand("select range(10)")
    c = client.NewSparkExecutorFromClient(testutils.NewConnectServiceClientMock(responseStream, nil, nil, t),
        nil, mocks.MockSessionId)
    _, _, _, err = c.ExecuteCommand(ctx, sqlCommand)
    assert.NoError(t, err)
}

// Test_ExecuteWithWrongSession checks that a server session ID mismatch is
// detected and reported.
func Test_ExecuteWithWrongSession(t *testing.T) {
    ctx := context.Background()

    sqlCommand := mocks.NewSqlCommand("select range(10)")

    // Generate a mock client
    responseStream := mocks.NewProtoClientMock(&mocks.ExecutePlanResponseDone, &mocks.ExecutePlanResponseEOF)

    // Check that the execution fails if no command is supplied.
    c := client.NewSparkExecutorFromClient(
        testutils.NewConnectServiceClientMock(responseStream, nil, nil, t), nil, uuid.NewString())
    _, _, _, err := c.ExecuteCommand(ctx, sqlCommand)
    assert.ErrorIs(t, err, sparkerrors.InvalidServerSideSessionError)
}

// Test_Execute_SchemaParsingFails checks that a malformed schema in the
// response turns into an ExecutionError.
func Test_Execute_SchemaParsingFails(t *testing.T) {
    ctx := context.Background()
    sqlCommand := mocks.NewSqlCommand("select range(10)")
    responseStream := mocks.NewProtoClientMock(
        &mocks.ExecutePlanResponseBrokenSchema,
        &mocks.ExecutePlanResponseDone,
        &mocks.ExecutePlanResponseEOF)
    c := client.NewSparkExecutorFromClient(
        testutils.NewConnectServiceClientMock(responseStream, nil, nil, t), nil, mocks.MockSessionId)
    _, _, _, err := c.ExecuteCommand(ctx, sqlCommand)
    assert.ErrorIs(t, err, sparkerrors.ExecutionError)
}


================================================
FILE: spark/client/conf.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package client import ( "context" proto "github.com/apache/spark-connect-go/internal/generated" "github.com/apache/spark-connect-go/spark/client/base" ) // Public interface RuntimeConfig type RuntimeConfig interface { GetAll(ctx context.Context) (map[string]string, error) Set(ctx context.Context, key string, value string) error Get(ctx context.Context, key string) (string, error) Unset(ctx context.Context, key string) error IsModifiable(ctx context.Context, key string) (bool, error) GetWithDefault(ctx context.Context, key string, default_value string) (string, error) } // private type with private member client type runtimeConfig struct { client *base.SparkConnectClient } // GetAll returns all configured keys in a map of strings func (r runtimeConfig) GetAll(ctx context.Context) (map[string]string, error) { req := &proto.ConfigRequest_GetAll{} operation := &proto.ConfigRequest_Operation_GetAll{GetAll: req} op := &proto.ConfigRequest_Operation{OpType: operation} resp, err := (*r.client).Config(ctx, op) if err != nil { return nil, err } m := make(map[string]string, 0) for _, k := range resp.GetPairs() { if k.Value != nil { m[k.Key] = *k.Value } } return m, nil } // Set takes a key and a value and sets it in the config func (r runtimeConfig) Set(ctx context.Context, key string, value string) error { reqArr := []*proto.KeyValue{{Key: key, Value: &value}} req := &proto.ConfigRequest_Set{ Pairs: reqArr, } 
op := &proto.ConfigRequest_Operation{OpType: &proto.ConfigRequest_Operation_Set{Set: req}} _, err := (*r.client).Config(ctx, op) if err != nil { return err } return nil } func (r runtimeConfig) Get(ctx context.Context, key string) (string, error) { req := &proto.ConfigRequest_Get{Keys: []string{key}} operation := &proto.ConfigRequest_Operation_Get{Get: req} op := &proto.ConfigRequest_Operation{OpType: operation} resp, err := (*r.client).Config(ctx, op) if err != nil { return "", err } return *resp.GetPairs()[0].Value, nil } func (r runtimeConfig) Unset(ctx context.Context, key string) error { req := &proto.ConfigRequest_Unset{Keys: []string{key}} operation := &proto.ConfigRequest_Operation_Unset{Unset: req} op := &proto.ConfigRequest_Operation{OpType: operation} _, err := (*r.client).Config(ctx, op) if err != nil { return err } return nil } func (r runtimeConfig) IsModifiable(ctx context.Context, key string) (bool, error) { req := &proto.ConfigRequest_IsModifiable{Keys: []string{key}} operation := &proto.ConfigRequest_Operation_IsModifiable{IsModifiable: req} op := &proto.ConfigRequest_Operation{OpType: operation} resp, err := (*r.client).Config(ctx, op) if err != nil { return false, err } re := *resp.GetPairs()[0].Value if re == "true" { return true, nil } else { return false, nil } } func (r runtimeConfig) GetWithDefault(ctx context.Context, key string, default_value string) (string, error) { p := make([]*proto.KeyValue, 0) p = append(p, &proto.KeyValue{Key: key, Value: &default_value}) req := &proto.ConfigRequest_GetWithDefault{Pairs: p} operation := &proto.ConfigRequest_Operation_GetWithDefault{GetWithDefault: req} op := &proto.ConfigRequest_Operation{OpType: operation} resp, err := (*r.client).Config(ctx, op) if err != nil { return "", err } return *resp.GetPairs()[0].Value, nil } // Constructor for runtimeConfig used by SparkSession func NewRuntimeConfig(client *base.SparkConnectClient) *runtimeConfig { return &runtimeConfig{client: client} } 
================================================
FILE: spark/client/options/options.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package options

// SparkClientOptions bundles the per-client settings: whether executions are
// re-attachable and the user agent / user ID sent with every request.
type SparkClientOptions struct {
    ReattachExecution bool
    UserAgent         string
    UserId            string
}

// DefaultSparkClientOptions is the zero-configuration default (no reattach,
// empty user agent and user ID).
var DefaultSparkClientOptions = SparkClientOptions{
    ReattachExecution: false,
}

// NewSparkClientOptions creates options with the given reattach behavior;
// UserAgent and UserId are left empty.
func NewSparkClientOptions(reattach bool) SparkClientOptions {
    return SparkClientOptions{
        ReattachExecution: reattach,
    }
}


================================================
FILE: spark/client/retry.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package client

import (
    "context"
    "errors"
    "io"
    "math/rand"
    "strings"
    "time"

    "github.com/apache/spark-connect-go/spark/client/base"
    "github.com/apache/spark-connect-go/spark/client/options"

    "google.golang.org/grpc/metadata"

    proto "github.com/apache/spark-connect-go/internal/generated"
    "github.com/apache/spark-connect-go/spark/sparkerrors"
    "google.golang.org/grpc"
    "google.golang.org/grpc/codes"
)

// RetryHandler decides whether the given error is retriable under a policy.
type RetryHandler func(error) bool

// RetryPolicy defines the parameters for a retry policy. The policy is used to determine if an
// error is retriable and how to handle retries. The policy defines the behavior of the client
// in how it backs off in case of an error and how the retries are spread out over time.
type RetryPolicy struct {
    MaxRetries         int32
    InitialBackoff     time.Duration
    MaxBackoff         time.Duration
    BackoffMultiplier  float32
    Jitter             time.Duration
    MinJitterThreshold time.Duration
    Name               string
    Handler            RetryHandler
}

// DefaultRetryPolicy is the default retry policy used by the client. It will retry on Unavailable and
// in case the cursor has been disconnected. All other errors are considered to be not retriable.
var DefaultRetryPolicy = RetryPolicy{ MaxRetries: 15, InitialBackoff: 50 * time.Millisecond, MaxBackoff: 1 * time.Minute, BackoffMultiplier: 4.0, Jitter: 500 * time.Millisecond, MinJitterThreshold: 2000 * time.Millisecond, Name: "DefaultRetryPolicy", Handler: func(e error) bool { status := sparkerrors.FromRPCError(e) switch status.Code { case codes.Unavailable: return true case codes.Internal: if strings.Contains(status.Message, "INVALID_CURSOR.DISCONNECTED") { return true } } return false }, } var TestingRetryPolicy = RetryPolicy{ MaxRetries: 5, InitialBackoff: 0, MaxBackoff: 1, BackoffMultiplier: 2, Jitter: 0, MinJitterThreshold: 0, Name: "TestingRetryPolicy", Handler: func(e error) bool { status := sparkerrors.FromRPCError(e) switch status.Code { case codes.Unavailable: return true case codes.Internal: if strings.Contains(status.Message, "INVALID_CURSOR.DISCONNECTED") { return true } } return false }, } // DefaultRetryPolicyRegistry is the default set of retry policies used by the client. It contains // all those policies that are enabled by default. var DefaultRetryPolicyRegistry = []RetryPolicy{DefaultRetryPolicy} // retryState is the current state of the retries for one particular RPC request. The retry // state is independent of the retry policy. type retryState struct { retryCount int32 nextWait time.Duration } // nextAttempt calculates the next wait time for the next retry attempt. The function returns // nil if the maximum number of retries has been exceeded, otherwise it returns the amount // of time the caller should wait. func (rs *retryState) nextAttempt(p RetryPolicy) *time.Duration { if rs.retryCount >= p.MaxRetries { return nil } // For the first retry pick the initial backoff of the matching policy. if rs.retryCount == 0 { rs.nextWait = p.InitialBackoff } // Adjust the retry count and calculate the next wait. 
rs.retryCount++ wait := rs.nextWait rs.nextWait = time.Duration(float32(rs.nextWait.Milliseconds())*p.BackoffMultiplier) * time.Millisecond if rs.nextWait > p.MaxBackoff { rs.nextWait = p.MaxBackoff } // Some policies define that jitter should only be applied after a particular threshold. if wait > p.MinJitterThreshold { wait += time.Duration(rand.Float32() * float32(p.Jitter.Milliseconds())) } return &wait } func NewRetriableSparkConnectClient(conn *grpc.ClientConn, sessionId string, opts options.SparkClientOptions, ) base.SparkConnectRPCClient { innerClient := proto.NewSparkConnectServiceClient(conn) return &retriableSparkConnectClient{ client: innerClient, sessionId: sessionId, retryPolicies: DefaultRetryPolicyRegistry, options: opts, } } // wrapRetriableCall wraps a call to a function that returns a result and an error. The function is // retried according to the retry policies. The function will return the result or an error if the // retries are exceeded. func wrapRetriableCall[Res rpcType](ctx context.Context, retryPolicies []RetryPolicy, in func(context.Context) (Res, error)) (Res, error) { var lastErr error var response Res // Create the retry state for this wrapped call. The retry state captures the information about // the wait time and how many retries to perform. state := retryState{} // As long as the error is retriable, we will retry the operation. canRetry := true for canRetry { // Every loop iteration starts with being non-retriable. canRetry = false response, lastErr = in(ctx) if lastErr != nil { for _, h := range retryPolicies { if h.Handler(lastErr) { canRetry = true wait := state.nextAttempt(h) if wait != nil { time.Sleep(*wait) } else { // If the retries are exceeded, simply return from here. return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } // Breaks out of the retry handler loop. break } } } else { // Exit loop if no error has been received. 
return response, nil } } // TODO: Should this simoly return the original error? return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } type rpcType interface { *proto.AnalyzePlanResponse | *proto.ConfigResponse | *proto.ArtifactStatusesResponse | *proto.InterruptResponse | *proto.ReleaseExecuteResponse | *proto.ExecutePlanResponse | *proto.ReleaseSessionResponse | *proto.FetchErrorDetailsResponse } // retriableSparkConnectClient wraps the SparkConnectServiceClient implementation to // transparently handle retries. type retriableSparkConnectClient struct { client base.SparkConnectRPCClient sessionId string // Not yet used. // serverSideSessionId string retryPolicies []RetryPolicy options options.SparkClientOptions } // FetchErrorDetails implements base.SparkConnectRPCClient. func (r *retriableSparkConnectClient) FetchErrorDetails(ctx context.Context, in *proto.FetchErrorDetailsRequest, opts ...grpc.CallOption, ) (*proto.FetchErrorDetailsResponse, error) { return wrapRetriableCall(ctx, r.retryPolicies, func(ctx2 context.Context) ( *proto.FetchErrorDetailsResponse, error, ) { return r.client.FetchErrorDetails(ctx2, in, opts...) }) } // ReleaseSession implements base.SparkConnectRPCClient. func (r *retriableSparkConnectClient) ReleaseSession(ctx context.Context, in *proto.ReleaseSessionRequest, opts ...grpc.CallOption, ) (*proto.ReleaseSessionResponse, error) { return wrapRetriableCall(ctx, r.retryPolicies, func(ctx2 context.Context) (*proto.ReleaseSessionResponse, error) { return r.client.ReleaseSession(ctx2, in, opts...) }) } func (r *retriableSparkConnectClient) ExecutePlan(ctx context.Context, in *proto.ExecutePlanRequest, opts ...grpc.CallOption, ) (proto.SparkConnectService_ExecutePlanClient, error) { var lastErr error // Create the retry state for this wrapped call. The retry state captures the information about // the wait time and how many retries to perform. 
state := retryState{} // As long as the error is retriable, we will retry the operation. canRetry := true for canRetry { // Every loop iteration starts with being non-retriable. canRetry = false response, lastErr := r.client.ExecutePlan(ctx, in, opts...) if lastErr != nil { for _, h := range r.retryPolicies { if h.Handler(lastErr) { canRetry = true wait := state.nextAttempt(h) if wait != nil { time.Sleep(*wait) } else { // If the retries are exceeded, simply return from here. return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } // Breaks out of the retry handler loop. break } } } else { // Exit loop if no error has been received. rc := retriableExecutePlanClient{ context: ctx, retryContext: &retryContext{ stream: response, client: r, request: in, resultComplete: false, retryPolicies: r.retryPolicies, }, } return rc, nil } } return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } func (r *retriableSparkConnectClient) AnalyzePlan(ctx context.Context, in *proto.AnalyzePlanRequest, opts ...grpc.CallOption, ) (*proto.AnalyzePlanResponse, error) { return wrapRetriableCall(ctx, r.retryPolicies, func(ctx2 context.Context) (*proto.AnalyzePlanResponse, error) { return r.client.AnalyzePlan(ctx2, in, opts...) }) } func (r *retriableSparkConnectClient) Config(ctx context.Context, in *proto.ConfigRequest, opts ...grpc.CallOption) (*proto.ConfigResponse, error) { return wrapRetriableCall(ctx, r.retryPolicies, func(ctx2 context.Context) (*proto.ConfigResponse, error) { return r.client.Config(ctx2, in, opts...) }) } func (r *retriableSparkConnectClient) AddArtifacts(ctx context.Context, opts ...grpc.CallOption) (proto.SparkConnectService_AddArtifactsClient, error) { var lastErr error // Create the retry state for this wrapped call. The retry state captures the information about // the wait time and how many retries to perform. state := retryState{} // As long as the error is retriable, we will retry the operation. 
canRetry := true for canRetry { // Every loop iteration starts with being non-retriable. canRetry = false response, lastErr := r.client.AddArtifacts(ctx, opts...) if lastErr != nil { for _, h := range r.retryPolicies { if h.Handler(lastErr) { canRetry = true wait := state.nextAttempt(h) if wait != nil { time.Sleep(*wait) } else { // If the retries are exceeded, simply return from here. return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } // Breaks out of the retry handler loop. break } } } else { // Exit loop if no error has been received. return response, nil } } return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } func (r *retriableSparkConnectClient) ArtifactStatus(ctx context.Context, in *proto.ArtifactStatusesRequest, opts ...grpc.CallOption, ) (*proto.ArtifactStatusesResponse, error) { return wrapRetriableCall(ctx, r.retryPolicies, func(ctx2 context.Context) ( *proto.ArtifactStatusesResponse, error, ) { return r.client.ArtifactStatus(ctx2, in, opts...) }) } func (r *retriableSparkConnectClient) Interrupt(ctx context.Context, in *proto.InterruptRequest, opts ...grpc.CallOption, ) (*proto.InterruptResponse, error) { return wrapRetriableCall(ctx, r.retryPolicies, func(ctx2 context.Context) (*proto.InterruptResponse, error) { return r.client.Interrupt(ctx2, in, opts...) }) } func (r *retriableSparkConnectClient) ReattachExecute(ctx context.Context, in *proto.ReattachExecuteRequest, opts ...grpc.CallOption, ) (proto.SparkConnectService_ReattachExecuteClient, error) { var lastErr error // Create the retry state for this wrapped call. The retry state captures the information about // the wait time and how many retries to perform. state := retryState{} // As long as the error is retriable, we will retry the operation. canRetry := true for canRetry { // Every loop iteration starts with being non-retriable. canRetry = false response, lastErr := r.client.ReattachExecute(ctx, in, opts...) 
if lastErr != nil { for _, h := range r.retryPolicies { if h.Handler(lastErr) { canRetry = true wait := state.nextAttempt(h) if wait != nil { time.Sleep(*wait) } else { // If the retries are exceeded, simply return from here. return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } // Breaks out of the retry handler loop. break } } } else { // Exit loop if no error has been received. // TODO: Re-attaching needs to be retriable as well. return response, nil } } return nil, sparkerrors.WithType(lastErr, sparkerrors.RetriesExceeded) } func (r *retriableSparkConnectClient) ReleaseExecute(ctx context.Context, in *proto.ReleaseExecuteRequest, opts ...grpc.CallOption, ) (*proto.ReleaseExecuteResponse, error) { return wrapRetriableCall(ctx, r.retryPolicies, func(ctx2 context.Context) (*proto.ReleaseExecuteResponse, error) { return r.client.ReleaseExecute(ctx2, in, opts...) }) } type retryContext struct { stream proto.SparkConnectService_ExecutePlanClient client base.SparkConnectRPCClient request *proto.ExecutePlanRequest lastResponseId *string resultComplete bool retryPolicies []RetryPolicy } // retriableExecutePlanClient is a wrapper around the ExecutePlanClient that handles retries // transparently. Since the interface has to follow the ExecutePlanClient interface, we have to // implement all methods of the interface and follow their method receiver pattern. As the main // methods do not implement a pointer receiver we're wrapping the variable part of the retry // behahivor in a separate struct. // // In addition, we capture the original Context of the caller that is passed to the interface. While // this is typically not a desired pattern it is the only way to make sure the same context is used // across the retrying and underlying struct. 
type retriableExecutePlanClient struct {
	// retryContext holds the mutable retry state (current stream, request, last response id).
	retryContext *retryContext
	// context is the caller's original context, reused for re-execute/reattach calls.
	context context.Context
}

// Recv returns the next response from the wrapped ExecutePlan stream. On a stream error it
// swaps in a fresh stream (by re-executing the plan, or by reattaching with the last received
// response id) and returns the original error so that the surrounding wrapRetriableCall
// retries the Recv against the new stream.
func (r retriableExecutePlanClient) Recv() (*proto.ExecutePlanResponse, error) {
	return wrapRetriableCall(r.context, r.retryContext.retryPolicies,
		func(ctx2 context.Context) (*proto.ExecutePlanResponse, error) {
			resp, err := r.retryContext.stream.Recv()
			// Success, simply return the result and remember the response id for reattaching.
			if err == nil {
				r.retryContext.lastResponseId = &resp.ResponseId
				return resp, nil
			}
			// Ignore successful closure.
			if errors.Is(err, io.EOF) {
				return nil, err
			}
			// Now we have to assume that the request has failed, and we distinguish two cases: First, we have
			// never received a result and in this case we simply execute the same request again. Second,
			// we will send a reattach request with the same operation ID and the last response ID.
			if r.retryContext.lastResponseId == nil {
				// Send the request again.
				rs, execErr := r.retryContext.client.ExecutePlan(ctx2, r.retryContext.request)
				if execErr != nil {
					return nil, execErr
				}
				// Unwrap our own wrapper type so we never stack retriable clients.
				switch stream := rs.(type) {
				case retriableExecutePlanClient:
					r.retryContext.stream = stream.retryContext.stream
				default:
					r.retryContext.stream = stream
				}
				// Return the original error so the outer retry loop performs the next Recv.
				return nil, err
			} else {
				// Send a reattach request resuming after the last response we have seen.
				req := &proto.ReattachExecuteRequest{
					SessionId:      r.retryContext.request.SessionId,
					UserContext:    r.retryContext.request.UserContext,
					OperationId:    *r.retryContext.request.OperationId,
					LastResponseId: r.retryContext.lastResponseId,
				}
				re, execErr := r.retryContext.client.ReattachExecute(ctx2, req)
				if execErr != nil {
					return nil, execErr
				}
				// Unwrap our own wrapper type so we never stack retriable clients.
				switch stream := re.(type) {
				case retriableExecutePlanClient:
					r.retryContext.stream = stream.retryContext.stream
				default:
					r.retryContext.stream = stream
				}
				// Return the original error so the outer retry loop performs the next Recv.
				return nil, err
			}
		})
}

// Header delegates to the current underlying stream.
func (r retriableExecutePlanClient) Header() (metadata.MD, error) {
	return r.retryContext.stream.Header()
}

// Trailer delegates to the current underlying stream.
func (r retriableExecutePlanClient) Trailer() metadata.MD {
	return r.retryContext.stream.Trailer()
}

// CloseSend delegates to the current underlying stream.
func (r retriableExecutePlanClient) CloseSend() error {
	return r.retryContext.stream.CloseSend()
}

// Context delegates to the current underlying stream.
func (r retriableExecutePlanClient) Context() context.Context {
	return r.retryContext.stream.Context()
}

// SendMsg delegates to the current underlying stream.
func (r retriableExecutePlanClient) SendMsg(m any) error {
	return r.retryContext.stream.SendMsg(m)
}

// RecvMsg delegates to the current underlying stream without retry handling.
func (r retriableExecutePlanClient) RecvMsg(m any) error {
	return r.retryContext.stream.RecvMsg(m)
}

================================================ FILE: spark/client/retry_test.go ================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package client

import (
	"context"
	"errors"
	"io"
	"testing"
	"time"

	"github.com/apache/spark-connect-go/spark/client/options"
	"github.com/apache/spark-connect-go/spark/client/testutils"
	"github.com/apache/spark-connect-go/spark/mocks"
	"github.com/apache/spark-connect-go/spark/sparkerrors"
	"github.com/stretchr/testify/assert"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alwaysRetry is a RetryHandler that treats every error as retriable.
func alwaysRetry(err error) bool {
	return true
}

// Test_retryState_nextAttempt checks the backoff computation: initial backoff, jitter
// bounds above the threshold, and nil once the retry budget is exhausted.
func Test_retryState_nextAttempt(t *testing.T) {
	basePolicy := RetryPolicy{
		MaxRetries:         15,
		MaxBackoff:         60 * time.Second,
		InitialBackoff:     100 * time.Millisecond,
		BackoffMultiplier:  4,
		Jitter:             time.Millisecond * 100,
		MinJitterThreshold: 2 * time.Second,
		Name:               "BasePolicy",
		Handler:            alwaysRetry,
	}
	type fields struct {
		retryCount int32
		nextWait   time.Duration
	}
	type args struct {
		p RetryPolicy
	}
	tests := []struct {
		name string
		fields fields
		args args
		// wantLower/wantUpper bound the returned wait; a zero wantUpper means an exact match.
		wantLower time.Duration
		wantUpper time.Duration
		exceeded  bool
	}{
		{
			"BasicRetry - initial backoff",
			fields{
				0,
				0,
			},
			args{
				basePolicy,
			},
			100 * time.Millisecond,
			0,
			false,
		},
		{
			"Jitter applied correctly",
			fields{
				1,
				3 * time.Second,
			},
			args{
				basePolicy,
			},
			3 * time.Second,
			3*time.Second + basePolicy.Jitter,
			false,
		},
		{
			"Retries Exceeded",
			fields{
				16,
				0,
			},
			args{
				basePolicy,
			},
			0,
			0,
			true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rs := &retryState{
				retryCount: tt.fields.retryCount,
				nextWait:   tt.fields.nextWait,
			}
			if tt.exceeded {
				assert.Nilf(t, rs.nextAttempt(tt.args.p), "Expecting retries to be exceeded (%v, %v)", rs, tt.args.p)
			} else {
				val := *rs.nextAttempt(tt.args.p)
				if tt.wantUpper > 0 {
					assert.LessOrEqualf(t, tt.wantLower, val, "nextAttempt(%v, %v)", rs, tt.args.p)
					assert.GreaterOrEqualf(t, tt.wantUpper, val, "nextAttempt(%v, %v)", rs, tt.args.p)
				} else {
					assert.Equalf(t, tt.wantLower, val, "nextAttempt(%v, %v)", rs, tt.args.p)
				}
			}
		})
	}
}

// Test_retryMaxBackOff_applied checks that the computed nextWait is capped at MaxBackoff.
func Test_retryMaxBackOff_applied(t *testing.T) {
	basePolicy := RetryPolicy{
		MaxRetries:         15,
		MaxBackoff:         60 * time.Second,
		InitialBackoff:     100 * time.Millisecond,
		BackoffMultiplier:  4,
		Jitter:             time.Millisecond * 100,
		MinJitterThreshold: 2 * time.Second,
		Name:               "BasePolicy",
		Handler:            alwaysRetry,
	}
	state := retryState{
		retryCount: 3,
		nextWait:   30 * time.Second,
	}
	wait := state.nextAttempt(basePolicy)
	assert.LessOrEqualf(t, 30*time.Second, *wait, " nowWait: nextAttempt(%v, %v)", state, basePolicy)
	assert.GreaterOrEqualf(t, 30*time.Second+basePolicy.Jitter, *wait,
		" nowWait: nextAttempt(%v, %v)", state, basePolicy)
	// 30s * 4 would exceed MaxBackoff, so nextWait must be capped at 60s.
	assert.Equalf(t, 60*time.Second, state.nextWait, " nextWait: nextAttempt(%v, %v)", state, basePolicy)
}

// Test_defaultRetryPolicyBehavior walks the DefaultRetryPolicy through its whole retry budget.
func Test_defaultRetryPolicyBehavior(t *testing.T) {
	state := retryState{
		0,
		0,
	}
	w := state.nextAttempt(DefaultRetryPolicy)
	assert.NotNil(t, w)
	assert.Equal(t, DefaultRetryPolicy.InitialBackoff, *w)
	// Check the next iterations until failure
	w = state.nextAttempt(DefaultRetryPolicy)
	assert.NotNil(t, w)
	expected := time.Duration(int64(float32(DefaultRetryPolicy.InitialBackoff.Milliseconds())*
		DefaultRetryPolicy.BackoffMultiplier)) * time.Millisecond
	assert.GreaterOrEqual(t, expected, *w)
	for i := int32(2); i < DefaultRetryPolicy.MaxRetries; i++ {
		w = state.nextAttempt(DefaultRetryPolicy)
		assert.NotNil(t, w)
		assert.LessOrEqualf(t, *w, 60*time.Second+DefaultRetryPolicy.Jitter,
			"nextAttempt(%v, %v)", state, DefaultRetryPolicy)
	}
	// Check that the next attempt is nil
	w = state.nextAttempt(DefaultRetryPolicy)
	assert.Nil(t, w)
}

// Test_default_retryHandler checks which error classes the default handler retries.
func Test_default_retryHandler(t *testing.T) {
	err := io.EOF
	assert.Falsef(t, DefaultRetryPolicy.Handler(err), "Must not retry other errors")
	err = errors.New("Some error")
	assert.Falsef(t, DefaultRetryPolicy.Handler(err), "Must not retry other errors")
	s := status.New(codes.Unavailable, "Unavailable")
	assert.Truef(t, DefaultRetryPolicy.Handler(s.Err()), "Must retry Unavailable")
	s = status.New(codes.Internal, "ANALYSIS EXCEPTION")
	assert.Falsef(t, DefaultRetryPolicy.Handler(s.Err()), "Must not retry Internal")
	s = status.New(codes.Internal, "Error: INVALID_CURSOR.DISCONNECTED")
	assert.Truef(t, DefaultRetryPolicy.Handler(s.Err()),
		"Must retry Internal with INVALID_CURSOR.DISCONNECTED")
}

// Test_retriable_success: a failing stream is replaced via re-execution and Recv succeeds.
func Test_retriable_success(t *testing.T) {
	toRetry := mocks.NewProtoClientMock(
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
	)
	responseStream := mocks.NewProtoClientMock(
		&mocks.ExecutePlanResponseDone,
		&mocks.ExecutePlanResponseEOF)
	c := testutils.NewConnectServiceClientMock(responseStream, nil, nil, t)
	stream := retriableExecutePlanClient{
		context: context.Background(),
		retryContext: &retryContext{
			stream:        toRetry,
			client:        c,
			retryPolicies: []RetryPolicy{TestingRetryPolicy},
		},
	}
	_, err := stream.Recv()
	assert.NoError(t, err)
}

// Test_retriable_client: the retry budget is exhausted both with no policies configured
// and with the TestingRetryPolicy against a stream that never recovers.
func Test_retriable_client(t *testing.T) {
	toRetry := mocks.NewProtoClientMock(
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
		&mocks.ExecutePlanResponseUnavailable,
	)
	responseStream := mocks.NewProtoClientMock(
		&mocks.ExecutePlanResponseDone,
		&mocks.ExecutePlanResponseEOF)
	c := testutils.NewConnectServiceClientMock(responseStream, nil, nil, t)
	stream := retriableExecutePlanClient{
		context: context.Background(),
		retryContext: &retryContext{
			stream: toRetry,
			client: c,
		},
	}
	_, err := stream.Recv()
	assert.ErrorIs(t, err, sparkerrors.RetriesExceeded)

	c = testutils.NewConnectServiceClientMock(toRetry, nil, nil, t)
	stream = retriableExecutePlanClient{
		context: context.Background(),
		retryContext: &retryContext{
			stream:        toRetry,
			client:        c,
			retryPolicies: []RetryPolicy{TestingRetryPolicy},
		},
	}
	_, err = stream.Recv()
	assert.ErrorIs(t, err, sparkerrors.RetriesExceeded)
}

// Test_retriable_with_reattach: after a first successful response, a stream failure is
// recovered via ReattachExecute using the last response id.
func Test_retriable_with_reattach(t *testing.T) {
	// The first stream yields one proper response and then becomes unavailable.
	toRetry := mocks.NewProtoClientMock(
		&mocks.ExecutePlanResponseWithSchema,
		&mocks.ExecutePlanResponseUnavailable,
	)
	// Final response stream.
	responseStream := mocks.NewProtoClientMock(
		// First let's do another round of retry and then complete.
		&mocks.ExecutePlanResponseUnavailable,
		// Now, finish the stream successfully
		&mocks.ExecutePlanResponseDone,
		&mocks.ExecutePlanResponseEOF)
	c := testutils.NewConnectServiceClientMock(responseStream, nil, nil, t)
	client := retriableSparkConnectClient{
		client:        c,
		sessionId:     mocks.MockSessionId,
		retryPolicies: []RetryPolicy{TestingRetryPolicy},
		options:       options.DefaultSparkClientOptions,
	}
	stream := retriableExecutePlanClient{
		context: context.Background(),
		retryContext: &retryContext{
			stream:        toRetry,
			client:        &client,
			request:       &mocks.ExecutePlanRequestSql,
			retryPolicies: []RetryPolicy{TestingRetryPolicy},
		},
	}
	// Fetch the first response.
	_, err := stream.Recv()
	assert.NoError(t, err)
	_, err = stream.Recv()
	assert.NoError(t, err)
}

// Test_client_retriable_basics_execute: happy path through the retriable ExecutePlan wrapper.
func Test_client_retriable_basics_execute(t *testing.T) {
	stream := mocks.NewProtoClientMock(&mocks.ExecutePlanResponseDone, &mocks.ExecutePlanResponseEOF)
	c := testutils.NewConnectServiceClientMock(stream, nil, nil, t)
	client := retriableSparkConnectClient{
		client:        c,
		sessionId:     mocks.MockSessionId,
		retryPolicies: []RetryPolicy{TestingRetryPolicy},
		options:       options.DefaultSparkClientOptions,
	}
	ctx := context.Background()
	stream, err := client.ExecutePlan(ctx, &mocks.ExecutePlanRequestSql)
	assert.NoError(t, err)
	assert.NotNil(t, stream)
	_, err = stream.Recv()
	assert.NoError(t, err)
	_, err = stream.Recv()
	assert.ErrorIs(t, err, io.EOF)
}

// Test_client_retriable_basics_analyze: happy path through the retriable AnalyzePlan wrapper.
func Test_client_retriable_basics_analyze(t *testing.T) {
	c := testutils.NewConnectServiceClientMock(nil, mocks.AnalyzePlanResponse, nil, t)
	client := retriableSparkConnectClient{
		client:        c,
		sessionId:     mocks.MockSessionId,
		retryPolicies: []RetryPolicy{TestingRetryPolicy},
		options:       options.DefaultSparkClientOptions,
	}
	ctx := context.Background()
	resp, err := client.AnalyzePlan(ctx, &mocks.AnalyzePlanRequestSql)
	assert.NoError(t, err)
	assert.NotNil(t, resp)
	assert.Equal(t, mocks.MockSessionId, resp.SessionId)
}

================================================ FILE: spark/client/testutils/utils.go ================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package testutils

import (
	"context"
	"testing"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"google.golang.org/grpc"
)

// connectServiceClient is a mock implementation of the SparkConnectServiceClient interface.
type connectServiceClient struct {
	t *testing.T

	// analysePlanResponse is returned verbatim by AnalyzePlan.
	analysePlanResponse *proto.AnalyzePlanResponse
	// executePlanClient is returned by both ExecutePlan and ReattachExecute.
	executePlanClient proto.SparkConnectService_ExecutePlanClient
	// err is returned by every non-panicking method.
	err error
}

// FetchErrorDetails implements generated.SparkConnectServiceClient.
func (c *connectServiceClient) FetchErrorDetails(ctx context.Context, in *proto.FetchErrorDetailsRequest,
	opts ...grpc.CallOption,
) (*proto.FetchErrorDetailsResponse, error) {
	panic("unimplemented")
}

// ReleaseSession implements generated.SparkConnectServiceClient.
func (c *connectServiceClient) ReleaseSession(ctx context.Context, in *proto.ReleaseSessionRequest, opts ...grpc.CallOption, ) (*proto.ReleaseSessionResponse, error) { panic("unimplemented") } func (c *connectServiceClient) ExecutePlan(ctx context.Context, in *proto.ExecutePlanRequest, opts ...grpc.CallOption, ) (proto.SparkConnectService_ExecutePlanClient, error) { return c.executePlanClient, c.err } func (c *connectServiceClient) AnalyzePlan(ctx context.Context, in *proto.AnalyzePlanRequest, opts ...grpc.CallOption, ) (*proto.AnalyzePlanResponse, error) { return c.analysePlanResponse, c.err } func (c *connectServiceClient) Config(ctx context.Context, in *proto.ConfigRequest, opts ...grpc.CallOption) (*proto.ConfigResponse, error) { return nil, c.err } func (c *connectServiceClient) AddArtifacts(ctx context.Context, opts ...grpc.CallOption) (proto.SparkConnectService_AddArtifactsClient, error) { return nil, c.err } func (c *connectServiceClient) ArtifactStatus(ctx context.Context, in *proto.ArtifactStatusesRequest, opts ...grpc.CallOption, ) (*proto.ArtifactStatusesResponse, error) { return nil, c.err } func (c *connectServiceClient) Interrupt(ctx context.Context, in *proto.InterruptRequest, opts ...grpc.CallOption, ) (*proto.InterruptResponse, error) { return nil, c.err } func (c *connectServiceClient) ReattachExecute(ctx context.Context, in *proto.ReattachExecuteRequest, opts ...grpc.CallOption, ) (proto.SparkConnectService_ReattachExecuteClient, error) { return c.executePlanClient, c.err } func (c *connectServiceClient) ReleaseExecute(ctx context.Context, in *proto.ReleaseExecuteRequest, opts ...grpc.CallOption, ) (*proto.ReleaseExecuteResponse, error) { return nil, c.err } func NewConnectServiceClientMock(epc proto.SparkConnectService_ExecutePlanClient, apr *proto.AnalyzePlanResponse, err error, t *testing.T, ) proto.SparkConnectServiceClient { return &connectServiceClient{ t: t, analysePlanResponse: apr, executePlanClient: epc, err: err, } } 
================================================ FILE: spark/mocks/mock_executor.go ================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package mocks

import (
	"context"
	"errors"

	"github.com/apache/spark-connect-go/spark/sql/utils"

	"github.com/apache/spark-connect-go/spark/client/base"

	"github.com/apache/arrow-go/v18/arrow"
	"github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/sql/types"
)

// TestExecutor is a configurable test double for the Spark Connect executor interface.
// ExecutePlan and ExecuteCommand honor Err; most other methods return "not implemented".
type TestExecutor struct {
	// Client is the stream handed back by ExecutePlan on success.
	Client base.ExecuteResponseStream
	// response is returned by AnalyzePlan.
	// NOTE(review): unexported, unlike Client and Err, so tests outside this
	// package cannot set it — confirm whether this is intentional.
	response *generated.AnalyzePlanResponse
	// Err, when set, is returned by ExecutePlan and ExecuteCommand.
	Err error
}

// ExecutePlan returns the configured Err, or the configured Client stream on success.
func (t *TestExecutor) ExecutePlan(ctx context.Context, plan *generated.Plan) (base.ExecuteResponseStream, error) {
	if t.Err != nil {
		return nil, t.Err
	}
	return t.Client, nil
}

// AnalyzePlan returns the configured response (nil unless set within this package).
func (t *TestExecutor) AnalyzePlan(ctx context.Context, plan *generated.Plan) (*generated.AnalyzePlanResponse, error) {
	return t.response, nil
}

// Explain is not supported by this mock.
func (t *TestExecutor) Explain(ctx context.Context, plan *generated.Plan,
	explainMode utils.ExplainMode,
) (*generated.AnalyzePlanResponse, error) {
	return nil, errors.New("not implemented")
}

// ExecuteCommand returns the configured Err, or all-nil results on success.
func (t *TestExecutor) ExecuteCommand(ctx context.Context,
	plan *generated.Plan) (arrow.Table, *types.StructType, map[string]interface{}, error) {
	if t.Err != nil {
		return nil, nil, nil, t.Err
	}
	return nil, nil, nil, nil
}

// Persist is not supported by this mock.
func (t *TestExecutor) Persist(ctx context.Context, plan *generated.Plan, storageLevel utils.StorageLevel) error {
	return errors.New("not implemented")
}

// Unpersist is not supported by this mock.
func (t *TestExecutor) Unpersist(ctx context.Context, plan *generated.Plan) error {
	return errors.New("not implemented")
}

// GetStorageLevel is not supported by this mock.
func (t *TestExecutor) GetStorageLevel(ctx context.Context, plan *generated.Plan) (*utils.StorageLevel, error) {
	return nil, errors.New("not implemented")
}

// SparkVersion is not supported by this mock.
func (t *TestExecutor) SparkVersion(ctx context.Context) (string, error) {
	return "", errors.New("not implemented")
}

// DDLParse is not supported by this mock.
func (t *TestExecutor) DDLParse(ctx context.Context, sql string) (*types.StructType, error) {
	return nil, errors.New("not implemented")
}

// SameSemantics is not supported by this mock.
func (t *TestExecutor) SameSemantics(ctx context.Context, plan1 *generated.Plan, plan2 *generated.Plan) (bool, error) {
	return false, errors.New("not implemented")
}

// SemanticHash is not supported by this mock.
func (t *TestExecutor) SemanticHash(ctx context.Context, plan *generated.Plan) (int32, error) {
	return 0, errors.New("not implemented")
}

// Config is not supported by this mock.
func (t *TestExecutor) Config(ctx context.Context,
	configRequest *generated.ConfigRequest_Operation) (*generated.ConfigResponse, error) {
	return nil, errors.New("not implemented")
}

================================================ FILE: spark/mocks/mocks.go ================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package mocks provides canned Spark Connect protocol requests/responses and a
// mock ExecutePlan client stream for unit tests.
package mocks

import (
	"context"
	"io"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	"github.com/google/uuid"

	proto "github.com/apache/spark-connect-go/internal/generated"

	"google.golang.org/grpc/metadata"
)

// MockResponse pairs a single ExecutePlanResponse with the error the mock
// stream should return alongside it.
type MockResponse struct {
	Resp *proto.ExecutePlanResponse
	Err  error
}

// ProtoClient is a mock implementation of the ExecutePlan client stream that
// replays a fixed sequence of canned responses.
type ProtoClient struct {
	// The stream of responses to return.
	RecvResponse []*MockResponse
	// sent indexes the next entry of RecvResponse to hand out.
	sent int
}

// Shared identifiers reused across the canned requests/responses below so
// that tests can assert on session/operation consistency.
var (
	MockSessionId   = uuid.NewString()
	MockOperationId = uuid.NewString()
	MockResponseId  = "1"
	MockUserContext = &proto.UserContext{
		UserId: "user",
	}
)

// ExecutePlanResponseDone is a response that indicates the plan execution is done.
var ExecutePlanResponseDone = MockResponse{
	Resp: &proto.ExecutePlanResponse{
		ResponseType: &proto.ExecutePlanResponse_ResultComplete_{
			ResultComplete: &proto.ExecutePlanResponse_ResultComplete{},
		},
		SessionId:   MockSessionId,
		OperationId: MockOperationId,
	},
	Err: nil,
}

// ExecutePlanResponseEOF terminates the mock stream like a server-side EOF.
var ExecutePlanResponseEOF = MockResponse{
	Err: io.EOF,
}

// ExecutePlanResponseBrokenSchema carries a non-struct (plain string) schema,
// which consumers of the stream should reject.
var ExecutePlanResponseBrokenSchema = MockResponse{
	Resp: &proto.ExecutePlanResponse{
		Schema: &proto.DataType{
			Kind: &proto.DataType_String_{
				String_: &proto.DataType_String{},
			},
		},
		SessionId:   MockSessionId,
		OperationId: MockOperationId,
	},
}

// ExecutePlanResponseWithSchema carries a valid single-column struct schema
// (col0: nullable integer).
var ExecutePlanResponseWithSchema = MockResponse{
	Resp: &proto.ExecutePlanResponse{
		ResponseId:  MockResponseId,
		OperationId: MockOperationId,
		Schema: &proto.DataType{
			Kind: &proto.DataType_Struct_{
				Struct: &proto.DataType_Struct{
					Fields: []*proto.DataType_StructField{
						{
							Name: "col0",
							DataType: &proto.DataType{
								Kind: &proto.DataType_Integer_{
									Integer: &proto.DataType_Integer{},
								},
							},
							Nullable: true,
						},
					},
				},
			},
		},
	},
}

// ExecutePlanResponseUnavailable simulates a gRPC Unavailable error, which is
// typically treated as retryable by the client.
var ExecutePlanResponseUnavailable = MockResponse{
	Err: status.New(codes.Unavailable, "Unavailable").Err(),
}

// ExecutePlanRequestSql is a canned ExecutePlan request wrapping a SQL command.
var ExecutePlanRequestSql = proto.ExecutePlanRequest{
	Plan:        NewSqlCommand("select range(10)"),
	OperationId: &MockOperationId,
	SessionId:   MockSessionId,
	UserContext: MockUserContext,
}

// AnalyzePlanRequestSql is a canned AnalyzePlan (schema) request for the same SQL.
var AnalyzePlanRequestSql = proto.AnalyzePlanRequest{
	SessionId: MockSessionId,
	Analyze: &proto.AnalyzePlanRequest_Schema_{
		Schema: &proto.AnalyzePlanRequest_Schema{
			Plan: NewSqlCommand("select range(10)"),
		},
	},
	UserContext: MockUserContext,
}

// AnalyzePlanResponse is the matching canned schema response (col0: integer).
var AnalyzePlanResponse = &proto.AnalyzePlanResponse{
	SessionId: MockSessionId,
	Result: &proto.AnalyzePlanResponse_Schema_{
		Schema: &proto.AnalyzePlanResponse_Schema{
			Schema: &proto.DataType{
				Kind: &proto.DataType_Struct_{
					Struct: &proto.DataType_Struct{
						Fields: []*proto.DataType_StructField{
							{
								Name: "col0",
								DataType: &proto.DataType{
									Kind: &proto.DataType_Integer_{
										Integer: &proto.DataType_Integer{},
									},
								},
							},
						},
					},
				},
			},
		},
	},
}

// NewProtoClientMock creates a new mock client that returns the given responses.
func NewProtoClientMock(responses ...*MockResponse) proto.SparkConnectService_ExecutePlanClient {
	return &ProtoClient{RecvResponse: responses}
}

// Recv returns the next canned response/error pair and advances the cursor.
// NOTE(review): panics (index out of range) if called after the configured
// responses are exhausted — tests must supply a terminating entry (EOF/Done).
func (p *ProtoClient) Recv() (*proto.ExecutePlanResponse, error) {
	val := p.RecvResponse[p.sent]
	p.sent += 1
	return val.Resp, val.Err
}

// Header returns no metadata; it surfaces the current entry's error, if any.
func (p *ProtoClient) Header() (metadata.MD, error) {
	return nil, p.RecvResponse[p.sent].Err
}

// Trailer returns no metadata.
func (p *ProtoClient) Trailer() metadata.MD {
	return nil
}

// CloseSend surfaces the current entry's error, if any.
func (p *ProtoClient) CloseSend() error {
	return p.RecvResponse[p.sent].Err
}

// Context returns nil; the mock does not track a stream context.
func (p *ProtoClient) Context() context.Context {
	return nil
}

// SendMsg ignores m and surfaces the current entry's error, if any.
func (p *ProtoClient) SendMsg(m interface{}) error {
	return p.RecvResponse[p.sent].Err
}

// RecvMsg ignores m and surfaces the current entry's error, if any.
func (p *ProtoClient) RecvMsg(m interface{}) error {
	return p.RecvResponse[p.sent].Err
}

// NewSqlCommand wraps a SQL string into a Spark Connect command plan.
func NewSqlCommand(sql string) *proto.Plan {
	return &proto.Plan{
		OpType: &proto.Plan_Command{
			Command: &proto.Command{
				CommandType: &proto.Command_SqlCommand{
					SqlCommand: &proto.SqlCommand{
						Sql: sql,
					},
				},
			},
		},
	}
}

================================================ FILE: spark/sparkerrors/errors.go ================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package sparkerrors defines the error taxonomy used by the Spark Connect Go
// client and helpers for wrapping causes with sentinel types and for
// converting gRPC errors into structured SparkError values.
package sparkerrors

import (
	"encoding/json"
	"fmt"
	"io"

	"github.com/go-errors/errors"
	"google.golang.org/genproto/googleapis/rpc/errdetails"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// wrappedError couples a sentinel errorType with the underlying cause; the
// cause carries a captured stack trace (go-errors).
type wrappedError struct {
	errorType error
	cause     *errors.Error
}

// Unwrap exposes both the sentinel type and the cause so that errors.Is/As
// match against either one.
func (w *wrappedError) Unwrap() []error {
	return []error{w.errorType, w.cause}
}

// Error renders the error via the %s verb, which dispatches to Format below.
func (w *wrappedError) Error() string {
	return fmt.Sprintf("%s", w)
}

// WithType wraps an error with a type that can later be checked using `errors.Is`
func WithType(err error, errType errorType) error {
	return &wrappedError{cause: errors.Wrap(err, 1), errorType: errType}
}

// WithString wraps an error with a fixed message that acts as its error type.
func WithString(err error, errMsg string) error {
	return &wrappedError{cause: errors.Wrap(err, 1), errorType: errors.New(errMsg)}
}

// WithStringf is like WithString but formats the message with params.
func WithStringf(err error, errMsg string, params ...any) error {
	return &wrappedError{cause: errors.Wrap(err, 1), errorType: fmt.Errorf(errMsg, params...)}
}

// errorType is an alias for error used to distinguish sentinel error values.
type errorType error

// Sentinel error types; compare with errors.Is.
var (
	ConnectionError               = errorType(errors.New("connection error"))
	ReadError                     = errorType(errors.New("read error"))
	ExecutionError                = errorType(errors.New("execution error"))
	InvalidInputError             = errorType(errors.New("invalid input"))
	InvalidPlanError              = errorType(errors.New("invalid plan"))
	RetriesExceeded               = errorType(errors.New("retries exceeded"))
	InvalidServerSideSessionError = errorType(errors.New("invalid server side session"))
	TestSetupError                = errorType(errors.New("test setup error"))
	WriteError                    = errorType(errors.New("write error"))
	NotImplementedError           = errorType(errors.New("not implemented"))
	InvalidArgumentError          = errorType(errors.New("invalid argument"))
)

// Format formats the error, supporting both short forms (v, s, q) and verbose form (+v)
func (w *wrappedError) Format(s fmt.State, verb rune) {
	switch verb {
	case 'v':
		if s.Flag('+') {
			// Verbose form: error type, cause, and the captured stack trace.
			_, _ = io.WriteString(s, "[sparkerror] ")
			_, _ = io.WriteString(s, fmt.Sprintf("Error Type: %s\n", w.errorType.Error()))
			_, _ = io.WriteString(s, fmt.Sprintf("Error Cause: %s\n%s", w.cause.Err.Error(),
				w.cause.Stack()))
			return
		}
		fallthrough
	case 's':
		_, _ = io.WriteString(s, fmt.Sprintf("%s: %s", w.errorType, w.cause))
	case 'q':
		_, _ = fmt.Fprintf(s, "%q", w.errorType.Error())
	}
}

// UnsupportedResponseTypeError reports a response whose type the client does
// not know how to handle.
type UnsupportedResponseTypeError struct {
	ResponseType interface{}
}

func (e UnsupportedResponseTypeError) Error() string {
	return fmt.Sprintf("Received unsupported response type: %T", e.ResponseType)
}

// InvalidServerSideSessionDetailsError reports a mismatch between the session
// id the client owns and the one the server returned.
type InvalidServerSideSessionDetailsError struct {
	OwnSessionId      string
	ReceivedSessionId string
}

func (e InvalidServerSideSessionDetailsError) Error() string {
	return fmt.Sprintf("Received invalid session id %s, expected %s", e.ReceivedSessionId, e.OwnSessionId)
}

// SparkError represents an error that is returned from Spark itself. It captures details of the
// error that allows better understanding about the error. This allows us to check if the error
// can be retried or not.
type SparkError struct {
	// SqlState is the SQL state of the error.
	SqlState string
	// ErrorClass is the class of the error.
	ErrorClass string
	// If set is typically the classname throwing the error on the Spark side.
	Reason string
	// Message is the human-readable message of the error.
	Message string
	// Code is the gRPC status code of the error.
	Code codes.Code
	// ErrorId is the unique id of the error. It can be used to fetch more details about
	// the error using an additional RPC from the server.
	ErrorId string
	// Parameters are the parameters that are used to format the error message.
	Parameters map[string]string
	// status retains the original gRPC status the error was derived from.
	status *status.Status
}

// Error formats Spark-internal errors with their error class and SQLSTATE;
// all other errors fall back to the gRPC code plus message.
func (e SparkError) Error() string {
	if e.Code == codes.Internal && e.SqlState != "" {
		return fmt.Sprintf("[%s] %s. SQLSTATE: %s", e.ErrorClass, e.Message, e.SqlState)
	} else {
		return fmt.Sprintf("[%s] %s", e.Code.String(), e.Message)
	}
}

// FromRPCError converts a gRPC error to a SparkError. If the error is not a gRPC error, it will
// create a plain "UNKNOWN" GRPC status type. If no error was observed returns nil.
func FromRPCError(e error) *SparkError { status := status.Convert(e) // If there was no error, simply pass through. if status == nil { return nil } result := &SparkError{ Message: status.Message(), Code: status.Code(), status: status, } // Now lets, check if we can extract the error info from the details. for _, d := range status.Details() { switch info := d.(type) { case *errdetails.ErrorInfo: // Parse the parameters from the error details, but only parse them if // they're present. var params map[string]string if v, ok := info.GetMetadata()["messageParameters"]; ok { err := json.Unmarshal([]byte(v), ¶ms) if err == nil { // The message parameters is properly formatted JSON, if for some reason // this is not the case, errors are ignored. result.Parameters = params } } result.SqlState = info.GetMetadata()["sqlState"] result.ErrorClass = info.GetMetadata()["errorClass"] result.ErrorId = info.GetMetadata()["errorId"] result.Reason = info.Reason } } return result } ================================================ FILE: spark/sparkerrors/errors_test.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package sparkerrors

import (
	"fmt"
	"testing"

	"google.golang.org/genproto/googleapis/rpc/errdetails"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	"github.com/stretchr/testify/assert"
)

// WithType-wrapped errors must match the sentinel via errors.Is.
func TestWithTypeGivesAndErrorThatIsOfThatType(t *testing.T) {
	err := WithType(assert.AnError, ConnectionError)
	assert.ErrorIs(t, err, ConnectionError)
}

// The rendered message must include the sentinel's text.
func TestErrorStringContainsErrorType(t *testing.T) {
	err := WithType(assert.AnError, ConnectionError)
	assert.Contains(t, err.Error(), ConnectionError.Error())
}

// A plain gRPC error keeps its code and message after conversion.
func TestGRPCErrorConversion(t *testing.T) {
	err := status.Error(codes.Internal, "invalid argument")
	se := FromRPCError(err)
	assert.Equal(t, se.Code, codes.Internal)
	assert.Equal(t, se.Message, "invalid argument")
}

// Non-gRPC errors are converted into an Unknown-code SparkError.
func TestNonGRPCErrorsAreConvertedAsWell(t *testing.T) {
	err := assert.AnError
	se := FromRPCError(err)
	assert.Equal(t, se.Code, codes.Unknown)
	assert.Equal(t, se.Message, assert.AnError.Error())
}

// %+v output must include the captured stack trace pointing at this file.
func TestStackTracePrint(t *testing.T) {
	err := WithType(assert.AnError, ConnectionError)
	errorString := fmt.Sprintf("%+v", err)
	t.Log(errorString)
	assert.Contains(t, errorString, "spark/sparkerrors/errors_test.go")
}

// ErrorInfo details attached to the status must be surfaced on the SparkError.
func TestErrorDetailsExtractionFromGRPCStatus(t *testing.T) {
	status := status.New(codes.Internal, "AnalysisException")
	status, _ = status.WithDetails(&errdetails.ErrorInfo{
		Reason:   "AnalysisException",
		Domain:   "spark.sql",
		Metadata: map[string]string{},
	})
	err := status.Err()
	se := FromRPCError(err)
	assert.Equal(t, codes.Internal, se.Code)
	assert.Equal(t, "AnalysisException", se.Message)
	assert.Equal(t, "AnalysisException", se.Reason)
}

// sqlState/errorClass/errorId metadata keys map onto the SparkError fields.
func TestErrorDetailsWithSqlStateAndClass(t *testing.T) {
	status := status.New(codes.Internal, "AnalysisException")
	status, _ = status.WithDetails(&errdetails.ErrorInfo{
		Reason: "AnalysisException",
		Domain: "spark.sql",
		Metadata: map[string]string{
			"sqlState":          "42000",
			"errorClass":        "ERROR_CLASS",
			"errorId":           "errorId",
			"messageParameters": "",
		},
	})
	err := status.Err()
	se := FromRPCError(err)
	assert.Equal(t, codes.Internal, se.Code)
	assert.Equal(t, "AnalysisException", se.Message)
	assert.Equal(t, "AnalysisException", se.Reason)
	assert.Equal(t, "42000", se.SqlState)
	assert.Equal(t, "ERROR_CLASS", se.ErrorClass)
	assert.Equal(t, "errorId", se.ErrorId)
}

// messageParameters is parsed as JSON; invalid JSON leaves Parameters nil.
func TestErrorDetailsWithMessageParameterParsing(t *testing.T) {
	type param struct {
		TestName string
		Input    string
		Expected map[string]string
	}
	// NOTE(review): the second and third labels look swapped ("{" is the parse
	// error; "{}" parses to an empty map) — labels are runtime strings and are
	// left untouched here.
	params := []param{
		{"empty input", "", nil},
		{"empty input", "{", nil},
		{"parse error", "{}", map[string]string{}},
		{"valid input", "{\"key\":\"value\"}", map[string]string{"key": "value"}},
	}
	for _, p := range params {
		t.Run(p.TestName, func(t *testing.T) {
			status := status.New(codes.Internal, "AnalysisException")
			status, _ = status.WithDetails(&errdetails.ErrorInfo{
				Reason: "AnalysisException",
				Domain: "spark.sql",
				Metadata: map[string]string{
					"sqlState":          "42000",
					"errorClass":        "ERROR_CLASS",
					"errorId":           "errorId",
					"messageParameters": p.Input,
				},
			})
			err := status.Err()
			se := FromRPCError(err)
			assert.Equal(t, codes.Internal, se.Code)
			assert.Equal(t, "AnalysisException", se.Message)
			assert.Equal(t, "AnalysisException", se.Reason)
			assert.Equal(t, "42000", se.SqlState)
			assert.Equal(t, "ERROR_CLASS", se.ErrorClass)
			assert.Equal(t, "errorId", se.ErrorId)
			assert.Equal(t, p.Expected, se.Parameters)
		})
	}
}

// Table test for SparkError.Error formatting: Internal + SqlState uses the
// error-class form, everything else the [Code] Message form.
func TestSparkError_Error(t *testing.T) {
	type fields struct {
		SqlState   string
		ErrorClass string
		Reason     string
		Message    string
		Code       codes.Code
		ErrorId    string
		Parameters map[string]string
		status     *status.Status
	}
	tests := []struct {
		name   string
		fields fields
		want   string
	}{
		{
			"UNKNOWN",
			fields{
				Code:    codes.Unknown,
				Message: "Unknown error",
			},
			"[Unknown] Unknown error",
		},
		{
			"Analysis Exception",
			fields{
				SqlState:   "42703",
				ErrorClass: "UNRESOLVED_COLUMN.WITH_SUGGESTION",
				Message:    "A column, variable, or function parameter with name `id2` cannot be resolved. Did you mean one of the following? [`id`]",
				Code:       codes.Internal,
			},
			"[UNRESOLVED_COLUMN.WITH_SUGGESTION] A column, variable, or function parameter with name `id2` cannot be resolved. Did you mean one of the following? [`id`]. SQLSTATE: 42703",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			e := SparkError{
				SqlState:   tt.fields.SqlState,
				ErrorClass: tt.fields.ErrorClass,
				Reason:     tt.fields.Reason,
				Message:    tt.fields.Message,
				Code:       tt.fields.Code,
				ErrorId:    tt.fields.ErrorId,
				Parameters: tt.fields.Parameters,
				status:     tt.fields.status,
			}
			assert.Equalf(t, tt.want, e.Error(), "Error()")
		})
	}
}

================================================ FILE: spark/sql/column/column.go ================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package column

import (
	"context"

	"github.com/apache/spark-connect-go/spark/sql/types"

	proto "github.com/apache/spark-connect-go/internal/generated"
)

// Convertible is the interface for all things that can be converted into a protobuf expression.
type Convertible interface {
	ToProto(ctx context.Context) (*proto.Expression, error)
}

// Column wraps an expression tree; its methods build comparison, arithmetic,
// sorting, and aliasing expressions without evaluating anything client-side.
type Column struct {
	expr expression
}

// ToProto converts the wrapped expression into its protobuf form.
func (c Column) ToProto(ctx context.Context) (*proto.Expression, error) {
	return c.expr.ToProto(ctx)
}

// Lt builds the expression `c < other`.
func (c Column) Lt(other Column) Column {
	return NewColumn(NewUnresolvedFunction("<", []expression{c.expr, other.expr}, false))
}

// Le builds the expression `c <= other`.
func (c Column) Le(other Column) Column {
	return NewColumn(NewUnresolvedFunction("<=", []expression{c.expr, other.expr}, false))
}

// Gt builds the expression `c > other`.
func (c Column) Gt(other Column) Column {
	return NewColumn(NewUnresolvedFunction(">", []expression{c.expr, other.expr}, false))
}

// Ge builds the expression `c >= other`.
func (c Column) Ge(other Column) Column {
	return NewColumn(NewUnresolvedFunction(">=", []expression{c.expr, other.expr}, false))
}

// Eq builds the expression `c == other`.
func (c Column) Eq(other Column) Column {
	return NewColumn(NewUnresolvedFunction("==", []expression{c.expr, other.expr}, false))
}

// Neq builds `not(c == other)` — inequality is expressed as negated equality.
func (c Column) Neq(other Column) Column {
	cmp := NewUnresolvedFunction("==", []expression{c.expr, other.expr}, false)
	return NewColumn(NewUnresolvedFunction("not", []expression{cmp}, false))
}

// Mul builds the expression `c * other`.
func (c Column) Mul(other Column) Column {
	return NewColumn(NewUnresolvedFunction("*", []expression{c.expr, other.expr}, false))
}

// Div builds the expression `c / other`.
func (c Column) Div(other Column) Column {
	return NewColumn(NewUnresolvedFunction("/", []expression{c.expr, other.expr}, false))
}

// Desc returns a sort order on c: descending, nulls last.
func (c Column) Desc() Column {
	return NewColumn(&sortExpression{
		child:        c.expr,
		direction:    proto.Expression_SortOrder_SORT_DIRECTION_DESCENDING,
		nullOrdering: proto.Expression_SortOrder_SORT_NULLS_LAST,
	})
}

// GetItem extracts a value by key/index from a container-typed column.
func (c Column) GetItem(key types.LiteralType) Column {
	return NewColumn(NewUnresolvedExtractValue("getItem", c.expr, NewLiteral(key)))
}

// Asc returns a sort order on c: ascending, nulls first.
func (c Column) Asc() Column {
	return NewColumn(&sortExpression{
		child:        c.expr,
		direction:    proto.Expression_SortOrder_SORT_DIRECTION_ASCENDING,
		nullOrdering: proto.Expression_SortOrder_SORT_NULLS_FIRST,
	})
}

// Alias renames the column in the output schema.
func (c Column) Alias(alias string) Column {
	return NewColumn(NewColumnAlias(alias, c.expr))
}

// NewColumn wraps an expression into a Column.
func NewColumn(expr expression) Column {
	return Column{
		expr: expr,
	}
}

// SchemaDataFrame is the minimal DataFrame surface needed to resolve columns:
// a plan id and a schema lookup.
type SchemaDataFrame interface {
	PlanId() int64
	Schema(ctx context.Context) (*types.StructType, error)
}

// OfDF creates a column bound to df; resolution against df's schema is
// deferred until the expression is converted to proto.
func OfDF(df SchemaDataFrame, colName string) Column {
	return NewColumn(&delayedColumnReference{colName, df})
}

// OfDFWithRegex creates a column that matches names by regex within df's plan.
func OfDFWithRegex(df SchemaDataFrame, colRegex string) Column {
	planId := df.PlanId()
	return NewColumn(&unresolvedRegex{colRegex, &planId})
}

// Alias pairs a name with any convertible expression.
type Alias struct {
	Name string
	Col  Convertible
}

// ToProto wraps the underlying expression in a proto Alias node.
func (a Alias) ToProto(ctx context.Context) (*proto.Expression, error) {
	col, err := a.Col.ToProto(ctx)
	if err != nil {
		return nil, err
	}
	return &proto.Expression{
		ExprType: &proto.Expression_Alias_{
			Alias: &proto.Expression_Alias{
				Expr: col,
				Name: []string{a.Name},
			},
		},
	}, nil
}

// WithAlias constructs an Alias value for the given name and expression.
func WithAlias(name string, col Convertible) Alias {
	return Alias{
		Name: name,
		Col:  col,
	}
}

================================================ FILE: spark/sql/column/column_test.go ================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package column

import (
	"context"
	"testing"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/stretchr/testify/assert"
)

// NewUnresolvedFunction must emit a proto UnresolvedFunction with the given
// name, arguments, and distinct flag; empty argument slices yield nil args.
func TestNewUnresolvedFunction_Basic(t *testing.T) {
	ctx := context.Background()
	col1 := NewColumn(NewColumnReference("col1"))
	col2 := NewColumn(NewColumnReference("col2"))
	col1Plan, _ := col1.ToProto(ctx)
	col2Plan, _ := col2.ToProto(ctx)
	type args struct {
		name       string
		arguments  []expression
		isDistinct bool
	}
	tests := []struct {
		name string
		args args
		want *proto.Expression
	}{
		{
			name: "TestNewUnresolvedWithArguments",
			args: args{
				name:       "id",
				arguments:  []expression{col1.expr, col2.expr},
				isDistinct: false,
			},
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							col1Plan, col2Plan,
						},
					},
				},
			},
		},
		{
			name: "TestNewUnresolvedWithArgumentsEmpty",
			args: args{
				name:       "id",
				arguments:  []expression{},
				isDistinct: true,
			},
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   true,
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := NewUnresolvedFunction(tt.args.name, tt.args.arguments, tt.args.isDistinct)
			expected := tt.want
			p, err := got.ToProto(ctx)
			assert.NoError(t, err)
			assert.Equalf(t, expected, p, "Input: %v", tt.args)
		})
	}
}

// Table test covering the Column operator helpers: each must lower to the
// corresponding proto UnresolvedFunction over the operand attributes.
func TestColumnFunctions(t *testing.T) {
	col1 := NewColumn(NewColumnReference("col1"))
	col2 := NewColumn(NewColumnReference("col2"))
	col1Plan, _ := col1.ToProto(context.Background())
	tests := []struct {
		name string
		arg  Column
		want *proto.Expression
	}{
		{
			name: "TestColumnAlias",
			arg:  NewColumn(NewColumnAlias("alias", col1.expr)),
			want: &proto.Expression{
				ExprType: &proto.Expression_Alias_{
					Alias: &proto.Expression_Alias{
						Expr: col1Plan,
						Name: []string{"alias"},
					},
				},
			},
		},
		{
			name: "TestNewUnresolvedFunction",
			arg:  NewColumn(NewUnresolvedFunction("id", nil, false)),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   false,
					},
				},
			},
		},
		{
			name: "TestLtComparison",
			arg:  col1.Lt(col2),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "<",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col1",
									},
								},
							},
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col2",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name: "TestGtComparison",
			arg:  col1.Gt(col2),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: ">",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col1",
									},
								},
							},
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col2",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name: "TestLeComparison",
			arg:  col1.Le(col2),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "<=",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col1",
									},
								},
							},
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col2",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name: "TestGeComparison",
			arg:  col1.Ge(col2),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: ">=",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col1",
									},
								},
							},
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col2",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name: "TestMulComparison",
			arg:  col1.Mul(col2),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "*",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col1",
									},
								},
							},
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col2",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name: "TestDivComparison",
			arg:  col1.Div(col2),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "/",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col1",
									},
								},
							},
							{
								ExprType: &proto.Expression_UnresolvedAttribute_{
									UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
										UnparsedIdentifier: "col2",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			// Neq is expressed as not(==) rather than a dedicated operator.
			name: "TestNeComparison",
			arg:  col1.Neq(col2),
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "not",
						Arguments: []*proto.Expression{
							{
								ExprType: &proto.Expression_UnresolvedFunction_{
									UnresolvedFunction: &proto.Expression_UnresolvedFunction{
										FunctionName: "==",
										IsDistinct:   false,
										Arguments: []*proto.Expression{
											{
												ExprType: &proto.Expression_UnresolvedAttribute_{
													UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
														UnparsedIdentifier: "col1",
													},
												},
											},
											{
												ExprType: &proto.Expression_UnresolvedAttribute_{
													UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
														UnparsedIdentifier: "col2",
													},
												},
											},
										},
									},
								},
							},
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.arg.ToProto(context.Background())
			assert.NoError(t, err)
			expected := tt.want
			assert.Equalf(t, expected, got, "Input: %v", tt.arg.expr.DebugString())
		})
	}
}

// WithAlias must produce the same proto Alias node as Column.Alias.
func TestColumn_Alias(t *testing.T) {
	col1 := NewColumn(NewColumnReference("col1"))
	col1Plan, _ := col1.ToProto(context.Background())
	tests := []struct {
		name string
		arg  Convertible
		want *proto.Expression
	}{
		{
			name: "TestColumnAlias",
			arg:  WithAlias("alias", col1),
			want: &proto.Expression{
				ExprType: &proto.Expression_Alias_{
					Alias: &proto.Expression_Alias{
						Expr: col1Plan,
						Name: []string{"alias"},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.arg.ToProto(context.Background())
			assert.NoError(t, err)
			expected := tt.want
			assert.Equalf(t, expected, got, "Input: %v", tt.arg)
		})
	}
}

================================================ FILE: spark/sql/column/expressions.go ================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License.
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package column import ( "context" "fmt" "strings" "github.com/apache/spark-connect-go/spark/sql/types" "github.com/apache/spark-connect-go/spark/sparkerrors" proto "github.com/apache/spark-connect-go/internal/generated" ) func newProtoExpression() *proto.Expression { return &proto.Expression{} } // expression is the interface for all expressions used by Spark Connect. type expression interface { ToProto(context.Context) (*proto.Expression, error) DebugString() string } type unresolvedRegex struct { colRegex string planId *int64 } func (d *unresolvedRegex) DebugString() string { return d.colRegex } func (d *unresolvedRegex) ToProto(ctx context.Context) (*proto.Expression, error) { expr := newProtoExpression() expr.ExprType = &proto.Expression_UnresolvedRegex_{ UnresolvedRegex: &proto.Expression_UnresolvedRegex{ ColName: d.colRegex, PlanId: d.planId, }, } return expr, nil } type delayedColumnReference struct { unparsedIdentifier string df SchemaDataFrame } func (d *delayedColumnReference) DebugString() string { return d.unparsedIdentifier } func (d *delayedColumnReference) ToProto(ctx context.Context) (*proto.Expression, error) { // Check if the column identifier is actually part of the schema. 
schema, err := d.df.Schema(ctx) if err != nil { return nil, err } found := false for _, field := range schema.Fields { if field.Name == d.unparsedIdentifier { found = true break } } // TODO: return proper pyspark error if !found { return nil, sparkerrors.WithType(sparkerrors.InvalidPlanError, fmt.Errorf("cannot resolve column %s", d.unparsedIdentifier)) } expr := newProtoExpression() id := d.df.PlanId() expr.ExprType = &proto.Expression_UnresolvedAttribute_{ UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{ UnparsedIdentifier: d.unparsedIdentifier, PlanId: &id, }, } return expr, nil } type sortExpression struct { child expression direction proto.Expression_SortOrder_SortDirection nullOrdering proto.Expression_SortOrder_NullOrdering } func (s *sortExpression) DebugString() string { return s.child.DebugString() } func (s *sortExpression) ToProto(ctx context.Context) (*proto.Expression, error) { exp := newProtoExpression() child, err := s.child.ToProto(ctx) if err != nil { return nil, err } exp.ExprType = &proto.Expression_SortOrder_{ SortOrder: &proto.Expression_SortOrder{ Child: child, Direction: s.direction, NullOrdering: s.nullOrdering, }, } return exp, nil } type caseWhenExpression struct { branches []*caseWhenBranch elseExpr expression } type caseWhenBranch struct { condition expression value expression } func NewCaseWhenExpression(branches []*caseWhenBranch, elseExpr expression) expression { return &caseWhenExpression{branches: branches, elseExpr: elseExpr} } func (c *caseWhenExpression) DebugString() string { branches := make([]string, 0) for _, branch := range c.branches { branches = append(branches, fmt.Sprintf("WHEN %s THEN %s", branch.condition.DebugString(), branch.value.DebugString())) } elseExpr := "" if c.elseExpr != nil { elseExpr = fmt.Sprintf("ELSE %s", c.elseExpr.DebugString()) } return fmt.Sprintf("CASE %s %s END", strings.Join(branches, " "), elseExpr) } func (c *caseWhenExpression) ToProto(ctx context.Context) (*proto.Expression, 
	error) {
	// Flatten branches into [cond1, val1, cond2, val2, ..., else?] and
	// delegate to the "when" unresolved function.
	args := make([]expression, 0)
	for _, branch := range c.branches {
		args = append(args, branch.condition)
		args = append(args, branch.value)
	}
	if c.elseExpr != nil {
		args = append(args, c.elseExpr)
	}
	fun := NewUnresolvedFunction("when", args, false)
	return fun.ToProto(ctx)
}

// unresolvedExtractValue extracts a value from a container expression
// (e.g. a struct field, map key, or array element) via a named extractor.
type unresolvedExtractValue struct {
	name       string
	child      expression
	extraction expression
}

// DebugString renders the extraction as "name(child, extraction)".
func (u *unresolvedExtractValue) DebugString() string {
	return fmt.Sprintf("%s(%s, %s)", u.name, u.child.DebugString(), u.extraction.DebugString())
}

// ToProto converts the extraction into a proto UnresolvedExtractValue with the
// converted child and extraction expressions.
func (u *unresolvedExtractValue) ToProto(ctx context.Context) (*proto.Expression, error) {
	expr := newProtoExpression()
	child, err := u.child.ToProto(ctx)
	if err != nil {
		return nil, err
	}
	extraction, err := u.extraction.ToProto(ctx)
	if err != nil {
		return nil, err
	}
	expr.ExprType = &proto.Expression_UnresolvedExtractValue_{
		UnresolvedExtractValue: &proto.Expression_UnresolvedExtractValue{
			Child:      child,
			Extraction: extraction,
		},
	}
	return expr, nil
}

// unresolvedFunction is a function call by name whose resolution is deferred
// to the server (e.g. "sum", "when"), with optional DISTINCT semantics.
type unresolvedFunction struct {
	name       string
	args       []expression
	isDistinct bool
}

// DebugString renders the call as "name(DISTINCT arg1, arg2, ...)".
func (u *unresolvedFunction) DebugString() string {
	args := make([]string, 0)
	for _, arg := range u.args {
		args = append(args, arg.DebugString())
	}
	distinct := ""
	if u.isDistinct {
		distinct = "DISTINCT "
	}
	return fmt.Sprintf("%s(%s%s)", u.name, distinct, strings.Join(args, ", "))
}

// ToProto converts the call into a proto UnresolvedFunction. A nil Arguments
// slice is emitted when there are no args (rather than an empty slice).
func (u *unresolvedFunction) ToProto(ctx context.Context) (*proto.Expression, error) {
	// Convert input args to the proto expression.
	var args []*proto.Expression = nil
	if len(u.args) > 0 {
		args = make([]*proto.Expression, 0)
		for _, arg := range u.args {
			p, e := arg.ToProto(ctx)
			if e != nil {
				return nil, e
			}
			args = append(args, p)
		}
	}
	expr := newProtoExpression()
	expr.ExprType = &proto.Expression_UnresolvedFunction_{
		UnresolvedFunction: &proto.Expression_UnresolvedFunction{
			FunctionName: u.name,
			Arguments:    args,
			IsDistinct:   u.isDistinct,
		},
	}
	return expr, nil
}

// NewUnresolvedExtractValue creates an extraction expression (struct field,
// map key, or array element access) resolved server-side.
func NewUnresolvedExtractValue(name string, child expression, extraction expression) expression {
	return &unresolvedExtractValue{name: name, child: child, extraction: extraction}
}

// NewUnresolvedFunction creates a server-resolved function call expression.
func NewUnresolvedFunction(name string, args []expression, isDistinct bool) expression {
	return &unresolvedFunction{name: name, args: args, isDistinct: isDistinct}
}

// NewUnresolvedFunctionWithColumns creates a non-DISTINCT function call whose
// arguments are the underlying expressions of the given columns.
func NewUnresolvedFunctionWithColumns(name string, cols ...Column) expression {
	exprs := make([]expression, 0)
	for _, col := range cols {
		exprs = append(exprs, col.expr)
	}
	return NewUnresolvedFunction(name, exprs, false)
}

// columnAlias wraps an expression with one or more alias name parts and
// optional metadata (JSON string, per the proto Alias message).
type columnAlias struct {
	alias    []string
	expr     expression
	metadata *string
}

// NewColumnAlias aliases an expression with a single name.
func NewColumnAlias(alias string, expr expression) expression {
	return &columnAlias{alias: []string{alias}, expr: expr}
}

// NewColumnAliasFromNameParts aliases an expression with a multi-part name.
func NewColumnAliasFromNameParts(alias []string, expr expression) expression {
	return &columnAlias{alias: alias, expr: expr}
}

// DebugString renders the alias as "child AS part1.part2".
func (c *columnAlias) DebugString() string {
	child := c.expr.DebugString()
	alias := strings.Join(c.alias, ".")
	return fmt.Sprintf("%s AS %s", child, alias)
}

// ToProto converts the alias into a proto Alias wrapping the converted child.
func (c *columnAlias) ToProto(ctx context.Context) (*proto.Expression, error) {
	expr := newProtoExpression()
	alias, err := c.expr.ToProto(ctx)
	if err != nil {
		return nil, err
	}
	expr.ExprType = &proto.Expression_Alias_{
		Alias: &proto.Expression_Alias{
			Expr:     alias,
			Name:     c.alias,
			Metadata: c.metadata,
		},
	}
	return expr, nil
}

// columnReference is an unresolved attribute reference by name, optionally
// bound to a specific plan id.
type columnReference struct {
	unparsedIdentifier string
	planId             *int64
}

// NewColumnReference creates an unresolved column reference by name, not
// bound to any particular plan.
func NewColumnReference(unparsedIdentifier string) expression {
	return &columnReference{unparsedIdentifier:
		unparsedIdentifier}
}

// NewColumnReferenceWithPlanId creates an unresolved column reference bound
// to the plan identified by planId.
func NewColumnReferenceWithPlanId(unparsedIdentifier string, planId int64) expression {
	return &columnReference{unparsedIdentifier: unparsedIdentifier, planId: &planId}
}

// DebugString returns the raw column identifier.
func (c *columnReference) DebugString() string {
	return c.unparsedIdentifier
}

// ToProto converts the reference into a proto UnresolvedAttribute; PlanId is
// nil unless the reference was bound via NewColumnReferenceWithPlanId.
func (c *columnReference) ToProto(context.Context) (*proto.Expression, error) {
	expr := newProtoExpression()
	expr.ExprType = &proto.Expression_UnresolvedAttribute_{
		UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{
			UnparsedIdentifier: c.unparsedIdentifier,
			PlanId:             c.planId,
		},
	}
	return expr, nil
}

// sqlExression is a raw SQL expression string evaluated server-side.
// NOTE(review): the type name is missing a "p" (sqlExpression) and the field
// violates Go MixedCaps naming; both are referenced by the test file, so a
// rename should be done across files in one change.
type sqlExression struct {
	expression_string string
}

// NewSQLExpression creates an expression from a raw SQL string, e.g. "id < 10".
func NewSQLExpression(expression string) expression {
	return &sqlExression{expression_string: expression}
}

// DebugString returns the raw SQL expression string.
func (s *sqlExression) DebugString() string {
	return s.expression_string
}

// ToProto converts the raw SQL string into a proto ExpressionString.
func (s *sqlExression) ToProto(context.Context) (*proto.Expression, error) {
	expr := newProtoExpression()
	expr.ExprType = &proto.Expression_ExpressionString_{
		ExpressionString: &proto.Expression_ExpressionString{
			Expression: s.expression_string,
		},
	}
	return expr, nil
}

// literalExpression wraps a typed literal value; proto conversion is
// delegated to the literal type itself.
type literalExpression struct {
	value types.LiteralType
}

// DebugString renders the literal with %v formatting.
func (l *literalExpression) DebugString() string {
	return fmt.Sprintf("%v", l.value)
}

// ToProto delegates conversion to the wrapped literal value.
func (l *literalExpression) ToProto(ctx context.Context) (*proto.Expression, error) {
	return l.value.ToProto(ctx)
}

// NewLiteral creates a literal expression from a typed literal value.
func NewLiteral(value types.LiteralType) expression {
	return &literalExpression{value: value}
}

================================================
FILE: spark/sql/column/expressions_test.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package column

import (
	"context"
	"reflect"
	"testing"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/stretchr/testify/assert"
)

// TestNewUnresolvedFunction verifies that NewUnresolvedFunction produces the
// expected proto UnresolvedFunction, both without and with arguments.
func TestNewUnresolvedFunction(t *testing.T) {
	colRef := NewColumnReference("martin")
	colRefPlan, _ := colRef.ToProto(context.Background())
	type args struct {
		name       string
		arguments  []expression
		isDistinct bool
	}
	tests := []struct {
		name string
		args args
		want *proto.Expression
	}{
		{
			name: "TestNewUnresolvedFunction",
			args: args{
				name:       "id",
				arguments:  nil,
				isDistinct: false,
			},
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   false,
					},
				},
			},
		},
		{
			name: "TestNewUnresolvedWithArguments",
			args: args{
				name:       "id",
				arguments:  []expression{colRef},
				isDistinct: false,
			},
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							colRefPlan,
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := NewUnresolvedFunction(tt.args.name, tt.args.arguments, tt.args.isDistinct).ToProto(context.Background())
			assert.NoError(t, err)
			if !reflect.DeepEqual(got, tt.want) {
				assert.Equal(t, tt.want, got)
				t.Errorf("NewUnresolvedFunction() = %v, want %v", got, tt.want)
			}
		})
	}
}

// TestNewUnresolvedFunctionWithColumns verifies the Column-based constructor
// for zero, one, and multiple column arguments.
func TestNewUnresolvedFunctionWithColumns(t *testing.T) {
	colRef := NewColumn(NewColumnReference("martin"))
	colRefPlan, _ := colRef.ToProto(context.Background())
	type args struct {
		name      string
		arguments []Column
	}
	tests := []struct {
		name string
		args args
		want *proto.Expression
	}{
		{
			name: "TestNewUnresolvedFunction",
			args: args{
				name:      "id",
				arguments: nil,
			},
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   false,
					},
				},
			},
		},
		{
			name: "TestNewUnresolvedWithArguments",
			args: args{
				name:      "id",
				arguments: []Column{colRef},
			},
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							colRefPlan,
						},
					},
				},
			},
		},
		{
			name: "TestNewUnresolvedWithManyArguments",
			args: args{
				name:      "id",
				arguments: []Column{colRef, colRef, colRef},
			},
			want: &proto.Expression{
				ExprType: &proto.Expression_UnresolvedFunction_{
					UnresolvedFunction: &proto.Expression_UnresolvedFunction{
						FunctionName: "id",
						IsDistinct:   false,
						Arguments: []*proto.Expression{
							colRefPlan, colRefPlan, colRefPlan,
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := NewUnresolvedFunctionWithColumns(tt.args.name, tt.args.arguments...).ToProto(context.Background())
			assert.NoError(t, err)
			if !reflect.DeepEqual(got, tt.want) {
				assert.Equal(t, tt.want, got)
				t.Errorf("NewUnresolvedFunction() = %v, want %v", got, tt.want)
			}
		})
	}
}

// TestNewSQLExpression verifies the internal representation of a raw SQL
// expression string.
func TestNewSQLExpression(t *testing.T) {
	type args struct {
		expression string
	}
	tests := []struct {
		name string
		args args
		want *sqlExression
	}{
		{
			name: "TestNewSQLExpression",
			args: args{
				expression: "id < 10",
			},
			want: &sqlExression{
				expression_string: "id < 10",
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := NewSQLExpression(tt.args.expression); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("NewSQLExpression() = %v, want %v", got, tt.want)
			}
		})
	}
}

// TestColumnAlias_Basic checks that an alias wraps the underlying expression
// unchanged and renders the expected debug string.
func TestColumnAlias_Basic(t *testing.T) {
	colRef := NewColumnReference("column")
	colRefPlan, _ :=
		colRef.ToProto(context.Background())
	colAlias := NewColumnAlias("martin", colRef)
	colAliasPlan, _ := colAlias.ToProto(context.Background())
	assert.Equal(t, colRefPlan, colAliasPlan.GetAlias().GetExpr())
	// Test the debug string:
	assert.Equal(t, "column AS martin", colAlias.DebugString())
}

================================================
FILE: spark/sql/dataframe.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sql

import (
	"context"
	"fmt"
	"iter"
	"math/rand/v2"

	"github.com/apache/arrow-go/v18/arrow"

	"github.com/apache/spark-connect-go/spark/sql/utils"

	"github.com/apache/spark-connect-go/spark/sql/column"
	"github.com/apache/spark-connect-go/spark/sql/types"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/sparkerrors"
)

// ResultCollector receives a stream of result rows
type ResultCollector interface {
	// WriteRow receives a single row from the data frame
	WriteRow(values []any)
}

// DataFrame is a wrapper for data frame, representing a distributed collection of data row.
type DataFrame interface {
	// PlanId returns the plan id of the data frame.
	PlanId() int64
	// All returns an iterator over the rows of the DataFrame, yielding a row
	// or an error per step.
	All(ctx context.Context) iter.Seq2[types.Row, error]
	// Agg aggregates without explicit groups using the given column expressions.
	Agg(ctx context.Context, exprs ...column.Convertible) (DataFrame, error)
	// AggWithMap aggregates using a map of column name to aggregate function name.
	AggWithMap(ctx context.Context, exprs map[string]string) (DataFrame, error)
	// Alias creates a new DataFrame with the specified subquery alias
	Alias(ctx context.Context, alias string) DataFrame
	// ApproxQuantile computes approximate quantiles of the given numeric
	// columns at the given probabilities, within relativeError.
	ApproxQuantile(ctx context.Context, probabilities []float64, relativeError float64, cols ...string) ([][]float64, error)
	// Cache persists the DataFrame with the default storage level.
	Cache(ctx context.Context) error
	// Coalesce returns a new DataFrame that has exactly numPartitions partitions.
	//
	// Similar to coalesce defined on an :class:`RDD`, this operation results in a
	// narrow dependency, e.g. if you go from 1000 partitions to 100 partitions,
	// there will not be a shuffle, instead each of the 100 new partitions will
	// claim 10 of the current partitions. If a larger number of partitions is requested,
	// it will stay at the current number of partitions.
	//
	// However, if you're doing a drastic coalesce, e.g. to numPartitions = 1,
	// this may result in your computation taking place on fewer nodes than
	// you like (e.g. one node in the case of numPartitions = 1). To avoid this,
	// you can call repartition(). This will add a shuffle step, but means the
	// current upstream partitions will be executed in parallel (per whatever
	// the current partitioning is).
	Coalesce(ctx context.Context, numPartitions int) DataFrame
	// Columns returns the list of column names of the DataFrame.
	Columns(ctx context.Context) ([]string, error)
	// Corr calculates the correlation of two columns of a :class:`DataFrame` as a double value.
	// Currently only supports the Pearson Correlation Coefficient.
	Corr(ctx context.Context, col1, col2 string) (float64, error)
	// CorrWithMethod calculates the correlation of two columns using the given
	// correlation method.
	CorrWithMethod(ctx context.Context, col1, col2 string, method string) (float64, error)
	// Count returns the number of rows in the DataFrame.
	Count(ctx context.Context) (int64, error)
	// Cov calculates the sample covariance for the given columns, specified by their names, as a
	// double value.
	Cov(ctx context.Context, col1, col2 string) (float64, error)
	// Collect returns the data rows of the current data frame.
	Collect(ctx context.Context) ([]types.Row, error)
	// CreateTempView creates or replaces a temporary view.
	CreateTempView(ctx context.Context, viewName string, replace, global bool) error
	// CreateOrReplaceTempView creates or replaces a temporary view and replaces the optional existing view.
	CreateOrReplaceTempView(ctx context.Context, viewName string) error
	// CreateGlobalTempView creates a global temporary view.
	CreateGlobalTempView(ctx context.Context, viewName string) error
	// CreateOrReplaceGlobalTempView creates or replaces a global temporary view and replaces the optional existing view.
	CreateOrReplaceGlobalTempView(ctx context.Context, viewName string) error
	// CrossJoin joins the current DataFrame with another DataFrame using the cross product
	CrossJoin(ctx context.Context, other DataFrame) DataFrame
	// CrossTab computes a pair-wise frequency table of the given columns. Also known as a
	// contingency table.
	// The first column of each row will be the distinct values of `col1` and the column names
	// will be the distinct values of `col2`. The name of the first column will be `$col1_$col2`.
	// Pairs that have no occurrences will have zero as their counts.
	CrossTab(ctx context.Context, col1, col2 string) DataFrame
	// Cube creates a multi-dimensional cube for the current DataFrame using
	// the specified columns, so we can run aggregations on them.
	Cube(ctx context.Context, cols ...column.Convertible) *GroupedData
	// Describe computes basic statistics for numeric and string columns.
	// This includes count, mean, stddev, min, and max.
	Describe(ctx context.Context, cols ...string) DataFrame
	// Distinct returns a new DataFrame containing the distinct rows in this DataFrame.
	Distinct(ctx context.Context) DataFrame
	// Drop returns a new DataFrame that drops the specified list of columns.
	Drop(ctx context.Context, columns ...column.Convertible) (DataFrame, error)
	// DropByName returns a new DataFrame that drops the specified list of columns by name.
	DropByName(ctx context.Context, columns ...string) (DataFrame, error)
	// DropDuplicates returns a new DataFrame that contains only the unique rows from this DataFrame.
	DropDuplicates(ctx context.Context, columns ...string) (DataFrame, error)
	// DropNa drops all rows containing any null or NaN values. This is similar to PySpark's dropna with how=any
	DropNa(ctx context.Context, cols ...string) (DataFrame, error)
	// DropNaAll drops all rows containing all null or NaN values in the specified columns. This is
	// similar to PySpark's dropna with how=all
	DropNaAll(ctx context.Context, cols ...string) (DataFrame, error)
	// DropNaWithThreshold drops all rows containing null or NaN values in the specified columns, with a max threshold.
	DropNaWithThreshold(ctx context.Context, threshold int32, cols ...string) (DataFrame, error)
	// ExceptAll is similar to Subtract but does not perform the distinct operation.
	ExceptAll(ctx context.Context, other DataFrame) DataFrame
	// Explain returns the string explain plan for the current DataFrame according to the explainMode.
	Explain(ctx context.Context, explainMode utils.ExplainMode) (string, error)
	// FillNa replaces null values with specified value.
	FillNa(ctx context.Context, value types.PrimitiveTypeLiteral, columns ...string) (DataFrame, error)
	// FillNaWithValues replaces null values in specified columns (key of the map) with values.
	FillNaWithValues(ctx context.Context, values map[string]types.PrimitiveTypeLiteral) (DataFrame, error)
	// Filter filters the data frame by a column condition.
	Filter(ctx context.Context, condition column.Convertible) (DataFrame, error)
	// FilterByString filters the data frame by a string condition.
	FilterByString(ctx context.Context, condition string) (DataFrame, error)
	// First returns the first row of the DataFrame.
	First(ctx context.Context) (types.Row, error)
	// FreqItems finds frequent items for the given columns (default support).
	FreqItems(ctx context.Context, cols ...string) DataFrame
	// FreqItemsWithSupport finds frequent items for the given columns with the
	// given minimum support.
	FreqItemsWithSupport(ctx context.Context, support float64, cols ...string) DataFrame
	// GetStorageLevel returns the storage level of the data frame.
	GetStorageLevel(ctx context.Context) (*utils.StorageLevel, error)
	// GroupBy groups the DataFrame by the specified columns so that the aggregation
	// can be performed on them. See GroupedData for all the available aggregate functions.
	GroupBy(cols ...column.Convertible) *GroupedData
	// Head is an alias for Limit
	Head(ctx context.Context, limit int32) ([]types.Row, error)
	// Intersect performs the set intersection of two data frames and only returns distinct rows.
	Intersect(ctx context.Context, other DataFrame) DataFrame
	// IntersectAll performs the set intersection of two data frames and returns all rows.
	IntersectAll(ctx context.Context, other DataFrame) DataFrame
	// IsEmpty returns true if the DataFrame is empty.
	IsEmpty(ctx context.Context) (bool, error)
	// Join joins the current DataFrame with another DataFrame using the specified column using the joinType specified.
	Join(ctx context.Context, other DataFrame, on column.Convertible, joinType utils.JoinType) (DataFrame, error)
	// Limit applies a limit on the DataFrame
	Limit(ctx context.Context, limit int32) DataFrame
	// Melt is an alias for Unpivot.
	Melt(ctx context.Context, ids []column.Convertible, values []column.Convertible,
		variableColumnName string, valueColumnName string) (DataFrame, error)
	// Na returns the missing-data handling functions for this DataFrame.
	Na() DataFrameNaFunctions
	// Offset returns a new DataFrame by skipping the first `offset` rows.
	Offset(ctx context.Context, offset int32) DataFrame
	// OrderBy is an alias for Sort
	OrderBy(ctx context.Context, columns ...column.Convertible) (DataFrame, error)
	// PrintSchema prints the schema of the DataFrame.
	PrintSchema(ctx context.Context) error
	// Persist persists the DataFrame with the given storage level.
	Persist(ctx context.Context, storageLevel utils.StorageLevel) error
	// RandomSplit splits the DataFrame into multiple DataFrames weighted by `weights`.
	RandomSplit(ctx context.Context, weights []float64) ([]DataFrame, error)
	// Repartition re-partitions a data frame.
	Repartition(ctx context.Context, numPartitions int, columns []string) (DataFrame, error)
	// RepartitionByRange re-partitions a data frame by range partition.
	RepartitionByRange(ctx context.Context, numPartitions int, columns ...column.Convertible) (DataFrame, error)
	// Replace returns a new DataFrame replacing a value with another value.
	// Values toReplace and Values must have the same type and can only be numerics, booleans,
	// or strings. Value can have None. When replacing, the new value will be cast
	// to the type of the existing column.
	//
	// For numeric replacements all values to be replaced should have unique
	// floating point representation. If cols is set allows to specify a subset of columns to
	// perform the replacement.
	Replace(ctx context.Context, toReplace []types.PrimitiveTypeLiteral,
		values []types.PrimitiveTypeLiteral, cols ...string) (DataFrame, error)
	// Rollup creates a multi-dimensional rollup for the current DataFrame using
	// the specified columns, so we can run aggregation on them.
	Rollup(ctx context.Context, cols ...column.Convertible) *GroupedData
	// SameSemantics returns true if the other DataFrame has the same semantics.
	SameSemantics(ctx context.Context, other DataFrame) (bool, error)
	// Sample samples a data frame without replacement and random seed.
	Sample(ctx context.Context, fraction float64) (DataFrame, error)
	// SampleWithReplacement samples a data frame with random seed and with/without replacement.
	SampleWithReplacement(ctx context.Context, withReplacement bool, fraction float64) (DataFrame, error)
	// SampleWithSeed samples a data frame without replacement and given seed.
	SampleWithSeed(ctx context.Context, fraction float64, seed int64) (DataFrame, error)
	// SampleWithReplacementAndSeed samples a data frame with/without replacement and given seed.
	SampleWithReplacementAndSeed(ctx context.Context, withReplacement bool, fraction float64, seed int64) (DataFrame, error)
	// Show uses WriteResult to write the data frames to the console output.
	Show(ctx context.Context, numRows int, truncate bool) error
	// Schema returns the schema for the current data frame.
	Schema(ctx context.Context) (*types.StructType, error)
	// Select projects a list of columns from the DataFrame
	Select(ctx context.Context, columns ...column.Convertible) (DataFrame, error)
	// SelectExpr projects a list of columns from the DataFrame by string expressions
	SelectExpr(ctx context.Context, exprs ...string) (DataFrame, error)
	// SemanticHash returns the semantic hash of the data frame. The semantic hash can be used to
	// understand if the semantic operations are similar.
	SemanticHash(ctx context.Context) (int32, error)
	// Sort returns a new DataFrame sorted by the specified columns.
	Sort(ctx context.Context, columns ...column.Convertible) (DataFrame, error)
	// Stat returns the statistics functions for this DataFrame.
	Stat() DataFrameStatFunctions
	// Subtract subtracts the other DataFrame from the current DataFrame. And only returns
	// distinct rows.
	Subtract(ctx context.Context, other DataFrame) DataFrame
	// Summary computes the specified statistics for the current DataFrame and returns it
	// as a new DataFrame. Available statistics are: "count", "mean", "stddev", "min", "max" and
	// arbitrary percentiles specified as a percentage (e.g., "75%"). If no statistics are given,
	// this function computes "count", "mean", "stddev", "min", "25%", "50%", "75%", "max".
	Summary(ctx context.Context, statistics ...string) DataFrame
	// Tail returns the last `limit` rows as a list of Row.
	Tail(ctx context.Context, limit int32) ([]types.Row, error)
	// Take is an alias for Limit
	Take(ctx context.Context, limit int32) ([]types.Row, error)
	// ToArrow returns the Arrow representation of the DataFrame.
	ToArrow(ctx context.Context) (*arrow.Table, error)
	// Union is an alias for UnionAll
	Union(ctx context.Context, other DataFrame) DataFrame
	// UnionAll returns a new DataFrame containing union of rows in this and another DataFrame.
	UnionAll(ctx context.Context, other DataFrame) DataFrame
	// UnionByName performs a SQL union operation on two dataframes but reorders the schema
	// according to the matching columns. If columns are missing, it will throw an error.
	UnionByName(ctx context.Context, other DataFrame) DataFrame
	// UnionByNameWithMissingColumns performs a SQL union operation on two dataframes but reorders the schema
	// according to the matching columns. Missing columns are supported.
	UnionByNameWithMissingColumns(ctx context.Context, other DataFrame) DataFrame
	// Unpersist resets the storage level for this data frame, and if necessary removes it
	// from server-side caches.
	Unpersist(ctx context.Context) error
	// Unpivot a DataFrame from wide format to long format, optionally leaving
	// identifier columns set. This is the reverse to `groupBy(...).pivot(...).agg(...)`,
	// except for the aggregation, which cannot be reversed.
	//
	// This function is useful to massage a DataFrame into a format where some
	// columns are identifier columns ("ids"), while all other columns ("values")
	// are "unpivoted" to the rows, leaving just two non-id columns, named as given
	// by `variableColumnName` and `valueColumnName`.
	//
	// When no "id" columns are given, the unpivoted DataFrame consists of only the
	// "variable" and "value" columns.
	//
	// The `values` columns must not be empty so at least one value must be given to be unpivoted.
	// When `values` is `None`, all non-id columns will be unpivoted.
	//
	// All "value" columns must share a least common data type. Unless they are the same data type,
	// all "value" columns are cast to the nearest common data type. For instance, types
	// `IntegerType` and `LongType` are cast to `LongType`, while `IntegerType` and `StringType`
	// do not have a common data type and `unpivot` fails.
	Unpivot(ctx context.Context, ids []column.Convertible, values []column.Convertible,
		variableColumnName string, valueColumnName string) (DataFrame, error)
	// WithColumn returns a new DataFrame by adding a column or replacing the
	// existing column that has the same name. The column expression must be an
	// expression over this DataFrame; attempting to add a column from some other
	// DataFrame will raise an error.
	//
	// Note: This method introduces a projection internally. Therefore, calling it multiple
	// times, for instance, via loops in order to add multiple columns can generate big
	// plans which can cause performance issues and even `StackOverflowException`.
	// To avoid this, use :func:`select` with multiple columns at once.
	WithColumn(ctx context.Context, colName string, col column.Convertible) (DataFrame, error)
	// WithColumns returns a new DataFrame adding or replacing one column per given alias.
	WithColumns(ctx context.Context, alias ...column.Alias) (DataFrame, error)
	// WithColumnRenamed returns a new DataFrame by renaming an existing column.
	// This is a no-op if the schema doesn't contain the given column name.
	WithColumnRenamed(ctx context.Context, existingName, newName string) (DataFrame, error)
	// WithColumnsRenamed returns a new DataFrame by renaming multiple existing columns.
	WithColumnsRenamed(ctx context.Context, colsMap map[string]string) (DataFrame, error)
	// WithMetadata returns a new DataFrame with the specified metadata for each of the columns.
	WithMetadata(ctx context.Context, metadata map[string]string) (DataFrame, error)
	// WithWatermark defines an event-time watermark column and delay threshold.
	WithWatermark(ctx context.Context, eventTime string, delayThreshold string) (DataFrame, error)
	// Where filters the data frame by the given string condition.
	Where(ctx context.Context, condition string) (DataFrame, error)
	// Writer returns a data frame writer, which could be used to save data frame to supported storage.
	Writer() DataFrameWriter
	// Write is an alias for Writer
	// Deprecated: Use Writer
	Write() DataFrameWriter
	// WriteResult streams the data frames to a result collector
	WriteResult(ctx context.Context, collector ResultCollector, numRows int, truncate bool) error
}

// dataFrameImpl is an implementation of DataFrame interface.
type dataFrameImpl struct {
	session  *sparkSessionImpl
	relation *proto.Relation // TODO change to proto.Plan?
}

// Coalesce builds a non-shuffling Repartition relation with the requested
// number of partitions (narrow dependency; see interface docs).
func (df *dataFrameImpl) Coalesce(ctx context.Context, numPartitions int) DataFrame {
	shuffle := false
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Repartition{
			Repartition: &proto.Repartition{
				Input:         df.relation,
				Shuffle:       &shuffle,
				NumPartitions: int32(numPartitions),
			},
		},
	}
	return NewDataFrame(df.session, rel)
}

// Columns fetches the schema from the server and returns the field names in order.
func (df *dataFrameImpl) Columns(ctx context.Context) ([]string, error) {
	schema, err := df.Schema(ctx)
	if err != nil {
		return nil, err
	}
	columns := make([]string, len(schema.Fields))
	for i, field := range schema.Fields {
		columns[i] = field.Name
	}
	return columns, nil
}

// Corr computes the Pearson correlation of two columns (see CorrWithMethod).
func (df *dataFrameImpl) Corr(ctx context.Context, col1, col2 string) (float64, error) {
	return df.CorrWithMethod(ctx, col1, col2, "pearson")
}

// CorrWithMethod executes a StatCorr relation and returns the single scalar
// result from the first row of the response table.
func (df *dataFrameImpl) CorrWithMethod(ctx context.Context, col1, col2 string, method string) (float64, error) {
	plan := &proto.Plan{
		OpType: &proto.Plan_Root{
			Root: &proto.Relation{
				Common: &proto.RelationCommon{
					PlanId: newPlanId(),
				},
				RelType: &proto.Relation_Corr{
					Corr: &proto.StatCorr{
						Input:  df.relation,
						Col1:   col1,
						Col2:   col2,
						Method: &method,
					},
				},
			},
		},
	}
	responseClient, err := df.session.client.ExecutePlan(ctx, plan)
	if err != nil {
		return 0, sparkerrors.WithType(fmt.Errorf("failed to execute plan: %w", err), sparkerrors.ExecutionError)
	}
	_, table, err := responseClient.ToTable()
	if err != nil {
		return 0, err
	}
	values, err := types.ReadArrowTableToRows(table)
	if err != nil {
		return 0, err
	}
	// The result is a single scalar in the first row/column.
	return values[0].At(0).(float64), nil
}

// Count implements the row count as an ungrouped GroupBy().Count aggregation
// and reads the single scalar result.
func (df *dataFrameImpl) Count(ctx context.Context) (int64, error) {
	res, err := df.GroupBy().Count(ctx)
	if err != nil {
		return 0, err
	}
	rows, err := res.Collect(ctx)
	if err != nil {
		return 0, err
	}
	return rows[0].At(0).(int64), nil
}

// Cov executes a StatCov relation and returns the single scalar result.
func (df *dataFrameImpl) Cov(ctx context.Context, col1, col2 string) (float64, error) {
	plan := &proto.Plan{
		OpType: &proto.Plan_Root{
			Root: &proto.Relation{
				Common: &proto.RelationCommon{
					PlanId: newPlanId(),
				},
				RelType: &proto.Relation_Cov{
					Cov: &proto.StatCov{
						Input: df.relation,
						Col1:  col1,
						Col2:  col2,
					},
				},
			},
		},
	}
	responseClient, err := df.session.client.ExecutePlan(ctx, plan)
	if err != nil {
		return 0, sparkerrors.WithType(fmt.Errorf("failed to execute plan: %w", err), sparkerrors.ExecutionError)
	}
	_, table, err := responseClient.ToTable()
	if err != nil {
		return 0, err
	}
	values, err := types.ReadArrowTableToRows(table)
	if err != nil {
		return 0, err
	}
	return values[0].At(0).(float64), nil
}

// PlanId returns the plan id recorded in the relation's common metadata.
func (df *dataFrameImpl) PlanId() int64 {
	return df.relation.GetCommon().GetPlanId()
}

// SelectExpr builds a Project relation from raw SQL expression strings.
func (df *dataFrameImpl) SelectExpr(ctx context.Context, exprs ...string) (DataFrame, error) {
	expressions := make([]*proto.Expression, 0, len(exprs))
	for _, expr := range exprs {
		col := column.NewSQLExpression(expr)
		f, e := col.ToProto(ctx)
		if e != nil {
			return nil, e
		}
		expressions = append(expressions, f)
	}
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Project{
			Project: &proto.Project{
				Input:       df.relation,
				Expressions: expressions,
			},
		},
	}
	return NewDataFrame(df.session, rel), nil
}

// Alias wraps the current relation in a SubqueryAlias relation.
func (df *dataFrameImpl) Alias(ctx context.Context, alias string) DataFrame {
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_SubqueryAlias{
			SubqueryAlias: &proto.SubqueryAlias{
				Input: df.relation,
				Alias: alias,
			},
		},
	}
	return NewDataFrame(df.session, rel)
}

// CrossJoin builds a Join relation with JOIN_TYPE_CROSS and no join condition.
func (df *dataFrameImpl) CrossJoin(ctx context.Context, other DataFrame) DataFrame {
	otherDf := other.(*dataFrameImpl)
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Join{
			Join: &proto.Join{
				Left:     df.relation,
				Right:    otherDf.relation,
				JoinType: proto.Join_JOIN_TYPE_CROSS,
			},
		},
	}
	return NewDataFrame(df.session, rel)
}

// NewDataFrame creates a new DataFrame
func NewDataFrame(session *sparkSessionImpl, relation *proto.Relation) DataFrame {
	return &dataFrameImpl{
		session:  session,
		relation: relation,
	}
}

// consoleCollector is a ResultCollector that prints each row to stdout.
type consoleCollector struct{}

// WriteRow prints a single row's values to the console.
func (c consoleCollector) WriteRow(values []any) {
	fmt.Println(values...)
}

// Show renders the DataFrame to the console via WriteResult.
func (df *dataFrameImpl) Show(ctx context.Context, numRows int, truncate bool) error {
	return df.WriteResult(ctx, &consoleCollector{}, numRows, truncate)
}

// WriteResult executes a ShowString relation server-side and streams the
// pre-formatted rows to the collector. truncate=true limits cells to 20 chars.
func (df *dataFrameImpl) WriteResult(ctx context.Context, collector ResultCollector, numRows int, truncate bool) error {
	truncateValue := 0
	if truncate {
		truncateValue = 20
	}
	vertical := false
	plan := &proto.Plan{
		OpType: &proto.Plan_Root{
			Root: &proto.Relation{
				Common: &proto.RelationCommon{
					PlanId: newPlanId(),
				},
				RelType: &proto.Relation_ShowString{
					ShowString: &proto.ShowString{
						Input:    df.relation,
						NumRows:  int32(numRows),
						Truncate: int32(truncateValue),
						Vertical: vertical,
					},
				},
			},
		},
	}
	responseClient, err := df.session.client.ExecutePlan(ctx, plan)
	if err != nil {
		return sparkerrors.WithType(fmt.Errorf("failed to show dataframe: %w", err), sparkerrors.ExecutionError)
	}
	_, table, err := responseClient.ToTable()
	if err != nil {
		return err
	}
	rows, err := types.ReadArrowTableToRows(table)
	if err != nil {
		return err
	}
	for _, row := range rows {
		values := row.Values()
		collector.WriteRow(values)
	}
	return nil
}

// Schema analyzes the current plan and converts the returned proto data type
// into a StructType (continued below).
func (df *dataFrameImpl) Schema(ctx
context.Context) (*types.StructType, error) { response, err := df.session.client.AnalyzePlan(ctx, df.createPlan()) if err != nil { return nil, sparkerrors.WithType(fmt.Errorf("failed to analyze plan: %w", err), sparkerrors.ExecutionError) } responseSchema := response.GetSchema().Schema return types.ConvertProtoDataTypeToStructType(responseSchema) } func (df *dataFrameImpl) Collect(ctx context.Context) ([]types.Row, error) { responseClient, err := df.session.client.ExecutePlan(ctx, df.createPlan()) if err != nil { return nil, sparkerrors.WithType(fmt.Errorf("failed to execute plan: %w", err), sparkerrors.ExecutionError) } _, table, err := responseClient.ToTable() if err != nil { return nil, err } return types.ReadArrowTableToRows(table) } func (df *dataFrameImpl) Write() DataFrameWriter { return df.Writer() } func (df *dataFrameImpl) Writer() DataFrameWriter { return newDataFrameWriter(df.session, df.relation) } func (df *dataFrameImpl) CreateTempView(ctx context.Context, viewName string, replace, global bool) error { plan := &proto.Plan{ OpType: &proto.Plan_Command{ Command: &proto.Command{ CommandType: &proto.Command_CreateDataframeView{ CreateDataframeView: &proto.CreateDataFrameViewCommand{ Input: df.relation, Name: viewName, Replace: replace, IsGlobal: global, }, }, }, }, } responseClient, err := df.session.client.ExecutePlan(ctx, plan) if err != nil { return sparkerrors.WithType(fmt.Errorf("failed to create temp view %s: %w", viewName, err), sparkerrors.ExecutionError) } _, _, err = responseClient.ToTable() return err } func (df *dataFrameImpl) CreateOrReplaceTempView(ctx context.Context, viewName string) error { return df.CreateTempView(ctx, viewName, true, false) } func (df *dataFrameImpl) CreateGlobalTempView(ctx context.Context, viewName string) error { return df.CreateTempView(ctx, viewName, false, true) } func (df *dataFrameImpl) CreateOrReplaceGlobalTempView(ctx context.Context, viewName string) error { return df.CreateTempView(ctx, viewName, true, 
true) } func (df *dataFrameImpl) Repartition(ctx context.Context, numPartitions int, columns []string) (DataFrame, error) { var partitionExpressions []*proto.Expression if columns != nil { partitionExpressions = make([]*proto.Expression, 0, len(columns)) for _, c := range columns { expr := &proto.Expression{ ExprType: &proto.Expression_UnresolvedAttribute_{ UnresolvedAttribute: &proto.Expression_UnresolvedAttribute{ UnparsedIdentifier: c, }, }, } partitionExpressions = append(partitionExpressions, expr) } } return df.repartitionByExpressions(numPartitions, partitionExpressions) } func (df *dataFrameImpl) RepartitionByRange(ctx context.Context, numPartitions int, columns ...column.Convertible) (DataFrame, error) { var partitionExpressions []*proto.Expression if columns != nil { partitionExpressions = make([]*proto.Expression, 0, len(columns)) for _, c := range columns { expr, err := c.ToProto(ctx) if err != nil { return nil, err } partitionExpressions = append(partitionExpressions, expr) } } return df.repartitionByExpressions(numPartitions, partitionExpressions) } func (df *dataFrameImpl) createPlan() *proto.Plan { return &proto.Plan{ OpType: &proto.Plan_Root{ Root: df.relation, }, } } func (df *dataFrameImpl) repartitionByExpressions(numPartitions int, partitionExpressions []*proto.Expression, ) (DataFrame, error) { var numPartitionsPointerValue *int32 if numPartitions != 0 { int32Value := int32(numPartitions) numPartitionsPointerValue = &int32Value } df.relation.GetRepartitionByExpression() newRelation := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_RepartitionByExpression{ RepartitionByExpression: &proto.RepartitionByExpression{ Input: df.relation, NumPartitions: numPartitionsPointerValue, PartitionExprs: partitionExpressions, }, }, } return NewDataFrame(df.session, newRelation), nil } func (df *dataFrameImpl) Filter(ctx context.Context, condition column.Convertible) (DataFrame, error) { cnd, err := 
condition.ToProto(ctx) if err != nil { return nil, err } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Filter{ Filter: &proto.Filter{ Input: df.relation, Condition: cnd, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) FilterByString(ctx context.Context, condition string) (DataFrame, error) { return df.Filter(ctx, column.NewColumn(column.NewSQLExpression(condition))) } func (df *dataFrameImpl) Select(ctx context.Context, columns ...column.Convertible) (DataFrame, error) { // if len(columns) == 0 { return df, nil } exprs := make([]*proto.Expression, 0, len(columns)) for _, c := range columns { expr, err := c.ToProto(ctx) if err != nil { return nil, err } exprs = append(exprs, expr) } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Project{ Project: &proto.Project{ Input: df.relation, Expressions: exprs, }, }, } return NewDataFrame(df.session, rel), nil } // GroupBy groups the DataFrame by the specified columns so that aggregation // can be performed on them. See GroupedData for all the available aggregate functions. func (df *dataFrameImpl) GroupBy(cols ...column.Convertible) *GroupedData { return &GroupedData{ df: df, groupingCols: cols, groupType: "groupby", } } func (df *dataFrameImpl) WithColumn(ctx context.Context, colName string, col column.Convertible) (DataFrame, error) { return df.WithColumns(ctx, column.WithAlias(colName, col)) } func (df *dataFrameImpl) WithColumns(ctx context.Context, cols ...column.Alias) (DataFrame, error) { // Convert all columns to proto expressions and the corresponding alias: aliases := make([]*proto.Expression_Alias, 0, len(cols)) for _, col := range cols { expr, err := col.ToProto(ctx) if err != nil { return nil, err } // The alias must be an alias expression. 
alias := expr.GetAlias() aliases = append(aliases, alias) } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_WithColumns{ WithColumns: &proto.WithColumns{ Input: df.relation, Aliases: aliases, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) WithColumnRenamed(ctx context.Context, existingName, newName string) (DataFrame, error) { return df.WithColumnsRenamed(ctx, map[string]string{existingName: newName}) } func (df *dataFrameImpl) WithColumnsRenamed(ctx context.Context, colsMap map[string]string) (DataFrame, error) { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_WithColumnsRenamed{ WithColumnsRenamed: &proto.WithColumnsRenamed{ Input: df.relation, RenameColumnsMap: colsMap, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) WithMetadata(ctx context.Context, metadata map[string]string) (DataFrame, error) { // WithMetadata works the same way as with columns but extracts the column reference from the DataFrame // and injects it back into the projection. 
aliases := make([]*proto.Expression_Alias, 0, len(metadata))
	// The loop variable is named meta so it does not shadow the metadata map
	// being iterated over.
	for colName, meta := range metadata {
		// Re-project the existing column under the same name, attaching the
		// metadata JSON string to the alias.
		expr := column.OfDF(df, colName)
		exprProto, err := expr.ToProto(ctx)
		if err != nil {
			return nil, err
		}
		meta := meta // per-alias copy: Metadata stores a pointer to the string
		alias := &proto.Expression_Alias{
			Expr:     exprProto,
			Name:     []string{colName},
			Metadata: &meta,
		}
		aliases = append(aliases, alias)
	}
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_WithColumns{
			WithColumns: &proto.WithColumns{
				Input:   df.relation,
				Aliases: aliases,
			},
		},
	}
	return NewDataFrame(df.session, rel), nil
}

// WithWatermark defines an event-time watermark on the given column with the
// given delay threshold for this (streaming) DataFrame.
func (df *dataFrameImpl) WithWatermark(ctx context.Context, eventTime string, delayThreshold string) (DataFrame, error) {
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_WithWatermark{
			WithWatermark: &proto.WithWatermark{
				Input:          df.relation,
				EventTime:      eventTime,
				DelayThreshold: delayThreshold,
			},
		},
	}
	return NewDataFrame(df.session, rel), nil
}

// Where is an alias for FilterByString.
func (df *dataFrameImpl) Where(ctx context.Context, condition string) (DataFrame, error) {
	return df.FilterByString(ctx, condition)
}

// Drop returns a new DataFrame without the given column expressions.
func (df *dataFrameImpl) Drop(ctx context.Context, columns ...column.Convertible) (DataFrame, error) {
	exprs := make([]*proto.Expression, 0, len(columns))
	for _, c := range columns {
		e, err := c.ToProto(ctx)
		if err != nil {
			return nil, err
		}
		exprs = append(exprs, e)
	}
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Drop{
			Drop: &proto.Drop{
				Input:   df.relation,
				Columns: exprs,
			},
		},
	}
	return NewDataFrame(df.session, rel), nil
}

// DropByName returns a new DataFrame without the columns of the given names.
func (df *dataFrameImpl) DropByName(ctx context.Context, columns ...string) (DataFrame, error) {
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Drop{
			Drop: &proto.Drop{
				Input:       df.relation,
				ColumnNames: columns,
			},
		},
	}
	return NewDataFrame(df.session, rel), nil
}

func (df *dataFrameImpl) DropDuplicates(ctx context.Context, columns
...string) (DataFrame, error) { withinWatermark := false allColumnsAsKeys := len(columns) == 0 rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Deduplicate{ Deduplicate: &proto.Deduplicate{ Input: df.relation, ColumnNames: columns, WithinWatermark: &withinWatermark, AllColumnsAsKeys: &allColumnsAsKeys, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) Tail(ctx context.Context, limit int32) ([]types.Row, error) { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Tail{ Tail: &proto.Tail{ Input: df.relation, Limit: limit, }, }, } data := NewDataFrame(df.session, rel) return data.Collect(ctx) } func (df *dataFrameImpl) Limit(ctx context.Context, limit int32) DataFrame { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Limit{ Limit: &proto.Limit{ Input: df.relation, Limit: limit, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Head(ctx context.Context, limit int32) ([]types.Row, error) { return df.Limit(ctx, limit).Collect(ctx) } func (df *dataFrameImpl) Take(ctx context.Context, limit int32) ([]types.Row, error) { return df.Limit(ctx, limit).Collect(ctx) } func (df *dataFrameImpl) ToArrow(ctx context.Context) (*arrow.Table, error) { responseClient, err := df.session.client.ExecutePlan(ctx, df.createPlan()) if err != nil { return nil, sparkerrors.WithType(fmt.Errorf("failed to execute plan: %w", err), sparkerrors.ExecutionError) } _, table, err := responseClient.ToTable() if err != nil { return nil, err } return &table, nil } func (df *dataFrameImpl) UnionAll(ctx context.Context, other DataFrame) DataFrame { otherDf := other.(*dataFrameImpl) isAll := true rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_SetOp{ SetOp: &proto.SetOperation{ LeftInput: df.relation, RightInput: otherDf.relation, SetOpType: 
proto.SetOperation_SET_OP_TYPE_UNION, IsAll: &isAll, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Union(ctx context.Context, other DataFrame) DataFrame { return df.UnionAll(ctx, other) } func (df *dataFrameImpl) UnionByName(ctx context.Context, other DataFrame) DataFrame { otherDf := other.(*dataFrameImpl) byName := true allowMissingColumns := false rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_SetOp{ SetOp: &proto.SetOperation{ LeftInput: df.relation, RightInput: otherDf.relation, SetOpType: proto.SetOperation_SET_OP_TYPE_UNION, ByName: &byName, AllowMissingColumns: &allowMissingColumns, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) UnionByNameWithMissingColumns(ctx context.Context, other DataFrame) DataFrame { otherDf := other.(*dataFrameImpl) byName := true allowMissingColumns := true rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_SetOp{ SetOp: &proto.SetOperation{ LeftInput: df.relation, RightInput: otherDf.relation, SetOpType: proto.SetOperation_SET_OP_TYPE_UNION, ByName: &byName, AllowMissingColumns: &allowMissingColumns, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) ExceptAll(ctx context.Context, other DataFrame) DataFrame { otherDf := other.(*dataFrameImpl) isAll := true rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_SetOp{ SetOp: &proto.SetOperation{ LeftInput: df.relation, RightInput: otherDf.relation, SetOpType: proto.SetOperation_SET_OP_TYPE_EXCEPT, IsAll: &isAll, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Subtract(ctx context.Context, other DataFrame) DataFrame { otherDf := other.(*dataFrameImpl) isAll := false rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_SetOp{ SetOp: &proto.SetOperation{ LeftInput: df.relation, 
RightInput: otherDf.relation, SetOpType: proto.SetOperation_SET_OP_TYPE_EXCEPT, IsAll: &isAll, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Intersect(ctx context.Context, other DataFrame) DataFrame { otherDf := other.(*dataFrameImpl) isAll := false rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_SetOp{ SetOp: &proto.SetOperation{ LeftInput: df.relation, RightInput: otherDf.relation, SetOpType: proto.SetOperation_SET_OP_TYPE_INTERSECT, IsAll: &isAll, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) IntersectAll(ctx context.Context, other DataFrame) DataFrame { otherDf := other.(*dataFrameImpl) isAll := true rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_SetOp{ SetOp: &proto.SetOperation{ LeftInput: df.relation, RightInput: otherDf.relation, SetOpType: proto.SetOperation_SET_OP_TYPE_INTERSECT, IsAll: &isAll, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Sort(ctx context.Context, columns ...column.Convertible) (DataFrame, error) { globalSort := true sortExprs := make([]*proto.Expression_SortOrder, 0, len(columns)) for _, c := range columns { expr, err := c.ToProto(ctx) if err != nil { return nil, err } so := expr.GetSortOrder() if so == nil { return nil, sparkerrors.WithType(fmt.Errorf( "sort expression must not be nil"), sparkerrors.InvalidArgumentError) } sortExprs = append(sortExprs, so) } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Sort{ Sort: &proto.Sort{ Input: df.relation, Order: sortExprs, IsGlobal: &globalSort, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) SortWithinPartitions(ctx context.Context, columns ...column.Convertible) (DataFrame, error) { globalSort := false sortExprs := make([]*proto.Expression_SortOrder, 0, len(columns)) for _, c := range columns { expr, err := 
c.ToProto(ctx) if err != nil { return nil, err } sortExprs = append(sortExprs, expr.GetSortOrder()) } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Sort{ Sort: &proto.Sort{ Input: df.relation, Order: sortExprs, IsGlobal: &globalSort, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) OrderBy(ctx context.Context, columns ...column.Convertible) (DataFrame, error) { return df.Sort(ctx, columns...) } func (df *dataFrameImpl) Explain(ctx context.Context, explainMode utils.ExplainMode) (string, error) { plan := df.createPlan() responseClient, err := df.session.client.Explain(ctx, plan, explainMode) if err != nil { return "", sparkerrors.WithType(fmt.Errorf("failed to execute plan: %w", err), sparkerrors.ExecutionError) } return responseClient.GetExplain().GetExplainString(), nil } func (df *dataFrameImpl) Persist(ctx context.Context, storageLevel utils.StorageLevel) error { plan := &proto.Plan{ OpType: &proto.Plan_Root{ Root: df.relation, }, } return df.session.client.Persist(ctx, plan, storageLevel) } func (df *dataFrameImpl) Cache(ctx context.Context) error { return df.Persist(ctx, utils.StorageLevelMemoryOnly) } func (df *dataFrameImpl) Unpersist(ctx context.Context) error { plan := &proto.Plan{ OpType: &proto.Plan_Root{ Root: df.relation, }, } return df.session.client.Unpersist(ctx, plan) } func (df *dataFrameImpl) GetStorageLevel(ctx context.Context) (*utils.StorageLevel, error) { plan := &proto.Plan{ OpType: &proto.Plan_Root{ Root: df.relation, }, } return df.session.client.GetStorageLevel(ctx, plan) } func (df *dataFrameImpl) SameSemantics(ctx context.Context, other DataFrame) (bool, error) { otherDf := other.(*dataFrameImpl) plan := &proto.Plan{ OpType: &proto.Plan_Root{ Root: df.relation, }, } otherPlan := &proto.Plan{ OpType: &proto.Plan_Root{ Root: otherDf.relation, }, } return df.session.client.SameSemantics(ctx, plan, otherPlan) } func (df *dataFrameImpl) SemanticHash(ctx 
context.Context) (int32, error) { plan := &proto.Plan{ OpType: &proto.Plan_Root{ Root: df.relation, }, } return df.session.client.SemanticHash(ctx, plan) } func (df *dataFrameImpl) Join(ctx context.Context, other DataFrame, onExpr column.Convertible, joinType utils.JoinType) (DataFrame, error) { otherDf := other.(*dataFrameImpl) onExpression, err := onExpr.ToProto(ctx) if err != nil { return nil, err } joinTypeProto := utils.ToProtoJoinType(joinType) rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Join{ Join: &proto.Join{ Left: df.relation, Right: otherDf.relation, JoinType: joinTypeProto, JoinCondition: onExpression, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) CrossTab(ctx context.Context, col1, col2 string) DataFrame { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Crosstab{ Crosstab: &proto.StatCrosstab{ Input: df.relation, Col1: col1, Col2: col2, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Cube(ctx context.Context, cols ...column.Convertible) *GroupedData { return &GroupedData{ df: df, groupingCols: cols, groupType: "cube", } } func (df *dataFrameImpl) Rollup(ctx context.Context, cols ...column.Convertible) *GroupedData { return &GroupedData{ df: df, groupingCols: cols, groupType: "rollup", } } func (df *dataFrameImpl) Describe(ctx context.Context, cols ...string) DataFrame { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Describe{ Describe: &proto.StatDescribe{ Input: df.relation, Cols: cols, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Distinct(ctx context.Context) DataFrame { allColumnsAsKeys := true rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Deduplicate{ Deduplicate: &proto.Deduplicate{ Input: df.relation, AllColumnsAsKeys: &allColumnsAsKeys, 
},
		},
	}
	return NewDataFrame(df.session, rel)
}

// First returns the first row of the DataFrame, or an error when the
// DataFrame is empty.
func (df *dataFrameImpl) First(ctx context.Context) (types.Row, error) {
	rows, err := df.Head(ctx, 1)
	if err != nil {
		return nil, err
	}
	// Guard against indexing into an empty result set.
	if len(rows) == 0 {
		return nil, sparkerrors.WithType(fmt.Errorf(
			"the DataFrame is empty"), sparkerrors.ExecutionError)
	}
	return rows[0], nil
}

// FreqItems finds frequent items for the given columns using the default
// minimum support of 1%.
func (df *dataFrameImpl) FreqItems(ctx context.Context, cols ...string) DataFrame {
	return df.FreqItemsWithSupport(ctx, 0.01, cols...)
}

// FreqItemsWithSupport finds frequent items for the given columns with the
// given minimum frequency (support).
func (df *dataFrameImpl) FreqItemsWithSupport(ctx context.Context, support float64, cols ...string) DataFrame {
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_FreqItems{
			FreqItems: &proto.StatFreqItems{
				Input:   df.relation,
				Cols:    cols,
				Support: &support,
			},
		},
	}
	return NewDataFrame(df.session, rel)
}

// IsEmpty reports whether the DataFrame has no rows. It projects away all
// columns first so only row existence is transferred.
func (df *dataFrameImpl) IsEmpty(ctx context.Context) (bool, error) {
	d, err := df.Select(ctx)
	if err != nil {
		return false, err
	}
	rows, err := d.Take(ctx, int32(1))
	if err != nil {
		return false, err
	}
	return len(rows) == 0, nil
}

// Offset returns a new DataFrame that skips the first offset rows.
func (df *dataFrameImpl) Offset(ctx context.Context, offset int32) DataFrame {
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Offset{
			Offset: &proto.Offset{
				Input:  df.relation,
				Offset: offset,
			},
		},
	}
	return NewDataFrame(df.session, rel)
}

// RandomSplit splits the DataFrame into multiple DataFrames by the given
// (not necessarily normalized) weights. Weights must be non-negative and
// must not all be zero.
func (df *dataFrameImpl) RandomSplit(ctx context.Context, weights []float64) ([]DataFrame, error) {
	// Check that we don't have negative weights:
	total := 0.0
	for _, w := range weights {
		if w < 0.0 {
			return nil, sparkerrors.WithType(fmt.Errorf("weights must not be negative"),
				sparkerrors.InvalidArgumentError)
		}
		total += w
	}
	// An all-zero weight vector would produce NaN sample bounds below.
	if total == 0.0 {
		return nil, sparkerrors.WithType(fmt.Errorf("the sum of weights must be positive"),
			sparkerrors.InvalidArgumentError)
	}
	seed := rand.Int64()
	normalizedWeights := make([]float64, len(weights))
	for i, w := range weights {
		normalizedWeights[i] = w / total
	}
	// Calculate the cumulative sum of the weights:
	cumulativeWeights := make([]float64, len(weights)+1)
	cumulativeWeights[0] = 0.0
	for i := 0; i < len(normalizedWeights); i++ {
		cumulativeWeights[i+1] = cumulativeWeights[i] + normalizedWeights[i]
	}
	// Iterate over cumulative weights as the boundaries of the
interval and create the dataframes: dataFrames := make([]DataFrame, len(weights)) withReplacement := false for i := 1; i < len(cumulativeWeights); i++ { sampleRelation := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Sample{ Sample: &proto.Sample{ Input: df.relation, LowerBound: cumulativeWeights[i-1], UpperBound: cumulativeWeights[i], WithReplacement: &withReplacement, Seed: &seed, DeterministicOrder: true, }, }, } dataFrames[i-1] = NewDataFrame(df.session, sampleRelation) } return dataFrames, nil } func (df *dataFrameImpl) Summary(ctx context.Context, statistics ...string) DataFrame { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Summary{ Summary: &proto.StatSummary{ Input: df.relation, Statistics: statistics, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) Sample(ctx context.Context, fraction float64) (DataFrame, error) { return df.sample(ctx, nil, fraction, nil) } func (df *dataFrameImpl) SampleWithReplacement(ctx context.Context, withReplacement bool, fraction float64) (DataFrame, error) { return df.sample(ctx, &withReplacement, fraction, nil) } func (df *dataFrameImpl) SampleWithSeed(ctx context.Context, fraction float64, seed int64) (DataFrame, error) { return df.sample(ctx, nil, fraction, &seed) } func (df *dataFrameImpl) SampleWithReplacementAndSeed(ctx context.Context, withReplacement bool, fraction float64, seed int64) (DataFrame, error) { return df.sample(ctx, &withReplacement, fraction, &seed) } func (df *dataFrameImpl) sample(ctx context.Context, withReplacement *bool, fraction float64, seed *int64) (DataFrame, error) { if seed == nil { defaultSeed := rand.Int64() seed = &defaultSeed } if withReplacement == nil { defaultWithReplacement := false withReplacement = &defaultWithReplacement } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Sample{ Sample: &proto.Sample{ 
Input: df.relation, LowerBound: 0, UpperBound: fraction, WithReplacement: withReplacement, Seed: seed, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) Replace(ctx context.Context, toReplace []types.PrimitiveTypeLiteral, values []types.PrimitiveTypeLiteral, cols ...string, ) (DataFrame, error) { if len(toReplace) != len(values) { return nil, sparkerrors.WithType(fmt.Errorf( "toReplace and values must have the same length"), sparkerrors.InvalidArgumentError) } toReplaceExprs := make([]*proto.Expression, 0, len(toReplace)) for _, c := range toReplace { expr, err := c.ToProto(ctx) if err != nil { return nil, err } toReplaceExprs = append(toReplaceExprs, expr) } valuesExprs := make([]*proto.Expression, 0, len(values)) for _, c := range values { expr, err := c.ToProto(ctx) if err != nil { return nil, err } valuesExprs = append(valuesExprs, expr) } // Create a list of NAReplace expressions. replacements := make([]*proto.NAReplace_Replacement, 0, len(toReplace)) for i := 0; i < len(toReplace); i++ { replacement := &proto.NAReplace_Replacement{ OldValue: toReplaceExprs[i].GetLiteral(), NewValue: valuesExprs[i].GetLiteral(), } replacements = append(replacements, replacement) } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Replace{ Replace: &proto.NAReplace{ Input: df.relation, Replacements: replacements, Cols: cols, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) Melt(ctx context.Context, ids []column.Convertible, values []column.Convertible, variableColumnName string, valueColumnName string, ) (DataFrame, error) { return df.Unpivot(ctx, ids, values, variableColumnName, valueColumnName) } func (df *dataFrameImpl) Unpivot(ctx context.Context, ids []column.Convertible, values []column.Convertible, variableColumnName string, valueColumnName string, ) (DataFrame, error) { idExprs := make([]*proto.Expression, 0, len(ids)) for _, id := range ids { expr, err := 
id.ToProto(ctx) if err != nil { return nil, err } idExprs = append(idExprs, expr) } valueExprs := make([]*proto.Expression, 0, len(values)) for _, value := range values { expr, err := value.ToProto(ctx) if err != nil { return nil, err } valueExprs = append(valueExprs, expr) } rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_Unpivot{ Unpivot: &proto.Unpivot{ Input: df.relation, Ids: idExprs, Values: &proto.Unpivot_Values{ Values: valueExprs, }, VariableColumnName: variableColumnName, ValueColumnName: valueColumnName, }, }, } return NewDataFrame(df.session, rel), nil } func makeDataframeWithFillNaRelation(df *dataFrameImpl, values []*proto.Expression_Literal, columns []string) DataFrame { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_FillNa{ FillNa: &proto.NAFill{ Input: df.relation, Cols: columns, Values: values, }, }, } return NewDataFrame(df.session, rel) } func (df *dataFrameImpl) FillNa(ctx context.Context, value types.PrimitiveTypeLiteral, columns ...string) (DataFrame, error) { valueLiteral, err := value.ToProto(ctx) if err != nil { return nil, err } return makeDataframeWithFillNaRelation(df, []*proto.Expression_Literal{ valueLiteral.GetLiteral(), }, columns), nil } func (df *dataFrameImpl) FillNaWithValues(ctx context.Context, values map[string]types.PrimitiveTypeLiteral, ) (DataFrame, error) { valueLiterals := make([]*proto.Expression_Literal, 0, len(values)) columns := make([]string, 0, len(values)) for k, v := range values { valueLiteral, err := v.ToProto(ctx) if err != nil { return nil, err } valueLiterals = append(valueLiterals, valueLiteral.GetLiteral()) columns = append(columns, k) } return makeDataframeWithFillNaRelation(df, valueLiterals, columns), nil } func (df *dataFrameImpl) Stat() DataFrameStatFunctions { return &dataFrameStatFunctionsImpl{df: df} } func (df *dataFrameImpl) Agg(ctx context.Context, cols ...column.Convertible) 
(DataFrame, error) { return df.GroupBy().Agg(ctx, cols...) } func (df *dataFrameImpl) AggWithMap(ctx context.Context, exprs map[string]string) (DataFrame, error) { funs := make([]column.Convertible, 0) for k, v := range exprs { // Convert the column name to a column expression. col := column.OfDF(df, k) // Convert the value string to an unresolved function name. fun := column.NewUnresolvedFunctionWithColumns(v, col) funs = append(funs, fun) } return df.Agg(ctx, funs...) } func (df *dataFrameImpl) ApproxQuantile(ctx context.Context, probabilities []float64, relativeError float64, cols ...string, ) ([][]float64, error) { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_ApproxQuantile{ ApproxQuantile: &proto.StatApproxQuantile{ Input: df.relation, Probabilities: probabilities, RelativeError: relativeError, Cols: cols, }, }, } data := NewDataFrame(df.session, rel) rows, err := data.Collect(ctx) if err != nil { return nil, err } // The result structure is a bit weird here, essentially it returns exactly one row with // the quantiles. // Inside the row is a list of nested arroys that contain the quantiles. The first column is the // first nested array, the second column is the second nested array and so on. 
nested := rows[0].At(0).([]interface{}) result := make([][]float64, len(nested)) for i := 0; i < len(nested); i++ { tmp := nested[i].([]interface{}) result[i] = make([]float64, len(tmp)) for j := 0; j < len(tmp); j++ { f, ok := tmp[j].(float64) if !ok { return nil, sparkerrors.WithType(fmt.Errorf( "failed to cast to float64"), sparkerrors.ExecutionError) } result[i][j] = f } } return result, nil } func (df *dataFrameImpl) DropNa(ctx context.Context, subset ...string) (DataFrame, error) { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_DropNa{ DropNa: &proto.NADrop{ Input: df.relation, Cols: subset, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) DropNaAll(ctx context.Context, subset ...string) (DataFrame, error) { return df.DropNaWithThreshold(ctx, 1, subset...) } func (df *dataFrameImpl) DropNaWithThreshold(ctx context.Context, thresh int32, subset ...string) (DataFrame, error) { rel := &proto.Relation{ Common: &proto.RelationCommon{ PlanId: newPlanId(), }, RelType: &proto.Relation_DropNa{ DropNa: &proto.NADrop{ Input: df.relation, MinNonNulls: &thresh, Cols: subset, }, }, } return NewDataFrame(df.session, rel), nil } func (df *dataFrameImpl) Na() DataFrameNaFunctions { return &dataFrameNaFunctionsImpl{dataFrame: df} } func (df *dataFrameImpl) All(ctx context.Context) iter.Seq2[types.Row, error] { data, err := df.Collect(ctx) return func(yield func(types.Row, error) bool) { if err != nil { yield(nil, err) return } for _, row := range data { if !yield(row, nil) { break } } } } func (df *dataFrameImpl) PrintSchema(ctx context.Context) error { schema, err := df.Schema(ctx) if err != nil { return err } fmt.Print(schema.TreeString()) return nil } ================================================ FILE: spark/sql/dataframe_test.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. 
See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sql import ( "context" "testing" proto "github.com/apache/spark-connect-go/internal/generated" "github.com/apache/spark-connect-go/spark/sql/functions" "github.com/stretchr/testify/assert" ) func TestDataFrameImpl_GroupBy(t *testing.T) { ctx := context.Background() rel := &proto.Relation{ RelType: &proto.Relation_Range{ Range: &proto.Range{ End: 10, Step: 1, }, }, } df := NewDataFrame(nil, rel) gd := df.GroupBy(functions.Col("id")) assert.NotNil(t, gd) assert.Equal(t, gd.groupType, "groupby") df, err := gd.Agg(ctx, functions.Count(functions.Int64Lit(1))) assert.Nil(t, err) impl := df.(*dataFrameImpl) assert.NotNil(t, impl) assert.IsType(t, impl.relation.RelType, &proto.Relation_Aggregate{}) } ================================================ FILE: spark/sql/dataframenafunctions.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sql import ( "context" "github.com/apache/spark-connect-go/spark/sql/types" ) type DataFrameNaFunctions interface { Drop(ctx context.Context, cols ...string) (DataFrame, error) DropAll(ctx context.Context, cols ...string) (DataFrame, error) DropWithThreshold(ctx context.Context, threshold int32, cols ...string) (DataFrame, error) Fill(ctx context.Context, value types.PrimitiveTypeLiteral, cols ...string) (DataFrame, error) FillWithValues(ctx context.Context, values map[string]types.PrimitiveTypeLiteral) (DataFrame, error) Replace(ctx context.Context, toReplace []types.PrimitiveTypeLiteral, values []types.PrimitiveTypeLiteral, cols ...string) (DataFrame, error) } type dataFrameNaFunctionsImpl struct { dataFrame DataFrame } func (d *dataFrameNaFunctionsImpl) Drop(ctx context.Context, cols ...string) (DataFrame, error) { return d.dataFrame.DropNa(ctx, cols...) } func (d *dataFrameNaFunctionsImpl) DropAll(ctx context.Context, cols ...string) (DataFrame, error) { return d.dataFrame.DropNaAll(ctx, cols...) } func (d *dataFrameNaFunctionsImpl) DropWithThreshold(ctx context.Context, threshold int32, cols ...string) (DataFrame, error) { return d.dataFrame.DropNaWithThreshold(ctx, threshold, cols...) } func (d *dataFrameNaFunctionsImpl) Fill(ctx context.Context, value types.PrimitiveTypeLiteral, cols ...string) (DataFrame, error) { return d.dataFrame.FillNa(ctx, value, cols...) 
} func (d *dataFrameNaFunctionsImpl) FillWithValues(ctx context.Context, values map[string]types.PrimitiveTypeLiteral, ) (DataFrame, error) { return d.dataFrame.FillNaWithValues(ctx, values) } func (d *dataFrameNaFunctionsImpl) Replace(ctx context.Context, toReplace []types.PrimitiveTypeLiteral, values []types.PrimitiveTypeLiteral, cols ...string, ) (DataFrame, error) { return d.dataFrame.Replace(ctx, toReplace, values, cols...) } ================================================ FILE: spark/sql/dataframereader.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sql // DataFrameReader supports reading data from storage and returning a data frame. // TODO needs to implement other methods like Option(), Schema(), and also "strong typed" // reading (e.g. Parquet(), Orc(), Csv(), etc. type DataFrameReader interface { // Format specifies data format (data source type) for the underlying data, e.g. parquet. Format(source string) DataFrameReader // Load reads the underlying data and returns a data frame. Load(path string) (DataFrame, error) // Reads a table from the underlying data source. 
Table(name string) (DataFrame, error) Option(key, value string) DataFrameReader } // dataFrameReaderImpl is an implementation of DataFrameReader interface. type dataFrameReaderImpl struct { sparkSession *sparkSessionImpl formatSource string options map[string]string } // NewDataframeReader creates a new DataFrameReader func NewDataframeReader(session *sparkSessionImpl) DataFrameReader { return &dataFrameReaderImpl{ sparkSession: session, } } func (w *dataFrameReaderImpl) Table(name string) (DataFrame, error) { return NewDataFrame(w.sparkSession, newReadTableRelation(name)), nil } func (w *dataFrameReaderImpl) Format(source string) DataFrameReader { w.formatSource = source return w } func (w *dataFrameReaderImpl) Load(path string) (DataFrame, error) { var format string if w.formatSource != "" { format = w.formatSource } if w.options == nil { return NewDataFrame(w.sparkSession, newReadWithFormatAndPath(path, format)), nil } return NewDataFrame(w.sparkSession, newReadWithFormatAndPathAndOptions(path, format, w.options)), nil } func (w *dataFrameReaderImpl) Option(key, value string) DataFrameReader { if w.options == nil { w.options = make(map[string]string) } w.options[key] = value return w } ================================================ FILE: spark/sql/dataframereader_test.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sql import ( "testing" "github.com/stretchr/testify/assert" ) func TestLoadCreatesADataFrame(t *testing.T) { reader := NewDataframeReader(nil) source := "source" path := "path" reader.Format(source) frame, err := reader.Load(path) assert.NoError(t, err) assert.NotNil(t, frame) } func TestRelationContainsPathAndFormat(t *testing.T) { formatSource := "source" path := "path" relation := newReadWithFormatAndPath(path, formatSource) assert.NotNil(t, relation) assert.Equal(t, &formatSource, relation.GetRead().GetDataSource().Format) assert.Equal(t, path, relation.GetRead().GetDataSource().Paths[0]) } func TestRelationContainsPathAndFormatAndOptions(t *testing.T) { formatSource := "source" path := "path" options := map[string]string{"key": "value"} relation := newReadWithFormatAndPathAndOptions(path, formatSource, options) assert.NotNil(t, relation) assert.Equal(t, &formatSource, relation.GetRead().GetDataSource().Format) assert.Equal(t, path, relation.GetRead().GetDataSource().Paths[0]) for i, v := range options { assert.Equal(t, v, relation.GetRead().GetDataSource().Options[i]) } } ================================================ FILE: spark/sql/dataframestatfunctions.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. 
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sql

import "context"

// DataFrameStatFunctions groups the statistic helpers of a DataFrame.
// Every method delegates to the equivalent method on the wrapped DataFrame.
type DataFrameStatFunctions interface {
	// ApproxQuantile computes approximate quantiles of numerical columns.
	ApproxQuantile(ctx context.Context, probabilities []float64, relativeError float64, cols ...string) ([][]float64, error)
	// Cov computes the sample covariance of two columns.
	Cov(ctx context.Context, col1, col2 string) (float64, error)
	// Corr computes the correlation of two columns.
	Corr(ctx context.Context, col1, col2 string) (float64, error)
	// CorrWithMethod computes the correlation of two columns with the given method.
	CorrWithMethod(ctx context.Context, col1, col2 string, method string) (float64, error)
	// CrossTab computes a pair-wise frequency table of two columns.
	CrossTab(ctx context.Context, col1, col2 string) DataFrame
	// FreqItems finds frequent items for the given columns.
	FreqItems(ctx context.Context, cols ...string) DataFrame
	// FreqItemsWithSupport finds frequent items for the given columns with a
	// minimum support level.
	FreqItemsWithSupport(ctx context.Context, support float64, cols ...string) DataFrame
	// Sample returns a randomly sampled subset of rows with the given fraction.
	Sample(ctx context.Context, fraction float64) (DataFrame, error)
	// SampleWithReplacement samples rows, optionally with replacement.
	SampleWithReplacement(ctx context.Context, withReplacement bool, fraction float64) (DataFrame, error)
	// SampleWithSeed samples rows using a fixed random seed.
	SampleWithSeed(ctx context.Context, fraction float64, seed int64) (DataFrame, error)
	// SampleWithReplacementAndSeed samples rows with replacement and a fixed seed.
	SampleWithReplacementAndSeed(ctx context.Context, withReplacement bool, fraction float64, seed int64) (DataFrame, error)
}

// dataFrameStatFunctionsImpl implements DataFrameStatFunctions by forwarding
// each call to the wrapped DataFrame.
type dataFrameStatFunctionsImpl struct {
	df DataFrame
}

// Sample delegates to DataFrame.Sample.
func (d *dataFrameStatFunctionsImpl) Sample(ctx context.Context, fraction float64) (DataFrame, error) {
	return d.df.Sample(ctx, fraction)
}

// SampleWithReplacement delegates to DataFrame.SampleWithReplacement.
func (d *dataFrameStatFunctionsImpl) SampleWithReplacement(ctx context.Context,
	withReplacement bool, fraction float64,
) (DataFrame, error) {
	return d.df.SampleWithReplacement(ctx, withReplacement, fraction)
}

// SampleWithSeed delegates to DataFrame.SampleWithSeed.
func (d *dataFrameStatFunctionsImpl) SampleWithSeed(ctx context.Context, fraction float64, seed int64) (DataFrame, error) {
	return d.df.SampleWithSeed(ctx, fraction, seed)
}

// SampleWithReplacementAndSeed delegates to DataFrame.SampleWithReplacementAndSeed.
func (d *dataFrameStatFunctionsImpl) SampleWithReplacementAndSeed(ctx context.Context,
	withReplacement bool, fraction float64, seed int64,
) (DataFrame, error) {
	return d.df.SampleWithReplacementAndSeed(ctx, withReplacement, fraction, seed)
}

// ApproxQuantile delegates to DataFrame.ApproxQuantile.
func (d *dataFrameStatFunctionsImpl) ApproxQuantile(ctx context.Context, probabilities []float64,
	relativeError float64, cols ...string,
) ([][]float64, error) {
	return d.df.ApproxQuantile(ctx, probabilities, relativeError, cols...)
}

// Cov delegates to DataFrame.Cov.
func (d *dataFrameStatFunctionsImpl) Cov(ctx context.Context, col1, col2 string) (float64, error) {
	return d.df.Cov(ctx, col1, col2)
}

// Corr delegates to DataFrame.Corr.
func (d *dataFrameStatFunctionsImpl) Corr(ctx context.Context, col1, col2 string) (float64, error) {
	return d.df.Corr(ctx, col1, col2)
}

// CorrWithMethod delegates to DataFrame.CorrWithMethod.
func (d *dataFrameStatFunctionsImpl) CorrWithMethod(ctx context.Context, col1, col2 string, method string) (float64, error) {
	return d.df.CorrWithMethod(ctx, col1, col2, method)
}

// CrossTab delegates to DataFrame.CrossTab.
func (d *dataFrameStatFunctionsImpl) CrossTab(ctx context.Context, col1, col2 string) DataFrame {
	return d.df.CrossTab(ctx, col1, col2)
}

// FreqItems delegates to DataFrame.FreqItems.
func (d *dataFrameStatFunctionsImpl) FreqItems(ctx context.Context, cols ...string) DataFrame {
	return d.df.FreqItems(ctx, cols...)
}

// FreqItemsWithSupport delegates to DataFrame.FreqItemsWithSupport.
func (d *dataFrameStatFunctionsImpl) FreqItemsWithSupport(ctx context.Context, support float64, cols ...string) DataFrame {
	return d.df.FreqItemsWithSupport(ctx, support, cols...)
}

================================================
FILE: spark/sql/dataframewriter.go
================================================

//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sql import ( "context" "fmt" "strings" proto "github.com/apache/spark-connect-go/internal/generated" "github.com/apache/spark-connect-go/spark/sparkerrors" ) // DataFrameWriter supports writing data frame to storage. type DataFrameWriter interface { // Mode specifies saving mode for the data, e.g. Append, Overwrite, ErrorIfExists. Mode(saveMode string) DataFrameWriter // Format specifies data format (data source type) for the underlying data, e.g. parquet. Format(source string) DataFrameWriter // Save writes data frame to the given path. Save(ctx context.Context, path string) error } func newDataFrameWriter(sparkExecutor *sparkSessionImpl, relation *proto.Relation) DataFrameWriter { return &dataFrameWriterImpl{ sparkExecutor: sparkExecutor, relation: relation, } } // dataFrameWriterImpl is an implementation of DataFrameWriter interface. 
type dataFrameWriterImpl struct { sparkExecutor *sparkSessionImpl relation *proto.Relation saveMode string formatSource string } func (w *dataFrameWriterImpl) Mode(saveMode string) DataFrameWriter { w.saveMode = saveMode return w } func (w *dataFrameWriterImpl) Format(source string) DataFrameWriter { w.formatSource = source return w } func (w *dataFrameWriterImpl) Save(ctx context.Context, path string) error { saveMode, err := getSaveMode(w.saveMode) if err != nil { return err } var source *string if w.formatSource != "" { source = &w.formatSource } plan := &proto.Plan{ OpType: &proto.Plan_Command{ Command: &proto.Command{ CommandType: &proto.Command_WriteOperation{ WriteOperation: &proto.WriteOperation{ Input: w.relation, Mode: saveMode, Source: source, SaveType: &proto.WriteOperation_Path{ Path: path, }, }, }, }, }, } responseClient, err := w.sparkExecutor.client.ExecutePlan(ctx, plan) if err != nil { return err } _, _, err = responseClient.ToTable() return err } func getSaveMode(mode string) (proto.WriteOperation_SaveMode, error) { if mode == "" { return proto.WriteOperation_SAVE_MODE_UNSPECIFIED, nil } else if strings.EqualFold(mode, "Append") { return proto.WriteOperation_SAVE_MODE_APPEND, nil } else if strings.EqualFold(mode, "Overwrite") { return proto.WriteOperation_SAVE_MODE_OVERWRITE, nil } else if strings.EqualFold(mode, "ErrorIfExists") { return proto.WriteOperation_SAVE_MODE_ERROR_IF_EXISTS, nil } else if strings.EqualFold(mode, "Ignore") { return proto.WriteOperation_SAVE_MODE_IGNORE, nil } else { return 0, sparkerrors.WithType(fmt.Errorf("unsupported save mode: %s", mode), sparkerrors.InvalidInputError) } } ================================================ FILE: spark/sql/dataframewriter_test.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. 
// See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sql

import (
	"context"
	"testing"

	"github.com/apache/spark-connect-go/spark/client"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/mocks"
	"github.com/stretchr/testify/assert"
)

// TestGetSaveMode verifies the mapping from (case-insensitive) mode names to
// the protobuf save-mode enum, including the empty and invalid cases.
func TestGetSaveMode(t *testing.T) {
	mode, err := getSaveMode("")
	assert.Nil(t, err)
	assert.Equal(t, proto.WriteOperation_SAVE_MODE_UNSPECIFIED, mode)

	mode, err = getSaveMode("append")
	assert.Nil(t, err)
	assert.Equal(t, proto.WriteOperation_SAVE_MODE_APPEND, mode)

	mode, err = getSaveMode("Overwrite")
	assert.Nil(t, err)
	assert.Equal(t, proto.WriteOperation_SAVE_MODE_OVERWRITE, mode)

	mode, err = getSaveMode("ErrorIfExists")
	assert.Nil(t, err)
	assert.Equal(t, proto.WriteOperation_SAVE_MODE_ERROR_IF_EXISTS, mode)

	mode, err = getSaveMode("IGNORE")
	assert.Nil(t, err)
	assert.Equal(t, proto.WriteOperation_SAVE_MODE_IGNORE, mode)

	mode, err = getSaveMode("XYZ")
	assert.NotNil(t, err)
	assert.Equal(t, proto.WriteOperation_SAVE_MODE_UNSPECIFIED, mode)
}

// TestSaveExecutesWriteOperationUntilEOF verifies that Save drains the mocked
// response stream until the server signals completion.
func TestSaveExecutesWriteOperationUntilEOF(t *testing.T) {
	relation := &proto.Relation{}
	executor := client.NewTestConnectClientFromResponses(mocks.MockSessionId,
		&mocks.ExecutePlanResponseDone, &mocks.ExecutePlanResponseEOF)
	session := &sparkSessionImpl{
		client:    executor,
		sessionId: mocks.MockSessionId,
	}
	ctx := context.Background()
	path := "path"

	writer := newDataFrameWriter(session, relation)
	writer.Format("format")
	writer.Mode("append")
	err := writer.Save(ctx, path)
	assert.NoError(t, err)
}

// TestSaveFailsIfAnotherErrorHappensWhenReadingStream verifies that an error
// produced while reading the response stream surfaces from Save.
func TestSaveFailsIfAnotherErrorHappensWhenReadingStream(t *testing.T) {
	relation := &proto.Relation{}
	executor := client.NewTestConnectClientFromResponses(mocks.MockSessionId,
		&mocks.MockResponse{
			Err: assert.AnError,
		})
	session := &sparkSessionImpl{
		client:    executor,
		sessionId: mocks.MockSessionId,
	}
	ctx := context.Background()
	path := "path"

	writer := newDataFrameWriter(session, relation)
	writer.Format("format")
	writer.Mode("append")
	err := writer.Save(ctx, path)
	assert.Error(t, err)
}

// TestSaveFailsIfAnotherErrorHappensWhenExecuting verifies that an immediate
// ExecutePlan failure surfaces from Save.
func TestSaveFailsIfAnotherErrorHappensWhenExecuting(t *testing.T) {
	relation := &proto.Relation{}
	executor := client.NewTestConnectClientWithImmediateError(mocks.MockSessionId, assert.AnError)
	session := &sparkSessionImpl{
		client: executor,
	}
	ctx := context.Background()
	path := "path"

	writer := newDataFrameWriter(session, relation)
	writer.Format("format")
	writer.Mode("append")
	err := writer.Save(ctx, path)
	assert.Error(t, err)
}

================================================
FILE: spark/sql/executeplanclient.go
================================================

//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // limitations under the License. package sql ================================================ FILE: spark/sql/functions/buiitins.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package functions import ( "github.com/apache/spark-connect-go/spark/sql/column" "github.com/apache/spark-connect-go/spark/sql/types" ) func Expr(expr string) column.Column { return column.NewColumn(column.NewSQLExpression(expr)) } func Col(name string) column.Column { return column.NewColumn(column.NewColumnReference(name)) } func Lit(value types.LiteralType) column.Column { return column.NewColumn(column.NewLiteral(value)) } func Int8Lit(value int8) column.Column { return Lit(types.Int8(value)) } func Int16Lit(value int16) column.Column { return Lit(types.Int16(value)) } func Int32Lit(value int32) column.Column { return Lit(types.Int32(value)) } func Int64Lit(value int64) column.Column { return Lit(types.Int64(value)) } func Float32Lit(value float32) column.Column { return Lit(types.Float32(value)) } func Float64Lit(value float64) column.Column { return Lit(types.Float64(value)) } func StringLit(value string) column.Column { return Lit(types.String(value)) } func BoolLit(value bool) column.Column { return Lit(types.Boolean(value)) } func BinaryLit(value []byte) column.Column { return Lit(types.Binary(value)) } func IntLit(value int) column.Column { return Lit(types.Int(value)) } ================================================ FILE: spark/sql/functions/generated.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package functions import "github.com/apache/spark-connect-go/spark/sql/column" // BitwiseNOT - Computes bitwise not. // // BitwiseNOT is the Golang equivalent of bitwiseNOT: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitwiseNOT(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bitwiseNOT", col)) } // BitwiseNot - Computes bitwise not. // // BitwiseNot is the Golang equivalent of bitwise_not: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitwiseNot(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bitwise_not", col)) } // BitCount - Returns the number of bits that are set in the argument expr as an unsigned 64-bit integer, // or NULL if the argument is NULL. // // BitCount is the Golang equivalent of bit_count: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitCount(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bit_count", col)) } // BitGet - Returns the value of the bit (0 or 1) at the specified position. // The positions are numbered from right to left, starting at zero. // The position argument cannot be negative. 
// // BitGet is the Golang equivalent of bit_get: (col: 'ColumnOrName', pos: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitGet(col column.Column, pos column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bit_get", col, pos)) } // Getbit - Returns the value of the bit (0 or 1) at the specified position. // The positions are numbered from right to left, starting at zero. // The position argument cannot be negative. // // Getbit is the Golang equivalent of getbit: (col: 'ColumnOrName', pos: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Getbit(col column.Column, pos column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("getbit", col, pos)) } // TODO: broadcast: (df: 'DataFrame') -> 'DataFrame' // Coalesce - Returns the first column that is not null. // // Coalesce is the Golang equivalent of coalesce: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Coalesce(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("coalesce", vals...)) } // Greatest - Returns the greatest value of the list of column names, skipping null values. // This function takes at least 2 parameters. It will return null if all parameters are null. // // Greatest is the Golang equivalent of greatest: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Greatest(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("greatest", vals...)) } // InputFileName - Creates a string column for the file name of the current Spark task. 
// // InputFileName is the Golang equivalent of input_file_name: () -> pyspark.sql.connect.column.Column func InputFileName() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("input_file_name")) } // Least - Returns the least value of the list of column names, skipping null values. // This function takes at least 2 parameters. It will return null if all parameters are null. // // Least is the Golang equivalent of least: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Least(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("least", vals...)) } // Isnan - An expression that returns true if the column is NaN. // // Isnan is the Golang equivalent of isnan: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Isnan(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("isnan", col)) } // Isnull - An expression that returns true if the column is null. // // Isnull is the Golang equivalent of isnull: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Isnull(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("isnull", col)) } // MonotonicallyIncreasingId - A column that generates monotonically increasing 64-bit integers. // // The generated ID is guaranteed to be monotonically increasing and unique, but not consecutive. // The current implementation puts the partition ID in the upper 31 bits, and the record number // within each partition in the lower 33 bits. The assumption is that the data frame has // less than 1 billion partitions, and each partition has less than 8 billion records. 
// // MonotonicallyIncreasingId is the Golang equivalent of monotonically_increasing_id: () -> pyspark.sql.connect.column.Column func MonotonicallyIncreasingId() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("monotonically_increasing_id")) } // Nanvl - Returns col1 if it is not NaN, or col2 if col1 is NaN. // // Both inputs should be floating point columns (:class:`DoubleType` or :class:`FloatType`). // // Nanvl is the Golang equivalent of nanvl: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Nanvl(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("nanvl", col1, col2)) } // Rand - Generates a random column with independent and identically distributed (i.i.d.) samples // uniformly distributed in [0.0, 1.0). // // Rand is the Golang equivalent of rand: (seed: Optional[int] = None) -> pyspark.sql.connect.column.Column func Rand(seed int64) column.Column { lit_seed := Int64Lit(seed) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("rand", lit_seed)) } // Randn - Generates a column with independent and identically distributed (i.i.d.) samples from // the standard normal distribution. // // Randn is the Golang equivalent of randn: (seed: Optional[int] = None) -> pyspark.sql.connect.column.Column func Randn(seed int64) column.Column { lit_seed := Int64Lit(seed) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("randn", lit_seed)) } // SparkPartitionId - A column for partition ID. 
//
// SparkPartitionId is the Golang equivalent of spark_partition_id: () -> pyspark.sql.connect.column.Column
func SparkPartitionId() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("spark_partition_id"))
}

// TODO: when: (condition: pyspark.sql.connect.column.Column, value: Any) -> pyspark.sql.connect.column.Column

// Asc - Returns a sort expression based on the ascending order of the given column name.
//
// Asc is the Golang equivalent of asc: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Asc(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("asc", col))
}

// AscNullsFirst - Returns a sort expression based on the ascending order of the given
// column name, and null values return before non-null values.
//
// AscNullsFirst is the Golang equivalent of asc_nulls_first: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func AscNullsFirst(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("asc_nulls_first", col))
}

// AscNullsLast - Returns a sort expression based on the ascending order of the given
// column name, and null values appear after non-null values.
//
// AscNullsLast is the Golang equivalent of asc_nulls_last: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func AscNullsLast(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("asc_nulls_last", col))
}

// Desc - Returns a sort expression based on the descending order of the given column name.
//
// Desc is the Golang equivalent of desc: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Desc(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("desc", col))
}

// DescNullsFirst - Returns a sort expression based on the descending order of the given
// column name, and null values appear before non-null values.
//
// DescNullsFirst is the Golang equivalent of desc_nulls_first: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func DescNullsFirst(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("desc_nulls_first", col))
}

// DescNullsLast - Returns a sort expression based on the descending order of the given
// column name, and null values appear after non-null values.
//
// DescNullsLast is the Golang equivalent of desc_nulls_last: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func DescNullsLast(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("desc_nulls_last", col))
}

// Abs - Computes the absolute value.
//
// Abs is the Golang equivalent of abs: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Abs(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("abs", col))
}

// Acos - Computes inverse cosine of the input column.
//
// Acos is the Golang equivalent of acos: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Acos(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("acos", col))
}

// Acosh - Computes inverse hyperbolic cosine of the input column.
//
// Acosh is the Golang equivalent of acosh: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Acosh(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("acosh", col))
}

// Asin - Computes inverse sine of the input column.
//
// Asin is the Golang equivalent of asin: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Asin(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("asin", col))
}

// Asinh - Computes inverse hyperbolic sine of the input column.
//
// Asinh is the Golang equivalent of asinh: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Asinh(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("asinh", col))
}

// Atan - Compute inverse tangent of the input column.
//
// Atan is the Golang equivalent of atan: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Atan(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("atan", col))
}

// Atan2 -
//
// Atan2 is the Golang equivalent of atan2: (col1: Union[ForwardRef('ColumnOrName'), float], col2: Union[ForwardRef('ColumnOrName'), float]) -> pyspark.sql.connect.column.Column
func Atan2(col1 column.Column, col2 column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("atan2", col1, col2))
}

// Atanh - Computes inverse hyperbolic tangent of the input column.
//
// Atanh is the Golang equivalent of atanh: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Atanh(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("atanh", col))
}

// Bin - Returns the string representation of the binary value of the given column.
//
// Bin is the Golang equivalent of bin: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Bin(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bin", col))
}

// Bround - Round the given value to `scale` decimal places using HALF_EVEN rounding mode if `scale` >= 0
// or at integral part when `scale` < 0.
//
// Bround is the Golang equivalent of bround: (col: 'ColumnOrName', scale: int = 0) -> pyspark.sql.connect.column.Column
func Bround(col column.Column, scale int64) column.Column {
	lit_scale := Int64Lit(scale)
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bround", col, lit_scale))
}

// Cbrt - Computes the cube-root of the given value.
//
// Cbrt is the Golang equivalent of cbrt: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Cbrt(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("cbrt", col))
}

// Ceil - Computes the ceiling of the given value.
//
// Ceil is the Golang equivalent of ceil: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Ceil(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ceil", col))
}

// Ceiling - Computes the ceiling of the given value.
//
// Ceiling is the Golang equivalent of ceiling: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Ceiling(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ceiling", col))
}

// Conv - Convert a number in a string column from one base to another.
//
// Conv is the Golang equivalent of conv: (col: 'ColumnOrName', fromBase: int, toBase: int) -> pyspark.sql.connect.column.Column
func Conv(col column.Column, fromBase int64, toBase int64) column.Column {
	lit_fromBase := Int64Lit(fromBase)
	lit_toBase := Int64Lit(toBase)
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("conv", col, lit_fromBase, lit_toBase))
}

// Cos - Computes cosine of the input column.
//
// Cos is the Golang equivalent of cos: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Cos(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("cos", col))
}

// Cosh - Computes hyperbolic cosine of the input column.
//
// Cosh is the Golang equivalent of cosh: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Cosh(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("cosh", col))
}

// Cot - Computes cotangent of the input column.
//
// Cot is the Golang equivalent of cot: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Cot(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("cot", col))
}

// Csc - Computes cosecant of the input column.
//
// Csc is the Golang equivalent of csc: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Csc(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("csc", col))
}

// Degrees - Converts an angle measured in radians to an approximately equivalent angle
// measured in degrees.
//
// Degrees is the Golang equivalent of degrees: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Degrees(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("degrees", col))
}

// E - Returns Euler's number.
//
// E is the Golang equivalent of e: () -> pyspark.sql.connect.column.Column
func E() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("e"))
}

// Exp - Computes the exponential of the given value.
//
// Exp is the Golang equivalent of exp: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Exp(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("exp", col))
}

// Expm1 - Computes the exponential of the given value minus one.
//
// Expm1 is the Golang equivalent of expm1: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Expm1(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("expm1", col))
}

// Factorial - Computes the factorial of the given value.
//
// Factorial is the Golang equivalent of factorial: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Factorial(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("factorial", col))
}

// Floor - Computes the floor of the given value.
//
// Floor is the Golang equivalent of floor: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Floor(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("floor", col))
}

// Hex - Computes hex value of the given column, which could be :class:`pyspark.sql.types.StringType`,
// :class:`pyspark.sql.types.BinaryType`, :class:`pyspark.sql.types.IntegerType` or
// :class:`pyspark.sql.types.LongType`.
//
// Hex is the Golang equivalent of hex: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Hex(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("hex", col))
}

// Hypot - Computes “sqrt(a^2 + b^2)“ without intermediate overflow or underflow.
//
// Hypot is the Golang equivalent of hypot: (col1: Union[ForwardRef('ColumnOrName'), float], col2: Union[ForwardRef('ColumnOrName'), float]) -> pyspark.sql.connect.column.Column
func Hypot(col1 column.Column, col2 column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("hypot", col1, col2))
}

// Log - Returns the first argument-based logarithm of the second argument.
//
// If there is only one argument, then this takes the natural logarithm of the argument.
//
// Log is the Golang equivalent of log: (arg1: Union[ForwardRef('ColumnOrName'), float], arg2: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
//
// NOTE(review): unlike the PySpark signature, arg2 is required here, so the
// single-argument natural-log form is not expressible via this function — use Ln.
func Log(arg1 column.Column, arg2 column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("log", arg1, arg2))
}

// Log10 - Computes the logarithm of the given value in Base 10.
//
// Log10 is the Golang equivalent of log10: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Log10(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("log10", col))
}

// Log1p - Computes the natural logarithm of the "given value plus one".
//
// Log1p is the Golang equivalent of log1p: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Log1p(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("log1p", col))
}

// Ln - Returns the natural logarithm of the argument.
//
// Ln is the Golang equivalent of ln: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Ln(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ln", col))
}

// Log2 - Returns the base-2 logarithm of the argument.
//
// Log2 is the Golang equivalent of log2: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Log2(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("log2", col))
}

// Negative - Returns the negative value.
//
// Negative is the Golang equivalent of negative: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Negative(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("negative", col))
}

// Negate - Returns the negative value.
//
// Negate is the Golang equivalent of negate: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Negate(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("negate", col))
}

// Pi - Returns Pi.
//
// Pi is the Golang equivalent of pi: () -> pyspark.sql.connect.column.Column
func Pi() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("pi"))
}

// Positive - Returns the value.
//
// Positive is the Golang equivalent of positive: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Positive(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("positive", col))
}

// Pmod - Returns the positive value of dividend mod divisor.
// // Pmod is the Golang equivalent of pmod: (dividend: Union[ForwardRef('ColumnOrName'), float], divisor: Union[ForwardRef('ColumnOrName'), float]) -> pyspark.sql.connect.column.Column func Pmod(dividend column.Column, divisor column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("pmod", dividend, divisor)) } // WidthBucket - Returns the bucket number into which the value of this expression would fall // after being evaluated. Note that input arguments must follow conditions listed below; // otherwise, the method will return null. // // WidthBucket is the Golang equivalent of width_bucket: (v: 'ColumnOrName', min: 'ColumnOrName', max: 'ColumnOrName', numBucket: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func WidthBucket(v column.Column, min column.Column, max column.Column, numBucket column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("width_bucket", v, min, max, numBucket)) } // Pow - Returns the value of the first argument raised to the power of the second argument. // // Pow is the Golang equivalent of pow: (col1: Union[ForwardRef('ColumnOrName'), float], col2: Union[ForwardRef('ColumnOrName'), float]) -> pyspark.sql.connect.column.Column func Pow(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("pow", col1, col2)) } // Radians - Converts an angle measured in degrees to an approximately equivalent angle // measured in radians. // // Radians is the Golang equivalent of radians: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Radians(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("radians", col)) } // Rint - Returns the double value that is closest in value to the argument and // is equal to a mathematical integer. 
// // Rint is the Golang equivalent of rint: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Rint(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("rint", col)) } // Round - Round the given value to `scale` decimal places using HALF_UP rounding mode if `scale` >= 0 // or at integral part when `scale` < 0. // // Round is the Golang equivalent of round: (col: 'ColumnOrName', scale: int = 0) -> pyspark.sql.connect.column.Column func Round(col column.Column, scale int64) column.Column { lit_scale := Int64Lit(scale) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("round", col, lit_scale)) } // Sec - Computes secant of the input column. // // Sec is the Golang equivalent of sec: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Sec(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sec", col)) } // ShiftLeft - Shift the given value numBits left. // // ShiftLeft is the Golang equivalent of shiftLeft: (col: 'ColumnOrName', numBits: int) -> pyspark.sql.connect.column.Column func ShiftLeft(col column.Column, numBits int64) column.Column { lit_numBits := Int64Lit(numBits) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("shiftLeft", col, lit_numBits)) } // Shiftleft - Shift the given value numBits left. // // Shiftleft is the Golang equivalent of shiftleft: (col: 'ColumnOrName', numBits: int) -> pyspark.sql.connect.column.Column func Shiftleft(col column.Column, numBits int64) column.Column { lit_numBits := Int64Lit(numBits) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("shiftleft", col, lit_numBits)) } // ShiftRight - (Signed) shift the given value numBits right. 
// // ShiftRight is the Golang equivalent of shiftRight: (col: 'ColumnOrName', numBits: int) -> pyspark.sql.connect.column.Column func ShiftRight(col column.Column, numBits int64) column.Column { lit_numBits := Int64Lit(numBits) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("shiftRight", col, lit_numBits)) } // Shiftright - (Signed) shift the given value numBits right. // // Shiftright is the Golang equivalent of shiftright: (col: 'ColumnOrName', numBits: int) -> pyspark.sql.connect.column.Column func Shiftright(col column.Column, numBits int64) column.Column { lit_numBits := Int64Lit(numBits) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("shiftright", col, lit_numBits)) } // ShiftRightUnsigned - Unsigned shift the given value numBits right. // // ShiftRightUnsigned is the Golang equivalent of shiftRightUnsigned: (col: 'ColumnOrName', numBits: int) -> pyspark.sql.connect.column.Column func ShiftRightUnsigned(col column.Column, numBits int64) column.Column { lit_numBits := Int64Lit(numBits) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("shiftRightUnsigned", col, lit_numBits)) } // Shiftrightunsigned - Unsigned shift the given value numBits right. // // Shiftrightunsigned is the Golang equivalent of shiftrightunsigned: (col: 'ColumnOrName', numBits: int) -> pyspark.sql.connect.column.Column func Shiftrightunsigned(col column.Column, numBits int64) column.Column { lit_numBits := Int64Lit(numBits) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("shiftrightunsigned", col, lit_numBits)) } // Signum - Computes the signum of the given value. // // Signum is the Golang equivalent of signum: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Signum(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("signum", col)) } // Sign - Computes the signum of the given value. 
// // Sign is the Golang equivalent of sign: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Sign(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sign", col)) } // Sin - Computes sine of the input column. // // Sin is the Golang equivalent of sin: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Sin(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sin", col)) } // Sinh - Computes hyperbolic sine of the input column. // // Sinh is the Golang equivalent of sinh: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Sinh(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sinh", col)) } // Sqrt - Computes the square root of the specified float value. // // Sqrt is the Golang equivalent of sqrt: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Sqrt(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sqrt", col)) } // TryAdd - Returns the sum of `left`and `right` and the result is null on overflow. // The acceptable input types are the same with the `+` operator. // // TryAdd is the Golang equivalent of try_add: (left: 'ColumnOrName', right: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TryAdd(left column.Column, right column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_add", left, right)) } // TryAvg - Returns the mean calculated from values of a group and the result is null on overflow. // // TryAvg is the Golang equivalent of try_avg: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TryAvg(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_avg", col)) } // TryDivide - Returns `dividend`/`divisor`. It always performs floating point division. Its result is // always null if `divisor` is 0. 
// // TryDivide is the Golang equivalent of try_divide: (left: 'ColumnOrName', right: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TryDivide(left column.Column, right column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_divide", left, right)) } // TryMultiply - Returns `left`*`right` and the result is null on overflow. The acceptable input types are the // same with the `*` operator. // // TryMultiply is the Golang equivalent of try_multiply: (left: 'ColumnOrName', right: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TryMultiply(left column.Column, right column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_multiply", left, right)) } // TrySubtract - Returns `left`-`right` and the result is null on overflow. The acceptable input types are the // same with the `-` operator. // // TrySubtract is the Golang equivalent of try_subtract: (left: 'ColumnOrName', right: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TrySubtract(left column.Column, right column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_subtract", left, right)) } // TrySum - Returns the sum calculated from values of a group and the result is null on overflow. // // TrySum is the Golang equivalent of try_sum: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TrySum(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_sum", col)) } // Tan - Computes tangent of the input column. // // Tan is the Golang equivalent of tan: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Tan(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("tan", col)) } // Tanh - Computes hyperbolic tangent of the input column. 
// // Tanh is the Golang equivalent of tanh: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Tanh(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("tanh", col)) } // ToDegrees - // // ToDegrees is the Golang equivalent of toDegrees: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ToDegrees(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("toDegrees", col)) } // ToRadians - // // ToRadians is the Golang equivalent of toRadians: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ToRadians(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("toRadians", col)) } // Unhex - Inverse of hex. Interprets each pair of characters as a hexadecimal number // and converts to the byte representation of number. // // Unhex is the Golang equivalent of unhex: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Unhex(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("unhex", col)) } // ApproxCountDistinct - Aggregate function: returns a new :class:`~pyspark.sql.Column` for approximate distinct count // of column `col`. // // ApproxCountDistinct is the Golang equivalent of approx_count_distinct: (col: 'ColumnOrName', rsd: Optional[float] = None) -> pyspark.sql.connect.column.Column func ApproxCountDistinct(col column.Column, rsd float64) column.Column { lit_rsd := Float64Lit(rsd) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("approx_count_distinct", col, lit_rsd)) } // Avg - Aggregate function: returns the average of the values in a group. 
// // Avg is the Golang equivalent of avg: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Avg(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("avg", col)) } // CollectList - Aggregate function: returns a list of objects with duplicates. // // CollectList is the Golang equivalent of collect_list: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CollectList(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("collect_list", col)) } // ArrayAgg - Aggregate function: returns a list of objects with duplicates. // // ArrayAgg is the Golang equivalent of array_agg: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayAgg(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_agg", col)) } // CollectSet - Aggregate function: returns a set of objects with duplicate elements eliminated. // // CollectSet is the Golang equivalent of collect_set: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CollectSet(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("collect_set", col)) } // Corr - Returns a new :class:`~pyspark.sql.Column` for the Pearson Correlation Coefficient for // “col1“ and “col2“. // // Corr is the Golang equivalent of corr: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Corr(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("corr", col1, col2)) } // Count - Aggregate function: returns the number of items in a group. 
// // Count is the Golang equivalent of count: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Count(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("count", col)) } // CountDistinct - Returns a new :class:`Column` for distinct count of “col“ or “cols“. // // CountDistinct is the Golang equivalent of count_distinct: (col: 'ColumnOrName', *cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CountDistinct(col column.Column, cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, col) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("count_distinct", vals...)) } // CovarPop - Returns a new :class:`~pyspark.sql.Column` for the population covariance of “col1“ and // “col2“. // // CovarPop is the Golang equivalent of covar_pop: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CovarPop(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("covar_pop", col1, col2)) } // CovarSamp - Returns a new :class:`~pyspark.sql.Column` for the sample covariance of “col1“ and // “col2“. // // CovarSamp is the Golang equivalent of covar_samp: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CovarSamp(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("covar_samp", col1, col2)) } // TODO: first: (col: 'ColumnOrName', ignorenulls: bool = False) -> pyspark.sql.connect.column.Column // Grouping - Aggregate function: indicates whether a specified column in a GROUP BY list is aggregated // or not, returns 1 for aggregated or 0 for not aggregated in the result set. 
// // Grouping is the Golang equivalent of grouping: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Grouping(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("grouping", col)) } // GroupingId - Aggregate function: returns the level of grouping, equals to // // (grouping(c1) << (n-1)) + (grouping(c2) << (n-2)) + ... + grouping(cn) // // GroupingId is the Golang equivalent of grouping_id: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func GroupingId(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("grouping_id", vals...)) } // CountMinSketch - Returns a count-min sketch of a column with the given esp, confidence and seed. // The result is an array of bytes, which can be deserialized to a `CountMinSketch` before usage. // Count-min sketch is a probabilistic data structure used for cardinality estimation // using sub-linear space. // // CountMinSketch is the Golang equivalent of count_min_sketch: (col: 'ColumnOrName', eps: 'ColumnOrName', confidence: 'ColumnOrName', seed: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CountMinSketch(col column.Column, eps column.Column, confidence column.Column, seed column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("count_min_sketch", col, eps, confidence, seed)) } // Kurtosis - Aggregate function: returns the kurtosis of the values in a group. // // Kurtosis is the Golang equivalent of kurtosis: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Kurtosis(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("kurtosis", col)) } // TODO: last: (col: 'ColumnOrName', ignorenulls: bool = False) -> pyspark.sql.connect.column.Column // Max - Aggregate function: returns the maximum value of the expression in a group. 
// // Max is the Golang equivalent of max: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Max(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("max", col)) } // MaxBy - Returns the value associated with the maximum value of ord. // // MaxBy is the Golang equivalent of max_by: (col: 'ColumnOrName', ord: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MaxBy(col column.Column, ord column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("max_by", col, ord)) } // Mean - Aggregate function: returns the average of the values in a group. // An alias of :func:`avg`. // // Mean is the Golang equivalent of mean: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Mean(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("mean", col)) } // Median - Returns the median of the values in a group. // // Median is the Golang equivalent of median: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Median(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("median", col)) } // Min - Aggregate function: returns the minimum value of the expression in a group. // // Min is the Golang equivalent of min: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Min(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("min", col)) } // MinBy - Returns the value associated with the minimum value of ord. // // MinBy is the Golang equivalent of min_by: (col: 'ColumnOrName', ord: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MinBy(col column.Column, ord column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("min_by", col, ord)) } // Mode - Returns the most frequent value in a group. 
// // Mode is the Golang equivalent of mode: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Mode(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("mode", col)) } // TODO: percentile: (col: 'ColumnOrName', percentage: Union[pyspark.sql.connect.column.Column, float, List[float], Tuple[float]], frequency: Union[pyspark.sql.connect.column.Column, int] = 1) -> pyspark.sql.connect.column.Column // TODO: percentile_approx: (col: 'ColumnOrName', percentage: Union[pyspark.sql.connect.column.Column, float, List[float], Tuple[float]], accuracy: Union[pyspark.sql.connect.column.Column, float] = 10000) -> pyspark.sql.connect.column.Column // TODO: approx_percentile: (col: 'ColumnOrName', percentage: Union[pyspark.sql.connect.column.Column, float, List[float], Tuple[float]], accuracy: Union[pyspark.sql.connect.column.Column, float] = 10000) -> pyspark.sql.connect.column.Column // Product - Aggregate function: returns the product of the values in a group. // // Product is the Golang equivalent of product: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Product(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("product", col)) } // Skewness - Aggregate function: returns the skewness of the values in a group. // // Skewness is the Golang equivalent of skewness: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Skewness(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("skewness", col)) } // Stddev - Aggregate function: alias for stddev_samp. // // Stddev is the Golang equivalent of stddev: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Stddev(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("stddev", col)) } // Std - Aggregate function: alias for stddev_samp. 
// // Std is the Golang equivalent of std: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Std(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("std", col)) } // StddevSamp - Aggregate function: returns the unbiased sample standard deviation of // the expression in a group. // // StddevSamp is the Golang equivalent of stddev_samp: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func StddevSamp(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("stddev_samp", col)) } // StddevPop - Aggregate function: returns population standard deviation of // the expression in a group. // // StddevPop is the Golang equivalent of stddev_pop: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func StddevPop(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("stddev_pop", col)) } // Sum - Aggregate function: returns the sum of all values in the expression. // // Sum is the Golang equivalent of sum: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Sum(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sum", col)) } // SumDistinct - Aggregate function: returns the sum of distinct values in the expression. // // SumDistinct is the Golang equivalent of sum_distinct: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func SumDistinct(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sum_distinct", col)) } // VarPop - Aggregate function: returns the population variance of the values in a group. 
// // VarPop is the Golang equivalent of var_pop: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func VarPop(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("var_pop", col)) } // RegrAvgx - Aggregate function: returns the average of the independent variable for non-null pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. // // RegrAvgx is the Golang equivalent of regr_avgx: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrAvgx(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_avgx", y, x)) } // RegrAvgy - Aggregate function: returns the average of the dependent variable for non-null pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. // // RegrAvgy is the Golang equivalent of regr_avgy: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrAvgy(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_avgy", y, x)) } // RegrCount - Aggregate function: returns the number of non-null number pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. // // RegrCount is the Golang equivalent of regr_count: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrCount(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_count", y, x)) } // RegrIntercept - Aggregate function: returns the intercept of the univariate linear regression line // for non-null pairs in a group, where `y` is the dependent variable and // `x` is the independent variable. 
// // RegrIntercept is the Golang equivalent of regr_intercept: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrIntercept(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_intercept", y, x)) } // RegrR2 - Aggregate function: returns the coefficient of determination for non-null pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. // // RegrR2 is the Golang equivalent of regr_r2: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrR2(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_r2", y, x)) } // RegrSlope - Aggregate function: returns the slope of the linear regression line for non-null pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. // // RegrSlope is the Golang equivalent of regr_slope: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrSlope(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_slope", y, x)) } // RegrSxx - Aggregate function: returns REGR_COUNT(y, x) * VAR_POP(x) for non-null pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. // // RegrSxx is the Golang equivalent of regr_sxx: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrSxx(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_sxx", y, x)) } // RegrSxy - Aggregate function: returns REGR_COUNT(y, x) * COVAR_POP(y, x) for non-null pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. 
// // RegrSxy is the Golang equivalent of regr_sxy: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrSxy(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_sxy", y, x)) } // RegrSyy - Aggregate function: returns REGR_COUNT(y, x) * VAR_POP(y) for non-null pairs // in a group, where `y` is the dependent variable and `x` is the independent variable. // // RegrSyy is the Golang equivalent of regr_syy: (y: 'ColumnOrName', x: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegrSyy(y column.Column, x column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regr_syy", y, x)) } // VarSamp - Aggregate function: returns the unbiased sample variance of // the values in a group. // // VarSamp is the Golang equivalent of var_samp: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func VarSamp(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("var_samp", col)) } // Variance - Aggregate function: alias for var_samp // // Variance is the Golang equivalent of variance: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Variance(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("variance", col)) } // Every - Aggregate function: returns true if all values of `col` are true. // // Every is the Golang equivalent of every: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Every(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("every", col)) } // BoolAnd - Aggregate function: returns true if all values of `col` are true. 
// // BoolAnd is the Golang equivalent of bool_and: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BoolAnd(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bool_and", col)) } // Some - Aggregate function: returns true if at least one value of `col` is true. // // Some is the Golang equivalent of some: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Some(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("some", col)) } // BoolOr - Aggregate function: returns true if at least one value of `col` is true. // // BoolOr is the Golang equivalent of bool_or: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BoolOr(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bool_or", col)) } // BitAnd - Aggregate function: returns the bitwise AND of all non-null input values, or null if none. // // BitAnd is the Golang equivalent of bit_and: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitAnd(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bit_and", col)) } // BitOr - Aggregate function: returns the bitwise OR of all non-null input values, or null if none. // // BitOr is the Golang equivalent of bit_or: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitOr(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bit_or", col)) } // BitXor - Aggregate function: returns the bitwise XOR of all non-null input values, or null if none. 
// // BitXor is the Golang equivalent of bit_xor: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitXor(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bit_xor", col)) } // CumeDist - Window function: returns the cumulative distribution of values within a window partition, // i.e. the fraction of rows that are below the current row. // // CumeDist is the Golang equivalent of cume_dist: () -> pyspark.sql.connect.column.Column func CumeDist() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("cume_dist")) } // DenseRank - Window function: returns the rank of rows within a window partition, without any gaps. // // The difference between rank and dense_rank is that dense_rank leaves no gaps in ranking // sequence when there are ties. That is, if you were ranking a competition using dense_rank // and had three people tie for second place, you would say that all three were in second // place and that the next person came in third. Rank would give me sequential numbers, making // the person that came in third place (after the ties) would register as coming in fifth. // // This is equivalent to the DENSE_RANK function in SQL. 
// // DenseRank is the Golang equivalent of dense_rank: () -> pyspark.sql.connect.column.Column func DenseRank() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("dense_rank")) } // TODO: lag: (col: 'ColumnOrName', offset: int = 1, default: Optional[Any] = None) -> pyspark.sql.connect.column.Column // TODO: lead: (col: 'ColumnOrName', offset: int = 1, default: Optional[Any] = None) -> pyspark.sql.connect.column.Column // TODO: nth_value: (col: 'ColumnOrName', offset: int, ignoreNulls: Optional[bool] = None) -> pyspark.sql.connect.column.Column // TODO: any_value: (col: 'ColumnOrName', ignoreNulls: Union[bool, pyspark.sql.connect.column.Column, NoneType] = None) -> pyspark.sql.connect.column.Column // TODO: first_value: (col: 'ColumnOrName', ignoreNulls: Union[bool, pyspark.sql.connect.column.Column, NoneType] = None) -> pyspark.sql.connect.column.Column // TODO: last_value: (col: 'ColumnOrName', ignoreNulls: Union[bool, pyspark.sql.connect.column.Column, NoneType] = None) -> pyspark.sql.connect.column.Column // CountIf - Returns the number of `TRUE` values for the `col`. // // CountIf is the Golang equivalent of count_if: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CountIf(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("count_if", col)) } // HistogramNumeric - Computes a histogram on numeric 'col' using nb bins. // The return value is an array of (x,y) pairs representing the centers of the // histogram's bins. As the value of 'nb' is increased, the histogram approximation // gets finer-grained, but may yield artifacts around outliers. In practice, 20-40 // histogram bins appear to work well, with more bins being required for skewed or // smaller datasets. Note that this function creates a histogram with non-uniform // bin widths. 
It offers no guarantees in terms of the mean-squared-error of the // histogram, but in practice is comparable to the histograms produced by the R/S-Plus // statistical computing packages. Note: the output type of the 'x' field in the return value is // propagated from the input value consumed in the aggregate function. // // HistogramNumeric is the Golang equivalent of histogram_numeric: (col: 'ColumnOrName', nBins: 'ColumnOrName') -> pyspark.sql.connect.column.Column func HistogramNumeric(col column.Column, nBins column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("histogram_numeric", col, nBins)) } // Ntile - Window function: returns the ntile group id (from 1 to `n` inclusive) // in an ordered window partition. For example, if `n` is 4, the first // quarter of the rows will get value 1, the second quarter will get 2, // the third quarter will get 3, and the last quarter will get 4. // // This is equivalent to the NTILE function in SQL. // // Ntile is the Golang equivalent of ntile: (n: int) -> pyspark.sql.connect.column.Column func Ntile(n int64) column.Column { lit_n := Int64Lit(n) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ntile", lit_n)) } // PercentRank - Window function: returns the relative rank (i.e. percentile) of rows within a window partition. // // PercentRank is the Golang equivalent of percent_rank: () -> pyspark.sql.connect.column.Column func PercentRank() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("percent_rank")) } // Rank - Window function: returns the rank of rows within a window partition. // // The difference between rank and dense_rank is that dense_rank leaves no gaps in ranking // sequence when there are ties. That is, if you were ranking a competition using dense_rank // and had three people tie for second place, you would say that all three were in second // place and that the next person came in third. 
Rank would give me sequential numbers, making // the person that came in third place (after the ties) would register as coming in fifth. // // This is equivalent to the RANK function in SQL. // // Rank is the Golang equivalent of rank: () -> pyspark.sql.connect.column.Column func Rank() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("rank")) } // RowNumber - Window function: returns a sequential number starting at 1 within a window partition. // // RowNumber is the Golang equivalent of row_number: () -> pyspark.sql.connect.column.Column func RowNumber() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("row_number")) } // TODO: aggregate: (col: 'ColumnOrName', initialValue: 'ColumnOrName', merge: Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column], finish: Optional[Callable[[pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]] = None) -> pyspark.sql.connect.column.Column // TODO: reduce: (col: 'ColumnOrName', initialValue: 'ColumnOrName', merge: Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column], finish: Optional[Callable[[pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]] = None) -> pyspark.sql.connect.column.Column // Array - Creates a new array column. 
// // Array is the Golang equivalent of array: (*cols: Union[ForwardRef('ColumnOrName'), List[ForwardRef('ColumnOrName')], Tuple[ForwardRef('ColumnOrName'), ...]]) -> pyspark.sql.connect.column.Column func Array(cols column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array", cols)) } // TODO: array_append: (col: 'ColumnOrName', value: Any) -> pyspark.sql.connect.column.Column // TODO: array_contains: (col: 'ColumnOrName', value: Any) -> pyspark.sql.connect.column.Column // ArrayDistinct - Collection function: removes duplicate values from the array. // // ArrayDistinct is the Golang equivalent of array_distinct: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayDistinct(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_distinct", col)) } // ArrayExcept - Collection function: returns an array of the elements in col1 but not in col2, // without duplicates. // // ArrayExcept is the Golang equivalent of array_except: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayExcept(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_except", col1, col2)) } // TODO: array_insert: (arr: 'ColumnOrName', pos: Union[ForwardRef('ColumnOrName'), int], value: Any) -> pyspark.sql.connect.column.Column // ArrayIntersect - Collection function: returns an array of the elements in the intersection of col1 and col2, // without duplicates. // // ArrayIntersect is the Golang equivalent of array_intersect: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayIntersect(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_intersect", col1, col2)) } // ArrayCompact - Collection function: removes null values from the array. 
// // ArrayCompact is the Golang equivalent of array_compact: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayCompact(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_compact", col)) } // ArrayJoin - Concatenates the elements of `column` using the `delimiter`. Null values are replaced with // `null_replacement` if set, otherwise they are ignored. // // ArrayJoin is the Golang equivalent of array_join: (col: 'ColumnOrName', delimiter: str, null_replacement: Optional[str] = None) -> pyspark.sql.connect.column.Column func ArrayJoin(col column.Column, delimiter string, null_replacement string) column.Column { lit_delimiter := StringLit(delimiter) lit_null_replacement := StringLit(null_replacement) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_join", col, lit_delimiter, lit_null_replacement)) } // ArrayMax - Collection function: returns the maximum value of the array. // // ArrayMax is the Golang equivalent of array_max: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayMax(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_max", col)) } // ArrayMin - Collection function: returns the minimum value of the array. // // ArrayMin is the Golang equivalent of array_min: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayMin(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_min", col)) } // ArraySize - Returns the total number of elements in the array. The function returns null for null input. 
// // ArraySize is the Golang equivalent of array_size: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArraySize(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_size", col)) } // Cardinality - Collection function: returns the length of the array or map stored in the column. // // Cardinality is the Golang equivalent of cardinality: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Cardinality(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("cardinality", col)) } // TODO: array_position: (col: 'ColumnOrName', value: Any) -> pyspark.sql.connect.column.Column // TODO: array_prepend: (col: 'ColumnOrName', value: Any) -> pyspark.sql.connect.column.Column // TODO: array_remove: (col: 'ColumnOrName', element: Any) -> pyspark.sql.connect.column.Column // ArrayRepeat - Collection function: creates an array containing a column repeated count times. // // ArrayRepeat is the Golang equivalent of array_repeat: (col: 'ColumnOrName', count: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func ArrayRepeat(col column.Column, count column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_repeat", col, count)) } // TODO: array_sort: (col: 'ColumnOrName', comparator: Optional[Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]] = None) -> pyspark.sql.connect.column.Column // ArrayUnion - Collection function: returns an array of the elements in the union of col1 and col2, // without duplicates. 
// // ArrayUnion is the Golang equivalent of array_union: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArrayUnion(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("array_union", col1, col2)) } // ArraysOverlap - Collection function: returns true if the arrays contain any common non-null element; if not, // returns null if both the arrays are non-empty and any of them contains a null element; returns // false otherwise. // // ArraysOverlap is the Golang equivalent of arrays_overlap: (a1: 'ColumnOrName', a2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArraysOverlap(a1 column.Column, a2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("arrays_overlap", a1, a2)) } // ArraysZip - Collection function: Returns a merged array of structs in which the N-th struct contains all // N-th values of input arrays. If one of the arrays is shorter than others then // resulting struct type value will be a `null` for missing elements. // // ArraysZip is the Golang equivalent of arrays_zip: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ArraysZip(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("arrays_zip", vals...)) } // Concat - Concatenates multiple input columns together into a single column. // The function works with strings, numeric, binary and compatible array columns. // // Concat is the Golang equivalent of concat: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Concat(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("concat", vals...)) } // CreateMap - Creates a new map column. 
// // CreateMap is the Golang equivalent of create_map: (*cols: Union[ForwardRef('ColumnOrName'), List[ForwardRef('ColumnOrName')], Tuple[ForwardRef('ColumnOrName'), ...]]) -> pyspark.sql.connect.column.Column func CreateMap(cols column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("create_map", cols)) } // TODO: element_at: (col: 'ColumnOrName', extraction: Any) -> pyspark.sql.connect.column.Column // TryElementAt - (array, index) - Returns element of array at given (1-based) index. If Index is 0, Spark will // throw an error. If index < 0, accesses elements from the last to the first. The function // always returns NULL if the index exceeds the length of the array. // // (map, key) - Returns value for given key. The function always returns NULL if the key is not // contained in the map. // // TryElementAt is the Golang equivalent of try_element_at: (col: 'ColumnOrName', extraction: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TryElementAt(col column.Column, extraction column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_element_at", col, extraction)) } // TODO: exists: (col: 'ColumnOrName', f: Callable[[pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // Explode - Returns a new row for each element in the given array or map. // Uses the default column name `col` for elements in the array and // `key` and `value` for elements in the map unless specified otherwise. // // Explode is the Golang equivalent of explode: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Explode(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("explode", col)) } // ExplodeOuter - Returns a new row for each element in the given array or map. // Unlike explode, if the array/map is null or empty then null is produced. 
// Uses the default column name `col` for elements in the array and // `key` and `value` for elements in the map unless specified otherwise. // // ExplodeOuter is the Golang equivalent of explode_outer: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ExplodeOuter(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("explode_outer", col)) } // TODO: filter: (col: 'ColumnOrName', f: Union[Callable[[pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column], Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]]) -> pyspark.sql.connect.column.Column // Flatten - Collection function: creates a single array from an array of arrays. // If a structure of nested arrays is deeper than two levels, // only one level of nesting is removed. // // Flatten is the Golang equivalent of flatten: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Flatten(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("flatten", col)) } // TODO: forall: (col: 'ColumnOrName', f: Callable[[pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // TODO: from_csv: (col: 'ColumnOrName', schema: Union[pyspark.sql.connect.column.Column, str], options: Optional[Dict[str, str]] = None) -> pyspark.sql.connect.column.Column // TODO: from_json: (col: 'ColumnOrName', schema: Union[pyspark.sql.types.ArrayType, pyspark.sql.types.StructType, pyspark.sql.connect.column.Column, str], options: Optional[Dict[str, str]] = None) -> pyspark.sql.connect.column.Column // Get - Collection function: Returns element of array at given (0-based) index. // If the index points outside of the array boundaries, then this function // returns NULL. 
// // Get is the Golang equivalent of get: (col: 'ColumnOrName', index: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func Get(col column.Column, index column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("get", col, index)) } // GetJsonObject - Extracts json object from a json string based on json `path` specified, and returns json string // of the extracted json object. It will return null if the input json string is invalid. // // GetJsonObject is the Golang equivalent of get_json_object: (col: 'ColumnOrName', path: str) -> pyspark.sql.connect.column.Column func GetJsonObject(col column.Column, path string) column.Column { lit_path := StringLit(path) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("get_json_object", col, lit_path)) } // JsonArrayLength - Returns the number of elements in the outermost JSON array. `NULL` is returned in case of // any other valid JSON string, `NULL` or an invalid JSON. // // JsonArrayLength is the Golang equivalent of json_array_length: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func JsonArrayLength(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("json_array_length", col)) } // JsonObjectKeys - Returns all the keys of the outermost JSON object as an array. If a valid JSON object is // given, all the keys of the outermost object will be returned as an array. If it is any // other valid JSON string, an invalid JSON string or an empty string, the function returns null. // // JsonObjectKeys is the Golang equivalent of json_object_keys: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func JsonObjectKeys(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("json_object_keys", col)) } // Inline - Explodes an array of structs into a table. 
// // Inline is the Golang equivalent of inline: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Inline(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("inline", col)) } // InlineOuter - Explodes an array of structs into a table. // Unlike inline, if the array is null or empty then null is produced for each nested column. // // InlineOuter is the Golang equivalent of inline_outer: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func InlineOuter(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("inline_outer", col)) } // JsonTuple - Creates a new row for a json column according to the given field names. // // JsonTuple is the Golang equivalent of json_tuple: (col: 'ColumnOrName', *fields: str) -> pyspark.sql.connect.column.Column func JsonTuple(col column.Column, fields string) column.Column { lit_fields := StringLit(fields) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("json_tuple", col, lit_fields)) } // MapConcat - Returns the union of all the given maps. // // MapConcat is the Golang equivalent of map_concat: (*cols: Union[ForwardRef('ColumnOrName'), List[ForwardRef('ColumnOrName')], Tuple[ForwardRef('ColumnOrName'), ...]]) -> pyspark.sql.connect.column.Column func MapConcat(cols column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("map_concat", cols)) } // TODO: map_contains_key: (col: 'ColumnOrName', value: Any) -> pyspark.sql.connect.column.Column // MapEntries - Collection function: Returns an unordered array of all entries in the given map. 
// // MapEntries is the Golang equivalent of map_entries: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MapEntries(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("map_entries", col)) } // TODO: map_filter: (col: 'ColumnOrName', f: Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // MapFromArrays - Creates a new map from two arrays. // // MapFromArrays is the Golang equivalent of map_from_arrays: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MapFromArrays(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("map_from_arrays", col1, col2)) } // MapFromEntries - Collection function: Converts an array of entries (key value struct types) to a map // of values. // // MapFromEntries is the Golang equivalent of map_from_entries: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MapFromEntries(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("map_from_entries", col)) } // MapKeys - Collection function: Returns an unordered array containing the keys of the map. // // MapKeys is the Golang equivalent of map_keys: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MapKeys(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("map_keys", col)) } // MapValues - Collection function: Returns an unordered array containing the values of the map. 
// // MapValues is the Golang equivalent of map_values: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MapValues(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("map_values", col)) } // TODO: map_zip_with: (col1: 'ColumnOrName', col2: 'ColumnOrName', f: Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // StrToMap - Creates a map after splitting the text into key/value pairs using delimiters. // Both `pairDelim` and `keyValueDelim` are treated as regular expressions. // // StrToMap is the Golang equivalent of str_to_map: (text: 'ColumnOrName', pairDelim: Optional[ForwardRef('ColumnOrName')] = None, keyValueDelim: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func StrToMap(text column.Column, pairDelim column.Column, keyValueDelim column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("str_to_map", text, pairDelim, keyValueDelim)) } // Posexplode - Returns a new row for each element with position in the given array or map. // Uses the default column name `pos` for position, and `col` for elements in the // array and `key` and `value` for elements in the map unless specified otherwise. // // Posexplode is the Golang equivalent of posexplode: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Posexplode(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("posexplode", col)) } // PosexplodeOuter - Returns a new row for each element with position in the given array or map. // Unlike posexplode, if the array/map is null or empty then the row (null, null) is produced. // Uses the default column name `pos` for position, and `col` for elements in the // array and `key` and `value` for elements in the map unless specified otherwise. 
// // PosexplodeOuter is the Golang equivalent of posexplode_outer: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func PosexplodeOuter(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("posexplode_outer", col)) } // Reverse - Collection function: returns a reversed string or an array with reverse order of elements. // // Reverse is the Golang equivalent of reverse: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Reverse(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("reverse", col)) } // Sequence - Generate a sequence of integers from `start` to `stop`, incrementing by `step`. // If `step` is not set, incrementing by 1 if `start` is less than or equal to `stop`, // otherwise -1. // // Sequence is the Golang equivalent of sequence: (start: 'ColumnOrName', stop: 'ColumnOrName', step: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func Sequence(start column.Column, stop column.Column, step column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sequence", start, stop, step)) } // TODO: schema_of_csv: (csv: 'ColumnOrName', options: Optional[Dict[str, str]] = None) -> pyspark.sql.connect.column.Column // TODO: schema_of_json: (json: 'ColumnOrName', options: Optional[Dict[str, str]] = None) -> pyspark.sql.connect.column.Column // Shuffle - Collection function: Generates a random permutation of the given array. // // Shuffle is the Golang equivalent of shuffle: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Shuffle(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("shuffle", col)) } // Size - Collection function: returns the length of the array or map stored in the column. 
// // Size is the Golang equivalent of size: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Size(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("size", col)) } // Slice - Collection function: returns an array containing all the elements in `x` from index `start` // (array indices start at 1, or from the end if `start` is negative) with the specified `length`. // // Slice is the Golang equivalent of slice: (col: 'ColumnOrName', start: Union[ForwardRef('ColumnOrName'), int], length: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func Slice(col column.Column, start column.Column, length column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("slice", col, start, length)) } // TODO: sort_array: (col: 'ColumnOrName', asc: bool = True) -> pyspark.sql.connect.column.Column // Struct - Creates a new struct column. // // Struct is the Golang equivalent of struct: (*cols: Union[ForwardRef('ColumnOrName'), List[ForwardRef('ColumnOrName')], Tuple[ForwardRef('ColumnOrName'), ...]]) -> pyspark.sql.connect.column.Column func Struct(cols column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("struct", cols)) } // NamedStruct - Creates a struct with the given field names and values. // // NamedStruct is the Golang equivalent of named_struct: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func NamedStruct(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) 
return column.NewColumn(column.NewUnresolvedFunctionWithColumns("named_struct", vals...)) } // TODO: to_csv: (col: 'ColumnOrName', options: Optional[Dict[str, str]] = None) -> pyspark.sql.connect.column.Column // TODO: to_json: (col: 'ColumnOrName', options: Optional[Dict[str, str]] = None) -> pyspark.sql.connect.column.Column // TODO: transform: (col: 'ColumnOrName', f: Union[Callable[[pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column], Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]]) -> pyspark.sql.connect.column.Column // TODO: transform_keys: (col: 'ColumnOrName', f: Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // TODO: transform_values: (col: 'ColumnOrName', f: Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // TODO: zip_with: (left: 'ColumnOrName', right: 'ColumnOrName', f: Callable[[pyspark.sql.connect.column.Column, pyspark.sql.connect.column.Column], pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // Upper - Converts a string expression to upper case. // // Upper is the Golang equivalent of upper: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Upper(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("upper", col)) } // Lower - Converts a string expression to lower case. // // Lower is the Golang equivalent of lower: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Lower(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("lower", col)) } // Ascii - Computes the numeric value of the first character of the string column. 
// // Ascii is the Golang equivalent of ascii: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Ascii(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ascii", col)) } // Base64 - Computes the BASE64 encoding of a binary column and returns it as a string column. // // Base64 is the Golang equivalent of base64: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Base64(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("base64", col)) } // Unbase64 - Decodes a BASE64 encoded string column and returns it as a binary column. // // Unbase64 is the Golang equivalent of unbase64: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Unbase64(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("unbase64", col)) } // Ltrim - Trim the spaces from left end for the specified string value. // // Ltrim is the Golang equivalent of ltrim: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Ltrim(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ltrim", col)) } // Rtrim - Trim the spaces from right end for the specified string value. // // Rtrim is the Golang equivalent of rtrim: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Rtrim(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("rtrim", col)) } // Trim - Trim the spaces from both ends for the specified string column. // // Trim is the Golang equivalent of trim: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Trim(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("trim", col)) } // ConcatWs - Concatenates multiple input string columns together into a single string column, // using the given separator. 
// // ConcatWs is the Golang equivalent of concat_ws: (sep: str, *cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ConcatWs(sep string, cols ...column.Column) column.Column { lit_sep := StringLit(sep) vals := make([]column.Column, 0) vals = append(vals, lit_sep) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("concat_ws", vals...)) } // Decode - Computes the first argument into a string from a binary using the provided character set // (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). // // Decode is the Golang equivalent of decode: (col: 'ColumnOrName', charset: str) -> pyspark.sql.connect.column.Column func Decode(col column.Column, charset string) column.Column { lit_charset := StringLit(charset) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("decode", col, lit_charset)) } // Encode - Computes the first argument into a binary from a string using the provided character set // (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). // // Encode is the Golang equivalent of encode: (col: 'ColumnOrName', charset: str) -> pyspark.sql.connect.column.Column func Encode(col column.Column, charset string) column.Column { lit_charset := StringLit(charset) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("encode", col, lit_charset)) } // FormatNumber - Formats the number X to a format like '#,--#,--#.--', rounded to d decimal places // with HALF_EVEN round mode, and returns the result as a string. // // FormatNumber is the Golang equivalent of format_number: (col: 'ColumnOrName', d: int) -> pyspark.sql.connect.column.Column func FormatNumber(col column.Column, d int64) column.Column { lit_d := Int64Lit(d) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("format_number", col, lit_d)) } // FormatString - Formats the arguments in printf-style and returns the result as a string column. 
// // FormatString is the Golang equivalent of format_string: (format: str, *cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func FormatString(format string, cols ...column.Column) column.Column { lit_format := StringLit(format) vals := make([]column.Column, 0) vals = append(vals, lit_format) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("format_string", vals...)) } // Instr - Locate the position of the first occurrence of substr column in the given string. // Returns null if either of the arguments are null. // // Instr is the Golang equivalent of instr: (str: 'ColumnOrName', substr: str) -> pyspark.sql.connect.column.Column func Instr(str column.Column, substr string) column.Column { lit_substr := StringLit(substr) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("instr", str, lit_substr)) } // Overlay - Overlay the specified portion of `src` with `replace`, // starting from byte position `pos` of `src` and proceeding for `len` bytes. // // Overlay is the Golang equivalent of overlay: (src: 'ColumnOrName', replace: 'ColumnOrName', pos: Union[ForwardRef('ColumnOrName'), int], len: Union[ForwardRef('ColumnOrName'), int] = -1) -> pyspark.sql.connect.column.Column func Overlay(src column.Column, replace column.Column, pos column.Column, len column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("overlay", src, replace, pos, len)) } // Sentences - Splits a string into arrays of sentences, where each sentence is an array of words. // The 'language' and 'country' arguments are optional, and if omitted, the default locale is used. 
// // Sentences is the Golang equivalent of sentences: (string: 'ColumnOrName', language: Optional[ForwardRef('ColumnOrName')] = None, country: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func Sentences(string column.Column, language column.Column, country column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sentences", string, language, country)) } // Substring - Substring starts at `pos` and is of length `len` when str is String type or // returns the slice of byte array that starts at `pos` in byte and is of length `len` // when str is Binary type. // // Substring is the Golang equivalent of substring: (str: 'ColumnOrName', pos: int, len: int) -> pyspark.sql.connect.column.Column func Substring(str column.Column, pos int64, len int64) column.Column { lit_pos := Int64Lit(pos) lit_len := Int64Lit(len) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("substring", str, lit_pos, lit_len)) } // SubstringIndex - Returns the substring from string str before count occurrences of the delimiter delim. // If count is positive, everything the left of the final delimiter (counting from left) is // returned. If count is negative, every to the right of the final delimiter (counting from the // right) is returned. substring_index performs a case-sensitive match when searching for delim. // // SubstringIndex is the Golang equivalent of substring_index: (str: 'ColumnOrName', delim: str, count: int) -> pyspark.sql.connect.column.Column func SubstringIndex(str column.Column, delim string, count int64) column.Column { lit_delim := StringLit(delim) lit_count := Int64Lit(count) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("substring_index", str, lit_delim, lit_count)) } // Levenshtein - Computes the Levenshtein distance of the two given strings. 
// // Levenshtein is the Golang equivalent of levenshtein: (left: 'ColumnOrName', right: 'ColumnOrName', threshold: Optional[int] = None) -> pyspark.sql.connect.column.Column func Levenshtein(left column.Column, right column.Column, threshold int64) column.Column { lit_threshold := Int64Lit(threshold) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("levenshtein", left, right, lit_threshold)) } // Locate - Locate the position of the first occurrence of substr in a string column, after position pos. // // Locate is the Golang equivalent of locate: (substr: str, str: 'ColumnOrName', pos: int = 1) -> pyspark.sql.connect.column.Column func Locate(substr string, str column.Column, pos int64) column.Column { lit_substr := StringLit(substr) lit_pos := Int64Lit(pos) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("locate", lit_substr, str, lit_pos)) } // Lpad - Left-pad the string column to width `len` with `pad`. // // Lpad is the Golang equivalent of lpad: (col: 'ColumnOrName', len: int, pad: str) -> pyspark.sql.connect.column.Column func Lpad(col column.Column, len int64, pad string) column.Column { lit_len := Int64Lit(len) lit_pad := StringLit(pad) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("lpad", col, lit_len, lit_pad)) } // Rpad - Right-pad the string column to width `len` with `pad`. // // Rpad is the Golang equivalent of rpad: (col: 'ColumnOrName', len: int, pad: str) -> pyspark.sql.connect.column.Column func Rpad(col column.Column, len int64, pad string) column.Column { lit_len := Int64Lit(len) lit_pad := StringLit(pad) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("rpad", col, lit_len, lit_pad)) } // Repeat - Repeats a string column n times, and returns it as a new string column. 
// // Repeat is the Golang equivalent of repeat: (col: 'ColumnOrName', n: int) -> pyspark.sql.connect.column.Column func Repeat(col column.Column, n int64) column.Column { lit_n := Int64Lit(n) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("repeat", col, lit_n)) } // Split - Splits str around matches of the given pattern. // // Split is the Golang equivalent of split: (str: 'ColumnOrName', pattern: str, limit: int = -1) -> pyspark.sql.connect.column.Column func Split(str column.Column, pattern string, limit int64) column.Column { lit_pattern := StringLit(pattern) lit_limit := Int64Lit(limit) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("split", str, lit_pattern, lit_limit)) } // Rlike - Returns true if `str` matches the Java regex `regexp`, or false otherwise. // // Rlike is the Golang equivalent of rlike: (str: 'ColumnOrName', regexp: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Rlike(str column.Column, regexp column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("rlike", str, regexp)) } // Regexp - Returns true if `str` matches the Java regex `regexp`, or false otherwise. // // Regexp is the Golang equivalent of regexp: (str: 'ColumnOrName', regexp: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Regexp(str column.Column, regexp column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regexp", str, regexp)) } // RegexpLike - Returns true if `str` matches the Java regex `regexp`, or false otherwise. // // RegexpLike is the Golang equivalent of regexp_like: (str: 'ColumnOrName', regexp: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegexpLike(str column.Column, regexp column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regexp_like", str, regexp)) } // RegexpCount - Returns a count of the number of times that the Java regex pattern `regexp` is matched // in the string `str`. 
// // RegexpCount is the Golang equivalent of regexp_count: (str: 'ColumnOrName', regexp: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegexpCount(str column.Column, regexp column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regexp_count", str, regexp)) } // RegexpExtract - Extract a specific group matched by the Java regex `regexp`, from the specified string column. // If the regex did not match, or the specified group did not match, an empty string is returned. // // RegexpExtract is the Golang equivalent of regexp_extract: (str: 'ColumnOrName', pattern: str, idx: int) -> pyspark.sql.connect.column.Column func RegexpExtract(str column.Column, pattern string, idx int64) column.Column { lit_pattern := StringLit(pattern) lit_idx := Int64Lit(idx) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regexp_extract", str, lit_pattern, lit_idx)) } // TODO: regexp_extract_all: (str: 'ColumnOrName', regexp: 'ColumnOrName', idx: Union[int, pyspark.sql.connect.column.Column, NoneType] = None) -> pyspark.sql.connect.column.Column // TODO: regexp_replace: (string: 'ColumnOrName', pattern: Union[str, pyspark.sql.connect.column.Column], replacement: Union[str, pyspark.sql.connect.column.Column]) -> pyspark.sql.connect.column.Column // RegexpSubstr - Returns the substring that matches the Java regex `regexp` within the string `str`. // If the regular expression is not found, the result is null. 
// // RegexpSubstr is the Golang equivalent of regexp_substr: (str: 'ColumnOrName', regexp: 'ColumnOrName') -> pyspark.sql.connect.column.Column func RegexpSubstr(str column.Column, regexp column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("regexp_substr", str, regexp)) } // TODO: regexp_instr: (str: 'ColumnOrName', regexp: 'ColumnOrName', idx: Union[int, pyspark.sql.connect.column.Column, NoneType] = None) -> pyspark.sql.connect.column.Column // Initcap - Translate the first letter of each word to upper case in the sentence. // // Initcap is the Golang equivalent of initcap: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Initcap(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("initcap", col)) } // Soundex - Returns the SoundEx encoding for a string // // Soundex is the Golang equivalent of soundex: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Soundex(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("soundex", col)) } // Length - Computes the character length of string data or number of bytes of binary data. // The length of character data includes the trailing spaces. The length of binary data // includes binary zeros. // // Length is the Golang equivalent of length: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Length(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("length", col)) } // OctetLength - Calculates the byte length for the specified string column. // // OctetLength is the Golang equivalent of octet_length: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func OctetLength(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("octet_length", col)) } // BitLength - Calculates the bit length for the specified string column. 
// // BitLength is the Golang equivalent of bit_length: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func BitLength(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bit_length", col)) } // Translate - A function translate any character in the `srcCol` by a character in `matching`. // The characters in `replace` is corresponding to the characters in `matching`. // Translation will happen whenever any character in the string is matching with the character // in the `matching`. // // Translate is the Golang equivalent of translate: (srcCol: 'ColumnOrName', matching: str, replace: str) -> pyspark.sql.connect.column.Column func Translate(srcCol column.Column, matching string, replace string) column.Column { lit_matching := StringLit(matching) lit_replace := StringLit(replace) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("translate", srcCol, lit_matching, lit_replace)) } // ToBinary - Converts the input `col` to a binary value based on the supplied `format`. // The `format` can be a case-insensitive string literal of "hex", "utf-8", "utf8", // or "base64". By default, the binary format for conversion is "hex" if // `format` is omitted. The function returns NULL if at least one of the // input parameters is NULL. // // ToBinary is the Golang equivalent of to_binary: (col: 'ColumnOrName', format: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func ToBinary(col column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_binary", col, format)) } // ToChar - Convert `col` to a string based on the `format`. // Throws an exception if the conversion fails. The format can consist of the following // characters, case insensitive: // '0' or '9': Specifies an expected digit between 0 and 9. 
A sequence of 0 or 9 in the // format string matches a sequence of digits in the input value, generating a result // string of the same length as the corresponding sequence in the format string. // The result string is left-padded with zeros if the 0/9 sequence comprises more digits // than the matching part of the decimal value, starts with 0, and is before the decimal // point. Otherwise, it is padded with spaces. // '.' or 'D': Specifies the position of the decimal point (optional, only allowed once). // ',' or 'G': Specifies the position of the grouping (thousands) separator (,). // There must be a 0 or 9 to the left and right of each grouping separator. // '$': Specifies the location of the $ currency sign. This character may only be specified once. // 'S' or 'MI': Specifies the position of a '-' or '+' sign (optional, only allowed once at // the beginning or end of the format string). Note that 'S' prints '+' for positive // values but 'MI' prints a space. // 'PR': Only allowed at the end of the format string; specifies that the result string // will be wrapped by angle brackets if the input value is negative. // // ToChar is the Golang equivalent of to_char: (col: 'ColumnOrName', format: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ToChar(col column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_char", col, format)) } // ToVarchar - Convert `col` to a string based on the `format`. // Throws an exception if the conversion fails. The format can consist of the following // characters, case insensitive: // '0' or '9': Specifies an expected digit between 0 and 9. A sequence of 0 or 9 in the // format string matches a sequence of digits in the input value, generating a result // string of the same length as the corresponding sequence in the format string. 
// The result string is left-padded with zeros if the 0/9 sequence comprises more digits // than the matching part of the decimal value, starts with 0, and is before the decimal // point. Otherwise, it is padded with spaces. // '.' or 'D': Specifies the position of the decimal point (optional, only allowed once). // ',' or 'G': Specifies the position of the grouping (thousands) separator (,). // There must be a 0 or 9 to the left and right of each grouping separator. // '$': Specifies the location of the $ currency sign. This character may only be specified once. // 'S' or 'MI': Specifies the position of a '-' or '+' sign (optional, only allowed once at // the beginning or end of the format string). Note that 'S' prints '+' for positive // values but 'MI' prints a space. // 'PR': Only allowed at the end of the format string; specifies that the result string // will be wrapped by angle brackets if the input value is negative. // // ToVarchar is the Golang equivalent of to_varchar: (col: 'ColumnOrName', format: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ToVarchar(col column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_varchar", col, format)) } // ToNumber - Convert string 'col' to a number based on the string format 'format'. // Throws an exception if the conversion fails. The format can consist of the following // characters, case insensitive: // '0' or '9': Specifies an expected digit between 0 and 9. A sequence of 0 or 9 in the // format string matches a sequence of digits in the input string. If the 0/9 // sequence starts with 0 and is before the decimal point, it can only match a digit // sequence of the same size. Otherwise, if the sequence starts with 9 or is after // the decimal point, it can match a digit sequence that has the same or smaller size. // '.' or 'D': Specifies the position of the decimal point (optional, only allowed once). 
// ',' or 'G': Specifies the position of the grouping (thousands) separator (,). // There must be a 0 or 9 to the left and right of each grouping separator. // 'col' must match the grouping separator relevant for the size of the number. // '$': Specifies the location of the $ currency sign. This character may only be // specified once. // 'S' or 'MI': Specifies the position of a '-' or '+' sign (optional, only allowed // once at the beginning or end of the format string). Note that 'S' allows '-' // but 'MI' does not. // 'PR': Only allowed at the end of the format string; specifies that 'col' indicates a // negative number with wrapping angled brackets. // // ToNumber is the Golang equivalent of to_number: (col: 'ColumnOrName', format: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ToNumber(col column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_number", col, format)) } // Replace - Replaces all occurrences of `search` with `replace`. // // Replace is the Golang equivalent of replace: (src: 'ColumnOrName', search: 'ColumnOrName', replace: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func Replace(src column.Column, search column.Column, replace column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("replace", src, search, replace)) } // SplitPart - Splits `str` by delimiter and return requested part of the split (1-based). // If any input is null, returns null. if `partNum` is out of range of split parts, // returns empty string. If `partNum` is 0, throws an error. If `partNum` is negative, // the parts are counted backward from the end of the string. // If the `delimiter` is an empty string, the `str` is not split. 
// // SplitPart is the Golang equivalent of split_part: (src: 'ColumnOrName', delimiter: 'ColumnOrName', partNum: 'ColumnOrName') -> pyspark.sql.connect.column.Column func SplitPart(src column.Column, delimiter column.Column, partNum column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("split_part", src, delimiter, partNum)) } // Substr - Returns the substring of `str` that starts at `pos` and is of length `len`, // or the slice of byte array that starts at `pos` and is of length `len`. // // Substr is the Golang equivalent of substr: (str: 'ColumnOrName', pos: 'ColumnOrName', len: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func Substr(str column.Column, pos column.Column, len column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("substr", str, pos, len)) } // ParseUrl - Extracts a part from a URL. // // ParseUrl is the Golang equivalent of parse_url: (url: 'ColumnOrName', partToExtract: 'ColumnOrName', key: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func ParseUrl(url column.Column, partToExtract column.Column, key column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("parse_url", url, partToExtract, key)) } // Printf - Formats the arguments in printf-style and returns the result as a string column. // // Printf is the Golang equivalent of printf: (format: 'ColumnOrName', *cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Printf(format column.Column, cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, format) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("printf", vals...)) } // UrlDecode - Decodes a `str` in 'application/x-www-form-urlencoded' format // using a specific encoding scheme. 
// // UrlDecode is the Golang equivalent of url_decode: (str: 'ColumnOrName') -> pyspark.sql.connect.column.Column func UrlDecode(str column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("url_decode", str)) } // UrlEncode - Translates a string into 'application/x-www-form-urlencoded' format // using a specific encoding scheme. // // UrlEncode is the Golang equivalent of url_encode: (str: 'ColumnOrName') -> pyspark.sql.connect.column.Column func UrlEncode(str column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("url_encode", str)) } // Position - Returns the position of the first occurrence of `substr` in `str` after position `start`. // The given `start` and return value are 1-based. // // Position is the Golang equivalent of position: (substr: 'ColumnOrName', str: 'ColumnOrName', start: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func Position(substr column.Column, str column.Column, start column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("position", substr, str, start)) } // Endswith - Returns a boolean. The value is True if str ends with suffix. // Returns NULL if either input expression is NULL. Otherwise, returns False. // Both str or suffix must be of STRING or BINARY type. // // Endswith is the Golang equivalent of endswith: (str: 'ColumnOrName', suffix: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Endswith(str column.Column, suffix column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("endswith", str, suffix)) } // Startswith - Returns a boolean. The value is True if str starts with prefix. // Returns NULL if either input expression is NULL. Otherwise, returns False. // Both str or prefix must be of STRING or BINARY type. 
// // Startswith is the Golang equivalent of startswith: (str: 'ColumnOrName', prefix: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Startswith(str column.Column, prefix column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("startswith", str, prefix)) } // Char - Returns the ASCII character having the binary equivalent to `col`. If col is larger than 256 the // result is equivalent to char(col % 256) // // Char is the Golang equivalent of char: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Char(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("char", col)) } // TryToBinary - This is a special version of `to_binary` that performs the same operation, but returns a NULL // value instead of raising an error if the conversion cannot be performed. // // TryToBinary is the Golang equivalent of try_to_binary: (col: 'ColumnOrName', format: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func TryToBinary(col column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_to_binary", col, format)) } // TryToNumber - Convert string 'col' to a number based on the string format `format`. Returns NULL if the // string 'col' does not match the expected format. The format follows the same semantics as the // to_number function. // // TryToNumber is the Golang equivalent of try_to_number: (col: 'ColumnOrName', format: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TryToNumber(col column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_to_number", col, format)) } // Btrim - Remove the leading and trailing `trim` characters from `str`. 
// // Btrim is the Golang equivalent of btrim: (str: 'ColumnOrName', trim: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func Btrim(str column.Column, trim column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("btrim", str, trim)) } // CharLength - Returns the character length of string data or number of bytes of binary data. // The length of string data includes the trailing spaces. // The length of binary data includes binary zeros. // // CharLength is the Golang equivalent of char_length: (str: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CharLength(str column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("char_length", str)) } // CharacterLength - Returns the character length of string data or number of bytes of binary data. // The length of string data includes the trailing spaces. // The length of binary data includes binary zeros. // // CharacterLength is the Golang equivalent of character_length: (str: 'ColumnOrName') -> pyspark.sql.connect.column.Column func CharacterLength(str column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("character_length", str)) } // Contains - Returns a boolean. The value is True if right is found inside left. // Returns NULL if either input expression is NULL. Otherwise, returns False. // Both left or right must be of STRING or BINARY type. // // Contains is the Golang equivalent of contains: (left: 'ColumnOrName', right: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Contains(left column.Column, right column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("contains", left, right)) } // Elt - Returns the `n`-th input, e.g., returns `input2` when `n` is 2. // The function returns NULL if the index exceeds the length of the array // and `spark.sql.ansi.enabled` is set to false. 
If `spark.sql.ansi.enabled` is set to true, // it throws ArrayIndexOutOfBoundsException for invalid indices. // // Elt is the Golang equivalent of elt: (*inputs: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Elt(inputs ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, inputs...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("elt", vals...)) } // FindInSet - Returns the index (1-based) of the given string (`str`) in the comma-delimited // list (`strArray`). Returns 0, if the string was not found or if the given string (`str`) // contains a comma. // // FindInSet is the Golang equivalent of find_in_set: (str: 'ColumnOrName', str_array: 'ColumnOrName') -> pyspark.sql.connect.column.Column func FindInSet(str column.Column, str_array column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("find_in_set", str, str_array)) } // TODO: like: (str: 'ColumnOrName', pattern: 'ColumnOrName', escapeChar: Optional[ForwardRef('Column')] = None) -> pyspark.sql.connect.column.Column // TODO: ilike: (str: 'ColumnOrName', pattern: 'ColumnOrName', escapeChar: Optional[ForwardRef('Column')] = None) -> pyspark.sql.connect.column.Column // Lcase - Returns `str` with all characters changed to lowercase. // // Lcase is the Golang equivalent of lcase: (str: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Lcase(str column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("lcase", str)) } // Ucase - Returns `str` with all characters changed to uppercase. // // Ucase is the Golang equivalent of ucase: (str: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Ucase(str column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ucase", str)) } // Left - Returns the leftmost `len`(`len` can be string type) characters from the string `str`, // if `len` is less or equal than 0 the result is an empty string. 
// // Left is the Golang equivalent of left: (str: 'ColumnOrName', len: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Left(str column.Column, len column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("left", str, len)) } // Right - Returns the rightmost `len`(`len` can be string type) characters from the string `str`, // if `len` is less or equal than 0 the result is an empty string. // // Right is the Golang equivalent of right: (str: 'ColumnOrName', len: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Right(str column.Column, len column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("right", str, len)) } // Mask - Masks the given string value. This can be useful for creating copies of tables with sensitive // information removed. // // Mask is the Golang equivalent of mask: (col: 'ColumnOrName', upperChar: Optional[ForwardRef('ColumnOrName')] = None, lowerChar: Optional[ForwardRef('ColumnOrName')] = None, digitChar: Optional[ForwardRef('ColumnOrName')] = None, otherChar: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func Mask(col column.Column, upperChar column.Column, lowerChar column.Column, digitChar column.Column, otherChar column.Column, ) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("mask", col, upperChar, lowerChar, digitChar, otherChar)) } // Curdate - Returns the current date at the start of query evaluation as a :class:`DateType` column. // All calls of current_date within the same query return the same value. // // Curdate is the Golang equivalent of curdate: () -> pyspark.sql.connect.column.Column func Curdate() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("curdate")) } // CurrentDate - Returns the current date at the start of query evaluation as a :class:`DateType` column. 
// All calls of current_date within the same query return the same value. // // CurrentDate is the Golang equivalent of current_date: () -> pyspark.sql.connect.column.Column func CurrentDate() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("current_date")) } // CurrentTimestamp - Returns the current timestamp at the start of query evaluation as a :class:`TimestampType` // column. All calls of current_timestamp within the same query return the same value. // // CurrentTimestamp is the Golang equivalent of current_timestamp: () -> pyspark.sql.connect.column.Column func CurrentTimestamp() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("current_timestamp")) } // Now - Returns the current timestamp at the start of query evaluation. // // Now is the Golang equivalent of now: () -> pyspark.sql.connect.column.Column func Now() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("now")) } // CurrentTimezone - Returns the current session local timezone. // // CurrentTimezone is the Golang equivalent of current_timezone: () -> pyspark.sql.connect.column.Column func CurrentTimezone() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("current_timezone")) } // Localtimestamp - Returns the current timestamp without time zone at the start of query evaluation // as a timestamp without time zone column. All calls of localtimestamp within the // same query return the same value. // // Localtimestamp is the Golang equivalent of localtimestamp: () -> pyspark.sql.connect.column.Column func Localtimestamp() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("localtimestamp")) } // DateFormat - Converts a date/timestamp/string to a value of string in the format specified by the date // format given by the second argument. // // A pattern could be for instance `dd.MM.yyyy` and could return a string like '18.03.1993'. 
All // pattern letters of `datetime pattern`_. can be used. // // DateFormat is the Golang equivalent of date_format: (date: 'ColumnOrName', format: str) -> pyspark.sql.connect.column.Column func DateFormat(date column.Column, format string) column.Column { lit_format := StringLit(format) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("date_format", date, lit_format)) } // Year - Extract the year of a given date/timestamp as integer. // // Year is the Golang equivalent of year: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Year(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("year", col)) } // Quarter - Extract the quarter of a given date/timestamp as integer. // // Quarter is the Golang equivalent of quarter: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Quarter(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("quarter", col)) } // Month - Extract the month of a given date/timestamp as integer. // // Month is the Golang equivalent of month: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Month(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("month", col)) } // Dayofweek - Extract the day of the week of a given date/timestamp as integer. // Ranges from 1 for a Sunday through to 7 for a Saturday // // Dayofweek is the Golang equivalent of dayofweek: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Dayofweek(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("dayofweek", col)) } // Dayofmonth - Extract the day of the month of a given date/timestamp as integer. 
// // Dayofmonth is the Golang equivalent of dayofmonth: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Dayofmonth(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("dayofmonth", col)) } // Day - Extract the day of the month of a given date/timestamp as integer. // // Day is the Golang equivalent of day: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Day(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("day", col)) } // Dayofyear - Extract the day of the year of a given date/timestamp as integer. // // Dayofyear is the Golang equivalent of dayofyear: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Dayofyear(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("dayofyear", col)) } // Hour - Extract the hours of a given timestamp as integer. // // Hour is the Golang equivalent of hour: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Hour(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("hour", col)) } // Minute - Extract the minutes of a given timestamp as integer. // // Minute is the Golang equivalent of minute: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Minute(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("minute", col)) } // Second - Extract the seconds of a given date as integer. // // Second is the Golang equivalent of second: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Second(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("second", col)) } // Weekofyear - Extract the week number of a given date as integer. 
// A week is considered to start on a Monday and week 1 is the first week with more than 3 days, // as defined by ISO 8601 // // Weekofyear is the Golang equivalent of weekofyear: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Weekofyear(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("weekofyear", col)) } // Weekday - Returns the day of the week for date/timestamp (0 = Monday, 1 = Tuesday, ..., 6 = Sunday). // // Weekday is the Golang equivalent of weekday: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Weekday(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("weekday", col)) } // Extract - Extracts a part of the date/timestamp or interval source. // // Extract is the Golang equivalent of extract: (field: 'ColumnOrName', source: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Extract(field column.Column, source column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("extract", field, source)) } // DatePart is the Golang equivalent of date_part: (field: 'ColumnOrName', source: 'ColumnOrName') -> pyspark.sql.connect.column.Column func DatePart(field column.Column, source column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("date_part", field, source)) } // Datepart is the Golang equivalent of datepart: (field: 'ColumnOrName', source: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Datepart(field column.Column, source column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("datepart", field, source)) } // MakeDate - Returns a column with a date built from the year, month and day columns. 
// // MakeDate is the Golang equivalent of make_date: (year: 'ColumnOrName', month: 'ColumnOrName', day: 'ColumnOrName') -> pyspark.sql.connect.column.Column func MakeDate(year column.Column, month column.Column, day column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("make_date", year, month, day)) } // DateAdd - Returns the date that is `days` days after `start`. If `days` is a negative value // then these amount of days will be deducted from `start`. // // DateAdd is the Golang equivalent of date_add: (start: 'ColumnOrName', days: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func DateAdd(start column.Column, days column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("date_add", start, days)) } // Dateadd - Returns the date that is `days` days after `start`. If `days` is a negative value // then these amount of days will be deducted from `start`. // // Dateadd is the Golang equivalent of dateadd: (start: 'ColumnOrName', days: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func Dateadd(start column.Column, days column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("dateadd", start, days)) } // DateSub - Returns the date that is `days` days before `start`. If `days` is a negative value // then these amount of days will be added to `start`. // // DateSub is the Golang equivalent of date_sub: (start: 'ColumnOrName', days: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func DateSub(start column.Column, days column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("date_sub", start, days)) } // Datediff - Returns the number of days from `start` to `end`. 
// // Datediff is the Golang equivalent of datediff: (end: 'ColumnOrName', start: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Datediff(end column.Column, start column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("datediff", end, start)) } // DateDiff - Returns the number of days from `start` to `end`. // // DateDiff is the Golang equivalent of date_diff: (end: 'ColumnOrName', start: 'ColumnOrName') -> pyspark.sql.connect.column.Column func DateDiff(end column.Column, start column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("date_diff", end, start)) } // DateFromUnixDate - Create date from the number of `days` since 1970-01-01. // // DateFromUnixDate is the Golang equivalent of date_from_unix_date: (days: 'ColumnOrName') -> pyspark.sql.connect.column.Column func DateFromUnixDate(days column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("date_from_unix_date", days)) } // AddMonths - Returns the date that is `months` months after `start`. If `months` is a negative value // then these amount of months will be deducted from the `start`. // // AddMonths is the Golang equivalent of add_months: (start: 'ColumnOrName', months: Union[ForwardRef('ColumnOrName'), int]) -> pyspark.sql.connect.column.Column func AddMonths(start column.Column, months column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("add_months", start, months)) } // TODO: months_between: (date1: 'ColumnOrName', date2: 'ColumnOrName', roundOff: bool = True) -> pyspark.sql.connect.column.Column // ToDate - Converts a :class:`~pyspark.sql.Column` into :class:`pyspark.sql.types.DateType` // using the optionally specified format. Specify formats according to `datetime pattern`_. // By default, it follows casting rules to :class:`pyspark.sql.types.DateType` if the format // is omitted. Equivalent to “col.cast("date")“. 
// // ToDate is the Golang equivalent of to_date: (col: 'ColumnOrName', format: Optional[str] = None) -> pyspark.sql.connect.column.Column func ToDate(col column.Column, format string) column.Column { lit_format := StringLit(format) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_date", col, lit_format)) } // UnixDate - Returns the number of days since 1970-01-01. // // UnixDate is the Golang equivalent of unix_date: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func UnixDate(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("unix_date", col)) } // UnixMicros - Returns the number of microseconds since 1970-01-01 00:00:00 UTC. // // UnixMicros is the Golang equivalent of unix_micros: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func UnixMicros(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("unix_micros", col)) } // UnixMillis - Returns the number of milliseconds since 1970-01-01 00:00:00 UTC. // Truncates higher levels of precision. // // UnixMillis is the Golang equivalent of unix_millis: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func UnixMillis(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("unix_millis", col)) } // UnixSeconds - Returns the number of seconds since 1970-01-01 00:00:00 UTC. // Truncates higher levels of precision. // // UnixSeconds is the Golang equivalent of unix_seconds: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func UnixSeconds(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("unix_seconds", col)) } // ToTimestamp - Converts a :class:`~pyspark.sql.Column` into :class:`pyspark.sql.types.TimestampType` // using the optionally specified format. Specify formats according to `datetime pattern`_. 
// By default, it follows casting rules to :class:`pyspark.sql.types.TimestampType` if the format // is omitted. Equivalent to “col.cast("timestamp")“. // // ToTimestamp is the Golang equivalent of to_timestamp: (col: 'ColumnOrName', format: Optional[str] = None) -> pyspark.sql.connect.column.Column func ToTimestamp(col column.Column, format string) column.Column { lit_format := StringLit(format) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_timestamp", col, lit_format)) } // TryToTimestamp - Parses the `col` with the `format` to a timestamp. The function always // returns null on an invalid input with/without ANSI SQL mode enabled. The result data type is // consistent with the value of configuration `spark.sql.timestampType`. // // TryToTimestamp is the Golang equivalent of try_to_timestamp: (col: 'ColumnOrName', format: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func TryToTimestamp(col column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_to_timestamp", col, format)) } // Xpath - Returns a string array of values within the nodes of xml that match the XPath expression. // // Xpath is the Golang equivalent of xpath: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Xpath(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath", xml, path)) } // XpathBoolean - Returns true if the XPath expression evaluates to true, or if a matching node is found. 
// // XpathBoolean is the Golang equivalent of xpath_boolean: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathBoolean(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_boolean", xml, path)) } // XpathDouble - Returns a double value, the value zero if no match is found, // or NaN if a match is found but the value is non-numeric. // // XpathDouble is the Golang equivalent of xpath_double: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathDouble(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_double", xml, path)) } // XpathNumber - Returns a double value, the value zero if no match is found, // or NaN if a match is found but the value is non-numeric. // // XpathNumber is the Golang equivalent of xpath_number: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathNumber(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_number", xml, path)) } // XpathFloat - Returns a float value, the value zero if no match is found, // or NaN if a match is found but the value is non-numeric. // // XpathFloat is the Golang equivalent of xpath_float: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathFloat(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_float", xml, path)) } // XpathInt - Returns an integer value, or the value zero if no match is found, // or a match is found but the value is non-numeric. 
// // XpathInt is the Golang equivalent of xpath_int: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathInt(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_int", xml, path)) } // XpathLong - Returns a long integer value, or the value zero if no match is found, // or a match is found but the value is non-numeric. // // XpathLong is the Golang equivalent of xpath_long: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathLong(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_long", xml, path)) } // XpathShort - Returns a short integer value, or the value zero if no match is found, // or a match is found but the value is non-numeric. // // XpathShort is the Golang equivalent of xpath_short: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathShort(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_short", xml, path)) } // XpathString - Returns the text contents of the first xml node that matches the XPath expression. // // XpathString is the Golang equivalent of xpath_string: (xml: 'ColumnOrName', path: 'ColumnOrName') -> pyspark.sql.connect.column.Column func XpathString(xml column.Column, path column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xpath_string", xml, path)) } // Trunc - Returns date truncated to the unit specified by the format. 
// // Trunc is the Golang equivalent of trunc: (date: 'ColumnOrName', format: str) -> pyspark.sql.connect.column.Column func Trunc(date column.Column, format string) column.Column { lit_format := StringLit(format) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("trunc", date, lit_format)) } // DateTrunc - Returns timestamp truncated to the unit specified by the format. // // DateTrunc is the Golang equivalent of date_trunc: (format: str, timestamp: 'ColumnOrName') -> pyspark.sql.connect.column.Column func DateTrunc(format string, timestamp column.Column) column.Column { lit_format := StringLit(format) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("date_trunc", lit_format, timestamp)) } // NextDay - Returns the first date which is later than the value of the date column // based on second `week day` argument. // // NextDay is the Golang equivalent of next_day: (date: 'ColumnOrName', dayOfWeek: str) -> pyspark.sql.connect.column.Column func NextDay(date column.Column, dayOfWeek string) column.Column { lit_dayOfWeek := StringLit(dayOfWeek) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("next_day", date, lit_dayOfWeek)) } // LastDay - Returns the last day of the month which the given date belongs to. // // LastDay is the Golang equivalent of last_day: (date: 'ColumnOrName') -> pyspark.sql.connect.column.Column func LastDay(date column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("last_day", date)) } // FromUnixtime - Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string // representing the timestamp of that moment in the current system time zone in the given // format. 
// // FromUnixtime is the Golang equivalent of from_unixtime: (timestamp: 'ColumnOrName', format: str = 'yyyy-MM-dd HH:mm:ss') -> pyspark.sql.connect.column.Column func FromUnixtime(timestamp column.Column, format string) column.Column { lit_format := StringLit(format) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("from_unixtime", timestamp, lit_format)) } // UnixTimestamp - Convert time string with given pattern ('yyyy-MM-dd HH:mm:ss', by default) // to Unix time stamp (in seconds), using the default timezone and the default // locale, returns null if failed. // // if `timestamp` is None, then it returns current timestamp. // // UnixTimestamp is the Golang equivalent of unix_timestamp: (timestamp: Optional[ForwardRef('ColumnOrName')] = None, format: str = 'yyyy-MM-dd HH:mm:ss') -> pyspark.sql.connect.column.Column func UnixTimestamp(timestamp column.Column, format string) column.Column { lit_format := StringLit(format) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("unix_timestamp", timestamp, lit_format)) } // FromUtcTimestamp - This is a common function for databases supporting TIMESTAMP WITHOUT TIMEZONE. This function // takes a timestamp which is timezone-agnostic, and interprets it as a timestamp in UTC, and // renders that timestamp as a timestamp in the given time zone. // // However, timestamp in Spark represents number of microseconds from the Unix epoch, which is not // timezone-agnostic. So in Spark this function just shift the timestamp value from UTC timezone to // the given timezone. // // This function may return confusing result if the input is a string with timezone, e.g. // '2018-03-13T06:18:23+00:00'. The reason is that, Spark firstly cast the string to timestamp // according to the timezone in the string, and finally display the result by converting the // timestamp to string according to the session local timezone. 
// // FromUtcTimestamp is the Golang equivalent of from_utc_timestamp: (timestamp: 'ColumnOrName', tz: 'ColumnOrName') -> pyspark.sql.connect.column.Column func FromUtcTimestamp(timestamp column.Column, tz column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("from_utc_timestamp", timestamp, tz)) } // ToUtcTimestamp - This is a common function for databases supporting TIMESTAMP WITHOUT TIMEZONE. This function // takes a timestamp which is timezone-agnostic, and interprets it as a timestamp in the given // timezone, and renders that timestamp as a timestamp in UTC. // // However, timestamp in Spark represents number of microseconds from the Unix epoch, which is not // timezone-agnostic. So in Spark this function just shift the timestamp value from the given // timezone to UTC timezone. // // This function may return confusing result if the input is a string with timezone, e.g. // '2018-03-13T06:18:23+00:00'. The reason is that, Spark firstly cast the string to timestamp // according to the timezone in the string, and finally display the result by converting the // timestamp to string according to the session local timezone. // // ToUtcTimestamp is the Golang equivalent of to_utc_timestamp: (timestamp: 'ColumnOrName', tz: 'ColumnOrName') -> pyspark.sql.connect.column.Column func ToUtcTimestamp(timestamp column.Column, tz column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_utc_timestamp", timestamp, tz)) } // TimestampSeconds - Converts the number of seconds from the Unix epoch (1970-01-01T00:00:00Z) // to a timestamp. // // TimestampSeconds is the Golang equivalent of timestamp_seconds: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TimestampSeconds(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("timestamp_seconds", col)) } // TimestampMillis - Creates timestamp from the number of milliseconds since UTC epoch. 
// // TimestampMillis is the Golang equivalent of timestamp_millis: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TimestampMillis(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("timestamp_millis", col)) } // TimestampMicros - Creates timestamp from the number of microseconds since UTC epoch. // // TimestampMicros is the Golang equivalent of timestamp_micros: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func TimestampMicros(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("timestamp_micros", col)) } // Window - Bucketize rows into one or more time windows given a timestamp specifying column. Window // starts are inclusive but the window ends are exclusive, e.g. 12:05 will be in the window // [12:05,12:10) but not in [12:00,12:05). Windows can support microsecond precision. Windows in // the order of months are not supported. // // The time column must be of :class:`pyspark.sql.types.TimestampType`. // // Durations are provided as strings, e.g. '1 second', '1 day 12 hours', '2 minutes'. Valid // interval strings are 'week', 'day', 'hour', 'minute', 'second', 'millisecond', 'microsecond'. // If the “slideDuration“ is not provided, the windows will be tumbling windows. // // The startTime is the offset with respect to 1970-01-01 00:00:00 UTC with which to start // window intervals. For example, in order to have hourly tumbling windows that start 15 minutes // past the hour, e.g. 12:15-13:15, 13:15-14:15... provide `startTime` as `15 minutes`. // // The output column will be a struct called 'window' by default with the nested columns 'start' // and 'end', where 'start' and 'end' will be of :class:`pyspark.sql.types.TimestampType`. 
// // Window is the Golang equivalent of window: (timeColumn: 'ColumnOrName', windowDuration: str, slideDuration: Optional[str] = None, startTime: Optional[str] = None) -> pyspark.sql.connect.column.Column func Window(timeColumn column.Column, windowDuration string, slideDuration string, startTime string) column.Column { lit_windowDuration := StringLit(windowDuration) lit_slideDuration := StringLit(slideDuration) lit_startTime := StringLit(startTime) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("window", timeColumn, lit_windowDuration, lit_slideDuration, lit_startTime)) } // WindowTime - Computes the event time from a window column. The column window values are produced // by window aggregating operators and are of type `STRUCT` // where start is inclusive and end is exclusive. The event time of records produced by window // aggregating operators can be computed as “window_time(window)“ and are // “window.end - lit(1).alias("microsecond")“ (as microsecond is the minimal supported event // time precision). The window column must be one produced by a window aggregating operator. // // WindowTime is the Golang equivalent of window_time: (windowColumn: 'ColumnOrName') -> pyspark.sql.connect.column.Column func WindowTime(windowColumn column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("window_time", windowColumn)) } // TODO: session_window: (timeColumn: 'ColumnOrName', gapDuration: Union[pyspark.sql.connect.column.Column, str]) -> pyspark.sql.connect.column.Column // ToUnixTimestamp - Returns the UNIX timestamp of the given time. 
// // ToUnixTimestamp is the Golang equivalent of to_unix_timestamp: (timestamp: 'ColumnOrName', format: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func ToUnixTimestamp(timestamp column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_unix_timestamp", timestamp, format)) } // ToTimestampLtz - Parses the `timestamp` with the `format` to a timestamp without time zone. // Returns null with invalid input. // // ToTimestampLtz is the Golang equivalent of to_timestamp_ltz: (timestamp: 'ColumnOrName', format: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func ToTimestampLtz(timestamp column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_timestamp_ltz", timestamp, format)) } // ToTimestampNtz - Parses the `timestamp` with the `format` to a timestamp without time zone. // Returns null with invalid input. // // ToTimestampNtz is the Golang equivalent of to_timestamp_ntz: (timestamp: 'ColumnOrName', format: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func ToTimestampNtz(timestamp column.Column, format column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("to_timestamp_ntz", timestamp, format)) } // TODO: bucket: (numBuckets: Union[pyspark.sql.connect.column.Column, int], col: 'ColumnOrName') -> pyspark.sql.connect.column.Column // Years - Partition transform function: A transform for timestamps and dates // to partition data into years. // // Years is the Golang equivalent of years: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Years(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("years", col)) } // Months - Partition transform function: A transform for timestamps and dates // to partition data into months. 
// // Months is the Golang equivalent of months: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Months(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("months", col)) } // Days - Partition transform function: A transform for timestamps and dates // to partition data into days. // // Days is the Golang equivalent of days: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Days(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("days", col)) } // Hours - Partition transform function: A transform for timestamps // to partition data into hours. // // Hours is the Golang equivalent of hours: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Hours(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("hours", col)) } // TODO: convert_timezone: (sourceTz: Optional[pyspark.sql.connect.column.Column], targetTz: pyspark.sql.connect.column.Column, sourceTs: 'ColumnOrName') -> pyspark.sql.connect.column.Column // MakeDtInterval - Make DayTimeIntervalType duration from days, hours, mins and secs. // // MakeDtInterval is the Golang equivalent of make_dt_interval: (days: Optional[ForwardRef('ColumnOrName')] = None, hours: Optional[ForwardRef('ColumnOrName')] = None, mins: Optional[ForwardRef('ColumnOrName')] = None, secs: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column func MakeDtInterval(days column.Column, hours column.Column, mins column.Column, secs column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("make_dt_interval", days, hours, mins, secs)) } // MakeInterval - Make interval from years, months, weeks, days, hours, mins and secs. 
//
// MakeInterval is the Golang equivalent of make_interval: (years: Optional[ForwardRef('ColumnOrName')] = None, months: Optional[ForwardRef('ColumnOrName')] = None, weeks: Optional[ForwardRef('ColumnOrName')] = None, days: Optional[ForwardRef('ColumnOrName')] = None, hours: Optional[ForwardRef('ColumnOrName')] = None, mins: Optional[ForwardRef('ColumnOrName')] = None, secs: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
func MakeInterval(years column.Column, months column.Column, weeks column.Column, days column.Column, hours column.Column, mins column.Column, secs column.Column,
) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("make_interval", years, months, weeks, days, hours, mins, secs))
}

// MakeTimestamp - Create timestamp from years, months, days, hours, mins, secs and timezone fields.
// The result data type is consistent with the value of configuration `spark.sql.timestampType`.
// If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL
// on invalid inputs. Otherwise, it will throw an error instead.
//
// MakeTimestamp is the Golang equivalent of make_timestamp: (years: 'ColumnOrName', months: 'ColumnOrName', days: 'ColumnOrName', hours: 'ColumnOrName', mins: 'ColumnOrName', secs: 'ColumnOrName', timezone: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
func MakeTimestamp(years column.Column, months column.Column, days column.Column, hours column.Column, mins column.Column, secs column.Column, timezone column.Column,
) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("make_timestamp", years, months, days, hours, mins, secs, timezone))
}

// MakeTimestampLtz - Create the current timestamp with local time zone from years, months, days, hours, mins,
// secs and timezone fields. If the configuration `spark.sql.ansi.enabled` is false,
// the function returns NULL on invalid inputs. Otherwise, it will throw an error instead.
//
// MakeTimestampLtz is the Golang equivalent of make_timestamp_ltz: (years: 'ColumnOrName', months: 'ColumnOrName', days: 'ColumnOrName', hours: 'ColumnOrName', mins: 'ColumnOrName', secs: 'ColumnOrName', timezone: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
func MakeTimestampLtz(years column.Column, months column.Column, days column.Column, hours column.Column, mins column.Column, secs column.Column, timezone column.Column,
) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("make_timestamp_ltz", years, months, days, hours, mins, secs, timezone))
}

// MakeTimestampNtz - Create local date-time from years, months, days, hours, mins, secs fields.
// If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL
// on invalid inputs. Otherwise, it will throw an error instead.
//
// MakeTimestampNtz is the Golang equivalent of make_timestamp_ntz: (years: 'ColumnOrName', months: 'ColumnOrName', days: 'ColumnOrName', hours: 'ColumnOrName', mins: 'ColumnOrName', secs: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func MakeTimestampNtz(years column.Column, months column.Column, days column.Column, hours column.Column, mins column.Column, secs column.Column,
) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("make_timestamp_ntz", years, months, days, hours, mins, secs))
}

// MakeYmInterval - Make year-month interval from years, months.
//
// MakeYmInterval is the Golang equivalent of make_ym_interval: (years: Optional[ForwardRef('ColumnOrName')] = None, months: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
func MakeYmInterval(years column.Column, months column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("make_ym_interval", years, months))
}

// CurrentCatalog - Returns the current catalog.
//
// CurrentCatalog is the Golang equivalent of current_catalog: () -> pyspark.sql.connect.column.Column
func CurrentCatalog() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("current_catalog"))
}

// CurrentDatabase - Returns the current database.
//
// CurrentDatabase is the Golang equivalent of current_database: () -> pyspark.sql.connect.column.Column
func CurrentDatabase() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("current_database"))
}

// CurrentSchema - Returns the current schema.
// (The generated doc previously said "current database" — a copy-paste from
// CurrentDatabase; the wrapped SQL function is current_schema.)
//
// CurrentSchema is the Golang equivalent of current_schema: () -> pyspark.sql.connect.column.Column
func CurrentSchema() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("current_schema"))
}

// CurrentUser - Returns the current user.
// (The generated doc previously said "current database" — copy-paste fix.)
//
// CurrentUser is the Golang equivalent of current_user: () -> pyspark.sql.connect.column.Column
func CurrentUser() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("current_user"))
}

// User - Returns the current user.
// (The generated doc previously said "current database" — copy-paste fix.)
//
// User is the Golang equivalent of user: () -> pyspark.sql.connect.column.Column
func User() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("user"))
}

// TODO: assert_true: (col: 'ColumnOrName', errMsg: Union[pyspark.sql.connect.column.Column, str, NoneType] = None) -> pyspark.sql.connect.column.Column

// TODO: raise_error: (errMsg: Union[pyspark.sql.connect.column.Column, str]) -> pyspark.sql.connect.column.Column

// Crc32 - Calculates the cyclic redundancy check value (CRC32) of a binary column and
// returns the value as a bigint.
//
// Crc32 is the Golang equivalent of crc32: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Crc32(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("crc32", col))
}

// Hash - Calculates the hash code of given columns, and returns the result as an int column.
// // Hash is the Golang equivalent of hash: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Hash(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("hash", vals...)) } // Xxhash64 - Calculates the hash code of given columns using the 64-bit variant of the xxHash algorithm, // and returns the result as a long column. The hash computation uses an initial seed of 42. // // Xxhash64 is the Golang equivalent of xxhash64: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Xxhash64(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("xxhash64", vals...)) } // Md5 - Calculates the MD5 digest and returns the value as a 32 character hex string. // // Md5 is the Golang equivalent of md5: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Md5(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("md5", col)) } // Sha1 - Returns the hex string result of SHA-1. // // Sha1 is the Golang equivalent of sha1: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Sha1(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sha1", col)) } // Sha2 - Returns the hex string result of SHA-2 family of hash functions (SHA-224, SHA-256, SHA-384, // and SHA-512). The numBits indicates the desired bit length of the result, which must have a // value of 224, 256, 384, 512, or 0 (which is equivalent to 256). 
// // Sha2 is the Golang equivalent of sha2: (col: 'ColumnOrName', numBits: int) -> pyspark.sql.connect.column.Column func Sha2(col column.Column, numBits int64) column.Column { lit_numBits := Int64Lit(numBits) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sha2", col, lit_numBits)) } // TODO: hll_sketch_agg: (col: 'ColumnOrName', lgConfigK: Union[int, pyspark.sql.connect.column.Column, NoneType] = None) -> pyspark.sql.connect.column.Column // TODO: hll_union_agg: (col: 'ColumnOrName', allowDifferentLgConfigK: Optional[bool] = None) -> pyspark.sql.connect.column.Column // HllSketchEstimate - Returns the estimated number of unique values given the binary representation // of a Datasketches HllSketch. // // HllSketchEstimate is the Golang equivalent of hll_sketch_estimate: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func HllSketchEstimate(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("hll_sketch_estimate", col)) } // TODO: hll_union: (col1: 'ColumnOrName', col2: 'ColumnOrName', allowDifferentLgConfigK: Optional[bool] = None) -> pyspark.sql.connect.column.Column // Ifnull - Returns `col2` if `col1` is null, or `col1` otherwise. // // Ifnull is the Golang equivalent of ifnull: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Ifnull(col1 column.Column, col2 column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("ifnull", col1, col2)) } // Isnotnull - Returns true if `col` is not null, or false otherwise. // // Isnotnull is the Golang equivalent of isnotnull: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Isnotnull(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("isnotnull", col)) } // EqualNull - Returns same result as the EQUAL(=) operator for non-null operands, // but returns true if both are null, false if one of the them is null. 
//
// EqualNull is the Golang equivalent of equal_null: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func EqualNull(col1 column.Column, col2 column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("equal_null", col1, col2))
}

// Nullif - Returns null if `col1` equals to `col2`, or `col1` otherwise.
//
// Nullif is the Golang equivalent of nullif: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Nullif(col1 column.Column, col2 column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("nullif", col1, col2))
}

// Nvl - Returns `col2` if `col1` is null, or `col1` otherwise.
//
// Nvl is the Golang equivalent of nvl: (col1: 'ColumnOrName', col2: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Nvl(col1 column.Column, col2 column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("nvl", col1, col2))
}

// Nvl2 - Returns `col2` if `col1` is not null, or `col3` otherwise.
//
// Nvl2 is the Golang equivalent of nvl2: (col1: 'ColumnOrName', col2: 'ColumnOrName', col3: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Nvl2(col1 column.Column, col2 column.Column, col3 column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("nvl2", col1, col2, col3))
}

// AesEncrypt - Returns an encrypted value of `input` using AES in given `mode` with the specified `padding`.
// Key lengths of 16, 24 and 32 bits are supported. Supported combinations of (`mode`,
// `padding`) are ('ECB', 'PKCS'), ('GCM', 'NONE') and ('CBC', 'PKCS'). Optional initialization
// vectors (IVs) are only supported for CBC and GCM modes. These must be 16 bytes for CBC and 12
// bytes for GCM. If not provided, a random vector will be generated and prepended to the
// output. Optional additional authenticated data (AAD) is only supported for GCM. If provided
// for encryption, the identical AAD value must be provided for decryption. The default mode is
// GCM. In Go all six arguments are required; pass the columns the Python API would
// default for you.
//
// AesEncrypt is the Golang equivalent of aes_encrypt: (input: 'ColumnOrName', key: 'ColumnOrName', mode: Optional[ForwardRef('ColumnOrName')] = None, padding: Optional[ForwardRef('ColumnOrName')] = None, iv: Optional[ForwardRef('ColumnOrName')] = None, aad: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
func AesEncrypt(input column.Column, key column.Column, mode column.Column, padding column.Column, iv column.Column, aad column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("aes_encrypt", input, key, mode, padding, iv, aad))
}

// AesDecrypt - Returns a decrypted value of `input` using AES in `mode` with `padding`. Key lengths of 16,
// 24 and 32 bits are supported. Supported combinations of (`mode`, `padding`) are ('ECB',
// 'PKCS'), ('GCM', 'NONE') and ('CBC', 'PKCS'). Optional additional authenticated data (AAD) is
// only supported for GCM. If provided for encryption, the identical AAD value must be provided
// for decryption. The default mode is GCM.
//
// AesDecrypt is the Golang equivalent of aes_decrypt: (input: 'ColumnOrName', key: 'ColumnOrName', mode: Optional[ForwardRef('ColumnOrName')] = None, padding: Optional[ForwardRef('ColumnOrName')] = None, aad: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
func AesDecrypt(input column.Column, key column.Column, mode column.Column, padding column.Column, aad column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("aes_decrypt", input, key, mode, padding, aad))
}

// TryAesDecrypt - This is a special version of `aes_decrypt` that performs the same operation,
// but returns a NULL value instead of raising an error if the decryption cannot be performed.
// Returns a decrypted value of `input` using AES in `mode` with `padding`. Key lengths of 16,
// 24 and 32 bits are supported. Supported combinations of (`mode`, `padding`) are ('ECB',
// 'PKCS'), ('GCM', 'NONE') and ('CBC', 'PKCS'). Optional additional authenticated data (AAD) is
// only supported for GCM. If provided for encryption, the identical AAD value must be provided
// for decryption. The default mode is GCM.
//
// TryAesDecrypt is the Golang equivalent of try_aes_decrypt: (input: 'ColumnOrName', key: 'ColumnOrName', mode: Optional[ForwardRef('ColumnOrName')] = None, padding: Optional[ForwardRef('ColumnOrName')] = None, aad: Optional[ForwardRef('ColumnOrName')] = None) -> pyspark.sql.connect.column.Column
func TryAesDecrypt(input column.Column, key column.Column, mode column.Column, padding column.Column, aad column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("try_aes_decrypt", input, key, mode, padding, aad))
}

// Sha - Returns a sha1 hash value as a hex string of the `col`.
//
// Sha is the Golang equivalent of sha: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func Sha(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("sha", col))
}

// InputFileBlockLength - Returns the length of the block being read, or -1 if not available.
//
// InputFileBlockLength is the Golang equivalent of input_file_block_length: () -> pyspark.sql.connect.column.Column
func InputFileBlockLength() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("input_file_block_length"))
}

// InputFileBlockStart - Returns the start offset of the block being read, or -1 if not available.
//
// InputFileBlockStart is the Golang equivalent of input_file_block_start: () -> pyspark.sql.connect.column.Column
func InputFileBlockStart() column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("input_file_block_start"))
}

// Reflect - Calls a method with reflection.
// // Reflect is the Golang equivalent of reflect: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Reflect(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("reflect", vals...)) } // JavaMethod - Calls a method with reflection. // // JavaMethod is the Golang equivalent of java_method: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func JavaMethod(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("java_method", vals...)) } // Version - Returns the Spark version. The string contains 2 fields, the first being a release version // and the second being a git revision. // // Version is the Golang equivalent of version: () -> pyspark.sql.connect.column.Column func Version() column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("version")) } // Typeof - Return DDL-formatted type string for the data type of the input. // // Typeof is the Golang equivalent of typeof: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Typeof(col column.Column) column.Column { return column.NewColumn(column.NewUnresolvedFunctionWithColumns("typeof", col)) } // Stack - Separates `col1`, ..., `colk` into `n` rows. Uses column names col0, col1, etc. by default // unless specified otherwise. // // Stack is the Golang equivalent of stack: (*cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column func Stack(cols ...column.Column) column.Column { vals := make([]column.Column, 0) vals = append(vals, cols...) return column.NewColumn(column.NewUnresolvedFunctionWithColumns("stack", vals...)) } // BitmapBitPosition - Returns the bit position for the given input column. 
//
// BitmapBitPosition is the Golang equivalent of bitmap_bit_position: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
// All bitmap_* helpers below simply forward to the server-side SQL function of
// the same name via an unresolved function expression.
func BitmapBitPosition(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bitmap_bit_position", col))
}

// BitmapBucketNumber - Returns the bucket number for the given input column.
//
// BitmapBucketNumber is the Golang equivalent of bitmap_bucket_number: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func BitmapBucketNumber(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bitmap_bucket_number", col))
}

// BitmapConstructAgg - Returns a bitmap with the positions of the bits set from all the values from the input column.
// The input column will most likely be bitmap_bit_position().
//
// BitmapConstructAgg is the Golang equivalent of bitmap_construct_agg: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func BitmapConstructAgg(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bitmap_construct_agg", col))
}

// BitmapCount - Returns the number of set bits in the input bitmap.
//
// BitmapCount is the Golang equivalent of bitmap_count: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func BitmapCount(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bitmap_count", col))
}

// BitmapOrAgg - Returns a bitmap that is the bitwise OR of all of the bitmaps from the input column.
// The input column should be bitmaps created from bitmap_construct_agg().
//
// BitmapOrAgg is the Golang equivalent of bitmap_or_agg: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func BitmapOrAgg(col column.Column) column.Column {
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("bitmap_or_agg", col))
}

// Ignore UDF: call_udf: (udfName: str, *cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column

// Ignore UDT: unwrap_udt: (col: 'ColumnOrName') -> pyspark.sql.connect.column.Column

// TODO: udf: (f: Union[Callable[..., Any], ForwardRef('DataTypeOrString'), NoneType] = None, returnType: 'DataTypeOrString' = StringType(), useArrow: Optional[bool] = None) -> Union[ForwardRef('UserDefinedFunctionLike'), Callable[[Callable[..., Any]], ForwardRef('UserDefinedFunctionLike')]]

// Ignore UDT: udtf: (cls: Optional[Type] = None, *, returnType: Union[pyspark.sql.types.StructType, str], useArrow: Optional[bool] = None) -> Union[ForwardRef('UserDefinedTableFunction'), Callable[[Type], ForwardRef('UserDefinedTableFunction')]]

// CallFunction - Call a SQL function.
//
// CallFunction is the Golang equivalent of call_function: (funcName: str, *cols: 'ColumnOrName') -> pyspark.sql.connect.column.Column
func CallFunction(funcName string, cols ...column.Column) column.Column {
	// The function name travels as the first (string-literal) argument of
	// call_function, followed by the argument columns in order.
	lit_funcName := StringLit(funcName)
	vals := make([]column.Column, 0)
	vals = append(vals, lit_funcName)
	vals = append(vals, cols...)
	return column.NewColumn(column.NewUnresolvedFunctionWithColumns("call_function", vals...))
}

================================================
FILE: spark/sql/group.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sql

import (
	"context"

	"github.com/apache/spark-connect-go/spark/sql/types"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/sparkerrors"
	"github.com/apache/spark-connect-go/spark/sql/column"
	"github.com/apache/spark-connect-go/spark/sql/functions"
)

// GroupedData captures a pending grouping over a DataFrame. groupType selects
// the aggregation flavor ("groupby", "rollup", "cube", or "pivot"); the pivot
// fields are only populated when groupType is "pivot" (see Pivot below).
type GroupedData struct {
	df           *dataFrameImpl
	groupType    string
	groupingCols []column.Convertible
	pivotValues  []types.LiteralType
	pivotCol     column.Convertible
}

// Agg compute aggregates and returns the result as a DataFrame. The aggregate expressions
// are passed as column.Column arguments. At least one expression is required.
func (gd *GroupedData) Agg(ctx context.Context, exprs ...column.Convertible) (DataFrame, error) {
	if len(exprs) == 0 {
		return nil, sparkerrors.WithString(sparkerrors.InvalidInputError, "exprs should not be empty")
	}
	agg := &proto.Aggregate{
		Input: gd.df.relation,
	}
	// Add all grouping and aggregate expressions.
	agg.GroupingExpressions = make([]*proto.Expression, len(gd.groupingCols))
	for i, col := range gd.groupingCols {
		exp, err := col.ToProto(ctx)
		if err != nil {
			return nil, err
		}
		agg.GroupingExpressions[i] = exp
	}
	agg.AggregateExpressions = make([]*proto.Expression, len(exprs))
	for i, expr := range exprs {
		exp, err := expr.ToProto(ctx)
		if err != nil {
			return nil, err
		}
		agg.AggregateExpressions[i] = exp
	}
	// Apply the groupType
	switch gd.groupType {
	case "pivot":
		agg.GroupType = proto.Aggregate_GROUP_TYPE_PIVOT
		// Apply all pivot behavior and convert columns into literals.
		// Pivot values must have been supplied via Pivot(); the server needs
		// the explicit value list to build the pivoted output columns.
		if len(gd.pivotValues) == 0 {
			return nil, sparkerrors.WithString(sparkerrors.InvalidInputError, "pivotValues should not be empty")
		}
		protoCol, err := gd.pivotCol.ToProto(ctx)
		if err != nil {
			return nil, err
		}
		agg.Pivot = &proto.Aggregate_Pivot{
			Values: make([]*proto.Expression_Literal, len(gd.pivotValues)),
			Col:    protoCol,
		}
		for i, v := range gd.pivotValues {
			exp, err := column.NewLiteral(v).ToProto(ctx)
			if err != nil {
				return nil, err
			}
			agg.Pivot.Values[i] = exp.GetLiteral()
		}
	case "groupby":
		agg.GroupType = proto.Aggregate_GROUP_TYPE_GROUPBY
	case "rollup":
		agg.GroupType = proto.Aggregate_GROUP_TYPE_ROLLUP
	case "cube":
		agg.GroupType = proto.Aggregate_GROUP_TYPE_CUBE
	}
	// NOTE(review): an unrecognized groupType silently leaves GroupType at its
	// proto zero value — confirm callers can only construct the four cases above.
	rel := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Aggregate{
			Aggregate: agg,
		},
	}
	return NewDataFrame(gd.df.session, rel), nil
}

// numericAgg applies the aggregate function `name` to the given columns, or to
// every numeric column of the DataFrame when cols is empty. It errors if any
// explicitly requested column is not numeric. Fetching the schema requires a
// round trip to the server.
func (gd *GroupedData) numericAgg(ctx context.Context, name string, cols ...string) (DataFrame, error) {
	schema, err := gd.df.Schema(ctx)
	if err != nil {
		return nil, err
	}
	// Find all numeric cols in the schema:
	numericCols := make([]string, 0)
	for _, field := range schema.Fields {
		if field.DataType.IsNumeric() {
			numericCols = append(numericCols, field.Name)
		}
	}
	aggCols := cols
	if len(cols) > 0 {
		// Validate that every requested column is one of the numeric columns.
		invalidCols := make([]string, 0)
		for _, col := range cols {
			found := false
			for _, nc := range numericCols {
				if col == nc {
					found = true
				}
			}
			if !found {
				invalidCols = append(invalidCols, col)
			}
		}
		if len(invalidCols) > 0 {
			return nil, sparkerrors.WithStringf(sparkerrors.InvalidInputError, "columns %v are not numeric", invalidCols)
		}
	} else {
		aggCols = numericCols
	}
	// Wrap each target column in a call of the aggregate function.
	finalColumns := make([]column.Convertible, len(aggCols))
	for i, col := range aggCols {
		finalColumns[i] = column.NewColumn(column.NewUnresolvedFunctionWithColumns(name, functions.Col(col)))
	}
	return gd.Agg(ctx, finalColumns...)
}

// Min Computes the min value for each numeric column for each group.
func (gd *GroupedData) Min(ctx context.Context, cols ...string) (DataFrame, error) {
	return gd.numericAgg(ctx, "min", cols...)
}

// Max Computes the max value for each numeric column for each group.
func (gd *GroupedData) Max(ctx context.Context, cols ...string) (DataFrame, error) {
	return gd.numericAgg(ctx, "max", cols...)
}

// Avg Computes the avg value for each numeric column for each group.
func (gd *GroupedData) Avg(ctx context.Context, cols ...string) (DataFrame, error) {
	return gd.numericAgg(ctx, "avg", cols...)
}

// Sum Computes the sum value for each numeric column for each group.
func (gd *GroupedData) Sum(ctx context.Context, cols ...string) (DataFrame, error) {
	return gd.numericAgg(ctx, "sum", cols...)
}

// Count Computes the count value for each group. The resulting column is
// aliased to "count".
func (gd *GroupedData) Count(ctx context.Context) (DataFrame, error) {
	return gd.Agg(ctx, functions.Count(functions.Lit(types.Int64(1))).Alias("count"))
}

// Mean Computes the average value for each numeric column for each group.
// It is an alias for Avg.
func (gd *GroupedData) Mean(ctx context.Context, cols ...string) (DataFrame, error) {
	return gd.Avg(ctx, cols...)
}

// Pivot returns a new GroupedData that pivots on pivotCol with the given
// explicit pivot values. It may only be applied to a plain "groupby" grouping;
// applying it to rollup, cube, or an existing pivot is an error.
func (gd *GroupedData) Pivot(ctx context.Context, pivotCol string, pivotValues []types.LiteralType) (*GroupedData, error) {
	if gd.groupType != "groupby" {
		if gd.groupType == "pivot" {
			return nil, sparkerrors.WithString(sparkerrors.InvalidInputError, "pivot cannot be applied on pivot")
		}
		return nil, sparkerrors.WithString(sparkerrors.InvalidInputError, "pivot can only be applied on groupby")
	}
	return &GroupedData{
		df:           gd.df,
		groupType:    "pivot",
		groupingCols: gd.groupingCols,
		pivotValues:  pivotValues,
		pivotCol:     column.NewColumnReferenceWithPlanId(pivotCol, gd.df.PlanId()),
	}, nil
}

================================================
FILE: spark/sql/group_test.go
================================================
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sql

import (
	"context"
	"testing"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/client"
	"github.com/apache/spark-connect-go/spark/client/testutils"
	"github.com/apache/spark-connect-go/spark/mocks"
	"github.com/stretchr/testify/assert"
)

// sampleDataFrame is a shared fixture over a Range(end=10, step=1) relation.
// NOTE(review): both tests below assign sampleDataFrame.session, so this
// package-level fixture is mutated by tests and they cannot run in parallel.
var sampleDataFrame = &dataFrameImpl{session: nil, relation: &proto.Relation{
	RelType: &proto.Relation_Range{
		Range: &proto.Range{
			End:  10,
			Step: 1,
		},
	},
}}

// TestGroupedData_Agg verifies that the numeric aggregations produce Aggregate
// relations with the expected function names, using a mocked AnalyzePlan
// response for schema resolution.
func TestGroupedData_Agg(t *testing.T) {
	ctx := context.Background()
	c := client.NewSparkExecutorFromClient(
		testutils.NewConnectServiceClientMock(nil, mocks.AnalyzePlanResponse, nil, nil), nil, mocks.MockSessionId)
	session := sparkSessionImpl{sessionId: mocks.MockSessionId, client: c}
	sampleDataFrame.session = &session
	gd := GroupedData{
		groupType: "groupby",
		df:        sampleDataFrame,
	}

	// Should not be able to group by a non-existing column
	_, err := gd.Min(ctx, "nonExistingColumn")
	assert.Error(t, err)

	// Group by an existing column should work
	df, err := gd.Min(ctx, "col0")
	assert.NoError(t, err)
	assert.IsType(t, df.(*dataFrameImpl).relation.RelType, &proto.Relation_Aggregate{})
	assert.Equal(t, "min", df.(*dataFrameImpl).relation.GetAggregate().GetAggregateExpressions()[0].GetUnresolvedFunction().FunctionName)

	// Group by an existing column should work
	df, err = gd.Max(ctx, "col0")
	assert.NoError(t, err)
	assert.IsType(t, df.(*dataFrameImpl).relation.RelType, &proto.Relation_Aggregate{})
	assert.Equal(t, "max", df.(*dataFrameImpl).relation.GetAggregate().GetAggregateExpressions()[0].GetUnresolvedFunction().FunctionName)

	df, err = gd.Sum(ctx, "col0")
	assert.NoError(t, err)
	assert.IsType(t, df.(*dataFrameImpl).relation.RelType, &proto.Relation_Aggregate{})
	assert.Equal(t, "sum", df.(*dataFrameImpl).relation.GetAggregate().GetAggregateExpressions()[0].GetUnresolvedFunction().FunctionName)

	df, err = gd.Avg(ctx, "col0")
	assert.NoError(t, err)
	assert.IsType(t, df.(*dataFrameImpl).relation.RelType, &proto.Relation_Aggregate{})
	assert.Equal(t, "avg", df.(*dataFrameImpl).relation.GetAggregate().GetAggregateExpressions()[0].GetUnresolvedFunction().FunctionName)

	// Group by no column should pick all numeric columns
	df, err = gd.Min(ctx)
	assert.NoError(t, err)
	assert.IsType(t, df.(*dataFrameImpl).relation.RelType, &proto.Relation_Aggregate{})
	assert.Len(t, df.(*dataFrameImpl).relation.GetAggregate().GetAggregateExpressions(), 1)
}

// TestGroupedData_Count verifies that Count emits a single aggregate
// expression aliased to "count".
func TestGroupedData_Count(t *testing.T) {
	ctx := context.Background()
	c := client.NewSparkExecutorFromClient(
		testutils.NewConnectServiceClientMock(nil, mocks.AnalyzePlanResponse, nil, nil), nil, mocks.MockSessionId)
	session := sparkSessionImpl{sessionId: mocks.MockSessionId, client: c}
	sampleDataFrame.session = &session
	gd := GroupedData{
		groupType: "groupby",
		df:        sampleDataFrame,
	}

	df, err := gd.Count(ctx)
	assert.NoError(t, err)
	assert.IsType(t, df.(*dataFrameImpl).relation.RelType, &proto.Relation_Aggregate{})
	assert.Equal(t, []string{"count"}, df.(*dataFrameImpl).relation.GetAggregate().GetAggregateExpressions()[0].GetAlias().Name)
}

================================================
FILE: spark/sql/mocks_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sql

================================================
FILE: spark/sql/plan.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sql

import (
	"sync/atomic"

	proto "github.com/apache/spark-connect-go/internal/generated"
)

// atomicInt64 is the process-wide counter backing plan-id generation; every
// relation sent to the server carries a unique, monotonically increasing id.
var atomicInt64 atomic.Int64

// newPlanId returns a pointer to the next unique plan id.
func newPlanId() *int64 {
	v := atomicInt64.Add(1)
	return &v
}

// resetPlanIdForTesting rewinds the plan-id counter so tests can assert on
// deterministic plan ids.
func resetPlanIdForTesting() {
	atomicInt64.Swap(0)
}

// newReadTableRelation builds a Read relation for the named table, tagged
// with a fresh plan id.
func newReadTableRelation(table string) *proto.Relation {
	return &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_Read{
			Read: &proto.Read{
				ReadType: &proto.Read_NamedTable_{
					NamedTable: &proto.Read_NamedTable{
						UnparsedIdentifier: table,
					},
				},
			},
		},
	}
}

// newReadWithFormatAndPath builds a DataSource read relation without reader
// options. It delegates to newReadWithFormatAndPathAndOptions; a nil options
// map serializes identically to an absent one.
func newReadWithFormatAndPath(path, format string) *proto.Relation {
	return newReadWithFormatAndPathAndOptions(path, format, nil)
}

// newReadWithFormatAndPathAndOptions builds a DataSource read relation for
// the given path, format and reader options.
// NOTE(review): unlike newReadTableRelation, no RelationCommon/PlanId is set
// here — confirm whether that is intentional.
func newReadWithFormatAndPathAndOptions(path, format string, options map[string]string) *proto.Relation {
	return &proto.Relation{
		RelType: &proto.Relation_Read{
			Read: &proto.Read{
				ReadType: &proto.Read_DataSource_{
					DataSource: &proto.Read_DataSource{
						Format:  &format,
						Paths:   []string{path},
						Options: options,
					},
				},
			},
		},
	}
}

================================================
FILE: spark/sql/plan_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // limitations under the License. package sql import ( "testing" "github.com/stretchr/testify/assert" ) func TestNewPlanIdGivesNewIDs(t *testing.T) { id1 := newPlanId() id2 := newPlanId() assert.NotEqual(t, id1, id2) } ================================================ FILE: spark/sql/sparksession.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package sql

import (
	"bytes"
	"context"
	"fmt"
	"time"

	"github.com/apache/arrow-go/v18/arrow/memory"

	"github.com/apache/spark-connect-go/spark/sql/types"

	"github.com/apache/arrow-go/v18/arrow"
	"github.com/apache/arrow-go/v18/arrow/array"
	"github.com/apache/arrow-go/v18/arrow/ipc"

	"github.com/apache/spark-connect-go/spark/client/base"
	"github.com/apache/spark-connect-go/spark/client/options"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/client"
	"github.com/apache/spark-connect-go/spark/client/channel"
	"github.com/apache/spark-connect-go/spark/sparkerrors"
	"github.com/google/uuid"
	"google.golang.org/grpc/metadata"
)

// SparkSession is the entry point for talking to a Spark Connect server:
// reading data sources, running SQL, and creating DataFrames.
type SparkSession interface {
	Read() DataFrameReader
	Sql(ctx context.Context, query string) (DataFrame, error)
	Stop() error
	Table(name string) (DataFrame, error)
	CreateDataFrameFromArrow(ctx context.Context, data arrow.Table) (DataFrame, error)
	CreateDataFrame(ctx context.Context, data [][]any, schema *types.StructType) (DataFrame, error)
	Config() client.RuntimeConfig
}

// NewSessionBuilder creates a new session builder for starting a new spark session
func NewSessionBuilder() *SparkSessionBuilder {
	return &SparkSessionBuilder{}
}

// SparkSessionBuilder accumulates connection settings consumed by Build.
type SparkSessionBuilder struct {
	connectionString string
	channelBuilder   channel.Builder
}

// Remote sets the connection string for remote connection
func (s *SparkSessionBuilder) Remote(connectionString string) *SparkSessionBuilder {
	s.connectionString = connectionString
	return s
}

// WithChannelBuilder installs a custom channel builder, bypassing the
// connection-string parsing that Build would otherwise perform.
func (s *SparkSessionBuilder) WithChannelBuilder(cb channel.Builder) *SparkSessionBuilder {
	s.channelBuilder = cb
	return s
}

// Build establishes the gRPC connection, generates a fresh session id, and
// returns a ready-to-use SparkSession. Connection failures are wrapped as
// sparkerrors.ConnectionError.
func (s *SparkSessionBuilder) Build(ctx context.Context) (SparkSession, error) {
	// Derive a channel builder from the connection string only when the
	// caller did not supply one explicitly.
	if s.channelBuilder == nil {
		cb, err := channel.NewBuilder(s.connectionString)
		if err != nil {
			return nil, sparkerrors.WithType(fmt.Errorf(
				"failed to connect to remote %s: %w", s.connectionString, err), sparkerrors.ConnectionError)
		}
		s.channelBuilder = cb
	}
	conn, err := s.channelBuilder.Build(ctx)
	if err != nil {
		return nil, sparkerrors.WithType(fmt.Errorf("failed to connect to remote %s: %w",
			s.connectionString, err), sparkerrors.ConnectionError)
	}

	// Add metadata to the request.
	meta := metadata.MD{}
	for k, v := range s.channelBuilder.Headers() {
		meta[k] = append(meta[k], v)
	}

	sessionId := uuid.NewString()

	// Update the options according to the configuration.
	opts := options.NewSparkClientOptions(options.DefaultSparkClientOptions.ReattachExecution)
	opts.UserAgent = s.channelBuilder.UserAgent()
	opts.UserId = s.channelBuilder.User()

	return &sparkSessionImpl{
		sessionId: sessionId,
		client:    client.NewSparkExecutor(conn, meta, sessionId, opts),
	}, nil
}

// sparkSessionImpl is the default SparkSession implementation backed by a
// Spark Connect gRPC client.
type sparkSessionImpl struct {
	sessionId string
	client    base.SparkConnectClient
}

// Config exposes the runtime configuration of the underlying client.
func (s *sparkSessionImpl) Config() client.RuntimeConfig {
	return client.NewRuntimeConfig(&s.client)
}

// Read returns a reader for loading external data sources as DataFrames.
func (s *sparkSessionImpl) Read() DataFrameReader {
	return NewDataframeReader(s)
}

// Sql executes a sql query and returns the result as a DataFrame
func (s *sparkSessionImpl) Sql(ctx context.Context, query string) (DataFrame, error) {
	// Due to the nature of Spark, we have to first submit the SQL query immediately as a command
	// to make sure that all side effects have been executed properly. If no side effects are present,
	// then simply prepare this as a SQL relation.

	plan := &proto.Plan{
		OpType: &proto.Plan_Command{
			Command: &proto.Command{
				CommandType: &proto.Command_SqlCommand{
					SqlCommand: &proto.SqlCommand{
						Sql: query,
					},
				},
			},
		},
	}
	// We need an execute command here.
	_, _, properties, err := s.client.ExecuteCommand(ctx, plan)
	if err != nil {
		return nil, sparkerrors.WithType(fmt.Errorf("failed to execute sql: %s: %w", query, err), sparkerrors.ExecutionError)
	}

	val, ok := properties["sql_command_result"]
	if !ok {
		// No command result came back: wrap the raw query text in a SQL
		// relation and let the server plan it lazily on first action.
		plan := &proto.Relation{
			Common: &proto.RelationCommon{
				PlanId: newPlanId(),
			},
			RelType: &proto.Relation_Sql{
				Sql: &proto.SQL{
					Query: query,
				},
			},
		}
		return NewDataFrame(s, plan), nil
	} else {
		// The server already planned the query; reuse the returned relation
		// under a fresh plan id.
		rel := val.(*proto.Relation)
		rel.Common = &proto.RelationCommon{
			PlanId: newPlanId(),
		}
		return NewDataFrame(s, rel), nil
	}
}

// Stop terminates the session. Currently a no-op: no server-side state is
// released here.
func (s *sparkSessionImpl) Stop() error {
	return nil
}

// Table returns the named table as a DataFrame.
func (s *sparkSessionImpl) Table(name string) (DataFrame, error) {
	return s.Read().Table(name)
}

// CreateDataFrameFromArrow serializes an Arrow table into Arrow IPC bytes,
// wraps them in a LocalRelation, and renames the columns via ToDF so the
// resulting DataFrame carries the table's schema field names.
func (s *sparkSessionImpl) CreateDataFrameFromArrow(ctx context.Context, data arrow.Table) (DataFrame, error) {
	// Generate the schema.
	// schema := types.ArrowSchemaToProto(data.Schema())
	// schemaString := ""
	// TODO (PySpark does a lot of casting here to convert the schema that does not exist yet.

	// Convert the Arrow Table into a byte array of arrow IPC messages.
	buf := new(bytes.Buffer)
	w := ipc.NewWriter(buf, ipc.WithSchema(data.Schema()))
	defer w.Close()

	// Create a RecordReader from the table
	rr := array.NewTableReader(data, int64(data.NumRows()))
	defer rr.Release()

	// Read the records from the table and write them to the buffer
	for rr.Next() {
		record := rr.Record()
		if err := w.Write(record); err != nil {
			return nil, sparkerrors.WithType(fmt.Errorf("failed to write record: %w", err), sparkerrors.WriteError)
		}
	}

	// NOTE(review): buf.Bytes() is consumed below while w.Close() only runs on
	// return (deferred), so the IPC stream is sent without its end-of-stream
	// marker — confirm the server tolerates this.

	// Create a local relation object
	plan := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_LocalRelation{
			LocalRelation: &proto.LocalRelation{
				// Schema: &schemaString,
				Data: buf.Bytes(),
			},
		},
	}

	// Capture the column names from the schema:
	columnNames := make([]string, data.NumCols())
	for i, field := range data.Schema().Fields() {
		columnNames[i] = field.Name
	}

	dfPlan := &proto.Relation{
		Common: &proto.RelationCommon{
			PlanId: newPlanId(),
		},
		RelType: &proto.Relation_ToDf{
			ToDf: &proto.ToDF{
				Input:       plan,
				ColumnNames: columnNames,
			},
		},
	}
	return NewDataFrame(s, dfPlan), nil
}

// CreateDataFrame builds an Arrow table from row-major Go values according to
// the given Spark schema, then delegates to CreateDataFrameFromArrow. Integer
// columns expect Go `int` values; nil entries become NULLs. Unsupported field
// types return a NotImplementedError.
func (s *sparkSessionImpl) CreateDataFrame(ctx context.Context, data [][]any, schema *types.StructType) (DataFrame, error) {
	pool := memory.NewGoAllocator()
	// Convert the data into an Arrow Table
	arrowSchema := arrow.NewSchema(schema.ToArrowType().(*arrow.StructType).Fields(), nil)
	rb := array.NewRecordBuilder(pool, arrowSchema)
	defer rb.Release()
	// Iterate over all fields and add the values:
	for _, row := range data {
		for i, field := range schema.Fields {
			if row[i] == nil {
				rb.Field(i).AppendNull()
				continue
			}
			// Dispatch on the declared Spark type; each branch asserts the
			// corresponding Go type and appends to the typed builder.
			switch field.DataType {
			case types.BOOLEAN:
				rb.Field(i).(*array.BooleanBuilder).Append(row[i].(bool))
			case types.BYTE:
				rb.Field(i).(*array.Int8Builder).Append(int8(row[i].(int)))
			case types.SHORT:
				rb.Field(i).(*array.Int16Builder).Append(int16(row[i].(int)))
			case types.INTEGER:
				rb.Field(i).(*array.Int32Builder).Append(int32(row[i].(int)))
			case types.LONG:
				rb.Field(i).(*array.Int64Builder).Append(int64(row[i].(int)))
			case types.FLOAT:
				rb.Field(i).(*array.Float32Builder).Append(float32(row[i].(float32)))
			case types.DOUBLE:
				rb.Field(i).(*array.Float64Builder).Append(row[i].(float64))
			case types.STRING:
				rb.Field(i).(*array.StringBuilder).Append(row[i].(string))
			case types.DATE:
				rb.Field(i).(*array.Date32Builder).Append(
					arrow.Date32FromTime(row[i].(time.Time)))
			case types.TIMESTAMP:
				// Timestamps are truncated to millisecond precision.
				ts, err := arrow.TimestampFromTime(row[i].(time.Time), arrow.Millisecond)
				if err != nil {
					return nil, err
				}
				rb.Field(i).(*array.TimestampBuilder).Append(ts)
			default:
				return nil, sparkerrors.WithType(fmt.Errorf(
					"unsupported data type: %s", field.DataType), sparkerrors.NotImplementedError)
			}
		}
	}
	rec := rb.NewRecord()
	defer rec.Release()
	tbl := array.NewTableFromRecords(arrowSchema, []arrow.Record{rec})
	defer tbl.Release()
	return s.CreateDataFrameFromArrow(ctx, tbl)
}

================================================
FILE: spark/sql/sparksession_integration_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sql

import (
	"testing"

	"github.com/apache/arrow-go/v18/arrow"
	"github.com/apache/spark-connect-go/spark/sql/types"
	"github.com/stretchr/testify/assert"
)

// TestSparkSession_CreateDataFrame_StructTypeToArrowConversion validates that
// StructType.ToArrowType() yields a value castable to *arrow.StructType whose
// fields can feed arrow.NewSchema — the conversion CreateDataFrame relies on.
func TestSparkSession_CreateDataFrame_StructTypeToArrowConversion(t *testing.T) {
	// This test validates that the fix for StructType.ToArrowType() works correctly
	// The change from .Fields() to .(*arrow.StructType).Fields() should be validated

	// Create a test schema
	schema := types.StructOf(
		types.NewStructField("id", types.INTEGER),
		types.NewStructField("name", types.STRING),
		types.NewStructField("scores", types.ArrayType{
			ElementType:  types.DOUBLE,
			ContainsNull: true,
		}),
	)

	// Test that ToArrowType returns the correct interface type
	arrowType := schema.ToArrowType()
	assert.NotNil(t, arrowType)

	// Verify we can cast it to *arrow.StructType and access Fields()
	structType, ok := arrowType.(*arrow.StructType)
	assert.True(t, ok)
	fields := structType.Fields()
	assert.Len(t, fields, 3)
	assert.Equal(t, "id", fields[0].Name)
	assert.Equal(t, "name", fields[1].Name)
	assert.Equal(t, "scores", fields[2].Name)

	// Test sample data that would work with CreateDataFrame
	data := [][]any{
		{1, "Alice", []float64{95.5, 87.2, 92.1}},
		{2, "Bob", []float64{88.0, 91.5, 89.3}},
	}

	// Verify that the data structure is compatible with the schema
	assert.Len(t, data, 2)
	for _, row := range data {
		assert.Len(t, row, 3)
		assert.IsType(t, 1, row[0])          // integer id
		assert.IsType(t, "", row[1])         // string name
		assert.IsType(t, []float64{}, row[2]) // array of doubles
	}

	// Build an Arrow schema from the converted fields. This would previously
	// fail due to the type assertion issue; now it works because ToArrowType()
	// returns the arrow.DataType interface.
	// (The unused allocator previously created here was dead code and has
	// been removed.)
	arrowSchema := arrow.NewSchema(structType.Fields(), nil)
	assert.NotNil(t, arrowSchema)
	assert.Equal(t, 3, len(arrowSchema.Fields()))
}
================================================
FILE: spark/sql/sparksession_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sql

import (
	"bytes"
	"context"
	"io"
	"testing"

	"github.com/apache/arrow-go/v18/arrow"
	"github.com/apache/arrow-go/v18/arrow/array"
	"github.com/apache/arrow-go/v18/arrow/ipc"
	"github.com/apache/arrow-go/v18/arrow/memory"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/client"
	"github.com/apache/spark-connect-go/spark/client/testutils"
	"github.com/apache/spark-connect-go/spark/mocks"
	"github.com/apache/spark-connect-go/spark/sparkerrors"
)

// TestSparkSessionTable checks that Table produces the same Read relation as
// newReadTableRelation. The plan-id counter is reset before each construction
// so both relations carry identical ids.
func TestSparkSessionTable(t *testing.T) {
	resetPlanIdForTesting()
	plan := newReadTableRelation("table")
	resetPlanIdForTesting()
	s := testutils.NewConnectServiceClientMock(nil, nil, nil, t)
	c := client.NewSparkExecutorFromClient(s, nil, "")
	session := &sparkSessionImpl{client: c}
	df, err := session.Table("table")
	df_plan := df.(*dataFrameImpl).relation
	assert.Equal(t, plan, df_plan)
	assert.NoError(t, err)
}

// TestSQLCallsExecutePlanWithSQLOnClient drives Sql against a mocked client.
// The mock replies with a SqlCommandResult, then ResultComplete, then EOF —
// the response order the client consumes for a SQL command.
func TestSQLCallsExecutePlanWithSQLOnClient(t *testing.T) {
	ctx := context.Background()

	query := "select * from bla"

	// Create the responses:
	responses := []*mocks.MockResponse{
		{
			Resp: &proto.ExecutePlanResponse{
				ResponseType: &proto.ExecutePlanResponse_SqlCommandResult_{
					SqlCommandResult: &proto.ExecutePlanResponse_SqlCommandResult{},
				},
			},
			Err: nil,
		},
		{
			Resp: &proto.ExecutePlanResponse{
				ResponseType: &proto.ExecutePlanResponse_ResultComplete_{
					ResultComplete: &proto.ExecutePlanResponse_ResultComplete{},
				},
			},
			Err: nil,
		},
		{
			Err: io.EOF,
		},
	}

	s := testutils.NewConnectServiceClientMock(&mocks.ProtoClient{
		RecvResponse: responses,
	}, nil, nil, t)
	c := client.NewSparkExecutorFromClient(s, nil, "")

	session := &sparkSessionImpl{
		client: c,
	}
	resp, err := session.Sql(ctx, query)
	assert.NoError(t, err)
	assert.NotNil(t, resp)
}

// TestNewSessionBuilderCreatesASession verifies the happy path of Build with
// a syntactically valid sc:// connection string.
func TestNewSessionBuilderCreatesASession(t *testing.T) {
	ctx := context.Background()
	spark, err := NewSessionBuilder().Remote("sc://connection").Build(ctx)
	assert.NoError(t, err)
	assert.NotNil(t, spark)
}

// TestNewSessionBuilderFailsIfConnectionStringIsInvalid verifies that a
// malformed connection string surfaces as InvalidInputError and no session
// is returned.
func TestNewSessionBuilderFailsIfConnectionStringIsInvalid(t *testing.T) {
	ctx := context.Background()
	spark, err := NewSessionBuilder().Remote("invalid").Build(ctx)
	assert.Error(t, err)
	assert.ErrorIs(t, err, sparkerrors.InvalidInputError)
	assert.Nil(t, spark)
}

// TestWriteResultStreamsArrowResultToCollector runs a SQL query followed by
// Repartition+Collect against mocked responses, and checks that the Arrow
// batch streamed by the mock is decoded into the expected row values.
func TestWriteResultStreamsArrowResultToCollector(t *testing.T) {
	ctx := context.Background()

	// Build a one-column Arrow IPC payload with two string rows that the mock
	// server will return as the query result.
	arrowFields := []arrow.Field{
		{
			Name: "show_string",
			Type: &arrow.StringType{},
		},
	}
	arrowSchema := arrow.NewSchema(arrowFields, nil)
	var buf bytes.Buffer
	arrowWriter := ipc.NewWriter(&buf, ipc.WithSchema(arrowSchema))
	defer arrowWriter.Close()

	alloc := memory.NewGoAllocator()
	recordBuilder := array.NewRecordBuilder(alloc, arrowSchema)
	defer recordBuilder.Release()

	recordBuilder.Field(0).(*array.StringBuilder).Append("str1a\nstr1b")
	recordBuilder.Field(0).(*array.StringBuilder).Append("str2")

	record := recordBuilder.NewRecord()
	defer record.Release()

	err := arrowWriter.Write(record)
	require.Nil(t, err)

	query := "select * from bla"

	// Create the responses:
	responses := []*mocks.MockResponse{
		// The first stream of response is necessary for the SQL command.
		{
			Resp: &proto.ExecutePlanResponse{
				ResponseType: &proto.ExecutePlanResponse_SqlCommandResult_{
					SqlCommandResult: &proto.ExecutePlanResponse_SqlCommandResult{},
				},
			},
			Err: nil,
		},
		{
			Resp: &proto.ExecutePlanResponse{
				ResponseType: &proto.ExecutePlanResponse_ResultComplete_{
					ResultComplete: &proto.ExecutePlanResponse_ResultComplete{},
				},
			},
			Err: nil,
		},
		{
			Err: io.EOF,
		},
		// The second stream of responses is for the actual execution
		{
			Resp: &proto.ExecutePlanResponse{
				ResponseType: &proto.ExecutePlanResponse_ArrowBatch_{
					ArrowBatch: &proto.ExecutePlanResponse_ArrowBatch{
						RowCount: 2,
						Data:     buf.Bytes(),
					},
				},
			},
		},
		{
			Err: io.EOF,
		},
	}

	s := testutils.NewConnectServiceClientMock(&mocks.ProtoClient{
		RecvResponse: responses,
	}, nil, nil, t)
	c := client.NewSparkExecutorFromClient(s, nil, "")

	session := &sparkSessionImpl{
		client: c,
	}
	resp, err := session.Sql(ctx, query)
	assert.NoError(t, err)
	assert.NotNil(t, resp)

	df, err := resp.Repartition(ctx, 1, []string{"1"})
	assert.NoError(t, err)

	rows, err := df.Collect(ctx)
	assert.NoError(t, err)

	vals := rows[1].Values()
	assert.NoError(t, err)
	assert.Equal(t, []any{"str2"}, vals)
}

================================================
FILE: spark/sql/types/arrow.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License.
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package types import ( "bytes" "fmt" proto "github.com/apache/spark-connect-go/internal/generated" "github.com/apache/arrow-go/v18/arrow" "github.com/apache/arrow-go/v18/arrow/array" "github.com/apache/arrow-go/v18/arrow/ipc" "github.com/apache/spark-connect-go/spark/sparkerrors" ) func ReadArrowTableToRows(table arrow.Table) ([]Row, error) { result := make([]Row, table.NumRows()) // For each column in the table, read the data and convert it to an array of any. cols := make([][]any, table.NumCols()) for i := 0; i < int(table.NumCols()); i++ { chunkedColumn := table.Column(i).Data() column, err := readChunkedColumn(chunkedColumn) if err != nil { return nil, err } cols[i] = column } // Create a list of field names for the rows. fieldNames := make([]string, table.NumCols()) for i, field := range table.Schema().Fields() { fieldNames[i] = field.Name } // Create the rows: for i := 0; i < int(table.NumRows()); i++ { row := make([]any, table.NumCols()) for j := 0; j < int(table.NumCols()); j++ { row[j] = cols[j][i] } r := &rowImpl{ values: row, offsets: make(map[string]int), } for j, fieldName := range fieldNames { r.offsets[fieldName] = j } result[i] = r } return result, nil } func readArrayData(t arrow.Type, data arrow.ArrayData) ([]any, error) { buf := make([]any, 0) // Switch over the type t and append the values to buf. 
switch t { case arrow.BOOL: data := array.NewBooleanData(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.INT8: data := array.NewInt8Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.INT16: data := array.NewInt16Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.INT32: data := array.NewInt32Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.INT64: data := array.NewInt64Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.FLOAT16: data := array.NewFloat16Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.FLOAT32: data := array.NewFloat32Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.FLOAT64: data := array.NewFloat64Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.DECIMAL | arrow.DECIMAL128: data := array.NewDecimal128Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.DECIMAL256: data := array.NewDecimal256Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.STRING: data := array.NewStringData(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.BINARY: data 
:= array.NewBinaryData(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.TIMESTAMP: data := array.NewTimestampData(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.DATE64: data := array.NewDate64Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.DATE32: data := array.NewDate32Data(data) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) } else { buf = append(buf, data.Value(i)) } } case arrow.LIST: data := array.NewListData(data) values := data.ListValues() res, err := readArrayData(values.DataType().ID(), values.Data()) if err != nil { return nil, err } for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) continue } start := data.Offsets()[i] end := data.Offsets()[i+1] // TODO: Unfortunately, this ends up being stored as a slice of slices of any. But not // the right type. buf = append(buf, res[start:end]) } case arrow.MAP: // For maps the data is stored as a list of key value pairs. So to extract the maps, // we follow the same behavior as for lists but with two sub lists. 
data := array.NewMapData(data) keys := data.Keys() values := data.Items() keyValues, err := readArrayData(keys.DataType().ID(), keys.Data()) if err != nil { return nil, err } valueValues, err := readArrayData(values.DataType().ID(), values.Data()) if err != nil { return nil, err } for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) continue } tmp := make(map[any]any) start := data.Offsets()[i] end := data.Offsets()[i+1] k := keyValues[start:end] v := valueValues[start:end] for j := 0; j < len(k); j++ { tmp[k[j]] = v[j] } buf = append(buf, tmp) } case arrow.STRUCT: data := array.NewStructData(data) schema := data.DataType().(*arrow.StructType) for i := 0; i < data.Len(); i++ { if data.IsNull(i) { buf = append(buf, nil) continue } tmp := make(map[string]any) for j := range data.NumField() { field := data.Field(j) fieldValues, err := readArrayData(field.DataType().ID(), field.Data()) if err != nil { return nil, err } tmp[schema.Field(j).Name] = fieldValues[i] } buf = append(buf, tmp) } default: return nil, fmt.Errorf("unsupported arrow data type %s", t.String()) } return buf, nil } func readChunkedColumn(chunked *arrow.Chunked) ([]any, error) { buf := make([]any, 0) for _, chunk := range chunked.Chunks() { data := chunk.Data() t := data.DataType().ID() values, err := readArrayData(t, data) if err != nil { return nil, err } buf = append(buf, values...) 
} return buf, nil } func ReadArrowBatchToRecord(data []byte, schema *StructType) (arrow.Record, error) { reader := bytes.NewReader(data) arrowReader, err := ipc.NewReader(reader) if err != nil { return nil, sparkerrors.WithType(fmt.Errorf("failed to create arrow reader: %w", err), sparkerrors.ReadError) } defer arrowReader.Release() record, err := arrowReader.Read() record.Retain() if err != nil { return nil, sparkerrors.WithType(fmt.Errorf("failed to read arrow record: %w", err), sparkerrors.ReadError) } return record, nil } func arrowStructToProtoStruct(schema *arrow.StructType) *proto.DataType_Struct_ { fields := make([]*proto.DataType_StructField, schema.NumFields()) for i, field := range schema.Fields() { fields[i] = &proto.DataType_StructField{ Name: field.Name, DataType: ArrowTypeToProto(field.Type), } } return &proto.DataType_Struct_{ Struct: &proto.DataType_Struct{ Fields: fields, }, } } func ArrowTypeToProto(dataType arrow.DataType) *proto.DataType { switch dataType.ID() { case arrow.BOOL: return &proto.DataType{Kind: &proto.DataType_Boolean_{}} case arrow.INT8: return &proto.DataType{Kind: &proto.DataType_Byte_{}} case arrow.INT16: return &proto.DataType{Kind: &proto.DataType_Short_{}} case arrow.INT32: return &proto.DataType{Kind: &proto.DataType_Integer_{}} case arrow.INT64: return &proto.DataType{Kind: &proto.DataType_Long_{}} case arrow.FLOAT16: return &proto.DataType{Kind: &proto.DataType_Float_{}} case arrow.FLOAT32: return &proto.DataType{Kind: &proto.DataType_Double_{}} case arrow.FLOAT64: return &proto.DataType{Kind: &proto.DataType_Double_{}} case arrow.DECIMAL | arrow.DECIMAL128: return &proto.DataType{Kind: &proto.DataType_Decimal_{}} case arrow.DECIMAL256: return &proto.DataType{Kind: &proto.DataType_Decimal_{}} case arrow.STRING: return &proto.DataType{Kind: &proto.DataType_String_{}} case arrow.BINARY: return &proto.DataType{Kind: &proto.DataType_Binary_{}} case arrow.TIMESTAMP: return &proto.DataType{Kind: &proto.DataType_Timestamp_{}} 
case arrow.DATE64: return &proto.DataType{Kind: &proto.DataType_Date_{}} case arrow.LIST: return &proto.DataType{Kind: &proto.DataType_Array_{ Array: &proto.DataType_Array{ ElementType: ArrowTypeToProto(dataType.(*arrow.ListType).Elem()), }, }} case arrow.STRUCT: return &proto.DataType{Kind: arrowStructToProtoStruct(dataType.(*arrow.StructType))} default: return &proto.DataType{Kind: &proto.DataType_Unparsed_{}} } } func ArrowSchemaToProto(schema *arrow.Schema) proto.DataType_Struct { fields := make([]*proto.DataType_StructField, schema.NumFields()) for i, field := range schema.Fields() { fields[i] = &proto.DataType_StructField{ Name: field.Name, DataType: ArrowTypeToProto(field.Type), } } return proto.DataType_Struct{ Fields: fields, } } ================================================ FILE: spark/sql/types/arrow_test.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package types_test import ( "bytes" "testing" "github.com/apache/arrow-go/v18/arrow" "github.com/apache/arrow-go/v18/arrow/array" "github.com/apache/arrow-go/v18/arrow/decimal128" "github.com/apache/arrow-go/v18/arrow/decimal256" "github.com/apache/arrow-go/v18/arrow/float16" "github.com/apache/arrow-go/v18/arrow/ipc" "github.com/apache/arrow-go/v18/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" proto "github.com/apache/spark-connect-go/internal/generated" "github.com/apache/spark-connect-go/spark/sql/types" ) func TestShowArrowBatchData(t *testing.T) { arrowFields := []arrow.Field{ { Name: "show_string", Type: &arrow.StringType{}, }, } arrowSchema := arrow.NewSchema(arrowFields, nil) var buf bytes.Buffer arrowWriter := ipc.NewWriter(&buf, ipc.WithSchema(arrowSchema)) defer arrowWriter.Close() alloc := memory.NewGoAllocator() recordBuilder := array.NewRecordBuilder(alloc, arrowSchema) defer recordBuilder.Release() recordBuilder.Field(0).(*array.StringBuilder).Append("str1a\nstr1b") recordBuilder.Field(0).(*array.StringBuilder).Append("str2") record := recordBuilder.NewRecord() defer record.Release() err := arrowWriter.Write(record) require.Nil(t, err) // Convert the data record, err = types.ReadArrowBatchToRecord(buf.Bytes(), nil) require.NoError(t, err) table := array.NewTableFromRecords(arrowSchema, []arrow.Record{record}) values, err := types.ReadArrowTableToRows(table) require.Nil(t, err) assert.Equal(t, 2, len(values)) assert.Equal(t, []any{"str1a\nstr1b"}, values[0].Values()) assert.Equal(t, []any{"str2"}, values[1].Values()) } func TestReadArrowRecord(t *testing.T) { arrowFields := []arrow.Field{ { Name: "boolean_column", Type: &arrow.BooleanType{}, }, { Name: "int8_column", Type: &arrow.Int8Type{}, }, { Name: "int16_column", Type: &arrow.Int16Type{}, }, { Name: "int32_column", Type: &arrow.Int32Type{}, }, { Name: "int64_column", Type: &arrow.Int64Type{}, }, { Name: "float16_column", Type: &arrow.Float16Type{}, }, { 
			Name: "float32_column",
			Type: &arrow.Float32Type{},
		},
		{
			Name: "float64_column",
			Type: &arrow.Float64Type{},
		},
		{
			Name: "decimal128_column",
			Type: &arrow.Decimal128Type{},
		},
		{
			Name: "decimal256_column",
			Type: &arrow.Decimal256Type{},
		},
		{
			Name: "string_column",
			Type: &arrow.StringType{},
		},
		{
			Name: "binary_column",
			Type: &arrow.BinaryType{},
		},
		{
			Name: "timestamp_column",
			Type: &arrow.TimestampType{},
		},
		{
			Name: "date64_column",
			Type: &arrow.Date64Type{},
		},
		{
			Name: "array_int64_column",
			Type: arrow.ListOf(arrow.PrimitiveTypes.Int64),
		},
		{
			Name: "map_string_int32",
			Type: arrow.MapOf(arrow.BinaryTypes.String, arrow.PrimitiveTypes.Int32),
		},
		{
			Name: "struct",
			Type: arrow.StructOf(
				arrow.Field{Name: "field1", Type: arrow.PrimitiveTypes.Int32},
				arrow.Field{Name: "field2", Type: arrow.BinaryTypes.String},
			),
		},
		{
			// Struct nested inside a struct, to exercise recursive conversion.
			Name: "nested_struct",
			Type: arrow.StructOf(
				arrow.Field{Name: "field1", Type: arrow.StructOf(
					arrow.Field{Name: "nested_field1", Type: arrow.PrimitiveTypes.Int32},
					arrow.Field{Name: "nested_field2", Type: arrow.BinaryTypes.String},
				)},
			),
		},
	}
	arrowSchema := arrow.NewSchema(arrowFields, nil)
	var buf bytes.Buffer
	arrowWriter := ipc.NewWriter(&buf, ipc.WithSchema(arrowSchema))
	defer arrowWriter.Close()

	alloc := memory.NewGoAllocator()
	recordBuilder := array.NewRecordBuilder(alloc, arrowSchema)
	defer recordBuilder.Release()

	// i walks the columns in schema order; each column gets two row values.
	i := 0
	recordBuilder.Field(i).(*array.BooleanBuilder).Append(false)
	recordBuilder.Field(i).(*array.BooleanBuilder).Append(true)

	i++
	recordBuilder.Field(i).(*array.Int8Builder).Append(1)
	recordBuilder.Field(i).(*array.Int8Builder).Append(2)

	i++
	recordBuilder.Field(i).(*array.Int16Builder).Append(10)
	recordBuilder.Field(i).(*array.Int16Builder).Append(20)

	i++
	recordBuilder.Field(i).(*array.Int32Builder).Append(100)
	recordBuilder.Field(i).(*array.Int32Builder).Append(200)

	i++
	recordBuilder.Field(i).(*array.Int64Builder).Append(1000)
	recordBuilder.Field(i).(*array.Int64Builder).Append(2000)

	i++
	recordBuilder.Field(i).(*array.Float16Builder).Append(float16.New(10000.1))
	recordBuilder.Field(i).(*array.Float16Builder).Append(float16.New(20000.1))

	i++
	recordBuilder.Field(i).(*array.Float32Builder).Append(100000.1)
	recordBuilder.Field(i).(*array.Float32Builder).Append(200000.1)

	i++
	recordBuilder.Field(i).(*array.Float64Builder).Append(1000000.1)
	recordBuilder.Field(i).(*array.Float64Builder).Append(2000000.1)

	i++
	recordBuilder.Field(i).(*array.Decimal128Builder).Append(decimal128.FromI64(10000000))
	recordBuilder.Field(i).(*array.Decimal128Builder).Append(decimal128.FromI64(20000000))

	i++
	recordBuilder.Field(i).(*array.Decimal256Builder).Append(decimal256.FromI64(100000000))
	recordBuilder.Field(i).(*array.Decimal256Builder).Append(decimal256.FromI64(200000000))

	i++
	recordBuilder.Field(i).(*array.StringBuilder).Append("str1")
	recordBuilder.Field(i).(*array.StringBuilder).Append("str2")

	i++
	recordBuilder.Field(i).(*array.BinaryBuilder).Append([]byte("bytes1"))
	recordBuilder.Field(i).(*array.BinaryBuilder).Append([]byte("bytes2"))

	i++
	recordBuilder.Field(i).(*array.TimestampBuilder).Append(arrow.Timestamp(1686981953115000))
	recordBuilder.Field(i).(*array.TimestampBuilder).Append(arrow.Timestamp(1686981953116000))

	i++
	recordBuilder.Field(i).(*array.Date64Builder).Append(arrow.Date64(1686981953117000))
	recordBuilder.Field(i).(*array.Date64Builder).Append(arrow.Date64(1686981953118000))

	i++
	// List column: row 1 has two elements, row 2 has three.
	lb := recordBuilder.Field(i).(*array.ListBuilder)
	lb.Append(true)
	lb.ValueBuilder().(*array.Int64Builder).Append(1)
	lb.ValueBuilder().(*array.Int64Builder).Append(-999231)
	lb.Append(true)
	lb.ValueBuilder().(*array.Int64Builder).Append(1)
	lb.ValueBuilder().(*array.Int64Builder).Append(2)
	lb.ValueBuilder().(*array.Int64Builder).Append(3)

	i++
	// Map column: one key/value pair per row.
	mb := recordBuilder.Field(i).(*array.MapBuilder)
	mb.Append(true)
	mb.KeyBuilder().(*array.StringBuilder).Append("key1")
	mb.ItemBuilder().(*array.Int32Builder).Append(1)
	mb.Append(true)
	mb.KeyBuilder().(*array.StringBuilder).Append("key2")
	mb.ItemBuilder().(*array.Int32Builder).Append(2)

	i++
	// Flat struct column with two fields.
	sb := recordBuilder.Field(i).(*array.StructBuilder)
	sb.Append(true)
	sb.FieldBuilder(0).(*array.Int32Builder).Append(1)
	sb.FieldBuilder(1).(*array.StringBuilder).Append("str1")
	sb.Append(true)
	sb.FieldBuilder(0).(*array.Int32Builder).Append(2)
	sb.FieldBuilder(1).(*array.StringBuilder).Append("str2")

	i++
	// Struct-of-struct column; nsb builds the inner struct for each row.
	sb = recordBuilder.Field(i).(*array.StructBuilder)
	sb.Append(true)
	nsb := sb.FieldBuilder(0).(*array.StructBuilder)
	nsb.Append(true)
	nsb.FieldBuilder(0).(*array.Int32Builder).Append(1)
	nsb.FieldBuilder(1).(*array.StringBuilder).Append("str1_nested")
	sb.Append(true)
	nsb = sb.FieldBuilder(0).(*array.StructBuilder)
	nsb.Append(true)
	nsb.FieldBuilder(0).(*array.Int32Builder).Append(2)
	nsb.FieldBuilder(1).(*array.StringBuilder).Append("str2_nested")

	record := recordBuilder.NewRecord()
	defer record.Release()

	table := array.NewTableFromRecords(arrowSchema, []arrow.Record{record})
	values, err := types.ReadArrowTableToRows(table)
	require.Nil(t, err)

	assert.Equal(t, 2, len(values))
	// Row 0: first value appended to every column, in schema order.
	assert.Equal(t, []any{
		false,
		int8(1),
		int16(10),
		int32(100),
		int64(1000),
		float16.New(10000.1),
		float32(100000.1),
		1000000.1,
		decimal128.FromI64(10000000),
		decimal256.FromI64(100000000),
		"str1",
		[]byte("bytes1"),
		arrow.Timestamp(1686981953115000),
		arrow.Date64(1686981953117000),
		[]any{int64(1), int64(-999231)},
		map[any]any{"key1": int32(1)},
		map[string]any{"field1": int32(1), "field2": "str1"},
		map[string]any{
			"field1": map[string]any{
				"nested_field1": int32(1),
				"nested_field2": "str1_nested",
			},
		},
	}, values[0].Values())
	// Row 1: second value appended to every column.
	assert.Equal(t, []any{
		true,
		int8(2),
		int16(20),
		int32(200),
		int64(2000),
		float16.New(20000.1),
		float32(200000.1),
		2000000.1,
		decimal128.FromI64(20000000),
		decimal256.FromI64(200000000),
		"str2",
		[]byte("bytes2"),
		arrow.Timestamp(1686981953116000),
		arrow.Date64(1686981953118000),
		[]any{int64(1), int64(2), int64(3)},
		map[any]any{"key2": int32(2)},
		map[string]any{"field1": int32(2), "field2": "str2"},
		map[string]any{
			"field1":
map[string]any{ "nested_field1": int32(2), "nested_field2": "str2_nested", }, }, }, values[1].Values()) } func TestReadArrowRecord_UnsupportedType(t *testing.T) { arrowFields := []arrow.Field{ { Name: "unsupported_type_column", Type: &arrow.MonthIntervalType{}, }, } arrowSchema := arrow.NewSchema(arrowFields, nil) var buf bytes.Buffer arrowWriter := ipc.NewWriter(&buf, ipc.WithSchema(arrowSchema)) defer arrowWriter.Close() alloc := memory.NewGoAllocator() recordBuilder := array.NewRecordBuilder(alloc, arrowSchema) defer recordBuilder.Release() recordBuilder.Field(0).(*array.MonthIntervalBuilder).Append(1) record := recordBuilder.NewRecord() defer record.Release() table := array.NewTableFromRecords(arrowSchema, []arrow.Record{record}) _, err := types.ReadArrowTableToRows(table) require.NotNil(t, err) } func TestConvertProtoDataTypeToDataType(t *testing.T) { booleanDataType := &proto.DataType{ Kind: &proto.DataType_Boolean_{}, } assert.Equal(t, "Boolean", types.ConvertProtoDataTypeToDataType(booleanDataType).TypeName()) byteDataType := &proto.DataType{ Kind: &proto.DataType_Byte_{}, } assert.Equal(t, "Byte", types.ConvertProtoDataTypeToDataType(byteDataType).TypeName()) shortDataType := &proto.DataType{ Kind: &proto.DataType_Short_{}, } assert.Equal(t, "Short", types.ConvertProtoDataTypeToDataType(shortDataType).TypeName()) integerDataType := &proto.DataType{ Kind: &proto.DataType_Integer_{}, } assert.Equal(t, "Integer", types.ConvertProtoDataTypeToDataType(integerDataType).TypeName()) longDataType := &proto.DataType{ Kind: &proto.DataType_Long_{}, } assert.Equal(t, "Long", types.ConvertProtoDataTypeToDataType(longDataType).TypeName()) floatDataType := &proto.DataType{ Kind: &proto.DataType_Float_{}, } assert.Equal(t, "Float", types.ConvertProtoDataTypeToDataType(floatDataType).TypeName()) doubleDataType := &proto.DataType{ Kind: &proto.DataType_Double_{}, } assert.Equal(t, "Double", types.ConvertProtoDataTypeToDataType(doubleDataType).TypeName()) decimalDataType := 
&proto.DataType{ Kind: &proto.DataType_Decimal_{}, } assert.Equal(t, "Decimal", types.ConvertProtoDataTypeToDataType(decimalDataType).TypeName()) stringDataType := &proto.DataType{ Kind: &proto.DataType_String_{}, } assert.Equal(t, "String", types.ConvertProtoDataTypeToDataType(stringDataType).TypeName()) binaryDataType := &proto.DataType{ Kind: &proto.DataType_Binary_{}, } assert.Equal(t, "Binary", types.ConvertProtoDataTypeToDataType(binaryDataType).TypeName()) timestampDataType := &proto.DataType{ Kind: &proto.DataType_Timestamp_{}, } assert.Equal(t, "Timestamp", types.ConvertProtoDataTypeToDataType(timestampDataType).TypeName()) timestampNtzDataType := &proto.DataType{ Kind: &proto.DataType_TimestampNtz{}, } assert.Equal(t, "TimestampNtz", types.ConvertProtoDataTypeToDataType(timestampNtzDataType).TypeName()) dateDataType := &proto.DataType{ Kind: &proto.DataType_Date_{}, } assert.Equal(t, "Date", types.ConvertProtoDataTypeToDataType(dateDataType).TypeName()) arrayDataType := &proto.DataType{ Kind: &proto.DataType_Array_{ Array: &proto.DataType_Array{ ElementType: &proto.DataType{Kind: &proto.DataType_Integer_{}}, }, }, } assert.Equal(t, "Array", types.ConvertProtoDataTypeToDataType(arrayDataType).TypeName()) mapDataType := &proto.DataType{ Kind: &proto.DataType_Map_{ Map: &proto.DataType_Map{ KeyType: &proto.DataType{Kind: &proto.DataType_String_{}}, ValueType: &proto.DataType{Kind: &proto.DataType_Integer_{}}, ValueContainsNull: true, }, }, } assert.Equal(t, "Map", types.ConvertProtoDataTypeToDataType(mapDataType).TypeName()) structDataType := &proto.DataType{ Kind: &proto.DataType_Struct_{ Struct: &proto.DataType_Struct{ Fields: []*proto.DataType_StructField{ {Name: "field1", DataType: &proto.DataType{Kind: &proto.DataType_Integer_{}}}, {Name: "field2", DataType: &proto.DataType{Kind: &proto.DataType_String_{}}}, }, }, }, } assert.Equal(t, "structtype", types.ConvertProtoDataTypeToDataType(structDataType).TypeName()) } func 
TestConvertProtoDataTypeToDataType_UnsupportedType(t *testing.T) { unsupportedDataType := &proto.DataType{ Kind: &proto.DataType_YearMonthInterval_{}, } assert.Equal(t, "Unsupported", types.ConvertProtoDataTypeToDataType(unsupportedDataType).TypeName()) } ================================================ FILE: spark/sql/types/builtin.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package types import ( "context" proto "github.com/apache/spark-connect-go/internal/generated" ) type LiteralType interface { ToProto(ctx context.Context) (*proto.Expression, error) } type NumericLiteral interface { LiteralType // marker method for compile time safety. 
	isNumericLiteral()
}

// PrimitiveTypeLiteral is the subset of literal types carrying primitive
// (non-composite) values.
type PrimitiveTypeLiteral interface {
	LiteralType
	isPrimitiveTypeLiteral()
}

// Int8 maps to a Spark Byte literal.
type Int8 int8

func (t Int8) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Byte{Byte: int32(t)},
			},
		},
	}, nil
}

func (t Int8) isNumericLiteral() {}

func (t Int8) isPrimitiveTypeLiteral() {}

// Int16 maps to a Spark Short literal.
type Int16 int16

func (t Int16) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Short{Short: int32(t)},
			},
		},
	}, nil
}

func (t Int16) isNumericLiteral() {}

func (t Int16) isPrimitiveTypeLiteral() {}

// Int32 maps to a Spark Integer literal.
type Int32 int32

func (t Int32) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Integer{Integer: int32(t)},
			},
		},
	}, nil
}

func (t Int32) isNumericLiteral() {}

func (t Int32) isPrimitiveTypeLiteral() {}

// Int64 maps to a Spark Long literal.
type Int64 int64

func (t Int64) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Long{Long: int64(t)},
			},
		},
	}, nil
}

func (t Int64) isNumericLiteral() {}

func (t Int64) isPrimitiveTypeLiteral() {}

// Int delegates to Int64, so a plain Go int becomes a Spark Long literal.
type Int int

func (t Int) ToProto(ctx context.Context) (*proto.Expression, error) {
	return Int64(t).ToProto(ctx)
}

func (t Int) isNumericLiteral() {}

func (t Int) isPrimitiveTypeLiteral() {}

// Float32 maps to a Spark Float literal.
type Float32 float32

func (t Float32) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Float{Float: float32(t)},
			},
		},
	}, nil
}

func (t Float32) isNumericLiteral() {}

func (t Float32) isPrimitiveTypeLiteral() {}

// Float64 maps to a Spark Double literal.
type Float64 float64

func (t Float64) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Double{Double: float64(t)},
			},
		},
	}, nil
}

func (t Float64) isNumericLiteral() {}

func (t Float64) isPrimitiveTypeLiteral() {}

// String maps to a Spark String literal.
type String string

func (t String) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_String_{String_: string(t)},
			},
		},
	}, nil
}

func (t String) isPrimitiveTypeLiteral() {}

// Boolean maps to a Spark Boolean literal.
type Boolean bool

func (t Boolean) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Boolean{Boolean: bool(t)},
			},
		},
	}, nil
}

func (t Boolean) isPrimitiveTypeLiteral() {}

// Binary maps to a Spark Binary literal. Note it intentionally implements
// only LiteralType — it carries no marker methods here.
type Binary []byte

func (t Binary) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Binary{Binary: t},
			},
		},
	}, nil
}

// Int8NilType represents a typed NULL of Spark Byte type.
type Int8NilType struct{}

var Int8Nil = Int8NilType{}

func (t Int8NilType) isNumericLiteral() {}

func (t Int8NilType) isPrimitiveTypeLiteral() {}

func (t Int8NilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Byte_{
							Byte: &proto.DataType_Byte{},
						},
					},
				},
			},
		},
	}, nil
}

// Int16NilType represents a typed NULL of Spark Short type.
type Int16NilType struct{}

var Int16Nil = Int16NilType{}

func (t Int16NilType) isNumericLiteral() {}

func (t Int16NilType) isPrimitiveTypeLiteral() {}

func (t Int16NilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Short_{
							Short: &proto.DataType_Short{},
						},
					},
				},
			},
		},
	}, nil
}

// Int32NilType represents a typed NULL of Spark Integer type.
type Int32NilType struct{}

var Int32Nil = Int32NilType{}

func (t Int32NilType) isNumericLiteral() {}

func (t Int32NilType) isPrimitiveTypeLiteral() {}

func (t Int32NilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Integer_{
							Integer: &proto.DataType_Integer{},
						},
					},
				},
			},
		},
	}, nil
}

// Int64NilType represents a typed NULL of Spark Long type.
type Int64NilType struct{}

var Int64Nil = Int64NilType{}

func (t Int64NilType) isNumericLiteral() {}

func (t Int64NilType) isPrimitiveTypeLiteral() {}

func (t Int64NilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Long_{
							Long: &proto.DataType_Long{},
						},
					},
				},
			},
		},
	}, nil
}

// IntNilType delegates to Int64NilType, mirroring Int -> Int64.
type IntNilType struct{}

var IntNil = IntNilType{}

func (t IntNilType) isNumericLiteral() {}

func (t IntNilType) isPrimitiveTypeLiteral() {}

func (t IntNilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return Int64NilType{}.ToProto(ctx)
}

// Float32NilType represents a typed NULL of Spark Float type.
type Float32NilType struct{}

var Float32Nil = Float32NilType{}

func (t Float32NilType) isNumericLiteral() {}

func (t Float32NilType) isPrimitiveTypeLiteral() {}

func (t Float32NilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Float_{
							Float: &proto.DataType_Float{},
						},
					},
				},
			},
		},
	}, nil
}

// Float64NilType represents a typed NULL of Spark Double type.
type Float64NilType struct{}

var Float64Nil = Float64NilType{}

func (t Float64NilType) isNumericLiteral() {}

func (t Float64NilType) isPrimitiveTypeLiteral() {}

func (t Float64NilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Double_{
							Double: &proto.DataType_Double{},
						},
					},
				},
			},
		},
	}, nil
}

// StringNilType represents a typed NULL of Spark String type.
type StringNilType struct{}

var StringNil = StringNilType{}

func (t StringNilType) isPrimitiveTypeLiteral() {}

func (t StringNilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_String_{
							String_: &proto.DataType_String{},
						},
					},
				},
			},
		},
	}, nil
}

// BooleanNilType represents a typed NULL of Spark Boolean type.
type BooleanNilType struct{}

var BooleanNil = BooleanNilType{}

func (t BooleanNilType) isPrimitiveTypeLiteral() {}

func (t BooleanNilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Boolean_{
							Boolean: &proto.DataType_Boolean{},
						},
					},
				},
			},
		},
	}, nil
}

// BinaryNilType represents a typed NULL of Spark Binary type.
type BinaryNilType struct{}

var BinaryNil = BinaryNilType{}

func (t BinaryNilType) ToProto(ctx context.Context) (*proto.Expression, error) {
	return &proto.Expression{
		ExprType: &proto.Expression_Literal_{
			Literal: &proto.Expression_Literal{
				LiteralType: &proto.Expression_Literal_Null{
					Null: &proto.DataType{
						Kind: &proto.DataType_Binary_{
							Binary: &proto.DataType_Binary{},
						},
					},
				},
			},
		},
	}, nil
}

================================================
FILE: spark/sql/types/builtin_test.go
================================================
//
// Licensed to the Apache Software Foundation
(ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package types

import (
	"context"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestBuiltinTypes checks that each literal wrapper serializes to the
// expected oneof field of the proto literal.
func TestBuiltinTypes(t *testing.T) {
	p, err := Int8(1).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetByte(), int32(1))

	p, err = Int16(1).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetShort(), int32(1))

	p, err = Int32(1).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetInteger(), int32(1))

	p, err = Int64(1).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetLong(), int64(1))

	p, err = Float32(1.0).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetFloat(), float32(1.0))

	p, err = Float64(1.0).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetDouble(), float64(1.0))

	p, err = String("1").ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetString_(), "1")

	p, err = Boolean(true).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetBoolean(), true)

	p, err = Binary([]byte{1}).ToProto(context.TODO())
	assert.NoError(t, err)
	assert.Equal(t, p.GetLiteral().GetBinary(), []byte{1})
}

// testMe only exists to prove (at compile time) that its argument satisfies
// NumericLiteral.
func testMe(n NumericLiteral) bool {
	return true
}

// testPrimitive only exists to prove that its argument satisfies
// PrimitiveTypeLiteral.
func testPrimitive(p PrimitiveTypeLiteral) bool {
	return true
}

// TestNumericTypes verifies which wrappers implement the marker interfaces.
func TestNumericTypes(t *testing.T) {
	assert.True(t, testMe(Int8(1)))
	assert.True(t, testMe(Int16(1)))
	assert.True(t, testMe(Int32(1)))
	assert.True(t, testMe(Int64(1)))
	assert.True(t, testMe(Float32(1.0)))
	assert.True(t, testMe(Float64(1.0)))
	assert.True(t, testPrimitive(String("a")))
	assert.True(t, testPrimitive(Boolean(true)))
	assert.True(t, testPrimitive(Int16(1)))
}

================================================
FILE: spark/sql/types/conversion.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types

import (
	"errors"

	"github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/sparkerrors"
)

// ConvertProtoDataTypeToStructType converts a proto DataType that must carry
// a struct kind into a *StructType. Returns an InvalidInputError when the
// input is not a struct.
func ConvertProtoDataTypeToStructType(input *generated.DataType) (*StructType, error) {
	dataTypeStruct := input.GetStruct()
	if dataTypeStruct == nil {
		return nil, sparkerrors.WithType(errors.New("dataType.GetStruct() is nil"), sparkerrors.InvalidInputError)
	}
	return &StructType{
		Fields: ConvertProtoStructFields(dataTypeStruct.Fields),
	}, nil
}

// ConvertProtoStructFields converts a slice of proto struct fields, preserving
// order.
func ConvertProtoStructFields(input []*generated.DataType_StructField) []StructField {
	result := make([]StructField, len(input))
	for i, f := range input {
		result[i] = ConvertProtoStructField(f)
	}
	return result
}

// ConvertProtoStructField converts a single proto struct field, carrying over
// name, type, nullability, and metadata.
func ConvertProtoStructField(field *generated.DataType_StructField) StructField {
	return StructField{
		Name:     field.Name,
		DataType: ConvertProtoDataTypeToDataType(field.DataType),
		Nullable: field.Nullable,
		Metadata: field.Metadata,
	}
}

// ConvertProtoDataTypeToDataType converts protobuf data type to Spark connect sql data type.
// Unknown kinds are wrapped in UnsupportedType rather than returning an error.
func ConvertProtoDataTypeToDataType(input *generated.DataType) DataType {
	switch v := input.GetKind().(type) {
	case *generated.DataType_Boolean_:
		return BooleanType{}
	case *generated.DataType_Byte_:
		return ByteType{}
	case *generated.DataType_Short_:
		return ShortType{}
	case *generated.DataType_Integer_:
		return IntegerType{}
	case *generated.DataType_Long_:
		return LongType{}
	case *generated.DataType_Float_:
		return FloatType{}
	case *generated.DataType_Double_:
		return DoubleType{}
	case *generated.DataType_Decimal_:
		// Carry precision and scale through instead of always returning the
		// zero value; DecimalType has these fields and ToArrowType uses them.
		// The proto getters return 0 when the optional fields are unset, so
		// messages that omit them behave exactly as before.
		return DecimalType{
			Precision: input.GetDecimal().GetPrecision(),
			Scale:     input.GetDecimal().GetScale(),
		}
	case *generated.DataType_String_:
		return StringType{}
	case *generated.DataType_Binary_:
		return BinaryType{}
	case *generated.DataType_Timestamp_:
		return TimestampType{}
	case *generated.DataType_TimestampNtz:
		return TimestampNtzType{}
	case *generated.DataType_Date_:
		return DateType{}
	case *generated.DataType_Array_:
		nestedType := ConvertProtoDataTypeToDataType(input.GetArray().ElementType)
		containsNull := input.GetArray().ContainsNull
		return ArrayType{
			ElementType:  nestedType,
			ContainsNull: containsNull,
		}
	case *generated.DataType_Map_:
		keyType := ConvertProtoDataTypeToDataType(input.GetMap().KeyType)
		valueType := ConvertProtoDataTypeToDataType(input.GetMap().ValueType)
		valueContainsNull := input.GetMap().ValueContainsNull
		return MapType{
			KeyType:           keyType,
			ValueType:         valueType,
			ValueContainsNull: valueContainsNull,
		}
	case *generated.DataType_Struct_:
		fields := ConvertProtoStructFields(input.GetStruct().Fields)
		return *StructOf(fields...)
	default:
		// v is the unknown kind; keep it for diagnostics.
		return UnsupportedType{
			TypeInfo: v,
		}
	}
}

================================================
FILE: spark/sql/types/conversion_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types_test

import (
	"testing"

	proto "github.com/apache/spark-connect-go/internal/generated"
	"github.com/apache/spark-connect-go/spark/sql/types"
	"github.com/stretchr/testify/assert"
)

// TestConvertProtoStructFieldSupported converts a field with a known kind and
// checks name and concrete Go type.
func TestConvertProtoStructFieldSupported(t *testing.T) {
	protoType := &proto.DataType{Kind: &proto.DataType_Integer_{}}
	structField := &proto.DataType_StructField{
		Name:     "test",
		DataType: protoType,
		Nullable: true,
	}
	dt := types.ConvertProtoStructField(structField)
	assert.Equal(t, "test", dt.Name)
	assert.IsType(t, types.IntegerType{}, dt.DataType)
}

// TestConvertProtoStructFieldUnsupported verifies unknown kinds map to
// UnsupportedType instead of failing.
func TestConvertProtoStructFieldUnsupported(t *testing.T) {
	protoType := &proto.DataType{Kind: &proto.DataType_CalendarInterval_{}}
	structField := &proto.DataType_StructField{
		Name:     "test",
		DataType: protoType,
		Nullable: true,
	}
	dt := types.ConvertProtoStructField(structField)
	assert.Equal(t, "test", dt.Name)
	assert.IsType(t, types.UnsupportedType{}, dt.DataType)
}

// TestConvertProtoStructToGoStruct covers the happy path and the non-struct
// error path of ConvertProtoDataTypeToStructType.
func TestConvertProtoStructToGoStruct(t *testing.T) {
	protoType := &proto.DataType{
		Kind: &proto.DataType_Struct_{
			Struct: &proto.DataType_Struct{
				Fields: []*proto.DataType_StructField{
					{
						Name:     "test",
						DataType: &proto.DataType{Kind: &proto.DataType_Integer_{}},
						Nullable: true,
					},
				},
			},
		},
	}
	structType, err := types.ConvertProtoDataTypeToStructType(protoType)
	assert.NoError(t, err)
	assert.Equal(t, 1, len(structType.Fields))
	assert.Equal(t, "test", structType.Fields[0].Name)
	assert.IsType(t, types.IntegerType{}, structType.Fields[0].DataType)

	// Check for input type that is not a struct type and it returns an error.
	protoType = &proto.DataType{Kind: &proto.DataType_Integer_{}}
	_, err = types.ConvertProtoDataTypeToStructType(protoType)
	assert.Error(t, err)
}

// TestConvertProtoArrayType covers flat and nested array conversion.
// NOTE: the expectedName literals in the extracted source had their angle-
// bracket contents stripped ("Array", "Array>"); they are reconstructed here
// to match ArrayType.TypeName's "Array<%s>" format.
func TestConvertProtoArrayType(t *testing.T) {
	tests := []struct {
		name         string
		protoType    *proto.DataType
		expectedType types.DataType
		expectedName string
	}{
		{
			name: "Array of integers",
			protoType: &proto.DataType{
				Kind: &proto.DataType_Array_{
					Array: &proto.DataType_Array{
						ElementType:  &proto.DataType{Kind: &proto.DataType_Integer_{}},
						ContainsNull: true,
					},
				},
			},
			expectedType: types.ArrayType{
				ElementType:  types.INTEGER,
				ContainsNull: true,
			},
			expectedName: "Array<Integer>",
		},
		{
			name: "Array of strings without nulls",
			protoType: &proto.DataType{
				Kind: &proto.DataType_Array_{
					Array: &proto.DataType_Array{
						ElementType:  &proto.DataType{Kind: &proto.DataType_String_{}},
						ContainsNull: false,
					},
				},
			},
			expectedType: types.ArrayType{
				ElementType:  types.STRING,
				ContainsNull: false,
			},
			expectedName: "Array<String>",
		},
		{
			name: "Nested array",
			protoType: &proto.DataType{
				Kind: &proto.DataType_Array_{
					Array: &proto.DataType_Array{
						ElementType: &proto.DataType{
							Kind: &proto.DataType_Array_{
								Array: &proto.DataType_Array{
									ElementType:  &proto.DataType{Kind: &proto.DataType_Double_{}},
									ContainsNull: false,
								},
							},
						},
						ContainsNull: true,
					},
				},
			},
			expectedType: types.ArrayType{
				ElementType: types.ArrayType{
					ElementType:  types.DOUBLE,
					ContainsNull: false,
				},
				ContainsNull: true,
			},
			expectedName: "Array<Array<Double>>",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			convertedType := types.ConvertProtoDataTypeToDataType(tt.protoType)
			assert.Equal(t, tt.expectedType, convertedType)
			assert.Equal(t, tt.expectedName, convertedType.TypeName())
		})
	}
}

// TestConvertProtoMapType covers flat, array-valued, and nested map
// conversion. expectedName literals reconstructed to match MapType.TypeName's
// "Map<%s,%s>" format (see NOTE on TestConvertProtoArrayType).
func TestConvertProtoMapType(t *testing.T) {
	tests := []struct {
		name         string
		protoType    *proto.DataType
		expectedType types.DataType
		expectedName string
	}{
		{
			name: "Map of string to integer",
			protoType: &proto.DataType{
				Kind: &proto.DataType_Map_{
					Map: &proto.DataType_Map{
						KeyType:           &proto.DataType{Kind: &proto.DataType_String_{}},
						ValueType:         &proto.DataType{Kind: &proto.DataType_Integer_{}},
						ValueContainsNull: true,
					},
				},
			},
			expectedType: types.MapType{
				KeyType:           types.STRING,
				ValueType:         types.INTEGER,
				ValueContainsNull: true,
			},
			expectedName: "Map<String,Integer>",
		},
		{
			name: "Map with array values",
			protoType: &proto.DataType{
				Kind: &proto.DataType_Map_{
					Map: &proto.DataType_Map{
						KeyType: &proto.DataType{Kind: &proto.DataType_Integer_{}},
						ValueType: &proto.DataType{
							Kind: &proto.DataType_Array_{
								Array: &proto.DataType_Array{
									ElementType:  &proto.DataType{Kind: &proto.DataType_String_{}},
									ContainsNull: true,
								},
							},
						},
						ValueContainsNull: false,
					},
				},
			},
			expectedType: types.MapType{
				KeyType: types.INTEGER,
				ValueType: types.ArrayType{
					ElementType:  types.STRING,
					ContainsNull: true,
				},
				ValueContainsNull: false,
			},
			expectedName: "Map<Integer,Array<String>>",
		},
		{
			name: "Nested map",
			protoType: &proto.DataType{
				Kind: &proto.DataType_Map_{
					Map: &proto.DataType_Map{
						KeyType: &proto.DataType{Kind: &proto.DataType_String_{}},
						ValueType: &proto.DataType{
							Kind: &proto.DataType_Map_{
								Map: &proto.DataType_Map{
									KeyType:           &proto.DataType{Kind: &proto.DataType_String_{}},
									ValueType:         &proto.DataType{Kind: &proto.DataType_Double_{}},
									ValueContainsNull: false,
								},
							},
						},
						ValueContainsNull: true,
					},
				},
			},
			expectedType: types.MapType{
				KeyType: types.STRING,
				ValueType: types.MapType{
					KeyType:           types.STRING,
					ValueType:         types.DOUBLE,
					ValueContainsNull: false,
				},
				ValueContainsNull: true,
			},
			expectedName: "Map<String,Map<String,Double>>",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			convertedType := types.ConvertProtoDataTypeToDataType(tt.protoType)
			assert.Equal(t, tt.expectedType, convertedType)
			assert.Equal(t, tt.expectedName, convertedType.TypeName())
		})
	}
}

func TestConvertComplexNestedTypes(t *testing.T) {
	// Test a complex nested structure: Struct containing Array<Map<String,Integer>>
	protoType := &proto.DataType{
		Kind: &proto.DataType_Struct_{
			Struct: &proto.DataType_Struct{
				Fields: []*proto.DataType_StructField{
					{
						Name: "complex_field",
						DataType: &proto.DataType{
							Kind: &proto.DataType_Array_{
								Array: &proto.DataType_Array{
									ElementType: &proto.DataType{
										Kind: &proto.DataType_Map_{
											Map: &proto.DataType_Map{
												KeyType:           &proto.DataType{Kind: &proto.DataType_String_{}},
												ValueType:         &proto.DataType{Kind: &proto.DataType_Integer_{}},
												ValueContainsNull: true,
											},
										},
									},
									ContainsNull: false,
								},
							},
						},
						Nullable: true,
					},
				},
			},
		},
	}

	convertedType := types.ConvertProtoDataTypeToDataType(protoType)
	structType, ok := convertedType.(types.StructType)
	assert.True(t, ok)
	assert.Equal(t, 1, len(structType.Fields))
	assert.Equal(t, "complex_field", structType.Fields[0].Name)

	arrayType, ok := structType.Fields[0].DataType.(types.ArrayType)
	assert.True(t, ok)
	assert.False(t, arrayType.ContainsNull)

	mapType, ok := arrayType.ElementType.(types.MapType)
	assert.True(t, ok)
	assert.Equal(t, types.STRING, mapType.KeyType)
	assert.Equal(t, types.INTEGER, mapType.ValueType)
	assert.True(t, mapType.ValueContainsNull)
}

================================================
FILE: spark/sql/types/datatype.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types

import (
	"fmt"
	"strings"

	"github.com/apache/arrow-go/v18/arrow"
)

// DataType is implemented by every Spark SQL data type in this package. It
// exposes the Spark-facing type name, whether the type is numeric, and the
// equivalent Arrow type used for columnar data interchange.
type DataType interface {
	TypeName() string
	IsNumeric() bool
	ToArrowType() arrow.DataType
}

// BooleanType represents Spark's BOOLEAN type.
type BooleanType struct{}

func (t BooleanType) TypeName() string { return getDataTypeName(t) }

func (t BooleanType) IsNumeric() bool { return false }

func (t BooleanType) ToArrowType() arrow.DataType { return arrow.FixedWidthTypes.Boolean }

// ByteType represents Spark's BYTE type (signed 8-bit integer).
type ByteType struct{}

func (t ByteType) IsNumeric() bool { return true }

func (t ByteType) ToArrowType() arrow.DataType { return arrow.PrimitiveTypes.Int8 }

func (t ByteType) TypeName() string { return getDataTypeName(t) }

// ShortType represents Spark's SHORT type (signed 16-bit integer).
type ShortType struct{}

func (t ShortType) TypeName() string { return getDataTypeName(t) }

func (t ShortType) IsNumeric() bool { return true }

func (t ShortType) ToArrowType() arrow.DataType { return arrow.PrimitiveTypes.Int16 }

// IntegerType represents Spark's INT type (signed 32-bit integer).
type IntegerType struct{}

func (t IntegerType) TypeName() string { return getDataTypeName(t) }

func (t IntegerType) IsNumeric() bool { return true }

func (t IntegerType) ToArrowType() arrow.DataType { return arrow.PrimitiveTypes.Int32 }

// LongType represents Spark's LONG type (signed 64-bit integer).
type LongType struct{}

func (t LongType) TypeName() string { return getDataTypeName(t) }

func (t LongType) IsNumeric() bool { return true }

func (t LongType) ToArrowType() arrow.DataType { return arrow.PrimitiveTypes.Int64 }

// FloatType represents Spark's FLOAT type (32-bit IEEE 754).
type FloatType struct{}

func (t FloatType) TypeName() string { return getDataTypeName(t) }

func (t FloatType) IsNumeric() bool { return true }

func (t FloatType) ToArrowType() arrow.DataType { return arrow.PrimitiveTypes.Float32 }

// DoubleType represents Spark's DOUBLE type (64-bit IEEE 754).
type DoubleType struct{}

func (t DoubleType) TypeName() string { return getDataTypeName(t) }

func (t DoubleType) IsNumeric() bool { return true }

func (t DoubleType) ToArrowType() arrow.DataType { return arrow.PrimitiveTypes.Float64 }

// DecimalType represents Spark's DECIMAL(precision, scale) type.
type DecimalType struct {
	Precision int32
	Scale     int32
}

func (t DecimalType) TypeName() string { return getDataTypeName(t) }

func (t DecimalType) IsNumeric() bool { return true }

// ToArrowType maps to a 128-bit Arrow decimal; precisions that would require
// 256 bits are not representable via this mapping.
func (t DecimalType) ToArrowType() arrow.DataType {
	return &arrow.Decimal128Type{
		Precision: t.Precision,
		Scale:     t.Scale,
	}
}

// StringType represents Spark's STRING type.
type StringType struct{}

func (t StringType) TypeName() string { return getDataTypeName(t) }

func (t StringType) IsNumeric() bool { return false }

func (t StringType) ToArrowType() arrow.DataType { return arrow.BinaryTypes.String }

// BinaryType represents Spark's BINARY type.
type BinaryType struct{}

func (t BinaryType) TypeName() string { return getDataTypeName(t) }

func (t BinaryType) IsNumeric() bool { return false }

func (t BinaryType) ToArrowType() arrow.DataType { return arrow.BinaryTypes.Binary }

// TimestampType represents Spark's TIMESTAMP type.
type TimestampType struct{}

func (t TimestampType) TypeName() string { return getDataTypeName(t) }

func (t TimestampType) IsNumeric() bool { return false }

// NOTE(review): Spark timestamps carry microsecond precision, but this maps
// to millisecond Arrow timestamps — confirm this asymmetry with
// TimestampNtzType (nanoseconds) is intentional.
func (t TimestampType) ToArrowType() arrow.DataType { return arrow.FixedWidthTypes.Timestamp_ms }

// TimestampNtzType represents Spark's TIMESTAMP_NTZ type (no time zone).
type TimestampNtzType struct{}

func (t TimestampNtzType) TypeName() string { return getDataTypeName(t) }

func (t TimestampNtzType) IsNumeric() bool { return false }

func (t TimestampNtzType) ToArrowType() arrow.DataType { return arrow.FixedWidthTypes.Timestamp_ns }

// DateType represents Spark's DATE type (days since the Unix epoch).
type DateType struct{}

func (t DateType) TypeName() string { return getDataTypeName(t) }

func (t DateType) IsNumeric() bool { return false }

func (t DateType) ToArrowType() arrow.DataType { return arrow.FixedWidthTypes.Date32 }

// ArrayType represents Spark's ARRAY type.
type ArrayType struct {
	ElementType  DataType
	ContainsNull bool
}

// TypeName renders the name as e.g. "Array<Integer>".
func (t ArrayType) TypeName() string {
	return fmt.Sprintf("Array<%s>", t.ElementType.TypeName())
}

func (t ArrayType) IsNumeric() bool { return false }

// ToArrowType maps to an Arrow list; note that ContainsNull is not
// propagated into the Arrow type.
func (t ArrayType) ToArrowType() arrow.DataType {
	return arrow.ListOf(t.ElementType.ToArrowType())
}

// MapType represents Spark's MAP type.
type MapType struct {
	KeyType           DataType
	ValueType         DataType
	ValueContainsNull bool
}

// TypeName renders the name as e.g. "Map<String,Integer>".
func (t MapType) TypeName() string {
	return fmt.Sprintf("Map<%s,%s>", t.KeyType.TypeName(), t.ValueType.TypeName())
}

func (t MapType) IsNumeric() bool { return false }

func (t MapType) ToArrowType() arrow.DataType {
	// TODO: assert that ValueContainsNull is true because it indicates
	// nullability of the map type
	return arrow.MapOf(t.KeyType.ToArrowType(), t.ValueType.ToArrowType())
}

// UnsupportedType is a placeholder for types this client cannot map;
// TypeInfo carries whatever raw type information was available.
type UnsupportedType struct {
	TypeInfo any
}

func (t UnsupportedType) TypeName() string { return getDataTypeName(t) }

func (t UnsupportedType) IsNumeric() bool { return false }

// ToArrowType returns nil: there is no Arrow equivalent for an unsupported
// type, so callers must check for nil before use.
func (t UnsupportedType) ToArrowType() arrow.DataType { return nil }

// getDataTypeName derives the Spark name from the Go type name, e.g.
// types.IntegerType -> "Integer". It assumes the dynamic type is declared in
// a named package (the "%T" output contains a dot) and ends in "Type";
// all implementations in this package satisfy that.
func getDataTypeName(dataType DataType) string {
	typeName := fmt.Sprintf("%T", dataType)
	nonQualifiedTypeName := strings.Split(typeName, ".")[1]
	return strings.TrimSuffix(nonQualifiedTypeName, "Type")
}

// MakeArrayType builds an ArrayType from an element type and a nullability
// flag for the elements.
func MakeArrayType(elementType DataType, containsNull bool) ArrayType {
	return ArrayType{
		ElementType:  elementType,
		ContainsNull: containsNull,
	}
}

// Singleton instances for all parameter-free data types.
var (
	BOOLEAN       = BooleanType{}
	BYTE          = ByteType{}
	SHORT         = ShortType{}
	INTEGER       = IntegerType{}
	LONG          = LongType{}
	FLOAT         = FloatType{}
	DOUBLE        = DoubleType{}
	DATE          = DateType{}
	TIMESTAMP     = TimestampType{}
	TIMESTAMP_NTZ = TimestampNtzType{}
	STRING        = StringType{}
)

================================================
FILE: spark/sql/types/datatype_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types

import (
	"testing"

	"github.com/apache/arrow-go/v18/arrow"
	"github.com/stretchr/testify/assert"
)

// TestArrayType_TypeName verifies the "Array<Element>" rendering, including
// nested arrays.
//
// FIX: the expected-name literals had their generic parameters stripped
// (e.g. "Array" instead of "Array<Integer>"); they are restored here from
// ArrayType.TypeName's "Array<%s>" format so the assertions can pass.
func TestArrayType_TypeName(t *testing.T) {
	tests := []struct {
		name         string
		arrayType    ArrayType
		expectedName string
	}{
		{
			name: "Array of integers",
			arrayType: ArrayType{
				ElementType:  INTEGER,
				ContainsNull: false,
			},
			expectedName: "Array<Integer>",
		},
		{
			name: "Array of strings with nulls",
			arrayType: ArrayType{
				ElementType:  STRING,
				ContainsNull: true,
			},
			expectedName: "Array<String>",
		},
		{
			name: "Nested array",
			arrayType: ArrayType{
				ElementType: ArrayType{
					ElementType:  DOUBLE,
					ContainsNull: false,
				},
				ContainsNull: false,
			},
			expectedName: "Array<Array<Double>>",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			assert.Equal(t, tt.expectedName, tt.arrayType.TypeName())
		})
	}
}

// TestArrayType_IsNumeric checks that arrays are never numeric.
func TestArrayType_IsNumeric(t *testing.T) {
	arrayType := ArrayType{
		ElementType:  INTEGER,
		ContainsNull: false,
	}
	assert.False(t, arrayType.IsNumeric())
}

// TestArrayType_ToArrowType verifies the Arrow list mapping for arrays.
func TestArrayType_ToArrowType(t *testing.T) {
	tests := []struct {
		name      string
		arrayType ArrayType
		validate  func(t *testing.T, arrowType arrow.DataType)
	}{
		{
			name: "Array of integers",
			arrayType: ArrayType{
				ElementType:  INTEGER,
				ContainsNull: false,
			},
			validate: func(t *testing.T, arrowType arrow.DataType) {
				listType, ok := arrowType.(*arrow.ListType)
				assert.True(t, ok)
				assert.Equal(t, arrow.PrimitiveTypes.Int32, listType.Elem())
			},
		},
		{
			name: "Array of strings",
			arrayType: ArrayType{
				ElementType:  STRING,
				ContainsNull: true,
			},
			validate: func(t *testing.T, arrowType arrow.DataType) {
				listType, ok := arrowType.(*arrow.ListType)
				assert.True(t, ok)
				assert.Equal(t, arrow.BinaryTypes.String, listType.Elem())
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			arrowType := tt.arrayType.ToArrowType()
			tt.validate(t, arrowType)
		})
	}
}

// TestMapType_TypeName verifies the "Map<Key,Value>" rendering, including
// nested container values.
//
// FIX: expected-name literals restored from MapType.TypeName's "Map<%s,%s>"
// format (the generic parameters had been stripped).
func TestMapType_TypeName(t *testing.T) {
	tests := []struct {
		name         string
		mapType      MapType
		expectedName string
	}{
		{
			name: "Map of string to integer",
			mapType: MapType{
				KeyType:           STRING,
				ValueType:         INTEGER,
				ValueContainsNull: false,
			},
			expectedName: "Map<String,Integer>",
		},
		{
			name: "Map of integer to array",
			mapType: MapType{
				KeyType: INTEGER,
				ValueType: ArrayType{
					ElementType:  STRING,
					ContainsNull: true,
				},
				ValueContainsNull: true,
			},
			expectedName: "Map<Integer,Array<String>>",
		},
		{
			name: "Nested map",
			mapType: MapType{
				KeyType: STRING,
				ValueType: MapType{
					KeyType:           STRING,
					ValueType:         DOUBLE,
					ValueContainsNull: false,
				},
				ValueContainsNull: false,
			},
			expectedName: "Map<String,Map<String,Double>>",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			assert.Equal(t, tt.expectedName, tt.mapType.TypeName())
		})
	}
}

// TestMapType_IsNumeric checks that maps are never numeric.
func TestMapType_IsNumeric(t *testing.T) {
	mapType := MapType{
		KeyType:           STRING,
		ValueType:         INTEGER,
		ValueContainsNull: false,
	}
	assert.False(t, mapType.IsNumeric())
}

// TestMapType_ToArrowType verifies the Arrow map mapping (key and item types).
func TestMapType_ToArrowType(t *testing.T) {
	tests := []struct {
		name     string
		mapType  MapType
		validate func(t *testing.T, arrowType arrow.DataType)
	}{
		{
			name: "Map of string to integer",
			mapType: MapType{
				KeyType:           STRING,
				ValueType:         INTEGER,
				ValueContainsNull: true,
			},
			validate: func(t *testing.T, arrowType arrow.DataType) {
				mapType, ok := arrowType.(*arrow.MapType)
				assert.True(t, ok)
				assert.Equal(t, arrow.BinaryTypes.String, mapType.KeyType())
				assert.Equal(t, arrow.PrimitiveTypes.Int32, mapType.ItemType())
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			arrowType := tt.mapType.ToArrowType()
			tt.validate(t, arrowType)
		})
	}
}

// TestMakeArrayType checks the MakeArrayType constructor.
// FIX: expected name restored to "Array<String>".
func TestMakeArrayType(t *testing.T) {
	arrayType := MakeArrayType(STRING, true)
	assert.Equal(t, STRING, arrayType.ElementType)
	assert.True(t, arrayType.ContainsNull)
	assert.Equal(t, "Array<String>", arrayType.TypeName())
}

// TestComplexTypeNesting checks rendering of deeply nested container types.
// FIX: expected name restored to the full nested form.
func TestComplexTypeNesting(t *testing.T) {
	// Test complex nested structure: Array<Map<String,Array<Integer>>>
	innerArray := ArrayType{
		ElementType:  INTEGER,
		ContainsNull: false,
	}
	mapType := MapType{
		KeyType:           STRING,
		ValueType:         innerArray,
		ValueContainsNull: true,
	}
	outerArray := ArrayType{
		ElementType:  mapType,
		ContainsNull: false,
	}
	expectedName := "Array<Map<String,Array<Integer>>>"
	assert.Equal(t, expectedName, outerArray.TypeName())
}
================================================ FILE: spark/sql/types/row.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package types import ( "encoding/base64" "encoding/json" "fmt" "maps" "reflect" "time" "github.com/apache/arrow-go/v18/arrow" "github.com/apache/arrow-go/v18/arrow/decimal128" "github.com/apache/arrow-go/v18/arrow/decimal256" ) type Row interface { // At returns field's value at the given index within a [Row]. // It returns nil for invalid indices. At(index int) any // Value returns field's value of the given column's name within a [Row]. // It returns nil for invalid column's name. Value(name string) any // Values returns values of all fields within a [Row] as a slice of any. Values() []any // Len returns the number of fields within a [Row]. Len() int FieldNames() []string // ToJsonString converts the Row to a JSON string representation. // Returns an error if the row contains data that cannot be properly represented in JSON. 
ToJsonString() (string, error) } type rowImpl struct { values []any offsets map[string]int } func (r *rowImpl) At(index int) any { if index < 0 || index > len(r.values) { return nil } return r.values[index] } func (r *rowImpl) Value(name string) any { idx, ok := r.offsets[name] if !ok { return nil } return r.values[idx] } func (r *rowImpl) Values() []any { return r.values } func (r *rowImpl) Len() int { return len(r.values) } func (r *rowImpl) FieldNames() []string { names := make([]string, len(r.offsets)) // Sort the field names to make the output deterministic. for k, v := range maps.All(r.offsets) { names[v] = k } return names } func (r *rowImpl) ToJsonString() (string, error) { jsonMap := make(map[string]any) fieldNames := r.FieldNames() for i, fieldName := range fieldNames { value := r.values[i] convertedValue, err := convertToJsonValue(value) if err != nil { return "", fmt.Errorf("failed to convert field '%s': %w", fieldName, err) } jsonMap[fieldName] = convertedValue } jsonBytes, err := json.Marshal(jsonMap) if err != nil { return "", fmt.Errorf("failed to marshal JSON: %w", err) } return string(jsonBytes), nil } func convertToJsonValue(value any) (any, error) { if value == nil { return nil, nil } switch v := value.(type) { case bool, string, int8, int16, int32, int64, float32, float64: return v, nil case []byte: return base64.StdEncoding.EncodeToString(v), nil case decimal128.Num: return v.BigInt().String(), nil case decimal256.Num: return v.BigInt().String(), nil case arrow.Timestamp: epochUs := int64(v) t := time.Unix(epochUs/1000000, (epochUs%1000000)*1000).UTC() return t.Format(time.RFC3339), nil case arrow.Date32: epochDays := int64(v) epochTime := time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC).AddDate(0, 0, int(epochDays)) return epochTime.Format("2006-01-02"), nil case arrow.Date64: epochMs := int64(v) t := time.Unix(epochMs/1000, (epochMs%1000)*1000000).UTC() return t.Format("2006-01-02"), nil case time.Time: if v.IsZero() { return nil, nil } return 
v.Format(time.RFC3339), nil case []any: result := make([]any, len(v)) for i, item := range v { convertedItem, err := convertToJsonValue(item) if err != nil { return nil, fmt.Errorf("failed to convert array element at index %d: %w", i, err) } result[i] = convertedItem } return result, nil case map[any]any: result := make(map[string]any) for key, val := range v { keyStr, ok := key.(string) if !ok { return nil, fmt.Errorf("map key must be string for JSON conversion, got %T", key) } convertedVal, err := convertToJsonValue(val) if err != nil { return nil, fmt.Errorf("failed to convert map value for key '%s': %w", keyStr, err) } result[keyStr] = convertedVal } return result, nil case map[string]any: result := make(map[string]any) for key, val := range v { convertedVal, err := convertToJsonValue(val) if err != nil { return nil, fmt.Errorf("failed to convert map value for key '%s': %w", key, err) } result[key] = convertedVal } return result, nil default: // Use reflection to handle custom types that have basic types as their underlying type. // For example, a custom type like "type MyInt int32" would not match the explicit // int32 case above, but would match reflect.Int32 here. This ensures we can still // convert custom integer, float, bool, and string types to their JSON representations. 
rv := reflect.ValueOf(value) switch rv.Kind() { case reflect.Slice, reflect.Array: length := rv.Len() result := make([]any, length) for i := 0; i < length; i++ { convertedItem, err := convertToJsonValue(rv.Index(i).Interface()) if err != nil { return nil, fmt.Errorf("failed to convert array element at index %d: %w", i, err) } result[i] = convertedItem } return result, nil case reflect.Map: if rv.Type().Key().Kind() != reflect.String { return nil, fmt.Errorf("map key must be string for JSON conversion, got %s", rv.Type().Key().Kind()) } result := make(map[string]any) for _, key := range rv.MapKeys() { keyStr := key.String() val := rv.MapIndex(key) convertedVal, err := convertToJsonValue(val.Interface()) if err != nil { return nil, fmt.Errorf("failed to convert map value for key '%s': %w", keyStr, err) } result[keyStr] = convertedVal } return result, nil case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return rv.Int(), nil case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: return rv.Uint(), nil case reflect.Float32, reflect.Float64: return rv.Float(), nil case reflect.Bool: return rv.Bool(), nil case reflect.String: return rv.String(), nil default: return fmt.Sprintf("%v", value), nil } } } ================================================ FILE: spark/sql/types/row_json_test.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package types

import (
	"encoding/json"
	"testing"
	"time"

	"github.com/apache/arrow-go/v18/arrow"
	"github.com/apache/arrow-go/v18/arrow/decimal128"
	"github.com/apache/arrow-go/v18/arrow/decimal256"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestRowToJsonString exercises Row.ToJsonString across scalars, binary,
// decimals, temporal values, arrays, maps and nested structures, including
// the error path for maps with non-string keys.
func TestRowToJsonString(t *testing.T) {
	tests := []struct {
		name     string
		row      Row
		expected string
		hasError bool
	}{
		{
			name: "basic types",
			row: &rowImpl{
				values: []any{
					"hello",
					int32(42),
					int64(123),
					float64(3.14),
					true,
					nil,
				},
				offsets: map[string]int{
					"str_col":    0,
					"int_col":    1,
					"long_col":   2,
					"double_col": 3,
					"bool_col":   4,
					"null_col":   5,
				},
			},
			expected: `{"bool_col":true,"double_col":3.14,"int_col":42,"long_col":123,"null_col":null,"str_col":"hello"}`,
			hasError: false,
		},
		{
			name: "binary data",
			row: &rowImpl{
				values: []any{
					[]byte("hello world"),
				},
				offsets: map[string]int{
					"binary_col": 0,
				},
			},
			// Binary columns are base64-encoded.
			expected: `{"binary_col":"aGVsbG8gd29ybGQ="}`,
			hasError: false,
		},
		{
			name: "decimal types",
			row: &rowImpl{
				values: []any{
					decimal128.FromI64(123456),
					decimal256.FromI64(789012),
				},
				offsets: map[string]int{
					"decimal128_col": 0,
					"decimal256_col": 1,
				},
			},
			// Decimals are rendered as strings to avoid precision loss.
			expected: `{"decimal128_col":"123456","decimal256_col":"789012"}`,
			hasError: false,
		},
		{
			name: "timestamp and date",
			row: &rowImpl{
				values: []any{
					arrow.Timestamp(1686981953115000), // microseconds
					arrow.Date32(19521),               // days since epoch (2023-06-13)
					arrow.Date64(1686981953115),       // milliseconds
				},
				offsets: map[string]int{
					"timestamp_col": 0,
					"date32_col":    1,
					"date64_col":    2,
				},
			},
			expected: `{"date32_col":"2023-06-13","date64_col":"2023-06-17","timestamp_col":"2023-06-17T06:05:53Z"}`,
			hasError: false,
		},
		{
			name: "arrays",
			row: &rowImpl{
				values: []any{
					[]any{1, 2, 3},
					[]any{"a", "b", "c"},
				},
				offsets: map[string]int{
					"int_array": 0,
					"str_array": 1,
				},
			},
			expected: `{"int_array":[1,2,3],"str_array":["a","b","c"]}`,
			hasError: false,
		},
		{
			name: "valid string map",
			row: &rowImpl{
				values: []any{
					map[string]any{
						"key1": "value1",
						"key2": 42,
					},
				},
				offsets: map[string]int{
					"map_col": 0,
				},
			},
			expected: `{"map_col":{"key1":"value1","key2":42}}`,
			hasError: false,
		},
		{
			name: "invalid map with non-string keys",
			row: &rowImpl{
				values: []any{
					map[any]any{
						42:     "value1",
						"key2": "value2",
					},
				},
				offsets: map[string]int{
					"map_col": 0,
				},
			},
			// Non-string map keys cannot be represented as JSON object keys.
			expected: "",
			hasError: true,
		},
		{
			name: "nested structures",
			row: &rowImpl{
				values: []any{
					[]any{
						map[string]any{
							"nested_key": "nested_value",
							"nested_num": 123,
						},
					},
				},
				offsets: map[string]int{
					"nested_col": 0,
				},
			},
			expected: `{"nested_col":[{"nested_key":"nested_value","nested_num":123}]}`,
			hasError: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, err := tt.row.ToJsonString()
			if tt.hasError {
				assert.Error(t, err)
				assert.Empty(t, result)
			} else {
				require.NoError(t, err)
				// Verify the result is valid JSON
				var parsed map[string]any
				err = json.Unmarshal([]byte(result), &parsed)
				require.NoError(t, err)
				// Verify the expected content (comparing parsed JSON to avoid key ordering issues)
				var expected map[string]any
				err = json.Unmarshal([]byte(tt.expected), &expected)
				require.NoError(t, err)
				assert.Equal(t, expected, parsed)
			}
		})
	}
}

// TestConvertToJsonValue covers scalar edge cases of convertToJsonValue that
// TestRowToJsonString does not reach directly (nil and time.Time handling).
func TestConvertToJsonValue(t *testing.T) {
	tests := []struct {
		name     string
		input    any
		expected any
		hasError bool
	}{
		{
			name:     "nil value",
			input:    nil,
			expected: nil,
			hasError: false,
		},
		{
			name:     "time.Time",
			input:    time.Date(2023, 6, 17, 10, 5, 53, 0, time.UTC),
			expected: "2023-06-17T10:05:53Z",
			hasError: false,
		},
		{
			// The zero time is mapped to JSON null rather than a bogus date.
			name:     "zero time.Time",
			input:    time.Time{},
			expected: nil,
			hasError: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, err := convertToJsonValue(tt.input)
			if tt.hasError {
				assert.Error(t, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, tt.expected, result)
			}
		})
	}
}

================================================
FILE: spark/sql/types/row_test.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package types

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// rowImplSample is a read-only sample [Row] to be used in all tests.
var rowImplSample rowImpl = rowImpl{ values: []any{1, 2, 3, 4, 5}, offsets: map[string]int{ "five": 4, "one": 0, "two": 1, "four": 3, "three": 2, }, } func TestRowImpl_At(t *testing.T) { testCases := []struct { name string input int exp any }{ { name: "index within range", input: 2, exp: 3, }, { name: "index out of range", input: 6, exp: nil, }, { name: "negative index", input: -1, exp: nil, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { act := rowImplSample.At(tc.input) require.Equal(t, tc.exp, act) }) } } func TestRowImpl_Value(t *testing.T) { testCases := []struct { name string input string exp any }{ { name: "valid field name", input: "two", exp: 2, }, { name: "invalid field name", input: "six", exp: nil, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { act := rowImplSample.Value(tc.input) require.Equal(t, tc.exp, act) }) } } func TestRowImpl_Values(t *testing.T) { exp := []any{1, 2, 3, 4, 5} act := rowImplSample.Values() require.Equal(t, exp, act) } func TestRowImpl_Len(t *testing.T) { exp := 5 act := rowImplSample.Len() require.Equal(t, exp, act) } func TestRowImpl_FieldNames(t *testing.T) { exp := []string{"one", "two", "three", "four", "five"} act := rowImplSample.FieldNames() require.ElementsMatch(t, exp, act) } ================================================ FILE: spark/sql/types/structtype.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package types import ( "fmt" "strings" "github.com/apache/arrow-go/v18/arrow" ) // StructField represents a field in a StructType. type StructField struct { Name string DataType DataType Nullable bool // default should be true Metadata *string } func (t *StructField) ToArrowType() arrow.Field { return arrow.Field{ Name: t.Name, Type: t.DataType.ToArrowType(), Nullable: t.Nullable, } } func (t *StructField) buildFormattedString(prefix string, target *string) { if target == nil { return } switch t.DataType.(type) { case ArrayType: *target += fmt.Sprintf("%s-- %s: array (nullable = %t)\n", prefix, t.Name, t.Nullable) *target += fmt.Sprintf("%s |-- element: %s (valueContainsNull = %t)\n", prefix, t.DataType.(ArrayType).ElementType.TypeName(), t.Nullable) case MapType: *target += fmt.Sprintf("%s-- %s: map (nullable = %t)\n", prefix, t.Name, t.Nullable) *target += fmt.Sprintf("%s |-- key: %s\n", prefix, t.DataType.(MapType).KeyType.TypeName()) *target += fmt.Sprintf("%s |-- value: %s (valueContainsNull = %t)\n", prefix, t.DataType.(MapType).ValueType.TypeName(), t.Nullable) case StructType: *target += fmt.Sprintf("%s-- %s: structtype (nullable = %t)\n", prefix, t.Name, t.Nullable) for _, field := range t.DataType.(StructType).Fields { field.buildFormattedString(prefix+" |", target) } default: *target += fmt.Sprintf("%s-- %s: %s (nullable = %t)\n", prefix, t.Name, strings.ToLower(t.DataType.TypeName()), t.Nullable) } } // StructType represents a struct type. 
type StructType struct { Fields []StructField } func (t StructType) TypeName() string { return "structtype" } func (t StructType) IsNumeric() bool { return false } func (t StructType) ToArrowType() arrow.DataType { fields := make([]arrow.Field, len(t.Fields)) for i, f := range t.Fields { fields[i] = f.ToArrowType() } return arrow.StructOf(fields...) } func (t *StructType) TreeString() string { tree := string("root\n") prefix := " |" for _, f := range t.Fields { f.buildFormattedString(prefix, &tree) } return tree + "\n" } func StructOf(fields ...StructField) *StructType { return &StructType{Fields: fields} } func NewStructField(name string, dataType DataType) StructField { return StructField{ Name: name, DataType: dataType, Nullable: true, } } ================================================ FILE: spark/sql/types/structtype_test.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package types

import (
	"strings"
	"testing"

	"github.com/apache/arrow-go/v18/arrow"
	"github.com/stretchr/testify/assert"
)

// TestStructOf checks that StructOf collects its variadic fields.
func TestStructOf(t *testing.T) {
	s := StructOf(NewStructField("col1", BYTE))
	assert.Len(t, s.Fields, 1)
}

// TestStructType_TypeName checks the fixed "structtype" name.
func TestStructType_TypeName(t *testing.T) {
	structType := StructType{
		Fields: []StructField{
			{Name: "field1", DataType: INTEGER},
			{Name: "field2", DataType: STRING},
		},
	}
	assert.Equal(t, "structtype", structType.TypeName())
}

// TestStructType_IsNumeric checks that structs are never numeric.
func TestStructType_IsNumeric(t *testing.T) {
	structType := StructType{
		Fields: []StructField{
			{Name: "field1", DataType: INTEGER},
		},
	}
	assert.False(t, structType.IsNumeric())
}

// TestStructType_ToArrowType verifies the Arrow struct mapping, including
// per-field names, types and nullability.
func TestStructType_ToArrowType(t *testing.T) {
	tests := []struct {
		name       string
		structType StructType
		validate   func(t *testing.T, arrowType arrow.DataType)
	}{
		{
			name: "Simple struct with integer and string fields",
			structType: StructType{
				Fields: []StructField{
					{Name: "id", DataType: INTEGER, Nullable: false},
					{Name: "name", DataType: STRING, Nullable: true},
				},
			},
			validate: func(t *testing.T, arrowType arrow.DataType) {
				structType, ok := arrowType.(*arrow.StructType)
				assert.True(t, ok)
				assert.Equal(t, 2, structType.NumFields())

				field1 := structType.Field(0)
				assert.Equal(t, "id", field1.Name)
				assert.Equal(t, arrow.PrimitiveTypes.Int32, field1.Type)
				assert.False(t, field1.Nullable)

				field2 := structType.Field(1)
				assert.Equal(t, "name", field2.Name)
				assert.Equal(t, arrow.BinaryTypes.String, field2.Type)
				assert.True(t, field2.Nullable)
			},
		},
		{
			name: "Struct with array field",
			structType: StructType{
				Fields: []StructField{
					{Name: "items", DataType: ArrayType{ElementType: STRING, ContainsNull: true}, Nullable: false},
				},
			},
			validate: func(t *testing.T, arrowType arrow.DataType) {
				structType, ok := arrowType.(*arrow.StructType)
				assert.True(t, ok)
				assert.Equal(t, 1, structType.NumFields())

				field := structType.Field(0)
				assert.Equal(t, "items", field.Name)
				listType, ok := field.Type.(*arrow.ListType)
				assert.True(t, ok)
				assert.Equal(t, arrow.BinaryTypes.String, listType.Elem())
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			arrowType := tt.structType.ToArrowType()
			tt.validate(t, arrowType)
		})
	}
}

func TestStructType_ToArrowType_ReturnType(t *testing.T) {
	// Test that ToArrowType returns arrow.DataType interface, not *arrow.StructType
	structType := StructType{
		Fields: []StructField{
			{Name: "field1", DataType: INTEGER},
		},
	}

	arrowType := structType.ToArrowType()
	// This should compile and work correctly with the interface
	dataType := arrowType
	assert.NotNil(t, dataType)

	// But we should still be able to cast it to the concrete type
	concreteType, ok := arrowType.(*arrow.StructType)
	assert.True(t, ok)
	assert.Equal(t, 1, concreteType.NumFields())
}

// TestTreeString checks the printSchema-style rendering for a flat schema.
func TestTreeString(t *testing.T) {
	c := NewStructField("col1", STRING)
	c.Nullable = false
	s := StructOf(
		c,
		NewStructField("col2", INTEGER),
		NewStructField("col3", DATE),
	)
	assert.Len(t, s.Fields, 3)
	ts := s.TreeString()
	assert.Contains(t, ts, "|-- col1: string (nullable = false")
	assert.Contains(t, ts, "|-- col2: integer (nullable = true)")
	assert.Contains(t, ts, "|-- col3: date (nullable = true)")
}

// TestTreeString_ComplexNestedTypes checks the rendering of arrays, maps and
// nested structs, plus the overall line structure of the output.
func TestTreeString_ComplexNestedTypes(t *testing.T) {
	// Create a complex nested structure with maps, arrays, and nested structs
	nestedStruct := StructOf(
		NewStructField("nested_id", INTEGER),
		NewStructField("nested_name", STRING),
	)

	arrayOfStrings := ArrayType{
		ElementType:  STRING,
		ContainsNull: true,
	}

	mapOfIntToString := MapType{
		KeyType:           INTEGER,
		ValueType:         STRING,
		ValueContainsNull: true,
	}

	arrayOfMaps := ArrayType{
		ElementType: MapType{
			KeyType:           STRING,
			ValueType:         DOUBLE,
			ValueContainsNull: false,
		},
		ContainsNull: true,
	}

	complexStruct := StructOf(
		NewStructField("id", INTEGER),
		NewStructField("name", STRING),
		NewStructField("tags", arrayOfStrings),
		NewStructField("metadata", mapOfIntToString),
		NewStructField("scores", arrayOfMaps),
		NewStructField("profile", *nestedStruct),
		NewStructField("active", BOOLEAN),
	)

	ts := complexStruct.TreeString()

	// Verify the tree string contains all expected elements
	assert.Contains(t, ts, "root")
	assert.Contains(t, ts, "|-- id: integer (nullable = true)")
	assert.Contains(t, ts, "|-- name: string (nullable = true)")
	assert.Contains(t, ts, "|-- tags: array (nullable = true)")
	assert.Contains(t, ts, "|-- metadata: map (nullable = true)")
	assert.Contains(t, ts, "|-- scores: array (nullable = true)")
	assert.Contains(t, ts, "|-- profile: structtype (nullable = true)")
	assert.Contains(t, ts, "|-- active: boolean (nullable = true)")

	// Verify the structure starts with "root" and ends with newlines
	assert.True(t, strings.HasPrefix(ts, "root\n"))
	assert.True(t, strings.HasSuffix(ts, "\n"))

	// Verify the correct number of field lines (excluding root and trailing newline)
	lines := strings.Split(strings.TrimSpace(ts), "\n")
	// 7 extra lines beyond the fields themselves: the "root" line, the
	// element line for "tags", the key and value lines for "metadata", the
	// element line for "scores", and the two nested "profile" fields.
	assert.Equal(t, len(complexStruct.Fields)+7, len(lines))

	// Verify the prefix format for all field lines
	for i := 1; i < len(lines); i++ {
		assert.True(t, strings.HasPrefix(lines[i], " |"))
	}
}

================================================
FILE: spark/sql/utils/check.go
================================================
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package utils func WarnOnError(f func() error, h func(e error)) { if err := f(); err != nil { h(err) } } ================================================ FILE: spark/sql/utils/check_test.go ================================================ // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package utils import ( "testing" "github.com/stretchr/testify/assert" ) func TestWarnOnError(t *testing.T) { WarnOnError(func() error { return nil }, func(e error) { t.Errorf("Unexpected error: %v", e) }) called := 0 WarnOnError(func() error { return assert.AnError }, func(e error) { called++ assert.Equal(t, assert.AnError, e) }) assert.Equalf(t, 1, called, "Expected error handler to be called once, got %v", called) } ================================================ FILE: spark/sql/utils/consts.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. 
You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package utils import proto "github.com/apache/spark-connect-go/internal/generated" type ExplainMode int const ( ExplainModeSimple ExplainMode = iota ExplainModeExtended ExplainMode = iota ExplainModeCodegen ExplainMode = iota ExplainModeCost ExplainMode = iota ExplainModeFormatted ExplainMode = iota ) type StorageLevel int const ( StorageLevelDiskOnly StorageLevel = iota StorageLevelDiskOnly2 StorageLevel = iota StorageLevelDiskOnly3 StorageLevel = iota StorageLevelMemoryAndDisk StorageLevel = iota StorageLevelMemoryAndDisk2 StorageLevel = iota StorageLevelMemoryOnly StorageLevel = iota StorageLevelMemoryOnly2 StorageLevel = iota StorageLevelMemoyAndDiskDeser StorageLevel = iota StorageLevelNone StorageLevel = iota StorageLevelOffHeap StorageLevel = iota ) func ToProtoStorageLevel(level StorageLevel) *proto.StorageLevel { switch level { case StorageLevelDiskOnly: return &proto.StorageLevel{UseDisk: true, UseMemory: false, Replication: 1} case StorageLevelDiskOnly2: return &proto.StorageLevel{UseDisk: true, UseMemory: false, Replication: 2} case StorageLevelDiskOnly3: return &proto.StorageLevel{UseDisk: true, UseMemory: false, Replication: 3} case StorageLevelMemoryAndDisk: return &proto.StorageLevel{UseDisk: true, UseMemory: true, Replication: 1} case StorageLevelMemoryAndDisk2: return &proto.StorageLevel{UseDisk: true, UseMemory: true, Replication: 2} case StorageLevelMemoryOnly: return &proto.StorageLevel{UseDisk: false, UseMemory: true, Replication: 1} case StorageLevelMemoryOnly2: return &proto.StorageLevel{UseDisk: false, UseMemory: true, Replication: 2} 
case StorageLevelMemoyAndDiskDeser: return &proto.StorageLevel{UseDisk: true, UseMemory: true, Replication: 1, Deserialized: true} case StorageLevelOffHeap: return &proto.StorageLevel{UseDisk: true, UseMemory: true, UseOffHeap: true, Replication: 1} default: return &proto.StorageLevel{UseDisk: false, UseMemory: false, UseOffHeap: false, Replication: 1} } } func FromProtoStorageLevel(level *proto.StorageLevel) StorageLevel { if level.UseDisk && level.UseMemory && level.Replication <= 1 && !level.Deserialized && !level.UseOffHeap { return StorageLevelMemoryAndDisk } else if level.UseDisk && level.UseMemory && level.Replication == 2 && !level.Deserialized && !level.UseOffHeap { return StorageLevelMemoryAndDisk2 } else if level.UseDisk && !level.UseMemory && level.Replication == 3 && !level.Deserialized && !level.UseOffHeap { return StorageLevelDiskOnly3 } else if level.UseDisk && !level.UseMemory && level.Replication == 2 && !level.Deserialized && !level.UseOffHeap { return StorageLevelDiskOnly2 } else if level.UseDisk && !level.UseMemory && level.Replication <= 1 && !level.Deserialized && !level.UseOffHeap { return StorageLevelDiskOnly } else if !level.UseDisk && level.UseMemory && level.Replication <= 1 && !level.Deserialized && !level.UseOffHeap { return StorageLevelMemoryOnly } else if !level.UseDisk && level.UseMemory && level.Replication == 2 && !level.Deserialized && !level.UseOffHeap { return StorageLevelMemoryOnly2 } else if level.UseDisk && level.UseMemory && level.Replication <= 1 && level.Deserialized && !level.UseOffHeap { return StorageLevelMemoyAndDiskDeser } else if !level.UseDisk && !level.UseMemory && !level.Deserialized && !level.UseOffHeap { return StorageLevelNone } else if level.UseOffHeap && !level.Deserialized { return StorageLevelOffHeap } return StorageLevelNone } type JoinType int const ( JoinTypeInner JoinType = iota JoinTypeLeftOuter JoinType = iota JoinTypeRightOuter JoinType = iota JoinTypeFullOuter JoinType = iota JoinTypeLeftSemi 
JoinType = iota JoinTypeLeftAnti JoinType = iota JoinTypeCross JoinType = iota ) func ToProtoJoinType(joinType JoinType) proto.Join_JoinType { switch joinType { case JoinTypeInner: return proto.Join_JOIN_TYPE_INNER case JoinTypeLeftOuter: return proto.Join_JOIN_TYPE_LEFT_OUTER case JoinTypeRightOuter: return proto.Join_JOIN_TYPE_RIGHT_OUTER case JoinTypeFullOuter: return proto.Join_JOIN_TYPE_FULL_OUTER case JoinTypeLeftSemi: return proto.Join_JOIN_TYPE_LEFT_SEMI case JoinTypeLeftAnti: return proto.Join_JOIN_TYPE_LEFT_ANTI case JoinTypeCross: return proto.Join_JOIN_TYPE_CROSS default: return proto.Join_JOIN_TYPE_INNER } } ================================================ FILE: spark/sql/utils/consts_test.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package utils import "testing" func TestStorageLevelConversion(t *testing.T) { // Given a list of all storage levels, convert them to and from proto and // check with the original value: for _, level := range []StorageLevel{ StorageLevelDiskOnly, StorageLevelDiskOnly2, StorageLevelDiskOnly3, StorageLevelMemoryAndDisk, StorageLevelMemoryAndDisk2, StorageLevelMemoryOnly, StorageLevelMemoryOnly2, StorageLevelMemoyAndDiskDeser, StorageLevelNone, StorageLevelOffHeap, } { protoLevel := ToProtoStorageLevel(level) convertedLevel := FromProtoStorageLevel(protoLevel) if level != convertedLevel { t.Errorf("Expected %v, got %v", level, convertedLevel) } } } ================================================ FILE: spark/version.go ================================================ // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package spark func Version() string { return "4.0.0" }