| # |
| # Licensed to the Apache Software Foundation (ASF) under one or more |
| # contributor license agreements. See the NOTICE file distributed with |
| # this work for additional information regarding copyright ownership. |
| # The ASF licenses this file to You under the Apache License, Version 2.0 |
| # (the "License"); you may not use this file except in compliance with |
| # the License. You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| # |
| """ |
| @generated by mypy-protobuf. Do not edit manually! |
| isort:skip_file |
| |
| """ |
| import builtins |
| import collections.abc |
| import google.protobuf.any_pb2 |
| import google.protobuf.descriptor |
| import google.protobuf.internal.containers |
| import google.protobuf.internal.enum_type_wrapper |
| import google.protobuf.message |
| import pyspark.sql.connect.proto.commands_pb2 |
| import pyspark.sql.connect.proto.common_pb2 |
| import pyspark.sql.connect.proto.expressions_pb2 |
| import pyspark.sql.connect.proto.ml_pb2 |
| import pyspark.sql.connect.proto.pipelines_pb2 |
| import pyspark.sql.connect.proto.relations_pb2 |
| import pyspark.sql.connect.proto.types_pb2 |
| import sys |
| import typing |
| |
| if sys.version_info >= (3, 10): |
| import typing as typing_extensions |
| else: |
| import typing_extensions |
| |
| DESCRIPTOR: google.protobuf.descriptor.FileDescriptor |
| |
| class Plan(google.protobuf.message.Message): |
| """A [[Plan]] is the structure that carries the runtime information for the execution from the |
| client to the server. A [[Plan]] can either be of the type [[Relation]] which is a reference |
| to the underlying logical plan or it can be of the [[Command]] type that is used to execute |
| commands on the server. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ROOT_FIELD_NUMBER: builtins.int |
| COMMAND_FIELD_NUMBER: builtins.int |
| @property |
| def root(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: ... |
| @property |
| def command(self) -> pyspark.sql.connect.proto.commands_pb2.Command: ... |
| def __init__( |
| self, |
| *, |
| root: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| command: pyspark.sql.connect.proto.commands_pb2.Command | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "command", b"command", "op_type", b"op_type", "root", b"root" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "command", b"command", "op_type", b"op_type", "root", b"root" |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["op_type", b"op_type"] |
| ) -> typing_extensions.Literal["root", "command"] | None: ... |
| |
| global___Plan = Plan |
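
# Example (illustrative sketch, not generated output): building a Plan and
# inspecting its oneof, assuming the runtime classes generated from
# spark/connect/base.proto and spark/connect/relations.proto:
#
#     from pyspark.sql.connect.proto.base_pb2 import Plan
#     from pyspark.sql.connect.proto.relations_pb2 import Relation
#
#     plan = Plan(root=Relation())  # wrap a relation as the plan root
#     assert plan.HasField("root")
#     assert plan.WhichOneof("op_type") == "root"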
| |
| class UserContext(google.protobuf.message.Message): |
| """User Context is used to refer to one particular user session that is executing |
| queries in the backend. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| USER_ID_FIELD_NUMBER: builtins.int |
| USER_NAME_FIELD_NUMBER: builtins.int |
| EXTENSIONS_FIELD_NUMBER: builtins.int |
| user_id: builtins.str |
| user_name: builtins.str |
| @property |
| def extensions( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| google.protobuf.any_pb2.Any |
| ]: |
| """To extend the existing user context message that is used to identify incoming requests, |
| Spark Connect leverages the Any protobuf type that can be used to inject arbitrary other |
| messages into this message. Extensions are stored as a `repeated` type to be able to |
| handle multiple active extensions. |
| """ |
| def __init__( |
| self, |
| *, |
| user_id: builtins.str = ..., |
| user_name: builtins.str = ..., |
| extensions: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "extensions", b"extensions", "user_id", b"user_id", "user_name", b"user_name" |
| ], |
| ) -> None: ... |
| |
| global___UserContext = UserContext |
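
# Example (illustrative sketch): a UserContext carrying an extension packed into
# a protobuf Any. Assumes the generated runtime classes; `some_msg` is a
# placeholder for whatever message a client wants to inject.
#
#     from pyspark.sql.connect.proto.base_pb2 import UserContext
#
#     ctx = UserContext(user_id="user_1", user_name="Alice")
#     ext = ctx.extensions.add()  # append a new Any to the repeated field
#     ext.Pack(some_msg)          # placeholder message, see note above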
| |
| class AnalyzePlanRequest(google.protobuf.message.Message): |
| """Request to perform plan analyze, optionally to explain the plan.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class Schema(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PLAN_FIELD_NUMBER: builtins.int |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to be analyzed.""" |
| def __init__( |
| self, |
| *, |
| plan: global___Plan | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["plan", b"plan"] |
| ) -> builtins.bool: ... |
| def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ... |
| |
| class Explain(google.protobuf.message.Message): |
| """Explains the input plan based on a configurable mode.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class _ExplainMode: |
| ValueType = typing.NewType("ValueType", builtins.int) |
| V: typing_extensions.TypeAlias = ValueType |
| |
| class _ExplainModeEnumTypeWrapper( |
| google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ |
| AnalyzePlanRequest.Explain._ExplainMode.ValueType |
| ], |
| builtins.type, |
| ): # noqa: F821 |
| DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor |
| EXPLAIN_MODE_UNSPECIFIED: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 0 |
| EXPLAIN_MODE_SIMPLE: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 1 |
| """Generates only physical plan.""" |
| EXPLAIN_MODE_EXTENDED: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 2 |
| """Generates parsed logical plan, analyzed logical plan, optimized logical plan and physical plan. |
| Parsed Logical plan is a unresolved plan that extracted from the query. Analyzed logical plans |
| transforms which translates unresolvedAttribute and unresolvedRelation into fully typed objects. |
| The optimized logical plan transforms through a set of optimization rules, resulting in the |
| physical plan. |
| """ |
| EXPLAIN_MODE_CODEGEN: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 3 |
| """Generates code for the statement, if any and a physical plan.""" |
| EXPLAIN_MODE_COST: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 4 |
| """If plan node statistics are available, generates a logical plan and also the statistics.""" |
| EXPLAIN_MODE_FORMATTED: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 5 |
| """Generates a physical plan outline and also node details.""" |
| |
| class ExplainMode(_ExplainMode, metaclass=_ExplainModeEnumTypeWrapper): |
| """Plan explanation mode.""" |
| |
| EXPLAIN_MODE_UNSPECIFIED: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 0 |
| EXPLAIN_MODE_SIMPLE: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 1 |
| """Generates only physical plan.""" |
| EXPLAIN_MODE_EXTENDED: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 2 |
| """Generates parsed logical plan, analyzed logical plan, optimized logical plan and physical plan. |
| Parsed Logical plan is a unresolved plan that extracted from the query. Analyzed logical plans |
| transforms which translates unresolvedAttribute and unresolvedRelation into fully typed objects. |
| The optimized logical plan transforms through a set of optimization rules, resulting in the |
| physical plan. |
| """ |
| EXPLAIN_MODE_CODEGEN: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 3 |
| """Generates code for the statement, if any and a physical plan.""" |
| EXPLAIN_MODE_COST: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 4 |
| """If plan node statistics are available, generates a logical plan and also the statistics.""" |
| EXPLAIN_MODE_FORMATTED: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 5 |
| """Generates a physical plan outline and also node details.""" |
| |
| PLAN_FIELD_NUMBER: builtins.int |
| EXPLAIN_MODE_FIELD_NUMBER: builtins.int |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to be analyzed.""" |
| explain_mode: global___AnalyzePlanRequest.Explain.ExplainMode.ValueType |
| """(Required) For analyzePlan rpc calls, configure the mode to explain plan in strings.""" |
| def __init__( |
| self, |
| *, |
| plan: global___Plan | None = ..., |
| explain_mode: global___AnalyzePlanRequest.Explain.ExplainMode.ValueType = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["plan", b"plan"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal["explain_mode", b"explain_mode", "plan", b"plan"], |
| ) -> None: ... |
| |
| class TreeString(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PLAN_FIELD_NUMBER: builtins.int |
| LEVEL_FIELD_NUMBER: builtins.int |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to be analyzed.""" |
| level: builtins.int |
| """(Optional) Max level of the schema.""" |
| def __init__( |
| self, |
| *, |
| plan: global___Plan | None = ..., |
| level: builtins.int | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_level", b"_level", "level", b"level", "plan", b"plan" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_level", b"_level", "level", b"level", "plan", b"plan" |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_level", b"_level"] |
| ) -> typing_extensions.Literal["level"] | None: ... |
| |
| class IsLocal(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PLAN_FIELD_NUMBER: builtins.int |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to be analyzed.""" |
| def __init__( |
| self, |
| *, |
| plan: global___Plan | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["plan", b"plan"] |
| ) -> builtins.bool: ... |
| def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ... |
| |
| class IsStreaming(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PLAN_FIELD_NUMBER: builtins.int |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to be analyzed.""" |
| def __init__( |
| self, |
| *, |
| plan: global___Plan | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["plan", b"plan"] |
| ) -> builtins.bool: ... |
| def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ... |
| |
| class InputFiles(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PLAN_FIELD_NUMBER: builtins.int |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to be analyzed.""" |
| def __init__( |
| self, |
| *, |
| plan: global___Plan | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["plan", b"plan"] |
| ) -> builtins.bool: ... |
| def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ... |
| |
| class SparkVersion(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| def __init__( |
| self, |
| ) -> None: ... |
| |
| class DDLParse(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| DDL_STRING_FIELD_NUMBER: builtins.int |
| ddl_string: builtins.str |
| """(Required) The DDL formatted string to be parsed.""" |
| def __init__( |
| self, |
| *, |
| ddl_string: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["ddl_string", b"ddl_string"] |
| ) -> None: ... |
| |
| class SameSemantics(google.protobuf.message.Message): |
| """Returns `true` when the logical query plans are equal and therefore return same results.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| TARGET_PLAN_FIELD_NUMBER: builtins.int |
| OTHER_PLAN_FIELD_NUMBER: builtins.int |
| @property |
| def target_plan(self) -> global___Plan: |
| """(Required) The plan to be compared.""" |
| @property |
| def other_plan(self) -> global___Plan: |
| """(Required) The other plan to be compared.""" |
| def __init__( |
| self, |
| *, |
| target_plan: global___Plan | None = ..., |
| other_plan: global___Plan | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "other_plan", b"other_plan", "target_plan", b"target_plan" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "other_plan", b"other_plan", "target_plan", b"target_plan" |
| ], |
| ) -> None: ... |
| |
| class SemanticHash(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PLAN_FIELD_NUMBER: builtins.int |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to get a hashCode.""" |
| def __init__( |
| self, |
| *, |
| plan: global___Plan | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["plan", b"plan"] |
| ) -> builtins.bool: ... |
| def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ... |
| |
| class Persist(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RELATION_FIELD_NUMBER: builtins.int |
| STORAGE_LEVEL_FIELD_NUMBER: builtins.int |
| @property |
| def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: |
| """(Required) The logical plan to persist.""" |
| @property |
| def storage_level(self) -> pyspark.sql.connect.proto.common_pb2.StorageLevel: |
| """(Optional) The storage level.""" |
| def __init__( |
| self, |
| *, |
| relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| storage_level: pyspark.sql.connect.proto.common_pb2.StorageLevel | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_storage_level", |
| b"_storage_level", |
| "relation", |
| b"relation", |
| "storage_level", |
| b"storage_level", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_storage_level", |
| b"_storage_level", |
| "relation", |
| b"relation", |
| "storage_level", |
| b"storage_level", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_storage_level", b"_storage_level"] |
| ) -> typing_extensions.Literal["storage_level"] | None: ... |
| |
| class Unpersist(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RELATION_FIELD_NUMBER: builtins.int |
| BLOCKING_FIELD_NUMBER: builtins.int |
| @property |
| def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: |
| """(Required) The logical plan to unpersist.""" |
| blocking: builtins.bool |
| """(Optional) Whether to block until all blocks are deleted.""" |
| def __init__( |
| self, |
| *, |
| relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| blocking: builtins.bool | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_blocking", b"_blocking", "blocking", b"blocking", "relation", b"relation" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_blocking", b"_blocking", "blocking", b"blocking", "relation", b"relation" |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_blocking", b"_blocking"] |
| ) -> typing_extensions.Literal["blocking"] | None: ... |
| |
| class GetStorageLevel(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RELATION_FIELD_NUMBER: builtins.int |
| @property |
| def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: |
| """(Required) The logical plan to get the storage level.""" |
| def __init__( |
| self, |
| *, |
| relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["relation", b"relation"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["relation", b"relation"] |
| ) -> None: ... |
| |
| class JsonToDDL(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| JSON_STRING_FIELD_NUMBER: builtins.int |
| json_string: builtins.str |
| """(Required) The JSON formatted string to be converted to DDL.""" |
| def __init__( |
| self, |
| *, |
| json_string: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["json_string", b"json_string"] |
| ) -> None: ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| SCHEMA_FIELD_NUMBER: builtins.int |
| EXPLAIN_FIELD_NUMBER: builtins.int |
| TREE_STRING_FIELD_NUMBER: builtins.int |
| IS_LOCAL_FIELD_NUMBER: builtins.int |
| IS_STREAMING_FIELD_NUMBER: builtins.int |
| INPUT_FILES_FIELD_NUMBER: builtins.int |
| SPARK_VERSION_FIELD_NUMBER: builtins.int |
| DDL_PARSE_FIELD_NUMBER: builtins.int |
| SAME_SEMANTICS_FIELD_NUMBER: builtins.int |
| SEMANTIC_HASH_FIELD_NUMBER: builtins.int |
| PERSIST_FIELD_NUMBER: builtins.int |
| UNPERSIST_FIELD_NUMBER: builtins.int |
| GET_STORAGE_LEVEL_FIELD_NUMBER: builtins.int |
| JSON_TO_DDL_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id specifies a spark session for a user id (which is specified |
| by user_context.user_id). The session_id is set by the client to be able to |
| collate streaming responses from different queries within the dedicated session. |
    The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
    Server-side generated idempotency key from the previous responses (if any). The server
    can use this to validate that the server-side session has not changed.
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context""" |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| @property |
| def schema(self) -> global___AnalyzePlanRequest.Schema: ... |
| @property |
| def explain(self) -> global___AnalyzePlanRequest.Explain: ... |
| @property |
| def tree_string(self) -> global___AnalyzePlanRequest.TreeString: ... |
| @property |
| def is_local(self) -> global___AnalyzePlanRequest.IsLocal: ... |
| @property |
| def is_streaming(self) -> global___AnalyzePlanRequest.IsStreaming: ... |
| @property |
| def input_files(self) -> global___AnalyzePlanRequest.InputFiles: ... |
| @property |
| def spark_version(self) -> global___AnalyzePlanRequest.SparkVersion: ... |
| @property |
| def ddl_parse(self) -> global___AnalyzePlanRequest.DDLParse: ... |
| @property |
| def same_semantics(self) -> global___AnalyzePlanRequest.SameSemantics: ... |
| @property |
| def semantic_hash(self) -> global___AnalyzePlanRequest.SemanticHash: ... |
| @property |
| def persist(self) -> global___AnalyzePlanRequest.Persist: ... |
| @property |
| def unpersist(self) -> global___AnalyzePlanRequest.Unpersist: ... |
| @property |
| def get_storage_level(self) -> global___AnalyzePlanRequest.GetStorageLevel: ... |
| @property |
| def json_to_ddl(self) -> global___AnalyzePlanRequest.JsonToDDL: ... |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| client_type: builtins.str | None = ..., |
| schema: global___AnalyzePlanRequest.Schema | None = ..., |
| explain: global___AnalyzePlanRequest.Explain | None = ..., |
| tree_string: global___AnalyzePlanRequest.TreeString | None = ..., |
| is_local: global___AnalyzePlanRequest.IsLocal | None = ..., |
| is_streaming: global___AnalyzePlanRequest.IsStreaming | None = ..., |
| input_files: global___AnalyzePlanRequest.InputFiles | None = ..., |
| spark_version: global___AnalyzePlanRequest.SparkVersion | None = ..., |
| ddl_parse: global___AnalyzePlanRequest.DDLParse | None = ..., |
| same_semantics: global___AnalyzePlanRequest.SameSemantics | None = ..., |
| semantic_hash: global___AnalyzePlanRequest.SemanticHash | None = ..., |
| persist: global___AnalyzePlanRequest.Persist | None = ..., |
| unpersist: global___AnalyzePlanRequest.Unpersist | None = ..., |
| get_storage_level: global___AnalyzePlanRequest.GetStorageLevel | None = ..., |
| json_to_ddl: global___AnalyzePlanRequest.JsonToDDL | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "analyze", |
| b"analyze", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "ddl_parse", |
| b"ddl_parse", |
| "explain", |
| b"explain", |
| "get_storage_level", |
| b"get_storage_level", |
| "input_files", |
| b"input_files", |
| "is_local", |
| b"is_local", |
| "is_streaming", |
| b"is_streaming", |
| "json_to_ddl", |
| b"json_to_ddl", |
| "persist", |
| b"persist", |
| "same_semantics", |
| b"same_semantics", |
| "schema", |
| b"schema", |
| "semantic_hash", |
| b"semantic_hash", |
| "spark_version", |
| b"spark_version", |
| "tree_string", |
| b"tree_string", |
| "unpersist", |
| b"unpersist", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "analyze", |
| b"analyze", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "ddl_parse", |
| b"ddl_parse", |
| "explain", |
| b"explain", |
| "get_storage_level", |
| b"get_storage_level", |
| "input_files", |
| b"input_files", |
| "is_local", |
| b"is_local", |
| "is_streaming", |
| b"is_streaming", |
| "json_to_ddl", |
| b"json_to_ddl", |
| "persist", |
| b"persist", |
| "same_semantics", |
| b"same_semantics", |
| "schema", |
| b"schema", |
| "semantic_hash", |
| b"semantic_hash", |
| "session_id", |
| b"session_id", |
| "spark_version", |
| b"spark_version", |
| "tree_string", |
| b"tree_string", |
| "unpersist", |
| b"unpersist", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["analyze", b"analyze"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "schema", |
| "explain", |
| "tree_string", |
| "is_local", |
| "is_streaming", |
| "input_files", |
| "spark_version", |
| "ddl_parse", |
| "same_semantics", |
| "semantic_hash", |
| "persist", |
| "unpersist", |
| "get_storage_level", |
| "json_to_ddl", |
| ] |
| | None |
| ): ... |
| |
| global___AnalyzePlanRequest = AnalyzePlanRequest |
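
# Example (illustrative sketch): an AnalyzePlanRequest asking for an EXTENDED
# explain of a plan. Assumes `plan` and `ctx` built as in the sketches above;
# session_id must be a UUID string.
#
#     import uuid
#     from pyspark.sql.connect.proto.base_pb2 import AnalyzePlanRequest
#
#     req = AnalyzePlanRequest(
#         session_id=str(uuid.uuid4()),
#         user_context=ctx,
#         explain=AnalyzePlanRequest.Explain(
#             plan=plan,
#             explain_mode=AnalyzePlanRequest.Explain.ExplainMode.EXPLAIN_MODE_EXTENDED,
#         ),
#     )
#     assert req.WhichOneof("analyze") == "explain"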
| |
| class AnalyzePlanResponse(google.protobuf.message.Message): |
| """Response to performing analysis of the query. Contains relevant metadata to be able to |
| reason about the performance. |
| Next ID: 16 |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class Schema(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SCHEMA_FIELD_NUMBER: builtins.int |
| @property |
| def schema(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ... |
| def __init__( |
| self, |
| *, |
| schema: pyspark.sql.connect.proto.types_pb2.DataType | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["schema", b"schema"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["schema", b"schema"] |
| ) -> None: ... |
| |
| class Explain(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| EXPLAIN_STRING_FIELD_NUMBER: builtins.int |
| explain_string: builtins.str |
| def __init__( |
| self, |
| *, |
| explain_string: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["explain_string", b"explain_string"] |
| ) -> None: ... |
| |
| class TreeString(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| TREE_STRING_FIELD_NUMBER: builtins.int |
| tree_string: builtins.str |
| def __init__( |
| self, |
| *, |
| tree_string: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["tree_string", b"tree_string"] |
| ) -> None: ... |
| |
| class IsLocal(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| IS_LOCAL_FIELD_NUMBER: builtins.int |
| is_local: builtins.bool |
| def __init__( |
| self, |
| *, |
| is_local: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["is_local", b"is_local"] |
| ) -> None: ... |
| |
| class IsStreaming(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| IS_STREAMING_FIELD_NUMBER: builtins.int |
| is_streaming: builtins.bool |
| def __init__( |
| self, |
| *, |
| is_streaming: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["is_streaming", b"is_streaming"] |
| ) -> None: ... |
| |
| class InputFiles(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| FILES_FIELD_NUMBER: builtins.int |
| @property |
| def files( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """A best-effort snapshot of the files that compose this Dataset""" |
| def __init__( |
| self, |
| *, |
| files: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField(self, field_name: typing_extensions.Literal["files", b"files"]) -> None: ... |
| |
| class SparkVersion(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| VERSION_FIELD_NUMBER: builtins.int |
| version: builtins.str |
| def __init__( |
| self, |
| *, |
| version: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["version", b"version"] |
| ) -> None: ... |
| |
| class DDLParse(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PARSED_FIELD_NUMBER: builtins.int |
| @property |
| def parsed(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ... |
| def __init__( |
| self, |
| *, |
| parsed: pyspark.sql.connect.proto.types_pb2.DataType | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["parsed", b"parsed"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["parsed", b"parsed"] |
| ) -> None: ... |
| |
| class SameSemantics(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RESULT_FIELD_NUMBER: builtins.int |
| result: builtins.bool |
| def __init__( |
| self, |
| *, |
| result: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["result", b"result"] |
| ) -> None: ... |
| |
| class SemanticHash(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RESULT_FIELD_NUMBER: builtins.int |
| result: builtins.int |
| def __init__( |
| self, |
| *, |
| result: builtins.int = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["result", b"result"] |
| ) -> None: ... |
| |
| class Persist(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| def __init__( |
| self, |
| ) -> None: ... |
| |
| class Unpersist(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| def __init__( |
| self, |
| ) -> None: ... |
| |
| class GetStorageLevel(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| STORAGE_LEVEL_FIELD_NUMBER: builtins.int |
| @property |
| def storage_level(self) -> pyspark.sql.connect.proto.common_pb2.StorageLevel: |
| """(Required) The StorageLevel as a result of get_storage_level request.""" |
| def __init__( |
| self, |
| *, |
| storage_level: pyspark.sql.connect.proto.common_pb2.StorageLevel | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["storage_level", b"storage_level"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["storage_level", b"storage_level"] |
| ) -> None: ... |
| |
| class JsonToDDL(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| DDL_STRING_FIELD_NUMBER: builtins.int |
| ddl_string: builtins.str |
| def __init__( |
| self, |
| *, |
| ddl_string: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["ddl_string", b"ddl_string"] |
| ) -> None: ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| SCHEMA_FIELD_NUMBER: builtins.int |
| EXPLAIN_FIELD_NUMBER: builtins.int |
| TREE_STRING_FIELD_NUMBER: builtins.int |
| IS_LOCAL_FIELD_NUMBER: builtins.int |
| IS_STREAMING_FIELD_NUMBER: builtins.int |
| INPUT_FILES_FIELD_NUMBER: builtins.int |
| SPARK_VERSION_FIELD_NUMBER: builtins.int |
| DDL_PARSE_FIELD_NUMBER: builtins.int |
| SAME_SEMANTICS_FIELD_NUMBER: builtins.int |
| SEMANTIC_HASH_FIELD_NUMBER: builtins.int |
| PERSIST_FIELD_NUMBER: builtins.int |
| UNPERSIST_FIELD_NUMBER: builtins.int |
| GET_STORAGE_LEVEL_FIELD_NUMBER: builtins.int |
| JSON_TO_DDL_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| @property |
| def schema(self) -> global___AnalyzePlanResponse.Schema: ... |
| @property |
| def explain(self) -> global___AnalyzePlanResponse.Explain: ... |
| @property |
| def tree_string(self) -> global___AnalyzePlanResponse.TreeString: ... |
| @property |
| def is_local(self) -> global___AnalyzePlanResponse.IsLocal: ... |
| @property |
| def is_streaming(self) -> global___AnalyzePlanResponse.IsStreaming: ... |
| @property |
| def input_files(self) -> global___AnalyzePlanResponse.InputFiles: ... |
| @property |
| def spark_version(self) -> global___AnalyzePlanResponse.SparkVersion: ... |
| @property |
| def ddl_parse(self) -> global___AnalyzePlanResponse.DDLParse: ... |
| @property |
| def same_semantics(self) -> global___AnalyzePlanResponse.SameSemantics: ... |
| @property |
| def semantic_hash(self) -> global___AnalyzePlanResponse.SemanticHash: ... |
| @property |
| def persist(self) -> global___AnalyzePlanResponse.Persist: ... |
| @property |
| def unpersist(self) -> global___AnalyzePlanResponse.Unpersist: ... |
| @property |
| def get_storage_level(self) -> global___AnalyzePlanResponse.GetStorageLevel: ... |
| @property |
| def json_to_ddl(self) -> global___AnalyzePlanResponse.JsonToDDL: ... |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| schema: global___AnalyzePlanResponse.Schema | None = ..., |
| explain: global___AnalyzePlanResponse.Explain | None = ..., |
| tree_string: global___AnalyzePlanResponse.TreeString | None = ..., |
| is_local: global___AnalyzePlanResponse.IsLocal | None = ..., |
| is_streaming: global___AnalyzePlanResponse.IsStreaming | None = ..., |
| input_files: global___AnalyzePlanResponse.InputFiles | None = ..., |
| spark_version: global___AnalyzePlanResponse.SparkVersion | None = ..., |
| ddl_parse: global___AnalyzePlanResponse.DDLParse | None = ..., |
| same_semantics: global___AnalyzePlanResponse.SameSemantics | None = ..., |
| semantic_hash: global___AnalyzePlanResponse.SemanticHash | None = ..., |
| persist: global___AnalyzePlanResponse.Persist | None = ..., |
| unpersist: global___AnalyzePlanResponse.Unpersist | None = ..., |
| get_storage_level: global___AnalyzePlanResponse.GetStorageLevel | None = ..., |
| json_to_ddl: global___AnalyzePlanResponse.JsonToDDL | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "ddl_parse", |
| b"ddl_parse", |
| "explain", |
| b"explain", |
| "get_storage_level", |
| b"get_storage_level", |
| "input_files", |
| b"input_files", |
| "is_local", |
| b"is_local", |
| "is_streaming", |
| b"is_streaming", |
| "json_to_ddl", |
| b"json_to_ddl", |
| "persist", |
| b"persist", |
| "result", |
| b"result", |
| "same_semantics", |
| b"same_semantics", |
| "schema", |
| b"schema", |
| "semantic_hash", |
| b"semantic_hash", |
| "spark_version", |
| b"spark_version", |
| "tree_string", |
| b"tree_string", |
| "unpersist", |
| b"unpersist", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "ddl_parse", |
| b"ddl_parse", |
| "explain", |
| b"explain", |
| "get_storage_level", |
| b"get_storage_level", |
| "input_files", |
| b"input_files", |
| "is_local", |
| b"is_local", |
| "is_streaming", |
| b"is_streaming", |
| "json_to_ddl", |
| b"json_to_ddl", |
| "persist", |
| b"persist", |
| "result", |
| b"result", |
| "same_semantics", |
| b"same_semantics", |
| "schema", |
| b"schema", |
| "semantic_hash", |
| b"semantic_hash", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| "spark_version", |
| b"spark_version", |
| "tree_string", |
| b"tree_string", |
| "unpersist", |
| b"unpersist", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["result", b"result"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "schema", |
| "explain", |
| "tree_string", |
| "is_local", |
| "is_streaming", |
| "input_files", |
| "spark_version", |
| "ddl_parse", |
| "same_semantics", |
| "semantic_hash", |
| "persist", |
| "unpersist", |
| "get_storage_level", |
| "json_to_ddl", |
| ] |
| | None |
| ): ... |
| |
| global___AnalyzePlanResponse = AnalyzePlanResponse |
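
# Example (illustrative sketch): dispatching on the `result` oneof of an
# AnalyzePlanResponse. Assumes `resp` is a response returned by the AnalyzePlan
# RPC; only a few of the possible cases are shown.
#
#     which = resp.WhichOneof("result")
#     if which == "explain":
#         print(resp.explain.explain_string)
#     elif which == "spark_version":
#         print(resp.spark_version.version)
#     elif which == "same_semantics":
#         print(resp.same_semantics.result)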
| |
| class ExecutePlanRequest(google.protobuf.message.Message): |
| """A request to be executed by the service.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class RequestOption(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| REATTACH_OPTIONS_FIELD_NUMBER: builtins.int |
| RESULT_CHUNKING_OPTIONS_FIELD_NUMBER: builtins.int |
| EXTENSION_FIELD_NUMBER: builtins.int |
| @property |
| def reattach_options(self) -> global___ReattachOptions: ... |
| @property |
| def result_chunking_options(self) -> global___ResultChunkingOptions: ... |
| @property |
| def extension(self) -> google.protobuf.any_pb2.Any: |
| """Extension type for request options""" |
| def __init__( |
| self, |
| *, |
| reattach_options: global___ReattachOptions | None = ..., |
| result_chunking_options: global___ResultChunkingOptions | None = ..., |
| extension: google.protobuf.any_pb2.Any | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "extension", |
| b"extension", |
| "reattach_options", |
| b"reattach_options", |
| "request_option", |
| b"request_option", |
| "result_chunking_options", |
| b"result_chunking_options", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "extension", |
| b"extension", |
| "reattach_options", |
| b"reattach_options", |
| "request_option", |
| b"request_option", |
| "result_chunking_options", |
| b"result_chunking_options", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["request_option", b"request_option"] |
| ) -> ( |
| typing_extensions.Literal["reattach_options", "result_chunking_options", "extension"] |
| | None |
| ): ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| OPERATION_ID_FIELD_NUMBER: builtins.int |
| PLAN_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| REQUEST_OPTIONS_FIELD_NUMBER: builtins.int |
| TAGS_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id specifies a spark session for a user id (which is specified |
| by user_context.user_id). The session_id is set by the client to be able to |
| collate streaming responses from different queries within the dedicated session. |
    The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
    Server-side generated idempotency key from the previous responses (if any). The server
    can use this to validate that the server-side session has not changed.
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context |
| |
        user_context.user_id and session_id together identify a unique remote spark session on the
| server side. |
| """ |
| operation_id: builtins.str |
| """(Optional) |
| Provide an id for this request. If not provided, it will be generated by the server. |
| It is returned in every ExecutePlanResponse.operation_id of the ExecutePlan response stream. |
    The id must be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| @property |
| def plan(self) -> global___Plan: |
| """(Required) The logical plan to be executed / analyzed.""" |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| @property |
| def request_options( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___ExecutePlanRequest.RequestOption |
| ]: |
| """Repeated element for options that can be passed to the request. This element is currently |
| unused but allows to pass in an extension value used for arbitrary options. |
| """ |
| @property |
| def tags( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """Tags to tag the given execution with. |
| Tags cannot contain ',' character and cannot be empty strings. |
| Used by Interrupt with interrupt.tag. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| operation_id: builtins.str | None = ..., |
| plan: global___Plan | None = ..., |
| client_type: builtins.str | None = ..., |
| request_options: collections.abc.Iterable[global___ExecutePlanRequest.RequestOption] |
| | None = ..., |
| tags: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "_operation_id", |
| b"_operation_id", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "operation_id", |
| b"operation_id", |
| "plan", |
| b"plan", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "_operation_id", |
| b"_operation_id", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "operation_id", |
| b"operation_id", |
| "plan", |
| b"plan", |
| "request_options", |
| b"request_options", |
| "session_id", |
| b"session_id", |
| "tags", |
| b"tags", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_operation_id", b"_operation_id"] |
| ) -> typing_extensions.Literal["operation_id"] | None: ... |
| |
| global___ExecutePlanRequest = ExecutePlanRequest |
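
# Example (illustrative sketch): an ExecutePlanRequest with a client-chosen
# operation_id and an interrupt tag. Assumes `plan` and `ctx` as in the sketches
# above; both ids must be UUID strings.
#
#     import uuid
#     from pyspark.sql.connect.proto.base_pb2 import ExecutePlanRequest
#
#     req = ExecutePlanRequest(
#         session_id=str(uuid.uuid4()),
#         user_context=ctx,
#         operation_id=str(uuid.uuid4()),  # optional; the server generates one if absent
#         plan=plan,
#         tags=["my-job"],  # no ',' characters, no empty strings
#     )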
| |
| class ExecutePlanResponse(google.protobuf.message.Message): |
| """The response of a query, can be one or more for each request. Responses belonging to the |
| same input query, carry the same `session_id`. |
| Next ID: 17 |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class SqlCommandResult(google.protobuf.message.Message): |
| """A SQL command returns an opaque Relation that can be directly used as input for the next |
| call. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RELATION_FIELD_NUMBER: builtins.int |
| @property |
| def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: ... |
| def __init__( |
| self, |
| *, |
| relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["relation", b"relation"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["relation", b"relation"] |
| ) -> None: ... |
| |
| class ArrowBatch(google.protobuf.message.Message): |
| """Batch results of metrics.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ROW_COUNT_FIELD_NUMBER: builtins.int |
| DATA_FIELD_NUMBER: builtins.int |
| START_OFFSET_FIELD_NUMBER: builtins.int |
| CHUNK_INDEX_FIELD_NUMBER: builtins.int |
| NUM_CHUNKS_IN_BATCH_FIELD_NUMBER: builtins.int |
| row_count: builtins.int |
| """Count rows in `data`. Must match the number of rows inside `data`.""" |
| data: builtins.bytes |
| """Serialized Arrow data.""" |
| start_offset: builtins.int |
| """If set, row offset of the start of this ArrowBatch in execution results.""" |
| chunk_index: builtins.int |
| """Index of this chunk in the batch if chunking is enabled. The index starts from 0.""" |
| num_chunks_in_batch: builtins.int |
| """Total number of chunks in this batch if chunking is enabled. |
| It is missing when chunking is disabled - the batch is returned whole |
| and client will treat this response as the batch. |
| """ |
| def __init__( |
| self, |
| *, |
| row_count: builtins.int = ..., |
| data: builtins.bytes = ..., |
| start_offset: builtins.int | None = ..., |
| chunk_index: builtins.int | None = ..., |
| num_chunks_in_batch: builtins.int | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_chunk_index", |
| b"_chunk_index", |
| "_num_chunks_in_batch", |
| b"_num_chunks_in_batch", |
| "_start_offset", |
| b"_start_offset", |
| "chunk_index", |
| b"chunk_index", |
| "num_chunks_in_batch", |
| b"num_chunks_in_batch", |
| "start_offset", |
| b"start_offset", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_chunk_index", |
| b"_chunk_index", |
| "_num_chunks_in_batch", |
| b"_num_chunks_in_batch", |
| "_start_offset", |
| b"_start_offset", |
| "chunk_index", |
| b"chunk_index", |
| "data", |
| b"data", |
| "num_chunks_in_batch", |
| b"num_chunks_in_batch", |
| "row_count", |
| b"row_count", |
| "start_offset", |
| b"start_offset", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_chunk_index", b"_chunk_index"] |
| ) -> typing_extensions.Literal["chunk_index"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal["_num_chunks_in_batch", b"_num_chunks_in_batch"], |
| ) -> typing_extensions.Literal["num_chunks_in_batch"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_start_offset", b"_start_offset"] |
| ) -> typing_extensions.Literal["start_offset"] | None: ... |
| |
| class Metrics(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class MetricObject(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ExecutionMetricsEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| @property |
| def value(self) -> global___ExecutePlanResponse.Metrics.MetricValue: ... |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: global___ExecutePlanResponse.Metrics.MetricValue | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["value", b"value"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| NAME_FIELD_NUMBER: builtins.int |
| PLAN_ID_FIELD_NUMBER: builtins.int |
| PARENT_FIELD_NUMBER: builtins.int |
| EXECUTION_METRICS_FIELD_NUMBER: builtins.int |
| name: builtins.str |
| plan_id: builtins.int |
| parent: builtins.int |
| @property |
| def execution_metrics( |
| self, |
| ) -> google.protobuf.internal.containers.MessageMap[ |
| builtins.str, global___ExecutePlanResponse.Metrics.MetricValue |
| ]: ... |
| def __init__( |
| self, |
| *, |
| name: builtins.str = ..., |
| plan_id: builtins.int = ..., |
| parent: builtins.int = ..., |
| execution_metrics: collections.abc.Mapping[ |
| builtins.str, global___ExecutePlanResponse.Metrics.MetricValue |
| ] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "execution_metrics", |
| b"execution_metrics", |
| "name", |
| b"name", |
| "parent", |
| b"parent", |
| "plan_id", |
| b"plan_id", |
| ], |
| ) -> None: ... |
| |
| class MetricValue(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| NAME_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| METRIC_TYPE_FIELD_NUMBER: builtins.int |
| name: builtins.str |
| value: builtins.int |
| metric_type: builtins.str |
| def __init__( |
| self, |
| *, |
| name: builtins.str = ..., |
| value: builtins.int = ..., |
| metric_type: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "metric_type", b"metric_type", "name", b"name", "value", b"value" |
| ], |
| ) -> None: ... |
| |
| METRICS_FIELD_NUMBER: builtins.int |
| @property |
| def metrics( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___ExecutePlanResponse.Metrics.MetricObject |
| ]: ... |
| def __init__( |
| self, |
| *, |
| metrics: collections.abc.Iterable[global___ExecutePlanResponse.Metrics.MetricObject] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["metrics", b"metrics"] |
| ) -> None: ... |
| |
| class ObservedMetrics(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| NAME_FIELD_NUMBER: builtins.int |
| VALUES_FIELD_NUMBER: builtins.int |
| KEYS_FIELD_NUMBER: builtins.int |
| PLAN_ID_FIELD_NUMBER: builtins.int |
| name: builtins.str |
| @property |
| def values( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression.Literal |
| ]: ... |
| @property |
| def keys( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... |
| plan_id: builtins.int |
| def __init__( |
| self, |
| *, |
| name: builtins.str = ..., |
| values: collections.abc.Iterable[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression.Literal |
| ] |
| | None = ..., |
| keys: collections.abc.Iterable[builtins.str] | None = ..., |
| plan_id: builtins.int = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "keys", b"keys", "name", b"name", "plan_id", b"plan_id", "values", b"values" |
| ], |
| ) -> None: ... |
| |
| class ResultComplete(google.protobuf.message.Message): |
| """If present, in a reattachable execution this means that after server sends onComplete, |
| the execution is complete. If the server sends onComplete without sending a ResultComplete, |
| it means that there is more, and the client should use ReattachExecute RPC to continue. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| def __init__( |
| self, |
| ) -> None: ... |
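
    # Example (illustrative sketch): consuming a reattachable execution. Assumes
    # a gRPC `stub` exposing ExecutePlan, plus reattach logic elsewhere; the
    # names `stub` and `req` are placeholders, not part of this module.
    #
    #     complete = False
    #     for resp in stub.ExecutePlan(req):
    #         if resp.HasField("result_complete"):
    #             complete = True
    #     if not complete:
    #         ...  # continue consuming via the ReattachExecute RPC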
| |
| class ExecutionProgress(google.protobuf.message.Message): |
| """This message is used to communicate progress about the query progress during the execution.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class StageInfo(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| STAGE_ID_FIELD_NUMBER: builtins.int |
| NUM_TASKS_FIELD_NUMBER: builtins.int |
| NUM_COMPLETED_TASKS_FIELD_NUMBER: builtins.int |
| INPUT_BYTES_READ_FIELD_NUMBER: builtins.int |
| DONE_FIELD_NUMBER: builtins.int |
| stage_id: builtins.int |
| num_tasks: builtins.int |
| num_completed_tasks: builtins.int |
| input_bytes_read: builtins.int |
| done: builtins.bool |
| def __init__( |
| self, |
| *, |
| stage_id: builtins.int = ..., |
| num_tasks: builtins.int = ..., |
| num_completed_tasks: builtins.int = ..., |
| input_bytes_read: builtins.int = ..., |
| done: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "done", |
| b"done", |
| "input_bytes_read", |
| b"input_bytes_read", |
| "num_completed_tasks", |
| b"num_completed_tasks", |
| "num_tasks", |
| b"num_tasks", |
| "stage_id", |
| b"stage_id", |
| ], |
| ) -> None: ... |
| |
| STAGES_FIELD_NUMBER: builtins.int |
| NUM_INFLIGHT_TASKS_FIELD_NUMBER: builtins.int |
| @property |
| def stages( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___ExecutePlanResponse.ExecutionProgress.StageInfo |
| ]: |
| """Captures the progress of each individual stage.""" |
| num_inflight_tasks: builtins.int |
| """Captures the currently in progress tasks.""" |
| def __init__( |
| self, |
| *, |
| stages: collections.abc.Iterable[ |
| global___ExecutePlanResponse.ExecutionProgress.StageInfo |
| ] |
| | None = ..., |
| num_inflight_tasks: builtins.int = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "num_inflight_tasks", b"num_inflight_tasks", "stages", b"stages" |
| ], |
| ) -> None: ... |
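
    # Example (illustrative sketch): summarizing an ExecutionProgress message as a
    # completion fraction. Assumes `progress` is an ExecutionProgress instance.
    #
    #     total = sum(s.num_tasks for s in progress.stages)
    #     done = sum(s.num_completed_tasks for s in progress.stages)
    #     fraction = done / total if total else 0.0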
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| OPERATION_ID_FIELD_NUMBER: builtins.int |
| RESPONSE_ID_FIELD_NUMBER: builtins.int |
| ARROW_BATCH_FIELD_NUMBER: builtins.int |
| SQL_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| WRITE_STREAM_OPERATION_START_RESULT_FIELD_NUMBER: builtins.int |
| STREAMING_QUERY_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| GET_RESOURCES_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| STREAMING_QUERY_MANAGER_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| STREAMING_QUERY_LISTENER_EVENTS_RESULT_FIELD_NUMBER: builtins.int |
| RESULT_COMPLETE_FIELD_NUMBER: builtins.int |
| CREATE_RESOURCE_PROFILE_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| EXECUTION_PROGRESS_FIELD_NUMBER: builtins.int |
| CHECKPOINT_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| ML_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| PIPELINE_EVENT_RESULT_FIELD_NUMBER: builtins.int |
| PIPELINE_COMMAND_RESULT_FIELD_NUMBER: builtins.int |
| PIPELINE_QUERY_FUNCTION_EXECUTION_SIGNAL_FIELD_NUMBER: builtins.int |
| EXTENSION_FIELD_NUMBER: builtins.int |
| METRICS_FIELD_NUMBER: builtins.int |
| OBSERVED_METRICS_FIELD_NUMBER: builtins.int |
| SCHEMA_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| operation_id: builtins.str |
| """Identifies the ExecutePlan execution. |
| If set by the client in ExecutePlanRequest.operationId, that value is returned. |
| Otherwise generated by the server. |
    It is a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| response_id: builtins.str |
| """Identified the response in the stream. |
| The id is an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff` |
| """ |
| @property |
| def arrow_batch(self) -> global___ExecutePlanResponse.ArrowBatch: ... |
| @property |
| def sql_command_result(self) -> global___ExecutePlanResponse.SqlCommandResult: |
| """Special case for executing SQL commands.""" |
| @property |
| def write_stream_operation_start_result( |
| self, |
| ) -> pyspark.sql.connect.proto.commands_pb2.WriteStreamOperationStartResult: |
| """Response for a streaming query.""" |
| @property |
| def streaming_query_command_result( |
| self, |
| ) -> pyspark.sql.connect.proto.commands_pb2.StreamingQueryCommandResult: |
| """Response for commands on a streaming query.""" |
| @property |
| def get_resources_command_result( |
| self, |
| ) -> pyspark.sql.connect.proto.commands_pb2.GetResourcesCommandResult: |
| """Response for 'SparkContext.resources'.""" |
| @property |
| def streaming_query_manager_command_result( |
| self, |
| ) -> pyspark.sql.connect.proto.commands_pb2.StreamingQueryManagerCommandResult: |
| """Response for commands on the streaming query manager.""" |
| @property |
| def streaming_query_listener_events_result( |
| self, |
| ) -> pyspark.sql.connect.proto.commands_pb2.StreamingQueryListenerEventsResult: |
| """Response for commands on the client side streaming query listener.""" |
| @property |
| def result_complete(self) -> global___ExecutePlanResponse.ResultComplete: |
| """Response type informing if the stream is complete in reattachable execution.""" |
| @property |
| def create_resource_profile_command_result( |
| self, |
| ) -> pyspark.sql.connect.proto.commands_pb2.CreateResourceProfileCommandResult: |
| """Response for command that creates ResourceProfile.""" |
| @property |
| def execution_progress(self) -> global___ExecutePlanResponse.ExecutionProgress: |
| """(Optional) Intermediate query progress reports.""" |
| @property |
| def checkpoint_command_result(self) -> global___CheckpointCommandResult: |
| """Response for command that checkpoints a DataFrame.""" |
| @property |
| def ml_command_result(self) -> pyspark.sql.connect.proto.ml_pb2.MlCommandResult: |
| """ML command response""" |
| @property |
| def pipeline_event_result(self) -> pyspark.sql.connect.proto.pipelines_pb2.PipelineEventResult: |
| """Response containing pipeline event that is streamed back to the client during a pipeline run""" |
| @property |
| def pipeline_command_result( |
| self, |
| ) -> pyspark.sql.connect.proto.pipelines_pb2.PipelineCommandResult: |
| """Pipeline command response""" |
| @property |
| def pipeline_query_function_execution_signal( |
| self, |
| ) -> pyspark.sql.connect.proto.pipelines_pb2.PipelineQueryFunctionExecutionSignal: |
| """A signal from the server to the client to execute the query function for a flow, and to |
| register its result with the server. |
| """ |
| @property |
| def extension(self) -> google.protobuf.any_pb2.Any: |
| """Support arbitrary result objects.""" |
| @property |
| def metrics(self) -> global___ExecutePlanResponse.Metrics: |
| """Metrics for the query execution. Typically, this field is only present in the last |
| batch of results and then represents the overall state of the query execution.
| """ |
| @property |
| def observed_metrics( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___ExecutePlanResponse.ObservedMetrics |
| ]: |
| """The metrics observed during the execution of the query plan.""" |
| @property |
| def schema(self) -> pyspark.sql.connect.proto.types_pb2.DataType: |
| """(Optional) The Spark schema. This field is available when `collect` is called.""" |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| operation_id: builtins.str = ..., |
| response_id: builtins.str = ..., |
| arrow_batch: global___ExecutePlanResponse.ArrowBatch | None = ..., |
| sql_command_result: global___ExecutePlanResponse.SqlCommandResult | None = ..., |
| write_stream_operation_start_result: pyspark.sql.connect.proto.commands_pb2.WriteStreamOperationStartResult |
| | None = ..., |
| streaming_query_command_result: pyspark.sql.connect.proto.commands_pb2.StreamingQueryCommandResult |
| | None = ..., |
| get_resources_command_result: pyspark.sql.connect.proto.commands_pb2.GetResourcesCommandResult |
| | None = ..., |
| streaming_query_manager_command_result: pyspark.sql.connect.proto.commands_pb2.StreamingQueryManagerCommandResult |
| | None = ..., |
| streaming_query_listener_events_result: pyspark.sql.connect.proto.commands_pb2.StreamingQueryListenerEventsResult |
| | None = ..., |
| result_complete: global___ExecutePlanResponse.ResultComplete | None = ..., |
| create_resource_profile_command_result: pyspark.sql.connect.proto.commands_pb2.CreateResourceProfileCommandResult |
| | None = ..., |
| execution_progress: global___ExecutePlanResponse.ExecutionProgress | None = ..., |
| checkpoint_command_result: global___CheckpointCommandResult | None = ..., |
| ml_command_result: pyspark.sql.connect.proto.ml_pb2.MlCommandResult | None = ..., |
| pipeline_event_result: pyspark.sql.connect.proto.pipelines_pb2.PipelineEventResult |
| | None = ..., |
| pipeline_command_result: pyspark.sql.connect.proto.pipelines_pb2.PipelineCommandResult |
| | None = ..., |
| pipeline_query_function_execution_signal: pyspark.sql.connect.proto.pipelines_pb2.PipelineQueryFunctionExecutionSignal |
| | None = ..., |
| extension: google.protobuf.any_pb2.Any | None = ..., |
| metrics: global___ExecutePlanResponse.Metrics | None = ..., |
| observed_metrics: collections.abc.Iterable[global___ExecutePlanResponse.ObservedMetrics] |
| | None = ..., |
| schema: pyspark.sql.connect.proto.types_pb2.DataType | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "arrow_batch", |
| b"arrow_batch", |
| "checkpoint_command_result", |
| b"checkpoint_command_result", |
| "create_resource_profile_command_result", |
| b"create_resource_profile_command_result", |
| "execution_progress", |
| b"execution_progress", |
| "extension", |
| b"extension", |
| "get_resources_command_result", |
| b"get_resources_command_result", |
| "metrics", |
| b"metrics", |
| "ml_command_result", |
| b"ml_command_result", |
| "pipeline_command_result", |
| b"pipeline_command_result", |
| "pipeline_event_result", |
| b"pipeline_event_result", |
| "pipeline_query_function_execution_signal", |
| b"pipeline_query_function_execution_signal", |
| "response_type", |
| b"response_type", |
| "result_complete", |
| b"result_complete", |
| "schema", |
| b"schema", |
| "sql_command_result", |
| b"sql_command_result", |
| "streaming_query_command_result", |
| b"streaming_query_command_result", |
| "streaming_query_listener_events_result", |
| b"streaming_query_listener_events_result", |
| "streaming_query_manager_command_result", |
| b"streaming_query_manager_command_result", |
| "write_stream_operation_start_result", |
| b"write_stream_operation_start_result", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "arrow_batch", |
| b"arrow_batch", |
| "checkpoint_command_result", |
| b"checkpoint_command_result", |
| "create_resource_profile_command_result", |
| b"create_resource_profile_command_result", |
| "execution_progress", |
| b"execution_progress", |
| "extension", |
| b"extension", |
| "get_resources_command_result", |
| b"get_resources_command_result", |
| "metrics", |
| b"metrics", |
| "ml_command_result", |
| b"ml_command_result", |
| "observed_metrics", |
| b"observed_metrics", |
| "operation_id", |
| b"operation_id", |
| "pipeline_command_result", |
| b"pipeline_command_result", |
| "pipeline_event_result", |
| b"pipeline_event_result", |
| "pipeline_query_function_execution_signal", |
| b"pipeline_query_function_execution_signal", |
| "response_id", |
| b"response_id", |
| "response_type", |
| b"response_type", |
| "result_complete", |
| b"result_complete", |
| "schema", |
| b"schema", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| "sql_command_result", |
| b"sql_command_result", |
| "streaming_query_command_result", |
| b"streaming_query_command_result", |
| "streaming_query_listener_events_result", |
| b"streaming_query_listener_events_result", |
| "streaming_query_manager_command_result", |
| b"streaming_query_manager_command_result", |
| "write_stream_operation_start_result", |
| b"write_stream_operation_start_result", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["response_type", b"response_type"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "arrow_batch", |
| "sql_command_result", |
| "write_stream_operation_start_result", |
| "streaming_query_command_result", |
| "get_resources_command_result", |
| "streaming_query_manager_command_result", |
| "streaming_query_listener_events_result", |
| "result_complete", |
| "create_resource_profile_command_result", |
| "execution_progress", |
| "checkpoint_command_result", |
| "ml_command_result", |
| "pipeline_event_result", |
| "pipeline_command_result", |
| "pipeline_query_function_execution_signal", |
| "extension", |
| ] |
| | None |
| ): ... |
| |
| global___ExecutePlanResponse = ExecutePlanResponse |
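| 
| # --- Editor's illustrative sketch (not generated code) ---
| # A minimal example of how a client might dispatch on the `response_type`
| # oneof while consuming a stream of ExecutePlanResponse messages. The
| # `responses` iterable and `handle_arrow` handler are assumed placeholders.
| #
| #     from pyspark.sql.connect.proto import base_pb2
| #
| #     def consume(responses):
| #         for response in responses:  # each: base_pb2.ExecutePlanResponse
| #             kind = response.WhichOneof("response_type")
| #             if kind == "arrow_batch":
| #                 handle_arrow(response.arrow_batch)
| #             elif kind == "result_complete":
| #                 break  # reattachable execution: the stream is complete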
| |
| class KeyValue(google.protobuf.message.Message): |
| """The key-value pair for the config request and response.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| """(Required) The key.""" |
| value: builtins.str |
| """(Optional) The value.""" |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["_value", b"_value", "value", b"value"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_value", b"_value", "key", b"key", "value", b"value" |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_value", b"_value"] |
| ) -> typing_extensions.Literal["value"] | None: ... |
| |
| global___KeyValue = KeyValue |
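| 
| # --- Editor's illustrative sketch (not generated code) ---
| # `value` is an explicit-presence (proto3 optional) field, so an unset value
| # is distinct from an empty string:
| #
| #     kv = base_pb2.KeyValue(key="spark.executor.memory")
| #     kv.HasField("value")   # False: no value was set
| #     kv.value = ""
| #     kv.HasField("value")   # True: explicitly set to the empty string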
| |
| class ConfigRequest(google.protobuf.message.Message): |
| """Request to update or fetch the configurations.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class Operation(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SET_FIELD_NUMBER: builtins.int |
| GET_FIELD_NUMBER: builtins.int |
| GET_WITH_DEFAULT_FIELD_NUMBER: builtins.int |
| GET_OPTION_FIELD_NUMBER: builtins.int |
| GET_ALL_FIELD_NUMBER: builtins.int |
| UNSET_FIELD_NUMBER: builtins.int |
| IS_MODIFIABLE_FIELD_NUMBER: builtins.int |
| @property |
| def set(self) -> global___ConfigRequest.Set: ... |
| @property |
| def get(self) -> global___ConfigRequest.Get: ... |
| @property |
| def get_with_default(self) -> global___ConfigRequest.GetWithDefault: ... |
| @property |
| def get_option(self) -> global___ConfigRequest.GetOption: ... |
| @property |
| def get_all(self) -> global___ConfigRequest.GetAll: ... |
| @property |
| def unset(self) -> global___ConfigRequest.Unset: ... |
| @property |
| def is_modifiable(self) -> global___ConfigRequest.IsModifiable: ... |
| def __init__( |
| self, |
| *, |
| set: global___ConfigRequest.Set | None = ..., |
| get: global___ConfigRequest.Get | None = ..., |
| get_with_default: global___ConfigRequest.GetWithDefault | None = ..., |
| get_option: global___ConfigRequest.GetOption | None = ..., |
| get_all: global___ConfigRequest.GetAll | None = ..., |
| unset: global___ConfigRequest.Unset | None = ..., |
| is_modifiable: global___ConfigRequest.IsModifiable | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "get", |
| b"get", |
| "get_all", |
| b"get_all", |
| "get_option", |
| b"get_option", |
| "get_with_default", |
| b"get_with_default", |
| "is_modifiable", |
| b"is_modifiable", |
| "op_type", |
| b"op_type", |
| "set", |
| b"set", |
| "unset", |
| b"unset", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "get", |
| b"get", |
| "get_all", |
| b"get_all", |
| "get_option", |
| b"get_option", |
| "get_with_default", |
| b"get_with_default", |
| "is_modifiable", |
| b"is_modifiable", |
| "op_type", |
| b"op_type", |
| "set", |
| b"set", |
| "unset", |
| b"unset", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["op_type", b"op_type"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "set", "get", "get_with_default", "get_option", "get_all", "unset", "is_modifiable" |
| ] |
| | None |
| ): ... |
| |
| class Set(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PAIRS_FIELD_NUMBER: builtins.int |
| SILENT_FIELD_NUMBER: builtins.int |
| @property |
| def pairs( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: |
| """(Required) The config key-value pairs to set.""" |
| silent: builtins.bool |
| """(Optional) Whether to ignore failures.""" |
| def __init__( |
| self, |
| *, |
| pairs: collections.abc.Iterable[global___KeyValue] | None = ..., |
| silent: builtins.bool | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["_silent", b"_silent", "silent", b"silent"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_silent", b"_silent", "pairs", b"pairs", "silent", b"silent" |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_silent", b"_silent"] |
| ) -> typing_extensions.Literal["silent"] | None: ... |
| |
| class Get(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEYS_FIELD_NUMBER: builtins.int |
| @property |
| def keys( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Required) The config keys to get.""" |
| def __init__( |
| self, |
| *, |
| keys: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ... |
| |
| class GetWithDefault(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PAIRS_FIELD_NUMBER: builtins.int |
| @property |
| def pairs( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: |
| """(Required) The config key-value pairs to get. The value will be used as the default value.""" |
| def __init__( |
| self, |
| *, |
| pairs: collections.abc.Iterable[global___KeyValue] | None = ..., |
| ) -> None: ... |
| def ClearField(self, field_name: typing_extensions.Literal["pairs", b"pairs"]) -> None: ... |
| |
| class GetOption(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEYS_FIELD_NUMBER: builtins.int |
| @property |
| def keys( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Required) The config keys to get optionally.""" |
| def __init__( |
| self, |
| *, |
| keys: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ... |
| |
| class GetAll(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PREFIX_FIELD_NUMBER: builtins.int |
| prefix: builtins.str |
| """(Optional) The prefix of the config key to get.""" |
| def __init__( |
| self, |
| *, |
| prefix: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["_prefix", b"_prefix", "prefix", b"prefix"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["_prefix", b"_prefix", "prefix", b"prefix"] |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_prefix", b"_prefix"] |
| ) -> typing_extensions.Literal["prefix"] | None: ... |
| |
| class Unset(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEYS_FIELD_NUMBER: builtins.int |
| @property |
| def keys( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Required) The config keys to unset.""" |
| def __init__( |
| self, |
| *, |
| keys: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ... |
| |
| class IsModifiable(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEYS_FIELD_NUMBER: builtins.int |
| @property |
| def keys( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Required) The config keys to check the config is modifiable.""" |
| def __init__( |
| self, |
| *, |
| keys: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| OPERATION_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id specifies a Spark session for a user id (which is specified
| by user_context.user_id). The session_id is set by the client so that it can
| collate streaming responses from different queries within the dedicated session.
| The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context""" |
| @property |
| def operation(self) -> global___ConfigRequest.Operation: |
| """(Required) The operation for the config.""" |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| operation: global___ConfigRequest.Operation | None = ..., |
| client_type: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "operation", |
| b"operation", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "operation", |
| b"operation", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| |
| global___ConfigRequest = ConfigRequest |
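| 
| # --- Editor's illustrative sketch (not generated code) ---
| # A hedged example of building a ConfigRequest that sets one configuration
| # pair. The session id and user id are assumed placeholder values.
| #
| #     request = base_pb2.ConfigRequest(
| #         session_id="00112233-4455-6677-8899-aabbccddeeff",
| #         user_context=base_pb2.UserContext(user_id="user"),
| #         operation=base_pb2.ConfigRequest.Operation(
| #             set=base_pb2.ConfigRequest.Set(
| #                 pairs=[base_pb2.KeyValue(key="spark.sql.shuffle.partitions", value="8")]
| #             )
| #         ),
| #     )
| #     request.operation.WhichOneof("op_type")  # -> "set"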
| |
| class ConfigResponse(google.protobuf.message.Message): |
| """Response to the config request. |
| Next ID: 5 |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| PAIRS_FIELD_NUMBER: builtins.int |
| WARNINGS_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| @property |
| def pairs( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: |
| """(Optional) The result key-value pairs. |
| |
| Available when the operation is 'Get', 'GetWithDefault', 'GetOption', 'GetAll'. |
| Also available for the operation 'IsModifiable', where values are the boolean strings "true" and "false".
| """ |
| @property |
| def warnings( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Optional) |
| |
| Warning messages for deprecated or unsupported configurations. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| pairs: collections.abc.Iterable[global___KeyValue] | None = ..., |
| warnings: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "pairs", |
| b"pairs", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| "warnings", |
| b"warnings", |
| ], |
| ) -> None: ... |
| |
| global___ConfigResponse = ConfigResponse |
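| 
| # --- Editor's illustrative sketch (not generated code) ---
| # Reading a ConfigResponse: `pairs` carries the results for Get-style
| # operations, and `warnings` surfaces deprecation notices, if any. `log` is
| # an assumed placeholder logger.
| #
| #     for pair in response.pairs:
| #         print(pair.key, pair.value if pair.HasField("value") else "<unset>")
| #     for warning in response.warnings:
| #         log.warning(warning)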
| |
| class AddArtifactsRequest(google.protobuf.message.Message): |
| """Request to transfer client-local artifacts.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ArtifactChunk(google.protobuf.message.Message): |
| """A chunk of an Artifact.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| DATA_FIELD_NUMBER: builtins.int |
| CRC_FIELD_NUMBER: builtins.int |
| data: builtins.bytes |
| """Data chunk.""" |
| crc: builtins.int |
| """CRC to allow server to verify integrity of the chunk.""" |
| def __init__( |
| self, |
| *, |
| data: builtins.bytes = ..., |
| crc: builtins.int = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["crc", b"crc", "data", b"data"] |
| ) -> None: ... |
| |
| class SingleChunkArtifact(google.protobuf.message.Message): |
| """An artifact that is contained in a single `ArtifactChunk`. |
| Generally, this message represents tiny artifacts such as REPL-generated class files. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| NAME_FIELD_NUMBER: builtins.int |
| DATA_FIELD_NUMBER: builtins.int |
| name: builtins.str |
| """The name of the artifact is expected in the form of a "Relative Path" that is made up of a |
| sequence of directories and the final file element. |
| Examples of "Relative Path"s: "jars/test.jar", "classes/xyz.class", "abc.xyz", "a/b/X.jar". |
| The server is expected to maintain the hierarchy of files as defined by their name (i.e.,
| the relative path of the file on the server's filesystem will be the same as the name of
| the provided artifact).
| """ |
| @property |
| def data(self) -> global___AddArtifactsRequest.ArtifactChunk: |
| """A single data chunk.""" |
| def __init__( |
| self, |
| *, |
| name: builtins.str = ..., |
| data: global___AddArtifactsRequest.ArtifactChunk | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["data", b"data"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["data", b"data", "name", b"name"] |
| ) -> None: ... |
| |
| class Batch(google.protobuf.message.Message): |
| """A number of `SingleChunkArtifact` batched into a single RPC.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ARTIFACTS_FIELD_NUMBER: builtins.int |
| @property |
| def artifacts( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___AddArtifactsRequest.SingleChunkArtifact |
| ]: ... |
| def __init__( |
| self, |
| *, |
| artifacts: collections.abc.Iterable[global___AddArtifactsRequest.SingleChunkArtifact] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["artifacts", b"artifacts"] |
| ) -> None: ... |
| |
| class BeginChunkedArtifact(google.protobuf.message.Message): |
| """Signals the beginning/start of a chunked artifact. |
| A large artifact is transferred through a payload of `BeginChunkedArtifact` followed by a |
| sequence of `ArtifactChunk`s. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| NAME_FIELD_NUMBER: builtins.int |
| TOTAL_BYTES_FIELD_NUMBER: builtins.int |
| NUM_CHUNKS_FIELD_NUMBER: builtins.int |
| INITIAL_CHUNK_FIELD_NUMBER: builtins.int |
| name: builtins.str |
| """Name of the artifact undergoing chunking. Follows the same conventions as the `name` in |
| the `Artifact` message. |
| """ |
| total_bytes: builtins.int |
| """Total size of the artifact in bytes.""" |
| num_chunks: builtins.int |
| """Number of chunks the artifact is split into. |
| This includes the `initial_chunk`. |
| """ |
| @property |
| def initial_chunk(self) -> global___AddArtifactsRequest.ArtifactChunk: |
| """The first/initial chunk.""" |
| def __init__( |
| self, |
| *, |
| name: builtins.str = ..., |
| total_bytes: builtins.int = ..., |
| num_chunks: builtins.int = ..., |
| initial_chunk: global___AddArtifactsRequest.ArtifactChunk | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["initial_chunk", b"initial_chunk"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "initial_chunk", |
| b"initial_chunk", |
| "name", |
| b"name", |
| "num_chunks", |
| b"num_chunks", |
| "total_bytes", |
| b"total_bytes", |
| ], |
| ) -> None: ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| BATCH_FIELD_NUMBER: builtins.int |
| BEGIN_CHUNK_FIELD_NUMBER: builtins.int |
| CHUNK_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id specifies a Spark session for a user id (which is specified
| by user_context.user_id). The session_id is set by the client so that it can
| collate streaming responses from different queries within the dedicated session.
| The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """User context""" |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| @property |
| def batch(self) -> global___AddArtifactsRequest.Batch: ... |
| @property |
| def begin_chunk(self) -> global___AddArtifactsRequest.BeginChunkedArtifact: |
| """The metadata and the initial chunk of a large artifact chunked into multiple requests. |
| The server side is notified about the total size of the large artifact as well as the |
| number of chunks to expect. |
| """ |
| @property |
| def chunk(self) -> global___AddArtifactsRequest.ArtifactChunk: |
| """A chunk of an artifact excluding metadata. This can be any chunk of a large artifact |
| excluding the first chunk (which is included in `BeginChunkedArtifact`). |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| user_context: global___UserContext | None = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| client_type: builtins.str | None = ..., |
| batch: global___AddArtifactsRequest.Batch | None = ..., |
| begin_chunk: global___AddArtifactsRequest.BeginChunkedArtifact | None = ..., |
| chunk: global___AddArtifactsRequest.ArtifactChunk | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "batch", |
| b"batch", |
| "begin_chunk", |
| b"begin_chunk", |
| "chunk", |
| b"chunk", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "payload", |
| b"payload", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "batch", |
| b"batch", |
| "begin_chunk", |
| b"begin_chunk", |
| "chunk", |
| b"chunk", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "payload", |
| b"payload", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["payload", b"payload"] |
| ) -> typing_extensions.Literal["batch", "begin_chunk", "chunk"] | None: ... |
| |
| global___AddArtifactsRequest = AddArtifactsRequest |
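| 
| # --- Editor's illustrative sketch (not generated code) ---
| # A sketch of the chunked-upload protocol described above: one
| # BeginChunkedArtifact message carrying the metadata and the first chunk,
| # followed by bare ArtifactChunk messages. SESSION_ID is an assumed
| # placeholder, and the 32 KiB chunk size is arbitrary.
| #
| #     import zlib
| #
| #     def chunked_requests(name, payload, chunk_size=32 * 1024):
| #         chunks = [payload[i : i + chunk_size]
| #                   for i in range(0, len(payload), chunk_size)]
| #         yield base_pb2.AddArtifactsRequest(
| #             session_id=SESSION_ID,
| #             begin_chunk=base_pb2.AddArtifactsRequest.BeginChunkedArtifact(
| #                 name=name,
| #                 total_bytes=len(payload),
| #                 num_chunks=len(chunks),
| #                 initial_chunk=base_pb2.AddArtifactsRequest.ArtifactChunk(
| #                     data=chunks[0], crc=zlib.crc32(chunks[0])),
| #             ),
| #         )
| #         for data in chunks[1:]:
| #             yield base_pb2.AddArtifactsRequest(
| #                 session_id=SESSION_ID,
| #                 chunk=base_pb2.AddArtifactsRequest.ArtifactChunk(
| #                     data=data, crc=zlib.crc32(data)),
| #             )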
| |
| class AddArtifactsResponse(google.protobuf.message.Message): |
| """Response to adding an artifact. Contains relevant metadata to verify successful transfer of |
| artifact(s). |
| Next ID: 4 |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ArtifactSummary(google.protobuf.message.Message): |
| """Metadata of an artifact.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| NAME_FIELD_NUMBER: builtins.int |
| IS_CRC_SUCCESSFUL_FIELD_NUMBER: builtins.int |
| name: builtins.str |
| is_crc_successful: builtins.bool |
| """Whether the CRC (Cyclic Redundancy Check) is successful on server verification. |
| The server discards any artifact that fails the CRC. |
| If false, the client may choose to resend the artifact specified by `name`. |
| """ |
| def __init__( |
| self, |
| *, |
| name: builtins.str = ..., |
| is_crc_successful: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "is_crc_successful", b"is_crc_successful", "name", b"name" |
| ], |
| ) -> None: ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| ARTIFACTS_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """Session id in which the AddArtifact was running.""" |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| @property |
| def artifacts( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___AddArtifactsResponse.ArtifactSummary |
| ]: |
| """The list of artifact(s) seen by the server.""" |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| artifacts: collections.abc.Iterable[global___AddArtifactsResponse.ArtifactSummary] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "artifacts", |
| b"artifacts", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| ], |
| ) -> None: ... |
| |
| global___AddArtifactsResponse = AddArtifactsResponse |
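| 
| # --- Editor's illustrative sketch (not generated code) ---
| # Checking the per-artifact CRC verdicts; the server discards artifacts that
| # fail the CRC, so the client may resend anything listed in `failed`.
| #
| #     failed = [s.name for s in response.artifacts if not s.is_crc_successful]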
| |
| class ArtifactStatusesRequest(google.protobuf.message.Message): |
| """Request to get current statuses of artifacts at the server side.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| NAMES_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id specifies a Spark session for a user id (which is specified
| by user_context.user_id). The session_id is set by the client so that it can
| collate streaming responses from different queries within the dedicated session.
| The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """User context""" |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| @property |
| def names( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """The name of the artifact is expected in the form of a "Relative Path" that is made up of a |
| sequence of directories and the final file element. |
| Examples of "Relative Path"s: "jars/test.jar", "classes/xyz.class", "abc.xyz", "a/b/X.jar". |
| The server is expected to maintain the hierarchy of files as defined by their name (i.e.,
| the relative path of the file on the server's filesystem will be the same as the name of
| the provided artifact).
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| client_type: builtins.str | None = ..., |
| names: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "names", |
| b"names", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| |
| global___ArtifactStatusesRequest = ArtifactStatusesRequest |
| |
| class ArtifactStatusesResponse(google.protobuf.message.Message): |
| """Response to checking artifact statuses. |
| Next ID: 4 |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class StatusesEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| @property |
| def value(self) -> global___ArtifactStatusesResponse.ArtifactStatus: ... |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: global___ArtifactStatusesResponse.ArtifactStatus | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["value", b"value"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| class ArtifactStatus(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| EXISTS_FIELD_NUMBER: builtins.int |
| exists: builtins.bool |
| """Exists or not particular artifact at the server.""" |
| def __init__( |
| self, |
| *, |
| exists: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["exists", b"exists"] |
| ) -> None: ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| STATUSES_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """Session id in which the ArtifactStatus was running.""" |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| @property |
| def statuses( |
| self, |
| ) -> google.protobuf.internal.containers.MessageMap[ |
| builtins.str, global___ArtifactStatusesResponse.ArtifactStatus |
| ]: |
| """A map of artifact names to their statuses.""" |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| statuses: collections.abc.Mapping[ |
| builtins.str, global___ArtifactStatusesResponse.ArtifactStatus |
| ] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| "statuses", |
| b"statuses", |
| ], |
| ) -> None: ... |
| |
| global___ArtifactStatusesResponse = ArtifactStatusesResponse |
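| 
| # --- Editor's illustrative sketch (not generated code) ---
| # Probing whether an artifact already exists before uploading it. SESSION_ID
| # is an assumed placeholder; `statuses` is a map keyed by artifact name.
| #
| #     request = base_pb2.ArtifactStatusesRequest(
| #         session_id=SESSION_ID, names=["jars/test.jar"])
| #     ...  # send the request; obtain `response`
| #     status = response.statuses.get("jars/test.jar")
| #     needs_upload = status is None or not status.exists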
| |
| class InterruptRequest(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class _InterruptType: |
| ValueType = typing.NewType("ValueType", builtins.int) |
| V: typing_extensions.TypeAlias = ValueType |
| |
| class _InterruptTypeEnumTypeWrapper( |
| google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ |
| InterruptRequest._InterruptType.ValueType |
| ], |
| builtins.type, |
| ): # noqa: F821 |
| DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor |
| INTERRUPT_TYPE_UNSPECIFIED: InterruptRequest._InterruptType.ValueType # 0 |
| INTERRUPT_TYPE_ALL: InterruptRequest._InterruptType.ValueType # 1 |
| """Interrupt all running executions within the session with the provided session_id.""" |
| INTERRUPT_TYPE_TAG: InterruptRequest._InterruptType.ValueType # 2 |
| """Interrupt all running executions within the session with the provided operation_tag.""" |
| INTERRUPT_TYPE_OPERATION_ID: InterruptRequest._InterruptType.ValueType # 3 |
| """Interrupt the running execution within the session with the provided operation_id.""" |
| |
| class InterruptType(_InterruptType, metaclass=_InterruptTypeEnumTypeWrapper): ... |
| INTERRUPT_TYPE_UNSPECIFIED: InterruptRequest.InterruptType.ValueType # 0 |
| INTERRUPT_TYPE_ALL: InterruptRequest.InterruptType.ValueType # 1 |
| """Interrupt all running executions within the session with the provided session_id.""" |
| INTERRUPT_TYPE_TAG: InterruptRequest.InterruptType.ValueType # 2 |
| """Interrupt all running executions within the session with the provided operation_tag.""" |
| INTERRUPT_TYPE_OPERATION_ID: InterruptRequest.InterruptType.ValueType # 3 |
| """Interrupt the running execution within the session with the provided operation_id.""" |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| INTERRUPT_TYPE_FIELD_NUMBER: builtins.int |
| OPERATION_TAG_FIELD_NUMBER: builtins.int |
| OPERATION_ID_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id specifies a Spark session for a user id (which is specified
| by user_context.user_id). The session_id is set by the client so that it can
| collate streaming responses from different queries within the dedicated session.
| The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context""" |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| interrupt_type: global___InterruptRequest.InterruptType.ValueType |
| """(Required) The type of interrupt to execute.""" |
| operation_tag: builtins.str |
| """if interrupt_tag == INTERRUPT_TYPE_TAG, interrupt operation with this tag.""" |
| operation_id: builtins.str |
| """if interrupt_tag == INTERRUPT_TYPE_OPERATION_ID, interrupt operation with this operation_id.""" |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| client_type: builtins.str | None = ..., |
| interrupt_type: global___InterruptRequest.InterruptType.ValueType = ..., |
| operation_tag: builtins.str = ..., |
| operation_id: builtins.str = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "interrupt", |
| b"interrupt", |
| "operation_id", |
| b"operation_id", |
| "operation_tag", |
| b"operation_tag", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "interrupt", |
| b"interrupt", |
| "interrupt_type", |
| b"interrupt_type", |
| "operation_id", |
| b"operation_id", |
| "operation_tag", |
| b"operation_tag", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["interrupt", b"interrupt"] |
| ) -> typing_extensions.Literal["operation_tag", "operation_id"] | None: ... |
| |
| global___InterruptRequest = InterruptRequest |
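| 
| # --- Editor's illustrative sketch (not generated code) ---
| # Interrupting a single execution by its operation id. SESSION_ID and
| # OPERATION_ID are assumed placeholders. The matching InterruptResponse
| # reports which executions were actually stopped via `interrupted_ids`.
| #
| #     request = base_pb2.InterruptRequest(
| #         session_id=SESSION_ID,
| #         user_context=base_pb2.UserContext(user_id="user"),
| #         interrupt_type=base_pb2.InterruptRequest.INTERRUPT_TYPE_OPERATION_ID,
| #         operation_id=OPERATION_ID,
| #     )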
| |
| class InterruptResponse(google.protobuf.message.Message): |
| """Next ID: 4""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| INTERRUPTED_IDS_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """Session id in which the interrupt was running.""" |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| @property |
| def interrupted_ids( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """Operation ids of the executions which were interrupted.""" |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| interrupted_ids: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "interrupted_ids", |
| b"interrupted_ids", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| ], |
| ) -> None: ... |
| |
| global___InterruptResponse = InterruptResponse |
| |
| class ReattachOptions(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| REATTACHABLE_FIELD_NUMBER: builtins.int |
| reattachable: builtins.bool |
| """If true, the request can be reattached to using ReattachExecute. |
| ReattachExecute can be used either if the stream broke with a GRPC network error, |
| or if the server closed the stream without sending a response with StreamStatus.complete=true. |
| The server will keep a buffer of responses in case a response is lost, and |
| ReattachExecute needs to back-track. |
| |
| If false, the execution response stream will will not be reattachable, and all responses are |
| immediately released by the server after being sent. |
| """ |
| def __init__( |
| self, |
| *, |
| reattachable: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["reattachable", b"reattachable"] |
| ) -> None: ... |
| |
| global___ReattachOptions = ReattachOptions |
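| 
| # --- Editor's illustrative sketch (not generated code) ---
| # Opting in to reattachable execution; the message itself is trivial, and is
| # presumably carried inside the request options of an ExecutePlanRequest.
| #
| #     options = base_pb2.ReattachOptions(reattachable=True)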
| |
| class ResultChunkingOptions(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ALLOW_ARROW_BATCH_CHUNKING_FIELD_NUMBER: builtins.int |
| PREFERRED_ARROW_CHUNK_SIZE_FIELD_NUMBER: builtins.int |
| allow_arrow_batch_chunking: builtins.bool |
| """Although Arrow results are split into batches with a size limit according to estimation, the |
| size of the batches is not guaranteed to be less than the limit, especially when a single row |
| is larger than the limit, in which case the server will fail to split it further into smaller |
| batches. As a result, the client may encounter a gRPC error stating “Received message larger |
| than max” when a batch is too large. |
| If allow_arrow_batch_chunking=true, the server will split large Arrow batches into smaller chunks, |
| and the client is expected to handle the chunked Arrow batches. |
| |
| If false, the server will not chunk large Arrow batches. |
| """ |
| preferred_arrow_chunk_size: builtins.int |
| """Optional preferred Arrow batch size in bytes for the server to use when sending Arrow results. |
| The server will attempt to use this size if it is set and within the valid range |
| ([1KB, max batch size on server]). Otherwise, the server's maximum batch size is used. |
| """ |
| def __init__( |
| self, |
| *, |
| allow_arrow_batch_chunking: builtins.bool = ..., |
| preferred_arrow_chunk_size: builtins.int | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_preferred_arrow_chunk_size", |
| b"_preferred_arrow_chunk_size", |
| "preferred_arrow_chunk_size", |
| b"preferred_arrow_chunk_size", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_preferred_arrow_chunk_size", |
| b"_preferred_arrow_chunk_size", |
| "allow_arrow_batch_chunking", |
| b"allow_arrow_batch_chunking", |
| "preferred_arrow_chunk_size", |
| b"preferred_arrow_chunk_size", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_preferred_arrow_chunk_size", b"_preferred_arrow_chunk_size" |
| ], |
| ) -> typing_extensions.Literal["preferred_arrow_chunk_size"] | None: ... |
| |
| global___ResultChunkingOptions = ResultChunkingOptions |
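| 
| # --- Editor's illustrative sketch (not generated code) ---
| # `preferred_arrow_chunk_size` has explicit presence, so the server can tell
| # "not set" apart from any particular value:
| #
| #     opts = base_pb2.ResultChunkingOptions(allow_arrow_batch_chunking=True)
| #     opts.HasField("preferred_arrow_chunk_size")   # False: server default applies
| #     opts.preferred_arrow_chunk_size = 4 * 1024 * 1024  # request ~4 MiB chunks
| #     opts.HasField("preferred_arrow_chunk_size")   # True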
| |
| class ReattachExecuteRequest(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| OPERATION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| LAST_RESPONSE_ID_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id of the request to reattach to.
| This must be the id of an existing session.
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context |
| |
| user_context.user_id and session_id together identify a unique remote Spark session on the
| server side. |
| """ |
| operation_id: builtins.str |
| """(Required) |
| Provide the id of the request to reattach to.
| This must be the id of an existing operation.
| """ |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| last_response_id: builtins.str |
| """(Optional) |
| The id of the last response the client has already processed from the response stream.
| After reattach, the server will resume the response stream after that response.
| If not specified, the server will restart the stream from the start.
| 
| Note: the server controls the number of responses it buffers and may drop responses
| that are far behind the latest returned response, so this cannot be used to scroll
| the cursor arbitrarily far back. If the response is no longer available, this will result in an error.
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| operation_id: builtins.str = ..., |
| client_type: builtins.str | None = ..., |
| last_response_id: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "_last_response_id", |
| b"_last_response_id", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "last_response_id", |
| b"last_response_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "_last_response_id", |
| b"_last_response_id", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "last_response_id", |
| b"last_response_id", |
| "operation_id", |
| b"operation_id", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_last_response_id", b"_last_response_id"] |
| ) -> typing_extensions.Literal["last_response_id"] | None: ... |
| |
| global___ReattachExecuteRequest = ReattachExecuteRequest |
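| 
| # --- Editor's illustrative sketch (not generated code) ---
| # Resuming a broken response stream. `last_seen_response_id` is an assumed
| # placeholder holding the response_id of the last response the client fully
| # processed; omitting it restarts the stream from the beginning.
| #
| #     request = base_pb2.ReattachExecuteRequest(
| #         session_id=SESSION_ID,
| #         user_context=base_pb2.UserContext(user_id="user"),
| #         operation_id=OPERATION_ID,
| #         last_response_id=last_seen_response_id,
| #     )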
| |
| class ReleaseExecuteRequest(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ReleaseAll(google.protobuf.message.Message): |
| """Release and close operation completely. |
| This will also interrupt the query if it is still executing, and wait for it to be torn down.
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| def __init__( |
| self, |
| ) -> None: ... |
| |
| class ReleaseUntil(google.protobuf.message.Message): |
| """Release all responses from the operation response stream up to and including |
| the response with the given by response_id. |
| While server determines by itself how much of a buffer of responses to keep, client providing |
| explicit release calls will help reduce resource consumption. |
| Noop if response_id not found in cached responses. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RESPONSE_ID_FIELD_NUMBER: builtins.int |
| response_id: builtins.str |
| def __init__( |
| self, |
| *, |
| response_id: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["response_id", b"response_id"] |
| ) -> None: ... |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| OPERATION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| RELEASE_ALL_FIELD_NUMBER: builtins.int |
| RELEASE_UNTIL_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id of the request to release.
| This must be the id of an existing session.
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context |
| |
| user_context.user_id and session_id together identify a unique remote Spark session on the
| server side. |
| """ |
| operation_id: builtins.str |
| """(Required) |
| Provide the id of the operation to release.
| This must be the id of an existing operation.
| """ |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| @property |
| def release_all(self) -> global___ReleaseExecuteRequest.ReleaseAll: ... |
| @property |
| def release_until(self) -> global___ReleaseExecuteRequest.ReleaseUntil: ... |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| operation_id: builtins.str = ..., |
| client_type: builtins.str | None = ..., |
| release_all: global___ReleaseExecuteRequest.ReleaseAll | None = ..., |
| release_until: global___ReleaseExecuteRequest.ReleaseUntil | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "release", |
| b"release", |
| "release_all", |
| b"release_all", |
| "release_until", |
| b"release_until", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "operation_id", |
| b"operation_id", |
| "release", |
| b"release", |
| "release_all", |
| b"release_all", |
| "release_until", |
| b"release_until", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["release", b"release"] |
| ) -> typing_extensions.Literal["release_all", "release_until"] | None: ... |
| |
| global___ReleaseExecuteRequest = ReleaseExecuteRequest |
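| |
| # Editor's note: an illustrative sketch, not generated code. It shows how a |
| # client might build a ReleaseExecuteRequest; the runtime classes are assumed |
| # to live in the base_pb2 module these stubs describe, and the ids below are |
| # placeholders. Only one member of the "release" oneof can be set at a time. |
| def _example_release_execute_request() -> "ReleaseExecuteRequest": |
|     from pyspark.sql.connect.proto.base_pb2 import ReleaseExecuteRequest as _Req |
| |
|     req = _Req( |
|         session_id="00112233-4455-6677-8899-aabbccddeeff",  # placeholder id |
|         operation_id="99887766-5544-3322-1100-ffeeddccbbaa",  # placeholder id |
|         # Drop every cached response; ReleaseUntil(response_id=...) would |
|         # instead trim the stream up to a single response. |
|         release_all=_Req.ReleaseAll(), |
|     ) |
|     assert req.WhichOneof("release") == "release_all" |
|     return req |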
| |
| class ReleaseExecuteResponse(google.protobuf.message.Message): |
| """Next ID: 4""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| OPERATION_ID_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """Session id in which the release was running.""" |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| operation_id: builtins.str |
| """Operation id of the operation on which the release executed. |
| If the operation couldn't be found (because e.g. it was concurrently released), will be unset. |
| Otherwise, it will be equal to the operation_id from request. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| operation_id: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_operation_id", b"_operation_id", "operation_id", b"operation_id" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_operation_id", |
| b"_operation_id", |
| "operation_id", |
| b"operation_id", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_operation_id", b"_operation_id"] |
| ) -> typing_extensions.Literal["operation_id"] | None: ... |
| |
| global___ReleaseExecuteResponse = ReleaseExecuteResponse |
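| |
| # Editor's note: a minimal sketch, not generated code. operation_id is an |
| # optional field, so HasField distinguishes "unset" (operation not found) |
| # from an empty string. |
| def _example_operation_was_found(resp: "ReleaseExecuteResponse") -> bool: |
|     # Unset when the operation could not be found, e.g. concurrently released. |
|     return resp.HasField("operation_id") |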
| |
| class ReleaseSessionRequest(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| ALLOW_RECONNECT_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id of the session to release. |
| This must be the id of an existing session. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context |
| |
| user_context.user_id and session_id both identify a unique remote spark session on the |
| server side. |
| """ |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| allow_reconnect: builtins.bool |
| """Signals the server to allow the client to reconnect to the session after it is released. |
| |
| By default, the server tombstones the session upon release, preventing reconnections and |
| fully cleaning the session state. |
| |
| If this flag is set to true, the server may permit the client to reconnect to the session |
| post-release, even if the session state has been cleaned. This can result in missing state, |
| such as Temporary Views, Temporary UDFs, or the Current Catalog, in the reconnected session. |
| |
| Use this option sparingly and only when the client fully understands the implications of |
| reconnecting to a released session. The client must ensure that any queries executed do not |
| rely on the session state prior to its release. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| user_context: global___UserContext | None = ..., |
| client_type: builtins.str | None = ..., |
| allow_reconnect: builtins.bool = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_type", |
| b"_client_type", |
| "client_type", |
| b"client_type", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_type", |
| b"_client_type", |
| "allow_reconnect", |
| b"allow_reconnect", |
| "client_type", |
| b"client_type", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| |
| global___ReleaseSessionRequest = ReleaseSessionRequest |
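| |
| # Editor's note: an illustrative sketch, not generated code. It builds a |
| # ReleaseSessionRequest that keeps the session reconnectable after release; |
| # the classes are assumed to come from base_pb2 and the ids are placeholders. |
| def _example_release_session_request() -> "ReleaseSessionRequest": |
|     from pyspark.sql.connect.proto.base_pb2 import ReleaseSessionRequest, UserContext |
| |
|     return ReleaseSessionRequest( |
|         session_id="00112233-4455-6677-8899-aabbccddeeff",  # placeholder id |
|         user_context=UserContext(user_id="alice"),  # placeholder user |
|         # Opt in to post-release reconnects; temp views, temp UDFs, and the |
|         # current catalog may be gone when the client reconnects. |
|         allow_reconnect=True, |
|     ) |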
| |
| class ReleaseSessionResponse(google.protobuf.message.Message): |
| """Next ID: 3""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """Session id of the session on which the release executed.""" |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "server_side_session_id", b"server_side_session_id", "session_id", b"session_id" |
| ], |
| ) -> None: ... |
| |
| global___ReleaseSessionResponse = ReleaseSessionResponse |
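| |
| # Editor's note: a minimal sketch, not generated code. The server-side session |
| # id doubles as an idempotency key: a client that cached the id from earlier |
| # responses can assert that the backing session has not been swapped. |
| def _example_assert_same_session(resp: "ReleaseSessionResponse", observed_id: str) -> None: |
|     if resp.server_side_session_id != observed_id: |
|         raise RuntimeError("server side session changed")  # hypothetical handling |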
| |
| class FetchErrorDetailsRequest(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| ERROR_ID_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| The session_id specifies a Spark session for a user identified by user_context.user_id. |
| The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`. |
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """User context""" |
| error_id: builtins.str |
| """(Required) |
| The id of the error. |
| """ |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| error_id: builtins.str = ..., |
| client_type: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "error_id", |
| b"error_id", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| |
| global___FetchErrorDetailsRequest = FetchErrorDetailsRequest |
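| |
| # Editor's note: an illustrative sketch, not generated code. A client holding |
| # an error id from a failed execution could fetch the full error details like |
| # this; classes are assumed to come from base_pb2, and the ids are placeholders. |
| def _example_fetch_error_details_request(error_id: str) -> "FetchErrorDetailsRequest": |
|     from pyspark.sql.connect.proto.base_pb2 import FetchErrorDetailsRequest, UserContext |
| |
|     return FetchErrorDetailsRequest( |
|         session_id="00112233-4455-6677-8899-aabbccddeeff",  # placeholder id |
|         user_context=UserContext(user_id="alice"),  # placeholder user |
|         error_id=error_id, |
|     ) |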
| |
| class FetchErrorDetailsResponse(google.protobuf.message.Message): |
| """Next ID: 5""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class StackTraceElement(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| DECLARING_CLASS_FIELD_NUMBER: builtins.int |
| METHOD_NAME_FIELD_NUMBER: builtins.int |
| FILE_NAME_FIELD_NUMBER: builtins.int |
| LINE_NUMBER_FIELD_NUMBER: builtins.int |
| declaring_class: builtins.str |
| """The fully qualified name of the class containing the execution point.""" |
| method_name: builtins.str |
| """The name of the method containing the execution point.""" |
| file_name: builtins.str |
| """The name of the file containing the execution point.""" |
| line_number: builtins.int |
| """The line number of the source line containing the execution point.""" |
| def __init__( |
| self, |
| *, |
| declaring_class: builtins.str = ..., |
| method_name: builtins.str = ..., |
| file_name: builtins.str | None = ..., |
| line_number: builtins.int = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_file_name", b"_file_name", "file_name", b"file_name" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_file_name", |
| b"_file_name", |
| "declaring_class", |
| b"declaring_class", |
| "file_name", |
| b"file_name", |
| "line_number", |
| b"line_number", |
| "method_name", |
| b"method_name", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_file_name", b"_file_name"] |
| ) -> typing_extensions.Literal["file_name"] | None: ... |
| |
| class QueryContext(google.protobuf.message.Message): |
| """QueryContext defines the schema for the query context of a SparkThrowable. |
| It helps users understand where the error occurs while executing queries. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class _ContextType: |
| ValueType = typing.NewType("ValueType", builtins.int) |
| V: typing_extensions.TypeAlias = ValueType |
| |
| class _ContextTypeEnumTypeWrapper( |
| google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ |
| FetchErrorDetailsResponse.QueryContext._ContextType.ValueType |
| ], |
| builtins.type, |
| ): # noqa: F821 |
| DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor |
| SQL: FetchErrorDetailsResponse.QueryContext._ContextType.ValueType # 0 |
| DATAFRAME: FetchErrorDetailsResponse.QueryContext._ContextType.ValueType # 1 |
| |
| class ContextType(_ContextType, metaclass=_ContextTypeEnumTypeWrapper): |
| """The type of this query context.""" |
| |
| SQL: FetchErrorDetailsResponse.QueryContext.ContextType.ValueType # 0 |
| DATAFRAME: FetchErrorDetailsResponse.QueryContext.ContextType.ValueType # 1 |
| |
| CONTEXT_TYPE_FIELD_NUMBER: builtins.int |
| OBJECT_TYPE_FIELD_NUMBER: builtins.int |
| OBJECT_NAME_FIELD_NUMBER: builtins.int |
| START_INDEX_FIELD_NUMBER: builtins.int |
| STOP_INDEX_FIELD_NUMBER: builtins.int |
| FRAGMENT_FIELD_NUMBER: builtins.int |
| CALL_SITE_FIELD_NUMBER: builtins.int |
| SUMMARY_FIELD_NUMBER: builtins.int |
| context_type: global___FetchErrorDetailsResponse.QueryContext.ContextType.ValueType |
| object_type: builtins.str |
| """The object type of the query which throws the exception. |
| If the exception is directly from the main query, it should be an empty string. |
| Otherwise, it should be the exact object type in upper case. For example, a "VIEW". |
| """ |
| object_name: builtins.str |
| """The object name of the query which throws the exception. |
| If the exception is directly from the main query, it should be an empty string. |
| Otherwise, it should be the object name. For example, a view name "V1". |
| """ |
| start_index: builtins.int |
| """The starting index in the query text which throws the exception. The index starts from 0.""" |
| stop_index: builtins.int |
| """The stopping index in the query which throws the exception. The index starts from 0.""" |
| fragment: builtins.str |
| """The corresponding fragment of the query which throws the exception.""" |
| call_site: builtins.str |
| """The user code (call site of the API) that caused throwing the exception.""" |
| summary: builtins.str |
| """Summary of the exception cause.""" |
| def __init__( |
| self, |
| *, |
| context_type: global___FetchErrorDetailsResponse.QueryContext.ContextType.ValueType = ..., |
| object_type: builtins.str = ..., |
| object_name: builtins.str = ..., |
| start_index: builtins.int = ..., |
| stop_index: builtins.int = ..., |
| fragment: builtins.str = ..., |
| call_site: builtins.str = ..., |
| summary: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "call_site", |
| b"call_site", |
| "context_type", |
| b"context_type", |
| "fragment", |
| b"fragment", |
| "object_name", |
| b"object_name", |
| "object_type", |
| b"object_type", |
| "start_index", |
| b"start_index", |
| "stop_index", |
| b"stop_index", |
| "summary", |
| b"summary", |
| ], |
| ) -> None: ... |
| |
| class SparkThrowable(google.protobuf.message.Message): |
| """SparkThrowable defines the schema for SparkThrowable exceptions.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class MessageParametersEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| value: builtins.str |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| ERROR_CLASS_FIELD_NUMBER: builtins.int |
| MESSAGE_PARAMETERS_FIELD_NUMBER: builtins.int |
| QUERY_CONTEXTS_FIELD_NUMBER: builtins.int |
| SQL_STATE_FIELD_NUMBER: builtins.int |
| BREAKING_CHANGE_INFO_FIELD_NUMBER: builtins.int |
| error_class: builtins.str |
| """Succinct, human-readable, unique, and consistent representation of the error category.""" |
| @property |
| def message_parameters( |
| self, |
| ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: |
| """The message parameters for the error framework.""" |
| @property |
| def query_contexts( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___FetchErrorDetailsResponse.QueryContext |
| ]: |
| """The query context of a SparkThrowable.""" |
| sql_state: builtins.str |
| """Portable error identifier across SQL engines |
| If null, error class or SQLSTATE is not set. |
| """ |
| @property |
| def breaking_change_info(self) -> global___FetchErrorDetailsResponse.BreakingChangeInfo: |
| """Additional information if the error was caused by a breaking change.""" |
| def __init__( |
| self, |
| *, |
| error_class: builtins.str | None = ..., |
| message_parameters: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., |
| query_contexts: collections.abc.Iterable[ |
| global___FetchErrorDetailsResponse.QueryContext |
| ] |
| | None = ..., |
| sql_state: builtins.str | None = ..., |
| breaking_change_info: global___FetchErrorDetailsResponse.BreakingChangeInfo |
| | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_breaking_change_info", |
| b"_breaking_change_info", |
| "_error_class", |
| b"_error_class", |
| "_sql_state", |
| b"_sql_state", |
| "breaking_change_info", |
| b"breaking_change_info", |
| "error_class", |
| b"error_class", |
| "sql_state", |
| b"sql_state", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_breaking_change_info", |
| b"_breaking_change_info", |
| "_error_class", |
| b"_error_class", |
| "_sql_state", |
| b"_sql_state", |
| "breaking_change_info", |
| b"breaking_change_info", |
| "error_class", |
| b"error_class", |
| "message_parameters", |
| b"message_parameters", |
| "query_contexts", |
| b"query_contexts", |
| "sql_state", |
| b"sql_state", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_breaking_change_info", b"_breaking_change_info" |
| ], |
| ) -> typing_extensions.Literal["breaking_change_info"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_error_class", b"_error_class"] |
| ) -> typing_extensions.Literal["error_class"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_sql_state", b"_sql_state"] |
| ) -> typing_extensions.Literal["sql_state"] | None: ... |
| |
| class BreakingChangeInfo(google.protobuf.message.Message): |
| """BreakingChangeInfo defines the schema for breaking change information.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| MIGRATION_MESSAGE_FIELD_NUMBER: builtins.int |
| MITIGATION_CONFIG_FIELD_NUMBER: builtins.int |
| NEEDS_AUDIT_FIELD_NUMBER: builtins.int |
| @property |
| def migration_message( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """A message explaining how the user can migrate their job to work |
| with the breaking change. |
| """ |
| @property |
| def mitigation_config(self) -> global___FetchErrorDetailsResponse.MitigationConfig: |
| """A spark config flag that can be used to mitigate the breaking change.""" |
| needs_audit: builtins.bool |
| """If true, the breaking change should be inspected manually. |
| If false, the Spark job should be retried with the mitigation_config applied. |
| """ |
| def __init__( |
| self, |
| *, |
| migration_message: collections.abc.Iterable[builtins.str] | None = ..., |
| mitigation_config: global___FetchErrorDetailsResponse.MitigationConfig | None = ..., |
| needs_audit: builtins.bool | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_mitigation_config", |
| b"_mitigation_config", |
| "_needs_audit", |
| b"_needs_audit", |
| "mitigation_config", |
| b"mitigation_config", |
| "needs_audit", |
| b"needs_audit", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_mitigation_config", |
| b"_mitigation_config", |
| "_needs_audit", |
| b"_needs_audit", |
| "migration_message", |
| b"migration_message", |
| "mitigation_config", |
| b"mitigation_config", |
| "needs_audit", |
| b"needs_audit", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal["_mitigation_config", b"_mitigation_config"], |
| ) -> typing_extensions.Literal["mitigation_config"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_needs_audit", b"_needs_audit"] |
| ) -> typing_extensions.Literal["needs_audit"] | None: ... |
| |
| class MitigationConfig(google.protobuf.message.Message): |
| """MitigationConfig defines a spark config flag that can be used to mitigate a breaking change.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| """The spark config key.""" |
| value: builtins.str |
| """The spark config value that mitigates the breaking change.""" |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| class Error(google.protobuf.message.Message): |
| """Error defines the schema for the representing exception.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ERROR_TYPE_HIERARCHY_FIELD_NUMBER: builtins.int |
| MESSAGE_FIELD_NUMBER: builtins.int |
| STACK_TRACE_FIELD_NUMBER: builtins.int |
| CAUSE_IDX_FIELD_NUMBER: builtins.int |
| SPARK_THROWABLE_FIELD_NUMBER: builtins.int |
| @property |
| def error_type_hierarchy( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """The fully qualified names of the exception class and its parent classes.""" |
| message: builtins.str |
| """The detailed message of the exception.""" |
| @property |
| def stack_trace( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___FetchErrorDetailsResponse.StackTraceElement |
| ]: |
| """The stackTrace of the exception. It will be set |
| if the SQLConf spark.sql.connect.serverStacktrace.enabled is true. |
| """ |
| cause_idx: builtins.int |
| """The index of the cause error in errors.""" |
| @property |
| def spark_throwable(self) -> global___FetchErrorDetailsResponse.SparkThrowable: |
| """The structured data of a SparkThrowable exception.""" |
| def __init__( |
| self, |
| *, |
| error_type_hierarchy: collections.abc.Iterable[builtins.str] | None = ..., |
| message: builtins.str = ..., |
| stack_trace: collections.abc.Iterable[ |
| global___FetchErrorDetailsResponse.StackTraceElement |
| ] |
| | None = ..., |
| cause_idx: builtins.int | None = ..., |
| spark_throwable: global___FetchErrorDetailsResponse.SparkThrowable | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_cause_idx", |
| b"_cause_idx", |
| "_spark_throwable", |
| b"_spark_throwable", |
| "cause_idx", |
| b"cause_idx", |
| "spark_throwable", |
| b"spark_throwable", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_cause_idx", |
| b"_cause_idx", |
| "_spark_throwable", |
| b"_spark_throwable", |
| "cause_idx", |
| b"cause_idx", |
| "error_type_hierarchy", |
| b"error_type_hierarchy", |
| "message", |
| b"message", |
| "spark_throwable", |
| b"spark_throwable", |
| "stack_trace", |
| b"stack_trace", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_cause_idx", b"_cause_idx"] |
| ) -> typing_extensions.Literal["cause_idx"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_spark_throwable", b"_spark_throwable"] |
| ) -> typing_extensions.Literal["spark_throwable"] | None: ... |
| |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| ROOT_ERROR_IDX_FIELD_NUMBER: builtins.int |
| ERRORS_FIELD_NUMBER: builtins.int |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session has not changed. |
| """ |
| session_id: builtins.str |
| root_error_idx: builtins.int |
| """The index of the root error in errors. The field will not be set if the error is not found.""" |
| @property |
| def errors( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___FetchErrorDetailsResponse.Error |
| ]: |
| """A list of errors.""" |
| def __init__( |
| self, |
| *, |
| server_side_session_id: builtins.str = ..., |
| session_id: builtins.str = ..., |
| root_error_idx: builtins.int | None = ..., |
| errors: collections.abc.Iterable[global___FetchErrorDetailsResponse.Error] | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_root_error_idx", b"_root_error_idx", "root_error_idx", b"root_error_idx" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_root_error_idx", |
| b"_root_error_idx", |
| "errors", |
| b"errors", |
| "root_error_idx", |
| b"root_error_idx", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_root_error_idx", b"_root_error_idx"] |
| ) -> typing_extensions.Literal["root_error_idx"] | None: ... |
| |
| global___FetchErrorDetailsResponse = FetchErrorDetailsResponse |
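| |
| # Editor's note: a minimal sketch, not generated code. It walks the cause |
| # chain of a FetchErrorDetailsResponse: root_error_idx points into errors, |
| # and each error's optional cause_idx points at its cause in the same list. |
| def _example_error_messages(resp: "FetchErrorDetailsResponse") -> "list[str]": |
|     messages: "list[str]" = [] |
|     if not resp.HasField("root_error_idx"): |
|         return messages  # the error was not found on the server |
|     idx = resp.root_error_idx |
|     while True: |
|         error = resp.errors[idx] |
|         messages.append(error.message) |
|         if not error.HasField("cause_idx"): |
|             break |
|         idx = error.cause_idx |
|     return messages |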
| |
| class CheckpointCommandResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RELATION_FIELD_NUMBER: builtins.int |
| @property |
| def relation(self) -> pyspark.sql.connect.proto.relations_pb2.CachedRemoteRelation: |
| """(Required) The logical plan checkpointed.""" |
| def __init__( |
| self, |
| *, |
| relation: pyspark.sql.connect.proto.relations_pb2.CachedRemoteRelation | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["relation", b"relation"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["relation", b"relation"] |
| ) -> None: ... |
| |
| global___CheckpointCommandResult = CheckpointCommandResult |
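| |
| # Editor's note: an illustrative sketch, not generated code. A checkpoint |
| # command is answered with a handle to the materialized plan. The classes are |
| # assumed to come from base_pb2 / relations_pb2, and the relation_id value is |
| # a placeholder. |
| def _example_checkpoint_result() -> "CheckpointCommandResult": |
|     from pyspark.sql.connect.proto.base_pb2 import CheckpointCommandResult |
|     from pyspark.sql.connect.proto.relations_pb2 import CachedRemoteRelation |
| |
|     return CheckpointCommandResult( |
|         relation=CachedRemoteRelation(relation_id="cached-rel-1")  # placeholder id |
|     ) |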
| |
| class CloneSessionRequest(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| CLIENT_OBSERVED_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| USER_CONTEXT_FIELD_NUMBER: builtins.int |
| CLIENT_TYPE_FIELD_NUMBER: builtins.int |
| NEW_SESSION_ID_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """(Required) |
| |
| The session_id specifies a spark session for a user id (which is specified |
| by user_context.user_id). The session_id is set by the client to be able to |
| collate streaming responses from different queries within the dedicated session. |
| The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`. |
| """ |
| client_observed_server_side_session_id: builtins.str |
| """(Optional) |
| |
| Server-side generated idempotency key from the previous responses (if any). Server |
| can use this to validate that the server side session has not changed. |
| """ |
| @property |
| def user_context(self) -> global___UserContext: |
| """(Required) User context |
| |
| user_context.user_id and session_id both identify a unique remote spark session on the |
| server side. |
| """ |
| client_type: builtins.str |
| """Provides optional information about the client sending the request. This field |
| can be used for language or version specific information and is only intended for |
| logging purposes and will not be interpreted by the server. |
| """ |
| new_session_id: builtins.str |
| """(Optional) |
| The session_id for the new cloned session. If not provided, a new UUID will be generated. |
| The id should be a UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`. |
| """ |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| client_observed_server_side_session_id: builtins.str | None = ..., |
| user_context: global___UserContext | None = ..., |
| client_type: builtins.str | None = ..., |
| new_session_id: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "_new_session_id", |
| b"_new_session_id", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "new_session_id", |
| b"new_session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", |
| b"_client_observed_server_side_session_id", |
| "_client_type", |
| b"_client_type", |
| "_new_session_id", |
| b"_new_session_id", |
| "client_observed_server_side_session_id", |
| b"client_observed_server_side_session_id", |
| "client_type", |
| b"client_type", |
| "new_session_id", |
| b"new_session_id", |
| "session_id", |
| b"session_id", |
| "user_context", |
| b"user_context", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_client_observed_server_side_session_id", b"_client_observed_server_side_session_id" |
| ], |
| ) -> typing_extensions.Literal["client_observed_server_side_session_id"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"] |
| ) -> typing_extensions.Literal["client_type"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_new_session_id", b"_new_session_id"] |
| ) -> typing_extensions.Literal["new_session_id"] | None: ... |
| |
| global___CloneSessionRequest = CloneSessionRequest |
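| |
| # Editor's note: an illustrative sketch, not generated code. It clones a |
| # session under a caller-chosen id; the classes are assumed to come from |
| # base_pb2 and the ids are placeholders. Leaving new_session_id unset lets |
| # the server generate one instead. |
| def _example_clone_session_request() -> "CloneSessionRequest": |
|     from pyspark.sql.connect.proto.base_pb2 import CloneSessionRequest, UserContext |
| |
|     return CloneSessionRequest( |
|         session_id="00112233-4455-6677-8899-aabbccddeeff",  # placeholder id |
|         user_context=UserContext(user_id="alice"),  # placeholder user |
|         new_session_id="ffeeddcc-bbaa-9988-7766-554433221100",  # placeholder id |
|     ) |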
| |
| class CloneSessionResponse(google.protobuf.message.Message): |
| """Next ID: 5""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| SESSION_ID_FIELD_NUMBER: builtins.int |
| SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| NEW_SESSION_ID_FIELD_NUMBER: builtins.int |
| NEW_SERVER_SIDE_SESSION_ID_FIELD_NUMBER: builtins.int |
| session_id: builtins.str |
| """Session id of the original session that was cloned.""" |
| server_side_session_id: builtins.str |
| """Server-side generated idempotency key that the client can use to assert that the server side |
| session (parent of the cloned session) has not changed. |
| """ |
| new_session_id: builtins.str |
| """Session id of the new cloned session.""" |
| new_server_side_session_id: builtins.str |
| """Server-side session ID of the new cloned session.""" |
| def __init__( |
| self, |
| *, |
| session_id: builtins.str = ..., |
| server_side_session_id: builtins.str = ..., |
| new_session_id: builtins.str = ..., |
| new_server_side_session_id: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "new_server_side_session_id", |
| b"new_server_side_session_id", |
| "new_session_id", |
| b"new_session_id", |
| "server_side_session_id", |
| b"server_side_session_id", |
| "session_id", |
| b"session_id", |
| ], |
| ) -> None: ... |
| |
| global___CloneSessionResponse = CloneSessionResponse |
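| |
| # Editor's note: a minimal sketch, not generated code. After a clone, the |
| # response carries ids for both the original and the new session; a client |
| # would adopt the new pair for all subsequent requests. |
| def _example_adopt_cloned_session(resp: "CloneSessionResponse") -> "tuple[str, str]": |
|     # The original (session_id, server_side_session_id) pair stays valid; the |
|     # new pair identifies the cloned session. |
|     return resp.new_session_id, resp.new_server_side_session_id |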