| # |
| # Licensed to the Apache Software Foundation (ASF) under one or more |
| # contributor license agreements. See the NOTICE file distributed with |
| # this work for additional information regarding copyright ownership. |
| # The ASF licenses this file to You under the Apache License, Version 2.0 |
| # (the "License"); you may not use this file except in compliance with |
| # the License. You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| # |
| """ |
| @generated by mypy-protobuf. Do not edit manually! |
| isort:skip_file |
| |
| Licensed to the Apache Software Foundation (ASF) under one or more |
| contributor license agreements. See the NOTICE file distributed with |
| this work for additional information regarding copyright ownership. |
| The ASF licenses this file to You under the Apache License, Version 2.0 |
| (the "License"); you may not use this file except in compliance with |
| the License. You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software |
| distributed under the License is distributed on an "AS IS" BASIS, |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and |
| limitations under the License. |
| """ |
| import builtins |
| import collections.abc |
| import google.protobuf.any_pb2 |
| import google.protobuf.descriptor |
| import google.protobuf.internal.containers |
| import google.protobuf.internal.enum_type_wrapper |
| import google.protobuf.message |
| import pyspark.sql.connect.proto.common_pb2 |
| import pyspark.sql.connect.proto.expressions_pb2 |
| import pyspark.sql.connect.proto.relations_pb2 |
| import sys |
| import typing |
| |
| if sys.version_info >= (3, 10): |
| import typing as typing_extensions |
| else: |
| import typing_extensions |
| |
| DESCRIPTOR: google.protobuf.descriptor.FileDescriptor |
| |
| class Command(google.protobuf.message.Message): |
| """A [[Command]] is an operation that is executed by the server that does not directly consume or |
| produce a relational result. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| REGISTER_FUNCTION_FIELD_NUMBER: builtins.int |
| WRITE_OPERATION_FIELD_NUMBER: builtins.int |
| CREATE_DATAFRAME_VIEW_FIELD_NUMBER: builtins.int |
| WRITE_OPERATION_V2_FIELD_NUMBER: builtins.int |
| SQL_COMMAND_FIELD_NUMBER: builtins.int |
| WRITE_STREAM_OPERATION_START_FIELD_NUMBER: builtins.int |
| STREAMING_QUERY_COMMAND_FIELD_NUMBER: builtins.int |
| GET_RESOURCES_COMMAND_FIELD_NUMBER: builtins.int |
| STREAMING_QUERY_MANAGER_COMMAND_FIELD_NUMBER: builtins.int |
| REGISTER_TABLE_FUNCTION_FIELD_NUMBER: builtins.int |
| EXTENSION_FIELD_NUMBER: builtins.int |
| @property |
| def register_function( |
| self, |
| ) -> pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction: ... |
| @property |
| def write_operation(self) -> global___WriteOperation: ... |
| @property |
| def create_dataframe_view(self) -> global___CreateDataFrameViewCommand: ... |
| @property |
| def write_operation_v2(self) -> global___WriteOperationV2: ... |
| @property |
| def sql_command(self) -> global___SqlCommand: ... |
| @property |
| def write_stream_operation_start(self) -> global___WriteStreamOperationStart: ... |
| @property |
| def streaming_query_command(self) -> global___StreamingQueryCommand: ... |
| @property |
| def get_resources_command(self) -> global___GetResourcesCommand: ... |
| @property |
| def streaming_query_manager_command(self) -> global___StreamingQueryManagerCommand: ... |
| @property |
| def register_table_function( |
| self, |
| ) -> pyspark.sql.connect.proto.relations_pb2.CommonInlineUserDefinedTableFunction: ... |
| @property |
| def extension(self) -> google.protobuf.any_pb2.Any: |
| """This field is used to mark extensions to the protocol. When plugins generate arbitrary |
| Commands they can add them here. During the planning the correct resolution is done. |
| """ |
| def __init__( |
| self, |
| *, |
| register_function: pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction |
| | None = ..., |
| write_operation: global___WriteOperation | None = ..., |
| create_dataframe_view: global___CreateDataFrameViewCommand | None = ..., |
| write_operation_v2: global___WriteOperationV2 | None = ..., |
| sql_command: global___SqlCommand | None = ..., |
| write_stream_operation_start: global___WriteStreamOperationStart | None = ..., |
| streaming_query_command: global___StreamingQueryCommand | None = ..., |
| get_resources_command: global___GetResourcesCommand | None = ..., |
| streaming_query_manager_command: global___StreamingQueryManagerCommand | None = ..., |
| register_table_function: pyspark.sql.connect.proto.relations_pb2.CommonInlineUserDefinedTableFunction |
| | None = ..., |
| extension: google.protobuf.any_pb2.Any | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "command_type", |
| b"command_type", |
| "create_dataframe_view", |
| b"create_dataframe_view", |
| "extension", |
| b"extension", |
| "get_resources_command", |
| b"get_resources_command", |
| "register_function", |
| b"register_function", |
| "register_table_function", |
| b"register_table_function", |
| "sql_command", |
| b"sql_command", |
| "streaming_query_command", |
| b"streaming_query_command", |
| "streaming_query_manager_command", |
| b"streaming_query_manager_command", |
| "write_operation", |
| b"write_operation", |
| "write_operation_v2", |
| b"write_operation_v2", |
| "write_stream_operation_start", |
| b"write_stream_operation_start", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "command_type", |
| b"command_type", |
| "create_dataframe_view", |
| b"create_dataframe_view", |
| "extension", |
| b"extension", |
| "get_resources_command", |
| b"get_resources_command", |
| "register_function", |
| b"register_function", |
| "register_table_function", |
| b"register_table_function", |
| "sql_command", |
| b"sql_command", |
| "streaming_query_command", |
| b"streaming_query_command", |
| "streaming_query_manager_command", |
| b"streaming_query_manager_command", |
| "write_operation", |
| b"write_operation", |
| "write_operation_v2", |
| b"write_operation_v2", |
| "write_stream_operation_start", |
| b"write_stream_operation_start", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["command_type", b"command_type"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "register_function", |
| "write_operation", |
| "create_dataframe_view", |
| "write_operation_v2", |
| "sql_command", |
| "write_stream_operation_start", |
| "streaming_query_command", |
| "get_resources_command", |
| "streaming_query_manager_command", |
| "register_table_function", |
| "extension", |
| ] |
| | None |
| ): ... |
| |
| global___Command = Command |
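| |
| # Illustrative sketch (not part of the generated stubs): constructing a Command and |
| # inspecting its `command_type` oneof. Assumes the generated runtime module |
| # `pyspark.sql.connect.proto.commands_pb2` is importable, as in a full PySpark install. |
| def _example_command_oneof() -> None: |
|     from pyspark.sql.connect.proto import commands_pb2 |
| |
|     # Setting one member of the oneof implicitly clears any other member. |
|     cmd = commands_pb2.Command(sql_command=commands_pb2.SqlCommand(sql="SELECT 1")) |
|     assert cmd.WhichOneof("command_type") == "sql_command" |
|     assert cmd.HasField("sql_command") |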
| |
| class SqlCommand(google.protobuf.message.Message): |
| """A SQL Command is used to trigger the eager evaluation of SQL commands in Spark. |
| |
| When the SQL provide as part of the message is a command it will be immediately evaluated |
| and the result will be collected and returned as part of a LocalRelation. If the result is |
| not a command, the operation will simply return a SQL Relation. This allows the client to be |
| almost oblivious to the server-side behavior. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ArgsEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| @property |
| def value(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression.Literal: ... |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: pyspark.sql.connect.proto.expressions_pb2.Expression.Literal | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["value", b"value"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| class NamedArgumentsEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| @property |
| def value(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression: ... |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["value", b"value"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| SQL_FIELD_NUMBER: builtins.int |
| ARGS_FIELD_NUMBER: builtins.int |
| POS_ARGS_FIELD_NUMBER: builtins.int |
| NAMED_ARGUMENTS_FIELD_NUMBER: builtins.int |
| POS_ARGUMENTS_FIELD_NUMBER: builtins.int |
| sql: builtins.str |
| """(Required) SQL Query.""" |
| @property |
| def args( |
| self, |
| ) -> google.protobuf.internal.containers.MessageMap[ |
| builtins.str, pyspark.sql.connect.proto.expressions_pb2.Expression.Literal |
| ]: |
| """(Optional) A map of parameter names to literal expressions.""" |
| @property |
| def pos_args( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression.Literal |
| ]: |
| """(Optional) A sequence of literal expressions for positional parameters in the SQL query text.""" |
| @property |
| def named_arguments( |
| self, |
| ) -> google.protobuf.internal.containers.MessageMap[ |
| builtins.str, pyspark.sql.connect.proto.expressions_pb2.Expression |
| ]: |
| """(Optional) A map of parameter names to expressions. |
| It cannot coexist with `pos_arguments`. |
| """ |
| @property |
| def pos_arguments( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression |
| ]: |
| """(Optional) A sequence of expressions for positional parameters in the SQL query text. |
| It cannot coexist with `named_arguments`. |
| """ |
| def __init__( |
| self, |
| *, |
| sql: builtins.str = ..., |
| args: collections.abc.Mapping[ |
| builtins.str, pyspark.sql.connect.proto.expressions_pb2.Expression.Literal |
| ] |
| | None = ..., |
| pos_args: collections.abc.Iterable[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression.Literal |
| ] |
| | None = ..., |
| named_arguments: collections.abc.Mapping[ |
| builtins.str, pyspark.sql.connect.proto.expressions_pb2.Expression |
| ] |
| | None = ..., |
| pos_arguments: collections.abc.Iterable[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression |
| ] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "args", |
| b"args", |
| "named_arguments", |
| b"named_arguments", |
| "pos_args", |
| b"pos_args", |
| "pos_arguments", |
| b"pos_arguments", |
| "sql", |
| b"sql", |
| ], |
| ) -> None: ... |
| |
| global___SqlCommand = SqlCommand |
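| |
| # Illustrative sketch (not part of the generated stubs): a parameterized SqlCommand. |
| # Protobuf map and repeated fields accept plain dicts/lists in the constructor. Assumes |
| # `Expression.Literal` exposes an `integer` member, as declared in expressions_pb2. |
| def _example_sql_command() -> None: |
|     from pyspark.sql.connect.proto import commands_pb2, expressions_pb2 |
| |
|     cmd = commands_pb2.SqlCommand( |
|         sql="SELECT * FROM range(10) WHERE id < :limit", |
|         args={"limit": expressions_pb2.Expression.Literal(integer=5)}, |
|     ) |
|     assert cmd.args["limit"].integer == 5 |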
| |
| class CreateDataFrameViewCommand(google.protobuf.message.Message): |
| """A command that can create DataFrame global temp view or local temp view.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| INPUT_FIELD_NUMBER: builtins.int |
| NAME_FIELD_NUMBER: builtins.int |
| IS_GLOBAL_FIELD_NUMBER: builtins.int |
| REPLACE_FIELD_NUMBER: builtins.int |
| @property |
| def input(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: |
| """(Required) The relation that this view will be built on.""" |
| name: builtins.str |
| """(Required) View name.""" |
| is_global: builtins.bool |
| """(Required) Whether this is global temp view or local temp view.""" |
| replace: builtins.bool |
| """(Required) |
| |
| If true, and if the view already exists, updates it; if false, and if the view |
| already exists, throws an exception. |
| """ |
| def __init__( |
| self, |
| *, |
| input: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| name: builtins.str = ..., |
| is_global: builtins.bool = ..., |
| replace: builtins.bool = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["input", b"input"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "input", b"input", "is_global", b"is_global", "name", b"name", "replace", b"replace" |
| ], |
| ) -> None: ... |
| |
| global___CreateDataFrameViewCommand = CreateDataFrameViewCommand |
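| |
| # Illustrative sketch: building a CreateDataFrameViewCommand over a SQL relation. Assumes |
| # `Relation` exposes a `sql` member with a `query` field, as declared in relations_pb2. |
| def _example_create_view() -> None: |
|     from pyspark.sql.connect.proto import commands_pb2, relations_pb2 |
| |
|     view = commands_pb2.CreateDataFrameViewCommand( |
|         input=relations_pb2.Relation(sql=relations_pb2.SQL(query="SELECT 1 AS id")), |
|         name="tmp_view", |
|         is_global=False,  # a local temp view |
|         replace=True,  # update the view if it already exists |
|     ) |
|     assert view.HasField("input") |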
| |
| class WriteOperation(google.protobuf.message.Message): |
| """As writes are not directly handled during analysis and planning, they are modeled as commands.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class _SaveMode: |
| ValueType = typing.NewType("ValueType", builtins.int) |
| V: typing_extensions.TypeAlias = ValueType |
| |
| class _SaveModeEnumTypeWrapper( |
| google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ |
| WriteOperation._SaveMode.ValueType |
| ], |
| builtins.type, |
| ): # noqa: F821 |
| DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor |
| SAVE_MODE_UNSPECIFIED: WriteOperation._SaveMode.ValueType # 0 |
| SAVE_MODE_APPEND: WriteOperation._SaveMode.ValueType # 1 |
| SAVE_MODE_OVERWRITE: WriteOperation._SaveMode.ValueType # 2 |
| SAVE_MODE_ERROR_IF_EXISTS: WriteOperation._SaveMode.ValueType # 3 |
| SAVE_MODE_IGNORE: WriteOperation._SaveMode.ValueType # 4 |
| |
| class SaveMode(_SaveMode, metaclass=_SaveModeEnumTypeWrapper): ... |
| SAVE_MODE_UNSPECIFIED: WriteOperation.SaveMode.ValueType # 0 |
| SAVE_MODE_APPEND: WriteOperation.SaveMode.ValueType # 1 |
| SAVE_MODE_OVERWRITE: WriteOperation.SaveMode.ValueType # 2 |
| SAVE_MODE_ERROR_IF_EXISTS: WriteOperation.SaveMode.ValueType # 3 |
| SAVE_MODE_IGNORE: WriteOperation.SaveMode.ValueType # 4 |
| |
| class OptionsEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| value: builtins.str |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| class SaveTable(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class _TableSaveMethod: |
| ValueType = typing.NewType("ValueType", builtins.int) |
| V: typing_extensions.TypeAlias = ValueType |
| |
| class _TableSaveMethodEnumTypeWrapper( |
| google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ |
| WriteOperation.SaveTable._TableSaveMethod.ValueType |
| ], |
| builtins.type, |
| ): # noqa: F821 |
| DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor |
| TABLE_SAVE_METHOD_UNSPECIFIED: WriteOperation.SaveTable._TableSaveMethod.ValueType # 0 |
| TABLE_SAVE_METHOD_SAVE_AS_TABLE: WriteOperation.SaveTable._TableSaveMethod.ValueType # 1 |
| TABLE_SAVE_METHOD_INSERT_INTO: WriteOperation.SaveTable._TableSaveMethod.ValueType # 2 |
| |
| class TableSaveMethod(_TableSaveMethod, metaclass=_TableSaveMethodEnumTypeWrapper): ... |
| TABLE_SAVE_METHOD_UNSPECIFIED: WriteOperation.SaveTable.TableSaveMethod.ValueType # 0 |
| TABLE_SAVE_METHOD_SAVE_AS_TABLE: WriteOperation.SaveTable.TableSaveMethod.ValueType # 1 |
| TABLE_SAVE_METHOD_INSERT_INTO: WriteOperation.SaveTable.TableSaveMethod.ValueType # 2 |
| |
| TABLE_NAME_FIELD_NUMBER: builtins.int |
| SAVE_METHOD_FIELD_NUMBER: builtins.int |
| table_name: builtins.str |
| """(Required) The table name.""" |
| save_method: global___WriteOperation.SaveTable.TableSaveMethod.ValueType |
| """(Required) The method to be called to write to the table.""" |
| def __init__( |
| self, |
| *, |
| table_name: builtins.str = ..., |
| save_method: global___WriteOperation.SaveTable.TableSaveMethod.ValueType = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "save_method", b"save_method", "table_name", b"table_name" |
| ], |
| ) -> None: ... |
| |
| class BucketBy(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| BUCKET_COLUMN_NAMES_FIELD_NUMBER: builtins.int |
| NUM_BUCKETS_FIELD_NUMBER: builtins.int |
| @property |
| def bucket_column_names( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... |
| num_buckets: builtins.int |
| def __init__( |
| self, |
| *, |
| bucket_column_names: collections.abc.Iterable[builtins.str] | None = ..., |
| num_buckets: builtins.int = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "bucket_column_names", b"bucket_column_names", "num_buckets", b"num_buckets" |
| ], |
| ) -> None: ... |
| |
| INPUT_FIELD_NUMBER: builtins.int |
| SOURCE_FIELD_NUMBER: builtins.int |
| PATH_FIELD_NUMBER: builtins.int |
| TABLE_FIELD_NUMBER: builtins.int |
| MODE_FIELD_NUMBER: builtins.int |
| SORT_COLUMN_NAMES_FIELD_NUMBER: builtins.int |
| PARTITIONING_COLUMNS_FIELD_NUMBER: builtins.int |
| BUCKET_BY_FIELD_NUMBER: builtins.int |
| OPTIONS_FIELD_NUMBER: builtins.int |
| CLUSTERING_COLUMNS_FIELD_NUMBER: builtins.int |
| @property |
| def input(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: |
| """(Required) The output of the `input` relation will be persisted according to the options.""" |
| source: builtins.str |
| """(Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.""" |
| path: builtins.str |
| @property |
| def table(self) -> global___WriteOperation.SaveTable: ... |
| mode: global___WriteOperation.SaveMode.ValueType |
| """(Required) the save mode.""" |
| @property |
| def sort_column_names( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Optional) List of columns to sort the output by.""" |
| @property |
| def partitioning_columns( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Optional) List of columns for partitioning.""" |
| @property |
| def bucket_by(self) -> global___WriteOperation.BucketBy: |
| """(Optional) Bucketing specification. Bucketing must set the number of buckets and the columns |
| to bucket by. |
| """ |
| @property |
| def options(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: |
| """(Optional) A list of configuration options.""" |
| @property |
| def clustering_columns( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Optional) Columns used for clustering the table.""" |
| def __init__( |
| self, |
| *, |
| input: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| source: builtins.str | None = ..., |
| path: builtins.str = ..., |
| table: global___WriteOperation.SaveTable | None = ..., |
| mode: global___WriteOperation.SaveMode.ValueType = ..., |
| sort_column_names: collections.abc.Iterable[builtins.str] | None = ..., |
| partitioning_columns: collections.abc.Iterable[builtins.str] | None = ..., |
| bucket_by: global___WriteOperation.BucketBy | None = ..., |
| options: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., |
| clustering_columns: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_source", |
| b"_source", |
| "bucket_by", |
| b"bucket_by", |
| "input", |
| b"input", |
| "path", |
| b"path", |
| "save_type", |
| b"save_type", |
| "source", |
| b"source", |
| "table", |
| b"table", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_source", |
| b"_source", |
| "bucket_by", |
| b"bucket_by", |
| "clustering_columns", |
| b"clustering_columns", |
| "input", |
| b"input", |
| "mode", |
| b"mode", |
| "options", |
| b"options", |
| "partitioning_columns", |
| b"partitioning_columns", |
| "path", |
| b"path", |
| "save_type", |
| b"save_type", |
| "sort_column_names", |
| b"sort_column_names", |
| "source", |
| b"source", |
| "table", |
| b"table", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_source", b"_source"] |
| ) -> typing_extensions.Literal["source"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["save_type", b"save_type"] |
| ) -> typing_extensions.Literal["path", "table"] | None: ... |
| |
| global___WriteOperation = WriteOperation |
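| |
| # Illustrative sketch: a batch write to a path. Setting `path` selects the `save_type` |
| # oneof member, and `mode` uses the SaveMode enum values exposed on the message class. |
| def _example_write_operation() -> None: |
|     from pyspark.sql.connect.proto import commands_pb2, relations_pb2 |
| |
|     write = commands_pb2.WriteOperation( |
|         input=relations_pb2.Relation(sql=relations_pb2.SQL(query="SELECT 1 AS id")), |
|         source="parquet", |
|         path="/tmp/example_output", |
|         mode=commands_pb2.WriteOperation.SAVE_MODE_APPEND, |
|         options={"compression": "snappy"}, |
|     ) |
|     # `path` and `table` are mutually exclusive members of the `save_type` oneof. |
|     assert write.WhichOneof("save_type") == "path" |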
| |
| class WriteOperationV2(google.protobuf.message.Message): |
| """As writes are not directly handled during analysis and planning, they are modeled as commands.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class _Mode: |
| ValueType = typing.NewType("ValueType", builtins.int) |
| V: typing_extensions.TypeAlias = ValueType |
| |
| class _ModeEnumTypeWrapper( |
| google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ |
| WriteOperationV2._Mode.ValueType |
| ], |
| builtins.type, |
| ): # noqa: F821 |
| DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor |
| MODE_UNSPECIFIED: WriteOperationV2._Mode.ValueType # 0 |
| MODE_CREATE: WriteOperationV2._Mode.ValueType # 1 |
| MODE_OVERWRITE: WriteOperationV2._Mode.ValueType # 2 |
| MODE_OVERWRITE_PARTITIONS: WriteOperationV2._Mode.ValueType # 3 |
| MODE_APPEND: WriteOperationV2._Mode.ValueType # 4 |
| MODE_REPLACE: WriteOperationV2._Mode.ValueType # 5 |
| MODE_CREATE_OR_REPLACE: WriteOperationV2._Mode.ValueType # 6 |
| |
| class Mode(_Mode, metaclass=_ModeEnumTypeWrapper): ... |
| MODE_UNSPECIFIED: WriteOperationV2.Mode.ValueType # 0 |
| MODE_CREATE: WriteOperationV2.Mode.ValueType # 1 |
| MODE_OVERWRITE: WriteOperationV2.Mode.ValueType # 2 |
| MODE_OVERWRITE_PARTITIONS: WriteOperationV2.Mode.ValueType # 3 |
| MODE_APPEND: WriteOperationV2.Mode.ValueType # 4 |
| MODE_REPLACE: WriteOperationV2.Mode.ValueType # 5 |
| MODE_CREATE_OR_REPLACE: WriteOperationV2.Mode.ValueType # 6 |
| |
| class OptionsEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| value: builtins.str |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| class TablePropertiesEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| value: builtins.str |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| INPUT_FIELD_NUMBER: builtins.int |
| TABLE_NAME_FIELD_NUMBER: builtins.int |
| PROVIDER_FIELD_NUMBER: builtins.int |
| PARTITIONING_COLUMNS_FIELD_NUMBER: builtins.int |
| OPTIONS_FIELD_NUMBER: builtins.int |
| TABLE_PROPERTIES_FIELD_NUMBER: builtins.int |
| MODE_FIELD_NUMBER: builtins.int |
| OVERWRITE_CONDITION_FIELD_NUMBER: builtins.int |
| CLUSTERING_COLUMNS_FIELD_NUMBER: builtins.int |
| @property |
| def input(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: |
| """(Required) The output of the `input` relation will be persisted according to the options.""" |
| table_name: builtins.str |
| """(Required) The destination of the write operation must be either a path or a table.""" |
| provider: builtins.str |
| """(Optional) A provider for the underlying output data source. Spark's default catalog supports |
| "parquet", "json", etc. |
| """ |
| @property |
| def partitioning_columns( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression |
| ]: |
| """(Optional) List of columns for partitioning for output table created by `create`, |
| `createOrReplace`, or `replace` |
| """ |
| @property |
| def options(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: |
| """(Optional) A list of configuration options.""" |
| @property |
| def table_properties( |
| self, |
| ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: |
| """(Optional) A list of table properties.""" |
| mode: global___WriteOperationV2.Mode.ValueType |
| """(Required) Write mode.""" |
| @property |
| def overwrite_condition(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression: |
| """(Optional) A condition for overwrite saving mode""" |
| @property |
| def clustering_columns( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Optional) Columns used for clustering the table.""" |
| def __init__( |
| self, |
| *, |
| input: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| table_name: builtins.str = ..., |
| provider: builtins.str | None = ..., |
| partitioning_columns: collections.abc.Iterable[ |
| pyspark.sql.connect.proto.expressions_pb2.Expression |
| ] |
| | None = ..., |
| options: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., |
| table_properties: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., |
| mode: global___WriteOperationV2.Mode.ValueType = ..., |
| overwrite_condition: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ..., |
| clustering_columns: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_provider", |
| b"_provider", |
| "input", |
| b"input", |
| "overwrite_condition", |
| b"overwrite_condition", |
| "provider", |
| b"provider", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_provider", |
| b"_provider", |
| "clustering_columns", |
| b"clustering_columns", |
| "input", |
| b"input", |
| "mode", |
| b"mode", |
| "options", |
| b"options", |
| "overwrite_condition", |
| b"overwrite_condition", |
| "partitioning_columns", |
| b"partitioning_columns", |
| "provider", |
| b"provider", |
| "table_name", |
| b"table_name", |
| "table_properties", |
| b"table_properties", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_provider", b"_provider"] |
| ) -> typing_extensions.Literal["provider"] | None: ... |
| |
| global___WriteOperationV2 = WriteOperationV2 |
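| |
| # Illustrative sketch: a V2 write that creates or replaces a table. `provider` is a |
| # proto3 optional field, so its presence can be checked with HasField. |
| def _example_write_operation_v2() -> None: |
|     from pyspark.sql.connect.proto import commands_pb2, relations_pb2 |
| |
|     write = commands_pb2.WriteOperationV2( |
|         input=relations_pb2.Relation(sql=relations_pb2.SQL(query="SELECT 1 AS id")), |
|         table_name="example_catalog.default.example_table", |
|         provider="parquet", |
|         mode=commands_pb2.WriteOperationV2.MODE_CREATE_OR_REPLACE, |
|         table_properties={"owner": "example"}, |
|     ) |
|     assert write.HasField("provider") |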
| |
| class WriteStreamOperationStart(google.protobuf.message.Message): |
| """Starts write stream operation as streaming query. Query ID and Run ID of the streaming |
| query are returned. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class OptionsEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| value: builtins.str |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| INPUT_FIELD_NUMBER: builtins.int |
| FORMAT_FIELD_NUMBER: builtins.int |
| OPTIONS_FIELD_NUMBER: builtins.int |
| PARTITIONING_COLUMN_NAMES_FIELD_NUMBER: builtins.int |
| PROCESSING_TIME_INTERVAL_FIELD_NUMBER: builtins.int |
| AVAILABLE_NOW_FIELD_NUMBER: builtins.int |
| ONCE_FIELD_NUMBER: builtins.int |
| CONTINUOUS_CHECKPOINT_INTERVAL_FIELD_NUMBER: builtins.int |
| OUTPUT_MODE_FIELD_NUMBER: builtins.int |
| QUERY_NAME_FIELD_NUMBER: builtins.int |
| PATH_FIELD_NUMBER: builtins.int |
| TABLE_NAME_FIELD_NUMBER: builtins.int |
| FOREACH_WRITER_FIELD_NUMBER: builtins.int |
| FOREACH_BATCH_FIELD_NUMBER: builtins.int |
| @property |
| def input(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: |
| """(Required) The output of the `input` streaming relation will be written.""" |
| format: builtins.str |
| """The following fields directly map to API for DataStreamWriter(). |
| Consult API documentation unless explicitly documented here. |
| """ |
| @property |
| def options( |
| self, |
| ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... |
| @property |
| def partitioning_column_names( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... |
| processing_time_interval: builtins.str |
| available_now: builtins.bool |
| once: builtins.bool |
| continuous_checkpoint_interval: builtins.str |
| output_mode: builtins.str |
| query_name: builtins.str |
| path: builtins.str |
| table_name: builtins.str |
| @property |
| def foreach_writer(self) -> global___StreamingForeachFunction: ... |
| @property |
| def foreach_batch(self) -> global___StreamingForeachFunction: ... |
| def __init__( |
| self, |
| *, |
| input: pyspark.sql.connect.proto.relations_pb2.Relation | None = ..., |
| format: builtins.str = ..., |
| options: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., |
| partitioning_column_names: collections.abc.Iterable[builtins.str] | None = ..., |
| processing_time_interval: builtins.str = ..., |
| available_now: builtins.bool = ..., |
| once: builtins.bool = ..., |
| continuous_checkpoint_interval: builtins.str = ..., |
| output_mode: builtins.str = ..., |
| query_name: builtins.str = ..., |
| path: builtins.str = ..., |
| table_name: builtins.str = ..., |
| foreach_writer: global___StreamingForeachFunction | None = ..., |
| foreach_batch: global___StreamingForeachFunction | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "available_now", |
| b"available_now", |
| "continuous_checkpoint_interval", |
| b"continuous_checkpoint_interval", |
| "foreach_batch", |
| b"foreach_batch", |
| "foreach_writer", |
| b"foreach_writer", |
| "input", |
| b"input", |
| "once", |
| b"once", |
| "path", |
| b"path", |
| "processing_time_interval", |
| b"processing_time_interval", |
| "sink_destination", |
| b"sink_destination", |
| "table_name", |
| b"table_name", |
| "trigger", |
| b"trigger", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "available_now", |
| b"available_now", |
| "continuous_checkpoint_interval", |
| b"continuous_checkpoint_interval", |
| "foreach_batch", |
| b"foreach_batch", |
| "foreach_writer", |
| b"foreach_writer", |
| "format", |
| b"format", |
| "input", |
| b"input", |
| "once", |
| b"once", |
| "options", |
| b"options", |
| "output_mode", |
| b"output_mode", |
| "partitioning_column_names", |
| b"partitioning_column_names", |
| "path", |
| b"path", |
| "processing_time_interval", |
| b"processing_time_interval", |
| "query_name", |
| b"query_name", |
| "sink_destination", |
| b"sink_destination", |
| "table_name", |
| b"table_name", |
| "trigger", |
| b"trigger", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["sink_destination", b"sink_destination"] |
| ) -> typing_extensions.Literal["path", "table_name"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["trigger", b"trigger"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "processing_time_interval", "available_now", "once", "continuous_checkpoint_interval" |
| ] |
| | None |
| ): ... |
| |
| global___WriteStreamOperationStart = WriteStreamOperationStart |
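| |
| # Illustrative sketch: starting a streaming write with an available-now trigger and a |
| # table sink. `trigger` and `sink_destination` are separate oneofs, so one member of |
| # each may be set at the same time. |
| def _example_write_stream_start() -> None: |
|     from pyspark.sql.connect.proto import commands_pb2, relations_pb2 |
| |
|     start = commands_pb2.WriteStreamOperationStart( |
|         input=relations_pb2.Relation(sql=relations_pb2.SQL(query="SELECT 1 AS id")), |
|         format="parquet", |
|         options={"checkpointLocation": "/tmp/checkpoints/example"}, |
|         available_now=True, |
|         output_mode="append", |
|         table_name="example_sink", |
|     ) |
|     assert start.WhichOneof("trigger") == "available_now" |
|     assert start.WhichOneof("sink_destination") == "table_name" |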
| |
| class StreamingForeachFunction(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| PYTHON_FUNCTION_FIELD_NUMBER: builtins.int |
| SCALA_FUNCTION_FIELD_NUMBER: builtins.int |
| @property |
| def python_function(self) -> pyspark.sql.connect.proto.expressions_pb2.PythonUDF: ... |
| @property |
| def scala_function(self) -> pyspark.sql.connect.proto.expressions_pb2.ScalarScalaUDF: ... |
| def __init__( |
| self, |
| *, |
| python_function: pyspark.sql.connect.proto.expressions_pb2.PythonUDF | None = ..., |
| scala_function: pyspark.sql.connect.proto.expressions_pb2.ScalarScalaUDF | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "function", |
| b"function", |
| "python_function", |
| b"python_function", |
| "scala_function", |
| b"scala_function", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "function", |
| b"function", |
| "python_function", |
| b"python_function", |
| "scala_function", |
| b"scala_function", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["function", b"function"] |
| ) -> typing_extensions.Literal["python_function", "scala_function"] | None: ... |
| |
| global___StreamingForeachFunction = StreamingForeachFunction |
| |
| class WriteStreamOperationStartResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| QUERY_ID_FIELD_NUMBER: builtins.int |
| NAME_FIELD_NUMBER: builtins.int |
| @property |
| def query_id(self) -> global___StreamingQueryInstanceId: |
| """(Required) Query instance. See `StreamingQueryInstanceId`.""" |
| name: builtins.str |
| """An optional query name.""" |
| def __init__( |
| self, |
| *, |
| query_id: global___StreamingQueryInstanceId | None = ..., |
| name: builtins.str = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["query_id", b"query_id"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["name", b"name", "query_id", b"query_id"] |
| ) -> None: ... |
| |
| global___WriteStreamOperationStartResult = WriteStreamOperationStartResult |
| |
| class StreamingQueryInstanceId(google.protobuf.message.Message): |
| """A tuple that uniquely identifies an instance of streaming query run. It consists of `id` that |
| persists across the streaming runs and `run_id` that changes between each run of the |
| streaming query that resumes from the checkpoint. |
| """ |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ID_FIELD_NUMBER: builtins.int |
| RUN_ID_FIELD_NUMBER: builtins.int |
| id: builtins.str |
| """(Required) The unique id of this query that persists across restarts from checkpoint data. |
| That is, this id is generated when a query is started for the first time, and |
| will be the same every time it is restarted from checkpoint data. |
| """ |
| run_id: builtins.str |
| """(Required) The unique id of this run of the query. That is, every start/restart of a query |
| will generate a unique run_id. Therefore, every time a query is restarted from |
| checkpoint, it will have the same `id` but different `run_id`s. |
| """ |
| def __init__( |
| self, |
| *, |
| id: builtins.str = ..., |
| run_id: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["id", b"id", "run_id", b"run_id"] |
| ) -> None: ... |
| |
| global___StreamingQueryInstanceId = StreamingQueryInstanceId |
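| |
| # Illustrative sketch: `id` stays stable across restarts from a checkpoint, while |
| # `run_id` is unique per run; both are typically UUID strings. |
| def _example_query_instance_id() -> None: |
|     import uuid |
| |
|     from pyspark.sql.connect.proto import commands_pb2 |
| |
|     instance = commands_pb2.StreamingQueryInstanceId( |
|         id=str(uuid.uuid4()), run_id=str(uuid.uuid4()) |
|     ) |
|     assert instance.id != instance.run_id |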
| |
| class StreamingQueryCommand(google.protobuf.message.Message): |
| """Commands for a streaming query.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ExplainCommand(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| EXTENDED_FIELD_NUMBER: builtins.int |
| extended: builtins.bool |
| """TODO: Consider reusing Explain from AnalyzePlanRequest message. |
| We can not do this right now since it base.proto imports this file. |
| """ |
| def __init__( |
| self, |
| *, |
| extended: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["extended", b"extended"] |
| ) -> None: ... |
| |
| class AwaitTerminationCommand(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| TIMEOUT_MS_FIELD_NUMBER: builtins.int |
| timeout_ms: builtins.int |
| def __init__( |
| self, |
| *, |
| timeout_ms: builtins.int | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_timeout_ms", b"_timeout_ms", "timeout_ms", b"timeout_ms" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_timeout_ms", b"_timeout_ms", "timeout_ms", b"timeout_ms" |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_timeout_ms", b"_timeout_ms"] |
| ) -> typing_extensions.Literal["timeout_ms"] | None: ... |
| |
| QUERY_ID_FIELD_NUMBER: builtins.int |
| STATUS_FIELD_NUMBER: builtins.int |
| LAST_PROGRESS_FIELD_NUMBER: builtins.int |
| RECENT_PROGRESS_FIELD_NUMBER: builtins.int |
| STOP_FIELD_NUMBER: builtins.int |
| PROCESS_ALL_AVAILABLE_FIELD_NUMBER: builtins.int |
| EXPLAIN_FIELD_NUMBER: builtins.int |
| EXCEPTION_FIELD_NUMBER: builtins.int |
| AWAIT_TERMINATION_FIELD_NUMBER: builtins.int |
| @property |
| def query_id(self) -> global___StreamingQueryInstanceId: |
| """(Required) Query instance. See `StreamingQueryInstanceId`.""" |
| status: builtins.bool |
| """status() API.""" |
| last_progress: builtins.bool |
| """lastProgress() API.""" |
| recent_progress: builtins.bool |
| """recentProgress() API.""" |
| stop: builtins.bool |
| """stop() API. Stops the query.""" |
| process_all_available: builtins.bool |
| """processAllAvailable() API. Waits till all the available data is processed""" |
| @property |
| def explain(self) -> global___StreamingQueryCommand.ExplainCommand: |
| """explain() API. Returns logical and physical plans.""" |
| exception: builtins.bool |
| """exception() API. Returns the exception in the query if any.""" |
| @property |
| def await_termination(self) -> global___StreamingQueryCommand.AwaitTerminationCommand: |
| """awaitTermination() API. Waits for the termination of the query.""" |
| def __init__( |
| self, |
| *, |
| query_id: global___StreamingQueryInstanceId | None = ..., |
| status: builtins.bool = ..., |
| last_progress: builtins.bool = ..., |
| recent_progress: builtins.bool = ..., |
| stop: builtins.bool = ..., |
| process_all_available: builtins.bool = ..., |
| explain: global___StreamingQueryCommand.ExplainCommand | None = ..., |
| exception: builtins.bool = ..., |
| await_termination: global___StreamingQueryCommand.AwaitTerminationCommand | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "await_termination", |
| b"await_termination", |
| "command", |
| b"command", |
| "exception", |
| b"exception", |
| "explain", |
| b"explain", |
| "last_progress", |
| b"last_progress", |
| "process_all_available", |
| b"process_all_available", |
| "query_id", |
| b"query_id", |
| "recent_progress", |
| b"recent_progress", |
| "status", |
| b"status", |
| "stop", |
| b"stop", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "await_termination", |
| b"await_termination", |
| "command", |
| b"command", |
| "exception", |
| b"exception", |
| "explain", |
| b"explain", |
| "last_progress", |
| b"last_progress", |
| "process_all_available", |
| b"process_all_available", |
| "query_id", |
| b"query_id", |
| "recent_progress", |
| b"recent_progress", |
| "status", |
| b"status", |
| "stop", |
| b"stop", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["command", b"command"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "status", |
| "last_progress", |
| "recent_progress", |
| "stop", |
| "process_all_available", |
| "explain", |
| "exception", |
| "await_termination", |
| ] |
| | None |
| ): ... |
| |
| global___StreamingQueryCommand = StreamingQueryCommand |
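| |
| # Illustrative sketch: asking the server to stop a streaming query. `query_id` is always |
| # required; the action itself is one member of the `command` oneof. |
| def _example_stop_query(query_id: StreamingQueryInstanceId) -> None: |
|     from pyspark.sql.connect.proto import commands_pb2 |
| |
|     cmd = commands_pb2.StreamingQueryCommand(query_id=query_id, stop=True) |
|     assert cmd.WhichOneof("command") == "stop" |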
| |
| class StreamingQueryCommandResult(google.protobuf.message.Message): |
| """Response for commands on a streaming query.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class StatusResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| STATUS_MESSAGE_FIELD_NUMBER: builtins.int |
| IS_DATA_AVAILABLE_FIELD_NUMBER: builtins.int |
| IS_TRIGGER_ACTIVE_FIELD_NUMBER: builtins.int |
| IS_ACTIVE_FIELD_NUMBER: builtins.int |
| status_message: builtins.str |
| """See documentation for these Scala 'StreamingQueryStatus' struct""" |
| is_data_available: builtins.bool |
| is_trigger_active: builtins.bool |
| is_active: builtins.bool |
| def __init__( |
| self, |
| *, |
| status_message: builtins.str = ..., |
| is_data_available: builtins.bool = ..., |
| is_trigger_active: builtins.bool = ..., |
| is_active: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "is_active", |
| b"is_active", |
| "is_data_available", |
| b"is_data_available", |
| "is_trigger_active", |
| b"is_trigger_active", |
| "status_message", |
| b"status_message", |
| ], |
| ) -> None: ... |
| |
| class RecentProgressResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RECENT_PROGRESS_JSON_FIELD_NUMBER: builtins.int |
| @property |
| def recent_progress_json( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """Progress reports as an array of json strings.""" |
| def __init__( |
| self, |
| *, |
| recent_progress_json: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal["recent_progress_json", b"recent_progress_json"], |
| ) -> None: ... |
| |
| class ExplainResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| RESULT_FIELD_NUMBER: builtins.int |
| result: builtins.str |
| """Logical and physical plans as string""" |
| def __init__( |
| self, |
| *, |
| result: builtins.str = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["result", b"result"] |
| ) -> None: ... |
| |
| class ExceptionResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| EXCEPTION_MESSAGE_FIELD_NUMBER: builtins.int |
| ERROR_CLASS_FIELD_NUMBER: builtins.int |
| STACK_TRACE_FIELD_NUMBER: builtins.int |
| exception_message: builtins.str |
| """(Optional) Exception message as string, maps to the return value of original |
| StreamingQueryException's toString method |
| """ |
| error_class: builtins.str |
| """(Optional) Exception error class as string""" |
| stack_trace: builtins.str |
| """(Optional) Exception stack trace as string""" |
| def __init__( |
| self, |
| *, |
| exception_message: builtins.str | None = ..., |
| error_class: builtins.str | None = ..., |
| stack_trace: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_error_class", |
| b"_error_class", |
| "_exception_message", |
| b"_exception_message", |
| "_stack_trace", |
| b"_stack_trace", |
| "error_class", |
| b"error_class", |
| "exception_message", |
| b"exception_message", |
| "stack_trace", |
| b"stack_trace", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_error_class", |
| b"_error_class", |
| "_exception_message", |
| b"_exception_message", |
| "_stack_trace", |
| b"_stack_trace", |
| "error_class", |
| b"error_class", |
| "exception_message", |
| b"exception_message", |
| "stack_trace", |
| b"stack_trace", |
| ], |
| ) -> None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_error_class", b"_error_class"] |
| ) -> typing_extensions.Literal["error_class"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal["_exception_message", b"_exception_message"], |
| ) -> typing_extensions.Literal["exception_message"] | None: ... |
| @typing.overload |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_stack_trace", b"_stack_trace"] |
| ) -> typing_extensions.Literal["stack_trace"] | None: ... |
| |
| class AwaitTerminationResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| TERMINATED_FIELD_NUMBER: builtins.int |
| terminated: builtins.bool |
| def __init__( |
| self, |
| *, |
| terminated: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["terminated", b"terminated"] |
| ) -> None: ... |
| |
| QUERY_ID_FIELD_NUMBER: builtins.int |
| STATUS_FIELD_NUMBER: builtins.int |
| RECENT_PROGRESS_FIELD_NUMBER: builtins.int |
| EXPLAIN_FIELD_NUMBER: builtins.int |
| EXCEPTION_FIELD_NUMBER: builtins.int |
| AWAIT_TERMINATION_FIELD_NUMBER: builtins.int |
| @property |
| def query_id(self) -> global___StreamingQueryInstanceId: |
| """(Required) Query instance id. See `StreamingQueryInstanceId`.""" |
| @property |
| def status(self) -> global___StreamingQueryCommandResult.StatusResult: ... |
| @property |
| def recent_progress(self) -> global___StreamingQueryCommandResult.RecentProgressResult: ... |
| @property |
| def explain(self) -> global___StreamingQueryCommandResult.ExplainResult: ... |
| @property |
| def exception(self) -> global___StreamingQueryCommandResult.ExceptionResult: ... |
| @property |
| def await_termination(self) -> global___StreamingQueryCommandResult.AwaitTerminationResult: ... |
| def __init__( |
| self, |
| *, |
| query_id: global___StreamingQueryInstanceId | None = ..., |
| status: global___StreamingQueryCommandResult.StatusResult | None = ..., |
| recent_progress: global___StreamingQueryCommandResult.RecentProgressResult | None = ..., |
| explain: global___StreamingQueryCommandResult.ExplainResult | None = ..., |
| exception: global___StreamingQueryCommandResult.ExceptionResult | None = ..., |
| await_termination: global___StreamingQueryCommandResult.AwaitTerminationResult | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "await_termination", |
| b"await_termination", |
| "exception", |
| b"exception", |
| "explain", |
| b"explain", |
| "query_id", |
| b"query_id", |
| "recent_progress", |
| b"recent_progress", |
| "result_type", |
| b"result_type", |
| "status", |
| b"status", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "await_termination", |
| b"await_termination", |
| "exception", |
| b"exception", |
| "explain", |
| b"explain", |
| "query_id", |
| b"query_id", |
| "recent_progress", |
| b"recent_progress", |
| "result_type", |
| b"result_type", |
| "status", |
| b"status", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["result_type", b"result_type"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "status", "recent_progress", "explain", "exception", "await_termination" |
| ] |
| | None |
| ): ... |
| |
| global___StreamingQueryCommandResult = StreamingQueryCommandResult |
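| |
| # Illustrative sketch: dispatching on the `result_type` oneof of a result message, |
| # e.g. after sending a status or exception command. |
| def _example_read_result(result: StreamingQueryCommandResult) -> None: |
|     which = result.WhichOneof("result_type") |
|     if which == "status": |
|         print(result.status.status_message, result.status.is_active) |
|     elif which == "exception": |
|         print(result.exception.exception_message) |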
| |
| class StreamingQueryManagerCommand(google.protobuf.message.Message): |
| """Commands for the streaming query manager.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class AwaitAnyTerminationCommand(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| TIMEOUT_MS_FIELD_NUMBER: builtins.int |
| timeout_ms: builtins.int |
| """(Optional) The waiting time in milliseconds to wait for any query to terminate.""" |
| def __init__( |
| self, |
| *, |
| timeout_ms: builtins.int | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_timeout_ms", b"_timeout_ms", "timeout_ms", b"timeout_ms" |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_timeout_ms", b"_timeout_ms", "timeout_ms", b"timeout_ms" |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_timeout_ms", b"_timeout_ms"] |
| ) -> typing_extensions.Literal["timeout_ms"] | None: ... |
| |
| class StreamingQueryListenerCommand(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| LISTENER_PAYLOAD_FIELD_NUMBER: builtins.int |
| PYTHON_LISTENER_PAYLOAD_FIELD_NUMBER: builtins.int |
| ID_FIELD_NUMBER: builtins.int |
| listener_payload: builtins.bytes |
| @property |
| def python_listener_payload( |
| self, |
| ) -> pyspark.sql.connect.proto.expressions_pb2.PythonUDF: ... |
| id: builtins.str |
| def __init__( |
| self, |
| *, |
| listener_payload: builtins.bytes = ..., |
| python_listener_payload: pyspark.sql.connect.proto.expressions_pb2.PythonUDF |
| | None = ..., |
| id: builtins.str = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_python_listener_payload", |
| b"_python_listener_payload", |
| "python_listener_payload", |
| b"python_listener_payload", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "_python_listener_payload", |
| b"_python_listener_payload", |
| "id", |
| b"id", |
| "listener_payload", |
| b"listener_payload", |
| "python_listener_payload", |
| b"python_listener_payload", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, |
| oneof_group: typing_extensions.Literal[ |
| "_python_listener_payload", b"_python_listener_payload" |
| ], |
| ) -> typing_extensions.Literal["python_listener_payload"] | None: ... |
| |
| ACTIVE_FIELD_NUMBER: builtins.int |
| GET_QUERY_FIELD_NUMBER: builtins.int |
| AWAIT_ANY_TERMINATION_FIELD_NUMBER: builtins.int |
| RESET_TERMINATED_FIELD_NUMBER: builtins.int |
| ADD_LISTENER_FIELD_NUMBER: builtins.int |
| REMOVE_LISTENER_FIELD_NUMBER: builtins.int |
| LIST_LISTENERS_FIELD_NUMBER: builtins.int |
| active: builtins.bool |
| """active() API, returns a list of active queries.""" |
| get_query: builtins.str |
| """get() API, returns the StreamingQuery identified by id.""" |
| @property |
| def await_any_termination( |
| self, |
| ) -> global___StreamingQueryManagerCommand.AwaitAnyTerminationCommand: |
| """awaitAnyTermination() API, wait until any query terminates or timeout.""" |
| reset_terminated: builtins.bool |
| """resetTerminated() API.""" |
| @property |
| def add_listener(self) -> global___StreamingQueryManagerCommand.StreamingQueryListenerCommand: |
| """addListener API.""" |
| @property |
| def remove_listener( |
| self, |
| ) -> global___StreamingQueryManagerCommand.StreamingQueryListenerCommand: |
| """removeListener API.""" |
| list_listeners: builtins.bool |
| """listListeners() API, returns a list of streaming query listeners.""" |
| def __init__( |
| self, |
| *, |
| active: builtins.bool = ..., |
| get_query: builtins.str = ..., |
| await_any_termination: global___StreamingQueryManagerCommand.AwaitAnyTerminationCommand |
| | None = ..., |
| reset_terminated: builtins.bool = ..., |
| add_listener: global___StreamingQueryManagerCommand.StreamingQueryListenerCommand |
| | None = ..., |
| remove_listener: global___StreamingQueryManagerCommand.StreamingQueryListenerCommand |
| | None = ..., |
| list_listeners: builtins.bool = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "active", |
| b"active", |
| "add_listener", |
| b"add_listener", |
| "await_any_termination", |
| b"await_any_termination", |
| "command", |
| b"command", |
| "get_query", |
| b"get_query", |
| "list_listeners", |
| b"list_listeners", |
| "remove_listener", |
| b"remove_listener", |
| "reset_terminated", |
| b"reset_terminated", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "active", |
| b"active", |
| "add_listener", |
| b"add_listener", |
| "await_any_termination", |
| b"await_any_termination", |
| "command", |
| b"command", |
| "get_query", |
| b"get_query", |
| "list_listeners", |
| b"list_listeners", |
| "remove_listener", |
| b"remove_listener", |
| "reset_terminated", |
| b"reset_terminated", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["command", b"command"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "active", |
| "get_query", |
| "await_any_termination", |
| "reset_terminated", |
| "add_listener", |
| "remove_listener", |
| "list_listeners", |
| ] |
| | None |
| ): ... |
| |
| global___StreamingQueryManagerCommand = StreamingQueryManagerCommand |
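| |
| # Illustrative sketch: waiting up to ten seconds for any active query to terminate. |
| # `timeout_ms` is optional; leaving it unset waits indefinitely. |
| def _example_await_any_termination() -> None: |
|     from pyspark.sql.connect.proto import commands_pb2 |
| |
|     manager_cmd = commands_pb2.StreamingQueryManagerCommand( |
|         await_any_termination=( |
|             commands_pb2.StreamingQueryManagerCommand.AwaitAnyTerminationCommand( |
|                 timeout_ms=10_000 |
|             ) |
|         ) |
|     ) |
|     assert manager_cmd.WhichOneof("command") == "await_any_termination" |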
| |
| class StreamingQueryManagerCommandResult(google.protobuf.message.Message): |
| """Response for commands on the streaming query manager.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ActiveResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ACTIVE_QUERIES_FIELD_NUMBER: builtins.int |
| @property |
| def active_queries( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ |
| global___StreamingQueryManagerCommandResult.StreamingQueryInstance |
| ]: ... |
| def __init__( |
| self, |
| *, |
| active_queries: collections.abc.Iterable[ |
| global___StreamingQueryManagerCommandResult.StreamingQueryInstance |
| ] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["active_queries", b"active_queries"] |
| ) -> None: ... |
| |
| class StreamingQueryInstance(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| ID_FIELD_NUMBER: builtins.int |
| NAME_FIELD_NUMBER: builtins.int |
| @property |
| def id(self) -> global___StreamingQueryInstanceId: |
| """(Required) The id and runId of this query.""" |
| name: builtins.str |
| """(Optional) The name of this query.""" |
| def __init__( |
| self, |
| *, |
| id: global___StreamingQueryInstanceId | None = ..., |
| name: builtins.str | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal["_name", b"_name", "id", b"id", "name", b"name"], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal["_name", b"_name", "id", b"id", "name", b"name"], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["_name", b"_name"] |
| ) -> typing_extensions.Literal["name"] | None: ... |
| |
| class AwaitAnyTerminationResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| TERMINATED_FIELD_NUMBER: builtins.int |
| terminated: builtins.bool |
| def __init__( |
| self, |
| *, |
| terminated: builtins.bool = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["terminated", b"terminated"] |
| ) -> None: ... |
| |
| class StreamingQueryListenerInstance(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| LISTENER_PAYLOAD_FIELD_NUMBER: builtins.int |
| listener_payload: builtins.bytes |
| def __init__( |
| self, |
| *, |
| listener_payload: builtins.bytes = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["listener_payload", b"listener_payload"] |
| ) -> None: ... |
| |
| class ListStreamingQueryListenerResult(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| LISTENER_IDS_FIELD_NUMBER: builtins.int |
| @property |
| def listener_ids( |
| self, |
| ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: |
| """(Required) Reference IDs of listener instances.""" |
| def __init__( |
| self, |
| *, |
| listener_ids: collections.abc.Iterable[builtins.str] | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["listener_ids", b"listener_ids"] |
| ) -> None: ... |
| |
| ACTIVE_FIELD_NUMBER: builtins.int |
| QUERY_FIELD_NUMBER: builtins.int |
| AWAIT_ANY_TERMINATION_FIELD_NUMBER: builtins.int |
| RESET_TERMINATED_FIELD_NUMBER: builtins.int |
| ADD_LISTENER_FIELD_NUMBER: builtins.int |
| REMOVE_LISTENER_FIELD_NUMBER: builtins.int |
| LIST_LISTENERS_FIELD_NUMBER: builtins.int |
| @property |
| def active(self) -> global___StreamingQueryManagerCommandResult.ActiveResult: ... |
| @property |
| def query(self) -> global___StreamingQueryManagerCommandResult.StreamingQueryInstance: ... |
| @property |
| def await_any_termination( |
| self, |
| ) -> global___StreamingQueryManagerCommandResult.AwaitAnyTerminationResult: ... |
| reset_terminated: builtins.bool |
| add_listener: builtins.bool |
| remove_listener: builtins.bool |
| @property |
| def list_listeners( |
| self, |
| ) -> global___StreamingQueryManagerCommandResult.ListStreamingQueryListenerResult: ... |
| def __init__( |
| self, |
| *, |
| active: global___StreamingQueryManagerCommandResult.ActiveResult | None = ..., |
| query: global___StreamingQueryManagerCommandResult.StreamingQueryInstance | None = ..., |
| await_any_termination: global___StreamingQueryManagerCommandResult.AwaitAnyTerminationResult |
| | None = ..., |
| reset_terminated: builtins.bool = ..., |
| add_listener: builtins.bool = ..., |
| remove_listener: builtins.bool = ..., |
| list_listeners: global___StreamingQueryManagerCommandResult.ListStreamingQueryListenerResult |
| | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "active", |
| b"active", |
| "add_listener", |
| b"add_listener", |
| "await_any_termination", |
| b"await_any_termination", |
| "list_listeners", |
| b"list_listeners", |
| "query", |
| b"query", |
| "remove_listener", |
| b"remove_listener", |
| "reset_terminated", |
| b"reset_terminated", |
| "result_type", |
| b"result_type", |
| ], |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, |
| field_name: typing_extensions.Literal[ |
| "active", |
| b"active", |
| "add_listener", |
| b"add_listener", |
| "await_any_termination", |
| b"await_any_termination", |
| "list_listeners", |
| b"list_listeners", |
| "query", |
| b"query", |
| "remove_listener", |
| b"remove_listener", |
| "reset_terminated", |
| b"reset_terminated", |
| "result_type", |
| b"result_type", |
| ], |
| ) -> None: ... |
| def WhichOneof( |
| self, oneof_group: typing_extensions.Literal["result_type", b"result_type"] |
| ) -> ( |
| typing_extensions.Literal[ |
| "active", |
| "query", |
| "await_any_termination", |
| "reset_terminated", |
| "add_listener", |
| "remove_listener", |
| "list_listeners", |
| ] |
| | None |
| ): ... |
| |
| global___StreamingQueryManagerCommandResult = StreamingQueryManagerCommandResult |
| |
| class GetResourcesCommand(google.protobuf.message.Message): |
| """Command to get the output of 'SparkContext.resources'""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| def __init__( |
| self, |
| ) -> None: ... |
| |
| global___GetResourcesCommand = GetResourcesCommand |
| |
| class GetResourcesCommandResult(google.protobuf.message.Message): |
| """Response for command 'GetResourcesCommand'.""" |
| |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| class ResourcesEntry(google.protobuf.message.Message): |
| DESCRIPTOR: google.protobuf.descriptor.Descriptor |
| |
| KEY_FIELD_NUMBER: builtins.int |
| VALUE_FIELD_NUMBER: builtins.int |
| key: builtins.str |
| @property |
| def value(self) -> pyspark.sql.connect.proto.common_pb2.ResourceInformation: ... |
| def __init__( |
| self, |
| *, |
| key: builtins.str = ..., |
| value: pyspark.sql.connect.proto.common_pb2.ResourceInformation | None = ..., |
| ) -> None: ... |
| def HasField( |
| self, field_name: typing_extensions.Literal["value", b"value"] |
| ) -> builtins.bool: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] |
| ) -> None: ... |
| |
| RESOURCES_FIELD_NUMBER: builtins.int |
| @property |
| def resources( |
| self, |
| ) -> google.protobuf.internal.containers.MessageMap[ |
| builtins.str, pyspark.sql.connect.proto.common_pb2.ResourceInformation |
| ]: ... |
| def __init__( |
| self, |
| *, |
| resources: collections.abc.Mapping[ |
| builtins.str, pyspark.sql.connect.proto.common_pb2.ResourceInformation |
| ] |
| | None = ..., |
| ) -> None: ... |
| def ClearField( |
| self, field_name: typing_extensions.Literal["resources", b"resources"] |
| ) -> None: ... |
| |
| global___GetResourcesCommandResult = GetResourcesCommandResult |
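| |
| # Illustrative sketch: reading the resources map from a GetResourcesCommandResult. Map |
| # values are ResourceInformation messages from common_pb2. |
| def _example_read_resources(result: GetResourcesCommandResult) -> None: |
|     for name, info in result.resources.items(): |
|         print(name, info) |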