blob: 93a431dcc860756fdde94fce785de02d1d132b27 [file] [log] [blame]
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.any_pb2
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import pyspark.sql.connect.proto.types_pb2
import sys
import typing
# On Python 3.10+ the stdlib `typing` module provides everything this stub
# needs (e.g. Literal, TypeAlias); older interpreters need the backport.
if sys.version_info >= (3, 10):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class Expression(google.protobuf.message.Message):
"""Expression used to refer to fields, functions and similar. This can be used everywhere
expressions in SQL appear.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class Window(google.protobuf.message.Message):
    """Expression for the OVER clause or WINDOW clause."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class WindowFrame(google.protobuf.message.Message):
        """The window frame"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class _FrameType:
            ValueType = typing.NewType("ValueType", builtins.int)
            V: typing_extensions.TypeAlias = ValueType

        class _FrameTypeEnumTypeWrapper(
            google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
                Expression.Window.WindowFrame._FrameType.ValueType
            ],
            builtins.type,
        ):  # noqa: F821
            DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
            FRAME_TYPE_UNDEFINED: Expression.Window.WindowFrame._FrameType.ValueType  # 0
            FRAME_TYPE_ROW: Expression.Window.WindowFrame._FrameType.ValueType  # 1
            """RowFrame treats rows in a partition individually."""
            FRAME_TYPE_RANGE: Expression.Window.WindowFrame._FrameType.ValueType  # 2
            """RangeFrame treats rows in a partition as groups of peers.
            All rows having the same 'ORDER BY' ordering are considered as peers.
            """

        class FrameType(_FrameType, metaclass=_FrameTypeEnumTypeWrapper): ...

        FRAME_TYPE_UNDEFINED: Expression.Window.WindowFrame.FrameType.ValueType  # 0
        FRAME_TYPE_ROW: Expression.Window.WindowFrame.FrameType.ValueType  # 1
        """RowFrame treats rows in a partition individually."""
        FRAME_TYPE_RANGE: Expression.Window.WindowFrame.FrameType.ValueType  # 2
        """RangeFrame treats rows in a partition as groups of peers.
        All rows having the same 'ORDER BY' ordering are considered as peers.
        """

        class FrameBoundary(google.protobuf.message.Message):
            """One bound of a window frame; the `boundary` oneof selects
            `current_row`, `unbounded`, or an expression `value`.
            """

            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            CURRENT_ROW_FIELD_NUMBER: builtins.int
            UNBOUNDED_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            current_row: builtins.bool
            """CURRENT ROW boundary"""
            unbounded: builtins.bool
            """UNBOUNDED boundary.
            For lower bound, it will be converted to 'UnboundedPreceding'.
            for upper bound, it will be converted to 'UnboundedFollowing'.
            """
            @property
            def value(self) -> global___Expression:
                """This is an expression for future proofing. We are expecting literals on the server side."""
            def __init__(
                self,
                *,
                current_row: builtins.bool = ...,
                unbounded: builtins.bool = ...,
                value: global___Expression | None = ...,
            ) -> None: ...
            def HasField(
                self,
                field_name: typing_extensions.Literal[
                    "boundary",
                    b"boundary",
                    "current_row",
                    b"current_row",
                    "unbounded",
                    b"unbounded",
                    "value",
                    b"value",
                ],
            ) -> builtins.bool: ...
            def ClearField(
                self,
                field_name: typing_extensions.Literal[
                    "boundary",
                    b"boundary",
                    "current_row",
                    b"current_row",
                    "unbounded",
                    b"unbounded",
                    "value",
                    b"value",
                ],
            ) -> None: ...
            def WhichOneof(
                self, oneof_group: typing_extensions.Literal["boundary", b"boundary"]
            ) -> typing_extensions.Literal["current_row", "unbounded", "value"] | None: ...

        FRAME_TYPE_FIELD_NUMBER: builtins.int
        LOWER_FIELD_NUMBER: builtins.int
        UPPER_FIELD_NUMBER: builtins.int
        frame_type: global___Expression.Window.WindowFrame.FrameType.ValueType
        """(Required) The type of the frame."""
        @property
        def lower(self) -> global___Expression.Window.WindowFrame.FrameBoundary:
            """(Required) The lower bound of the frame."""
        @property
        def upper(self) -> global___Expression.Window.WindowFrame.FrameBoundary:
            """(Required) The upper bound of the frame."""
        def __init__(
            self,
            *,
            frame_type: global___Expression.Window.WindowFrame.FrameType.ValueType = ...,
            lower: global___Expression.Window.WindowFrame.FrameBoundary | None = ...,
            upper: global___Expression.Window.WindowFrame.FrameBoundary | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["lower", b"lower", "upper", b"upper"]
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "frame_type", b"frame_type", "lower", b"lower", "upper", b"upper"
            ],
        ) -> None: ...

    WINDOW_FUNCTION_FIELD_NUMBER: builtins.int
    PARTITION_SPEC_FIELD_NUMBER: builtins.int
    ORDER_SPEC_FIELD_NUMBER: builtins.int
    FRAME_SPEC_FIELD_NUMBER: builtins.int
    @property
    def window_function(self) -> global___Expression:
        """(Required) The window function."""
    @property
    def partition_spec(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___Expression
    ]:
        """(Optional) The way that input rows are partitioned."""
    @property
    def order_spec(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___Expression.SortOrder
    ]:
        """(Optional) Ordering of rows in a partition."""
    @property
    def frame_spec(self) -> global___Expression.Window.WindowFrame:
        """(Optional) Window frame in a partition.
        If not set, it will be treated as 'UnspecifiedFrame'.
        """
    def __init__(
        self,
        *,
        window_function: global___Expression | None = ...,
        partition_spec: collections.abc.Iterable[global___Expression] | None = ...,
        order_spec: collections.abc.Iterable[global___Expression.SortOrder] | None = ...,
        frame_spec: global___Expression.Window.WindowFrame | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "frame_spec", b"frame_spec", "window_function", b"window_function"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "frame_spec",
            b"frame_spec",
            "order_spec",
            b"order_spec",
            "partition_spec",
            b"partition_spec",
            "window_function",
            b"window_function",
        ],
    ) -> None: ...
class SortOrder(google.protobuf.message.Message):
    """SortOrder is used to specify the data ordering, it is normally used in Sort and Window.
    It is an unevaluable expression and cannot be evaluated, so can not be used in Projection.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class _SortDirection:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _SortDirectionEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
            Expression.SortOrder._SortDirection.ValueType
        ],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        SORT_DIRECTION_UNSPECIFIED: Expression.SortOrder._SortDirection.ValueType  # 0
        SORT_DIRECTION_ASCENDING: Expression.SortOrder._SortDirection.ValueType  # 1
        SORT_DIRECTION_DESCENDING: Expression.SortOrder._SortDirection.ValueType  # 2

    class SortDirection(_SortDirection, metaclass=_SortDirectionEnumTypeWrapper): ...

    SORT_DIRECTION_UNSPECIFIED: Expression.SortOrder.SortDirection.ValueType  # 0
    SORT_DIRECTION_ASCENDING: Expression.SortOrder.SortDirection.ValueType  # 1
    SORT_DIRECTION_DESCENDING: Expression.SortOrder.SortDirection.ValueType  # 2

    class _NullOrdering:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _NullOrderingEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
            Expression.SortOrder._NullOrdering.ValueType
        ],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        SORT_NULLS_UNSPECIFIED: Expression.SortOrder._NullOrdering.ValueType  # 0
        SORT_NULLS_FIRST: Expression.SortOrder._NullOrdering.ValueType  # 1
        SORT_NULLS_LAST: Expression.SortOrder._NullOrdering.ValueType  # 2

    class NullOrdering(_NullOrdering, metaclass=_NullOrderingEnumTypeWrapper): ...

    SORT_NULLS_UNSPECIFIED: Expression.SortOrder.NullOrdering.ValueType  # 0
    SORT_NULLS_FIRST: Expression.SortOrder.NullOrdering.ValueType  # 1
    SORT_NULLS_LAST: Expression.SortOrder.NullOrdering.ValueType  # 2

    CHILD_FIELD_NUMBER: builtins.int
    DIRECTION_FIELD_NUMBER: builtins.int
    NULL_ORDERING_FIELD_NUMBER: builtins.int
    @property
    def child(self) -> global___Expression:
        """(Required) The expression to be sorted."""
    direction: global___Expression.SortOrder.SortDirection.ValueType
    """(Required) The sort direction, should be ASCENDING or DESCENDING."""
    null_ordering: global___Expression.SortOrder.NullOrdering.ValueType
    """(Required) How to deal with NULLs, should be NULLS_FIRST or NULLS_LAST."""
    def __init__(
        self,
        *,
        child: global___Expression | None = ...,
        direction: global___Expression.SortOrder.SortDirection.ValueType = ...,
        null_ordering: global___Expression.SortOrder.NullOrdering.ValueType = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["child", b"child"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "child", b"child", "direction", b"direction", "null_ordering", b"null_ordering"
        ],
    ) -> None: ...
class Cast(google.protobuf.message.Message):
    """Cast `expr` to a target data type; the `cast_to_type` oneof selects
    either a structured `type` or a `type_str` parsed by the Catalyst parser.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    EXPR_FIELD_NUMBER: builtins.int
    TYPE_FIELD_NUMBER: builtins.int
    TYPE_STR_FIELD_NUMBER: builtins.int
    @property
    def expr(self) -> global___Expression:
        """(Required) the expression to be casted."""
    @property
    def type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
    type_str: builtins.str
    """If this is set, Server will use Catalyst parser to parse this string to DataType."""
    def __init__(
        self,
        *,
        expr: global___Expression | None = ...,
        type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
        type_str: builtins.str = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "cast_to_type",
            b"cast_to_type",
            "expr",
            b"expr",
            "type",
            b"type",
            "type_str",
            b"type_str",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "cast_to_type",
            b"cast_to_type",
            "expr",
            b"expr",
            "type",
            b"type",
            "type_str",
            b"type_str",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["cast_to_type", b"cast_to_type"]
    ) -> typing_extensions.Literal["type", "type_str"] | None: ...
class Literal(google.protobuf.message.Message):
    """A literal (constant) value; the `literal_type` oneof selects exactly one
    of the typed fields below (scalar, decimal, interval, array, map or struct).
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class Decimal(google.protobuf.message.Message):
        """Arbitrary-precision decimal carried as its string representation
        plus optional precision and scale.
        """

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        VALUE_FIELD_NUMBER: builtins.int
        PRECISION_FIELD_NUMBER: builtins.int
        SCALE_FIELD_NUMBER: builtins.int
        value: builtins.str
        """the string representation."""
        precision: builtins.int
        """The maximum number of digits allowed in the value.
        the maximum precision is 38.
        """
        scale: builtins.int
        """declared scale of decimal literal"""
        def __init__(
            self,
            *,
            value: builtins.str = ...,
            precision: builtins.int | None = ...,
            scale: builtins.int | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "_precision",
                b"_precision",
                "_scale",
                b"_scale",
                "precision",
                b"precision",
                "scale",
                b"scale",
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "_precision",
                b"_precision",
                "_scale",
                b"_scale",
                "precision",
                b"precision",
                "scale",
                b"scale",
                "value",
                b"value",
            ],
        ) -> None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_precision", b"_precision"]
        ) -> typing_extensions.Literal["precision"] | None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_scale", b"_scale"]
        ) -> typing_extensions.Literal["scale"] | None: ...

    class CalendarInterval(google.protobuf.message.Message):
        """Calendar interval expressed as months, days and microseconds."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        MONTHS_FIELD_NUMBER: builtins.int
        DAYS_FIELD_NUMBER: builtins.int
        MICROSECONDS_FIELD_NUMBER: builtins.int
        months: builtins.int
        days: builtins.int
        microseconds: builtins.int
        def __init__(
            self,
            *,
            months: builtins.int = ...,
            days: builtins.int = ...,
            microseconds: builtins.int = ...,
        ) -> None: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "days", b"days", "microseconds", b"microseconds", "months", b"months"
            ],
        ) -> None: ...

    class Array(google.protobuf.message.Message):
        """Array literal: an element type and the list of element literals."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        ELEMENT_TYPE_FIELD_NUMBER: builtins.int
        ELEMENTS_FIELD_NUMBER: builtins.int
        @property
        def element_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
        @property
        def elements(
            self,
        ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
            global___Expression.Literal
        ]: ...
        def __init__(
            self,
            *,
            element_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
            elements: collections.abc.Iterable[global___Expression.Literal] | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["element_type", b"element_type"]
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "element_type", b"element_type", "elements", b"elements"
            ],
        ) -> None: ...

    class Map(google.protobuf.message.Message):
        """Map literal: key/value types plus parallel lists of key and value
        literals.
        """

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        KEY_TYPE_FIELD_NUMBER: builtins.int
        VALUE_TYPE_FIELD_NUMBER: builtins.int
        KEYS_FIELD_NUMBER: builtins.int
        VALUES_FIELD_NUMBER: builtins.int
        @property
        def key_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
        @property
        def value_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
        @property
        def keys(
            self,
        ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
            global___Expression.Literal
        ]: ...
        @property
        def values(
            self,
        ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
            global___Expression.Literal
        ]: ...
        def __init__(
            self,
            *,
            key_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
            value_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
            keys: collections.abc.Iterable[global___Expression.Literal] | None = ...,
            values: collections.abc.Iterable[global___Expression.Literal] | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "key_type", b"key_type", "value_type", b"value_type"
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "key_type",
                b"key_type",
                "keys",
                b"keys",
                "value_type",
                b"value_type",
                "values",
                b"values",
            ],
        ) -> None: ...

    class Struct(google.protobuf.message.Message):
        """Struct literal: a struct type and the list of field-value literals."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        STRUCT_TYPE_FIELD_NUMBER: builtins.int
        ELEMENTS_FIELD_NUMBER: builtins.int
        @property
        def struct_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
        @property
        def elements(
            self,
        ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
            global___Expression.Literal
        ]: ...
        def __init__(
            self,
            *,
            struct_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
            elements: collections.abc.Iterable[global___Expression.Literal] | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["struct_type", b"struct_type"]
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "elements", b"elements", "struct_type", b"struct_type"
            ],
        ) -> None: ...

    NULL_FIELD_NUMBER: builtins.int
    BINARY_FIELD_NUMBER: builtins.int
    BOOLEAN_FIELD_NUMBER: builtins.int
    BYTE_FIELD_NUMBER: builtins.int
    SHORT_FIELD_NUMBER: builtins.int
    INTEGER_FIELD_NUMBER: builtins.int
    LONG_FIELD_NUMBER: builtins.int
    FLOAT_FIELD_NUMBER: builtins.int
    DOUBLE_FIELD_NUMBER: builtins.int
    DECIMAL_FIELD_NUMBER: builtins.int
    STRING_FIELD_NUMBER: builtins.int
    DATE_FIELD_NUMBER: builtins.int
    TIMESTAMP_FIELD_NUMBER: builtins.int
    TIMESTAMP_NTZ_FIELD_NUMBER: builtins.int
    CALENDAR_INTERVAL_FIELD_NUMBER: builtins.int
    YEAR_MONTH_INTERVAL_FIELD_NUMBER: builtins.int
    DAY_TIME_INTERVAL_FIELD_NUMBER: builtins.int
    ARRAY_FIELD_NUMBER: builtins.int
    MAP_FIELD_NUMBER: builtins.int
    STRUCT_FIELD_NUMBER: builtins.int
    @property
    def null(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
    binary: builtins.bytes
    boolean: builtins.bool
    byte: builtins.int
    short: builtins.int
    integer: builtins.int
    long: builtins.int
    float: builtins.float
    double: builtins.float
    @property
    def decimal(self) -> global___Expression.Literal.Decimal: ...
    string: builtins.str
    date: builtins.int
    """Date in units of days since the UNIX epoch."""
    timestamp: builtins.int
    """Timestamp in units of microseconds since the UNIX epoch."""
    timestamp_ntz: builtins.int
    """Timestamp in units of microseconds since the UNIX epoch (without timezone information)."""
    @property
    def calendar_interval(self) -> global___Expression.Literal.CalendarInterval: ...
    year_month_interval: builtins.int
    day_time_interval: builtins.int
    @property
    def array(self) -> global___Expression.Literal.Array: ...
    @property
    def map(self) -> global___Expression.Literal.Map: ...
    @property
    def struct(self) -> global___Expression.Literal.Struct: ...
    def __init__(
        self,
        *,
        null: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
        binary: builtins.bytes = ...,
        boolean: builtins.bool = ...,
        byte: builtins.int = ...,
        short: builtins.int = ...,
        integer: builtins.int = ...,
        long: builtins.int = ...,
        float: builtins.float = ...,
        double: builtins.float = ...,
        decimal: global___Expression.Literal.Decimal | None = ...,
        string: builtins.str = ...,
        date: builtins.int = ...,
        timestamp: builtins.int = ...,
        timestamp_ntz: builtins.int = ...,
        calendar_interval: global___Expression.Literal.CalendarInterval | None = ...,
        year_month_interval: builtins.int = ...,
        day_time_interval: builtins.int = ...,
        array: global___Expression.Literal.Array | None = ...,
        map: global___Expression.Literal.Map | None = ...,
        struct: global___Expression.Literal.Struct | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "array",
            b"array",
            "binary",
            b"binary",
            "boolean",
            b"boolean",
            "byte",
            b"byte",
            "calendar_interval",
            b"calendar_interval",
            "date",
            b"date",
            "day_time_interval",
            b"day_time_interval",
            "decimal",
            b"decimal",
            "double",
            b"double",
            "float",
            b"float",
            "integer",
            b"integer",
            "literal_type",
            b"literal_type",
            "long",
            b"long",
            "map",
            b"map",
            "null",
            b"null",
            "short",
            b"short",
            "string",
            b"string",
            "struct",
            b"struct",
            "timestamp",
            b"timestamp",
            "timestamp_ntz",
            b"timestamp_ntz",
            "year_month_interval",
            b"year_month_interval",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "array",
            b"array",
            "binary",
            b"binary",
            "boolean",
            b"boolean",
            "byte",
            b"byte",
            "calendar_interval",
            b"calendar_interval",
            "date",
            b"date",
            "day_time_interval",
            b"day_time_interval",
            "decimal",
            b"decimal",
            "double",
            b"double",
            "float",
            b"float",
            "integer",
            b"integer",
            "literal_type",
            b"literal_type",
            "long",
            b"long",
            "map",
            b"map",
            "null",
            b"null",
            "short",
            b"short",
            "string",
            b"string",
            "struct",
            b"struct",
            "timestamp",
            b"timestamp",
            "timestamp_ntz",
            b"timestamp_ntz",
            "year_month_interval",
            b"year_month_interval",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["literal_type", b"literal_type"]
    ) -> (
        typing_extensions.Literal[
            "null",
            "binary",
            "boolean",
            "byte",
            "short",
            "integer",
            "long",
            "float",
            "double",
            "decimal",
            "string",
            "date",
            "timestamp",
            "timestamp_ntz",
            "calendar_interval",
            "year_month_interval",
            "day_time_interval",
            "array",
            "map",
            "struct",
        ]
        | None
    ): ...
class UnresolvedAttribute(google.protobuf.message.Message):
    """An unresolved attribute that is not explicitly bound to a specific column, but the column
    is resolved during analysis by name.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    UNPARSED_IDENTIFIER_FIELD_NUMBER: builtins.int
    PLAN_ID_FIELD_NUMBER: builtins.int
    IS_METADATA_COLUMN_FIELD_NUMBER: builtins.int
    unparsed_identifier: builtins.str
    """(Required) An identifier that will be parsed by Catalyst parser. This should follow the
    Spark SQL identifier syntax.
    """
    plan_id: builtins.int
    """(Optional) The id of corresponding connect plan."""
    is_metadata_column: builtins.bool
    """(Optional) The requested column is a metadata column."""
    def __init__(
        self,
        *,
        unparsed_identifier: builtins.str = ...,
        plan_id: builtins.int | None = ...,
        is_metadata_column: builtins.bool | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_is_metadata_column",
            b"_is_metadata_column",
            "_plan_id",
            b"_plan_id",
            "is_metadata_column",
            b"is_metadata_column",
            "plan_id",
            b"plan_id",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_is_metadata_column",
            b"_is_metadata_column",
            "_plan_id",
            b"_plan_id",
            "is_metadata_column",
            b"is_metadata_column",
            "plan_id",
            b"plan_id",
            "unparsed_identifier",
            b"unparsed_identifier",
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self,
        oneof_group: typing_extensions.Literal["_is_metadata_column", b"_is_metadata_column"],
    ) -> typing_extensions.Literal["is_metadata_column"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_plan_id", b"_plan_id"]
    ) -> typing_extensions.Literal["plan_id"] | None: ...
class UnresolvedFunction(google.protobuf.message.Message):
    """An unresolved function is not explicitly bound to one explicit function, but the function
    is resolved during analysis following Sparks name resolution rules.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    FUNCTION_NAME_FIELD_NUMBER: builtins.int
    ARGUMENTS_FIELD_NUMBER: builtins.int
    IS_DISTINCT_FIELD_NUMBER: builtins.int
    IS_USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int
    function_name: builtins.str
    """(Required) name (or unparsed name for user defined function) for the unresolved function."""
    @property
    def arguments(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___Expression
    ]:
        """(Optional) Function arguments. Empty arguments are allowed."""
    is_distinct: builtins.bool
    """(Required) Indicate if this function should be applied on distinct values."""
    is_user_defined_function: builtins.bool
    """(Required) Indicate if this is a user defined function.
    When it is not a user defined function, Connect will use the function name directly.
    When it is a user defined function, Connect will parse the function name first.
    """
    def __init__(
        self,
        *,
        function_name: builtins.str = ...,
        arguments: collections.abc.Iterable[global___Expression] | None = ...,
        is_distinct: builtins.bool = ...,
        is_user_defined_function: builtins.bool = ...,
    ) -> None: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "arguments",
            b"arguments",
            "function_name",
            b"function_name",
            "is_distinct",
            b"is_distinct",
            "is_user_defined_function",
            b"is_user_defined_function",
        ],
    ) -> None: ...
class ExpressionString(google.protobuf.message.Message):
    """Expression as string."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    EXPRESSION_FIELD_NUMBER: builtins.int
    expression: builtins.str
    """(Required) A SQL expression that will be parsed by Catalyst parser."""
    def __init__(
        self,
        *,
        expression: builtins.str = ...,
    ) -> None: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["expression", b"expression"]
    ) -> None: ...
class UnresolvedStar(google.protobuf.message.Message):
    """UnresolvedStar is used to expand all the fields of a relation or struct."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    UNPARSED_TARGET_FIELD_NUMBER: builtins.int
    unparsed_target: builtins.str
    """(Optional) The target of the expansion.
    If set, it should end with '.*' and will be parsed by 'parseAttributeName'
    in the server side.
    """
    def __init__(
        self,
        *,
        unparsed_target: builtins.str | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_unparsed_target", b"_unparsed_target", "unparsed_target", b"unparsed_target"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_unparsed_target", b"_unparsed_target", "unparsed_target", b"unparsed_target"
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_unparsed_target", b"_unparsed_target"]
    ) -> typing_extensions.Literal["unparsed_target"] | None: ...
class UnresolvedRegex(google.protobuf.message.Message):
    """Represents all of the input attributes to a given relational operator, for example in
    "SELECT `(id)?+.+` FROM ...".
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    COL_NAME_FIELD_NUMBER: builtins.int
    PLAN_ID_FIELD_NUMBER: builtins.int
    col_name: builtins.str
    """(Required) The column name used to extract column with regex."""
    plan_id: builtins.int
    """(Optional) The id of corresponding connect plan."""
    def __init__(
        self,
        *,
        col_name: builtins.str = ...,
        plan_id: builtins.int | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal["_plan_id", b"_plan_id", "plan_id", b"plan_id"],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_plan_id", b"_plan_id", "col_name", b"col_name", "plan_id", b"plan_id"
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_plan_id", b"_plan_id"]
    ) -> typing_extensions.Literal["plan_id"] | None: ...
class UnresolvedExtractValue(google.protobuf.message.Message):
    """Extracts a value or values from an Expression"""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    CHILD_FIELD_NUMBER: builtins.int
    EXTRACTION_FIELD_NUMBER: builtins.int
    @property
    def child(self) -> global___Expression:
        """(Required) The expression to extract value from, can be
        Map, Array, Struct or array of Structs.
        """
    @property
    def extraction(self) -> global___Expression:
        """(Required) The expression to describe the extraction, can be
        key of Map, index of Array, field name of Struct.
        """
    def __init__(
        self,
        *,
        child: global___Expression | None = ...,
        extraction: global___Expression | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal["child", b"child", "extraction", b"extraction"],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal["child", b"child", "extraction", b"extraction"],
    ) -> None: ...
class UpdateFields(google.protobuf.message.Message):
    """Add, replace or drop a field of `StructType` expression by name."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    STRUCT_EXPRESSION_FIELD_NUMBER: builtins.int
    FIELD_NAME_FIELD_NUMBER: builtins.int
    VALUE_EXPRESSION_FIELD_NUMBER: builtins.int
    @property
    def struct_expression(self) -> global___Expression:
        """(Required) The struct expression."""
    field_name: builtins.str
    """(Required) The field name."""
    @property
    def value_expression(self) -> global___Expression:
        """(Optional) The expression to add or replace.
        When not set, it means this field will be dropped.
        """
    def __init__(
        self,
        *,
        struct_expression: global___Expression | None = ...,
        field_name: builtins.str = ...,
        value_expression: global___Expression | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "struct_expression", b"struct_expression", "value_expression", b"value_expression"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "field_name",
            b"field_name",
            "struct_expression",
            b"struct_expression",
            "value_expression",
            b"value_expression",
        ],
    ) -> None: ...
class Alias(google.protobuf.message.Message):
    """Attaches an alias (one or more name parts, plus optional JSON metadata)
    to an expression.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    EXPR_FIELD_NUMBER: builtins.int
    NAME_FIELD_NUMBER: builtins.int
    METADATA_FIELD_NUMBER: builtins.int
    @property
    def expr(self) -> global___Expression:
        """(Required) The expression that alias will be added on."""
    @property
    def name(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Required) a list of name parts for the alias.
        Scalar columns only has one name that presents.
        """
    metadata: builtins.str
    """(Optional) Alias metadata expressed as a JSON map."""
    def __init__(
        self,
        *,
        expr: global___Expression | None = ...,
        name: collections.abc.Iterable[builtins.str] | None = ...,
        metadata: builtins.str | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_metadata", b"_metadata", "expr", b"expr", "metadata", b"metadata"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_metadata", b"_metadata", "expr", b"expr", "metadata", b"metadata", "name", b"name"
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_metadata", b"_metadata"]
    ) -> typing_extensions.Literal["metadata"] | None: ...
class LambdaFunction(google.protobuf.message.Message):
    """A lambda function: a body expression plus its named variable arguments."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    FUNCTION_FIELD_NUMBER: builtins.int
    ARGUMENTS_FIELD_NUMBER: builtins.int
    @property
    def function(self) -> global___Expression:
        """(Required) The lambda function.
        The function body should use 'UnresolvedAttribute' as arguments, the sever side will
        replace 'UnresolvedAttribute' with 'UnresolvedNamedLambdaVariable'.
        """
    @property
    def arguments(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___Expression.UnresolvedNamedLambdaVariable
    ]:
        """(Required) Function variables. Must contains 1 ~ 3 variables."""
    def __init__(
        self,
        *,
        function: global___Expression | None = ...,
        arguments: collections.abc.Iterable[global___Expression.UnresolvedNamedLambdaVariable]
        | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["function", b"function"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "arguments", b"arguments", "function", b"function"
        ],
    ) -> None: ...
class UnresolvedNamedLambdaVariable(google.protobuf.message.Message):
    """A named variable of a lambda function, identified by its name parts."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    NAME_PARTS_FIELD_NUMBER: builtins.int
    @property
    def name_parts(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Required) a list of name parts for the variable. Must not be empty."""
    def __init__(
        self,
        *,
        name_parts: collections.abc.Iterable[builtins.str] | None = ...,
    ) -> None: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["name_parts", b"name_parts"]
    ) -> None: ...
# Proto field numbers for each member of Expression's `expr_type` oneof.
LITERAL_FIELD_NUMBER: builtins.int
UNRESOLVED_ATTRIBUTE_FIELD_NUMBER: builtins.int
UNRESOLVED_FUNCTION_FIELD_NUMBER: builtins.int
EXPRESSION_STRING_FIELD_NUMBER: builtins.int
UNRESOLVED_STAR_FIELD_NUMBER: builtins.int
ALIAS_FIELD_NUMBER: builtins.int
CAST_FIELD_NUMBER: builtins.int
UNRESOLVED_REGEX_FIELD_NUMBER: builtins.int
SORT_ORDER_FIELD_NUMBER: builtins.int
LAMBDA_FUNCTION_FIELD_NUMBER: builtins.int
WINDOW_FIELD_NUMBER: builtins.int
UNRESOLVED_EXTRACT_VALUE_FIELD_NUMBER: builtins.int
UPDATE_FIELDS_FIELD_NUMBER: builtins.int
UNRESOLVED_NAMED_LAMBDA_VARIABLE_FIELD_NUMBER: builtins.int
COMMON_INLINE_USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int
CALL_FUNCTION_FIELD_NUMBER: builtins.int
NAMED_ARGUMENT_EXPRESSION_FIELD_NUMBER: builtins.int
EXTENSION_FIELD_NUMBER: builtins.int
# Read-only typed accessors, one per `expr_type` oneof variant.
@property
def literal(self) -> global___Expression.Literal: ...
@property
def unresolved_attribute(self) -> global___Expression.UnresolvedAttribute: ...
@property
def unresolved_function(self) -> global___Expression.UnresolvedFunction: ...
@property
def expression_string(self) -> global___Expression.ExpressionString: ...
@property
def unresolved_star(self) -> global___Expression.UnresolvedStar: ...
@property
def alias(self) -> global___Expression.Alias: ...
@property
def cast(self) -> global___Expression.Cast: ...
@property
def unresolved_regex(self) -> global___Expression.UnresolvedRegex: ...
@property
def sort_order(self) -> global___Expression.SortOrder: ...
@property
def lambda_function(self) -> global___Expression.LambdaFunction: ...
@property
def window(self) -> global___Expression.Window: ...
@property
def unresolved_extract_value(self) -> global___Expression.UnresolvedExtractValue: ...
@property
def update_fields(self) -> global___Expression.UpdateFields: ...
@property
def unresolved_named_lambda_variable(
self,
) -> global___Expression.UnresolvedNamedLambdaVariable: ...
@property
def common_inline_user_defined_function(self) -> global___CommonInlineUserDefinedFunction: ...
@property
def call_function(self) -> global___CallFunction: ...
@property
def named_argument_expression(self) -> global___NamedArgumentExpression: ...
@property
def extension(self) -> google.protobuf.any_pb2.Any:
"""This field is used to mark extensions to the protocol. When plugins generate arbitrary
relations they can add them here. During the planning the correct resolution is done.
"""
# Constructor stub: every `expr_type` oneof variant (plus `extension`)
# is an optional keyword-only argument; setting more than one oneof
# member at runtime keeps only the last one set.
def __init__(
self,
*,
literal: global___Expression.Literal | None = ...,
unresolved_attribute: global___Expression.UnresolvedAttribute | None = ...,
unresolved_function: global___Expression.UnresolvedFunction | None = ...,
expression_string: global___Expression.ExpressionString | None = ...,
unresolved_star: global___Expression.UnresolvedStar | None = ...,
alias: global___Expression.Alias | None = ...,
cast: global___Expression.Cast | None = ...,
unresolved_regex: global___Expression.UnresolvedRegex | None = ...,
sort_order: global___Expression.SortOrder | None = ...,
lambda_function: global___Expression.LambdaFunction | None = ...,
window: global___Expression.Window | None = ...,
unresolved_extract_value: global___Expression.UnresolvedExtractValue | None = ...,
update_fields: global___Expression.UpdateFields | None = ...,
unresolved_named_lambda_variable: global___Expression.UnresolvedNamedLambdaVariable
| None = ...,
common_inline_user_defined_function: global___CommonInlineUserDefinedFunction | None = ...,
call_function: global___CallFunction | None = ...,
named_argument_expression: global___NamedArgumentExpression | None = ...,
extension: google.protobuf.any_pb2.Any | None = ...,
) -> None: ...
# HasField accepts each message-typed field name (str or bytes) plus
# the oneof group name "expr_type" itself.
def HasField(
self,
field_name: typing_extensions.Literal[
"alias",
b"alias",
"call_function",
b"call_function",
"cast",
b"cast",
"common_inline_user_defined_function",
b"common_inline_user_defined_function",
"expr_type",
b"expr_type",
"expression_string",
b"expression_string",
"extension",
b"extension",
"lambda_function",
b"lambda_function",
"literal",
b"literal",
"named_argument_expression",
b"named_argument_expression",
"sort_order",
b"sort_order",
"unresolved_attribute",
b"unresolved_attribute",
"unresolved_extract_value",
b"unresolved_extract_value",
"unresolved_function",
b"unresolved_function",
"unresolved_named_lambda_variable",
b"unresolved_named_lambda_variable",
"unresolved_regex",
b"unresolved_regex",
"unresolved_star",
b"unresolved_star",
"update_fields",
b"update_fields",
"window",
b"window",
],
) -> builtins.bool: ...
# ClearField accepts the same names as HasField; clearing "expr_type"
# clears whichever oneof member is currently set.
def ClearField(
self,
field_name: typing_extensions.Literal[
"alias",
b"alias",
"call_function",
b"call_function",
"cast",
b"cast",
"common_inline_user_defined_function",
b"common_inline_user_defined_function",
"expr_type",
b"expr_type",
"expression_string",
b"expression_string",
"extension",
b"extension",
"lambda_function",
b"lambda_function",
"literal",
b"literal",
"named_argument_expression",
b"named_argument_expression",
"sort_order",
b"sort_order",
"unresolved_attribute",
b"unresolved_attribute",
"unresolved_extract_value",
b"unresolved_extract_value",
"unresolved_function",
b"unresolved_function",
"unresolved_named_lambda_variable",
b"unresolved_named_lambda_variable",
"unresolved_regex",
b"unresolved_regex",
"unresolved_star",
b"unresolved_star",
"update_fields",
b"update_fields",
"window",
b"window",
],
) -> None: ...
# WhichOneof returns the name of the `expr_type` member currently set,
# or None when no member is set.
def WhichOneof(
self, oneof_group: typing_extensions.Literal["expr_type", b"expr_type"]
) -> (
typing_extensions.Literal[
"literal",
"unresolved_attribute",
"unresolved_function",
"expression_string",
"unresolved_star",
"alias",
"cast",
"unresolved_regex",
"sort_order",
"lambda_function",
"window",
"unresolved_extract_value",
"update_fields",
"unresolved_named_lambda_variable",
"common_inline_user_defined_function",
"call_function",
"named_argument_expression",
"extension",
]
| None
): ...
# Module-level alias used by the generated code for forward references.
global___Expression = Expression
# Stub for the CommonInlineUserDefinedFunction message: an inline UDF
# whose implementation is one of python_udf / scalar_scala_udf / java_udf
# (the "function" oneof).
class CommonInlineUserDefinedFunction(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
# Proto field numbers.
FUNCTION_NAME_FIELD_NUMBER: builtins.int
DETERMINISTIC_FIELD_NUMBER: builtins.int
ARGUMENTS_FIELD_NUMBER: builtins.int
PYTHON_UDF_FIELD_NUMBER: builtins.int
SCALAR_SCALA_UDF_FIELD_NUMBER: builtins.int
JAVA_UDF_FIELD_NUMBER: builtins.int
function_name: builtins.str
"""(Required) Name of the user-defined function."""
deterministic: builtins.bool
"""(Optional) Indicate if the user-defined function is deterministic."""
@property
def arguments(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]:
"""(Optional) Function arguments. Empty arguments are allowed."""
@property
def python_udf(self) -> global___PythonUDF: ...
@property
def scalar_scala_udf(self) -> global___ScalarScalaUDF: ...
@property
def java_udf(self) -> global___JavaUDF: ...
def __init__(
self,
*,
function_name: builtins.str = ...,
deterministic: builtins.bool = ...,
arguments: collections.abc.Iterable[global___Expression] | None = ...,
python_udf: global___PythonUDF | None = ...,
scalar_scala_udf: global___ScalarScalaUDF | None = ...,
java_udf: global___JavaUDF | None = ...,
) -> None: ...
# HasField covers the message-typed oneof members and the "function"
# oneof group name itself.
def HasField(
self,
field_name: typing_extensions.Literal[
"function",
b"function",
"java_udf",
b"java_udf",
"python_udf",
b"python_udf",
"scalar_scala_udf",
b"scalar_scala_udf",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"arguments",
b"arguments",
"deterministic",
b"deterministic",
"function",
b"function",
"function_name",
b"function_name",
"java_udf",
b"java_udf",
"python_udf",
b"python_udf",
"scalar_scala_udf",
b"scalar_scala_udf",
],
) -> None: ...
# Returns which UDF implementation is set in the "function" oneof, if any.
def WhichOneof(
self, oneof_group: typing_extensions.Literal["function", b"function"]
) -> typing_extensions.Literal["python_udf", "scalar_scala_udf", "java_udf"] | None: ...
# Module-level alias used by the generated code for forward references.
global___CommonInlineUserDefinedFunction = CommonInlineUserDefinedFunction
# Stub for the PythonUDF message: a pickled Python UDF payload with its
# declared output type, eval type, and client Python version.
class PythonUDF(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
# Proto field numbers.
OUTPUT_TYPE_FIELD_NUMBER: builtins.int
EVAL_TYPE_FIELD_NUMBER: builtins.int
COMMAND_FIELD_NUMBER: builtins.int
PYTHON_VER_FIELD_NUMBER: builtins.int
@property
def output_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
"""(Required) Output type of the Python UDF"""
eval_type: builtins.int
"""(Required) EvalType of the Python UDF"""
command: builtins.bytes
"""(Required) The encoded commands of the Python UDF"""
python_ver: builtins.str
"""(Required) Python version being used in the client."""
def __init__(
self,
*,
output_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
eval_type: builtins.int = ...,
command: builtins.bytes = ...,
python_ver: builtins.str = ...,
) -> None: ...
# Only the message-typed `output_type` field supports HasField.
def HasField(
self, field_name: typing_extensions.Literal["output_type", b"output_type"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"command",
b"command",
"eval_type",
b"eval_type",
"output_type",
b"output_type",
"python_ver",
b"python_ver",
],
) -> None: ...
# Module-level alias used by the generated code for forward references.
global___PythonUDF = PythonUDF
# Stub for the ScalarScalaUDF message: a serialized JVM UDF payload with
# its input/output DataTypes and nullability.
class ScalarScalaUDF(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
# Proto field numbers. NOTE: field names below are camelCase as declared
# in the .proto, hence the non-PEP8 attribute names.
PAYLOAD_FIELD_NUMBER: builtins.int
INPUTTYPES_FIELD_NUMBER: builtins.int
OUTPUTTYPE_FIELD_NUMBER: builtins.int
NULLABLE_FIELD_NUMBER: builtins.int
payload: builtins.bytes
"""(Required) Serialized JVM object containing UDF definition, input encoders and output encoder"""
@property
def inputTypes(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
pyspark.sql.connect.proto.types_pb2.DataType
]:
"""(Optional) Input type(s) of the UDF"""
@property
def outputType(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
"""(Required) Output type of the UDF"""
nullable: builtins.bool
"""(Required) True if the UDF can return null value"""
def __init__(
self,
*,
payload: builtins.bytes = ...,
inputTypes: collections.abc.Iterable[pyspark.sql.connect.proto.types_pb2.DataType]
| None = ...,
outputType: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
nullable: builtins.bool = ...,
) -> None: ...
# Only the singular message field `outputType` supports HasField.
def HasField(
self, field_name: typing_extensions.Literal["outputType", b"outputType"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"inputTypes",
b"inputTypes",
"nullable",
b"nullable",
"outputType",
b"outputType",
"payload",
b"payload",
],
) -> None: ...
# Module-level alias used by the generated code for forward references.
global___ScalarScalaUDF = ScalarScalaUDF
# Stub for the JavaUDF message: a Java UDF referenced by class name, with
# an optional output type (proto3 `optional`, hence the synthetic
# "_output_type" oneof below) and an aggregate flag.
class JavaUDF(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
# Proto field numbers.
CLASS_NAME_FIELD_NUMBER: builtins.int
OUTPUT_TYPE_FIELD_NUMBER: builtins.int
AGGREGATE_FIELD_NUMBER: builtins.int
class_name: builtins.str
"""(Required) Fully qualified name of Java class"""
@property
def output_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
"""(Optional) Output type of the Java UDF"""
aggregate: builtins.bool
"""(Required) Indicate if the Java user-defined function is an aggregate function"""
def __init__(
self,
*,
class_name: builtins.str = ...,
output_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
aggregate: builtins.bool = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_output_type", b"_output_type", "output_type", b"output_type"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_output_type",
b"_output_type",
"aggregate",
b"aggregate",
"class_name",
b"class_name",
"output_type",
b"output_type",
],
) -> None: ...
# Returns "output_type" when the optional field is set, else None.
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_output_type", b"_output_type"]
) -> typing_extensions.Literal["output_type"] | None: ...
# Module-level alias used by the generated code for forward references.
global___JavaUDF = JavaUDF
# Stub for the CallFunction message: an unparsed SQL function call with
# its argument expressions.
class CallFunction(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
# Proto field numbers.
FUNCTION_NAME_FIELD_NUMBER: builtins.int
ARGUMENTS_FIELD_NUMBER: builtins.int
function_name: builtins.str
"""(Required) Unparsed name of the SQL function."""
@property
def arguments(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]:
"""(Optional) Function arguments. Empty arguments are allowed."""
def __init__(
self,
*,
function_name: builtins.str = ...,
arguments: collections.abc.Iterable[global___Expression] | None = ...,
) -> None: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"arguments", b"arguments", "function_name", b"function_name"
],
) -> None: ...
# Module-level alias used by the generated code for forward references.
global___CallFunction = CallFunction
# Stub for the NamedArgumentExpression message: a key/value pair used to
# pass a named argument to a function.
class NamedArgumentExpression(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
# Proto field numbers.
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
"""(Required) The key of the named argument."""
@property
def value(self) -> global___Expression:
"""(Required) The value expression of the named argument."""
def __init__(
self,
*,
key: builtins.str = ...,
value: global___Expression | None = ...,
) -> None: ...
# Only the message-typed `value` field supports HasField.
def HasField(
self, field_name: typing_extensions.Literal["value", b"value"]
) -> builtins.bool: ...
def ClearField(
self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
) -> None: ...
# Module-level alias used by the generated code for forward references.
global___NamedArgumentExpression = NamedArgumentExpression