#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| """ |
| @generated by mypy-protobuf. Do not edit manually! |
| isort:skip_file |
| |
| Licensed to the Apache Software Foundation (ASF) under one or more |
| contributor license agreements. See the NOTICE file distributed with |
| this work for additional information regarding copyright ownership. |
| The ASF licenses this file to You under the Apache License, Version 2.0 |
| (the "License"); you may not use this file except in compliance with |
| the License. You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software |
| distributed under the License is distributed on an "AS IS" BASIS, |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and |
| limitations under the License. |
| """ |
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import pyspark.sql.connect.proto.expressions_pb2
import pyspark.sql.connect.proto.ml_common_pb2
import pyspark.sql.connect.proto.relations_pb2
import sys
import typing

if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class MlCommand(google.protobuf.message.Message):
    """Command for ML"""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class Fit(google.protobuf.message.Message):
        """Command for estimator.fit(dataset)"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        ESTIMATOR_FIELD_NUMBER: builtins.int
        PARAMS_FIELD_NUMBER: builtins.int
        DATASET_FIELD_NUMBER: builtins.int
        @property
        def estimator(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlOperator:
            """(Required) Estimator information (its type should be OPERATOR_TYPE_ESTIMATOR)"""
        @property
        def params(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlParams:
            """(Optional) Parameters of the Estimator"""
        @property
        def dataset(self) -> pyspark.sql.connect.proto.relations_pb2.Relation:
            """(Required) The training dataset"""
        def __init__(
            self,
            *,
            estimator: pyspark.sql.connect.proto.ml_common_pb2.MlOperator | None = ...,
            params: pyspark.sql.connect.proto.ml_common_pb2.MlParams | None = ...,
            dataset: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "dataset",
                b"dataset",
                "estimator",
                b"estimator",
                "params",
                b"params",
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "dataset",
                b"dataset",
                "estimator",
                b"estimator",
                "params",
                b"params",
            ],
        ) -> None: ...
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_params", b"_params"]
        ) -> typing_extensions.Literal["params"] | None: ...
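
    # A hedged construction sketch for `Fit`; assumes the enclosing module has
    # `pyspark.sql.connect.proto.ml_common_pb2` / `relations_pb2` imported under
    # those short names, and that `MlOperator` exposes a `name` field:
    #
    #   fit = MlCommand.Fit(
    #       estimator=ml_common_pb2.MlOperator(name="LogisticRegression"),
    #       dataset=relations_pb2.Relation(),  # relation payload elided
    #   )
    #   command = MlCommand(fit=fit)  # selects the "fit" variant of `command`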

    class Delete(google.protobuf.message.Message):
        """Command to delete cached objects, which can be models or
        summaries evaluated by a model.
        """

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        OBJ_REFS_FIELD_NUMBER: builtins.int
        EVICT_ONLY_FIELD_NUMBER: builtins.int
        @property
        def obj_refs(
            self,
        ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
            pyspark.sql.connect.proto.ml_common_pb2.ObjectRef
        ]: ...
        evict_only: builtins.bool
        """If `evict_only` is set to true, only evict the cached model from memory,
        but keep the offloaded model on the Spark driver's local disk.
        """
        def __init__(
            self,
            *,
            obj_refs: collections.abc.Iterable[pyspark.sql.connect.proto.ml_common_pb2.ObjectRef]
            | None = ...,
            evict_only: builtins.bool | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "_evict_only", b"_evict_only", "evict_only", b"evict_only"
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "_evict_only", b"_evict_only", "evict_only", b"evict_only", "obj_refs", b"obj_refs"
            ],
        ) -> None: ...
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_evict_only", b"_evict_only"]
        ) -> typing_extensions.Literal["evict_only"] | None: ...
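
    # A hedged sketch for `Delete`; assumes `ObjectRef` carries a string `id`
    # field, per ml_common_pb2:
    #
    #   delete = MlCommand.Delete(
    #       obj_refs=[ml_common_pb2.ObjectRef(id="model-1")],
    #       evict_only=True,  # evict from memory but keep the on-disk offload
    #   )
    #   assert delete.HasField("evict_only")  # the optional field is now set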

    class CleanCache(google.protobuf.message.Message):
        """Forcibly clean up all cached ML objects"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        def __init__(
            self,
        ) -> None: ...

    class GetCacheInfo(google.protobuf.message.Message):
        """Get information about all cached ML objects"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        def __init__(
            self,
        ) -> None: ...

    class Write(google.protobuf.message.Message):
        """Command to write an ML operator"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class OptionsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(
                self,
                *,
                key: builtins.str = ...,
                value: builtins.str = ...,
            ) -> None: ...
            def ClearField(
                self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
            ) -> None: ...

        OPERATOR_FIELD_NUMBER: builtins.int
        OBJ_REF_FIELD_NUMBER: builtins.int
        PARAMS_FIELD_NUMBER: builtins.int
        PATH_FIELD_NUMBER: builtins.int
        SHOULD_OVERWRITE_FIELD_NUMBER: builtins.int
        OPTIONS_FIELD_NUMBER: builtins.int
        @property
        def operator(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlOperator:
            """Estimator or evaluator"""
        @property
        def obj_ref(self) -> pyspark.sql.connect.proto.ml_common_pb2.ObjectRef:
            """The cached model"""
        @property
        def params(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlParams:
            """(Optional) The parameters of the operator, which can be an estimator/evaluator or a cached model"""
        path: builtins.str
        """(Required) The path to save the ML instance to"""
        should_overwrite: builtins.bool
        """(Optional) Whether to overwrite the output path if it already exists."""
        @property
        def options(
            self,
        ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
            """(Optional) The options of the writer"""
        def __init__(
            self,
            *,
            operator: pyspark.sql.connect.proto.ml_common_pb2.MlOperator | None = ...,
            obj_ref: pyspark.sql.connect.proto.ml_common_pb2.ObjectRef | None = ...,
            params: pyspark.sql.connect.proto.ml_common_pb2.MlParams | None = ...,
            path: builtins.str = ...,
            should_overwrite: builtins.bool | None = ...,
            options: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "_should_overwrite",
                b"_should_overwrite",
                "obj_ref",
                b"obj_ref",
                "operator",
                b"operator",
                "params",
                b"params",
                "should_overwrite",
                b"should_overwrite",
                "type",
                b"type",
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "_should_overwrite",
                b"_should_overwrite",
                "obj_ref",
                b"obj_ref",
                "operator",
                b"operator",
                "options",
                b"options",
                "params",
                b"params",
                "path",
                b"path",
                "should_overwrite",
                b"should_overwrite",
                "type",
                b"type",
            ],
        ) -> None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_params", b"_params"]
        ) -> typing_extensions.Literal["params"] | None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_should_overwrite", b"_should_overwrite"]
        ) -> typing_extensions.Literal["should_overwrite"] | None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["type", b"type"]
        ) -> typing_extensions.Literal["operator", "obj_ref"] | None: ...
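
    # A hedged sketch for `Write`: exactly one of `operator`/`obj_ref` (the
    # `type` oneof) may be set; the option key below is hypothetical and only
    # illustrates the string-to-string `options` map:
    #
    #   write = MlCommand.Write(
    #       obj_ref=ml_common_pb2.ObjectRef(id="model-1"),
    #       path="/tmp/model",
    #       should_overwrite=True,
    #       options={"compression": "zstd"},  # hypothetical writer option
    #   )
    #   assert write.WhichOneof("type") == "obj_ref"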

    class Read(google.protobuf.message.Message):
        """Command to load an ML operator."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        OPERATOR_FIELD_NUMBER: builtins.int
        PATH_FIELD_NUMBER: builtins.int
        @property
        def operator(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlOperator:
            """(Required) ML operator information"""
        path: builtins.str
        """(Required) The path to load the ML instance from"""
        def __init__(
            self,
            *,
            operator: pyspark.sql.connect.proto.ml_common_pb2.MlOperator | None = ...,
            path: builtins.str = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["operator", b"operator"]
        ) -> builtins.bool: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["operator", b"operator", "path", b"path"]
        ) -> None: ...
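
    # A hedged sketch for `Read`, the counterpart of `Write` (the `name` field
    # on `MlOperator` is assumed from ml_common_pb2):
    #
    #   read = MlCommand.Read(
    #       operator=ml_common_pb2.MlOperator(name="LogisticRegressionModel"),
    #       path="/tmp/model",
    #   )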

    class Evaluate(google.protobuf.message.Message):
        """Command for evaluator.evaluate(dataset)"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        EVALUATOR_FIELD_NUMBER: builtins.int
        PARAMS_FIELD_NUMBER: builtins.int
        DATASET_FIELD_NUMBER: builtins.int
        @property
        def evaluator(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlOperator:
            """(Required) Evaluator information (its type should be OPERATOR_TYPE_EVALUATOR)"""
        @property
        def params(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlParams:
            """(Optional) Parameters of the Evaluator"""
        @property
        def dataset(self) -> pyspark.sql.connect.proto.relations_pb2.Relation:
            """(Required) The evaluation dataset"""
        def __init__(
            self,
            *,
            evaluator: pyspark.sql.connect.proto.ml_common_pb2.MlOperator | None = ...,
            params: pyspark.sql.connect.proto.ml_common_pb2.MlParams | None = ...,
            dataset: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "dataset",
                b"dataset",
                "evaluator",
                b"evaluator",
                "params",
                b"params",
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "dataset",
                b"dataset",
                "evaluator",
                b"evaluator",
                "params",
                b"params",
            ],
        ) -> None: ...
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_params", b"_params"]
        ) -> typing_extensions.Literal["params"] | None: ...
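
    # `Evaluate` mirrors `Fit`; a hedged sketch showing a presence check on
    # the optional `params` field:
    #
    #   evaluate = MlCommand.Evaluate(
    #       evaluator=ml_common_pb2.MlOperator(name="RegressionEvaluator"),
    #       dataset=relations_pb2.Relation(),
    #   )
    #   assert not evaluate.HasField("params")  # optional and left unset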

    class CreateSummary(google.protobuf.message.Message):
        """Command to re-create the model summary when it has been lost
        (the summary is lost when the model is offloaded and then loaded back).
        """

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        MODEL_REF_FIELD_NUMBER: builtins.int
        DATASET_FIELD_NUMBER: builtins.int
        @property
        def model_ref(self) -> pyspark.sql.connect.proto.ml_common_pb2.ObjectRef: ...
        @property
        def dataset(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: ...
        def __init__(
            self,
            *,
            model_ref: pyspark.sql.connect.proto.ml_common_pb2.ObjectRef | None = ...,
            dataset: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal["dataset", b"dataset", "model_ref", b"model_ref"],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal["dataset", b"dataset", "model_ref", b"model_ref"],
        ) -> None: ...
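
    # A hedged sketch for `CreateSummary`, used after a model round-trips
    # through offloading; assumes `ObjectRef` carries a string `id`:
    #
    #   create_summary = MlCommand.CreateSummary(
    #       model_ref=ml_common_pb2.ObjectRef(id="model-1"),
    #       dataset=relations_pb2.Relation(),  # dataset to rebuild the summary
    #   )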

    class GetModelSize(google.protobuf.message.Message):
        """Command to query a model's estimated in-memory size"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        MODEL_REF_FIELD_NUMBER: builtins.int
        @property
        def model_ref(self) -> pyspark.sql.connect.proto.ml_common_pb2.ObjectRef: ...
        def __init__(
            self,
            *,
            model_ref: pyspark.sql.connect.proto.ml_common_pb2.ObjectRef | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["model_ref", b"model_ref"]
        ) -> builtins.bool: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["model_ref", b"model_ref"]
        ) -> None: ...

    FIT_FIELD_NUMBER: builtins.int
    FETCH_FIELD_NUMBER: builtins.int
    DELETE_FIELD_NUMBER: builtins.int
    WRITE_FIELD_NUMBER: builtins.int
    READ_FIELD_NUMBER: builtins.int
    EVALUATE_FIELD_NUMBER: builtins.int
    CLEAN_CACHE_FIELD_NUMBER: builtins.int
    GET_CACHE_INFO_FIELD_NUMBER: builtins.int
    CREATE_SUMMARY_FIELD_NUMBER: builtins.int
    GET_MODEL_SIZE_FIELD_NUMBER: builtins.int
    @property
    def fit(self) -> global___MlCommand.Fit: ...
    @property
    def fetch(self) -> pyspark.sql.connect.proto.relations_pb2.Fetch: ...
    @property
    def delete(self) -> global___MlCommand.Delete: ...
    @property
    def write(self) -> global___MlCommand.Write: ...
    @property
    def read(self) -> global___MlCommand.Read: ...
    @property
    def evaluate(self) -> global___MlCommand.Evaluate: ...
    @property
    def clean_cache(self) -> global___MlCommand.CleanCache: ...
    @property
    def get_cache_info(self) -> global___MlCommand.GetCacheInfo: ...
    @property
    def create_summary(self) -> global___MlCommand.CreateSummary: ...
    @property
    def get_model_size(self) -> global___MlCommand.GetModelSize: ...
    def __init__(
        self,
        *,
        fit: global___MlCommand.Fit | None = ...,
        fetch: pyspark.sql.connect.proto.relations_pb2.Fetch | None = ...,
        delete: global___MlCommand.Delete | None = ...,
        write: global___MlCommand.Write | None = ...,
        read: global___MlCommand.Read | None = ...,
        evaluate: global___MlCommand.Evaluate | None = ...,
        clean_cache: global___MlCommand.CleanCache | None = ...,
        get_cache_info: global___MlCommand.GetCacheInfo | None = ...,
        create_summary: global___MlCommand.CreateSummary | None = ...,
        get_model_size: global___MlCommand.GetModelSize | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "clean_cache",
            b"clean_cache",
            "command",
            b"command",
            "create_summary",
            b"create_summary",
            "delete",
            b"delete",
            "evaluate",
            b"evaluate",
            "fetch",
            b"fetch",
            "fit",
            b"fit",
            "get_cache_info",
            b"get_cache_info",
            "get_model_size",
            b"get_model_size",
            "read",
            b"read",
            "write",
            b"write",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "clean_cache",
            b"clean_cache",
            "command",
            b"command",
            "create_summary",
            b"create_summary",
            "delete",
            b"delete",
            "evaluate",
            b"evaluate",
            "fetch",
            b"fetch",
            "fit",
            b"fit",
            "get_cache_info",
            b"get_cache_info",
            "get_model_size",
            b"get_model_size",
            "read",
            b"read",
            "write",
            b"write",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["command", b"command"]
    ) -> (
        typing_extensions.Literal[
            "fit",
            "fetch",
            "delete",
            "write",
            "read",
            "evaluate",
            "clean_cache",
            "get_cache_info",
            "create_summary",
            "get_model_size",
        ]
        | None
    ): ...

global___MlCommand = MlCommand
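
# A hedged end-to-end sketch: build one command variant, then dispatch on the
# `command` oneof (the client plumbing that actually sends the proto is out of
# scope here):
#
#   cmd = MlCommand(clean_cache=MlCommand.CleanCache())
#   if cmd.WhichOneof("command") == "clean_cache":
#       ...  # exactly one of the ten variants is ever set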

class MlCommandResult(google.protobuf.message.Message):
    """The result of MlCommand"""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class MlOperatorInfo(google.protobuf.message.Message):
        """Represents information about an ML operator"""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        OBJ_REF_FIELD_NUMBER: builtins.int
        NAME_FIELD_NUMBER: builtins.int
        UID_FIELD_NUMBER: builtins.int
        PARAMS_FIELD_NUMBER: builtins.int
        WARNING_MESSAGE_FIELD_NUMBER: builtins.int
        @property
        def obj_ref(self) -> pyspark.sql.connect.proto.ml_common_pb2.ObjectRef:
            """The cached object, which can be a model or a summary evaluated by a model"""
        name: builtins.str
        """Operator name"""
        uid: builtins.str
        """(Optional) The 'uid' of an ML object.
        Note that it is different from the 'id' of a cached object.
        """
        @property
        def params(self) -> pyspark.sql.connect.proto.ml_common_pb2.MlParams:
            """(Optional) Parameters"""
        warning_message: builtins.str
        """(Optional) Warning message generated during the ML command execution"""
        def __init__(
            self,
            *,
            obj_ref: pyspark.sql.connect.proto.ml_common_pb2.ObjectRef | None = ...,
            name: builtins.str = ...,
            uid: builtins.str | None = ...,
            params: pyspark.sql.connect.proto.ml_common_pb2.MlParams | None = ...,
            warning_message: builtins.str | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "_uid",
                b"_uid",
                "_warning_message",
                b"_warning_message",
                "name",
                b"name",
                "obj_ref",
                b"obj_ref",
                "params",
                b"params",
                "type",
                b"type",
                "uid",
                b"uid",
                "warning_message",
                b"warning_message",
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "_params",
                b"_params",
                "_uid",
                b"_uid",
                "_warning_message",
                b"_warning_message",
                "name",
                b"name",
                "obj_ref",
                b"obj_ref",
                "params",
                b"params",
                "type",
                b"type",
                "uid",
                b"uid",
                "warning_message",
                b"warning_message",
            ],
        ) -> None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_params", b"_params"]
        ) -> typing_extensions.Literal["params"] | None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_uid", b"_uid"]
        ) -> typing_extensions.Literal["uid"] | None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_warning_message", b"_warning_message"]
        ) -> typing_extensions.Literal["warning_message"] | None: ...
        @typing.overload
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["type", b"type"]
        ) -> typing_extensions.Literal["obj_ref", "name"] | None: ...

    PARAM_FIELD_NUMBER: builtins.int
    SUMMARY_FIELD_NUMBER: builtins.int
    OPERATOR_INFO_FIELD_NUMBER: builtins.int
    @property
    def param(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression.Literal:
        """The result of the attribute, returned as a literal expression"""
    summary: builtins.str
    """The cached ID of the summary produced by evaluating a dataset against a model"""
    @property
    def operator_info(self) -> global___MlCommandResult.MlOperatorInfo:
        """Operator information"""
    def __init__(
        self,
        *,
        param: pyspark.sql.connect.proto.expressions_pb2.Expression.Literal | None = ...,
        summary: builtins.str = ...,
        operator_info: global___MlCommandResult.MlOperatorInfo | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "operator_info",
            b"operator_info",
            "param",
            b"param",
            "result_type",
            b"result_type",
            "summary",
            b"summary",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "operator_info",
            b"operator_info",
            "param",
            b"param",
            "result_type",
            b"result_type",
            "summary",
            b"summary",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["result_type", b"result_type"]
    ) -> typing_extensions.Literal["param", "summary", "operator_info"] | None: ...

global___MlCommandResult = MlCommandResult
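
# A hedged sketch of dispatching on a result's `result_type` oneof; `result`
# stands for a deserialized MlCommandResult, and the `obj_ref.id` access
# assumes the `id` field from ml_common_pb2:
#
#   kind = result.WhichOneof("result_type")
#   if kind == "operator_info":
#       ref_id = result.operator_info.obj_ref.id
#   elif kind == "summary":
#       summary_id = result.summary
#   elif kind == "param":
#       literal = result.param  # an expressions_pb2.Expression.Literal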