#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from pyspark.sql.connect.proto import base_pb2 as spark_dot_connect_dot_base__pb2
class SparkConnectServiceStub(object):
"""Main interface for the SparkConnect service."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ExecutePlan = channel.unary_stream(
"/spark.connect.SparkConnectService/ExecutePlan",
request_serializer=spark_dot_connect_dot_base__pb2.ExecutePlanRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
_registered_method=True,
)
self.AnalyzePlan = channel.unary_unary(
"/spark.connect.SparkConnectService/AnalyzePlan",
request_serializer=spark_dot_connect_dot_base__pb2.AnalyzePlanRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.AnalyzePlanResponse.FromString,
_registered_method=True,
)
self.Config = channel.unary_unary(
"/spark.connect.SparkConnectService/Config",
request_serializer=spark_dot_connect_dot_base__pb2.ConfigRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ConfigResponse.FromString,
_registered_method=True,
)
self.AddArtifacts = channel.stream_unary(
"/spark.connect.SparkConnectService/AddArtifacts",
request_serializer=spark_dot_connect_dot_base__pb2.AddArtifactsRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.AddArtifactsResponse.FromString,
_registered_method=True,
)
self.ArtifactStatus = channel.unary_unary(
"/spark.connect.SparkConnectService/ArtifactStatus",
request_serializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesResponse.FromString,
_registered_method=True,
)
self.Interrupt = channel.unary_unary(
"/spark.connect.SparkConnectService/Interrupt",
request_serializer=spark_dot_connect_dot_base__pb2.InterruptRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.InterruptResponse.FromString,
_registered_method=True,
)
self.ReattachExecute = channel.unary_stream(
"/spark.connect.SparkConnectService/ReattachExecute",
request_serializer=spark_dot_connect_dot_base__pb2.ReattachExecuteRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
_registered_method=True,
)
self.ReleaseExecute = channel.unary_unary(
"/spark.connect.SparkConnectService/ReleaseExecute",
request_serializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteResponse.FromString,
_registered_method=True,
)
self.ReleaseSession = channel.unary_unary(
"/spark.connect.SparkConnectService/ReleaseSession",
request_serializer=spark_dot_connect_dot_base__pb2.ReleaseSessionRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ReleaseSessionResponse.FromString,
_registered_method=True,
)
self.FetchErrorDetails = channel.unary_unary(
"/spark.connect.SparkConnectService/FetchErrorDetails",
request_serializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsResponse.FromString,
_registered_method=True,
)
self.CloneSession = channel.unary_unary(
"/spark.connect.SparkConnectService/CloneSession",
request_serializer=spark_dot_connect_dot_base__pb2.CloneSessionRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.CloneSessionResponse.FromString,
_registered_method=True,
)
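

# ---------------------------------------------------------------------------
# Usage sketch (editorial addition, not generated code): how a client might
# drive the stub above. The endpoint "localhost:15002" (the conventional
# Spark Connect port) and the bare ExecutePlanRequest are assumptions for
# illustration; a real client must supply a valid session_id, user_context,
# and plan as defined in base.proto.
def _example_execute_plan_sketch():
    channel = grpc.insecure_channel("localhost:15002")
    stub = SparkConnectServiceStub(channel)
    request = spark_dot_connect_dot_base__pb2.ExecutePlanRequest(
        session_id="00000000-0000-0000-0000-000000000001",  # placeholder UUID
    )
    # ExecutePlan is unary-stream: the call returns an iterator of
    # ExecutePlanResponse messages. AddArtifacts, by contrast, is
    # stream-unary: it takes an iterator of requests and returns one response.
    for response in stub.ExecutePlan(request):
        print(response)

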
class SparkConnectServiceServicer(object):
"""Main interface for the SparkConnect service."""
def ExecutePlan(self, request, context):
"""Executes a request that contains the query and returns a stream of [[Response]].
It is guaranteed that there is at least one ARROW batch returned even if the result set is empty.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def AnalyzePlan(self, request, context):
"""Analyzes a query and returns a [[AnalyzeResponse]] containing metadata about the query."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def Config(self, request, context):
"""Update or fetch the configurations and returns a [[ConfigResponse]] containing the result."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def AddArtifacts(self, request_iterator, context):
"""Add artifacts to the session and returns a [[AddArtifactsResponse]] containing metadata about
the added artifacts.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ArtifactStatus(self, request, context):
"""Check statuses of artifacts in the session and returns them in a [[ArtifactStatusesResponse]]"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def Interrupt(self, request, context):
"""Interrupts running executions"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ReattachExecute(self, request, context):
"""Reattach to an existing reattachable execution.
The ExecutePlan must have been started with ReattachOptions.reattachable=true.
If the ExecutePlanResponse stream ends without a ResultComplete message, there is more to
continue. If there is a ResultComplete, the client should use ReleaseExecute with ReleaseAll
after taking the ResultComplete.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ReleaseExecute(self, request, context):
"""Release an reattachable execution, or parts thereof.
The ExecutePlan must have been started with ReattachOptions.reattachable=true.
Non-reattachable executions are released automatically and immediately after the ExecutePlan
RPC, and ReleaseExecute may not be used for them.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ReleaseSession(self, request, context):
"""Release a session.
All the executions in the session will be released. Any further requests for the session with
that session_id for the given user_id will fail. If the session didn't exist or was already
released, this is a no-op.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def FetchErrorDetails(self, request, context):
"""FetchErrorDetails retrieves the matched exception with details based on a provided error id."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CloneSession(self, request, context):
"""Create a clone of a Spark Connect session on the server side. The server-side session
is cloned with all its current state (SQL configurations, temporary views, registered
functions, catalog state) copied over to a new independent session. The cloned session
is isolated from the source session - any subsequent changes to either session's
server-side state will not be reflected in the other.
The request can optionally specify a custom session ID for the cloned session (must be
a valid UUID). If not provided, a new UUID will be generated automatically.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
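

# ---------------------------------------------------------------------------
# Sketch (editorial addition): a minimal servicer override. Only Config is
# implemented, and only the session_id field of ConfigResponse is assumed
# here; a real server must also populate the response pairs per base.proto.
# Methods not overridden keep the UNIMPLEMENTED behavior above.
class _ExampleConfigServicer(SparkConnectServiceServicer):
    def Config(self, request, context):
        # Echo the session back with an otherwise-empty response.
        return spark_dot_connect_dot_base__pb2.ConfigResponse(
            session_id=request.session_id
        )

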
def add_SparkConnectServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
"ExecutePlan": grpc.unary_stream_rpc_method_handler(
servicer.ExecutePlan,
request_deserializer=spark_dot_connect_dot_base__pb2.ExecutePlanRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.SerializeToString,
),
"AnalyzePlan": grpc.unary_unary_rpc_method_handler(
servicer.AnalyzePlan,
request_deserializer=spark_dot_connect_dot_base__pb2.AnalyzePlanRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.AnalyzePlanResponse.SerializeToString,
),
"Config": grpc.unary_unary_rpc_method_handler(
servicer.Config,
request_deserializer=spark_dot_connect_dot_base__pb2.ConfigRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.ConfigResponse.SerializeToString,
),
"AddArtifacts": grpc.stream_unary_rpc_method_handler(
servicer.AddArtifacts,
request_deserializer=spark_dot_connect_dot_base__pb2.AddArtifactsRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.AddArtifactsResponse.SerializeToString,
),
"ArtifactStatus": grpc.unary_unary_rpc_method_handler(
servicer.ArtifactStatus,
request_deserializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesResponse.SerializeToString,
),
"Interrupt": grpc.unary_unary_rpc_method_handler(
servicer.Interrupt,
request_deserializer=spark_dot_connect_dot_base__pb2.InterruptRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.InterruptResponse.SerializeToString,
),
"ReattachExecute": grpc.unary_stream_rpc_method_handler(
servicer.ReattachExecute,
request_deserializer=spark_dot_connect_dot_base__pb2.ReattachExecuteRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.SerializeToString,
),
"ReleaseExecute": grpc.unary_unary_rpc_method_handler(
servicer.ReleaseExecute,
request_deserializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteResponse.SerializeToString,
),
"ReleaseSession": grpc.unary_unary_rpc_method_handler(
servicer.ReleaseSession,
request_deserializer=spark_dot_connect_dot_base__pb2.ReleaseSessionRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.ReleaseSessionResponse.SerializeToString,
),
"FetchErrorDetails": grpc.unary_unary_rpc_method_handler(
servicer.FetchErrorDetails,
request_deserializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsResponse.SerializeToString,
),
"CloneSession": grpc.unary_unary_rpc_method_handler(
servicer.CloneSession,
request_deserializer=spark_dot_connect_dot_base__pb2.CloneSessionRequest.FromString,
response_serializer=spark_dot_connect_dot_base__pb2.CloneSessionResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"spark.connect.SparkConnectService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
server.add_registered_method_handlers("spark.connect.SparkConnectService", rpc_method_handlers)
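

# ---------------------------------------------------------------------------
# Wiring sketch (editorial addition): registering a servicer with a gRPC
# server via the helper above. The port and thread count are placeholder
# choices, and _ExampleConfigServicer is the illustrative class defined above.
def _example_serve_sketch():
    from concurrent import futures

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    add_SparkConnectServiceServicer_to_server(_ExampleConfigServicer(), server)
    server.add_insecure_port("[::]:15002")
    server.start()
    server.wait_for_termination()

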
# This class is part of an EXPERIMENTAL API.
class SparkConnectService(object):
"""Main interface for the SparkConnect service."""
@staticmethod
def ExecutePlan(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_stream(
request,
target,
"/spark.connect.SparkConnectService/ExecutePlan",
spark_dot_connect_dot_base__pb2.ExecutePlanRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def AnalyzePlan(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/AnalyzePlan",
spark_dot_connect_dot_base__pb2.AnalyzePlanRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.AnalyzePlanResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def Config(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/Config",
spark_dot_connect_dot_base__pb2.ConfigRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.ConfigResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def AddArtifacts(
request_iterator,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.stream_unary(
request_iterator,
target,
"/spark.connect.SparkConnectService/AddArtifacts",
spark_dot_connect_dot_base__pb2.AddArtifactsRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.AddArtifactsResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def ArtifactStatus(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/ArtifactStatus",
spark_dot_connect_dot_base__pb2.ArtifactStatusesRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.ArtifactStatusesResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def Interrupt(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/Interrupt",
spark_dot_connect_dot_base__pb2.InterruptRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.InterruptResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def ReattachExecute(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_stream(
request,
target,
"/spark.connect.SparkConnectService/ReattachExecute",
spark_dot_connect_dot_base__pb2.ReattachExecuteRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def ReleaseExecute(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/ReleaseExecute",
spark_dot_connect_dot_base__pb2.ReleaseExecuteRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.ReleaseExecuteResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def ReleaseSession(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/ReleaseSession",
spark_dot_connect_dot_base__pb2.ReleaseSessionRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.ReleaseSessionResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def FetchErrorDetails(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/FetchErrorDetails",
spark_dot_connect_dot_base__pb2.FetchErrorDetailsRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.FetchErrorDetailsResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
@staticmethod
def CloneSession(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/spark.connect.SparkConnectService/CloneSession",
spark_dot_connect_dot_base__pb2.CloneSessionRequest.SerializeToString,
spark_dot_connect_dot_base__pb2.CloneSessionResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
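

# ---------------------------------------------------------------------------
# Sketch (editorial addition): the experimental module-level API above lets a
# caller issue a one-shot RPC without managing a channel or stub. The target
# address and the bare ConfigRequest are assumptions for illustration; a real
# request needs a valid session_id and operation, per base.proto.
def _example_one_shot_config_sketch():
    request = spark_dot_connect_dot_base__pb2.ConfigRequest(
        session_id="00000000-0000-0000-0000-000000000001",  # placeholder UUID
    )
    return SparkConnectService.Config(
        request,
        "localhost:15002",
        insecure=True,  # plaintext for the sketch; use channel_credentials in production
        timeout=60.0,
    )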