diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala index 05aa191a4ddfd..348fc94bb89c3 100644 --- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala +++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala @@ -63,7 +63,7 @@ private[sql] class SparkConnectClient( .newBuilder() .setPlan(plan) .setUserContext(userContext) - .setClientId(sessionId) + .setSessionId(sessionId) .setClientType(userAgent) .build() stub.executePlan(request) @@ -78,7 +78,7 @@ private[sql] class SparkConnectClient( val request = proto.ConfigRequest .newBuilder() .setOperation(operation) - .setClientId(sessionId) + .setSessionId(sessionId) .setClientType(userAgent) .setUserContext(userContext) .build() @@ -157,7 +157,7 @@ private[sql] class SparkConnectClient( private def analyze(builder: proto.AnalyzePlanRequest.Builder): proto.AnalyzePlanResponse = { val request = builder .setUserContext(userContext) - .setClientId(sessionId) + .setSessionId(sessionId) .setClientType(userAgent) .build() analyze(request) diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala index 11e28f538e89d..94bc22ef77d0e 100644 --- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala +++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala @@ -612,8 +612,8 @@ class ClientE2ETestSuite extends RemoteSparkSession { } test("SparkSession newSession") { - val oldId = spark.sql("SELECT 1").analyze.getClientId - val newId = spark.newSession().sql("SELECT 1").analyze.getClientId + val oldId = spark.sql("SELECT 1").analyze.getSessionId + val newId = spark.newSession().sql("SELECT 1").analyze.getSessionId assert(oldId != newId) } diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala index dcb135892064a..bc600e5a07168 100755 --- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala +++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala @@ -75,11 +75,11 @@ class SparkConnectClientSuite extends ConnectFunSuite with BeforeAndAfterEach { client = clientBuilder(server.getPort) val request = AnalyzePlanRequest .newBuilder() - .setClientId("abc123") + .setSessionId("abc123") .build() val response = client.analyze(request) - assert(response.getClientId === "abc123") + assert(response.getSessionId === "abc123") } test("Test connection") { @@ -99,7 +99,7 @@ class SparkConnectClientSuite extends ConnectFunSuite with BeforeAndAfterEach { .connectionString(s"sc://localhost:${server.getPort}/;use_ssl=true") .build() - val request = AnalyzePlanRequest.newBuilder().setClientId("abc123").build() + val request = AnalyzePlanRequest.newBuilder().setSessionId("abc123").build() // Failed the ssl handshake as the dummy server does not have any server credentials installed. 
assertThrows[StatusRuntimeException] { @@ -201,11 +201,11 @@ class DummySparkConnectService() extends SparkConnectServiceGrpc.SparkConnectSer request: ExecutePlanRequest, responseObserver: StreamObserver[ExecutePlanResponse]): Unit = { // Reply with a dummy response using the same client ID - val requestClientId = request.getClientId + val requestSessionId = request.getSessionId inputPlan = request.getPlan val response = ExecutePlanResponse .newBuilder() - .setClientId(requestClientId) + .setSessionId(requestSessionId) .build() responseObserver.onNext(response) responseObserver.onCompleted() @@ -215,7 +215,7 @@ class DummySparkConnectService() extends SparkConnectServiceGrpc.SparkConnectSer request: AnalyzePlanRequest, responseObserver: StreamObserver[AnalyzePlanResponse]): Unit = { // Reply with a dummy response using the same client ID - val requestClientId = request.getClientId + val requestSessionId = request.getSessionId request.getAnalyzeCase match { case proto.AnalyzePlanRequest.AnalyzeCase.SCHEMA => inputPlan = request.getSchema.getPlan @@ -233,7 +233,7 @@ class DummySparkConnectService() extends SparkConnectServiceGrpc.SparkConnectSer } val response = AnalyzePlanResponse .newBuilder() - .setClientId(requestClientId) + .setSessionId(requestSessionId) .build() responseObserver.onNext(response) responseObserver.onCompleted() diff --git a/connector/connect/common/src/main/protobuf/spark/connect/base.proto b/connector/connect/common/src/main/protobuf/spark/connect/base.proto index 2252d91c9ff73..1a9c437f0ecc2 100644 --- a/connector/connect/common/src/main/protobuf/spark/connect/base.proto +++ b/connector/connect/common/src/main/protobuf/spark/connect/base.proto @@ -58,9 +58,10 @@ message UserContext { message AnalyzePlanRequest { // (Required) // - // The client_id is set by the client to be able to collate streaming responses from - // different queries. - string client_id = 1; + // The session_id specifies a spark session for a user id (which is specified + // by user_context.user_id). The session_id is set by the client to be able to + // collate streaming responses from different queries within the dedicated session. + string session_id = 1; // (Required) User context UserContext user_context = 2; @@ -161,7 +162,7 @@ message AnalyzePlanRequest { // Response to performing analysis of the query. Contains relevant metadata to be able to // reason about the performance. message AnalyzePlanResponse { - string client_id = 1; + string session_id = 1; oneof result { Schema schema = 2; @@ -217,11 +218,15 @@ message AnalyzePlanResponse { message ExecutePlanRequest { // (Required) // - // The client_id is set by the client to be able to collate streaming responses from - // different queries. - string client_id = 1; + // The session_id specifies a spark session for a user id (which is specified + // by user_context.user_id). The session_id is set by the client to be able to + // collate streaming responses from different queries within the dedicated session. + string session_id = 1; // (Required) User context + // + // user_context.user_id and session_id both identify a unique remote spark session on the + // server side. UserContext user_context = 2; // (Required) The logical plan to be executed / analyzed. @@ -234,9 +239,9 @@ message ExecutePlanRequest { } // The response of a query, can be one or more for each request. Responses belonging to the -// same input query, carry the same `client_id`. +// same input query, carry the same `session_id`.
message ExecutePlanResponse { - string client_id = 1; + string session_id = 1; // Union type for the different response messages. oneof response_type { @@ -304,9 +309,10 @@ message KeyValue { message ConfigRequest { // (Required) // - // The client_id is set by the client to be able to collate streaming responses from - // different queries. - string client_id = 1; + // The session_id specifies a spark session for a user id (which is specified + // by user_context.user_id). The session_id is set by the client to be able to + // collate streaming responses from different queries within the dedicated session. + string session_id = 1; // (Required) User context UserContext user_context = 2; @@ -369,7 +375,7 @@ message ConfigRequest { // Response to the config request. message ConfigResponse { - string client_id = 1; + string session_id = 1; // (Optional) The result key-value pairs. // @@ -386,9 +392,12 @@ message ConfigResponse { // Request to transfer client-local artifacts. message AddArtifactsRequest { - // The client_id is set by the client to be able to collate streaming responses from - // different queries. - string client_id = 1; + // (Required) + // + // The session_id specifies a spark session for a user id (which is specified + // by user_context.user_id). The session_id is set by the client to be able to + // collate streaming responses from different queries within the dedicated session. + string session_id = 1; // User context UserContext user_context = 2; diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala index 60fb94e8098b9..8ca004d520c51 100644 --- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala +++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala @@ -1459,7 +1459,7 @@ class SparkConnectPlanner(val session: SparkSession) { def process( command: proto.Command, - clientId: String, + sessionId: String, responseObserver: StreamObserver[ExecutePlanResponse]): Unit = { command.getCommandTypeCase match { case proto.Command.CommandTypeCase.REGISTER_FUNCTION => @@ -1473,14 +1473,14 @@ class SparkConnectPlanner(val session: SparkSession) { case proto.Command.CommandTypeCase.EXTENSION => handleCommandPlugin(command.getExtension) case proto.Command.CommandTypeCase.SQL_COMMAND => - handleSqlCommand(command.getSqlCommand, clientId, responseObserver) + handleSqlCommand(command.getSqlCommand, sessionId, responseObserver) case _ => throw new UnsupportedOperationException(s"$command not supported.") } } def handleSqlCommand( getSqlCommand: SqlCommand, - clientId: String, + sessionId: String, responseObserver: StreamObserver[ExecutePlanResponse]): Unit = { // Eagerly execute commands of the provided SQL string. 
val df = session.sql(getSqlCommand.getSql, getSqlCommand.getArgsMap) @@ -1537,12 +1537,12 @@ class SparkConnectPlanner(val session: SparkSession) { responseObserver.onNext( ExecutePlanResponse .newBuilder() - .setClientId(clientId) + .setSessionId(sessionId) .setSqlCommandResult(result) .build()) // Send Metrics - SparkConnectStreamHandler.sendMetricsToResponse(clientId, df) + SparkConnectStreamHandler.sendMetricsToResponse(sessionId, df) } private def handleRegisterUserDefinedFunction( diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala index e3d4da66a087c..9520ec8015f67 100644 --- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala +++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala @@ -35,7 +35,7 @@ private[connect] class SparkConnectAnalyzeHandler( def handle(request: proto.AnalyzePlanRequest): Unit = { val session = SparkConnectService - .getOrCreateIsolatedSession(request.getUserContext.getUserId, request.getClientId) + .getOrCreateIsolatedSession(request.getUserContext.getUserId, request.getSessionId) .session session.withActive { val response = process(request, session) @@ -155,7 +155,7 @@ private[connect] class SparkConnectAnalyzeHandler( case other => throw InvalidPlanInput(s"Unknown Analyze Method $other!") } - builder.setClientId(request.getClientId) + builder.setSessionId(request.getSessionId) builder.build() } } diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala index 84f625222a856..38fd88297f354 100644 --- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala +++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala @@ -32,7 +32,7 @@ class SparkConnectConfigHandler(responseObserver: StreamObserver[proto.ConfigRes def handle(request: proto.ConfigRequest): Unit = { val session = SparkConnectService - .getOrCreateIsolatedSession(request.getUserContext.getUserId, request.getClientId) + .getOrCreateIsolatedSession(request.getUserContext.getUserId, request.getSessionId) .session val builder = request.getOperation.getOpTypeCase match { @@ -53,7 +53,7 @@ class SparkConnectConfigHandler(responseObserver: StreamObserver[proto.ConfigRes case _ => throw new UnsupportedOperationException(s"${request.getOperation} not supported.") } - builder.setClientId(request.getClientId) + builder.setSessionId(request.getSessionId) responseObserver.onNext(builder.build()) responseObserver.onCompleted() } diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala index 15d5a981ae869..0dd1741f0990b 100644 --- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala +++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala @@ -44,7 +44,7 @@ class SparkConnectStreamHandler(responseObserver: StreamObserver[ExecutePlanResp def handle(v: 
ExecutePlanRequest): Unit = { val session = SparkConnectService - .getOrCreateIsolatedSession(v.getUserContext.getUserId, v.getClientId) + .getOrCreateIsolatedSession(v.getUserContext.getUserId, v.getSessionId) .session session.withActive { v.getPlan.getOpTypeCase match { @@ -60,12 +60,12 @@ class SparkConnectStreamHandler(responseObserver: StreamObserver[ExecutePlanResp // Extract the plan from the request and convert it to a logical plan val planner = new SparkConnectPlanner(session) val dataframe = Dataset.ofRows(session, planner.transformRelation(request.getPlan.getRoot)) - processAsArrowBatches(request.getClientId, dataframe, responseObserver) + processAsArrowBatches(request.getSessionId, dataframe, responseObserver) responseObserver.onNext( - SparkConnectStreamHandler.sendMetricsToResponse(request.getClientId, dataframe)) + SparkConnectStreamHandler.sendMetricsToResponse(request.getSessionId, dataframe)) if (dataframe.queryExecution.observedMetrics.nonEmpty) { responseObserver.onNext( - SparkConnectStreamHandler.sendObservedMetricsToResponse(request.getClientId, dataframe)) + SparkConnectStreamHandler.sendObservedMetricsToResponse(request.getSessionId, dataframe)) } responseObserver.onCompleted() } @@ -73,7 +73,7 @@ class SparkConnectStreamHandler(responseObserver: StreamObserver[ExecutePlanResp private def handleCommand(session: SparkSession, request: ExecutePlanRequest): Unit = { val command = request.getPlan.getCommand val planner = new SparkConnectPlanner(session) - planner.process(command, request.getClientId, responseObserver) + planner.process(command, request.getSessionId, responseObserver) responseObserver.onCompleted() } } @@ -96,7 +96,7 @@ object SparkConnectStreamHandler { } def processAsArrowBatches( - clientId: String, + sessionId: String, dataframe: DataFrame, responseObserver: StreamObserver[ExecutePlanResponse]): Unit = { val spark = dataframe.sparkSession @@ -173,7 +173,7 @@ object SparkConnectStreamHandler { } partition.foreach { case (bytes, count) => - val response = proto.ExecutePlanResponse.newBuilder().setClientId(clientId) + val response = proto.ExecutePlanResponse.newBuilder().setSessionId(sessionId) val batch = proto.ExecutePlanResponse.ArrowBatch .newBuilder() .setRowCount(count) @@ -191,7 +191,7 @@ object SparkConnectStreamHandler { // Make sure at least 1 batch will be sent. if (numSent == 0) { val bytes = ArrowConverters.createEmptyArrowBatch(schema, timeZoneId) - val response = proto.ExecutePlanResponse.newBuilder().setClientId(clientId) + val response = proto.ExecutePlanResponse.newBuilder().setSessionId(sessionId) val batch = proto.ExecutePlanResponse.ArrowBatch .newBuilder() .setRowCount(0L) @@ -203,17 +203,17 @@ object SparkConnectStreamHandler { } } - def sendMetricsToResponse(clientId: String, rows: DataFrame): ExecutePlanResponse = { + def sendMetricsToResponse(sessionId: String, rows: DataFrame): ExecutePlanResponse = { // Send a last batch with the metrics ExecutePlanResponse .newBuilder() - .setClientId(clientId) + .setSessionId(sessionId) .setMetrics(MetricGenerator.buildMetrics(rows.queryExecution.executedPlan)) .build() } def sendObservedMetricsToResponse( - clientId: String, + sessionId: String, dataframe: DataFrame): ExecutePlanResponse = { val observedMetrics = dataframe.queryExecution.observedMetrics.map { case (name, row) => val cols = (0 until row.length).map(i => toConnectProtoValue(row(i))) @@ -226,7 +226,7 @@ object SparkConnectStreamHandler { // Prepare a response with the observed metrics. 
ExecutePlanResponse .newBuilder() - .setClientId(clientId) + .setSessionId(sessionId) .addAllObservedMetrics(observedMetrics.asJava) .build() } diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala index 2885d0035bce6..e2aecaaea8602 100644 --- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala +++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala @@ -221,7 +221,7 @@ class SparkConnectServiceSuite extends SharedSparkSession { .newBuilder() .setPlan(plan) .setUserContext(context) - .setClientId("session") + .setSessionId("session") .build() // The observer is executed inside this thread. So diff --git a/python/pyspark/sql/connect/client.py b/python/pyspark/sql/connect/client.py index 2594640aa3e20..8c85f17bb5fd7 100644 --- a/python/pyspark/sql/connect/client.py +++ b/python/pyspark/sql/connect/client.py @@ -712,7 +712,7 @@ def close(self) -> None: def _execute_plan_request_with_metadata(self) -> pb2.ExecutePlanRequest: req = pb2.ExecutePlanRequest() - req.client_id = self._session_id + req.session_id = self._session_id req.client_type = self._builder.userAgent if self._user_id: req.user_context.user_id = self._user_id @@ -720,7 +720,7 @@ def _execute_plan_request_with_metadata(self) -> pb2.ExecutePlanRequest: def _analyze_plan_request_with_metadata(self) -> pb2.AnalyzePlanRequest: req = pb2.AnalyzePlanRequest() - req.client_id = self._session_id + req.session_id = self._session_id req.client_type = self._builder.userAgent if self._user_id: req.user_context.user_id = self._user_id @@ -791,10 +791,10 @@ def _analyze(self, method: str, **kwargs: Any) -> AnalyzeResult: ): with attempt: resp = self._stub.AnalyzePlan(req, metadata=self._builder.metadata()) - if resp.client_id != self._session_id: + if resp.session_id != self._session_id: raise SparkConnectException( "Received incorrect session identifier for request:" - f"{resp.client_id} != {self._session_id}" + f"{resp.session_id} != {self._session_id}" ) return AnalyzeResult.fromProto(resp) raise SparkConnectException("Invalid state during retry exception handling.") @@ -818,10 +818,10 @@ def _execute(self, req: pb2.ExecutePlanRequest) -> None: ): with attempt: for b in self._stub.ExecutePlan(req, metadata=self._builder.metadata()): - if b.client_id != self._session_id: + if b.session_id != self._session_id: raise SparkConnectException( "Received incorrect session identifier for request: " - f"{b.client_id} != {self._session_id}" + f"{b.session_id} != {self._session_id}" ) except grpc.RpcError as rpc_error: self._handle_error(rpc_error) @@ -842,10 +842,10 @@ def _execute_and_fetch( with attempt: batches = [] for b in self._stub.ExecutePlan(req, metadata=self._builder.metadata()): - if b.client_id != self._session_id: + if b.session_id != self._session_id: raise SparkConnectException( "Received incorrect session identifier for request: " - f"{b.client_id} != {self._session_id}" + f"{b.session_id} != {self._session_id}" ) if b.metrics is not None: logger.debug("Received metric batch.") @@ -878,7 +878,7 @@ def _execute_and_fetch( def _config_request_with_metadata(self) -> pb2.ConfigRequest: req = pb2.ConfigRequest() - req.client_id = self._session_id + req.session_id = self._session_id req.client_type = self._builder.userAgent if self._user_id: 
req.user_context.user_id = self._user_id @@ -905,10 +905,10 @@ def config(self, operation: pb2.ConfigRequest.Operation) -> ConfigResult: ): with attempt: resp = self._stub.Config(req, metadata=self._builder.metadata()) - if resp.client_id != self._session_id: + if resp.session_id != self._session_id: raise SparkConnectException( "Received incorrect session identifier for request:" - f"{resp.client_id} != {self._session_id}" + f"{resp.session_id} != {self._session_id}" ) return ConfigResult.fromProto(resp) raise SparkConnectException("Invalid state during retry exception handling.") diff --git a/python/pyspark/sql/connect/proto/base_pb2.py b/python/pyspark/sql/connect/proto/base_pb2.py index 6d41ce28c7c3a..c67e58b44cda3 100644 --- a/python/pyspark/sql/connect/proto/base_pb2.py +++ b/python/pyspark/sql/connect/proto/base_pb2.py @@ -37,7 +37,7 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"z\n\x0bUserContext\x12\x17\n\x07user_id\x18\x01 \x01(\tR\x06userId\x12\x1b\n\tuser_name\x18\x02 \x01(\tR\x08userName\x12\x35\n\nextensions\x18\xe7\x07 \x03(\x0b\x32\x14.google.protobuf.AnyR\nextensions"\xf7\x0c\n\x12\x41nalyzePlanRequest\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12$\n\x0b\x63lient_type\x18\x03 \x01(\tH\x01R\nclientType\x88\x01\x01\x12\x42\n\x06schema\x18\x04 \x01(\x0b\x32(.spark.connect.AnalyzePlanRequest.SchemaH\x00R\x06schema\x12\x45\n\x07\x65xplain\x18\x05 \x01(\x0b\x32).spark.connect.AnalyzePlanRequest.ExplainH\x00R\x07\x65xplain\x12O\n\x0btree_string\x18\x06 \x01(\x0b\x32,.spark.connect.AnalyzePlanRequest.TreeStringH\x00R\ntreeString\x12\x46\n\x08is_local\x18\x07 \x01(\x0b\x32).spark.connect.AnalyzePlanRequest.IsLocalH\x00R\x07isLocal\x12R\n\x0cis_streaming\x18\x08 \x01(\x0b\x32-.spark.connect.AnalyzePlanRequest.IsStreamingH\x00R\x0bisStreaming\x12O\n\x0binput_files\x18\t \x01(\x0b\x32,.spark.connect.AnalyzePlanRequest.InputFilesH\x00R\ninputFiles\x12U\n\rspark_version\x18\n \x01(\x0b\x32..spark.connect.AnalyzePlanRequest.SparkVersionH\x00R\x0csparkVersion\x12I\n\tddl_parse\x18\x0b \x01(\x0b\x32*.spark.connect.AnalyzePlanRequest.DDLParseH\x00R\x08\x64\x64lParse\x12X\n\x0esame_semantics\x18\x0c \x01(\x0b\x32/.spark.connect.AnalyzePlanRequest.SameSemanticsH\x00R\rsameSemantics\x1a\x31\n\x06Schema\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\xbb\x02\n\x07\x45xplain\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x12X\n\x0c\x65xplain_mode\x18\x02 \x01(\x0e\x32\x35.spark.connect.AnalyzePlanRequest.Explain.ExplainModeR\x0b\x65xplainMode"\xac\x01\n\x0b\x45xplainMode\x12\x1c\n\x18\x45XPLAIN_MODE_UNSPECIFIED\x10\x00\x12\x17\n\x13\x45XPLAIN_MODE_SIMPLE\x10\x01\x12\x19\n\x15\x45XPLAIN_MODE_EXTENDED\x10\x02\x12\x18\n\x14\x45XPLAIN_MODE_CODEGEN\x10\x03\x12\x15\n\x11\x45XPLAIN_MODE_COST\x10\x04\x12\x1a\n\x16\x45XPLAIN_MODE_FORMATTED\x10\x05\x1a\x35\n\nTreeString\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x32\n\x07IsLocal\x12\'\n\x04plan\x18\x01 
\x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x36\n\x0bIsStreaming\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x35\n\nInputFiles\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x0e\n\x0cSparkVersion\x1a)\n\x08\x44\x44LParse\x12\x1d\n\nddl_string\x18\x01 \x01(\tR\tddlString\x1ay\n\rSameSemantics\x12\x34\n\x0btarget_plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\ntargetPlan\x12\x32\n\nother_plan\x18\x02 \x01(\x0b\x32\x13.spark.connect.PlanR\totherPlanB\t\n\x07\x61nalyzeB\x0e\n\x0c_client_type"\xb2\t\n\x13\x41nalyzePlanResponse\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12\x43\n\x06schema\x18\x02 \x01(\x0b\x32).spark.connect.AnalyzePlanResponse.SchemaH\x00R\x06schema\x12\x46\n\x07\x65xplain\x18\x03 \x01(\x0b\x32*.spark.connect.AnalyzePlanResponse.ExplainH\x00R\x07\x65xplain\x12P\n\x0btree_string\x18\x04 \x01(\x0b\x32-.spark.connect.AnalyzePlanResponse.TreeStringH\x00R\ntreeString\x12G\n\x08is_local\x18\x05 \x01(\x0b\x32*.spark.connect.AnalyzePlanResponse.IsLocalH\x00R\x07isLocal\x12S\n\x0cis_streaming\x18\x06 \x01(\x0b\x32..spark.connect.AnalyzePlanResponse.IsStreamingH\x00R\x0bisStreaming\x12P\n\x0binput_files\x18\x07 \x01(\x0b\x32-.spark.connect.AnalyzePlanResponse.InputFilesH\x00R\ninputFiles\x12V\n\rspark_version\x18\x08 \x01(\x0b\x32/.spark.connect.AnalyzePlanResponse.SparkVersionH\x00R\x0csparkVersion\x12J\n\tddl_parse\x18\t \x01(\x0b\x32+.spark.connect.AnalyzePlanResponse.DDLParseH\x00R\x08\x64\x64lParse\x12Y\n\x0esame_semantics\x18\n \x01(\x0b\x32\x30.spark.connect.AnalyzePlanResponse.SameSemanticsH\x00R\rsameSemantics\x1a\x39\n\x06Schema\x12/\n\x06schema\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06schema\x1a\x30\n\x07\x45xplain\x12%\n\x0e\x65xplain_string\x18\x01 \x01(\tR\rexplainString\x1a-\n\nTreeString\x12\x1f\n\x0btree_string\x18\x01 \x01(\tR\ntreeString\x1a$\n\x07IsLocal\x12\x19\n\x08is_local\x18\x01 \x01(\x08R\x07isLocal\x1a\x30\n\x0bIsStreaming\x12!\n\x0cis_streaming\x18\x01 \x01(\x08R\x0bisStreaming\x1a"\n\nInputFiles\x12\x14\n\x05\x66iles\x18\x01 \x03(\tR\x05\x66iles\x1a(\n\x0cSparkVersion\x12\x18\n\x07version\x18\x01 \x01(\tR\x07version\x1a;\n\x08\x44\x44LParse\x12/\n\x06parsed\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06parsed\x1a\'\n\rSameSemantics\x12\x16\n\x06result\x18\x01 \x01(\x08R\x06resultB\x08\n\x06result"\xcf\x01\n\x12\x45xecutePlanRequest\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12\'\n\x04plan\x18\x03 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x12$\n\x0b\x63lient_type\x18\x04 \x01(\tH\x00R\nclientType\x88\x01\x01\x42\x0e\n\x0c_client_type"\xc8\t\n\x13\x45xecutePlanResponse\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12P\n\x0b\x61rrow_batch\x18\x02 \x01(\x0b\x32-.spark.connect.ExecutePlanResponse.ArrowBatchH\x00R\narrowBatch\x12\x63\n\x12sql_command_result\x18\x05 \x01(\x0b\x32\x33.spark.connect.ExecutePlanResponse.SqlCommandResultH\x00R\x10sqlCommandResult\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textension\x12\x44\n\x07metrics\x18\x04 \x01(\x0b\x32*.spark.connect.ExecutePlanResponse.MetricsR\x07metrics\x12]\n\x10observed_metrics\x18\x06 \x03(\x0b\x32\x32.spark.connect.ExecutePlanResponse.ObservedMetricsR\x0fobservedMetrics\x1aG\n\x10SqlCommandResult\x12\x33\n\x08relation\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x08relation\x1a=\n\nArrowBatch\x12\x1b\n\trow_count\x18\x01 
\x01(\x03R\x08rowCount\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x1a\x85\x04\n\x07Metrics\x12Q\n\x07metrics\x18\x01 \x03(\x0b\x32\x37.spark.connect.ExecutePlanResponse.Metrics.MetricObjectR\x07metrics\x1a\xcc\x02\n\x0cMetricObject\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x17\n\x07plan_id\x18\x02 \x01(\x03R\x06planId\x12\x16\n\x06parent\x18\x03 \x01(\x03R\x06parent\x12z\n\x11\x65xecution_metrics\x18\x04 \x03(\x0b\x32M.spark.connect.ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntryR\x10\x65xecutionMetrics\x1a{\n\x15\x45xecutionMetricsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12L\n\x05value\x18\x02 \x01(\x0b\x32\x36.spark.connect.ExecutePlanResponse.Metrics.MetricValueR\x05value:\x02\x38\x01\x1aX\n\x0bMetricValue\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x14\n\x05value\x18\x02 \x01(\x03R\x05value\x12\x1f\n\x0bmetric_type\x18\x03 \x01(\tR\nmetricType\x1a`\n\x0fObservedMetrics\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x39\n\x06values\x18\x02 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x06valuesB\x0f\n\rresponse_type"A\n\x08KeyValue\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x19\n\x05value\x18\x02 \x01(\tH\x00R\x05value\x88\x01\x01\x42\x08\n\x06_value"\x82\x08\n\rConfigRequest\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12\x44\n\toperation\x18\x03 \x01(\x0b\x32&.spark.connect.ConfigRequest.OperationR\toperation\x12$\n\x0b\x63lient_type\x18\x04 \x01(\tH\x00R\nclientType\x88\x01\x01\x1a\xf2\x03\n\tOperation\x12\x34\n\x03set\x18\x01 \x01(\x0b\x32 .spark.connect.ConfigRequest.SetH\x00R\x03set\x12\x34\n\x03get\x18\x02 \x01(\x0b\x32 .spark.connect.ConfigRequest.GetH\x00R\x03get\x12W\n\x10get_with_default\x18\x03 \x01(\x0b\x32+.spark.connect.ConfigRequest.GetWithDefaultH\x00R\x0egetWithDefault\x12G\n\nget_option\x18\x04 \x01(\x0b\x32&.spark.connect.ConfigRequest.GetOptionH\x00R\tgetOption\x12>\n\x07get_all\x18\x05 \x01(\x0b\x32#.spark.connect.ConfigRequest.GetAllH\x00R\x06getAll\x12:\n\x05unset\x18\x06 \x01(\x0b\x32".spark.connect.ConfigRequest.UnsetH\x00R\x05unset\x12P\n\ris_modifiable\x18\x07 \x01(\x0b\x32).spark.connect.ConfigRequest.IsModifiableH\x00R\x0cisModifiableB\t\n\x07op_type\x1a\x34\n\x03Set\x12-\n\x05pairs\x18\x01 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x1a\x19\n\x03Get\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a?\n\x0eGetWithDefault\x12-\n\x05pairs\x18\x01 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x1a\x1f\n\tGetOption\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a\x30\n\x06GetAll\x12\x1b\n\x06prefix\x18\x01 \x01(\tH\x00R\x06prefix\x88\x01\x01\x42\t\n\x07_prefix\x1a\x1b\n\x05Unset\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a"\n\x0cIsModifiable\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keysB\x0e\n\x0c_client_type"x\n\x0e\x43onfigResponse\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12-\n\x05pairs\x18\x02 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x12\x1a\n\x08warnings\x18\x03 \x03(\tR\x08warnings"\xaf\x06\n\x13\x41\x64\x64\x41rtifactsRequest\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12@\n\x05\x62\x61tch\x18\x03 \x01(\x0b\x32(.spark.connect.AddArtifactsRequest.BatchH\x00R\x05\x62\x61tch\x12Z\n\x0b\x62\x65gin_chunk\x18\x04 \x01(\x0b\x32\x37.spark.connect.AddArtifactsRequest.BeginChunkedArtifactH\x00R\nbeginChunk\x12H\n\x05\x63hunk\x18\x05 
\x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkH\x00R\x05\x63hunk\x1a\x35\n\rArtifactChunk\x12\x12\n\x04\x64\x61ta\x18\x01 \x01(\x0cR\x04\x64\x61ta\x12\x10\n\x03\x63rc\x18\x02 \x01(\x03R\x03\x63rc\x1ao\n\x13SingleChunkArtifact\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x44\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkR\x04\x64\x61ta\x1a]\n\x05\x42\x61tch\x12T\n\tartifacts\x18\x01 \x03(\x0b\x32\x36.spark.connect.AddArtifactsRequest.SingleChunkArtifactR\tartifacts\x1a\xc1\x01\n\x14\x42\x65ginChunkedArtifact\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n\x0btotal_bytes\x18\x02 \x01(\x03R\ntotalBytes\x12\x1d\n\nnum_chunks\x18\x03 \x01(\x03R\tnumChunks\x12U\n\rinitial_chunk\x18\x04 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkR\x0cinitialChunkB\t\n\x07payload"\xbc\x01\n\x14\x41\x64\x64\x41rtifactsResponse\x12Q\n\tartifacts\x18\x01 \x03(\x0b\x32\x33.spark.connect.AddArtifactsResponse.ArtifactSummaryR\tartifacts\x1aQ\n\x0f\x41rtifactSummary\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12*\n\x11is_crc_successful\x18\x02 \x01(\x08R\x0fisCrcSuccessful2\xed\x02\n\x13SparkConnectService\x12X\n\x0b\x45xecutePlan\x12!.spark.connect.ExecutePlanRequest\x1a".spark.connect.ExecutePlanResponse"\x00\x30\x01\x12V\n\x0b\x41nalyzePlan\x12!.spark.connect.AnalyzePlanRequest\x1a".spark.connect.AnalyzePlanResponse"\x00\x12G\n\x06\x43onfig\x12\x1c.spark.connect.ConfigRequest\x1a\x1d.spark.connect.ConfigResponse"\x00\x12[\n\x0c\x41\x64\x64\x41rtifacts\x12".spark.connect.AddArtifactsRequest\x1a#.spark.connect.AddArtifactsResponse"\x00(\x01\x42"\n\x1eorg.apache.spark.connect.protoP\x01\x62\x06proto3' + b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"z\n\x0bUserContext\x12\x17\n\x07user_id\x18\x01 \x01(\tR\x06userId\x12\x1b\n\tuser_name\x18\x02 \x01(\tR\x08userName\x12\x35\n\nextensions\x18\xe7\x07 \x03(\x0b\x32\x14.google.protobuf.AnyR\nextensions"\xf9\x0c\n\x12\x41nalyzePlanRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12$\n\x0b\x63lient_type\x18\x03 \x01(\tH\x01R\nclientType\x88\x01\x01\x12\x42\n\x06schema\x18\x04 \x01(\x0b\x32(.spark.connect.AnalyzePlanRequest.SchemaH\x00R\x06schema\x12\x45\n\x07\x65xplain\x18\x05 \x01(\x0b\x32).spark.connect.AnalyzePlanRequest.ExplainH\x00R\x07\x65xplain\x12O\n\x0btree_string\x18\x06 \x01(\x0b\x32,.spark.connect.AnalyzePlanRequest.TreeStringH\x00R\ntreeString\x12\x46\n\x08is_local\x18\x07 \x01(\x0b\x32).spark.connect.AnalyzePlanRequest.IsLocalH\x00R\x07isLocal\x12R\n\x0cis_streaming\x18\x08 \x01(\x0b\x32-.spark.connect.AnalyzePlanRequest.IsStreamingH\x00R\x0bisStreaming\x12O\n\x0binput_files\x18\t \x01(\x0b\x32,.spark.connect.AnalyzePlanRequest.InputFilesH\x00R\ninputFiles\x12U\n\rspark_version\x18\n \x01(\x0b\x32..spark.connect.AnalyzePlanRequest.SparkVersionH\x00R\x0csparkVersion\x12I\n\tddl_parse\x18\x0b \x01(\x0b\x32*.spark.connect.AnalyzePlanRequest.DDLParseH\x00R\x08\x64\x64lParse\x12X\n\x0esame_semantics\x18\x0c 
\x01(\x0b\x32/.spark.connect.AnalyzePlanRequest.SameSemanticsH\x00R\rsameSemantics\x1a\x31\n\x06Schema\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\xbb\x02\n\x07\x45xplain\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x12X\n\x0c\x65xplain_mode\x18\x02 \x01(\x0e\x32\x35.spark.connect.AnalyzePlanRequest.Explain.ExplainModeR\x0b\x65xplainMode"\xac\x01\n\x0b\x45xplainMode\x12\x1c\n\x18\x45XPLAIN_MODE_UNSPECIFIED\x10\x00\x12\x17\n\x13\x45XPLAIN_MODE_SIMPLE\x10\x01\x12\x19\n\x15\x45XPLAIN_MODE_EXTENDED\x10\x02\x12\x18\n\x14\x45XPLAIN_MODE_CODEGEN\x10\x03\x12\x15\n\x11\x45XPLAIN_MODE_COST\x10\x04\x12\x1a\n\x16\x45XPLAIN_MODE_FORMATTED\x10\x05\x1a\x35\n\nTreeString\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x32\n\x07IsLocal\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x36\n\x0bIsStreaming\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x35\n\nInputFiles\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x0e\n\x0cSparkVersion\x1a)\n\x08\x44\x44LParse\x12\x1d\n\nddl_string\x18\x01 \x01(\tR\tddlString\x1ay\n\rSameSemantics\x12\x34\n\x0btarget_plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\ntargetPlan\x12\x32\n\nother_plan\x18\x02 \x01(\x0b\x32\x13.spark.connect.PlanR\totherPlanB\t\n\x07\x61nalyzeB\x0e\n\x0c_client_type"\xb4\t\n\x13\x41nalyzePlanResponse\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12\x43\n\x06schema\x18\x02 \x01(\x0b\x32).spark.connect.AnalyzePlanResponse.SchemaH\x00R\x06schema\x12\x46\n\x07\x65xplain\x18\x03 \x01(\x0b\x32*.spark.connect.AnalyzePlanResponse.ExplainH\x00R\x07\x65xplain\x12P\n\x0btree_string\x18\x04 \x01(\x0b\x32-.spark.connect.AnalyzePlanResponse.TreeStringH\x00R\ntreeString\x12G\n\x08is_local\x18\x05 \x01(\x0b\x32*.spark.connect.AnalyzePlanResponse.IsLocalH\x00R\x07isLocal\x12S\n\x0cis_streaming\x18\x06 \x01(\x0b\x32..spark.connect.AnalyzePlanResponse.IsStreamingH\x00R\x0bisStreaming\x12P\n\x0binput_files\x18\x07 \x01(\x0b\x32-.spark.connect.AnalyzePlanResponse.InputFilesH\x00R\ninputFiles\x12V\n\rspark_version\x18\x08 \x01(\x0b\x32/.spark.connect.AnalyzePlanResponse.SparkVersionH\x00R\x0csparkVersion\x12J\n\tddl_parse\x18\t \x01(\x0b\x32+.spark.connect.AnalyzePlanResponse.DDLParseH\x00R\x08\x64\x64lParse\x12Y\n\x0esame_semantics\x18\n \x01(\x0b\x32\x30.spark.connect.AnalyzePlanResponse.SameSemanticsH\x00R\rsameSemantics\x1a\x39\n\x06Schema\x12/\n\x06schema\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06schema\x1a\x30\n\x07\x45xplain\x12%\n\x0e\x65xplain_string\x18\x01 \x01(\tR\rexplainString\x1a-\n\nTreeString\x12\x1f\n\x0btree_string\x18\x01 \x01(\tR\ntreeString\x1a$\n\x07IsLocal\x12\x19\n\x08is_local\x18\x01 \x01(\x08R\x07isLocal\x1a\x30\n\x0bIsStreaming\x12!\n\x0cis_streaming\x18\x01 \x01(\x08R\x0bisStreaming\x1a"\n\nInputFiles\x12\x14\n\x05\x66iles\x18\x01 \x03(\tR\x05\x66iles\x1a(\n\x0cSparkVersion\x12\x18\n\x07version\x18\x01 \x01(\tR\x07version\x1a;\n\x08\x44\x44LParse\x12/\n\x06parsed\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06parsed\x1a\'\n\rSameSemantics\x12\x16\n\x06result\x18\x01 \x01(\x08R\x06resultB\x08\n\x06result"\xd1\x01\n\x12\x45xecutePlanRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12\'\n\x04plan\x18\x03 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x12$\n\x0b\x63lient_type\x18\x04 
\x01(\tH\x00R\nclientType\x88\x01\x01\x42\x0e\n\x0c_client_type"\xca\t\n\x13\x45xecutePlanResponse\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12P\n\x0b\x61rrow_batch\x18\x02 \x01(\x0b\x32-.spark.connect.ExecutePlanResponse.ArrowBatchH\x00R\narrowBatch\x12\x63\n\x12sql_command_result\x18\x05 \x01(\x0b\x32\x33.spark.connect.ExecutePlanResponse.SqlCommandResultH\x00R\x10sqlCommandResult\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textension\x12\x44\n\x07metrics\x18\x04 \x01(\x0b\x32*.spark.connect.ExecutePlanResponse.MetricsR\x07metrics\x12]\n\x10observed_metrics\x18\x06 \x03(\x0b\x32\x32.spark.connect.ExecutePlanResponse.ObservedMetricsR\x0fobservedMetrics\x1aG\n\x10SqlCommandResult\x12\x33\n\x08relation\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x08relation\x1a=\n\nArrowBatch\x12\x1b\n\trow_count\x18\x01 \x01(\x03R\x08rowCount\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x1a\x85\x04\n\x07Metrics\x12Q\n\x07metrics\x18\x01 \x03(\x0b\x32\x37.spark.connect.ExecutePlanResponse.Metrics.MetricObjectR\x07metrics\x1a\xcc\x02\n\x0cMetricObject\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x17\n\x07plan_id\x18\x02 \x01(\x03R\x06planId\x12\x16\n\x06parent\x18\x03 \x01(\x03R\x06parent\x12z\n\x11\x65xecution_metrics\x18\x04 \x03(\x0b\x32M.spark.connect.ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntryR\x10\x65xecutionMetrics\x1a{\n\x15\x45xecutionMetricsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12L\n\x05value\x18\x02 \x01(\x0b\x32\x36.spark.connect.ExecutePlanResponse.Metrics.MetricValueR\x05value:\x02\x38\x01\x1aX\n\x0bMetricValue\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x14\n\x05value\x18\x02 \x01(\x03R\x05value\x12\x1f\n\x0bmetric_type\x18\x03 \x01(\tR\nmetricType\x1a`\n\x0fObservedMetrics\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x39\n\x06values\x18\x02 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x06valuesB\x0f\n\rresponse_type"A\n\x08KeyValue\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x19\n\x05value\x18\x02 \x01(\tH\x00R\x05value\x88\x01\x01\x42\x08\n\x06_value"\x84\x08\n\rConfigRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12\x44\n\toperation\x18\x03 \x01(\x0b\x32&.spark.connect.ConfigRequest.OperationR\toperation\x12$\n\x0b\x63lient_type\x18\x04 \x01(\tH\x00R\nclientType\x88\x01\x01\x1a\xf2\x03\n\tOperation\x12\x34\n\x03set\x18\x01 \x01(\x0b\x32 .spark.connect.ConfigRequest.SetH\x00R\x03set\x12\x34\n\x03get\x18\x02 \x01(\x0b\x32 .spark.connect.ConfigRequest.GetH\x00R\x03get\x12W\n\x10get_with_default\x18\x03 \x01(\x0b\x32+.spark.connect.ConfigRequest.GetWithDefaultH\x00R\x0egetWithDefault\x12G\n\nget_option\x18\x04 \x01(\x0b\x32&.spark.connect.ConfigRequest.GetOptionH\x00R\tgetOption\x12>\n\x07get_all\x18\x05 \x01(\x0b\x32#.spark.connect.ConfigRequest.GetAllH\x00R\x06getAll\x12:\n\x05unset\x18\x06 \x01(\x0b\x32".spark.connect.ConfigRequest.UnsetH\x00R\x05unset\x12P\n\ris_modifiable\x18\x07 \x01(\x0b\x32).spark.connect.ConfigRequest.IsModifiableH\x00R\x0cisModifiableB\t\n\x07op_type\x1a\x34\n\x03Set\x12-\n\x05pairs\x18\x01 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x1a\x19\n\x03Get\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a?\n\x0eGetWithDefault\x12-\n\x05pairs\x18\x01 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x1a\x1f\n\tGetOption\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a\x30\n\x06GetAll\x12\x1b\n\x06prefix\x18\x01 
\x01(\tH\x00R\x06prefix\x88\x01\x01\x42\t\n\x07_prefix\x1a\x1b\n\x05Unset\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a"\n\x0cIsModifiable\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keysB\x0e\n\x0c_client_type"z\n\x0e\x43onfigResponse\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12-\n\x05pairs\x18\x02 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x12\x1a\n\x08warnings\x18\x03 \x03(\tR\x08warnings"\xb1\x06\n\x13\x41\x64\x64\x41rtifactsRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12@\n\x05\x62\x61tch\x18\x03 \x01(\x0b\x32(.spark.connect.AddArtifactsRequest.BatchH\x00R\x05\x62\x61tch\x12Z\n\x0b\x62\x65gin_chunk\x18\x04 \x01(\x0b\x32\x37.spark.connect.AddArtifactsRequest.BeginChunkedArtifactH\x00R\nbeginChunk\x12H\n\x05\x63hunk\x18\x05 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkH\x00R\x05\x63hunk\x1a\x35\n\rArtifactChunk\x12\x12\n\x04\x64\x61ta\x18\x01 \x01(\x0cR\x04\x64\x61ta\x12\x10\n\x03\x63rc\x18\x02 \x01(\x03R\x03\x63rc\x1ao\n\x13SingleChunkArtifact\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x44\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkR\x04\x64\x61ta\x1a]\n\x05\x42\x61tch\x12T\n\tartifacts\x18\x01 \x03(\x0b\x32\x36.spark.connect.AddArtifactsRequest.SingleChunkArtifactR\tartifacts\x1a\xc1\x01\n\x14\x42\x65ginChunkedArtifact\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n\x0btotal_bytes\x18\x02 \x01(\x03R\ntotalBytes\x12\x1d\n\nnum_chunks\x18\x03 \x01(\x03R\tnumChunks\x12U\n\rinitial_chunk\x18\x04 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkR\x0cinitialChunkB\t\n\x07payload"\xbc\x01\n\x14\x41\x64\x64\x41rtifactsResponse\x12Q\n\tartifacts\x18\x01 \x03(\x0b\x32\x33.spark.connect.AddArtifactsResponse.ArtifactSummaryR\tartifacts\x1aQ\n\x0f\x41rtifactSummary\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12*\n\x11is_crc_successful\x18\x02 \x01(\x08R\x0fisCrcSuccessful2\xed\x02\n\x13SparkConnectService\x12X\n\x0b\x45xecutePlan\x12!.spark.connect.ExecutePlanRequest\x1a".spark.connect.ExecutePlanResponse"\x00\x30\x01\x12V\n\x0b\x41nalyzePlan\x12!.spark.connect.AnalyzePlanRequest\x1a".spark.connect.AnalyzePlanResponse"\x00\x12G\n\x06\x43onfig\x12\x1c.spark.connect.ConfigRequest\x1a\x1d.spark.connect.ConfigResponse"\x00\x12[\n\x0c\x41\x64\x64\x41rtifacts\x12".spark.connect.AddArtifactsRequest\x1a#.spark.connect.AddArtifactsResponse"\x00(\x01\x42"\n\x1eorg.apache.spark.connect.protoP\x01\x62\x06proto3' ) @@ -620,101 +620,101 @@ _USERCONTEXT._serialized_start = 309 _USERCONTEXT._serialized_end = 431 _ANALYZEPLANREQUEST._serialized_start = 434 - _ANALYZEPLANREQUEST._serialized_end = 2089 - _ANALYZEPLANREQUEST_SCHEMA._serialized_start = 1295 - _ANALYZEPLANREQUEST_SCHEMA._serialized_end = 1344 - _ANALYZEPLANREQUEST_EXPLAIN._serialized_start = 1347 - _ANALYZEPLANREQUEST_EXPLAIN._serialized_end = 1662 - _ANALYZEPLANREQUEST_EXPLAIN_EXPLAINMODE._serialized_start = 1490 - _ANALYZEPLANREQUEST_EXPLAIN_EXPLAINMODE._serialized_end = 1662 - _ANALYZEPLANREQUEST_TREESTRING._serialized_start = 1664 - _ANALYZEPLANREQUEST_TREESTRING._serialized_end = 1717 - _ANALYZEPLANREQUEST_ISLOCAL._serialized_start = 1719 - _ANALYZEPLANREQUEST_ISLOCAL._serialized_end = 1769 - _ANALYZEPLANREQUEST_ISSTREAMING._serialized_start = 1771 - _ANALYZEPLANREQUEST_ISSTREAMING._serialized_end = 1825 - _ANALYZEPLANREQUEST_INPUTFILES._serialized_start = 1827 - _ANALYZEPLANREQUEST_INPUTFILES._serialized_end = 1880 - 
_ANALYZEPLANREQUEST_SPARKVERSION._serialized_start = 1882 - _ANALYZEPLANREQUEST_SPARKVERSION._serialized_end = 1896 - _ANALYZEPLANREQUEST_DDLPARSE._serialized_start = 1898 - _ANALYZEPLANREQUEST_DDLPARSE._serialized_end = 1939 - _ANALYZEPLANREQUEST_SAMESEMANTICS._serialized_start = 1941 - _ANALYZEPLANREQUEST_SAMESEMANTICS._serialized_end = 2062 - _ANALYZEPLANRESPONSE._serialized_start = 2092 - _ANALYZEPLANRESPONSE._serialized_end = 3294 - _ANALYZEPLANRESPONSE_SCHEMA._serialized_start = 2862 - _ANALYZEPLANRESPONSE_SCHEMA._serialized_end = 2919 - _ANALYZEPLANRESPONSE_EXPLAIN._serialized_start = 2921 - _ANALYZEPLANRESPONSE_EXPLAIN._serialized_end = 2969 - _ANALYZEPLANRESPONSE_TREESTRING._serialized_start = 2971 - _ANALYZEPLANRESPONSE_TREESTRING._serialized_end = 3016 - _ANALYZEPLANRESPONSE_ISLOCAL._serialized_start = 3018 - _ANALYZEPLANRESPONSE_ISLOCAL._serialized_end = 3054 - _ANALYZEPLANRESPONSE_ISSTREAMING._serialized_start = 3056 - _ANALYZEPLANRESPONSE_ISSTREAMING._serialized_end = 3104 - _ANALYZEPLANRESPONSE_INPUTFILES._serialized_start = 3106 - _ANALYZEPLANRESPONSE_INPUTFILES._serialized_end = 3140 - _ANALYZEPLANRESPONSE_SPARKVERSION._serialized_start = 3142 - _ANALYZEPLANRESPONSE_SPARKVERSION._serialized_end = 3182 - _ANALYZEPLANRESPONSE_DDLPARSE._serialized_start = 3184 - _ANALYZEPLANRESPONSE_DDLPARSE._serialized_end = 3243 - _ANALYZEPLANRESPONSE_SAMESEMANTICS._serialized_start = 3245 - _ANALYZEPLANRESPONSE_SAMESEMANTICS._serialized_end = 3284 - _EXECUTEPLANREQUEST._serialized_start = 3297 - _EXECUTEPLANREQUEST._serialized_end = 3504 - _EXECUTEPLANRESPONSE._serialized_start = 3507 - _EXECUTEPLANRESPONSE._serialized_end = 4731 - _EXECUTEPLANRESPONSE_SQLCOMMANDRESULT._serialized_start = 3962 - _EXECUTEPLANRESPONSE_SQLCOMMANDRESULT._serialized_end = 4033 - _EXECUTEPLANRESPONSE_ARROWBATCH._serialized_start = 4035 - _EXECUTEPLANRESPONSE_ARROWBATCH._serialized_end = 4096 - _EXECUTEPLANRESPONSE_METRICS._serialized_start = 4099 - _EXECUTEPLANRESPONSE_METRICS._serialized_end = 4616 - _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT._serialized_start = 4194 - _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT._serialized_end = 4526 - _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY._serialized_start = 4403 - _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY._serialized_end = 4526 - _EXECUTEPLANRESPONSE_METRICS_METRICVALUE._serialized_start = 4528 - _EXECUTEPLANRESPONSE_METRICS_METRICVALUE._serialized_end = 4616 - _EXECUTEPLANRESPONSE_OBSERVEDMETRICS._serialized_start = 4618 - _EXECUTEPLANRESPONSE_OBSERVEDMETRICS._serialized_end = 4714 - _KEYVALUE._serialized_start = 4733 - _KEYVALUE._serialized_end = 4798 - _CONFIGREQUEST._serialized_start = 4801 - _CONFIGREQUEST._serialized_end = 5827 - _CONFIGREQUEST_OPERATION._serialized_start = 5019 - _CONFIGREQUEST_OPERATION._serialized_end = 5517 - _CONFIGREQUEST_SET._serialized_start = 5519 - _CONFIGREQUEST_SET._serialized_end = 5571 - _CONFIGREQUEST_GET._serialized_start = 5573 - _CONFIGREQUEST_GET._serialized_end = 5598 - _CONFIGREQUEST_GETWITHDEFAULT._serialized_start = 5600 - _CONFIGREQUEST_GETWITHDEFAULT._serialized_end = 5663 - _CONFIGREQUEST_GETOPTION._serialized_start = 5665 - _CONFIGREQUEST_GETOPTION._serialized_end = 5696 - _CONFIGREQUEST_GETALL._serialized_start = 5698 - _CONFIGREQUEST_GETALL._serialized_end = 5746 - _CONFIGREQUEST_UNSET._serialized_start = 5748 - _CONFIGREQUEST_UNSET._serialized_end = 5775 - _CONFIGREQUEST_ISMODIFIABLE._serialized_start = 5777 - _CONFIGREQUEST_ISMODIFIABLE._serialized_end = 5811 - 
_CONFIGRESPONSE._serialized_start = 5829 - _CONFIGRESPONSE._serialized_end = 5949 - _ADDARTIFACTSREQUEST._serialized_start = 5952 - _ADDARTIFACTSREQUEST._serialized_end = 6767 - _ADDARTIFACTSREQUEST_ARTIFACTCHUNK._serialized_start = 6299 - _ADDARTIFACTSREQUEST_ARTIFACTCHUNK._serialized_end = 6352 - _ADDARTIFACTSREQUEST_SINGLECHUNKARTIFACT._serialized_start = 6354 - _ADDARTIFACTSREQUEST_SINGLECHUNKARTIFACT._serialized_end = 6465 - _ADDARTIFACTSREQUEST_BATCH._serialized_start = 6467 - _ADDARTIFACTSREQUEST_BATCH._serialized_end = 6560 - _ADDARTIFACTSREQUEST_BEGINCHUNKEDARTIFACT._serialized_start = 6563 - _ADDARTIFACTSREQUEST_BEGINCHUNKEDARTIFACT._serialized_end = 6756 - _ADDARTIFACTSRESPONSE._serialized_start = 6770 - _ADDARTIFACTSRESPONSE._serialized_end = 6958 - _ADDARTIFACTSRESPONSE_ARTIFACTSUMMARY._serialized_start = 6877 - _ADDARTIFACTSRESPONSE_ARTIFACTSUMMARY._serialized_end = 6958 - _SPARKCONNECTSERVICE._serialized_start = 6961 - _SPARKCONNECTSERVICE._serialized_end = 7326 + _ANALYZEPLANREQUEST._serialized_end = 2091 + _ANALYZEPLANREQUEST_SCHEMA._serialized_start = 1297 + _ANALYZEPLANREQUEST_SCHEMA._serialized_end = 1346 + _ANALYZEPLANREQUEST_EXPLAIN._serialized_start = 1349 + _ANALYZEPLANREQUEST_EXPLAIN._serialized_end = 1664 + _ANALYZEPLANREQUEST_EXPLAIN_EXPLAINMODE._serialized_start = 1492 + _ANALYZEPLANREQUEST_EXPLAIN_EXPLAINMODE._serialized_end = 1664 + _ANALYZEPLANREQUEST_TREESTRING._serialized_start = 1666 + _ANALYZEPLANREQUEST_TREESTRING._serialized_end = 1719 + _ANALYZEPLANREQUEST_ISLOCAL._serialized_start = 1721 + _ANALYZEPLANREQUEST_ISLOCAL._serialized_end = 1771 + _ANALYZEPLANREQUEST_ISSTREAMING._serialized_start = 1773 + _ANALYZEPLANREQUEST_ISSTREAMING._serialized_end = 1827 + _ANALYZEPLANREQUEST_INPUTFILES._serialized_start = 1829 + _ANALYZEPLANREQUEST_INPUTFILES._serialized_end = 1882 + _ANALYZEPLANREQUEST_SPARKVERSION._serialized_start = 1884 + _ANALYZEPLANREQUEST_SPARKVERSION._serialized_end = 1898 + _ANALYZEPLANREQUEST_DDLPARSE._serialized_start = 1900 + _ANALYZEPLANREQUEST_DDLPARSE._serialized_end = 1941 + _ANALYZEPLANREQUEST_SAMESEMANTICS._serialized_start = 1943 + _ANALYZEPLANREQUEST_SAMESEMANTICS._serialized_end = 2064 + _ANALYZEPLANRESPONSE._serialized_start = 2094 + _ANALYZEPLANRESPONSE._serialized_end = 3298 + _ANALYZEPLANRESPONSE_SCHEMA._serialized_start = 2866 + _ANALYZEPLANRESPONSE_SCHEMA._serialized_end = 2923 + _ANALYZEPLANRESPONSE_EXPLAIN._serialized_start = 2925 + _ANALYZEPLANRESPONSE_EXPLAIN._serialized_end = 2973 + _ANALYZEPLANRESPONSE_TREESTRING._serialized_start = 2975 + _ANALYZEPLANRESPONSE_TREESTRING._serialized_end = 3020 + _ANALYZEPLANRESPONSE_ISLOCAL._serialized_start = 3022 + _ANALYZEPLANRESPONSE_ISLOCAL._serialized_end = 3058 + _ANALYZEPLANRESPONSE_ISSTREAMING._serialized_start = 3060 + _ANALYZEPLANRESPONSE_ISSTREAMING._serialized_end = 3108 + _ANALYZEPLANRESPONSE_INPUTFILES._serialized_start = 3110 + _ANALYZEPLANRESPONSE_INPUTFILES._serialized_end = 3144 + _ANALYZEPLANRESPONSE_SPARKVERSION._serialized_start = 3146 + _ANALYZEPLANRESPONSE_SPARKVERSION._serialized_end = 3186 + _ANALYZEPLANRESPONSE_DDLPARSE._serialized_start = 3188 + _ANALYZEPLANRESPONSE_DDLPARSE._serialized_end = 3247 + _ANALYZEPLANRESPONSE_SAMESEMANTICS._serialized_start = 3249 + _ANALYZEPLANRESPONSE_SAMESEMANTICS._serialized_end = 3288 + _EXECUTEPLANREQUEST._serialized_start = 3301 + _EXECUTEPLANREQUEST._serialized_end = 3510 + _EXECUTEPLANRESPONSE._serialized_start = 3513 + _EXECUTEPLANRESPONSE._serialized_end = 4739 + _EXECUTEPLANRESPONSE_SQLCOMMANDRESULT._serialized_start = 
3970 + _EXECUTEPLANRESPONSE_SQLCOMMANDRESULT._serialized_end = 4041 + _EXECUTEPLANRESPONSE_ARROWBATCH._serialized_start = 4043 + _EXECUTEPLANRESPONSE_ARROWBATCH._serialized_end = 4104 + _EXECUTEPLANRESPONSE_METRICS._serialized_start = 4107 + _EXECUTEPLANRESPONSE_METRICS._serialized_end = 4624 + _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT._serialized_start = 4202 + _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT._serialized_end = 4534 + _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY._serialized_start = 4411 + _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY._serialized_end = 4534 + _EXECUTEPLANRESPONSE_METRICS_METRICVALUE._serialized_start = 4536 + _EXECUTEPLANRESPONSE_METRICS_METRICVALUE._serialized_end = 4624 + _EXECUTEPLANRESPONSE_OBSERVEDMETRICS._serialized_start = 4626 + _EXECUTEPLANRESPONSE_OBSERVEDMETRICS._serialized_end = 4722 + _KEYVALUE._serialized_start = 4741 + _KEYVALUE._serialized_end = 4806 + _CONFIGREQUEST._serialized_start = 4809 + _CONFIGREQUEST._serialized_end = 5837 + _CONFIGREQUEST_OPERATION._serialized_start = 5029 + _CONFIGREQUEST_OPERATION._serialized_end = 5527 + _CONFIGREQUEST_SET._serialized_start = 5529 + _CONFIGREQUEST_SET._serialized_end = 5581 + _CONFIGREQUEST_GET._serialized_start = 5583 + _CONFIGREQUEST_GET._serialized_end = 5608 + _CONFIGREQUEST_GETWITHDEFAULT._serialized_start = 5610 + _CONFIGREQUEST_GETWITHDEFAULT._serialized_end = 5673 + _CONFIGREQUEST_GETOPTION._serialized_start = 5675 + _CONFIGREQUEST_GETOPTION._serialized_end = 5706 + _CONFIGREQUEST_GETALL._serialized_start = 5708 + _CONFIGREQUEST_GETALL._serialized_end = 5756 + _CONFIGREQUEST_UNSET._serialized_start = 5758 + _CONFIGREQUEST_UNSET._serialized_end = 5785 + _CONFIGREQUEST_ISMODIFIABLE._serialized_start = 5787 + _CONFIGREQUEST_ISMODIFIABLE._serialized_end = 5821 + _CONFIGRESPONSE._serialized_start = 5839 + _CONFIGRESPONSE._serialized_end = 5961 + _ADDARTIFACTSREQUEST._serialized_start = 5964 + _ADDARTIFACTSREQUEST._serialized_end = 6781 + _ADDARTIFACTSREQUEST_ARTIFACTCHUNK._serialized_start = 6313 + _ADDARTIFACTSREQUEST_ARTIFACTCHUNK._serialized_end = 6366 + _ADDARTIFACTSREQUEST_SINGLECHUNKARTIFACT._serialized_start = 6368 + _ADDARTIFACTSREQUEST_SINGLECHUNKARTIFACT._serialized_end = 6479 + _ADDARTIFACTSREQUEST_BATCH._serialized_start = 6481 + _ADDARTIFACTSREQUEST_BATCH._serialized_end = 6574 + _ADDARTIFACTSREQUEST_BEGINCHUNKEDARTIFACT._serialized_start = 6577 + _ADDARTIFACTSREQUEST_BEGINCHUNKEDARTIFACT._serialized_end = 6770 + _ADDARTIFACTSRESPONSE._serialized_start = 6784 + _ADDARTIFACTSRESPONSE._serialized_end = 6972 + _ADDARTIFACTSRESPONSE_ARTIFACTSUMMARY._serialized_start = 6891 + _ADDARTIFACTSRESPONSE_ARTIFACTSUMMARY._serialized_end = 6972 + _SPARKCONNECTSERVICE._serialized_start = 6975 + _SPARKCONNECTSERVICE._serialized_end = 7340 # @@protoc_insertion_point(module_scope) diff --git a/python/pyspark/sql/connect/proto/base_pb2.pyi b/python/pyspark/sql/connect/proto/base_pb2.pyi index 2e9a877b658b6..e87194f31aa0b 100644 --- a/python/pyspark/sql/connect/proto/base_pb2.pyi +++ b/python/pyspark/sql/connect/proto/base_pb2.pyi @@ -350,7 +350,7 @@ class AnalyzePlanRequest(google.protobuf.message.Message): ], ) -> None: ... 
- CLIENT_ID_FIELD_NUMBER: builtins.int + SESSION_ID_FIELD_NUMBER: builtins.int USER_CONTEXT_FIELD_NUMBER: builtins.int CLIENT_TYPE_FIELD_NUMBER: builtins.int SCHEMA_FIELD_NUMBER: builtins.int @@ -362,11 +362,12 @@ class AnalyzePlanRequest(google.protobuf.message.Message): SPARK_VERSION_FIELD_NUMBER: builtins.int DDL_PARSE_FIELD_NUMBER: builtins.int SAME_SEMANTICS_FIELD_NUMBER: builtins.int - client_id: builtins.str + session_id: builtins.str """(Required) - The client_id is set by the client to be able to collate streaming responses from - different queries. + The session_id specifies a spark session for a user id (which is specified + by user_context.user_id). The session_id is set by the client to be able to + collate streaming responses from different queries within the dedicated session. """ @property def user_context(self) -> global___UserContext: @@ -397,7 +398,7 @@ class AnalyzePlanRequest(google.protobuf.message.Message): def __init__( self, *, - client_id: builtins.str = ..., + session_id: builtins.str = ..., user_context: global___UserContext | None = ..., client_type: builtins.str | None = ..., schema: global___AnalyzePlanRequest.Schema | None = ..., @@ -448,8 +449,6 @@ class AnalyzePlanRequest(google.protobuf.message.Message): b"_client_type", "analyze", b"analyze", - "client_id", - b"client_id", "client_type", b"client_type", "ddl_parse", @@ -466,6 +465,8 @@ class AnalyzePlanRequest(google.protobuf.message.Message): b"same_semantics", "schema", b"schema", + "session_id", + b"session_id", "spark_version", b"spark_version", "tree_string", @@ -638,7 +639,7 @@ class AnalyzePlanResponse(google.protobuf.message.Message): self, field_name: typing_extensions.Literal["result", b"result"] ) -> None: ... - CLIENT_ID_FIELD_NUMBER: builtins.int + SESSION_ID_FIELD_NUMBER: builtins.int SCHEMA_FIELD_NUMBER: builtins.int EXPLAIN_FIELD_NUMBER: builtins.int TREE_STRING_FIELD_NUMBER: builtins.int @@ -648,7 +649,7 @@ class AnalyzePlanResponse(google.protobuf.message.Message): SPARK_VERSION_FIELD_NUMBER: builtins.int DDL_PARSE_FIELD_NUMBER: builtins.int SAME_SEMANTICS_FIELD_NUMBER: builtins.int - client_id: builtins.str + session_id: builtins.str @property def schema(self) -> global___AnalyzePlanResponse.Schema: ... @property @@ -670,7 +671,7 @@ class AnalyzePlanResponse(google.protobuf.message.Message): def __init__( self, *, - client_id: builtins.str = ..., + session_id: builtins.str = ..., schema: global___AnalyzePlanResponse.Schema | None = ..., explain: global___AnalyzePlanResponse.Explain | None = ..., tree_string: global___AnalyzePlanResponse.TreeString | None = ..., @@ -709,8 +710,6 @@ class AnalyzePlanResponse(google.protobuf.message.Message): def ClearField( self, field_name: typing_extensions.Literal[ - "client_id", - b"client_id", "ddl_parse", b"ddl_parse", "explain", @@ -727,6 +726,8 @@ class AnalyzePlanResponse(google.protobuf.message.Message): b"same_semantics", "schema", b"schema", + "session_id", + b"session_id", "spark_version", b"spark_version", "tree_string", @@ -754,19 +755,24 @@ class ExecutePlanRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - CLIENT_ID_FIELD_NUMBER: builtins.int + SESSION_ID_FIELD_NUMBER: builtins.int USER_CONTEXT_FIELD_NUMBER: builtins.int PLAN_FIELD_NUMBER: builtins.int CLIENT_TYPE_FIELD_NUMBER: builtins.int - client_id: builtins.str + session_id: builtins.str """(Required) - The client_id is set by the client to be able to collate streaming responses from - different queries. 
+    The session_id specifies a spark session for a user id (which is specified
+    by user_context.user_id). The session_id is set by the client to be able to
+    collate streaming responses from different queries within the dedicated session.
     """
     @property
     def user_context(self) -> global___UserContext:
-        """(Required) User context"""
+        """(Required) User context
+
+        user_context.user_id and session_id both identify a unique remote spark session on the
+        server side.
+        """
     @property
     def plan(self) -> global___Plan:
         """(Required) The logical plan to be executed / analyzed."""
@@ -778,7 +784,7 @@ class ExecutePlanRequest(google.protobuf.message.Message):
     def __init__(
         self,
         *,
-        client_id: builtins.str = ...,
+        session_id: builtins.str = ...,
         user_context: global___UserContext | None = ...,
         plan: global___Plan | None = ...,
         client_type: builtins.str | None = ...,
@@ -801,12 +807,12 @@ class ExecutePlanRequest(google.protobuf.message.Message):
         field_name: typing_extensions.Literal[
             "_client_type",
             b"_client_type",
-            "client_id",
-            b"client_id",
             "client_type",
             b"client_type",
             "plan",
             b"plan",
+            "session_id",
+            b"session_id",
             "user_context",
             b"user_context",
         ],
@@ -819,7 +825,7 @@ global___ExecutePlanRequest = ExecutePlanRequest

 class ExecutePlanResponse(google.protobuf.message.Message):
     """The response of a query, can be one or more for each request. Responses belonging to the
-    same input query, carry the same `client_id`.
+    same input query, carry the same `session_id`.
     """

     DESCRIPTOR: google.protobuf.descriptor.Descriptor
@@ -995,13 +1001,13 @@ class ExecutePlanResponse(google.protobuf.message.Message):
             self, field_name: typing_extensions.Literal["name", b"name", "values", b"values"]
         ) -> None: ...

-    CLIENT_ID_FIELD_NUMBER: builtins.int
+    SESSION_ID_FIELD_NUMBER: builtins.int
     ARROW_BATCH_FIELD_NUMBER: builtins.int
     SQL_COMMAND_RESULT_FIELD_NUMBER: builtins.int
     EXTENSION_FIELD_NUMBER: builtins.int
     METRICS_FIELD_NUMBER: builtins.int
     OBSERVED_METRICS_FIELD_NUMBER: builtins.int
-    client_id: builtins.str
+    session_id: builtins.str
     @property
     def arrow_batch(self) -> global___ExecutePlanResponse.ArrowBatch: ...
     @property
@@ -1025,7 +1031,7 @@ class ExecutePlanResponse(google.protobuf.message.Message):
     def __init__(
         self,
         *,
-        client_id: builtins.str = ...,
+        session_id: builtins.str = ...,
         arrow_batch: global___ExecutePlanResponse.ArrowBatch | None = ...,
         sql_command_result: global___ExecutePlanResponse.SqlCommandResult | None = ...,
         extension: google.protobuf.any_pb2.Any | None = ...,
@@ -1053,8 +1059,6 @@ class ExecutePlanResponse(google.protobuf.message.Message):
         field_name: typing_extensions.Literal[
             "arrow_batch",
             b"arrow_batch",
-            "client_id",
-            b"client_id",
             "extension",
             b"extension",
             "metrics",
@@ -1063,6 +1067,8 @@ class ExecutePlanResponse(google.protobuf.message.Message):
             b"observed_metrics",
             "response_type",
             b"response_type",
+            "session_id",
+            b"session_id",
             "sql_command_result",
             b"sql_command_result",
         ],
@@ -1310,15 +1316,16 @@ class ConfigRequest(google.protobuf.message.Message):
         ) -> None: ...
         def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ...

-    CLIENT_ID_FIELD_NUMBER: builtins.int
+    SESSION_ID_FIELD_NUMBER: builtins.int
     USER_CONTEXT_FIELD_NUMBER: builtins.int
     OPERATION_FIELD_NUMBER: builtins.int
     CLIENT_TYPE_FIELD_NUMBER: builtins.int
-    client_id: builtins.str
+    session_id: builtins.str
     """(Required)

-    The client_id is set by the client to be able to collate streaming responses from
-    different queries.
+    The session_id specifies a spark session for a user id (which is specified
+    by user_context.user_id). The session_id is set by the client to be able to
+    collate streaming responses from different queries within the dedicated session.
     """
     @property
     def user_context(self) -> global___UserContext:
@@ -1334,7 +1341,7 @@ class ConfigRequest(google.protobuf.message.Message):
     def __init__(
         self,
         *,
-        client_id: builtins.str = ...,
+        session_id: builtins.str = ...,
         user_context: global___UserContext | None = ...,
         operation: global___ConfigRequest.Operation | None = ...,
         client_type: builtins.str | None = ...,
@@ -1357,12 +1364,12 @@ class ConfigRequest(google.protobuf.message.Message):
         field_name: typing_extensions.Literal[
             "_client_type",
             b"_client_type",
-            "client_id",
-            b"client_id",
             "client_type",
             b"client_type",
             "operation",
             b"operation",
+            "session_id",
+            b"session_id",
             "user_context",
             b"user_context",
         ],
@@ -1378,10 +1385,10 @@ class ConfigResponse(google.protobuf.message.Message):

     DESCRIPTOR: google.protobuf.descriptor.Descriptor

-    CLIENT_ID_FIELD_NUMBER: builtins.int
+    SESSION_ID_FIELD_NUMBER: builtins.int
     PAIRS_FIELD_NUMBER: builtins.int
     WARNINGS_FIELD_NUMBER: builtins.int
-    client_id: builtins.str
+    session_id: builtins.str
     @property
     def pairs(
         self,
@@ -1402,14 +1409,14 @@ class ConfigResponse(google.protobuf.message.Message):
     def __init__(
         self,
         *,
-        client_id: builtins.str = ...,
+        session_id: builtins.str = ...,
         pairs: collections.abc.Iterable[global___KeyValue] | None = ...,
         warnings: collections.abc.Iterable[builtins.str] | None = ...,
     ) -> None: ...
     def ClearField(
         self,
         field_name: typing_extensions.Literal[
-            "client_id", b"client_id", "pairs", b"pairs", "warnings", b"warnings"
+            "pairs", b"pairs", "session_id", b"session_id", "warnings", b"warnings"
         ],
     ) -> None: ...
@@ -1546,14 +1553,17 @@ class AddArtifactsRequest(google.protobuf.message.Message):
             ],
         ) -> None: ...

-    CLIENT_ID_FIELD_NUMBER: builtins.int
+    SESSION_ID_FIELD_NUMBER: builtins.int
     USER_CONTEXT_FIELD_NUMBER: builtins.int
     BATCH_FIELD_NUMBER: builtins.int
     BEGIN_CHUNK_FIELD_NUMBER: builtins.int
     CHUNK_FIELD_NUMBER: builtins.int
-    client_id: builtins.str
-    """The client_id is set by the client to be able to collate streaming responses from
-    different queries.
+    session_id: builtins.str
+    """(Required)
+
+    The session_id specifies a spark session for a user id (which is specified
+    by user_context.user_id). The session_id is set by the client to be able to
+    collate streaming responses from different queries within the dedicated session.
""" @property def user_context(self) -> global___UserContext: @@ -1574,7 +1584,7 @@ class AddArtifactsRequest(google.protobuf.message.Message): def __init__( self, *, - client_id: builtins.str = ..., + session_id: builtins.str = ..., user_context: global___UserContext | None = ..., batch: global___AddArtifactsRequest.Batch | None = ..., begin_chunk: global___AddArtifactsRequest.BeginChunkedArtifact | None = ..., @@ -1604,10 +1614,10 @@ class AddArtifactsRequest(google.protobuf.message.Message): b"begin_chunk", "chunk", b"chunk", - "client_id", - b"client_id", "payload", b"payload", + "session_id", + b"session_id", "user_context", b"user_context", ], diff --git a/python/pyspark/sql/tests/connect/test_client.py b/python/pyspark/sql/tests/connect/test_client.py index 84281a6764f81..6131e146363c1 100644 --- a/python/pyspark/sql/tests/connect/test_client.py +++ b/python/pyspark/sql/tests/connect/test_client.py @@ -64,7 +64,7 @@ def __init__(self, session_id: str): def ExecutePlan(self, req: proto.ExecutePlanRequest, metadata): self.req = req resp = proto.ExecutePlanResponse() - resp.client_id = self._session_id + resp.session_id = self._session_id pdf = pd.DataFrame(data={"col1": [1, 2]}) schema = pa.Schema.from_pandas(pdf)