I'm using a shared access cluster and getting an error while trying to upload to Qdrant. The connector is configured with QDRANT_GRPC_URL as `qdrant_url` and QDRANT_API_KEY as `api_key`.
The write fails with:

```
[UC_COMMAND_NOT_SUPPORTED.WITHOUT_RECOMMENDATION] The command(s): AppendData are not supported in Unity Catalog. SQLSTATE: 0AKUC
```
```
File , line 65
     48 #embeddings_df = embeddings_df.limit(5)
     50 options = {
     51     "qdrant_url": QDRANT_GRPC_URL,
     52     "api_key": QDRANT_API_KEY,
    (...)
     60     "batch_size":"128",
     61 }
     63 embeddings_df.write.format("io.qdrant.spark.Qdrant").options(**options).mode(
     64     "append"
---> 65 ).save()

File /databricks/spark/python/pyspark/sql/connect/readwriter.py:670, in DataFrameWriter.save(self, path, format, mode, partitionBy, **options)
    668     self.format(format)
    669 self._write.path = path
--> 670 self._spark.client.execute_command(
    671     self._write.command(self._spark.client), self._write.observations
    672 )

File /databricks/spark/python/pyspark/sql/connect/client/core.py:1203, in SparkConnectClient.execute_command(self, command, observations, extra_request_metadata)
   1201 req.user_context.user_id = self._user_id
   1202 req.plan.command.CopyFrom(command)
-> 1203 data, _, _, _, properties = self._execute_and_fetch(
   1204     req, observations or {}, extra_request_metadata
   1205 )
   1206 if data is not None:
   1207     return (data.to_pandas(), properties)

File /databricks/spark/python/pyspark/sql/connect/client/core.py:1624, in SparkConnectClient._execute_and_fetch(self, req, observations, extra_request_metadata, self_destruct)
   1621 schema: Optional[StructType] = None
   1622 properties: Dict[str, Any] = {}
-> 1624 for response in self._execute_and_fetch_as_iterator(
   1625     req, observations, extra_request_metadata or []
   1626 ):
   1627     if isinstance(response, StructType):
   1628         schema = response

File /databricks/spark/python/pyspark/sql/connect/client/core.py:1601, in SparkConnectClient._execute_and_fetch_as_iterator(self, req, observations, extra_request_metadata)
   1599     yield from handle_response(b)
   1600 except Exception as error:
-> 1601     self._handle_error(error)

File /databricks/spark/python/pyspark/sql/connect/client/core.py:1910, in SparkConnectClient._handle_error(self, error)
   1908 self.thread_local.inside_error_handling = True
   1909 if isinstance(error, grpc.RpcError):
-> 1910     self._handle_rpc_error(error)
   1911 elif isinstance(error, ValueError):
   1912     if "Cannot invoke RPC" in str(error) and "closed" in str(error):

File /databricks/spark/python/pyspark/sql/connect/client/core.py:1985, in SparkConnectClient._handle_rpc_error(self, rpc_error)
   1982 info = error_details_pb2.ErrorInfo()
   1983 d.Unpack(info)
-> 1985 raise convert_exception(
   1986     info,
   1987     status.message,
   1988     self._fetch_enriched_error(info),
   1989     self._display_server_stack_trace(),
   1990 ) from None
   1992 raise SparkConnectGrpcException(status.message) from None
```
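For context, the options elided in the traceback (lines 53–59 of the cell) are just the connector's collection and vector settings. A sketch of the full write, with placeholder collection/field names (my real values differ) and option keys as documented in the qdrant-spark README:

```python
# Sketch of the full write cell. The "collection_name" and "embedding_field"
# values are placeholders; option keys follow the qdrant-spark README.
options = {
    "qdrant_url": QDRANT_GRPC_URL,        # gRPC endpoint
    "api_key": QDRANT_API_KEY,
    "collection_name": "my_collection",   # placeholder
    "embedding_field": "embedding",       # placeholder vector column
    "schema": embeddings_df.schema.json(),
    "batch_size": "128",
}

embeddings_df.write.format("io.qdrant.spark.Qdrant").options(**options).mode(
    "append"
).save()
```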
Is there any way to make this work on a shared access cluster? It works fine on my personal cluster.
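In case it's relevant to an answer: the fallback I'm experimenting with is to bypass the Spark connector entirely, and therefore the AppendData command that Unity Catalog rejects, by upserting from the driver with the qdrant-client Python package. A minimal sketch, assuming the table fits in driver memory and using placeholder column/collection names:

```python
from qdrant_client import QdrantClient
from qdrant_client.models import PointStruct

# Plain Python client; prefer_grpc to match the gRPC URL used above.
client = QdrantClient(url=QDRANT_GRPC_URL, api_key=QDRANT_API_KEY, prefer_grpc=True)

# toPandas() pulls everything to the driver -- only viable for modest tables.
pdf = embeddings_df.toPandas()

batch_size = 128
for start in range(0, len(pdf), batch_size):
    chunk = pdf.iloc[start : start + batch_size]
    points = [
        PointStruct(
            id=int(row["id"]),              # Qdrant ids must be ints or UUIDs
            vector=list(row["embedding"]),  # placeholder vector column
            payload={"text": row["text"]},  # placeholder payload column
        )
        for _, row in chunk.iterrows()
    ]
    client.upsert(collection_name="my_collection", points=points)  # placeholder
```

This avoids the DataSource V2 write path, but I'd rather keep the distributed connector if there's a supported way to run it on a shared cluster.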