Hi, I have added com.springml:spark-sftp_2.11:1.1.3 and com.jcraft:jsch:0.1.55 to the Databricks cluster. I have also tried the /dbfs/tmp and /dbfs/dbfs/tmp temp-location workaround, but I keep getting the same error. If anyone has resolved this issue, could you please give me some pointers?
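For reference, this is the read call from the traceback below, reformatted here for readability; the host, username, pem path, and remote path are blank in the original and shown as labelled placeholders:

df = (
    spark.read.format("com.springml.spark.sftp")
    .option("host", "<sftp-host>")                # placeholder, blank in the original
    .option("username", "<sftp-user>")            # placeholder, blank in the original
    .option("pem", "<path-to-private-key.pem>")   # placeholder, blank in the original
    .option("tempLocation", "/dbfs/tmp/")
    .option("hdfsTempLocation", "/dbfs/dbfs/tmp/")
    .option("fileType", "parquet")
    .option("createDF", "true")
    .load("<remote-parquet-path>")                # placeholder, blank in the original
)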
The load fails with com.jcraft.jsch.JSchException: Algorithm negotiation fail. Full traceback:
Py4JJavaError Traceback (most recent call last)
File :1
----> 1 df = spark.read \
2 .format("com.springml.spark.sftp") \
3 .option("host", "") \
4 .option("username", "") \
5 .option("pem", "") \
6 .option("tempLocation", "/dbfs/tmp/") \
7 .option("hdfsTempLocation", "/dbfs/dbfs/tmp/") \
8 .option("fileType", "parquet") \
9 .option("createDF", "true") \
10 .load("")
File /databricks/spark/python/pyspark/instrumentation_utils.py:48, in _wrap_function.<locals>.wrapper(*args, **kwargs)
46 start = time.perf_counter()
47 try:
---> 48 res = func(*args, **kwargs)
49 logger.log_success(
50 module_name, class_name, function_name, time.perf_counter() - start, signature
51 )
52 return res
File /databricks/spark/python/pyspark/sql/readwriter.py:302, in DataFrameReader.load(self, path, format, schema, **options)
300 self.options(**options)
301 if isinstance(path, str):
--> 302 return self._df(self._jreader.load(path))
303 elif path is not None:
304 if type(path) != list:
File /databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py:1321, in JavaMember.__call__(self, *args)
1315 command = proto.CALL_COMMAND_NAME +\
1316 self.command_header +\
1317 args_command +\
1318 proto.END_COMMAND_PART
1320 answer = self.gateway_client.send_command(command)
-> 1321 return_value = get_return_value(
1322 answer, self.gateway_client, self.target_id, self.name)
1324 for temp_arg in temp_args:
1325 temp_arg._detach()
File /databricks/spark/python/pyspark/errors/exceptions.py:228, in capture_sql_exception.<locals>.deco(*a, **kw)
226 def deco(*a: Any, **kw: Any) -> Any:
227 try:
--> 228 return f(*a, **kw)
229 except Py4JJavaError as e:
230 converted = convert_exception(e.java_exception)
File /databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/protocol.py:326, in get_return_value(answer, gateway_client, target_id, name)
324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
327 "An error occurred while calling {0}{1}{2}.\n".
328 format(target_id, ".", name), value)
329 else:
330 raise Py4JError(
331 "An error occurred while calling {0}{1}{2}. Trace:\n{3}\n".
332 format(target_id, ".", name, value))
Py4JJavaError: An error occurred while calling o747.load.
: com.jcraft.jsch.JSchException: Algorithm negotiation fail
at com.jcraft.jsch.Session.receive_kexinit(Session.java:582)
at com.jcraft.jsch.Session.connect(Session.java:320)
at com.jcraft.jsch.Session.connect(Session.java:183)
at com.springml.sftp.client.SFTPClient.createSFTPChannel(SFTPClient.java:275)
at com.springml.sftp.client.SFTPClient.copy(SFTPClient.java:73)
at com.springml.spark.sftp.DefaultSource.copy(DefaultSource.scala:212)
at com.springml.spark.sftp.DefaultSource.createRelation(DefaultSource.scala:80)
at com.springml.spark.sftp.DefaultSource.createRelation(DefaultSource.scala:41)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:387)
at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:378)
at org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:334)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:334)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:240)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
at py4j.Gateway.invoke(Gateway.java:306)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:195)
at py4j.ClientServerConnection.run(ClientServerConnection.java:115)
at java.lang.Thread.run(Thread.java:750)