Created
July 29, 2021 07:24
-
-
Save nsivabalan/7f02c5c6dfc6f63ef5852ce0f4cd8e4d to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| > CREATE OR REPLACE TABLE delta_ext ( | |
| > public BOOLEAN, | |
| > repo_id BIGINT, | |
| > repo_name STRING, | |
| > repo_url STRING, | |
| > payload STRING, | |
| > created_at TIMESTAMP, | |
| > id STRING, | |
| > other STRING, | |
| > randomId DOUBLE, | |
| > date_col STRING, | |
| > type STRING) | |
| > USING DELTA location 's3a://siva-test-bucket-june-16/hudi_testing/gh_arch_dump/delta_1' | |
| > PARTITIONED BY (type); | |
| 21/07/29 07:14:46 ERROR Utils: Aborting task | |
| java.lang.RuntimeException: java.lang.ClassNotFoundException: Class org.apache.hadoop.fs.s3a.S3AFileSystem not found | |
| at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2595) | |
| at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3269) | |
| at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3301) | |
| at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124) | |
| at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3352) | |
| at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3320) | |
| at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:479) | |
| at org.apache.hadoop.fs.Path.getFileSystem(Path.java:361) | |
| at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.run(CreateDeltaTableCommand.scala:98) | |
| at org.apache.spark.sql.delta.catalog.DeltaCatalog.org$apache$spark$sql$delta$catalog$DeltaCatalog$$createDeltaTable(DeltaCatalog.scala:140) | |
| at org.apache.spark.sql.delta.catalog.DeltaCatalog$StagedDeltaTableV2.commitStagedChanges(DeltaCatalog.scala:328) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.$anonfun$commitOrAbortStagedChanges$1(ReplaceTableExec.scala:82) | |
| at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.commitOrAbortStagedChanges(ReplaceTableExec.scala:81) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.run(ReplaceTableExec.scala:74) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45) | |
| at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229) | |
| at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3618) | |
| at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100) | |
| at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160) | |
| at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) | |
| at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3616) | |
| at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229) | |
| at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97) | |
| at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:607) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:602) | |
| at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:650) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:63) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:377) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:496) | |
| at scala.collection.Iterator.foreach(Iterator.scala:941) | |
| at scala.collection.Iterator.foreach$(Iterator.scala:941) | |
| at scala.collection.AbstractIterator.foreach(Iterator.scala:1429) | |
| at scala.collection.IterableLike.foreach(IterableLike.scala:74) | |
| at scala.collection.IterableLike.foreach$(IterableLike.scala:73) | |
| at scala.collection.AbstractIterable.foreach(Iterable.scala:56) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:490) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:282) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala) | |
| at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
| at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
| at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
| at java.lang.reflect.Method.invoke(Method.java:498) | |
| at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) | |
| at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928) | |
| at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) | |
| at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) | |
| at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) | |
| at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007) | |
| at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016) | |
| at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) | |
| Caused by: java.lang.ClassNotFoundException: Class org.apache.hadoop.fs.s3a.S3AFileSystem not found | |
| at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2499) | |
| at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2593) | |
| ... 58 more | |
| 21/07/29 07:14:46 ERROR SparkSQLDriver: Failed in [CREATE OR REPLACE TABLE delta_ext ( | |
| public BOOLEAN, | |
| repo_id BIGINT, | |
| repo_name STRING, | |
| repo_url STRING, | |
| payload STRING, | |
| created_at TIMESTAMP, | |
| id STRING, | |
| other STRING, | |
| randomId DOUBLE, | |
| date_col STRING, | |
| type STRING) | |
| USING DELTA location 's3a://siva-test-bucket-june-16/hudi_testing/gh_arch_dump/delta_1' | |
| PARTITIONED BY (type)] | |
| java.lang.RuntimeException: java.lang.ClassNotFoundException: Class org.apache.hadoop.fs.s3a.S3AFileSystem not found | |
| at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2595) | |
| at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3269) | |
| at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3301) | |
| at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124) | |
| at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3352) | |
| at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3320) | |
| at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:479) | |
| at org.apache.hadoop.fs.Path.getFileSystem(Path.java:361) | |
| at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.run(CreateDeltaTableCommand.scala:98) | |
| at org.apache.spark.sql.delta.catalog.DeltaCatalog.org$apache$spark$sql$delta$catalog$DeltaCatalog$$createDeltaTable(DeltaCatalog.scala:140) | |
| at org.apache.spark.sql.delta.catalog.DeltaCatalog$StagedDeltaTableV2.commitStagedChanges(DeltaCatalog.scala:328) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.$anonfun$commitOrAbortStagedChanges$1(ReplaceTableExec.scala:82) | |
| at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.commitOrAbortStagedChanges(ReplaceTableExec.scala:81) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.run(ReplaceTableExec.scala:74) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45) | |
| at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229) | |
| at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3618) | |
| at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100) | |
| at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160) | |
| at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) | |
| at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3616) | |
| at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229) | |
| at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97) | |
| at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:607) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:602) | |
| at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:650) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:63) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:377) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:496) | |
| at scala.collection.Iterator.foreach(Iterator.scala:941) | |
| at scala.collection.Iterator.foreach$(Iterator.scala:941) | |
| at scala.collection.AbstractIterator.foreach(Iterator.scala:1429) | |
| at scala.collection.IterableLike.foreach(IterableLike.scala:74) | |
| at scala.collection.IterableLike.foreach$(IterableLike.scala:73) | |
| at scala.collection.AbstractIterable.foreach(Iterable.scala:56) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:490) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:282) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala) | |
| at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
| at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
| at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
| at java.lang.reflect.Method.invoke(Method.java:498) | |
| at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) | |
| at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928) | |
| at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) | |
| at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) | |
| at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) | |
| at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007) | |
| at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016) | |
| at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) | |
| Caused by: java.lang.ClassNotFoundException: Class org.apache.hadoop.fs.s3a.S3AFileSystem not found | |
| at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2499) | |
| at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2593) | |
| ... 58 more | |
| java.lang.RuntimeException: java.lang.ClassNotFoundException: Class org.apache.hadoop.fs.s3a.S3AFileSystem not found | |
| at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2595) | |
| at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3269) | |
| at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3301) | |
| at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124) | |
| at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3352) | |
| at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3320) | |
| at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:479) | |
| at org.apache.hadoop.fs.Path.getFileSystem(Path.java:361) | |
| at org.apache.spark.sql.delta.commands.CreateDeltaTableCommand.run(CreateDeltaTableCommand.scala:98) | |
| at org.apache.spark.sql.delta.catalog.DeltaCatalog.org$apache$spark$sql$delta$catalog$DeltaCatalog$$createDeltaTable(DeltaCatalog.scala:140) | |
| at org.apache.spark.sql.delta.catalog.DeltaCatalog$StagedDeltaTableV2.commitStagedChanges(DeltaCatalog.scala:328) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.$anonfun$commitOrAbortStagedChanges$1(ReplaceTableExec.scala:82) | |
| at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.commitOrAbortStagedChanges(ReplaceTableExec.scala:81) | |
| at org.apache.spark.sql.execution.datasources.v2.AtomicReplaceTableExec.run(ReplaceTableExec.scala:74) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39) | |
| at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45) | |
| at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229) | |
| at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3618) | |
| at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100) | |
| at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160) | |
| at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) | |
| at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3616) | |
| at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229) | |
| at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97) | |
| at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:607) | |
| at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) | |
| at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:602) | |
| at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:650) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:63) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:377) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:496) | |
| at scala.collection.Iterator.foreach(Iterator.scala:941) | |
| at scala.collection.Iterator.foreach$(Iterator.scala:941) | |
| at scala.collection.AbstractIterator.foreach(Iterator.scala:1429) | |
| at scala.collection.IterableLike.foreach(IterableLike.scala:74) | |
| at scala.collection.IterableLike.foreach$(IterableLike.scala:73) | |
| at scala.collection.AbstractIterable.foreach(Iterable.scala:56) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:490) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:282) | |
| at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala) | |
| at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
| at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
| at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
| at java.lang.reflect.Method.invoke(Method.java:498) | |
| at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) | |
| at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928) | |
| at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) | |
| at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) | |
| at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) | |
| at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007) | |
| at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016) | |
| at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) | |
| Caused by: java.lang.ClassNotFoundException: Class org.apache.hadoop.fs.s3a.S3AFileSystem not found | |
| at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2499) | |
| at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2593) | |
| ... 58 more |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.