21/07/22 11:23:32 ERROR SparkSQLDriver: Failed in [create table hudi_gh_ext using hudi location 'file:///tmp/hudi-gh1' as select type, public, payload, repo, actor, org, id, other from gh_raw]
java.lang.NoClassDefFoundError: org/apache/calcite/rel/type/RelDataTypeSystem
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory.get(SemanticAnalyzerFactory.java:318)
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:484)
    at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1317)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1457)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1227)
    at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQLs(HoodieHiveClient.java:458)
    at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQLUsingHiveDriver(HoodieHiveClient.java:448)
    at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQL(HoodieHiveClient.java:426)
    at org.apache.hudi.hive.HoodieHiveClient.createTable(HoodieHiveClient.java:322)
    at org.apache.hudi.hive.HiveSyncTool.syncSchema(HiveSyncTool.java:234)
    at org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:179)
    at org.apache.hudi.hive.HiveSyncTool.doSync(HiveSyncTool.java:130)
    at org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:116)
    at org.apache.hudi.HoodieSparkSqlWriter$.syncHive(HoodieSparkSqlWriter.scala:442)
    at org.apache.hudi.HoodieSparkSqlWriter$.$anonfun$metaSync$2(HoodieSparkSqlWriter.scala:496)
    at org.apache.hudi.HoodieSparkSqlWriter$.$anonfun$metaSync$2$adapted(HoodieSparkSqlWriter.scala:492)
    at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
    at org.apache.hudi.HoodieSparkSqlWriter$.metaSync(HoodieSparkSqlWriter.scala:492)
    at org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:566)
    at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:242)
    at org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand$.run(InsertIntoHoodieTableCommand.scala:97)
    at org.apache.spark.sql.hudi.command.CreateHoodieTableAsSelectCommand.run(CreateHoodieTableAsSelectCommand.scala:77)
    at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:108)
    at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:106)
    at org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:120)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3618)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3616)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229)
    at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
    at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:607)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:602)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:650)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:63)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:377)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:496)
    at scala.collection.Iterator.foreach(Iterator.scala:941)
    at scala.collection.Iterator.foreach$(Iterator.scala:941)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
    at scala.collection.IterableLike.foreach(IterableLike.scala:74)
    at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:490)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:282)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: org.apache.calcite.rel.type.RelDataTypeSystem
    at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 66 more
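Reading of the trace: the CTAS write itself gets as far as the post-commit Hive sync; the NoClassDefFoundError is raised inside Hive's SemanticAnalyzerFactory, which Hudi's HiveSyncTool drives in-process via HoodieHiveClient.updateHiveSQLUsingHiveDriver, and that code path needs Calcite on the Spark driver classpath. A minimal sketch of one workaround, assuming a Spark 3 / Scala 2.12 build with a Hive 2.3.x dependency (the artifact coordinates and versions below are assumptions, not taken from this log), is to put a matching calcite-core jar on the classpath when launching spark-sql:

    # Sketch only: pick the calcite-core version that matches the Hive version
    # actually bundled with this Spark/Hudi build (1.10.0 is what Hive 2.3.x uses).
    spark-sql \
      --packages org.apache.hudi:hudi-spark3-bundle_2.12:0.9.0,org.apache.calcite:calcite-core:1.10.0 \
      --conf spark.serializer=org.apache.spark.serializer.KryoSerializer \
      --conf spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension

Alternatively, depending on the Hudi version, configuring Hive sync to go through the metastore or JDBC instead of the in-process Hive driver (for example hoodie.datasource.hive_sync.mode=hms on newer releases) avoids loading Hive's query compiler, and with it Calcite, inside the Spark driver at all.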
  