Created
June 12, 2021 13:48
-
-
Save nsivabalan/9a276c3c0dc8a2ac93310fedccaa2fea to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
21/06/10 21:50:27 ERROR HoodieTestSuiteJob: Failed to run Test Suite
java.util.concurrent.ExecutionException: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.PartitionedFile.<init>(Lorg/apache/spark/sql/catalyst/InternalRow;Ljava/lang/String;JJ[Ljava/lang/String;)V
at java.util.concurrent.FutureTask.report(FutureTask.java:122) | |
at java.util.concurrent.FutureTask.get(FutureTask.java:206) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.execute(DagScheduler.java:113) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.schedule(DagScheduler.java:68) | |
at org.apache.hudi.integ.testsuite.HoodieTestSuiteJob.runTestSuite(HoodieTestSuiteJob.java:203) | |
at org.apache.hudi.integ.testsuite.HoodieTestSuiteJob.main(HoodieTestSuiteJob.java:170) | |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) | |
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:853) | |
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161) | |
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184) | |
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86) | |
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:928) | |
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:937) | |
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) | |
Caused by: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.PartitionedFile.<init>(Lorg/apache/spark/sql/catalyst/InternalRow;Ljava/lang/String;JJ[Ljava/lang/String;)V
at org.apache.hudi.MergeOnReadSnapshotRelation$$anonfun$5.apply(MergeOnReadSnapshotRelation.scala:173) | |
at org.apache.hudi.MergeOnReadSnapshotRelation$$anonfun$5.apply(MergeOnReadSnapshotRelation.scala:168) | |
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) | |
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) | |
at scala.collection.AbstractIterable.foreach(Iterable.scala:54) | |
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) | |
at scala.collection.AbstractTraversable.map(Traversable.scala:104) | |
at org.apache.hudi.MergeOnReadSnapshotRelation.buildFileIndex(MergeOnReadSnapshotRelation.scala:168) | |
at org.apache.hudi.MergeOnReadSnapshotRelation.buildScan(MergeOnReadSnapshotRelation.scala:97) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$11.apply(DataSourceStrategy.scala:315) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$11.apply(DataSourceStrategy.scala:315) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:348) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:347) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy.pruneFilterProjectRaw(DataSourceStrategy.scala:403) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy.pruneFilterProject(DataSourceStrategy.scala:343) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy.apply(DataSourceStrategy.scala:311) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:63) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:63) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:100) | |
at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:90) | |
at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:106) | |
at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:105) | |
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withAction(Dataset.scala:3388) | |
at org.apache.spark.sql.Dataset.count(Dataset.scala:2838) | |
at org.apache.hudi.integ.testsuite.dag.nodes.ValidateDatasetNode.execute(ValidateDatasetNode.java:111) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.executeNode(DagScheduler.java:139) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.lambda$execute$0(DagScheduler.java:105) | |
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) | |
at java.util.concurrent.FutureTask.run(FutureTask.java:266) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Exception in thread "main" org.apache.hudi.exception.HoodieException: Failed to run Test Suite
at org.apache.hudi.integ.testsuite.HoodieTestSuiteJob.runTestSuite(HoodieTestSuiteJob.java:208) | |
at org.apache.hudi.integ.testsuite.HoodieTestSuiteJob.main(HoodieTestSuiteJob.java:170) | |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) | |
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:853) | |
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161) | |
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184) | |
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86) | |
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:928) | |
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:937) | |
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) | |
Caused by: java.util.concurrent.ExecutionException: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.PartitionedFile.<init>(Lorg/apache/spark/sql/catalyst/InternalRow;Ljava/lang/String;JJ[Ljava/lang/String;)V
at java.util.concurrent.FutureTask.report(FutureTask.java:122) | |
at java.util.concurrent.FutureTask.get(FutureTask.java:206) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.execute(DagScheduler.java:113) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.schedule(DagScheduler.java:68) | |
at org.apache.hudi.integ.testsuite.HoodieTestSuiteJob.runTestSuite(HoodieTestSuiteJob.java:203) | |
... 13 more | |
Caused by: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.PartitionedFile.<init>(Lorg/apache/spark/sql/catalyst/InternalRow;Ljava/lang/String;JJ[Ljava/lang/String;)V
at org.apache.hudi.MergeOnReadSnapshotRelation$$anonfun$5.apply(MergeOnReadSnapshotRelation.scala:173) | |
at org.apache.hudi.MergeOnReadSnapshotRelation$$anonfun$5.apply(MergeOnReadSnapshotRelation.scala:168) | |
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) | |
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) | |
at scala.collection.AbstractIterable.foreach(Iterable.scala:54) | |
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) | |
at scala.collection.AbstractTraversable.map(Traversable.scala:104) | |
at org.apache.hudi.MergeOnReadSnapshotRelation.buildFileIndex(MergeOnReadSnapshotRelation.scala:168) | |
at org.apache.hudi.MergeOnReadSnapshotRelation.buildScan(MergeOnReadSnapshotRelation.scala:97) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$11.apply(DataSourceStrategy.scala:315) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$11.apply(DataSourceStrategy.scala:315) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:348) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:347) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy.pruneFilterProjectRaw(DataSourceStrategy.scala:403) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy.pruneFilterProject(DataSourceStrategy.scala:343) | |
at org.apache.spark.sql.execution.datasources.DataSourceStrategy.apply(DataSourceStrategy.scala:311) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:63) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:63) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:78) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2$$anonfun$apply$2.apply(QueryPlanner.scala:75) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) | |
at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1334) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:75) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$2.apply(QueryPlanner.scala:67) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93) | |
at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69) | |
at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:100) | |
at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:90) | |
at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:106) | |
at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:105) | |
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withAction(Dataset.scala:3388) | |
at org.apache.spark.sql.Dataset.count(Dataset.scala:2838) | |
at org.apache.hudi.integ.testsuite.dag.nodes.ValidateDatasetNode.execute(ValidateDatasetNode.java:111) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.executeNode(DagScheduler.java:139) | |
at org.apache.hudi.integ.testsuite.dag.scheduler.DagScheduler.lambda$execute$0(DagScheduler.java:105) | |
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) | |
at java.util.concurrent.FutureTask.run(FutureTask.java:266) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.