TestInsertTable | |
Test Different Type of Partition Column | |
Expected Array([1,a1,10,2021-05-20 01:00:00], [2,a2,10,2021-05-20 01:00:00]), but got Array([1,a1,10.0,2021-05-19 13:00:00], [2,a2,10.0,2021-05-19 13:00:00]) | |
ScalaTestFailureLocation: org.apache.spark.sql.hudi.TestHoodieSqlBase at (TestHoodieSqlBase.scala:84) | |
org.scalatest.exceptions.TestFailedException: Expected Array([1,a1,10,2021-05-20 01:00:00], [2,a2,10,2021-05-20 01:00:00]), but got Array([1,a1,10.0,2021-05-19 13:00:00], [2,a2,10.0,2021-05-19 13:00:00]) | |
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528) | |
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560) | |
at org.scalatest.Assertions$class.assertResult(Assertions.scala:1023) | |
at org.scalatest.FunSuite.assertResult(FunSuite.scala:1560) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.checkAnswer(TestHoodieSqlBase.scala:84) | |
at org.apache.spark.sql.hudi.TestInsertTable.org$apache$spark$sql$hudi$TestInsertTable$$validateDifferentTypesOfPartitionColumn(TestInsertTable.scala:277) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$4$$anonfun$apply$mcV$sp$4$$anonfun$apply$2.apply(TestInsertTable.scala:243) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$4$$anonfun$apply$mcV$sp$4$$anonfun$apply$2.apply(TestInsertTable.scala:241) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$4$$anonfun$apply$mcV$sp$4.apply(TestInsertTable.scala:241) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$4$$anonfun$apply$mcV$sp$4.apply(TestInsertTable.scala:233) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.withTempDir(TestHoodieSqlBase.scala:56) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$4.apply$mcV$sp(TestInsertTable.scala:233) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$4.apply(TestInsertTable.scala:233) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$4.apply(TestInsertTable.scala:233) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase$$anonfun$test$1.apply(TestHoodieSqlBase.scala:62) | |
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) | |
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) | |
at org.scalatest.Transformer.apply(Transformer.scala:22) | |
at org.scalatest.Transformer.apply(Transformer.scala:20) | |
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186) | |
at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196) | |
at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289) | |
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196) | |
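Note: the expected and actual rows above differ in two ways. The price column renders as 10 vs 10.0 (integer vs double rendering), and the partition timestamp is off by exactly 12 hours (2021-05-20 01:00:00 vs 2021-05-19 13:00:00). The standalone Scala sketch below (not part of the test suite) just quantifies that gap with java.time; a session/JVM time-zone mismatch between where the expected values were produced and where the test ran is one plausible explanation, though that is an assumption.

```scala
// Minimal sketch (not from the suite): quantify the gap between the expected and
// actual partition timestamps reported in the assertion failure above.
import java.time.{Duration, LocalDateTime}
import java.time.format.DateTimeFormatter

object TimestampGap {
  def main(args: Array[String]): Unit = {
    val fmt      = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
    val expected = LocalDateTime.parse("2021-05-20 01:00:00", fmt)
    val actual   = LocalDateTime.parse("2021-05-19 13:00:00", fmt)
    // Prints PT12H: the rows differ by exactly 12 hours, which is what a
    // time-zone offset (rather than corrupted data) would produce.
    println(Duration.between(actual, expected))
  }
}
```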
Test TimestampType Partition Column With Consistent Logical Timestamp Enabled | |
Expected Array([1,a1,10,2021-05-20 01:00:00], [2,a2,10,2021-05-20 01:00:00]), but got Array([1,a1,10.0,2021-05-19 13:00:00], [2,a2,10.0,2021-05-19 13:00:00]) | |
ScalaTestFailureLocation: org.apache.spark.sql.hudi.TestHoodieSqlBase at (TestHoodieSqlBase.scala:84) | |
org.scalatest.exceptions.TestFailedException: Expected Array([1,a1,10,2021-05-20 01:00:00], [2,a2,10,2021-05-20 01:00:00]), but got Array([1,a1,10.0,2021-05-19 13:00:00], [2,a2,10.0,2021-05-19 13:00:00]) | |
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528) | |
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560) | |
at org.scalatest.Assertions$class.assertResult(Assertions.scala:1023) | |
at org.scalatest.FunSuite.assertResult(FunSuite.scala:1560) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.checkAnswer(TestHoodieSqlBase.scala:84) | |
at org.apache.spark.sql.hudi.TestInsertTable.org$apache$spark$sql$hudi$TestInsertTable$$validateDifferentTypesOfPartitionColumn(TestInsertTable.scala:277) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$5$$anonfun$apply$mcV$sp$5$$anonfun$apply$3.apply(TestInsertTable.scala:257) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$5$$anonfun$apply$mcV$sp$5$$anonfun$apply$3.apply(TestInsertTable.scala:254) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$5$$anonfun$apply$mcV$sp$5.apply(TestInsertTable.scala:254) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$5$$anonfun$apply$mcV$sp$5.apply(TestInsertTable.scala:249) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.withTempDir(TestHoodieSqlBase.scala:56) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$5.apply$mcV$sp(TestInsertTable.scala:249) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$5.apply(TestInsertTable.scala:249) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$5.apply(TestInsertTable.scala:249) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase$$anonfun$test$1.apply(TestHoodieSqlBase.scala:62) | |
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) | |
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) | |
at org.scalatest.Transformer.apply(Transformer.scala:22) | |
at org.scalatest.Transformer.apply(Transformer.scala:20) | |
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186) | |
at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196) | |
at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183) | |
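The same 12-hour shift shows up in this consistent-logical-timestamp run. If the cause really is the environment's time zone, one way to make the comparison deterministic is to pin both the JVM default and the Spark session time zone before asserting. The sketch below uses the standard Spark config key spark.sql.session.timeZone; whether the suite should pin UTC (or any zone at all) is an assumption, not something taken from the test code.

```scala
// Hedged sketch, not the suite's actual setup: pin the JVM default and the Spark
// session time zone so TimestampType partition values render the same everywhere.
import java.util.TimeZone
import org.apache.spark.sql.SparkSession

object PinTimeZone {
  def main(args: Array[String]): Unit = {
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"))
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("pin-timezone")
      .config("spark.sql.session.timeZone", "UTC")
      .getOrCreate()
    println(spark.conf.get("spark.sql.session.timeZone")) // UTC
    spark.stop()
  }
}
```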
Test Insert timestamp when 'spark.sql.datetime.java8API.enabled' enables | |
Expected Array([1,a1,10,2021-05-07 01:00:00]), but got Array([1,a1,10.0,2021-05-06 13:00:00]) | |
ScalaTestFailureLocation: org.apache.spark.sql.hudi.TestHoodieSqlBase at (TestHoodieSqlBase.scala:84) | |
org.scalatest.exceptions.TestFailedException: Expected Array([1,a1,10,2021-05-07 01:00:00]), but got Array([1,a1,10.0,2021-05-06 13:00:00]) | |
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528) | |
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560) | |
at org.scalatest.Assertions$class.assertResult(Assertions.scala:1023) | |
at org.scalatest.FunSuite.assertResult(FunSuite.scala:1560) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.checkAnswer(TestHoodieSqlBase.scala:84) | |
at org.apache.spark.sql.hudi.TestInsertTable$$anonfun$10.apply(TestInsertTable.scala:396) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase$$anonfun$test$1.apply(TestHoodieSqlBase.scala:62) | |
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) | |
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) | |
at org.scalatest.Transformer.apply(Transformer.scala:22) | |
at org.scalatest.Transformer.apply(Transformer.scala:20) | |
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186) | |
at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196) | |
at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
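For reference, a small standalone sketch of the flag this test exercises. In Spark versions that support spark.sql.datetime.java8API.enabled (3.0+), collected TimestampType values come back as java.time.Instant rather than java.sql.Timestamp, which makes string renderings of rows sensitive to the session time zone. Only the flag name comes from the test title above; everything else here is illustrative.

```scala
// Sketch of the java8 datetime API flag named in the failing test; not the test itself.
import org.apache.spark.sql.SparkSession

object Java8ApiDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("java8-api-demo")
      .config("spark.sql.datetime.java8API.enabled", "true")
      .config("spark.sql.session.timeZone", "UTC")
      .getOrCreate()
    val row = spark.sql("select timestamp '2021-05-07 01:00:00' as ts").collect().head
    // With the flag on (Spark 3.x), the collected value is a java.time.Instant.
    println(row.get(0).getClass.getName)
    spark.stop()
  }
}
```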
Exception encountered when invoking run on a nested suite - Cannot read properties from dfs from file file:/Users/nsb/Documents/personal/projects/apache_hudi_dec/hudi/src/test/resources/external-config/hudi-defaults.conf | |
org.apache.hudi.exception.HoodieIOException: Cannot read properties from dfs from file file:/Users/nsb/Documents/personal/projects/apache_hudi_dec/hudi/src/test/resources/external-config/hudi-defaults.conf | |
at org.apache.hudi.common.config.DFSPropertiesConfiguration.addPropsFromFile(DFSPropertiesConfiguration.java:141) | |
at org.apache.hudi.common.config.DFSPropertiesConfiguration.loadGlobalProps(DFSPropertiesConfiguration.java:100) | |
at org.apache.hudi.common.config.DFSPropertiesConfiguration.refreshGlobalProps(DFSPropertiesConfiguration.java:108) | |
at org.apache.spark.sql.hudi.TestSqlConf$$anonfun$2.apply$mcV$sp(TestSqlConf.scala:98) | |
at org.apache.spark.sql.hudi.TestSqlConf$$anonfun$2.apply(TestSqlConf.scala:95) | |
at org.apache.spark.sql.hudi.TestSqlConf$$anonfun$2.apply(TestSqlConf.scala:95) | |
at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200) | |
at org.apache.spark.sql.hudi.TestSqlConf.runTest(TestSqlConf.scala:31) | |
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396) | |
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384) | |
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379) | |
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461) | |
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229) | |
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560) | |
at org.scalatest.Suite$class.run(Suite.scala:1147) | |
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
at org.scalatest.SuperEngine.runImpl(Engine.scala:521) | |
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.org$scalatest$BeforeAndAfterAll$$super$run(TestHoodieSqlBase.scala:30) | |
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213) | |
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210) | |
at org.apache.spark.sql.hudi.TestSqlConf.org$scalatest$BeforeAndAfter$$super$run(TestSqlConf.scala:31) | |
at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258) | |
at org.apache.spark.sql.hudi.TestSqlConf.run(TestSqlConf.scala:31) | |
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45) | |
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340) | |
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334) | |
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011) | |
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010) | |
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500) | |
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010) | |
at org.scalatest.tools.Runner$.run(Runner.scala:850) | |
at org.scalatest.tools.Runner.run(Runner.scala) | |
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38) | |
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25) | |
Caused by: java.io.FileNotFoundException: File file:/Users/nsb/Documents/personal/projects/apache_hudi_dec/hudi/src/test/resources/external-config/hudi-defaults.conf does not exist | |
at org.apache.hadoop.fs.RawLocalFileSystem.deprecatedGetFileStatus(RawLocalFileSystem.java:611) | |
at org.apache.hadoop.fs.RawLocalFileSystem.getFileLinkStatusInternal(RawLocalFileSystem.java:824) | |
at org.apache.hadoop.fs.RawLocalFileSystem.getFileStatus(RawLocalFileSystem.java:601) | |
at org.apache.hadoop.fs.FilterFileSystem.getFileStatus(FilterFileSystem.java:421) | |
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSInputChecker.<init>(ChecksumFileSystem.java:142) | |
at org.apache.hadoop.fs.ChecksumFileSystem.open(ChecksumFileSystem.java:346) | |
at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:769) | |
at org.apache.hudi.common.config.DFSPropertiesConfiguration.addPropsFromFile(DFSPropertiesConfiguration.java:135) | |
... 42 more | |
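This nested-suite failure is just a missing file: DFSPropertiesConfiguration tries to load global defaults from src/test/resources/external-config/hudi-defaults.conf and the file is not there. Below is a hedged sketch of one way to guard against that; the relative path, the placeholder property, and the idea of creating the file up front are assumptions, not what TestSqlConf actually does (if the Hudi version in use supports it, the HUDI_CONF_DIR environment variable can also point Hudi at a different directory).

```scala
// Hedged sketch (not from TestSqlConf): make sure the external-config file exists
// before Hudi attempts to load global defaults from it.
import java.nio.file.{Files, Paths, StandardOpenOption}

object EnsureHudiDefaults {
  def main(args: Array[String]): Unit = {
    // Assumed to be relative to the repo checkout; the failure above shows the absolute form.
    val conf = Paths.get("src/test/resources/external-config/hudi-defaults.conf")
    if (!Files.exists(conf)) {
      Files.createDirectories(conf.getParent)
      // Minimal placeholder entry in spark-defaults style ("key value"); real defaults go here.
      Files.write(conf, "hoodie.upsert.shuffle.parallelism 2\n".getBytes,
        StandardOpenOption.CREATE)
    }
    println(s"hudi-defaults.conf present: ${Files.exists(conf)}")
  }
}
```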
451412 [pool-739-thread-2] ERROR org.apache.hudi.common.util.queue.BoundedInMemoryExecutor - error consuming records | |
org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:344) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:122) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:112) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryQueueConsumer.consume(BoundedInMemoryQueueConsumer.java:37) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.lambda$null$2(BoundedInMemoryExecutor.java:121) | |
at java.util.concurrent.FutureTask.run(FutureTask.java:266) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Caused by: org.apache.hudi.exception.HoodieDuplicateKeyException: Duplicate key found for insert statement, key is: id:1 | |
at org.apache.spark.sql.hudi.command.ValidateDuplicateKeyPayload.combineAndGetUpdateValue(InsertIntoHoodieTableCommand.scala:309) | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:324) | |
... 8 more | |
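The duplicate-key errors in this block all bottom out in ValidateDuplicateKeyPayload: an INSERT carried a record key (id:1) that already exists in the table, and strict insert semantics reject it instead of merging. A hedged sketch of that scenario follows; the table name, location, and the hoodie.sql.insert.mode setting are illustrative assumptions rather than the suite's code.

```scala
// Hedged sketch of the scenario behind HoodieDuplicateKeyException above: under strict
// insert mode, a second INSERT with an existing primary key is rejected, not merged.
import org.apache.spark.sql.SparkSession

object DuplicateKeyDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("dup-key")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .getOrCreate()
    spark.sql("set hoodie.sql.insert.mode = strict")
    spark.sql(
      """create table if not exists h_dup (id int, name string, price double, ts bigint)
        |using hudi
        |location '/tmp/h_dup'
        |tblproperties (primaryKey = 'id', preCombineField = 'ts')
        |""".stripMargin)
    spark.sql("insert into h_dup values (1, 'a1', 10.0, 1000)")
    // Second insert with the same key: expected to fail with HoodieDuplicateKeyException.
    spark.sql("insert into h_dup values (1, 'a1', 11.0, 1001)")
    spark.stop()
  }
}
```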
451428 [Executor task launch worker for task 181] ERROR org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor - Error upserting bucketType UPDATE for partition :0 | |
org.apache.hudi.exception.HoodieException: org.apache.hudi.exception.HoodieException: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.table.action.commit.SparkMergeHelper.runMerge(SparkMergeHelper.java:102) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpdateInternal(BaseSparkCommitActionExecutor.java:351) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpdate(BaseSparkCommitActionExecutor.java:342) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpsertPartition(BaseSparkCommitActionExecutor.java:315) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.lambda$execute$ecf5068c$1(BaseSparkCommitActionExecutor.java:174) | |
at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102) | |
at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102) | |
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$25.apply(RDD.scala:853) | |
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$25.apply(RDD.scala:853) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:337) | |
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:335) | |
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1182) | |
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156) | |
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091) | |
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156) | |
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882) | |
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:286) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Caused by: org.apache.hudi.exception.HoodieException: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.execute(BoundedInMemoryExecutor.java:147) | |
at org.apache.hudi.table.action.commit.SparkMergeHelper.runMerge(SparkMergeHelper.java:100) | |
... 33 more | |
Caused by: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at java.util.concurrent.FutureTask.report(FutureTask.java:122) | |
at java.util.concurrent.FutureTask.get(FutureTask.java:192) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.execute(BoundedInMemoryExecutor.java:141) | |
... 34 more | |
Caused by: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:344) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:122) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:112) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryQueueConsumer.consume(BoundedInMemoryQueueConsumer.java:37) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.lambda$null$2(BoundedInMemoryExecutor.java:121) | |
at java.util.concurrent.FutureTask.run(FutureTask.java:266) | |
... 3 more | |
Caused by: org.apache.hudi.exception.HoodieDuplicateKeyException: Duplicate key found for insert statement, key is: id:1 | |
at org.apache.spark.sql.hudi.command.ValidateDuplicateKeyPayload.combineAndGetUpdateValue(InsertIntoHoodieTableCommand.scala:309) | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:324) | |
... 8 more | |
451429 [Executor task launch worker for task 181] WARN org.apache.spark.storage.BlockManager - Putting block rdd_265_0 failed due to exception org.apache.hudi.exception.HoodieUpsertException: Error upserting bucketType UPDATE for partition :0. | |
451430 [Executor task launch worker for task 181] WARN org.apache.spark.storage.BlockManager - Block rdd_265_0 could not be removed as it was not found on disk or in memory | |
451433 [Executor task launch worker for task 181] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 130.0 (TID 181) | |
org.apache.hudi.exception.HoodieUpsertException: Error upserting bucketType UPDATE for partition :0 | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpsertPartition(BaseSparkCommitActionExecutor.java:322) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.lambda$execute$ecf5068c$1(BaseSparkCommitActionExecutor.java:174) | |
at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102) | |
at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102) | |
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$25.apply(RDD.scala:853) | |
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$25.apply(RDD.scala:853) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:337) | |
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:335) | |
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1182) | |
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156) | |
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091) | |
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156) | |
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882) | |
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:286) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Caused by: org.apache.hudi.exception.HoodieException: org.apache.hudi.exception.HoodieException: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.table.action.commit.SparkMergeHelper.runMerge(SparkMergeHelper.java:102) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpdateInternal(BaseSparkCommitActionExecutor.java:351) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpdate(BaseSparkCommitActionExecutor.java:342) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpsertPartition(BaseSparkCommitActionExecutor.java:315) | |
... 30 more | |
Caused by: org.apache.hudi.exception.HoodieException: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.execute(BoundedInMemoryExecutor.java:147) | |
at org.apache.hudi.table.action.commit.SparkMergeHelper.runMerge(SparkMergeHelper.java:100) | |
... 33 more | |
Caused by: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at java.util.concurrent.FutureTask.report(FutureTask.java:122) | |
at java.util.concurrent.FutureTask.get(FutureTask.java:192) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.execute(BoundedInMemoryExecutor.java:141) | |
... 34 more | |
Caused by: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:344) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:122) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:112) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryQueueConsumer.consume(BoundedInMemoryQueueConsumer.java:37) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.lambda$null$2(BoundedInMemoryExecutor.java:121) | |
at java.util.concurrent.FutureTask.run(FutureTask.java:266) | |
... 3 more | |
Caused by: org.apache.hudi.exception.HoodieDuplicateKeyException: Duplicate key found for insert statement, key is: id:1 | |
at org.apache.spark.sql.hudi.command.ValidateDuplicateKeyPayload.combineAndGetUpdateValue(InsertIntoHoodieTableCommand.scala:309) | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:324) | |
... 8 more | |
451455 [task-result-getter-1] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 130.0 (TID 181, localhost, executor driver): org.apache.hudi.exception.HoodieUpsertException: Error upserting bucketType UPDATE for partition :0 | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpsertPartition(BaseSparkCommitActionExecutor.java:322) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.lambda$execute$ecf5068c$1(BaseSparkCommitActionExecutor.java:174) | |
at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102) | |
at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102) | |
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$25.apply(RDD.scala:853) | |
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$25.apply(RDD.scala:853) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:337) | |
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:335) | |
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1182) | |
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156) | |
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091) | |
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156) | |
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882) | |
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:286) | |
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Caused by: org.apache.hudi.exception.HoodieException: org.apache.hudi.exception.HoodieException: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.table.action.commit.SparkMergeHelper.runMerge(SparkMergeHelper.java:102) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpdateInternal(BaseSparkCommitActionExecutor.java:351) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpdate(BaseSparkCommitActionExecutor.java:342) | |
at org.apache.hudi.table.action.commit.BaseSparkCommitActionExecutor.handleUpsertPartition(BaseSparkCommitActionExecutor.java:315) | |
... 30 more | |
Caused by: org.apache.hudi.exception.HoodieException: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.execute(BoundedInMemoryExecutor.java:147) | |
at org.apache.hudi.table.action.commit.SparkMergeHelper.runMerge(SparkMergeHelper.java:100) | |
... 33 more | |
Caused by: java.util.concurrent.ExecutionException: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at java.util.concurrent.FutureTask.report(FutureTask.java:122) | |
at java.util.concurrent.FutureTask.get(FutureTask.java:192) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.execute(BoundedInMemoryExecutor.java:141) | |
... 34 more | |
Caused by: org.apache.hudi.exception.HoodieUpsertException: Failed to combine/merge new record with old value in storage, for new record {HoodieRecord{key=HoodieKey { recordKey=id:1 partitionPath=}, currentLocation='HoodieRecordLocation {instantTime=20220112023455660, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}', newLocation='HoodieRecordLocation {instantTime=20220112023458585, fileId=d95290af-cc74-4f17-8fda-dc502ecee648-0}'}}, old value {{"_hoodie_commit_time": "20220112023452536", "_hoodie_commit_seqno": "20220112023452536_0_124", "_hoodie_record_key": "id:1", "_hoodie_partition_path": "", "_hoodie_file_name": "d95290af-cc74-4f17-8fda-dc502ecee648-0_0-56-74_20220112023452536.parquet", "id": 1, "name": "a1", "price": 10.0, "ts": 1000}} | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:344) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:122) | |
at org.apache.hudi.table.action.commit.AbstractMergeHelper$UpdateHandler.consumeOneRecord(AbstractMergeHelper.java:112) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryQueueConsumer.consume(BoundedInMemoryQueueConsumer.java:37) | |
at org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.lambda$null$2(BoundedInMemoryExecutor.java:121) | |
at java.util.concurrent.FutureTask.run(FutureTask.java:266) | |
... 3 more | |
Caused by: org.apache.hudi.exception.HoodieDuplicateKeyException: Duplicate key found for insert statement, key is: id:1 | |
at org.apache.spark.sql.hudi.command.ValidateDuplicateKeyPayload.combineAndGetUpdateValue(InsertIntoHoodieTableCommand.scala:309) | |
at org.apache.hudi.io.HoodieMergeHandle.write(HoodieMergeHandle.java:324) | |
... 8 more | |
451458 [task-result-getter-1] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 130.0 failed 1 times; aborting job | |
456208 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023506124__replacecommit__REQUESTED] | |
456212 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023506124__replacecommit__REQUESTED] | |
456330 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023506124__replacecommit__REQUESTED] | |
456563 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023506124__replacecommit__REQUESTED] | |
457130 [Executor task launch worker for task 252] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023506124__replacecommit__REQUESTED] | |
457132 [qtp418817786-7519] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023506124__replacecommit__INFLIGHT] | |
457977 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023507893__replacecommit__REQUESTED] | |
457983 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023507893__replacecommit__REQUESTED] | |
458095 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023507893__replacecommit__REQUESTED] | |
458363 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023507893__replacecommit__REQUESTED] | |
459073 [Executor task launch worker for task 268] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023507893__replacecommit__REQUESTED] | |
459076 [qtp1559558467-7551] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023507893__replacecommit__INFLIGHT] | |
460067 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023509980__replacecommit__REQUESTED] | |
460073 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023509980__replacecommit__REQUESTED] | |
460233 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023509980__replacecommit__REQUESTED] | |
460514 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023509980__replacecommit__REQUESTED] | |
461089 [Executor task launch worker for task 290] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023509980__replacecommit__REQUESTED] | |
461093 [qtp553004139-7583] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023509980__replacecommit__INFLIGHT] | |
463830 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023513740__replacecommit__REQUESTED] | |
463838 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023513740__replacecommit__REQUESTED] | |
463974 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023513740__replacecommit__REQUESTED] | |
464206 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023513740__replacecommit__REQUESTED] | |
464765 [Executor task launch worker for task 325] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023513740__replacecommit__REQUESTED] | |
464771 [qtp633779208-7648] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023513740__replacecommit__INFLIGHT] | |
465951 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023515859__replacecommit__REQUESTED] | |
465960 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023515859__replacecommit__REQUESTED] | |
466123 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023515859__replacecommit__REQUESTED] | |
466394 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023515859__replacecommit__REQUESTED] | |
467081 [Executor task launch worker for task 350] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023515859__replacecommit__REQUESTED] | |
467086 [qtp703598344-7680] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023515859__replacecommit__INFLIGHT] | |
468241 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023518156__replacecommit__REQUESTED] | |
468245 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023518156__replacecommit__REQUESTED] | |
468368 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023518156__replacecommit__REQUESTED] | |
468634 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023518156__replacecommit__REQUESTED] | |
468985 [Executor task launch worker for task 373] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023518156__replacecommit__REQUESTED] | |
468991 [qtp569849312-7700] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023518156__replacecommit__INFLIGHT] | |
478881 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.HoodieSparkSqlWriter$ - hoodie table at file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-307574ce-71b9-4f75-8e65-2dec5597a289/h11 already exists. Deleting existing data & overwriting with new data. | |
487256 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.HoodieSparkSqlWriter$ - hoodie table at file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-307574ce-71b9-4f75-8e65-2dec5597a289/h15 already exists. Deleting existing data & overwriting with new data. | |
489652 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023539569__replacecommit__REQUESTED] | |
489656 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023539569__replacecommit__REQUESTED] | |
490100 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023539569__replacecommit__REQUESTED] | |
490491 [ScalaTest-run-running-TestInsertTable] WARN org.apache.hudi.common.util.ClusteringUtils - No content found in requested file for instant [==>20220112023539569__replacecommit__REQUESTED] | |
554056 [ScalaTest-run-running-TestCreateTable] WARN org.apache.hudi.HoodieFileIndex - Cannot do the partition prune for table file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-abf389f3-26a3-4257-949c-cb80c5d5445b/h13.The partitionFragments size (2021,08,02,12) is not equal to the partition columns size(StructField(day,StringType,true),StructField(hh,IntegerType,false)) | |
557328 [ScalaTest-run-running-TestCreateTable] WARN org.apache.hudi.HoodieFileIndex - Cannot do the partition prune for table file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-abf389f3-26a3-4257-949c-cb80c5d5445b/h13.The partitionFragments size (2021,08,02,12) is not equal to the partition columns size(StructField(day,StringType,true),StructField(hh,IntegerType,true)) | |
560678 [ScalaTest-run-running-TestCreateTable] WARN org.apache.hudi.HoodieFileIndex - Cannot do the partition prune for table file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-abf389f3-26a3-4257-949c-cb80c5d5445b/h13.The partitionFragments size (2021,08,02,12) is not equal to the partition columns size(StructField(day,StringType,true),StructField(hh,IntegerType,true)) | |
561361 [ScalaTest-run-running-TestCreateTable] WARN org.apache.hudi.HoodieFileIndex - Cannot do the partition prune for table file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-abf389f3-26a3-4257-949c-cb80c5d5445b/h13.The partitionFragments size (2021,08,02,12) is not equal to the partition columns size(StructField(day,StringType,true),StructField(hh,IntegerType,true)) | |
564663 [ScalaTest-run-running-TestCreateTable] WARN org.apache.hudi.HoodieFileIndex - Cannot do the partition prune for table file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-abf389f3-26a3-4257-949c-cb80c5d5445b/h13.The partitionFragments size (2021,08,02,12) is not equal to the partition columns size(StructField(day,StringType,true),StructField(hh,IntegerType,true)) | |
565387 [ScalaTest-run-running-TestCreateTable] WARN org.apache.hudi.HoodieFileIndex - Cannot do the partition prune for table file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-abf389f3-26a3-4257-949c-cb80c5d5445b/h13.The partitionFragments size (2021,08,02,12) is not equal to the partition columns size(StructField(day,StringType,true),StructField(hh,IntegerType,true)) | |
568808 [ScalaTest-run-running-TestCreateTable] WARN org.apache.hudi.HoodieFileIndex - Cannot do the partition prune for table file:/private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/spark-abf389f3-26a3-4257-949c-cb80c5d5445b/h13.The partitionFragments size (2021,08,02,12) is not equal to the partition columns size(StructField(day,StringType,true),StructField(hh,IntegerType,true)) | |
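These HoodieFileIndex warnings describe a shape mismatch, restated by the tiny sketch below: the relative partition path 2021/08/02/12 has four segments while the table declares only two partition columns (day, hh), so the fragments cannot be mapped to columns and partition pruning is skipped (the query falls back to scanning all partitions).

```scala
// Small sketch restating the warning above: four path fragments vs two declared
// partition columns, so HoodieFileIndex cannot prune by partition.
object PartitionFragmentCheck {
  def main(args: Array[String]): Unit = {
    val fragments        = "2021/08/02/12".split("/") // from the warning
    val partitionColumns = Seq("day", "hh")           // from the warning
    println(s"fragments=${fragments.length}, columns=${partitionColumns.length}, " +
      s"prunable=${fragments.length == partitionColumns.length}") // prunable=false
  }
}
```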
586885 [ScalaTest-run-running-TestTruncateTable] WARN org.apache.spark.sql.hudi.command.TruncateHoodieTableCommand - Exception when attempting to uncache table `default`.`h0` | |
org.spark_project.guava.util.concurrent.UncheckedExecutionException: org.apache.hudi.exception.TableNotFoundException: Hoodie table not found in path Unable to find a hudi table for the user provided paths. | |
at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2263) | |
at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000) | |
at org.spark_project.guava.cache.LocalCache$LocalManualCache.get(LocalCache.java:4789) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.getCachedPlan(SessionCatalog.scala:141) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable.org$apache$spark$sql$execution$datasources$FindDataSourceTable$$readDataSourceTable(DataSourceStrategy.scala:227) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anonfun$apply$2.applyOrElse(DataSourceStrategy.scala:264) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anonfun$apply$2.applyOrElse(DataSourceStrategy.scala:255) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$2.apply(AnalysisHelper.scala:108) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$2.apply(AnalysisHelper.scala:108) | |
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:107) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsDown(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDown(LogicalPlan.scala:29) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$apply$6.apply(AnalysisHelper.scala:113) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$apply$6.apply(AnalysisHelper.scala:113) | |
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:329) | |
at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187) | |
at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:327) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:113) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsDown(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDown(LogicalPlan.scala:29) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperators(AnalysisHelper.scala:73) | |
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:29) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable.apply(DataSourceStrategy.scala:255) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable.apply(DataSourceStrategy.scala:223) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84) | |
at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124) | |
at scala.collection.immutable.List.foldLeft(List.scala:84) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105) | |
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57) | |
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55) | |
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78) | |
at org.apache.spark.sql.SparkSession.table(SparkSession.scala:628) | |
at org.apache.spark.sql.execution.command.TruncateTableCommand.run(tables.scala:480) | |
at org.apache.spark.sql.hudi.command.TruncateHoodieTableCommand.run(TruncateHoodieTableCommand.scala:42) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3370) | |
at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78) | |
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3369) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:194) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:79) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(TestTruncateTable.scala:44) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(TestTruncateTable.scala:24) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1.apply$mcV$sp(TestTruncateTable.scala:24) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1.apply(TestTruncateTable.scala:24) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1.apply(TestTruncateTable.scala:24) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase$$anonfun$test$1.apply(TestHoodieSqlBase.scala:62) | |
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) | |
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) | |
at org.scalatest.Transformer.apply(Transformer.scala:22) | |
at org.scalatest.Transformer.apply(Transformer.scala:20) | |
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186) | |
at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196) | |
at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289) | |
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196) | |
at org.scalatest.FunSuite.runTest(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396) | |
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384) | |
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379) | |
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461) | |
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229) | |
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560) | |
at org.scalatest.Suite$class.run(Suite.scala:1147) | |
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
at org.scalatest.SuperEngine.runImpl(Engine.scala:521) | |
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.org$scalatest$BeforeAndAfterAll$$super$run(TestHoodieSqlBase.scala:30) | |
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213) | |
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.run(TestHoodieSqlBase.scala:30) | |
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45) | |
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340) | |
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334) | |
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011) | |
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010) | |
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500) | |
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010) | |
at org.scalatest.tools.Runner$.run(Runner.scala:850) | |
at org.scalatest.tools.Runner.run(Runner.scala) | |
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38) | |
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25) | |
Caused by: org.apache.hudi.exception.TableNotFoundException: Hoodie table not found in path Unable to find a hudi table for the user provided paths. | |
at org.apache.hudi.DataSourceUtils.getTablePath(DataSourceUtils.java:85) | |
at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:103) | |
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:316) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anon$1.call(DataSourceStrategy.scala:242) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anon$1.call(DataSourceStrategy.scala:227) | |
at org.spark_project.guava.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4792) | |
at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599) | |
at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379) | |
at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342) | |
at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) | |
... 115 more | |
593631 [ScalaTest-run-running-TestTruncateTable] WARN org.apache.spark.sql.hudi.command.TruncateHoodieTableCommand - Exception when attempting to uncache table `default`.`h1` | |
org.spark_project.guava.util.concurrent.UncheckedExecutionException: org.apache.hudi.exception.TableNotFoundException: Hoodie table not found in path Unable to find a hudi table for the user provided paths. | |
at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2263) | |
at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000) | |
at org.spark_project.guava.cache.LocalCache$LocalManualCache.get(LocalCache.java:4789) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.getCachedPlan(SessionCatalog.scala:141) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable.org$apache$spark$sql$execution$datasources$FindDataSourceTable$$readDataSourceTable(DataSourceStrategy.scala:227) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anonfun$apply$2.applyOrElse(DataSourceStrategy.scala:264) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anonfun$apply$2.applyOrElse(DataSourceStrategy.scala:255) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$2.apply(AnalysisHelper.scala:108) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$2.apply(AnalysisHelper.scala:108) | |
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:107) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsDown(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDown(LogicalPlan.scala:29) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$apply$6.apply(AnalysisHelper.scala:113) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1$$anonfun$apply$6.apply(AnalysisHelper.scala:113) | |
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:329) | |
at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187) | |
at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:327) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:113) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$anonfun$resolveOperatorsDown$1.apply(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsDown(AnalysisHelper.scala:106) | |
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDown(LogicalPlan.scala:29) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperators(AnalysisHelper.scala:73) | |
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:29) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable.apply(DataSourceStrategy.scala:255) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable.apply(DataSourceStrategy.scala:223) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84) | |
at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124) | |
at scala.collection.immutable.List.foldLeft(List.scala:84) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105) | |
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201) | |
at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105) | |
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57) | |
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55) | |
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78) | |
at org.apache.spark.sql.SparkSession.table(SparkSession.scala:628) | |
at org.apache.spark.sql.execution.command.TruncateTableCommand.run(tables.scala:480) | |
at org.apache.spark.sql.hudi.command.TruncateHoodieTableCommand.run(TruncateHoodieTableCommand.scala:42) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3370) | |
at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78) | |
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3369) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:194) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:79) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(TestTruncateTable.scala:44) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(TestTruncateTable.scala:24) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1.apply$mcV$sp(TestTruncateTable.scala:24) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1.apply(TestTruncateTable.scala:24) | |
at org.apache.spark.sql.hudi.TestTruncateTable$$anonfun$1.apply(TestTruncateTable.scala:24) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase$$anonfun$test$1.apply(TestHoodieSqlBase.scala:62) | |
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) | |
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) | |
at org.scalatest.Transformer.apply(Transformer.scala:22) | |
at org.scalatest.Transformer.apply(Transformer.scala:20) | |
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186) | |
at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196) | |
at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289) | |
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196) | |
at org.scalatest.FunSuite.runTest(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396) | |
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384) | |
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379) | |
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461) | |
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229) | |
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560) | |
at org.scalatest.Suite$class.run(Suite.scala:1147) | |
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560) | |
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
at org.scalatest.SuperEngine.runImpl(Engine.scala:521) | |
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.org$scalatest$BeforeAndAfterAll$$super$run(TestHoodieSqlBase.scala:30) | |
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213) | |
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210) | |
at org.apache.spark.sql.hudi.TestHoodieSqlBase.run(TestHoodieSqlBase.scala:30) | |
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45) | |
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340) | |
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334) | |
at scala.collection.immutable.List.foreach(List.scala:392) | |
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334) | |
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011) | |
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010) | |
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500) | |
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010) | |
at org.scalatest.tools.Runner$.run(Runner.scala:850) | |
at org.scalatest.tools.Runner.run(Runner.scala) | |
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38) | |
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25) | |
Caused by: org.apache.hudi.exception.TableNotFoundException: Hoodie table not found in path Unable to find a hudi table for the user provided paths. | |
at org.apache.hudi.DataSourceUtils.getTablePath(DataSourceUtils.java:85) | |
at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:103) | |
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:316) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anon$1.call(DataSourceStrategy.scala:242) | |
at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anon$1.call(DataSourceStrategy.scala:227) | |
at org.spark_project.guava.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4792) | |
at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599) | |
at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379) | |
at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342) | |
at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) | |
... 115 more | |
603688 [Executor task launch worker for task 123] WARN org.apache.hudi.index.bloom.HoodieBloomIndex - Unable to find range metadata in file :(,effd16b7-5156-490d-a71e-6444109e4794-0) | |
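Note (added for context, not part of the original log): the stack traces above show Spark's TruncateTableCommand re-resolving the table via SparkSession.table after TruncateHoodieTableCommand has run, and DefaultSource.createRelation then failing with TableNotFoundException because no .hoodie metadata can be found at the table path; the second trace is the same root cause resurfacing when Spark tries to uncache `default`.`h1`. Below is a minimal, hedged Scala sketch of the shape of that scenario only. The table name `h1` comes from the log; the schema, location, and session configuration are assumptions for illustration, not the test suite's actual code.

import org.apache.spark.sql.SparkSession

object HudiTruncateSketch {
  def main(args: Array[String]): Unit = {
    // Assumes Spark with the Hudi bundle and its SQL extension on the classpath.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("hudi-truncate-sketch")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .getOrCreate()

    // Hypothetical table definition; schema and location are illustrative only.
    spark.sql(
      """create table h1 (id int, name string, price double, ts long)
        |using hudi
        |tblproperties (primaryKey = 'id', preCombineField = 'ts')
        |location '/tmp/h1'""".stripMargin)
    spark.sql("insert into h1 values (1, 'a1', 10.0, 1000)")

    // Truncation removes the table's data files. If the .hoodie metadata
    // directory is removed as well, a later re-resolution of the table path
    // (e.g. TruncateTableCommand calling spark.table("h1") to refresh/uncache)
    // can fail with TableNotFoundException, matching the traces above.
    spark.sql("truncate table h1")

    spark.stop()
  }
}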