Created July 9, 2021 04:56

Save nsivabalan/9204cbefaa470ad07708ca902d649f77 to your computer and use it in GitHub Desktop.
  
    
      This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
      Learn more about bidirectional Unicode characters
    
  
  
    
  | /Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/bin/java -agentlib:jdwp=transport=dt_socket,address=127.0.0.1:49636,suspend=y,server=n -javaagent:/Users/nsb/Library/Caches/JetBrains/IdeaIC2021.1/captureAgent/debugger-agent.jar -Dfile.encoding=UTF-8 -classpath "/Users/nsb/Library/Application Support/JetBrains/IdeaIC2021.1/plugins/Scala/lib/runners.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/charsets.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/deploy.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/cldrdata.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/dnsns.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/jaccess.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/jfxrt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/localedata.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/nashorn.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/sunec.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/sunjce_provider.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/sunpkcs11.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/ext/zipfs.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/javaws.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/jce.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/jfr.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/jfxswt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/jsse.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/management-agent.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Con
tents/Home/jre/lib/plugin.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/resources.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/jre/lib/rt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/lib/ant-javafx.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/lib/dt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/lib/javafx-mx.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/lib/jconsole.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/lib/packager.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/lib/sa-jdi.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_192.jdk/Contents/Home/lib/tools.jar:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-spark-datasource/hudi-spark/target/test-classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-spark-datasource/hudi-spark/target/classes:/Users/nsb/.m2/repository/org/scala-lang/scala-library/2.11.12/scala-library-2.11.12.jar:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-client/hudi-client-common/target/classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-timeline-service/target/classes:/Users/nsb/.m2/repository/io/javalin/javalin/2.8.0/javalin-2.8.0.jar:/Users/nsb/.m2/repository/org/jetbrains/kotlin/kotlin-stdlib-jdk8/1.2.71/kotlin-stdlib-jdk8-1.2.71.jar:/Users/nsb/.m2/repository/org/jetbrains/kotlin/kotlin-stdlib/1.2.71/kotlin-stdlib-1.2.71.jar:/Users/nsb/.m2/repository/org/jetbrains/kotlin/kotlin-stdlib-common/1.2.71/kotlin-stdlib-common-1.2.71.jar:/Users/nsb/.m2/repository/org/jetbrains/kotlin/kotlin-stdlib-jdk7/1.2.71/kotlin-stdlib-jdk7-1.2.71.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-server/9.4.15.v20190215/jetty-server-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-http/9.4.15.v20190215/jetty-http-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org
/eclipse/jetty/jetty-util/9.4.15.v20190215/jetty-util-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-io/9.4.15.v20190215/jetty-io-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-webapp/9.4.15.v20190215/jetty-webapp-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-xml/9.4.15.v20190215/jetty-xml-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-servlet/9.4.15.v20190215/jetty-servlet-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-security/9.4.15.v20190215/jetty-security-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/websocket/websocket-server/9.4.15.v20190215/websocket-server-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/websocket/websocket-common/9.4.15.v20190215/websocket-common-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/websocket/websocket-api/9.4.15.v20190215/websocket-api-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/websocket/websocket-client/9.4.15.v20190215/websocket-client-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/jetty-client/9.4.15.v20190215/jetty-client-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/org/eclipse/jetty/websocket/websocket-servlet/9.4.15.v20190215/websocket-servlet-9.4.15.v20190215.jar:/Users/nsb/.m2/repository/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/Users/nsb/.m2/repository/io/dropwizard/metrics/metrics-graphite/4.1.1/metrics-graphite-4.1.1.jar:/Users/nsb/.m2/repository/io/dropwizard/metrics/metrics-core/4.1.1/metrics-core-4.1.1.jar:/Users/nsb/.m2/repository/io/dropwizard/metrics/metrics-jmx/4.1.1/metrics-jmx-4.1.1.jar:/Users/nsb/.m2/repository/io/prometheus/simpleclient/0.8.0/simpleclient-0.8.0.jar:/Users/nsb/.m2/repository/io/prometheus/simpleclient_httpserver/0.8.0/simpleclient_httpserver-0.8.0.jar:/Users/nsb/.m2/repository/io/prometheus/simpleclient_common/0.8.0/simpleclient_common-0.8.0.jar:/Users/nsb/.m2/repository/io/
prometheus/simpleclient_dropwizard/0.8.0/simpleclient_dropwizard-0.8.0.jar:/Users/nsb/.m2/repository/io/prometheus/simpleclient_pushgateway/0.8.0/simpleclient_pushgateway-0.8.0.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-hdfs/2.7.3/hadoop-hdfs-2.7.3-tests.jar:/Users/nsb/.m2/repository/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/Users/nsb/.m2/repository/xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.jar:/Users/nsb/.m2/repository/xml-apis/xml-apis/1.3.04/xml-apis-1.3.04.jar:/Users/nsb/.m2/repository/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-common/2.7.3/hadoop-common-2.7.3-tests.jar:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-client/hudi-spark-client/target/classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-common/target/classes:/Users/nsb/.m2/repository/com/fasterxml/jackson/core/jackson-databind/2.6.7.3/jackson-databind-2.6.7.3.jar:/Users/nsb/.m2/repository/org/apache/orc/orc-core/1.6.0/orc-core-1.6.0-nohive.jar:/Users/nsb/.m2/repository/org/apache/orc/orc-shims/1.6.0/orc-shims-1.6.0.jar:/Users/nsb/.m2/repository/io/airlift/aircompressor/0.15/aircompressor-0.15.jar:/Users/nsb/.m2/repository/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/Users/nsb/.m2/repository/org/jetbrains/annotations/17.0.0/annotations-17.0.0.jar:/Users/nsb/.m2/repository/org/apache/httpcomponents/fluent-hc/4.4.1/fluent-hc-4.4.1.jar:/Users/nsb/.m2/repository/org/apache/httpcomponents/httpclient/4.4.1/httpclient-4.4.1.jar:/Users/nsb/.m2/repository/org/rocksdb/rocksdbjni/5.17.2/rocksdbjni-5.17.2.jar:/Users/nsb/.m2/repository/com/esotericsoftware/kryo-shaded/4.0.2/kryo-shaded-4.0.2.jar:/Users/nsb/.m2/repository/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/Users/nsb/.m2/repository/org/objenesis/objenesis/2.5.1/objenesis-2.5.1.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-server/1.2.3/hbase-server-1.2.3.jar:/Users/nsb/.m2/repository/org
/apache/hbase/hbase-common/1.2.3/hbase-common-1.2.3.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-protocol/1.2.3/hbase-protocol-1.2.3.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-procedure/1.2.3/hbase-procedure-1.2.3.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-common/1.2.3/hbase-common-1.2.3-tests.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-prefix-tree/1.2.3/hbase-prefix-tree-1.2.3.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-hadoop-compat/1.2.3/hbase-hadoop-compat-1.2.3.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-hadoop2-compat/1.2.3/hbase-hadoop2-compat-1.2.3.jar:/Users/nsb/.m2/repository/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/Users/nsb/.m2/repository/org/apache/commons/commons-math/2.2/commons-math-2.2.jar:/Users/nsb/.m2/repository/org/jamon/jamon-runtime/2.4.1/jamon-runtime-2.4.1.jar:/Users/nsb/.m2/repository/com/lmax/disruptor/3.3.0/disruptor-3.3.0.jar:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-hadoop-mr/target/classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-sync/hudi-hive-sync/target/classes:/Users/nsb/.m2/repository/com/beust/jcommander/1.72/jcommander-1.72.jar:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-sync/hudi-sync-common/target/classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-spark-datasource/hudi-spark-common/target/classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-spark-datasource/hudi-spark2/target/classes:/Users/nsb/.m2/repository/log4j/log4j/1.2.17/log4j-1.2.17.jar:/Users/nsb/.m2/repository/com/fasterxml/jackson/core/jackson-annotations/2.6.7/jackson-annotations-2.6.7.jar:/Users/nsb/.m2/repository/com/fasterxml/jackson/module/jackson-module-scala_2.11/2.6.7.1/jackson-module-scala_2.11-2.6.7.1.jar:/Users/nsb/.m2/repository/org/scala-lang/scala-reflect/2.11.8/scala-reflect-2.11.8.jar:/Users/nsb/.m2/repository/com/fasterxml/jackson/core/jackson-core/2.6.7/ja
ckson-core-2.6.7.jar:/Users/nsb/.m2/repository/com/fasterxml/jackson/module/jackson-module-paranamer/2.7.9/jackson-module-paranamer-2.7.9.jar:/Users/nsb/.m2/repository/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/Users/nsb/.m2/repository/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/Users/nsb/.m2/repository/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/Users/nsb/.m2/repository/org/xerial/snappy/snappy-java/1.1.1.3/snappy-java-1.1.1.3.jar:/Users/nsb/.m2/repository/org/apache/commons/commons-compress/1.8.1/commons-compress-1.8.1.jar:/Users/nsb/.m2/repository/org/tukaani/xz/1.5/xz-1.5.jar:/Users/nsb/.m2/repository/org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-avro/1.10.1/parquet-avro-1.10.1.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-column/1.10.1/parquet-column-1.10.1.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-common/1.10.1/parquet-common-1.10.1.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-encoding/1.10.1/parquet-encoding-1.10.1.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-hadoop/1.10.1/parquet-hadoop-1.10.1.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-jackson/1.10.1/parquet-jackson-1.10.1.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-format/2.4.0/parquet-format-2.4.0.jar:/Users/nsb/.m2/repository/it/unimi/dsi/fastutil/7.0.13/fastutil-7.0.13.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-core_2.11/2.4.4/spark-core_2.11-2.4.4.jar:/Users/nsb/.m2/repository/com/thoughtworks/paranamer/paranamer/2.8/paranamer-2.8.jar:/Users/nsb/.m2/repository/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar:/Users/nsb/.m2/repository/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar:/Users/nsb/.m2/repository/com/twitter/chill_2.11/0.9.3/chill_2.11-0.9.3.jar:/Users/nsb/.m2/repository/com/twitter/chill-java/0.9.3/chill-java-0.9.3.jar:/Users/nsb/.m2/repository/org/apache/xbean/xbean-asm6-sha
ded/4.8/xbean-asm6-shaded-4.8.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-launcher_2.11/2.4.4/spark-launcher_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-kvstore_2.11/2.4.4/spark-kvstore_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-network-common_2.11/2.4.4/spark-network-common_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-network-shuffle_2.11/2.4.4/spark-network-shuffle_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-unsafe_2.11/2.4.4/spark-unsafe_2.11-2.4.4.jar:/Users/nsb/.m2/repository/javax/activation/activation/1.1.1/activation-1.1.1.jar:/Users/nsb/.m2/repository/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/Users/nsb/.m2/repository/org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.jar:/Users/nsb/.m2/repository/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/Users/nsb/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/Users/nsb/.m2/repository/org/slf4j/jul-to-slf4j/1.7.16/jul-to-slf4j-1.7.16.jar:/Users/nsb/.m2/repository/org/slf4j/jcl-over-slf4j/1.7.16/jcl-over-slf4j-1.7.16.jar:/Users/nsb/.m2/repository/org/slf4j/slf4j-log4j12/1.7.16/slf4j-log4j12-1.7.16.jar:/Users/nsb/.m2/repository/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/Users/nsb/.m2/repository/org/lz4/lz4-java/1.4.0/lz4-java-1.4.0.jar:/Users/nsb/.m2/repository/com/github/luben/zstd-jni/1.3.2-2/zstd-jni-1.3.2-2.jar:/Users/nsb/.m2/repository/org/roaringbitmap/RoaringBitmap/0.7.45/RoaringBitmap-0.7.45.jar:/Users/nsb/.m2/repository/org/roaringbitmap/shims/0.7.45/shims-0.7.45.jar:/Users/nsb/.m2/repository/commons-net/commons-net/3.1/commons-net-3.1.jar:/Users/nsb/.m2/repository/org/json4s/json4s-jackson_2.11/3.5.3/json4s-jackson_2.11-3.5.3.jar:/Users/nsb/.m2/repository/org/json4s/json4s-core_2.11/3.5.3/json4s-core_2.11-3.5.3.jar:/Users/nsb/.m2/repository/org/json4s/json4s-ast_2.11/3.5.3/json4s-ast_2.11-3.5.3.jar:/Users/nsb/.m2/repository/org/json4s/json4s-scalap_2.11/3
.5.3/json4s-scalap_2.11-3.5.3.jar:/Users/nsb/.m2/repository/org/glassfish/jersey/core/jersey-client/2.22.2/jersey-client-2.22.2.jar:/Users/nsb/.m2/repository/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/Users/nsb/.m2/repository/org/glassfish/hk2/hk2-api/2.4.0-b34/hk2-api-2.4.0-b34.jar:/Users/nsb/.m2/repository/org/glassfish/hk2/hk2-utils/2.4.0-b34/hk2-utils-2.4.0-b34.jar:/Users/nsb/.m2/repository/org/glassfish/hk2/external/aopalliance-repackaged/2.4.0-b34/aopalliance-repackaged-2.4.0-b34.jar:/Users/nsb/.m2/repository/org/glassfish/hk2/external/javax.inject/2.4.0-b34/javax.inject-2.4.0-b34.jar:/Users/nsb/.m2/repository/org/glassfish/hk2/hk2-locator/2.4.0-b34/hk2-locator-2.4.0-b34.jar:/Users/nsb/.m2/repository/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar:/Users/nsb/.m2/repository/org/glassfish/jersey/core/jersey-common/2.22.2/jersey-common-2.22.2.jar:/Users/nsb/.m2/repository/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/Users/nsb/.m2/repository/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.22.2/jersey-guava-2.22.2.jar:/Users/nsb/.m2/repository/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/Users/nsb/.m2/repository/org/glassfish/jersey/core/jersey-server/2.17/jersey-server-2.17.jar:/Users/nsb/.m2/repository/org/glassfish/jersey/media/jersey-media-jaxb/2.17/jersey-media-jaxb-2.17.jar:/Users/nsb/.m2/repository/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/Users/nsb/.m2/repository/org/glassfish/jersey/containers/jersey-container-servlet/2.22.2/jersey-container-servlet-2.22.2.jar:/Users/nsb/.m2/repository/org/glassfish/jersey/containers/jersey-container-servlet-core/2.17/jersey-container-servlet-core-2.17.jar:/Users/nsb/.m2/repository/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/Users/nsb/.m2/repository/io/netty/netty/3.9.9.Final/netty-3.9.9.Final.jar:/Users/nsb/.m2/repository/com/clearspring/analytics/stream/2.7.0/stream-2.7.0.jar:/
Users/nsb/.m2/repository/io/dropwizard/metrics/metrics-jvm/3.1.5/metrics-jvm-3.1.5.jar:/Users/nsb/.m2/repository/io/dropwizard/metrics/metrics-json/3.1.5/metrics-json-3.1.5.jar:/Users/nsb/.m2/repository/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/Users/nsb/.m2/repository/oro/oro/2.0.8/oro-2.0.8.jar:/Users/nsb/.m2/repository/net/razorvine/pyrolite/4.13/pyrolite-4.13.jar:/Users/nsb/.m2/repository/net/sf/py4j/py4j/0.10.7/py4j-0.10.7.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-tags_2.11/2.4.4/spark-tags_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/Users/nsb/.m2/repository/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-sql_2.11/2.4.4/spark-sql_2.11-2.4.4.jar:/Users/nsb/.m2/repository/com/univocity/univocity-parsers/2.7.3/univocity-parsers-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-sketch_2.11/2.4.4/spark-sketch_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-catalyst_2.11/2.4.4/spark-catalyst_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/orc/orc-mapreduce/1.5.5/orc-mapreduce-1.5.5-nohive.jar:/Users/nsb/.m2/repository/org/apache/arrow/arrow-vector/0.10.0/arrow-vector-0.10.0.jar:/Users/nsb/.m2/repository/org/apache/arrow/arrow-format/0.10.0/arrow-format-0.10.0.jar:/Users/nsb/.m2/repository/org/apache/arrow/arrow-memory/0.10.0/arrow-memory-0.10.0.jar:/Users/nsb/.m2/repository/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/Users/nsb/.m2/repository/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-hive_2.11/2.4.4/spark-hive_2.11-2.4.4.jar:/Users/nsb/.m2/repository/com/twitter/parquet-hadoop-bundle/1.6.0/parquet-hadoop-bundle-1.6.0.jar:/Users/nsb/.m2/repository/org/spark-project/hive/hive-exec/1.2.1.spark2/hive-exec-1.2.1.spark2.jar:/Users/nsb/.m2/repository/log4j/apache-log4j-extras/1.2.17/apache-log4j-extras-1.2.17.jar:/Users/nsb/.m2/repository/com/googl
ecode/javaewah/JavaEWAH/0.3.2/JavaEWAH-0.3.2.jar:/Users/nsb/.m2/repository/org/iq80/snappy/snappy/0.2/snappy-0.2.jar:/Users/nsb/.m2/repository/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/Users/nsb/.m2/repository/org/spark-project/hive/hive-metastore/1.2.1.spark2/hive-metastore-1.2.1.spark2.jar:/Users/nsb/.m2/repository/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/Users/nsb/.m2/repository/org/apache/calcite/calcite-avatica/1.2.0-incubating/calcite-avatica-1.2.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/calcite/calcite-core/1.2.0-incubating/calcite-core-1.2.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/calcite/calcite-linq4j/1.2.0-incubating/calcite-linq4j-1.2.0-incubating.jar:/Users/nsb/.m2/repository/net/hydromatic/eigenbase-properties/1.1.5/eigenbase-properties-1.1.5.jar:/Users/nsb/.m2/repository/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/Users/nsb/.m2/repository/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar:/Users/nsb/.m2/repository/org/datanucleus/datanucleus-core/3.2.10/datanucleus-core-3.2.10.jar:/Users/nsb/.m2/repository/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/Users/nsb/.m2/repository/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/Users/nsb/.m2/repository/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-sql_2.11/2.4.4/spark-sql_2.11-2.4.4-tests.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-core_2.11/2.4.4/spark-core_2.11-2.4.4-tests.jar:/Users/nsb/.m2/repository/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-catalyst_2.11/2.4.4/spark-catalyst_2.11-2.4.4-tests.jar:/Users/nsb/.m2/repository/org/scala-lang/modules/scala-parser-combinators_2.11/1.1.0/scala-parser-combinators_2.11-1.1.0.jar:/Users/nsb/.m2/repository/org/codehaus/janino/janino/3.0.9/janino-3.0.9.jar:/Users/nsb/.m2/repository/org/codehaus/janino/commons-compiler/3.0.9/commons-compiler-3.0.9.jar:/Use
rs/nsb/.m2/repository/org/antlr/antlr4-runtime/4.7/antlr4-runtime-4.7.jar:/Users/nsb/.m2/repository/org/apache/spark/spark-avro_2.11/2.4.4/spark-avro_2.11-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-client/2.7.3/hadoop-client-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-hdfs/2.7.3/hadoop-hdfs-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-app/2.7.3/hadoop-mapreduce-client-app-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-common/2.7.3/hadoop-mapreduce-client-common-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-client/2.7.3/hadoop-yarn-client-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-server-common/2.7.3/hadoop-yarn-server-common-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-shuffle/2.7.3/hadoop-mapreduce-client-shuffle-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-api/2.7.3/hadoop-yarn-api-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-core/2.7.3/hadoop-mapreduce-client-core-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-common/2.7.3/hadoop-yarn-common-2.7.3.jar:/Users/nsb/.m2/repository/com/sun/jersey/jersey-client/1.9/jersey-client-1.9.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-jobclient/2.7.3/hadoop-mapreduce-client-jobclient-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-annotations/2.7.3/hadoop-annotations-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-common/2.7.3/hadoop-common-2.7.3.jar:/Users/nsb/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/nsb/.m2/repository/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/Users/nsb/.m2/repository/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/Users/nsb/.m2/repository/commons-io/commons-io/2.4/commons-io-2.4.jar:/Users/nsb/.m2/repository/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/Users/
nsb/.m2/repository/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/Users/nsb/.m2/repository/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/Users/nsb/.m2/repository/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/Users/nsb/.m2/repository/com/sun/jersey/jersey-json/1.9/jersey-json-1.9.jar:/Users/nsb/.m2/repository/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/Users/nsb/.m2/repository/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/Users/nsb/.m2/repository/org/codehaus/jackson/jackson-jaxrs/1.9.13/jackson-jaxrs-1.9.13.jar:/Users/nsb/.m2/repository/org/codehaus/jackson/jackson-xc/1.9.13/jackson-xc-1.9.13.jar:/Users/nsb/.m2/repository/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/Users/nsb/.m2/repository/asm/asm/3.1/asm-3.1.jar:/Users/nsb/.m2/repository/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/Users/nsb/.m2/repository/net/java/dev/jets3t/jets3t/0.9.0/jets3t-0.9.0.jar:/Users/nsb/.m2/repository/com/jamesmurty/utils/java-xmlbuilder/0.4/java-xmlbuilder-0.4.jar:/Users/nsb/.m2/repository/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/Users/nsb/.m2/repository/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/Users/nsb/.m2/repository/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/Users/nsb/.m2/repository/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar:/Users/nsb/.m2/repository/commons-beanutils/commons-beanutils-core/1.8.0/commons-beanutils-core-1.8.0.jar:/Users/nsb/.m2/repository/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/Users/nsb/.m2/repository/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-auth/2.7.3/hadoop-auth-2.7.3.jar:/Users/nsb/.m2/repository/org/apache/directory/server/apacheds-kerberos-codec/2.0.0-M15/apacheds-kerberos-codec-2.0.0-M15.jar:/Users/nsb/.m2/repository/org/apache/directory/server/apacheds-i18n/2.0.0-M15/apacheds-i18n-2.0.0-M15.jar:/Users/nsb/.m2/
repository/org/apache/directory/api/api-asn1-api/1.0.0-M20/api-asn1-api-1.0.0-M20.jar:/Users/nsb/.m2/repository/org/apache/directory/api/api-util/1.0.0-M20/api-util-1.0.0-M20.jar:/Users/nsb/.m2/repository/com/jcraft/jsch/0.1.42/jsch-0.1.42.jar:/Users/nsb/.m2/repository/org/apache/htrace/htrace-core/3.1.0-incubating/htrace-core-3.1.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-exec/2.3.1/hive-exec-2.3.1-core.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-vector-code-gen/2.3.1/hive-vector-code-gen-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-llap-tez/2.3.1/hive-llap-tez-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-llap-client/2.3.1/hive-llap-client-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-llap-common/2.3.1/hive-llap-common-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-shims/2.3.1/hive-shims-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/shims/hive-shims-common/2.3.1/hive-shims-common-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/shims/hive-shims-0.23/2.3.1/hive-shims-0.23-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-server-resourcemanager/2.7.2/hadoop-yarn-server-resourcemanager-2.7.2.jar:/Users/nsb/.m2/repository/com/google/inject/extensions/guice-servlet/3.0/guice-servlet-3.0.jar:/Users/nsb/.m2/repository/com/sun/jersey/contribs/jersey-guice/1.9/jersey-guice-1.9.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/2.7.2/hadoop-yarn-server-applicationhistoryservice-2.7.2.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-server-web-proxy/2.7.2/hadoop-yarn-server-web-proxy-2.7.2.jar:/Users/nsb/.m2/repository/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6-tests.jar:/Users/nsb/.m2/repository/org/apache/hive/shims/hive-shims-scheduler/2.3.1/hive-shims-scheduler-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/logging/log4j/log4j-1.2-api/2.6.
2/log4j-1.2-api-2.6.2.jar:/Users/nsb/.m2/repository/org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar:/Users/nsb/.m2/repository/org/apache/logging/log4j/log4j-core/2.6.2/log4j-core-2.6.2.jar:/Users/nsb/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.6.2/log4j-slf4j-impl-2.6.2.jar:/Users/nsb/.m2/repository/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/Users/nsb/.m2/repository/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/Users/nsb/.m2/repository/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/Users/nsb/.m2/repository/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/Users/nsb/.m2/repository/org/codehaus/groovy/groovy-all/2.4.4/groovy-all-2.4.4.jar:/Users/nsb/.m2/repository/org/apache/calcite/calcite-druid/1.10.0/calcite-druid-1.10.0.jar:/Users/nsb/.m2/repository/org/apache/calcite/avatica/avatica/1.8.0/avatica-1.8.0.jar:/Users/nsb/.m2/repository/org/apache/calcite/avatica/avatica-metrics/1.8.0/avatica-metrics-1.8.0.jar:/Users/nsb/.m2/repository/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-jdbc/2.3.1/hive-jdbc-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-service/2.3.1/hive-service-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-llap-server/2.3.1/hive-llap-server-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/slider/slider-core/0.90.2-incubating/slider-core-0.90.2-incubating.jar:/Users/nsb/.m2/repository/org/apache/hadoop/hadoop-yarn-registry/2.7.1/hadoop-yarn-registry-2.7.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-llap-common/2.3.1/hive-llap-common-2.3.1-tests.jar:/Users/nsb/.m2/repository/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/Users/nsb/.m2/repository/tomcat/jasper-compiler/5.5.23/jasper-compiler-5.5.23.jar:/Users/nsb/.m2/repository/ant/ant/1.6.5/ant-1.6.5.jar:/Users/nsb/.m2/repository/tomcat/jasper-runtime/5.5.23/jasper-runtime-5.5.23.jar:/Users/nsb/.m2/repository/commons-el/commons-el/1.0/commons-el-1.0.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-serde/2.3.1/hive-serde-2.
3.1.jar:/Users/nsb/.m2/repository/org/apache/parquet/parquet-hadoop-bundle/1.8.1/parquet-hadoop-bundle-1.8.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-service-rpc/2.3.1/hive-service-rpc-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/httpcomponents/httpcore/4.4.1/httpcore-4.4.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-metastore/2.3.1/hive-metastore-2.3.1.jar:/Users/nsb/.m2/repository/javolution/javolution/5.5.1/javolution-5.5.1.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-client/1.1.1/hbase-client-1.1.1.jar:/Users/nsb/.m2/repository/org/apache/hbase/hbase-annotations/1.1.1/hbase-annotations-1.1.1.jar:/Users/nsb/.m2/repository/org/jruby/jcodings/jcodings/1.0.8/jcodings-1.0.8.jar:/Users/nsb/.m2/repository/org/jruby/joni/joni/2.1.2/joni-2.1.2.jar:/Users/nsb/.m2/repository/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/Users/nsb/.m2/repository/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/Users/nsb/.m2/repository/com/zaxxer/HikariCP/2.5.1/HikariCP-2.5.1.jar:/Users/nsb/.m2/repository/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/Users/nsb/.m2/repository/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/Users/nsb/.m2/repository/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/Users/nsb/.m2/repository/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/Users/nsb/.m2/repository/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/Users/nsb/.m2/repository/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/Users/nsb/.m2/repository/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/Users/nsb/.m2/repository/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/Users/nsb/.m2/repository/com/google/inject/guice/3.0/guice-3.0.jar:/Users/nsb/.m2/repository/javax/inject/javax.inject/1/javax.inject-1.jar:/Users/nsb/.m2/repository/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/Users/nsb/.m2/repository/com/google/inject/extensions/guice-assistedi
nject/3.0/guice-assistedinject-3.0.jar:/Users/nsb/.m2/repository/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/Users/nsb/.m2/repository/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/Users/nsb/.m2/repository/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-common/2.3.1/hive-common-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/hive/hive-storage-api/2.3.1/hive-storage-api-2.3.1.jar:/Users/nsb/.m2/repository/org/apache/orc/orc-core/1.3.3/orc-core-1.3.3.jar:/Users/nsb/.m2/repository/jline/jline/2.12/jline-2.12.jar:/Users/nsb/.m2/repository/org/apache/logging/log4j/log4j-web/2.6.2/log4j-web-2.6.2.jar:/Users/nsb/.m2/repository/com/tdunning/json/1.8/json-1.8.jar:/Users/nsb/.m2/repository/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/Users/nsb/.m2/repository/org/apache/curator/curator-framework/2.7.1/curator-framework-2.7.1.jar:/Users/nsb/.m2/repository/org/apache/curator/curator-client/2.7.1/curator-client-2.7.1.jar:/Users/nsb/.m2/repository/org/apache/curator/curator-recipes/2.7.1/curator-recipes-2.7.1.jar:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-client/hudi-client-common/target/test-classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-client/hudi-spark-client/target/test-classes:/Users/nsb/Documents/personal/projects/virtual_keys_hudi/hudi/hudi-common/ta
rget/test-classes:/Users/nsb/.m2/repository/org/scalatest/scalatest_2.11/3.0.1/scalatest_2.11-3.0.1.jar:/Users/nsb/.m2/repository/org/scalactic/scalactic_2.11/3.0.1/scalactic_2.11-3.0.1.jar:/Users/nsb/.m2/repository/org/scala-lang/modules/scala-xml_2.11/1.0.5/scala-xml_2.11-1.0.5.jar:/Users/nsb/.m2/repository/org/junit/jupiter/junit-jupiter-api/5.7.0-M1/junit-jupiter-api-5.7.0-M1.jar:/Users/nsb/.m2/repository/org/apiguardian/apiguardian-api/1.1.0/apiguardian-api-1.1.0.jar:/Users/nsb/.m2/repository/org/opentest4j/opentest4j/1.2.0/opentest4j-1.2.0.jar:/Users/nsb/.m2/repository/org/junit/platform/junit-platform-commons/1.7.0-M1/junit-platform-commons-1.7.0-M1.jar:/Users/nsb/.m2/repository/org/junit/jupiter/junit-jupiter-engine/5.7.0-M1/junit-jupiter-engine-5.7.0-M1.jar:/Users/nsb/.m2/repository/org/junit/platform/junit-platform-engine/1.7.0-M1/junit-platform-engine-1.7.0-M1.jar:/Users/nsb/.m2/repository/org/junit/vintage/junit-vintage-engine/5.7.0-M1/junit-vintage-engine-5.7.0-M1.jar:/Users/nsb/.m2/repository/junit/junit/4.13/junit-4.13.jar:/Users/nsb/.m2/repository/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/Users/nsb/.m2/repository/org/junit/jupiter/junit-jupiter-params/5.7.0-M1/junit-jupiter-params-5.7.0-M1.jar:/Users/nsb/.m2/repository/org/mockito/mockito-junit-jupiter/3.3.3/mockito-junit-jupiter-3.3.3.jar:/Users/nsb/.m2/repository/org/mockito/mockito-core/3.3.3/mockito-core-3.3.3.jar:/Users/nsb/.m2/repository/net/bytebuddy/byte-buddy/1.10.5/byte-buddy-1.10.5.jar:/Users/nsb/.m2/repository/net/bytebuddy/byte-buddy-agent/1.10.5/byte-buddy-agent-1.10.5.jar:/Applications/IntelliJ IDEA CE.app/Contents/lib/idea_rt.jar" org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner -s org.apache.hudi.functional.HoodieSparkSqlWriterSuite -testName test_bulk_insert_for_NONE -showProgressMessages true | |
| Testing started at 12:55 AM ... | |
| Connected to the target VM, address: '127.0.0.1:49636', transport: 'socket' | |
| SLF4J: Class path contains multiple SLF4J bindings. | |
| SLF4J: Found binding in [jar:file:/Users/nsb/.m2/repository/org/slf4j/slf4j-log4j12/1.7.16/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class] | |
| SLF4J: Found binding in [jar:file:/Users/nsb/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.6.2/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class] | |
| SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. | |
| SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory] | |
| 0 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SparkContext - Running Spark version 2.4.4 | |
| 337 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable | |
| 461 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SparkContext - Submitted application: test_bulk_insert_datasource | |
| 529 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SecurityManager - Changing view acls to: nsb | |
| 530 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SecurityManager - Changing modify acls to: nsb | |
| 530 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SecurityManager - Changing view acls groups to: | |
| 531 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: | |
| 531 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nsb); groups with view permissions: Set(); users with modify permissions: Set(nsb); groups with modify permissions: Set() | |
| 886 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 49640. | |
| 906 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker | |
| 923 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster | |
| 926 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information | |
| 926 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up | |
| 942 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /private/var/folders/ym/8yjkm3n90kq8tk4gfmvk7y140000gn/T/blockmgr-d0498368-2d23-4730-ac92-5ccdbabeed81 | |
| 964 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 4.1 GB | |
| 978 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator | |
| 1057 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.util.log - Logging initialized @2604ms | |
| 1130 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: 2019-02-15T11:53:49-05:00, git hash: eb70b240169fcf1abbd86af36482d1c49826fa0b | |
| 1147 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.Server - Started @2695ms | |
| 1167 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@770beef5{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} | |
| 1167 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. | |
| 1190 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2be95d31{/jobs,null,AVAILABLE,@Spark} | |
| 1191 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@462e1e64{/jobs/json,null,AVAILABLE,@Spark} | |
| 1191 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@69afa141{/jobs/job,null,AVAILABLE,@Spark} | |
| 1192 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@52d6d273{/jobs/job/json,null,AVAILABLE,@Spark} | |
| 1193 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4667c4c1{/stages,null,AVAILABLE,@Spark} | |
| 1193 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f9a4401{/stages/json,null,AVAILABLE,@Spark} | |
| 1194 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22c75c01{/stages/stage,null,AVAILABLE,@Spark} | |
| 1195 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f0ed952{/stages/stage/json,null,AVAILABLE,@Spark} | |
| 1195 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6f044c58{/stages/pool,null,AVAILABLE,@Spark} | |
| 1196 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d074b14{/stages/pool/json,null,AVAILABLE,@Spark} | |
| 1196 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@c017175{/storage,null,AVAILABLE,@Spark} | |
| 1197 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@559cedee{/storage/json,null,AVAILABLE,@Spark} | |
| 1197 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@18371d89{/storage/rdd,null,AVAILABLE,@Spark} | |
| 1198 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f3faa70{/storage/rdd/json,null,AVAILABLE,@Spark} | |
| 1198 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4832f03b{/environment,null,AVAILABLE,@Spark} | |
| 1198 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7af3874e{/environment/json,null,AVAILABLE,@Spark} | |
| 1199 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5a8816cc{/executors,null,AVAILABLE,@Spark} | |
| 1201 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68565bc7{/executors/json,null,AVAILABLE,@Spark} | |
| 1202 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37baddde{/executors/threadDump,null,AVAILABLE,@Spark} | |
| 1202 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5115f590{/executors/threadDump/json,null,AVAILABLE,@Spark} | |
| 1210 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4b31a708{/static,null,AVAILABLE,@Spark} | |
| 1211 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5a034157{/,null,AVAILABLE,@Spark} | |
| 1212 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f4ba1ae{/api,null,AVAILABLE,@Spark} | |
| 1213 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@59fbb34{/jobs/job/kill,null,AVAILABLE,@Spark} | |
| 1214 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1b6924cb{/stages/stage/kill,null,AVAILABLE,@Spark} | |
| 1216 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://sivabalans-mbp.attlocal.net:4040 | |
| 1305 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost | |
| 1378 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 49641. | |
| 1378 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on sivabalans-mbp.attlocal.net:49641 | |
| 1380 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy | |
| 1411 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, sivabalans-mbp.attlocal.net, 49641, None) | |
| 1414 [dispatcher-event-loop-0] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager sivabalans-mbp.attlocal.net:49641 with 4.1 GB RAM, BlockManagerId(driver, sivabalans-mbp.attlocal.net, 49641, None) | |
| 1417 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, sivabalans-mbp.attlocal.net, 49641, None) | |
| 1417 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, sivabalans-mbp.attlocal.net, 49641, None) | |
| 1565 [ScalaTest-run-running-HoodieSparkSqlWriterSuite] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5eabff6b{/metrics/json,null,AVAILABLE,@Spark} | |
| 18669 [Executor task launch worker for task 7] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 4.0 (TID 7) | |
| org.apache.parquet.io.ParquetDecodingException: Failed to read 4 bytes | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:65) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readInteger(VectorizedPlainValuesReader.java:164) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readBinary(VectorizedPlainValuesReader.java:190) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedRleValuesReader.readBinarys(VectorizedRleValuesReader.java:426) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBinaryBatch(VectorizedColumnReader.java:472) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBatch(VectorizedColumnReader.java:220) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextBatch(VectorizedParquetRecordReader.java:261) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextKeyValue(VectorizedParquetRecordReader.java:159) | |
| at org.apache.spark.sql.execution.datasources.RecordReaderIterator.hasNext(RecordReaderIterator.scala:39) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.nextIterator(FileScanRDD.scala:181) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.scan_nextBatch_0$(Unknown Source) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source) | |
| at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
| at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:255) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
| at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
| at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
| at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
| at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
| at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
| at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
| at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
| at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
| at java.lang.Thread.run(Thread.java:748) | |
| Caused by: java.io.EOFException | |
| at org.apache.parquet.bytes.SingleBufferInputStream.slice(SingleBufferInputStream.java:116) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:63) | |
| ... 30 more | |
| 18669 [Executor task launch worker for task 8] ERROR org.apache.spark.executor.Executor - Exception in task 1.0 in stage 4.0 (TID 8) | |
| org.apache.parquet.io.ParquetDecodingException: Failed to read 4 bytes | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:65) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readInteger(VectorizedPlainValuesReader.java:164) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readBinary(VectorizedPlainValuesReader.java:190) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedRleValuesReader.readBinarys(VectorizedRleValuesReader.java:426) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBinaryBatch(VectorizedColumnReader.java:472) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBatch(VectorizedColumnReader.java:220) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextBatch(VectorizedParquetRecordReader.java:261) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextKeyValue(VectorizedParquetRecordReader.java:159) | |
| at org.apache.spark.sql.execution.datasources.RecordReaderIterator.hasNext(RecordReaderIterator.scala:39) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.nextIterator(FileScanRDD.scala:181) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.scan_nextBatch_0$(Unknown Source) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source) | |
| at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
| at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:255) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
| at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
| at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
| at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
| at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
| at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
| at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
| at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
| at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
| at java.lang.Thread.run(Thread.java:748) | |
| Caused by: java.io.EOFException | |
| at org.apache.parquet.bytes.SingleBufferInputStream.slice(SingleBufferInputStream.java:116) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:63) | |
| ... 30 more | |
| 18699 [task-result-getter-1] ERROR org.apache.spark.scheduler.TaskSetManager - Task 1 in stage 4.0 failed 1 times; aborting job | |
| Job aborted due to stage failure: Task 1 in stage 4.0 failed 1 times, most recent failure: Lost task 1.0 in stage 4.0 (TID 8, localhost, executor driver): org.apache.parquet.io.ParquetDecodingException: Failed to read 4 bytes | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:65) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readInteger(VectorizedPlainValuesReader.java:164) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readBinary(VectorizedPlainValuesReader.java:190) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedRleValuesReader.readBinarys(VectorizedRleValuesReader.java:426) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBinaryBatch(VectorizedColumnReader.java:472) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBatch(VectorizedColumnReader.java:220) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextBatch(VectorizedParquetRecordReader.java:261) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextKeyValue(VectorizedParquetRecordReader.java:159) | |
| at org.apache.spark.sql.execution.datasources.RecordReaderIterator.hasNext(RecordReaderIterator.scala:39) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.nextIterator(FileScanRDD.scala:181) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.scan_nextBatch_0$(Unknown Source) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source) | |
| at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
| at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:255) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
| at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
| at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
| at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
| at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
| at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
| at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
| at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
| at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
| at java.lang.Thread.run(Thread.java:748) | |
| Caused by: java.io.EOFException | |
| at org.apache.parquet.bytes.SingleBufferInputStream.slice(SingleBufferInputStream.java:116) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:63) | |
| ... 30 more | |
| Driver stacktrace: | |
| org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 4.0 failed 1 times, most recent failure: Lost task 1.0 in stage 4.0 (TID 8, localhost, executor driver): org.apache.parquet.io.ParquetDecodingException: Failed to read 4 bytes | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:65) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readInteger(VectorizedPlainValuesReader.java:164) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readBinary(VectorizedPlainValuesReader.java:190) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedRleValuesReader.readBinarys(VectorizedRleValuesReader.java:426) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBinaryBatch(VectorizedColumnReader.java:472) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBatch(VectorizedColumnReader.java:220) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextBatch(VectorizedParquetRecordReader.java:261) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextKeyValue(VectorizedParquetRecordReader.java:159) | |
| at org.apache.spark.sql.execution.datasources.RecordReaderIterator.hasNext(RecordReaderIterator.scala:39) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.nextIterator(FileScanRDD.scala:181) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.scan_nextBatch_0$(Unknown Source) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source) | |
| at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
| at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:255) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
| at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
| at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
| at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
| at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
| at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
| at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
| at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
| at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
| at java.lang.Thread.run(Thread.java:748) | |
| Caused by: java.io.EOFException | |
| at org.apache.parquet.bytes.SingleBufferInputStream.slice(SingleBufferInputStream.java:116) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:63) | |
| ... 30 more | |
| Driver stacktrace: | |
| at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1889) | |
| at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1877) | |
| at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1876) | |
| at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) | |
| at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) | |
| at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1876) | |
| at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926) | |
| at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926) | |
| at scala.Option.foreach(Option.scala:257) | |
| at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926) | |
| at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2110) | |
| at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2059) | |
| at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2048) | |
| at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49) | |
| at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737) | |
| at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061) | |
| at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082) | |
| at org.apache.spark.SparkContext.runJob(SparkContext.scala:2101) | |
| at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126) | |
| at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:945) | |
| at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) | |
| at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112) | |
| at org.apache.spark.rdd.RDD.withScope(RDD.scala:363) | |
| at org.apache.spark.rdd.RDD.collect(RDD.scala:944) | |
| at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:299) | |
| at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3389) | |
| at org.apache.spark.sql.Dataset$$anonfun$collectAsList$1.apply(Dataset.scala:2800) | |
| at org.apache.spark.sql.Dataset$$anonfun$collectAsList$1.apply(Dataset.scala:2799) | |
| at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3370) | |
| at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78) | |
| at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125) | |
| at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73) | |
| at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3369) | |
| at org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:2799) | |
| at org.apache.hudi.functional.HoodieSparkSqlWriterSuite$$anonfun$11$$anonfun$apply$5.apply(HoodieSparkSqlWriterSuite.scala:170) | |
| at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) | |
| at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) | |
| at org.scalatest.Transformer.apply(Transformer.scala:22) | |
| at org.scalatest.Transformer.apply(Transformer.scala:20) | |
| at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186) | |
| at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196) | |
| at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560) | |
| at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183) | |
| at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
| at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) | |
| at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289) | |
| at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196) | |
| at org.scalatest.FunSuite.runTest(FunSuite.scala:1560) | |
| at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
| at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) | |
| at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396) | |
| at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384) | |
| at scala.collection.immutable.List.foreach(List.scala:392) | |
| at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384) | |
| at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379) | |
| at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461) | |
| at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229) | |
| at org.scalatest.FunSuite.runTests(FunSuite.scala:1560) | |
| at org.scalatest.Suite$class.run(Suite.scala:1147) | |
| at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560) | |
| at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
| at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) | |
| at org.scalatest.SuperEngine.runImpl(Engine.scala:521) | |
| at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233) | |
| at org.scalatest.FunSuite.run(FunSuite.scala:1560) | |
| at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45) | |
| at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340) | |
| at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334) | |
| at scala.collection.immutable.List.foreach(List.scala:392) | |
| at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334) | |
| at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011) | |
| at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010) | |
| at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500) | |
| at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010) | |
| at org.scalatest.tools.Runner$.run(Runner.scala:850) | |
| at org.scalatest.tools.Runner.run(Runner.scala) | |
| at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38) | |
| at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25) | |
| Caused by: org.apache.parquet.io.ParquetDecodingException: Failed to read 4 bytes | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:65) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readInteger(VectorizedPlainValuesReader.java:164) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.readBinary(VectorizedPlainValuesReader.java:190) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedRleValuesReader.readBinarys(VectorizedRleValuesReader.java:426) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBinaryBatch(VectorizedColumnReader.java:472) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedColumnReader.readBatch(VectorizedColumnReader.java:220) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextBatch(VectorizedParquetRecordReader.java:261) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader.nextKeyValue(VectorizedParquetRecordReader.java:159) | |
| at org.apache.spark.sql.execution.datasources.RecordReaderIterator.hasNext(RecordReaderIterator.scala:39) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.nextIterator(FileScanRDD.scala:181) | |
| at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:101) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.scan_nextBatch_0$(Unknown Source) | |
| at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source) | |
| at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
| at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:255) | |
| at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) | |
| at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) | |
| at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) | |
| at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) | |
| at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
| at org.apache.spark.scheduler.Task.run(Task.scala:123) | |
| at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408) | |
| at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
| at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) | |
| at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
| at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
| at java.lang.Thread.run(Thread.java:748) | |
| Caused by: java.io.EOFException | |
| at org.apache.parquet.bytes.SingleBufferInputStream.slice(SingleBufferInputStream.java:116) | |
| at org.apache.spark.sql.execution.datasources.parquet.VectorizedPlainValuesReader.getBuffer(VectorizedPlainValuesReader.java:63) | |
| ... 30 more | |
| Disconnected from the target VM, address: '127.0.0.1:49636', transport: 'socket' | |
| Process finished with exit code 0 | 
  
    Sign up for free
    to join this conversation on GitHub.
    Already have an account?
    Sign in to comment