Spark 1.6.3 FileNotFoundException when a registered partition directory is missing (SPARK-16313)
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

// Pre-imported in the spark-shell; needed for .toDF
import sqlContext.implicits._

val fs = FileSystem.get(new Configuration())

case class TestRecord(a: String, part: Int)

// Write a small partitioned Parquet dataset (creates the part=1 and part=2 directories)
val df = Seq(TestRecord("one", 1), TestRecord("two", 2), TestRecord("three", 2)).toDF
df.write.partitionBy("part").parquet("/tmp/test_data")

// Register an external table over the data and add three partitions,
// including part=3, which has no backing directory
sqlContext.sql("create external table foo (a string) partitioned by (part int) stored as parquet location '/tmp/test_data'")
sqlContext.sql("alter table foo add partition (part=1)")
sqlContext.sql("alter table foo add partition (part=2)")
sqlContext.sql("alter table foo add partition (part=3)")

// Delete /tmp/test_data/part=3 (it was never written, so this just ensures it does not exist)
fs.delete(new Path("/tmp/test_data/part=3"), true)

// Throws FileNotFoundException in Spark 1.6.3, but not in Spark 1.6.1 - see SPARK-16313
sqlContext.sql("select * from foo limit 0").show

// Add the directory back
fs.mkdirs(new Path("/tmp/test_data/part=3"))

// Shouldn't throw any exceptions
sqlContext.sql("select * from foo limit 0").show