import java.io._

@SerialVersionUID(15L)
class Animal(name: String, age: Int) extends Serializable {
  override def toString = s"Animal($name, $age)"
}

case class Person(name: String)

// or fork := true in sbt (see the sketch after this file)
class ObjectInputStreamWithCustomClassLoader(
  fileInputStream: FileInputStream
) extends ObjectInputStream(fileInputStream) {
  override def resolveClass(desc: java.io.ObjectStreamClass): Class[_] = {
    // Try this class's own class loader first; fall back to the default
    // resolution only if the class isn't visible from here.
    try { Class.forName(desc.getName, false, getClass.getClassLoader) }
    catch { case ex: ClassNotFoundException => super.resolveClass(desc) }
  }
}

object MyDeserialize extends App {
  val fis = new FileInputStream("../a.tmp")
  val ois = new ObjectInputStreamWithCustomClassLoader(fis)
  val animal = ois.readObject
  val person = ois.readObject
  ois.close()
  println(animal)
  println(person)
}
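The "// or fork := true in sbt" comment above refers to an alternative fix: instead of overriding resolveClass, you can have sbt fork a separate JVM for run, so the deserializer executes under the plain application class loader. A minimal sketch of that setting, assuming a standard build.sbt:

// build.sbt -- minimal sketch of the alternative mentioned in the comment above.
// Forking makes `sbt run` start MyDeserialize in a fresh JVM, so the default
// ObjectInputStream class resolution can find Animal and Person without the
// custom resolveClass override.
fork := true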
import java.io.FileOutputStream
import java.io.ObjectOutputStream

@SerialVersionUID(15L)
class Animal(name: String, age: Int) extends Serializable {
  override def toString = s"Animal($name, $age)"
}

case class Person(name: String) extends Serializable

object MySerialize extends App {
  val fos = new FileOutputStream("../a.tmp")
  val oos = new ObjectOutputStream(fos)
  oos.writeObject(new Animal("Dvorak", 12))
  oos.writeObject(Person("Dijkstra"))
  oos.close()
}
I think it is recommended to use Scala pickling rather than Java serialization in Scala.
@danyaljj
It definitely is, yet pickling wasn't able to save, and didn't even warn me about its inability, which was kind of frustrating after 10 hours of computation.
Can someone please explain how ObjectInputStreamWithCustomClassLoader works?
https://gist.github.com/ramn/5566596#file-serialization-scala
Awesome, this worked perfectly for me!
Thank you very much! :smiley: I have tried several other approaches (libraries like Scala Pickling, uPickle, Sphere JSON, Kryo + Chill), but none was able to properly handle dynamic (de)serialization and/or things like a List filled with case classes or generic container classes.
Thank you very much!
+1 Bravo
@ramn This worked like a charm. I tried all the other suggestions, like pointing to the jar via "spark.jars", but they didn't work. Did anyone ever figure out why we need this?
I still don't understand why this is necessary, but I'm glad I found it. Thanks for posting this!