Running the following code throws a NotSerializableException when using the case class.
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

import scala.util.control.Exception._
import scala.util.parsing.combinator.Parsers
import scala.util.parsing.input.{CharSequenceReader, OffsetPosition, Reader}
import scala.language.postfixOps

case class myCaseClass(c: Char) extends Serializable

object ParserTest2 {

  object Ab01Parsers extends Parsers {
    type Elem = Char

    /* **************************************************** */
    // 2 parsers: the code is identical, with the difference that one returns
    // RDD[myCaseClass] and the other RDD[Char]
    def charCaseClassParser(expected: Char, sc: SparkContext) = new Parser[RDD[myCaseClass]] {
      def apply(in: Input): ParseResult[RDD[myCaseClass]] = {
        if (in.atEnd) Failure("At end of Input", in)
        else {
          val c = in.first
          if (c == expected) Success(sc.parallelize(Seq(myCaseClass(c)), 3), in.rest)
          else Failure("Expected '" + expected + "' got '" + c + "'", in)
        }
      }
    }

    def charParser(expected: Char, sc: SparkContext) = new Parser[RDD[Char]] {
      def apply(in: Input): ParseResult[RDD[Char]] = {
        if (in.atEnd) Failure("At end of Input", in)
        else {
          val c = in.first
          if (c == expected) Success(sc.parallelize(Seq(c), 3), in.rest)
          else Failure("Expected '" + expected + "' got '" + c + "'", in)
        }
      }
    }

    /* **************************************************** */
    // 2 runners: one using each type of parser. The postfix '*' turns the
    // single-character parser into its repetition, so a successful parse
    // yields one RDD per matched character.
    def runCaseClass(s: String, sc: SparkContext): List[RDD[myCaseClass]] = {
      val myParser = charCaseClassParser('a', sc)*
      val input = new CharSequenceReader(s)
      myParser(input) match {
        case Success(list, next) if (next.atEnd) => list
        // either an error or there is still input left
        case _ => List.empty
      }
    }

    def run(s: String, sc: SparkContext): List[RDD[Char]] = {
      val myParser = charParser('a', sc)*
      val input = new CharSequenceReader(s)
      myParser(input) match {
        case Success(list, next) if (next.atEnd) => list
        // either an error or there is still input left
        case _ => List.empty
      }
    }

    def myTest(s: String, sc: SparkContext): (List[RDD[Char]], List[RDD[myCaseClass]]) = {
      (run(s, sc), runCaseClass(s, sc))
    }
  }
}
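
For context, a minimal driver along the following lines can be used to exercise both runners. This is a sketch and not part of the original gist: the ParserTest2Runner object, the local[2] master and the input string "aaa" are assumptions, and collect() is called only to force evaluation of the resulting RDDs.

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical driver (not part of the original gist): builds a local
// SparkContext, runs both parsers on the same input, and forces evaluation
// with collect() so that any serialization problem actually surfaces.
object ParserTest2Runner {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ParserTest2").setMaster("local[2]")
    val sc = new SparkContext(conf)
    try {
      val (charRDDs, caseClassRDDs) = ParserTest2.Ab01Parsers.myTest("aaa", sc)
      // The RDD[Char] results are expected to collect fine...
      charRDDs.foreach(rdd => println(rdd.collect().mkString(",")))
      // ...while collecting the RDD[myCaseClass] results is where the
      // reported serialization error would show up.
      caseClassRDDs.foreach(rdd => println(rdd.collect().mkString(",")))
    } finally {
      sc.stop()
    }
  }
}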