Skip to content

Instantly share code, notes, and snippets.

View rupeshtr78's full-sized avatar

rupeshtr78 rupeshtr78

View GitHub Profile
@rupeshtr78
rupeshtr78 / Apply.scala
Last active November 4, 2020 02:41
apply method
object Greet {
  /** Builds a greeting for `name`, e.g. Greet("bob") => "Hello bob". */
  def apply(name: String): String = {
    "Hello %s".format(name)
  }
}
// I can call apply explicitly if I want:
Greet.apply("bob") // => "Hello bob"
// Or I can call Greet like it is a function:
Greet("bob") // => "Hello bob" -- the compiler rewrites this to Greet.apply("bob")
@rupeshtr78
rupeshtr78 / Function1Scala.scala
Last active November 4, 2020 02:13
function1 signature
// A lambda literal is just sugar for an anonymous Function1 instance:
// both values below multiply their argument by 100.
val function1 = new Function1[Int, Int] {
  def apply(x: Int): Int = x * 100
}
val res: Int => Int = n => n * 100
assert(res(10) == function1(10))
@rupeshtr78
rupeshtr78 / CaseClass.scala
Last active November 4, 2020 02:12
case class
// NOTE(review): three alternative definitions of `Person` appear below.
// They are separate gist examples — pasted into one scope they will not
// compile (duplicate class name, and `p1`/`p2` are redefined).
// Variant 1: java.lang.Integer is used here (boxed); plain `Int` is the
// idiomatic Scala choice unless Java interop/nullable age is required.
case class Person(name: String, age: Integer, favColor: String)
// Variant 2: default age lets the argument be omitted at the call site.
case class Person(name:String,age:Int = 0)
val p1 = Person("Rupesh",42)
val p2 = Person("Roopa") // age defaults to 0
// Variant 3: Option[Int] models a possibly-absent age explicitly.
case class Person(name:String,age:Option[Int] = Some(0))
val p1 = Person("Rupesh",Some(42))
val p2 = Person("Roopa") // age defaults to Some(0)
@rupeshtr78
rupeshtr78 / SparkRead.scala
Last active November 4, 2020 02:12
SparkRead — all reader options in one place
// Read the retail CSV. All reader configuration (options + schema) must be
// applied to the DataFrameReader BEFORE the terminal .csv(...) call: .csv
// returns a DataFrame, which has no .option / .schema(arg) setters — the
// original chain called them after .csv and would not compile.
spark.read
  .option("header", "true")
  .option("mode", "FAILFAST") // parse modes: failFast, dropMalformed, permissive (default)
  .option("inferSchema", "true") // NOTE(review): redundant once an explicit schema is set — verify intent
  .option("path", "path/to/file(s)")
  .option("delimiter", "||")
  .option("recursiveFileLookup", "true") // recursively read all files under the directory
  .schema(sparkguide.myManualSchema) // reader setting, so it goes before .csv
  .csv("data/retail-data/all/online-retail-dataset.csv")
  .repartition(2) // DataFrame transformation — valid after the read
// Schema for the user dataset: fold the (column, DDL type) pairs into a
// StructType instead of chaining .add calls — the resulting schema is
// identical.
val userSchema = Seq(
  "id" -> "integer",
  "name" -> "string",
  "age" -> "integer",
  "friends" -> "integer"
).foldLeft(new StructType()) { case (schema, (col, dt)) => schema.add(col, dt) }
// NOTE(review): this snippet is truncated in the captured text — the
// Array(...) and StructType(...) opened here are never closed, and any
// remaining StructField entries are missing. Do not paste as-is; recover
// the full definition from the original gist before use.
val carSchema = StructType(Array(
StructField("Name", StringType),
StructField("Miles_per_Gallon", DoubleType),
/**
 * Re-throws `exception` and classifies it with pattern-matching catch
 * clauses. The subtype case comes first: IllegalArgumentException extends
 * RuntimeException, so the order of the clauses matters.
 */
def catchBlocksPatternMatching(exception: Exception): String =
  try throw exception
  catch {
    case _: IllegalArgumentException => "It's an IllegalArgumentException"
    case _: RuntimeException => "It's a RuntimeException"
    case _ => "It's an unknown kind of exception"
  }
@rupeshtr78
rupeshtr78 / ListPatternMatching.scala
Last active November 10, 2020 03:02
ListPatternMatching
/**
 * Demonstrates the common List extractors, returning a description of the
 * shape of `someList`. Cases are ordered most-specific first: in the
 * original, `x :: xs` preceded `1 :: 2 :: cs`, `List(x)` and
 * `(x, y) :: ps`, making all three unreachable (and `List(x)` duplicated
 * `x :: Nil`, so it is dropped). `Nil` plus the cons cases are exhaustive
 * for List, so no dead default case is needed.
 */
def listPatternMatching[T](someList: List[T]): String =
  someList match {
    case Nil => s"empty list"
    case 1 :: 2 :: cs => s"lists that starts with 1 and then 2" // cs may be Nil or more elements
    case (x, y) :: ps => s"a list where the head element is a pair"
    case x :: Nil => s"list with only one element"
    case x :: xs => s"a list with at least one element. $x is the head xs to the tail," // xs could be Nil or some other list.
  }
/**
 * Classifies `any` by runtime type/shape and returns a description.
 * Specific list shapes are matched before the guarded generic list case:
 * in the original, the `list: List[Any]` guard case came first, so
 * `List(0, _, _)` and `List(1, _*)` were unreachable for lists of size
 * <= 10. A default case is added so unmatched inputs return a string
 * instead of throwing MatchError.
 */
def typedPatternMatching(any: Any): String =
  any match {
    case string: String => s"I'm a string. My value: $string"
    case integer: Int => s"I'm an integer. My value: $integer"
    case double: Double => s"$double"
    case List(0, _, _) => "a three-element list with 0 as the first element"
    case List(1, _*) => "a list beginning with 1, any number of elements"
    case list: List[Any] if list.size <= 10 => "Guards" // element type is erased: List[Any] is unchecked
    case Vector(1, _*) => "a vector starting with 1, any number of elements"
    case map: Map[_, _] => map.keys.toString()
    case _ => "default case if none of the above matches" // was missing: avoids MatchError
  }
// Build (or reuse the already-running) SparkSession for this JVM.
val spark = SparkSession.builder()
.appName("Accumulator") // name shown in the Spark UI
.master("local[2]") // local mode with 2 worker threads
.config("spark.driver.memory", "3g") // NOTE(review): driver memory generally must be set before the driver JVM starts; verify this takes effect in local mode
.config("spark.executor.memory", "3g")
.config("spark.sql.shuffle.partitions",5) // default is 200; lowered for a small local run
.getOrCreate()
// Implicit parameters: when the argument list is omitted at the call
// site, the compiler fills it in from the implicit values in scope.
implicit val int: Int = 42

// `integer` is supplied implicitly (the `int` above), or can still be
// passed explicitly, e.g. strImplicit(7).
def strImplicit(implicit integer: Int): String =
  s"$integer is implicitly passed into function"

println(strImplicit) // 42 is implicitly passed into function