I hereby claim:
- I am tomlous on github.
- I am tomlous (https://keybase.io/tomlous) on keybase.
- I have a public key ASCG1hBKpJp-h17yZpmJJKWukUQKonBAZT11kMFmT_ub6go
To claim this, I am signing this object:
trait Parser {
  def parse(i: Int): String
}

object ParserA extends Parser {
  override def parse(i: Int): String = s"A: $i"
}

object ParserB extends Parser {
  override def parse(i: Int): String = s"B: $i"
}
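// Usage sketch (added; not part of the original gist): with plain objects the
// caller picks the Parser implementation explicitly.
def parseWith(parser: Parser, i: Int): String = parser.parse(i)

parseWith(ParserA, 1) // "A: 1"
parseWith(ParserB, 2) // "B: 2"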
trait Parser {
  def parse(i: Int): String
}

implicit object ParserA extends Parser {
  override def parse(i: Int): String = s"A: $i"
}

// only one implicit Parser may be in scope, otherwise implicit resolution is ambiguous
//implicit object ParserB extends Parser {
//  override def parse(i: Int): String = s"B: $i"
//}
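// Usage sketch (added; not part of the original gist): an implicit parameter
// lets the compiler pick up whichever Parser is implicitly in scope.
def parse(i: Int)(implicit parser: Parser): String = parser.parse(i)

parse(42) // ParserA is resolved implicitly -> "A: 42"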
trait Parser {
  def parse(i: Int): String
}

trait ParserA extends Parser {
  override def parse(i: Int): String = s"A: $i"
}

trait ParserB extends Parser {
  override def parse(i: Int): String = s"B: $i"
}
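// Usage sketch (added; not part of the original gist): with traits, an
// implementation is chosen by mixing one in at instantiation time.
val parser = new ParserA {} // or: new ParserB {}
parser.parse(7) // "A: 7"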
/**
 * Merging two maps, keeping the first value if available, otherwise the second
 */
val map1 = Map("a" -> Some(1), "b" -> Some(3), "c" -> None, "d" -> None, "h" -> Some(34))
val map2 = Map("a" -> None, "b" -> Some(11), "c" -> Some(22), "d" -> None, "e" -> Some(12), "f" -> None)

// Option 1: concatenate, group by key, and take the first defined value per key
(map1.toList ++ map2.toList)
  .groupBy { case (key, _) => key }
  .map { case (key, pairs) => key -> pairs.flatMap { case (_, value) => value }.headOption }
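// Option 2 (added sketch, not in the original snippet): walk the union of keys
// and prefer map1's value whenever it is defined.
(map1.keySet ++ map2.keySet).map { key =>
  key -> map1.getOrElse(key, None).orElse(map2.getOrElse(key, None))
}.toMap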
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.functions._

val spark = SparkSession.builder().master("local").appName("jobposting").getOrCreate()
import spark.implicits._

// UDFs with a Row-typed parameter operate on struct columns
val f1 = udf { (x: Double, y: Row) => y.getInt(0) + 1 }
val f2 = udf { (x: Int, y: Row) => (y.getInt(1) / (x + 3)) - y.getInt(0) }
val f3 = udf { (x: Int) => x.toChar.toString }
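// Usage sketch (added; the column names and data values are assumptions):
// Row-typed UDF parameters receive struct columns, assembled here with struct().
val df = Seq((1.0, 10, 20), (2.0, 30, 40)).toDF("x", "a", "b")

df.select(
  f1($"x", struct($"a", $"b")) as "r1", // a + 1
  f2($"a", struct($"a", $"b")) as "r2", // (b / (a + 3)) - a
  f3(lit(65)) as "r3"                   // "A"
).show()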
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

case class DatlinqRecord(id: String, typeName: String)
case class LSH(hash: String, record: DatlinqRecord)

val spark = SparkSession.builder()
  .master("local[2]")
  .appName("test")
  .getOrCreate()
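// Continuation sketch (added; the original gist is truncated here, and the
// sample data is invented): group LSH entries by hash to find records that
// collide on the same signature.
import spark.implicits._

val candidates = Seq(
  LSH("h1", DatlinqRecord("1", "cafe")),
  LSH("h1", DatlinqRecord("2", "cafe")),
  LSH("h2", DatlinqRecord("3", "bar"))
).toDS()

candidates
  .groupByKey(_.hash)
  .mapGroups((hash, rows) => hash -> rows.map(_.record.id).toSeq)
  .show(truncate = false)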
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.from_unixtime

val spark = SparkSession.builder()
  .master("local[2]")
  .appName("test")
  .getOrCreate()
import spark.implicits._
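// Usage sketch (added; the epoch values are arbitrary): from_unixtime converts
// epoch seconds into a formatted timestamp string.
val df = Seq(1501545600L, 1501632000L).toDF("epoch")
df.withColumn("ts", from_unixtime($"epoch", "yyyy-MM-dd HH:mm:ss")).show()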
name := "[project name]"

version := "0.1"

scalaVersion := "2.12.3"
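// Dependency sketch (added; not part of the original build file, and the Spark
// version is an assumption): the Spark snippets above additionally need
// spark-sql on the classpath, e.g. a Scala 2.12-compatible release.
libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.4.8"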