import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Add the MySQL JDBC driver as a dependency in build.sbt:
//   libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.43"

// Initialization: a local SparkContext with 2 worker threads
val conf = new SparkConf().setAppName("xx").setMaster("local[2]")
val sc = new SparkContext(conf)

val hiveContext = new HiveContext(sc)
  // a plain SQLContext (Spark SQL without Hive support) would work just as well here
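
// A minimal sketch of the SQLContext alternative (no Hive dependency needed;
// SQLContext ships with spark-sql). Left commented out; it would reuse the
// same `prop` defined below:
//   import org.apache.spark.sql.SQLContext
//   val sqlContext = new SQLContext(sc)
//   val df = sqlContext.read.jdbc("jdbc:mysql://localhost:3306/retail_db", "products", prop)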

// JDBC connection properties (MySQL credentials)
val prop = new java.util.Properties
prop.setProperty("user", "root")
prop.setProperty("password", "XXX")
// Load the `products` table from retail_db over JDBC as a DataFrame
val df_mysql = hiveContext.read.jdbc("jdbc:mysql://localhost:3306/retail_db", "products", prop)
// In PySpark the equivalent would be:
//   sqlContext.read.jdbc("jdbc:mysql://localhost:3306/retail_db", "products", properties={"user": "root", "password": "cloudera"})
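
// A parallel-read variant: partition the JDBC scan on a numeric column so Spark
// issues one query per partition. `product_id` and the bounds below are
// assumptions; adjust them to your table (the bounds only steer how rows are
// split across partitions, they do not filter the result):
val df_par = hiveContext.read.jdbc(
  "jdbc:mysql://localhost:3306/retail_db", "products",
  columnName = "product_id", lowerBound = 1L, upperBound = 1000L,
  numPartitions = 4, connectionProperties = prop)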
df_mysql.show()
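
// Follow-up sketch: register the DataFrame as a temp table, query it with SQL,
// then write the result back over JDBC. The `product_price` column and the
// target table name `products_cheap` are assumptions for illustration:
df_mysql.registerTempTable("products")
val cheap = hiveContext.sql("SELECT * FROM products WHERE product_price < 100")
cheap.write.jdbc("jdbc:mysql://localhost:3306/retail_db", "products_cheap", prop)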