@meysampg
Created April 20, 2020 10:28
package mypackage_name_space_or_sth_like_that

// Some additional imports are elided here; they are internal packages unrelated
// to Kafka or Spark (e.g. Configurator, LogLevel).
import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies, OffsetRange}
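
// Build note (an assumption, not part of the original gist): the kafka010 classes
// above come from the spark-streaming-kafka-0-10 integration artifact, e.g. in sbt:
//   libraryDependencies += "org.apache.spark" %% "spark-streaming-kafka-0-10" % sparkVersion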
object Main {
  def main(args: Array[String]): Unit = {
    // `val` rather than `def`, so the configuration chain is built once instead of
    // on every lookup.
    val c = new Configurator(new EnvConfigurator, new FileConfigurator, new DefaultConfigurator)

    Logger.getLogger("org").setLevel(LogLevel(c("KS_SPARK_LOG_LEVEL")))
    Logger.getLogger("akka").setLevel(LogLevel(c("KS_SPARK_LOG_LEVEL")))

    val conf = new SparkConf().setMaster(c("KS_SPARK_MASTER_ADDRESS"))
      .set("spark.network.timeout", c("KS_SPARK_NETWORK_TIMEOUT"))
      .set("spark.executor.heartbeatInterval", c("KS_SPARK_EXECUTOR_HEARTBEATINTERVAL"))
      .set("spark.locality.wait", c("KS_SPARK_LOCALITY_WAIT"))
      .set("spark.serializer", classOf[KryoSerializer].getName)
      .set("spark.streaming.backpressure.enabled", "true")
      .set("spark.streaming.backpressure.initialRate", c("KS_SPARK_STREAMING_BACKPRESSURE_INITIALRATE"))
      .set("spark.streaming.unpersist", "true")
      .set("spark.streaming.kafka.consumer.poll.ms", c("KS_SPARK_STREAMING_KAFKA_CONSUMER_POLL_MS"))
      .setAppName("Kafka Spark Stream")
    val spark = SparkSession.builder.config(conf).getOrCreate()
    val ssc = new StreamingContext(spark.sparkContext, Seconds(c("KS_STREAM_WINDOW_TIME_SECONDS").toInt))

    val topics = c("KS_KAFKA_TOPICS").split(",").map(_.trim)
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> c("KS_KAFKA_BOOTSTRAP_SERVERS"),
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> c("KS_KAFKA_GROUP_ID"),
      "auto.offset.reset" -> c("KS_KAFKA_AUTO_OFFSET_RESET"),
      // Auto-commit is disabled so that offsets are committed manually after each
      // batch has been processed (see commitAsync below).
      "enable.auto.commit" -> (false: java.lang.Boolean),
      "session.timeout.ms" -> c("KS_KAFKA_SESSION_TIMEOUT_MS"),
      "heartbeat.interval.ms" -> c("KS_KAFKA_HEARTBEAT_INTERVAL_MS"),
      "max.poll.interval.ms" -> c("KS_KAFKA_MAX_POLL_INTERVAL_MS"),
      "request.timeout.ms" -> c("KS_KAFKA_REQUEST_TIMEOUT_MS"),
      "connections.max.idle.ms" -> c("KS_KAFKA_CONNECTIONS_MAX_IDLE_MS")
    )

    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent, // distribute partitions evenly across executors
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
    )

    stream.foreachRDD { rdd =>
      // Capture this batch's Kafka offset ranges before doing any work on the RDD.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      // Do your work on the RDD here.
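      // Hypothetical illustration (not part of the original gist): log, on the
      // driver, which offsets each topic-partition contributed to this batch.
      offsetRanges.foreach { range =>
        println(s"${range.topic}-${range.partition}: ${range.fromOffset} -> ${range.untilOffset}")
      }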
      // Commit the offsets only after the batch work has finished, giving
      // at-least-once delivery semantics.
      stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
    ssc.stop()
  }
}
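
// A sketch of how this might be launched, assuming EnvConfigurator reads the KS_*
// values from environment variables (the jar name and values below are examples,
// not from the gist):
//   KS_KAFKA_TOPICS="events" KS_KAFKA_BOOTSTRAP_SERVERS="broker:9092" \
//     spark-submit --class mypackage_name_space_or_sth_like_that.Main kafka-spark-stream.jar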