Env: Spark 2.2.0 with the Kafka 0.10 integration
./spark-shell --packages org.apache.spark:spark-streaming-kafka-0-10_2.11:2.2.0
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/  '_/
   /___/ .__/\_,_/_/ /_/\_\   version 2.2.0
      /_/
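With the shell up, a direct stream can be created against Kafka. A minimal sketch using the 0-10 integration API; the broker address (localhost:9092), topic name (test-topic), and group id are assumptions for the example:

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

val ssc = new StreamingContext(sc, Seconds(5))  // sc is provided by spark-shell

val kafkaParams = Map[String, Object](
  "bootstrap.servers" -> "localhost:9092",            // assumption: local broker
  "key.deserializer" -> classOf[StringDeserializer],
  "value.deserializer" -> classOf[StringDeserializer],
  "group.id" -> "spark-shell-test",                   // assumption: arbitrary group id
  "auto.offset.reset" -> "latest",
  "enable.auto.commit" -> (false: java.lang.Boolean)
)

val stream = KafkaUtils.createDirectStream[String, String](
  ssc,
  PreferConsistent,
  Subscribe[String, String](Seq("test-topic"), kafkaParams)  // assumption: topic name
)

// Print each record's value per batch to confirm messages are flowing.
stream.map(record => record.value).print()
ssc.start()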
import cloudflow.sbt.CloudflowKeys.{cloudflowDockerImageName, cloudflowDockerRegistry, cloudflowDockerRepository}
import cloudflow.sbt.ImagePlugin
import sbt.{AutoPlugin, Def, taskKey}

// Declares the custom task key so it can be mixed into the plugin's autoImport.
trait Key {
  val cloudflowImageName = taskKey[String]("The name of the Docker image to publish.")
}

object ImageNamePlugin extends AutoPlugin {
  // Activate only where Cloudflow's ImagePlugin is already enabled.
  override def requires = ImagePlugin

  // Expose cloudflowImageName to build definitions automatically.
  object autoImport extends Key
}
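A minimal usage sketch from a build.sbt, assuming the plugin is on the build classpath; the image name here is a placeholder, not Cloudflow's actual naming scheme:

// build.sbt (sketch)
lazy val root = (project in file("."))
  .enablePlugins(ImageNamePlugin)
  .settings(
    cloudflowImageName := "registry.example.com/team/my-app"  // assumption: placeholder value
  )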
package so

import java.io.PrintStream
import java.net.Socket

class SocketHandler(socket: Socket) {
  // Write each element as a line to the socket's output stream.
  def deliver(data: Iterator[String]): Unit = {
    val out = new PrintStream(socket.getOutputStream)
    data.foreach(out.println)
    out.flush()
  }
}
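A sketch of how the handler might be driven, assuming a throwaway server on port 9999 and a single client:

import java.net.ServerSocket

// Accept one client and stream three lines to it.
val server = new ServerSocket(9999)  // assumption: port chosen for the example
val client = server.accept()
new so.SocketHandler(client).deliver(Iterator("a", "b", "c"))
client.close()
server.close()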
import java.io.File

import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
import org.apache.spark.sql._
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Try
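These imports set up a streaming linear regression job. A minimal sketch of how they typically fit together, assuming training data lands as LabeledPoint text files under /tmp/training and has three features:

import org.apache.spark.SparkConf
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.{LabeledPoint, StreamingLinearRegressionWithSGD}
import org.apache.spark.streaming.{Seconds, StreamingContext}

val conf = new SparkConf().setAppName("streaming-lr").setMaster("local[2]")
val ssc = new StreamingContext(conf, Seconds(10))

// Each file dropped into the directory is parsed into labeled points,
// e.g. a line of the form "(1.0,[0.5,0.2,0.1])".
val training = ssc.textFileStream("/tmp/training")  // assumption: path for the example
  .map(LabeledPoint.parse)

// Weights start at zero; the model is updated with each incoming batch.
val model = new StreamingLinearRegressionWithSGD()
  .setInitialWeights(Vectors.zeros(3))  // assumption: 3 features

model.trainOn(training)
ssc.start()
ssc.awaitTermination()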