More patches can be found here: https://github.com/willb/spark-packaging
Created: July 31, 2014, 06:33
Save zygm0nt/93cb1af594f173ca4053 to your computer and use it in GitHub Desktop.
Spark 1.0.1 with Akka 2.3.0
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
diff --git i/core/src/main/scala/org/apache/spark/deploy/Client.scala w/core/src/main/scala/org/apache/spark/deploy/Client.scala
index aeb159a..85a0308 100644
--- i/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ w/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -129,7 +129,7 @@ private class ClientActor(driverArgs: ClientArguments, conf: SparkConf) extends
         println(s"Error connecting to master ${driverArgs.master} ($remoteAddress), exiting.")
         System.exit(-1)
-      case AssociationErrorEvent(cause, _, remoteAddress, _) =>
+      case AssociationErrorEvent(cause, _, remoteAddress, _, _) =>
         println(s"Error connecting to master ${driverArgs.master} ($remoteAddress), exiting.")
         println(s"Cause was: $cause")
         System.exit(-1)
diff --git i/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala w/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
index d38e9e7..47d9793 100644
--- i/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
+++ w/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
@@ -154,7 +154,7 @@ private[spark] class AppClient(
         logWarning(s"Connection to $address failed; waiting for master to reconnect...")
         markDisconnected()
-      case AssociationErrorEvent(cause, _, address, _) if isPossibleMaster(address) =>
+      case AssociationErrorEvent(cause, _, address, _, _) if isPossibleMaster(address) =>
         logWarning(s"Could not connect to $address: $cause")
       case StopAppClient =>
diff --git i/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala w/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
index 530c147..34fb641 100644
--- i/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
+++ w/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
@@ -52,7 +52,7 @@ private[spark] class WorkerWatcher(workerUrl: String) extends Actor
     case AssociatedEvent(localAddress, remoteAddress, inbound) if isWorker(remoteAddress) =>
       logInfo(s"Successfully connected to $workerUrl")
-    case AssociationErrorEvent(cause, localAddress, remoteAddress, inbound)
+    case AssociationErrorEvent(cause, localAddress, remoteAddress, inbound, _)
         if isWorker(remoteAddress) =>
       // These logs may not be seen if the worker (and associated pipe) has died
       logError(s"Could not initialize connection to worker $workerUrl. Exiting.")
diff --git i/project/SparkBuild.scala w/project/SparkBuild.scala
index ee4c3e1..30d7808 100644
--- i/project/SparkBuild.scala
+++ w/project/SparkBuild.scala
@@ -32,7 +32,7 @@ import scala.collection.JavaConversions._
 // import com.jsuereth.pgp.sbtplugin.PgpKeys._
 object SparkBuild extends Build {
-  val SPARK_VERSION = "1.0.1"
+  val SPARK_VERSION = "1.0.1-akka-2.3-cdh-4.2.1"
   val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")
   // Hadoop version to build against. For example, "1.0.4" for Apache releases, or
@@ -298,7 +298,8 @@ object SparkBuild extends Build {
     publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn
   ) ++ net.virtualvoid.sbt.graph.Plugin.graphSettings ++ ScalaStyleSettings ++ genjavadocSettings
-  val akkaVersion = "2.2.3-shaded-protobuf"
+  //val akkaVersion = "2.2.3-shaded-protobuf"
+  val akkaVersion = "2.3.0"
   val chillVersion = "0.3.6"
   val codahaleMetricsVersion = "3.0.0"
   val jblasVersion = "1.2.3"
@@ -344,9 +345,9 @@ object SparkBuild extends Build {
     "commons-daemon" % "commons-daemon" % "1.0.10", // workaround for bug HADOOP-9407
     "com.ning" % "compress-lzf" % "1.0.0",
     "org.xerial.snappy" % "snappy-java" % "1.0.5",
-    "org.spark-project.akka" %% "akka-remote" % akkaVersion,
-    "org.spark-project.akka" %% "akka-slf4j" % akkaVersion,
-    "org.spark-project.akka" %% "akka-testkit" % akkaVersion % "test",
+    "com.typesafe.akka" %% "akka-remote" % akkaVersion,
+    "com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
+    "com.typesafe.akka" %% "akka-testkit" % akkaVersion % "test",
     "org.json4s" %% "json4s-jackson" % "3.2.6" excludeAll(excludeScalap),
     "colt" % "colt" % "1.2.0",
     "org.apache.mesos" % "mesos" % "0.18.1" classifier("shaded-protobuf") exclude("com.google.protobuf", "protobuf-java"),
@@ -643,7 +644,7 @@ object SparkBuild extends Build {
       name := "spark-streaming-zeromq",
       previousArtifact := sparkPreviousArtifact("spark-streaming-zeromq"),
       libraryDependencies ++= Seq(
-        "org.spark-project.akka" %% "akka-zeromq" % akkaVersion
+        "com.typesafe.akka" %% "akka-zeromq" % akkaVersion
       )
    )
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.