Commits
41 commits
6d6776a
SPARK-1729. Make Flume pull data from source, rather than the current…
harishreedharan May 9, 2014
d24d9d4
SPARK-1729. Make Flume pull data from source, rather than the current…
harishreedharan May 18, 2014
08176ad
SPARK-1729. Make Flume pull data from source, rather than the current…
harishreedharan May 18, 2014
03d6c1c
SPARK-1729. Make Flume pull data from source, rather than the current…
harishreedharan May 19, 2014
8df37e4
SPARK-1729. Make Flume pull data from source, rather than the current…
harishreedharan May 20, 2014
87775aa
SPARK-1729. Make Flume pull data from source, rather than the current…
harishreedharan May 21, 2014
0f10788
SPARK-1729. Make Flume pull data from source, rather than the current…
harishreedharan May 24, 2014
c604a3c
SPARK-1729. Optimize imports.
harishreedharan Jun 5, 2014
9741683
SPARK-1729. Fixes based on review.
harishreedharan Jun 6, 2014
e7da512
SPARK-1729. Fixing import order
harishreedharan Jun 6, 2014
d6fa3aa
SPARK-1729. New Flume-Spark integration.
harishreedharan Jun 10, 2014
70bcc2a
SPARK-1729. New Flume-Spark integration.
harishreedharan Jun 10, 2014
3c23c18
SPARK-1729. New Spark-Flume integration.
harishreedharan Jun 10, 2014
0d69604
FLUME-1729. Better Flume-Spark integration.
harishreedharan Jun 16, 2014
bda01fc
FLUME-1729. Flume-Spark integration.
harishreedharan Jun 17, 2014
4b0c7fc
FLUME-1729. New Flume-Spark integration.
harishreedharan Jun 18, 2014
205034d
Merging master in
harishreedharan Jun 18, 2014
86aa274
Merge remote-tracking branch 'asf/master'
harishreedharan Jul 10, 2014
8136aa6
Adding TransactionProcessor to map on returning batch of data
harishreedharan Jul 14, 2014
9fd0da7
SPARK-1729. Use foreach instead of map for all Options.
harishreedharan Jul 14, 2014
120e2a1
SPARK-1729. Some test changes and changes to utils classes.
harishreedharan Jul 15, 2014
393bd94
SPARK-1729. Use LinkedBlockingQueue instead of ArrayBuffer to keep tr…
harishreedharan Jul 15, 2014
8c00289
More debug messages
harishreedharan Jul 15, 2014
1edc806
SPARK-1729. Update logging in Spark Sink.
harishreedharan Jul 15, 2014
10b6214
Changed public API, changed sink package, and added java unit test to…
tdas Jul 17, 2014
d248d22
Merge pull request #1 from tdas/flume-polling
Jul 17, 2014
3c5194c
Merge remote-tracking branch 'asf/master'
harishreedharan Jul 19, 2014
799509f
Fix a compile issue.
harishreedharan Jul 21, 2014
3572180
Adding a license header, making Jenkins happy.
harishreedharan Jul 21, 2014
f3c99d1
Merge remote-tracking branch 'asf/master'
harishreedharan Jul 23, 2014
e59cc20
Use SparkFlumeEvent instead of the new type. Also, Flume Polling Rece…
harishreedharan Jul 23, 2014
65b76b4
Fixing the unit test.
harishreedharan Jul 23, 2014
73d6f6d
Cleaned up tests a bit. Added some docs in multiple places.
harishreedharan Jul 24, 2014
1f47364
Minor fixes.
harishreedharan Jul 25, 2014
a082eb3
Merge remote-tracking branch 'asf/master'
harishreedharan Jul 25, 2014
7a1bc6e
Fix SparkBuild.scala
harishreedharan Jul 25, 2014
981bf62
Merge remote-tracking branch 'asf/master'
harishreedharan Jul 28, 2014
5f212ce
Ignore Spark Sink from mima.
harishreedharan Jul 28, 2014
e48d785
Documenting flume-sink being ignored for Mima checks.
harishreedharan Jul 28, 2014
96cfb6f
Merge remote-tracking branch 'asf/master'
harishreedharan Jul 29, 2014
e7f70a3
Merge remote-tracking branch 'asf-git/master'
harishreedharan Jul 29, 2014
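Taken together, these commits replace the push-style Flume receiver with an agent-side Spark sink that Spark Streaming polls for batches. As a rough illustration of the agent side, below is a minimal sketch of standing up that sink programmatically, in the style a unit test might use; the SparkSink class name and the hostname/port configuration keys are assumptions about where this PR ended up, not code taken from the diff shown further down.

```scala
import org.apache.flume.Context
import org.apache.flume.channel.MemoryChannel
import org.apache.flume.conf.Configurables
import org.apache.spark.streaming.flume.sink.SparkSink

object SparkSinkSetupSketch {
  def main(args: Array[String]): Unit = {
    // Channel that buffers events between the Flume source and the Spark sink.
    val channel = new MemoryChannel()
    Configurables.configure(channel, new Context())

    // The sink added by this PR: it holds events in the channel until a
    // Spark polling receiver connects and pulls a batch over Avro RPC.
    val sink = new SparkSink()
    val sinkContext = new Context()
    sinkContext.put("hostname", "localhost") // assumed config key
    sinkContext.put("port", "9999")          // assumed config key
    Configurables.configure(sink, sinkContext)
    sink.setChannel(channel)

    channel.start()
    sink.start()
  }
}
```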
Merge remote-tracking branch 'asf/master'
Conflicts:
	external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
	project/SparkBuild.scala
harishreedharan committed Jul 10, 2014
commit 86aa274d0a471844746a22f830fa3548b84ca2b5
6 changes: 5 additions & 1 deletion external/flume-sink/pom.xml
@@ -21,11 +21,15 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>1.0.0-SNAPSHOT</version>
<version>1.1.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

<artifactId>spark-streaming-flume-sink_2.10</artifactId>
<properties>
<sbt.project.name>streaming-flume-sink</sbt.project.name>
</properties>

<packaging>jar</packaging>
<name>Spark Project External Flume Sink</name>
<url>http://spark.apache.org/</url>
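The version bump to 1.1.0-SNAPSHOT and the new sbt.project.name property register the sink as its own build module. For reference, a downstream build that wanted to experiment with it could depend on the coordinates shown in this POM; the line below is a hypothetical build.sbt entry, assuming the SNAPSHOT artifact is published to a local repository, not something taken from the PR itself.

```scala
// Hypothetical sbt dependency; groupId, artifactId and version come from the
// spark-streaming-flume-sink POM fragment above (a SNAPSHOT at this point).
libraryDependencies += "org.apache.spark" % "spark-streaming-flume-sink_2.10" % "1.1.0-SNAPSHOT"
```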
external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
@@ -31,6 +31,10 @@ import org.apache.flume.source.avro.Status
import org.apache.avro.ipc.specific.SpecificResponder
import org.apache.avro.ipc.NettyServer

import org.apache.spark.util.Utils

import org.apache.spark.Logging
import org.apache.spark.util.Utils
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream._
import org.apache.spark.streaming.StreamingContext
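On the Spark side, the new integration is consumed through a polling input stream rather than the existing push-based FlumeInputDStream whose imports change above. Below is a minimal driver-program sketch, assuming the FlumeUtils.createPollingStream API this PR introduces; the exact signature at this revision may differ.

```scala
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.flume.FlumeUtils

object FlumePollingSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("FlumePollingSketch")
    val ssc = new StreamingContext(conf, Seconds(10))

    // Connect to the SparkSink running inside the Flume agent and pull batches of events.
    val events = FlumeUtils.createPollingStream(
      ssc, "localhost", 9999, StorageLevel.MEMORY_AND_DISK_SER_2)

    // Each record is a SparkFlumeEvent wrapping the Avro event body.
    events.map(e => new String(e.event.getBody.array(), StandardCharsets.UTF_8)).print()

    ssc.start()
    ssc.awaitTermination()
  }
}
```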
187 changes: 48 additions & 139 deletions project/SparkBuild.scala
@@ -15,15 +15,6 @@
* limitations under the License.
*/

import sbt._
import sbt.ClasspathDependency
import sbt.Classpaths.publishTask
import sbt.ExclusionRule
import sbt.Keys._
import sbt.Task
import sbtassembly.Plugin._
import AssemblyKeys._
import scala.Some
import scala.util.Properties
import scala.collection.JavaConversions._

@@ -38,10 +29,11 @@ object BuildCommons {
private val buildLocation = file(".").getAbsoluteFile.getParentFile

val allProjects@Seq(bagel, catalyst, core, graphx, hive, mllib, repl, spark, sql, streaming,
streamingFlume, streamingKafka, streamingMqtt, streamingTwitter, streamingZeromq) =
streamingFlumeSink, streamingFlume, streamingKafka, streamingMqtt, streamingTwitter,
streamingZeromq) =
Seq("bagel", "catalyst", "core", "graphx", "hive", "mllib", "repl", "spark", "sql",
"streaming", "streaming-flume", "streaming-kafka", "streaming-mqtt", "streaming-twitter",
"streaming-zeromq").map(ProjectRef(buildLocation, _))
"streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka", "streaming-mqtt",
"streaming-twitter", "streaming-zeromq").map(ProjectRef(buildLocation, _))

val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests, sparkGangliaLgpl) =
Seq("yarn", "yarn-stable", "yarn-alpha", "java8-tests", "ganglia-lgpl")
@@ -124,95 +116,13 @@ object SparkBuild extends PomBuild {
Some("org.apache.spark" % fullId % "1.0.0")
}

// A configuration to set an alternative publishLocalConfiguration
lazy val MavenCompile = config("m2r") extend(Compile)
lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
val sparkHome = System.getProperty("user.dir")

// Allows build configuration to be set through environment variables
lazy val hadoopVersion = Properties.envOrElse("SPARK_HADOOP_VERSION", DEFAULT_HADOOP_VERSION)
lazy val isNewHadoop = Properties.envOrNone("SPARK_IS_NEW_HADOOP") match {
case None => {
val isNewHadoopVersion = "^2\\.[2-9]+".r.findFirstIn(hadoopVersion).isDefined
(isNewHadoopVersion|| DEFAULT_IS_NEW_HADOOP)
}
case Some(v) => v.toBoolean
}

lazy val isYarnEnabled = Properties.envOrNone("SPARK_YARN") match {
case None => DEFAULT_YARN
case Some(v) => v.toBoolean
}
lazy val hadoopClient = if (hadoopVersion.startsWith("0.20.") || hadoopVersion == "1.0.0") "hadoop-core" else "hadoop-client"
val maybeAvro = if (hadoopVersion.startsWith("0.23.")) Seq("org.apache.avro" % "avro" % "1.7.4") else Seq()

lazy val isHiveEnabled = Properties.envOrNone("SPARK_HIVE") match {
case None => DEFAULT_HIVE
case Some(v) => v.toBoolean
}

// Include Ganglia integration if the user has enabled Ganglia
// This is isolated from the normal build due to LGPL-licensed code in the library
lazy val isGangliaEnabled = Properties.envOrNone("SPARK_GANGLIA_LGPL").isDefined
lazy val gangliaProj = Project("spark-ganglia-lgpl", file("extras/spark-ganglia-lgpl"), settings = gangliaSettings).dependsOn(core)
val maybeGanglia: Seq[ClasspathDependency] = if (isGangliaEnabled) Seq(gangliaProj) else Seq()
val maybeGangliaRef: Seq[ProjectReference] = if (isGangliaEnabled) Seq(gangliaProj) else Seq()

// Include the Java 8 project if the JVM version is 8+
lazy val javaVersion = System.getProperty("java.specification.version")
lazy val isJava8Enabled = javaVersion.toDouble >= "1.8".toDouble
val maybeJava8Tests = if (isJava8Enabled) Seq[ProjectReference](java8Tests) else Seq[ProjectReference]()
lazy val java8Tests = Project("java8-tests", file("extras/java8-tests"), settings = java8TestsSettings).
dependsOn(core) dependsOn(streaming % "compile->compile;test->test")

// Include the YARN project if the user has enabled YARN
lazy val yarnAlpha = Project("yarn-alpha", file("yarn/alpha"), settings = yarnAlphaSettings) dependsOn(core)
lazy val yarn = Project("yarn", file("yarn/stable"), settings = yarnSettings) dependsOn(core)

lazy val maybeYarn: Seq[ClasspathDependency] = if (isYarnEnabled) Seq(if (isNewHadoop) yarn else yarnAlpha) else Seq()
lazy val maybeYarnRef: Seq[ProjectReference] = if (isYarnEnabled) Seq(if (isNewHadoop) yarn else yarnAlpha) else Seq()

lazy val externalTwitter = Project("external-twitter", file("external/twitter"), settings = twitterSettings)
.dependsOn(streaming % "compile->compile;test->test")

lazy val externalKafka = Project("external-kafka", file("external/kafka"), settings = kafkaSettings)
.dependsOn(streaming % "compile->compile;test->test")

lazy val externalFlumeSink = Project("external-flume-sink", file("external/flume-sink"), settings = flumeSinkSettings)

lazy val externalFlume = Project("external-flume", file("external/flume"), settings = flumeSettings)
.dependsOn(streaming % "compile->compile;test->test").dependsOn(externalFlumeSink)


lazy val externalZeromq = Project("external-zeromq", file("external/zeromq"), settings = zeromqSettings)
.dependsOn(streaming % "compile->compile;test->test")

lazy val externalMqtt = Project("external-mqtt", file("external/mqtt"), settings = mqttSettings)
.dependsOn(streaming % "compile->compile;test->test")

lazy val allExternal = Seq[ClasspathDependency](externalTwitter, externalKafka, externalFlume, externalFlumeSink, externalZeromq, externalMqtt)
lazy val allExternalRefs = Seq[ProjectReference](externalTwitter, externalKafka, externalFlume, externalFlumeSink, externalZeromq, externalMqtt)

lazy val examples = Project("examples", file("examples"), settings = examplesSettings)
.dependsOn(core, mllib, graphx, bagel, streaming, hive) dependsOn(allExternal: _*)

// Everything except assembly, hive, tools, java8Tests and examples belong to packageProjects
lazy val packageProjects = Seq[ProjectReference](core, repl, bagel, streaming, mllib, graphx, catalyst, sql) ++ maybeYarnRef ++ maybeHiveRef ++ maybeGangliaRef

lazy val allProjects = packageProjects ++ allExternalRefs ++
Seq[ProjectReference](examples, tools, assemblyProj) ++ maybeJava8Tests

def sharedSettings = Defaults.defaultSettings ++ MimaBuild.mimaSettings(file(sparkHome)) ++ Seq(
organization := "org.apache.spark",
version := SPARK_VERSION,
scalaVersion := "2.10.4",
scalacOptions := Seq("-Xmax-classfile-name", "120", "-unchecked", "-deprecation", "-feature",
"-target:" + SCALAC_JVM_VERSION),
javacOptions := Seq("-target", JAVAC_JVM_VERSION, "-source", JAVAC_JVM_VERSION),
unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
def oldDepsSettings() = Defaults.defaultSettings ++ Seq(
name := "old-deps",
scalaVersion := "2.10.4",
retrieveManaged := true,
retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
"spark-streaming-flume-sink",
"spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
"spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
"spark-core").map(versionArtifact(_).get intransitive())
@@ -244,6 +154,8 @@
/* Hive console settings */
enable(Hive.settings)(hive)

enable(Flume.settings)(streamingFlumeSink)

// TODO: move this to its upstream project.
override def projectDefinitions(baseDirectory: File): Seq[Project] = {
super.projectDefinitions(baseDirectory).map { x =>
@@ -254,6 +166,10 @@

}

object Flume {
lazy val settings = sbtavro.SbtAvro.avroSettings
}

object Hive {

lazy val settings = Seq(
@@ -355,50 +271,43 @@ object Unidoc {
"-noqualifier", "java.lang"
)
)
}

def kafkaSettings() = sharedSettings ++ Seq(
name := "spark-streaming-kafka",
previousArtifact := sparkPreviousArtifact("spark-streaming-kafka"),
libraryDependencies ++= Seq(
"com.github.sgroschupf" % "zkclient" % "0.1",
"org.apache.kafka" %% "kafka" % "0.8.0"
exclude("com.sun.jdmk", "jmxtools")
exclude("com.sun.jmx", "jmxri")
exclude("net.sf.jopt-simple", "jopt-simple")
excludeAll(excludeSLF4J)
)
)

def flumeSettings() = sharedSettings ++ Seq(
name := "spark-streaming-flume",
previousArtifact := sparkPreviousArtifact("spark-streaming-flume"),
libraryDependencies ++= Seq(
"org.apache.flume" % "flume-ng-sdk" % "1.4.0" % "compile" excludeAll(excludeIONetty, excludeThrift)
)
)
object TestSettings {
import BuildCommons._

def flumeSinkSettings() = {
sharedSettings ++ Seq(
name := "spark-streaming-flume-sink",
libraryDependencies ++= Seq(
"org.apache.flume" % "flume-ng-sdk" % "1.4.0" % "compile"
excludeAll(excludeIONetty, excludeThrift),
"org.apache.flume" % "flume-ng-core" % "1.4.0" % "compile"
excludeAll(excludeIONetty, excludeThrift)
)
) ++ sbtavro.SbtAvro.avroSettings
}
def zeromqSettings() = sharedSettings ++ Seq(
name := "spark-streaming-zeromq",
previousArtifact := sparkPreviousArtifact("spark-streaming-zeromq"),
libraryDependencies ++= Seq(
"org.spark-project.akka" %% "akka-zeromq" % akkaVersion
lazy val settings = Seq (
// Fork new JVMs for tests and set Java options for those
fork := true,
javaOptions in Test += "-Dspark.home=" + sparkHome,
javaOptions in Test += "-Dspark.testing=1",
javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=true",
javaOptions in Test ++= System.getProperties.filter(_._1 startsWith "spark")
.map { case (k,v) => s"-D$k=$v" }.toSeq,
javaOptions in Test ++= "-Xmx3g -XX:PermSize=128M -XX:MaxNewSize=256m -XX:MaxPermSize=1g"
.split(" ").toSeq,
javaOptions += "-Xmx3g",

// Show full stack trace and duration in test cases.
testOptions in Test += Tests.Argument("-oDF"),
testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
// Enable Junit testing.
libraryDependencies += "com.novocode" % "junit-interface" % "0.9" % "test",
// Only allow one test at a time, even across projects, since they run in the same JVM
parallelExecution in Test := false,
concurrentRestrictions in Global += Tags.limit(Tags.Test, 1),
// Remove certain packages from Scaladoc
scalacOptions in (Compile, doc) := Seq(
"-groups",
"-skip-packages", Seq(
"akka",
"org.apache.spark.api.python",
"org.apache.spark.network",
"org.apache.spark.deploy",
"org.apache.spark.util.collection"
).mkString(":"),
"-doc-title", "Spark " + version.value.replaceAll("-SNAPSHOT", "") + " ScalaDoc"
)
)

def mqttSettings() = streamingSettings ++ Seq(
name := "spark-streaming-mqtt",
previousArtifact := sparkPreviousArtifact("spark-streaming-mqtt"),
libraryDependencies ++= Seq("org.eclipse.paho" % "mqtt-client" % "0.4.0")
)
}
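The commit list also notes that the new flume-sink module is excluded from MiMa binary-compatibility checks, since it has no previously released artifact to compare against. Below is a purely hypothetical sketch of such a filter, reusing the allProjects and streamingFlumeSink references defined earlier in this file; the actual exclusion mechanism in the PR may look different.

```scala
// Hypothetical sketch only; not the exact code from this PR.
// Brand-new modules have no previous release, so they are dropped
// before MiMa's binary-compatibility check runs over the build.
val newModules = Seq(streamingFlumeSink)
val mimaCheckedProjects = allProjects.filterNot(p => newModules.contains(p))
```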
You are viewing a condensed version of this merge commit; the full change set is available on the pull request page.