Add the generated files to the build by hand
hvanhovell committed Dec 30, 2015
commit 90c11e87fabb6afea82b7ae94fccee778421f0fd
10 changes: 8 additions & 2 deletions project/SparkBuild.scala
@@ -384,7 +384,7 @@ object SQL {
 }
 
 object Hive {
-
+  import sbtantlr.SbtAntlrPlugin._
   lazy val settings = Seq(
     javaOptions += "-XX:MaxPermSize=256m",
     // Specially disable assertions since some Hive tests fail them
@@ -412,11 +412,17 @@ object Hive {
       |import org.apache.spark.sql.types._""".stripMargin,
     cleanupCommands in console := "sparkContext.stop()",
     logLevel in Compile := Level.Debug,
+    sourceGenerators in Compile <+= Def.task {
+      // This is quite a hack.
+      val pkg = (sourceManaged in Compile).value / "org" / "apache" / "spark" / "sql" / "parser"
+      val names = Seq("SparkSqlLexer", "SparkSqlParser", "SparkSqlParser_FromClauseParser", "SparkSqlParser_IdentifiersParser", "SparkSqlParser_SelectClauseParser")
+      names.map(name => pkg / (name + ".java"))
+    },
     // Some of our log4j jars make it impossible to submit jobs from this JVM to Hive Map/Reduce
     // in order to generate golden files. This is only required for developers who are adding
     // new query tests.
     fullClasspath in Test := (fullClasspath in Test).value.filterNot { f => f.toString.contains("jcl-over") }
-  ) ++ sbtantlr.SbtAntlrPlugin.antlrSettings
+  ) ++ antlrSettings

 }
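
The sourceGenerators key registers a task whose returned files sbt adds to the compile step. The task in this patch does not run ANTLR itself: the sbt-antlr plugin (brought into scope by the new import and appended via antlrSettings) emits the parser sources under sourceManaged, and the task merely lists the expected .java files so the compiler picks them up, which is why the inline comment calls it a hack. For reference, a minimal sketch of the same registration in sbt 1.x syntax, where the <+= operator and "in" scoping were removed; the package path and class names below simply mirror the patch:

// Sketch only: assumes an ANTLR plugin still writes the generated parser
// sources under sourceManaged before compilation runs; this task generates
// nothing itself, it only tells sbt which files to compile.
Compile / sourceGenerators += Def.task {
  val pkg = (Compile / sourceManaged).value /
    "org" / "apache" / "spark" / "sql" / "parser"
  val names = Seq(
    "SparkSqlLexer",
    "SparkSqlParser",
    "SparkSqlParser_FromClauseParser",
    "SparkSqlParser_IdentifiersParser",
    "SparkSqlParser_SelectClauseParser")
  names.map(name => pkg / (name + ".java"))
}.taskValue

Listing the outputs by hand is brittle (adding or renaming a grammar silently leaves the list stale), so a plugin task that returns its own generated files would be the usual longer-term fix.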
