Skip to content

Commit 90c11e8

Browse files
committed
Add the generated files to the build by hand
1 parent a7749a8 commit 90c11e8

File tree

1 file changed

+8
-2
lines changed

1 file changed

+8
-2
lines changed

project/SparkBuild.scala

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -384,7 +384,7 @@ object SQL {
384384
}
385385

386386
object Hive {
387-
387+
import sbtantlr.SbtAntlrPlugin._
388388
lazy val settings = Seq(
389389
javaOptions += "-XX:MaxPermSize=256m",
390390
// Specially disable assertions since some Hive tests fail them
@@ -412,11 +412,17 @@ object Hive {
412412
|import org.apache.spark.sql.types._""".stripMargin,
413413
cleanupCommands in console := "sparkContext.stop()",
414414
logLevel in Compile := Level.Debug,
415+
sourceGenerators in Compile <+= Def.task {
416+
// This is quite a hack.
417+
val pkg = (sourceManaged in Compile).value / "org" / "apache" / "spark" / "sql" / "parser"
418+
val names = Seq("SparkSqlLexer", "SparkSqlParser", "SparkSqlParser_FromClauseParser", "SparkSqlParser_IdentifiersParser", "SparkSqlParser_SelectClauseParser")
419+
names.map(name => pkg / (name + ".java"))
420+
},
415421
// Some of our log4j jars make it impossible to submit jobs from this JVM to Hive Map/Reduce
416422
// in order to generate golden files. This is only required for developers who are adding
417423
// new query tests.
418424
fullClasspath in Test := (fullClasspath in Test).value.filterNot { f => f.toString.contains("jcl-over") }
419-
) ++ sbtantlr.SbtAntlrPlugin.antlrSettings
425+
) ++ antlrSettings
420426

421427
}
422428

0 commit comments

Comments
 (0)