@@ -1324,7 +1324,7 @@ object CodeGenerator extends Logging {
 
   // Reset compile time.
   // Visible for testing
-  def resetCompileTime: Unit = _compileTime.reset()
+  def resetCompileTime(): Unit = _compileTime.reset()
 
   /**
    * Compile the Java source code into a Java class, using Janino.
@@ -586,7 +586,7 @@ object WholeStageCodegenExec {
 
   // Reset generation time of Java source code.
   // Visible for testing
-  def resetCodeGenTime: Unit = _codeGenTime.set(0L)
+  def resetCodeGenTime(): Unit = _codeGenTime.set(0L)
 }
 
 /**
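Both hunks above make the same change: a parameterless method with a side effect gains an empty parameter list. This follows the usual Scala convention that side-effecting methods are declared (and called) with `()`, while pure accessors omit the parentheses, so call sites read as actions rather than property reads; the test-suite hunks below switch to the parenthesized call form accordingly. A minimal sketch of the convention (the Counter object and its members are hypothetical, not part of this patch):

import java.util.concurrent.atomic.AtomicLong

object Counter {
  private val _count = new AtomicLong(0L)

  // Pure accessor: declared without parentheses, reads like a value.
  def count: Long = _count.get()

  // Side-effecting operations: declared with (), and called with ().
  def increment(): Unit = { _count.incrementAndGet() }
  def reset(): Unit = _count.set(0L)
}

// Call sites mirror the declarations:
//   Counter.increment()   // action
//   Counter.reset()       // action
//   Counter.count         // read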
@@ -20,6 +20,7 @@ package org.apache.spark.sql
 import org.apache.spark.internal.config.Tests.IS_TESTING
 import org.apache.spark.sql.catalyst.expressions.codegen.{ByteCodeStats, CodeFormatter, CodeGenerator}
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
+import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_SECOND
 import org.apache.spark.sql.execution.{SparkPlan, WholeStageCodegenExec}
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.util.Utils
@@ -36,7 +37,17 @@ abstract class BenchmarkQueryTest extends QueryTest with SharedSparkSession {
   protected override def afterAll(): Unit = {
     try {
       // For debugging dump some statistics about how much time was spent in various optimizer rules
+      // code generation, and compilation.
       logWarning(RuleExecutor.dumpTimeSpent())
+      val codeGenTime = WholeStageCodegenExec.codeGenTime.toDouble / NANOS_PER_SECOND
+      val compileTime = CodeGenerator.compileTime.toDouble / NANOS_PER_SECOND
+      val codegenInfo =
+        s"""
+           |=== Metrics of Whole-stage Codegen ===
+           |Total code generation time: $codeGenTime seconds
+           |Total compile time: $compileTime seconds
+         """.stripMargin
+      logWarning(codegenInfo)
       spark.sessionState.catalog.reset()
     } finally {
       super.afterAll()
@@ -46,6 +57,8 @@ abstract class BenchmarkQueryTest extends QueryTest with SharedSparkSession {
   override def beforeAll(): Unit = {
     super.beforeAll()
     RuleExecutor.resetMetrics()
+    CodeGenerator.resetCompileTime()
+    WholeStageCodegenExec.resetCodeGenTime()
   }
 
   protected def checkGeneratedCode(plan: SparkPlan, checkMethodCodeSize: Boolean = true): Unit = {
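The BenchmarkQueryTest hunks above follow the same reset/accumulate/report pattern that SQLQueryTestSuite uses below: clear the code-generation and compilation counters in beforeAll, then convert the accumulated nanoseconds to seconds and log them in afterAll. A self-contained sketch of that pattern, assuming a plain AtomicLong counter and a locally defined NANOS_PER_SECOND constant rather than Spark's actual metric objects (TimingSketch and its members are illustrative only):

import java.util.concurrent.atomic.AtomicLong

// Illustrative sketch only: the real counter and constant types are not shown in this diff.
object TimingSketch {
  val NANOS_PER_SECOND: Long = 1000L * 1000L * 1000L
  private val elapsedNanos = new AtomicLong(0L)

  // Reset before a test run, as beforeAll does above.
  def reset(): Unit = elapsedNanos.set(0L)

  // Accumulate the wall-clock time spent in `body`, in nanoseconds.
  def timed[T](body: => T): T = {
    val start = System.nanoTime()
    try body finally elapsedNanos.addAndGet(System.nanoTime() - start)
  }

  // Convert the accumulated nanoseconds to seconds and report, as afterAll does above.
  def report(): String = {
    val seconds = elapsedNanos.get().toDouble / NANOS_PER_SECOND
    s"Total time: $seconds seconds"
  }
}

// Usage: reset before the work, time it, report afterwards.
//   TimingSketch.reset()
//   TimingSketch.timed { Thread.sleep(10) }
//   println(TimingSketch.report())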
@@ -683,8 +683,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
     // Add Locale setting
     Locale.setDefault(Locale.US)
     RuleExecutor.resetMetrics()
-    CodeGenerator.resetCompileTime
-    WholeStageCodegenExec.resetCodeGenTime
+    CodeGenerator.resetCompileTime()
+    WholeStageCodegenExec.resetCodeGenTime()
   }
 
   override def afterAll(): Unit = {
@@ -696,12 +696,13 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
       // For debugging dump some statistics about how much time was spent in various optimizer rules
       logWarning(RuleExecutor.dumpTimeSpent())
 
-      val generateJavaTime = WholeStageCodegenExec.codeGenTime
+      val codeGenTime = WholeStageCodegenExec.codeGenTime.toDouble / NANOS_PER_SECOND
+      val compileTime = CodeGenerator.compileTime.toDouble / NANOS_PER_SECOND
       val codegenInfo =
         s"""
-           |=== Metrics of Whole-Stage Codegen ===
-           |Total code generation time: ${generateJavaTime.toDouble / NANOS_PER_SECOND} seconds
-           |Total compile time: ${CodeGenerator.compileTime.toDouble / NANOS_PER_SECOND} seconds
+           |=== Metrics of Whole-stage Codegen ===
+           |Total code generation time: $codeGenTime seconds
+           |Total compile time: $compileTime seconds
         """.stripMargin
       logWarning(codegenInfo)
     } finally {
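The hunk above only reshapes the reporting code: the `${...}` expressions are hoisted into named vals so the interpolated stripMargin block reads cleanly, and the header casing is aligned with BenchmarkQueryTest. For readers unfamiliar with the idiom, stripMargin removes everything up to and including the leading `|` on each line of a triple-quoted string. A small standalone example with placeholder values (StripMarginExample is illustrative, not part of the patch):

object StripMarginExample extends App {
  val codeGenTime = 1.23  // placeholder value, in seconds
  val compileTime = 4.56  // placeholder value, in seconds

  val codegenInfo =
    s"""
       |=== Metrics of Whole-stage Codegen ===
       |Total code generation time: $codeGenTime seconds
       |Total compile time: $compileTime seconds
     """.stripMargin

  // stripMargin drops the leading whitespace and the '|' on each line,
  // so the logged block is left-aligned regardless of source indentation.
  println(codegenInfo)
}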
} finally {
Expand Down