27 changes: 27 additions & 0 deletions common/tags/src/test/java/org/apache/spark/tags/ChromeUITest.java
@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.tags;

import java.lang.annotation.*;

import org.scalatest.TagAnnotation;

@TagAnnotation
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface ChromeUITest { }
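
ScalaTest resolves JUnit-style @TagAnnotation annotations by their fully-qualified class name, so suites or tests marked with this annotation can be filtered out with ScalaTest's -l (exclude) argument. A minimal sbt sketch of that mechanism, mirroring the SparkBuild.scala change later in this diff (illustrative only, not part of the diff):

// Illustrative only: exclude every suite tagged with @ChromeUITest from the default test run.
testOptions in Test += Tests.Argument(
  TestFrameworks.ScalaTest, "-l", "org.apache.spark.tags.ChromeUITest")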
49 changes: 49 additions & 0 deletions core/src/test/scala/org/apache/spark/ui/ChromeUISeleniumSuite.scala
@@ -0,0 +1,49 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.ui

import org.openqa.selenium.WebDriver
import org.openqa.selenium.chrome.{ChromeDriver, ChromeOptions}

import org.apache.spark.tags.ChromeUITest

/**
* Selenium tests for the Spark Web UI with Chrome.
*/
@ChromeUITest
class ChromeUISeleniumSuite extends RealBrowserUISeleniumSuite("webdriver.chrome.driver") {

override var webDriver: WebDriver = _

override def beforeAll(): Unit = {
super.beforeAll()
val chromeOptions = new ChromeOptions
chromeOptions.addArguments("--headless", "--disable-gpu")
webDriver = new ChromeDriver(chromeOptions)
}

override def afterAll(): Unit = {
try {
if (webDriver != null) {
webDriver.quit()
}
} finally {
super.afterAll()
}
}
}
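
This suite only runs when a chromedriver path is supplied through a system property; RealBrowserUISeleniumSuite (below) builds the property name from its "spark.test." prefix plus the driverProp constructor argument. A minimal sketch of that contract, with a placeholder path:

// Hypothetical setup: point the suite at a locally installed chromedriver binary.
sys.props("spark.test.webdriver.chrome.driver") = "/path/to/chromedriver"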
109 changes: 109 additions & 0 deletions core/src/test/scala/org/apache/spark/ui/RealBrowserUISeleniumSuite.scala
@@ -0,0 +1,109 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.ui

import org.openqa.selenium.{By, WebDriver}
import org.scalatest._
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.scalatestplus.selenium.WebBrowser

import org.apache.spark._
import org.apache.spark.LocalSparkContext.withSpark
import org.apache.spark.internal.config.MEMORY_OFFHEAP_SIZE
import org.apache.spark.internal.config.UI.{UI_ENABLED, UI_KILL_ENABLED, UI_PORT}
import org.apache.spark.util.CallSite

/**
* Selenium tests for the Spark Web UI with real web browsers.
*/
abstract class RealBrowserUISeleniumSuite(val driverProp: String)
extends SparkFunSuite with WebBrowser with Matchers with BeforeAndAfterAll {

implicit var webDriver: WebDriver
private val driverPropPrefix = "spark.test."

override def beforeAll(): Unit = {
super.beforeAll()
assume(
sys.props(driverPropPrefix + driverProp) !== null,
"System property " + driverPropPrefix + driverProp +
" should be set to the corresponding driver path.")
sys.props(driverProp) = sys.props(driverPropPrefix + driverProp)
}

override def afterAll(): Unit = {
sys.props.remove(driverProp)
super.afterAll()
}

test("SPARK-31534: text for tooltip should be escaped") {
withSpark(newSparkContext()) { sc =>
sc.setLocalProperty(CallSite.LONG_FORM, "collect at <console>:25")
sc.setLocalProperty(CallSite.SHORT_FORM, "collect at <console>:25")
sc.parallelize(1 to 10).collect

eventually(timeout(10.seconds), interval(50.milliseconds)) {
goToUi(sc, "/jobs")

val jobDesc =
webDriver.findElement(By.cssSelector("div[class='application-timeline-content']"))
jobDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")

goToUi(sc, "/jobs/job/?id=0")
webDriver.get(sc.ui.get.webUrl.stripSuffix("/") + "/jobs/job/?id=0")
val stageDesc = webDriver.findElement(By.cssSelector("div[class='job-timeline-content']"))
stageDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")

// Open DAG Viz.
webDriver.findElement(By.id("job-dag-viz")).click()
val nodeDesc = webDriver.findElement(By.cssSelector("g[class='node_0 node']"))
nodeDesc.getAttribute("name") should include ("collect at &lt;console&gt;:25")
}
}
}

/**
* Create a test SparkContext with the SparkUI enabled.
* It is safe to `get` the SparkUI directly from the SparkContext returned here.
*/
private def newSparkContext(
killEnabled: Boolean = true,
master: String = "local",
additionalConfs: Map[String, String] = Map.empty): SparkContext = {
val conf = new SparkConf()
.setMaster(master)
.setAppName("test")
.set(UI_ENABLED, true)
.set(UI_PORT, 0)
.set(UI_KILL_ENABLED, killEnabled)
.set(MEMORY_OFFHEAP_SIZE.key, "64m")
additionalConfs.foreach { case (k, v) => conf.set(k, v) }
val sc = new SparkContext(conf)
assert(sc.ui.isDefined)
sc
}

def goToUi(sc: SparkContext, path: String): Unit = {
goToUi(sc.ui.get, path)
}

def goToUi(ui: SparkUI, path: String): Unit = {
go to (ui.webUrl.stripSuffix("/") + path)
}
}
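
The abstract suite leaves WebDriver construction to subclasses: a concrete suite passes the driver-path property name to the constructor, assigns webDriver in beforeAll, and quits it in afterAll. A hypothetical sketch of what a counterpart for another browser could look like, assuming a Selenium version that ships FirefoxDriver and FirefoxOptions (this suite is not part of the diff):

// Illustrative only: a hypothetical Firefox counterpart to ChromeUISeleniumSuite.
import org.openqa.selenium.WebDriver
import org.openqa.selenium.firefox.{FirefoxDriver, FirefoxOptions}

class FirefoxUISeleniumSuite extends RealBrowserUISeleniumSuite("webdriver.gecko.driver") {

  override var webDriver: WebDriver = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    val firefoxOptions = new FirefoxOptions
    firefoxOptions.addArguments("-headless")  // run without a visible browser window
    webDriver = new FirefoxDriver(firefoxOptions)
  }

  override def afterAll(): Unit = {
    try {
      if (webDriver != null) {
        webDriver.quit()
      }
    } finally {
      super.afterAll()
    }
  }
}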
27 changes: 0 additions & 27 deletions core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -773,33 +773,6 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
}
}

test("SPARK-31534: text for tooltip should be escaped") {
withSpark(newSparkContext()) { sc =>
sc.setLocalProperty(CallSite.LONG_FORM, "collect at <console>:25")
sc.setLocalProperty(CallSite.SHORT_FORM, "collect at <console>:25")
sc.parallelize(1 to 10).collect

val driver = webDriver.asInstanceOf[HtmlUnitDriver]
driver.setJavascriptEnabled(true)

eventually(timeout(10.seconds), interval(50.milliseconds)) {
goToUi(sc, "/jobs")
val jobDesc =
driver.findElement(By.cssSelector("div[class='application-timeline-content']"))
jobDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")

goToUi(sc, "/jobs/job/?id=0")
val stageDesc = driver.findElement(By.cssSelector("div[class='job-timeline-content']"))
stageDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")

// Open DAG Viz.
driver.findElement(By.id("job-dag-viz")).click()
val nodeDesc = driver.findElement(By.cssSelector("g[class='node_0 node']"))
nodeDesc.getAttribute("name") should include ("collect at &lt;console&gt;:25")
}
}
}

def goToUi(sc: SparkContext, path: String): Unit = {
goToUi(sc.ui.get, path)
}
5 changes: 5 additions & 0 deletions dev/run-tests.py
@@ -33,6 +33,9 @@
from sparktestsupport.toposort import toposort_flatten
import sparktestsupport.modules as modules

always_excluded_tags = [
    "org.apache.spark.tags.ChromeUITest"
]

Review comment (Member): always_excluded_tags might be misleading, because this should be tested in the proper environments.

# -------------------------------------------------------------------------------------------------
# Functions for traversing module dependency graph
@@ -606,6 +609,8 @@ def main():
print("[info] Found the following changed modules:",
", ".join(x.name for x in changed_modules))

excluded_tags.extend(always_excluded_tags)

# setup environment variables
# note - the 'root' module doesn't collect environment variables for all modules. Because the
# environment variables should not be set if a module is not changed, even if running the 'root'
4 changes: 3 additions & 1 deletion pom.xml
@@ -204,7 +204,7 @@
<leveldbjni.group>org.fusesource.leveldbjni</leveldbjni.group>

<test.java.home>${java.home}</test.java.home>
<test.exclude.tags></test.exclude.tags>
<test.exclude.tags>org.apache.spark.tags.ChromeUITest</test.exclude.tags>

Review comment (Member): Could you add some comment before this line?

<test.include.tags></test.include.tags>

<!-- Package to use when relocating shaded classes. -->
@@ -243,6 +243,7 @@
things breaking.
-->
<spark.test.home>${session.executionRootDirectory}</spark.test.home>
<spark.test.webdriver.chrome.driver></spark.test.webdriver.chrome.driver>

<CodeCacheSize>1g</CodeCacheSize>
</properties>
@@ -2512,6 +2513,7 @@
<spark.ui.enabled>false</spark.ui.enabled>
<spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>
<spark.unsafe.exceptionOnMemoryLeak>true</spark.unsafe.exceptionOnMemoryLeak>
<spark.test.webdriver.chrome.driver>${spark.test.webdriver.chrome.driver}</spark.test.webdriver.chrome.driver>
<!-- Needed by sql/hive tests. -->
<test.src.tables>__not_used__</test.src.tables>
</systemProperties>
9 changes: 6 additions & 3 deletions project/SparkBuild.scala
@@ -967,6 +967,9 @@ object TestSettings {
"2.12"
}
*/

private val defaultExcludedTagsForScalaTest = Seq("org.apache.spark.tags.ChromeUITest")

lazy val settings = Seq (
// Fork new JVMs for tests and set Java options for those
fork := true,
@@ -1001,9 +1004,9 @@
javaOptions += "-Xmx3g",
// Exclude tags defined in a system property
testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest,
sys.props.get("test.exclude.tags").map { tags =>
tags.split(",").flatMap { tag => Seq("-l", tag) }.toSeq
}.getOrElse(Nil): _*),
sys.props.get("test.exclude.tags").map(tag => tag.split(",").toSeq)
.getOrElse(defaultExcludedTagsForScalaTest).filter(!_.trim.isEmpty)
.flatMap(tag => Seq("-l", tag)): _*),

Review comment (@dongjoon-hyun, Member, May 24, 2020): This looks not robust enough. If a developer sets an irrelevant tag (ExtendedSQLTest or ExtendedHiveTest), ChromeUITest will suddenly cause a failure in the unrelated Core module. Did I understand correctly?

Review comment (Member, Author): That is what I intended: if developers set test.exclude.tags explicitly, every tag that should be excluded has to be listed explicitly. But, as you point out, this change seems to break build compatibility. I'll submit another idea.

testOptions in Test += Tests.Argument(TestFrameworks.JUnit,
sys.props.get("test.exclude.tags").map { tags =>
Seq("--exclude-categories=" + tags)
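
To make the concern in the review thread above concrete: once test.exclude.tags is set explicitly, the getOrElse fallback skips the default list, so ChromeUITest is no longer excluded unless the developer lists it too. A small self-contained sketch of that behavior (assumed property values, not build code):

// Illustrative only: how the proposed exclusion logic reacts to an explicit property.
val defaultExcludedTagsForScalaTest = Seq("org.apache.spark.tags.ChromeUITest")

def excludedTags(prop: Option[String]): Seq[String] =
  prop.map(_.split(",").toSeq)
    .getOrElse(defaultExcludedTagsForScalaTest)
    .filter(_.trim.nonEmpty)

excludedTags(None)
// => Seq("org.apache.spark.tags.ChromeUITest") -- excluded by default
excludedTags(Some("org.apache.spark.tags.ExtendedSQLTest"))
// => Seq("org.apache.spark.tags.ExtendedSQLTest") -- ChromeUITest is silently no longer excluded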