Adjust Join exec abstraction
Eric5553 committed Feb 27, 2020
commit be06b3749bd9834581a31cbb901cff5ba48056a4
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -286,7 +286,8 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
 
       def createCartesianProduct() = {
         if (joinType.isInstanceOf[InnerLike]) {
-          Some(Seq(joins.CartesianProductExec(planLater(left), planLater(right), condition)))
+          Some(Seq(joins.CartesianProductExec(
+            planLater(left), planLater(right), joinType, condition)))
         } else {
           None
         }
@@ -367,7 +368,8 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
 
       def createCartesianProduct() = {
         if (joinType.isInstanceOf[InnerLike]) {
-          Some(Seq(joins.CartesianProductExec(planLater(left), planLater(right), condition)))
+          Some(Seq(joins.CartesianProductExec(
+            planLater(left), planLater(right), joinType, condition)))
        } else {
           None
         }
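Note: both strategy call sites now pass the resolved `joinType` into `CartesianProductExec`, which needs it to satisfy the `joinType` member of the new `BaseJoinExec` trait added below. As a minimal sketch of where this surfaces, assuming a live `SparkSession` named `spark` (the exact plan rendering depends on the explain mode):

```scala
// A cross join plans to CartesianProductExec; after this change the node
// carries its JoinType and can render it in EXPLAIN headers via BaseJoinExec.
val df = spark.range(3).crossJoin(spark.range(3))
df.explain()  // the physical plan contains a CartesianProduct node
```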
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BaseJoinExec.scala (new file)
@@ -0,0 +1,45 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.joins

import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.JoinType
import org.apache.spark.sql.execution.{BinaryExecNode, ExplainUtils}

/**
 * Holds common logic for join operators
 */
trait BaseJoinExec extends BinaryExecNode {
  def joinType: JoinType
  def condition: Option[Expression]

  override def simpleStringWithNodeId(): String = {
    val opId = ExplainUtils.getOpId(this)
    s"$nodeName $joinType ($opId)".trim
  }

  override def verboseStringWithOperatorId(): String = {
    val joinCondStr = if (condition.isDefined) {
      s"${condition.get}"
    } else "None"
    s"""
       |(${ExplainUtils.getOpId(this)}) $nodeName ${ExplainUtils.getCodegenId(this)}
       |Join condition : ${joinCondStr}
     """.stripMargin
  }
}
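For orientation, a minimal hypothetical operator shows what a concrete join node still has to provide once it extends `BaseJoinExec`; the `simpleStringWithNodeId`/`verboseStringWithOperatorId` formatting above is inherited for free. `ExampleJoinExec` is illustrative only, not part of this PR:

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
import org.apache.spark.sql.catalyst.plans.JoinType
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.joins.BaseJoinExec

// Hypothetical sketch: the case-class parameters implement BaseJoinExec's
// abstract joinType/condition members and BinaryExecNode's left/right children.
case class ExampleJoinExec(
    joinType: JoinType,
    condition: Option[Expression],
    left: SparkPlan,
    right: SparkPlan) extends BaseJoinExec {
  override def output: Seq[Attribute] = left.output ++ right.output
  override protected def doExecute(): RDD[InternalRow] =
    throw new UnsupportedOperationException("sketch only")
}
```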
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastHashJoinExec.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.physical.{BroadcastDistribution, Distribution, UnspecifiedDistribution}
-import org.apache.spark.sql.execution.{BinaryExecNode, CodegenSupport, SparkPlan}
+import org.apache.spark.sql.execution.{CodegenSupport, SparkPlan}
 import org.apache.spark.sql.execution.metric.SQLMetrics
 import org.apache.spark.sql.types.{BooleanType, LongType}
 
@@ -44,7 +44,7 @@ case class BroadcastHashJoinExec(
     condition: Option[Expression],
     left: SparkPlan,
     right: SparkPlan)
-  extends BinaryExecNode with HashJoin with CodegenSupport {
+  extends HashJoin with CodegenSupport {
 
   override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoinExec.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.physical._
-import org.apache.spark.sql.execution.{BinaryExecNode, SparkPlan}
+import org.apache.spark.sql.execution.{ExplainUtils, SparkPlan}
 import org.apache.spark.sql.execution.metric.SQLMetrics
 import org.apache.spark.util.collection.{BitSet, CompactBuffer}
 
@@ -32,7 +32,7 @@ case class BroadcastNestedLoopJoinExec(
     right: SparkPlan,
     buildSide: BuildSide,
     joinType: JoinType,
-    condition: Option[Expression]) extends BinaryExecNode {
+    condition: Option[Expression]) extends BaseJoinExec {
 
   override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
@@ -43,6 +43,11 @@ case class BroadcastNestedLoopJoinExec(
     case BuildLeft => (right, left)
   }
 
+  override def simpleStringWithNodeId(): String = {
+    val opId = ExplainUtils.getOpId(this)
+    s"$nodeName $joinType ${buildSide} ($opId)".trim
+  }
+
   override def requiredChildDistribution: Seq[Distribution] = buildSide match {
     case BuildLeft =>
       BroadcastDistribution(IdentityBroadcastMode) :: UnspecifiedDistribution :: Nil
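Note: unlike the other operators touched here, `BroadcastNestedLoopJoinExec` keeps its own `simpleStringWithNodeId` so the EXPLAIN header also names the build side. A sketch of the difference, with a hypothetical operator id:

```scala
// BaseJoinExec default:   s"$nodeName $joinType ($opId)"
//   => "BroadcastNestedLoopJoin Inner (3)"
// Override above:         s"$nodeName $joinType ${buildSide} ($opId)"
//   => "BroadcastNestedLoopJoin Inner BuildRight (3)"
```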
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/CartesianProductExec.scala
@@ -22,7 +22,8 @@ import org.apache.spark.rdd.{CartesianPartition, CartesianRDD, RDD}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, JoinedRow, Predicate, UnsafeRow}
 import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeRowJoiner
-import org.apache.spark.sql.execution.{BinaryExecNode, ExplainUtils, ExternalAppendOnlyUnsafeRowArray, SparkPlan}
+import org.apache.spark.sql.catalyst.plans.JoinType
+import org.apache.spark.sql.execution.{ExternalAppendOnlyUnsafeRowArray, SparkPlan}
 import org.apache.spark.sql.execution.metric.SQLMetrics
 import org.apache.spark.util.CompletionIterator
 
@@ -60,23 +61,13 @@ class UnsafeCartesianRDD(
 case class CartesianProductExec(
     left: SparkPlan,
     right: SparkPlan,
-    condition: Option[Expression]) extends BinaryExecNode {
+    joinType: JoinType,
+    condition: Option[Expression]) extends BaseJoinExec {
   override def output: Seq[Attribute] = left.output ++ right.output
 
   override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
 
-  override def verboseStringWithOperatorId(): String = {
-    val joinCondStr = if (condition.isDefined) {
-      s"${condition.get}"
-    } else "None"
-
-    s"""
-       |(${ExplainUtils.getOpId(this)}) $nodeName ${ExplainUtils.getCodegenId(this)}
-       |${ExplainUtils.generateFieldString("Join condition", joinCondStr)}
-     """.stripMargin
-  }
-
   protected override def doExecute(): RDD[InternalRow] = {
     val numOutputRows = longMetric("numOutputRows")
 
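With `joinType` threaded through the constructor, the operator can drop its bespoke `verboseStringWithOperatorId` and rely on the `BaseJoinExec` implementation. One small rendering difference is visible in the diff itself: the removed override formatted the condition through `ExplainUtils.generateFieldString`, whereas the inherited version writes the `Join condition : ...` line directly. An illustrative fragment of the inherited output (operator id and condition are hypothetical):

```
(4) CartesianProduct
Join condition : (id#0L < id#2L)
```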
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashJoin.scala
@@ -22,20 +22,14 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.BindReferences.bindReferences
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.physical.Partitioning
-import org.apache.spark.sql.execution.{ExplainUtils, RowIterator, SparkPlan}
+import org.apache.spark.sql.execution.{ExplainUtils, RowIterator}
 import org.apache.spark.sql.execution.metric.SQLMetric
 import org.apache.spark.sql.types.{IntegralType, LongType}
 
-trait HashJoin {
-  self: SparkPlan =>
-
+trait HashJoin extends BaseJoinExec {
   def leftKeys: Seq[Expression]
   def rightKeys: Seq[Expression]
-  def joinType: JoinType
   def buildSide: BuildSide
-  def condition: Option[Expression]
-  def left: SparkPlan
-  def right: SparkPlan
 
   override def simpleStringWithNodeId(): String = {
     val opId = ExplainUtils.getOpId(this)
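The structural change in `HashJoin` is the core of this PR: the self-type annotation (`self: SparkPlan =>`) is replaced by direct inheritance, so the common members no longer need to be re-declared in every join trait. A condensed before/after sketch; the `HashJoinBefore`/`HashJoinAfter` names are illustrative, and import paths are as of this commit (`BuildSide` later moved to catalyst):

```scala
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.JoinType
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.joins.{BaseJoinExec, BuildSide}

// Before: a self-typed mixin that re-declared the common join members.
trait HashJoinBefore { self: SparkPlan =>
  def joinType: JoinType            // duplicated across join operators
  def condition: Option[Expression]
  def left: SparkPlan
  def right: SparkPlan
}

// After: inheritance pulls joinType/condition from BaseJoinExec and
// left/right from BinaryExecNode; only hash-join-specific members remain.
trait HashJoinAfter extends BaseJoinExec {
  def buildSide: BuildSide
}
```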
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/ShuffledHashJoinExec.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.physical._
-import org.apache.spark.sql.execution.{BinaryExecNode, SparkPlan}
+import org.apache.spark.sql.execution.SparkPlan
 import org.apache.spark.sql.execution.metric.SQLMetrics
 
 /**
@@ -39,7 +39,7 @@ case class ShuffledHashJoinExec(
     condition: Option[Expression],
     left: SparkPlan,
     right: SparkPlan)
-  extends BinaryExecNode with HashJoin {
+  extends HashJoin {
 
   override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/SortMergeJoinExec.scala
@@ -41,7 +41,7 @@ case class SortMergeJoinExec(
     condition: Option[Expression],
     left: SparkPlan,
     right: SparkPlan,
-    isSkewJoin: Boolean = false) extends BinaryExecNode with CodegenSupport {
+    isSkewJoin: Boolean = false) extends BaseJoinExec with CodegenSupport {
 
   override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
@@ -52,11 +52,6 @@ case class SortMergeJoinExec(
 
   override def stringArgs: Iterator[Any] = super.stringArgs.toSeq.dropRight(1).iterator
 
-  override def simpleStringWithNodeId(): String = {
-    val opId = ExplainUtils.getOpId(this)
-    s"$nodeName $joinType ($opId)".trim
-  }
-
   override def verboseStringWithOperatorId(): String = {
     val joinCondStr = if (condition.isDefined) {
       s"${condition.get}"
sql/core/src/test/scala/org/apache/spark/sql/execution/joins/InnerJoinSuite.scala
@@ -191,7 +191,7 @@ class InnerJoinSuite extends SparkPlanTest with SharedSparkSession {
       withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1",
         SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
         checkAnswer2(leftRows, rightRows, (left: SparkPlan, right: SparkPlan) =>
-          CartesianProductExec(left, right, Some(condition())),
+          CartesianProductExec(left, right, Inner, Some(condition())),
           expectedAnswer.map(Row.fromTuple),
           sortAnswers = true)
       }