Skip to content

Commit ced487a

Browse files
author
云峤
committed
update pep8
1 parent b6e690b commit ced487a

File tree

5 files changed

+0
-52
lines changed

5 files changed

+0
-52
lines changed

python/pyspark/sql/dataframe.py

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1330,16 +1330,6 @@ def cast(self, dataType):
1330 1330
raise TypeError("unexpected type: %s" % type(dataType))
1331 1331
return Column(jc)
1332 1332

1333-
@ignore_unicode_prefix
1334-
def between(self, lowerBound, upperBound):
1335-
""" A boolean expression that is evaluated to true if the value of this
1336-
expression is between the given columns.
1337-
1338-
>>> df[df.col1.between(lowerBound, upperBound)].collect()
1339-
[Row(col1=5, col2=6, col3=8)]
1340-
"""
1341-
return (self >= lowerBound) & (self <= upperBound)
1342-
1343 1333
def __repr__(self):
1344 1334
return 'Column<%s>' % self._jc.toString().encode('utf8')
1345 1335

python/pyspark/sql/tests.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -438,12 +438,6 @@ def test_rand_functions(self):
438 438
for row in rndn:
439 439
assert row[1] >= -4.0 and row[1] <= 4.0, "got: %s" % row[1]
440 440

441-
def test_between_function(self):
442-
df = self.sqlCtx.parallelize([Row(a=1, b=2, c=3), Row(a=2, b=1, c=3), Row(a=4, b=1, c=4)]).toDF()
443-
self.assertEqual([False, True, True],
444-
df.select(df.a.between(df.b, df.c)).collect())
445-
446-
447 441
def test_save_and_load(self):
448 442
df = self.df
449 443
tmpPath = tempfile.mkdtemp()

sql/core/src/main/scala/org/apache/spark/sql/Column.scala

Lines changed: 0 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -295,25 +295,6 @@ class Column(protected[sql] val expr: Expression) extends Logging {
295 295
*/
296 296
def eqNullSafe(other: Any): Column = this <=> other
297 297

298-
/**
299-
* True if the current column is between the lower bound and upper bound, inclusive.
300-
*
301-
* @group java_expr_ops
302-
*/
303-
def between(lowerBound: String, upperBound: String): Column = {
304-
between(Column(lowerBound), Column(upperBound))
305-
}
306-
307-
/**
308-
* True if the current column is between the lower bound and upper bound, inclusive.
309-
*
310-
* @group java_expr_ops
311-
*/
312-
def between(lowerBound: Column, upperBound: Column): Column = {
313-
And(GreaterThanOrEqual(this.expr, lowerBound.expr),
314-
LessThanOrEqual(this.expr, upperBound.expr))
315-
}
316-
317 298
/**
318 299
* True if the current expression is null.
319 300
*

sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -208,12 +208,6 @@ class ColumnExpressionSuite extends QueryTest {
208 208
testData2.collect().toSeq.filter(r => r.getInt(0) <= r.getInt(1)))
209 209
}
210 210

211-
test("between") {
212-
checkAnswer(
213-
testData4.filter($"a".between($"b", $"c")),
214-
testData4.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2)))
215-
}
216-
217 211
val booleanData = TestSQLContext.createDataFrame(TestSQLContext.sparkContext.parallelize(
218 212
Row(false, false) ::
219 213
Row(false, true) ::

sql/core/src/test/scala/org/apache/spark/sql/TestData.scala

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -57,17 +57,6 @@ object TestData {
57 57
TestData2(3, 2) :: Nil, 2).toDF()
58 58
testData2.registerTempTable("testData2")
59 59

60-
case class TestData4(a: Int, b: Int, c: Int)
61-
val testData4 =
62-
TestSQLContext.sparkContext.parallelize(
63-
TestData4(0, 1, 2) ::
64-
TestData4(1, 2, 3) ::
65-
TestData4(2, 1, 0) ::
66-
TestData4(2, 2, 4) ::
67-
TestData4(3, 1, 6) ::
68-
TestData4(3, 2, 0) :: Nil, 2).toDF()
69-
testData4.registerTempTable("TestData4")
70-
71 60
case class DecimalData(a: BigDecimal, b: BigDecimal)
72 61

73 62
val decimalData =

0 commit comments

Comments (0)