
Commit 5390fd9

Author: Ilya Ganelin
Merge remote-tracking branch 'upstream/master' into SPARK-5932
2 parents: db9a963 + 327ebf0

File tree: 365 files changed, +9626 −4032 lines


R/pkg/DESCRIPTION

Lines changed: 3 additions & 3 deletions

@@ -17,19 +17,19 @@ License: Apache License (== 2.0)
 Collate:
     'generics.R'
     'jobj.R'
-    'SQLTypes.R'
     'RDD.R'
     'pairRDD.R'
+    'schema.R'
     'column.R'
     'group.R'
     'DataFrame.R'
     'SQLContext.R'
+    'backend.R'
     'broadcast.R'
+    'client.R'
     'context.R'
     'deserialize.R'
     'serialize.R'
     'sparkR.R'
-    'backend.R'
-    'client.R'
     'utils.R'
     'zzz.R'

R/pkg/NAMESPACE

Lines changed: 17 additions & 3 deletions

@@ -5,6 +5,7 @@ exportMethods(
               "aggregateByKey",
               "aggregateRDD",
               "cache",
+              "cartesian",
               "checkpoint",
               "coalesce",
               "cogroup",
@@ -28,6 +29,7 @@ exportMethods(
               "fullOuterJoin",
               "glom",
               "groupByKey",
+              "intersection",
               "join",
               "keyBy",
               "keys",
@@ -52,11 +54,14 @@ exportMethods(
               "reduceByKeyLocally",
               "repartition",
               "rightOuterJoin",
+              "sampleByKey",
               "sampleRDD",
               "saveAsTextFile",
               "saveAsObjectFile",
               "sortBy",
               "sortByKey",
+              "subtract",
+              "subtractByKey",
               "sumRDD",
               "take",
               "takeOrdered",
@@ -95,6 +100,7 @@ exportClasses("DataFrame")
 exportMethods("columns",
               "distinct",
               "dtypes",
+              "except",
               "explain",
               "filter",
               "groupBy",
@@ -118,7 +124,6 @@ exportMethods("columns",
               "show",
               "showDF",
               "sortDF",
-              "subtract",
               "toJSON",
               "toRDD",
               "unionAll",
@@ -178,5 +183,14 @@ export("cacheTable",
        "toDF",
        "uncacheTable")
 
-export("print.structType",
-       "print.structField")
+export("sparkRSQL.init",
+       "sparkRHive.init")
+
+export("structField",
+       "structField.jobj",
+       "structField.character",
+       "print.structField",
+       "structType",
+       "structType.jobj",
+       "structType.structField",
+       "print.structType")

R/pkg/R/DataFrame.R

Lines changed: 10 additions & 8 deletions

@@ -17,7 +17,7 @@
 
 # DataFrame.R - DataFrame class and methods implemented in S4 OO classes
 
-#' @include jobj.R SQLTypes.R RDD.R pairRDD.R column.R group.R
+#' @include generics.R jobj.R schema.R RDD.R pairRDD.R column.R group.R
 NULL
 
 setOldClass("jobj")
@@ -1141,29 +1141,31 @@ setMethod("intersect",
             dataFrame(intersected)
           })
 
-#' Subtract
+#' except
 #'
 #' Return a new DataFrame containing rows in this DataFrame
 #' but not in another DataFrame. This is equivalent to `EXCEPT` in SQL.
 #'
 #' @param x A Spark DataFrame
 #' @param y A Spark DataFrame
-#' @return A DataFrame containing the result of the subtract operation.
-#' @rdname subtract
+#' @return A DataFrame containing the result of the except operation.
+#' @rdname except
 #' @export
 #' @examples
 #'\dontrun{
 #' sc <- sparkR.init()
 #' sqlCtx <- sparkRSQL.init(sc)
 #' df1 <- jsonFile(sqlCtx, path)
 #' df2 <- jsonFile(sqlCtx, path2)
-#' subtractDF <- subtract(df, df2)
+#' exceptDF <- except(df, df2)
 #' }
-setMethod("subtract",
+#' @rdname except
+#' @export
+setMethod("except",
           signature(x = "DataFrame", y = "DataFrame"),
           function(x, y) {
-            subtracted <- callJMethod(x@sdf, "except", y@sdf)
-            dataFrame(subtracted)
+            excepted <- callJMethod(x@sdf, "except", y@sdf)
+            dataFrame(excepted)
           })
 
 #' Save the contents of the DataFrame to a data source
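With the rename applied, the roxygen example above translates into a short usage sketch; the JSON paths are hypothetical placeholders, not files from this commit:

# Usage sketch based on the roxygen example in the diff above.
# The .json paths are placeholders.
sc <- sparkR.init()
sqlCtx <- sparkRSQL.init(sc)
df1 <- jsonFile(sqlCtx, "people-a.json")
df2 <- jsonFile(sqlCtx, "people-b.json")
exceptDF <- except(df1, df2)   # rows of df1 not present in df2 (SQL EXCEPT)
showDF(exceptDF)               # showDF is among the exported DataFrame methods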
