
Commit 29ae3d5

Merge branch 'master' into SPARK-46437-conditional-jekyll-include

2 parents 68e301c + 7f056d8

9 files changed (+44, −27 lines)

connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectService.scala

Lines changed: 1 addition & 1 deletion
@@ -191,7 +191,7 @@ class SparkConnectService(debug: Boolean) extends AsyncService with BindableServ
       new SparkConnectReleaseExecuteHandler(responseObserver).handle(request)
     } catch
       ErrorUtils.handleError(
-        "reattachExecute",
+        "releaseExecute",
         observer = responseObserver,
         userId = request.getUserContext.getUserId,
         sessionId = request.getSessionId)
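
The only change here is the operation name passed to `ErrorUtils.handleError`: errors raised while handling `releaseExecute` were previously tagged `reattachExecute`, apparently a copy-and-paste slip, so they were attributed to the wrong RPC.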

docs/Gemfile

Lines changed: 5 additions & 1 deletion
@@ -17,8 +17,12 @@
 
 source "https://rubygems.org"
 
+# Keep these specifications as flexible as possible and leave it to Bundler
+# to pin versions in the lock file.
+# To update the lock file, run `bundle update`.
+# Version constraint reference: https://guides.rubygems.org/patterns/#declaring-dependencies
 gem "ffi", "1.15.5"
-gem "jekyll", "4.3.2"
+gem "jekyll", "~> 4.3"
 gem "rouge", "3.26.0"
 gem "jekyll-redirect-from", "0.16.0"
 gem "webrick", "1.8.1"

docs/Gemfile.lock

Lines changed: 9 additions & 9 deletions
@@ -1,7 +1,7 @@
 GEM
   remote: https://rubygems.org/
   specs:
-    addressable (2.8.5)
+    addressable (2.8.6)
       public_suffix (>= 2.0.2, < 6.0)
     colorator (1.1.0)
     concurrent-ruby (1.2.2)
@@ -11,11 +11,11 @@ GEM
     eventmachine (1.2.7)
     ffi (1.15.5)
     forwardable-extended (2.6.0)
-    google-protobuf (3.24.2)
+    google-protobuf (3.25.1)
     http_parser.rb (0.8.0)
     i18n (1.14.1)
       concurrent-ruby (~> 1.0)
-    jekyll (4.3.2)
+    jekyll (4.3.3)
       addressable (~> 2.4)
       colorator (~> 1.0)
       em-websocket (~> 0.5)
@@ -48,28 +48,28 @@ GEM
     mercenary (0.4.0)
     pathutil (0.16.2)
       forwardable-extended (~> 2.6)
-    public_suffix (5.0.3)
-    rake (13.0.6)
+    public_suffix (5.0.4)
+    rake (13.1.0)
     rb-fsevent (0.11.2)
     rb-inotify (0.10.1)
       ffi (~> 1.0)
     rexml (3.2.6)
     rouge (3.26.0)
     safe_yaml (1.0.5)
-    sass-embedded (1.63.6)
-      google-protobuf (~> 3.23)
+    sass-embedded (1.69.7)
+      google-protobuf (~> 3.25)
       rake (>= 13.0.0)
     terminal-table (3.0.2)
       unicode-display_width (>= 1.1.1, < 3)
-    unicode-display_width (2.4.2)
+    unicode-display_width (2.5.0)
     webrick (1.8.1)
 
 PLATFORMS
   ruby
 
 DEPENDENCIES
   ffi (= 1.15.5)
-  jekyll (= 4.3.2)
+  jekyll (~> 4.3)
   jekyll-redirect-from (= 0.16.0)
   rouge (= 3.26.0)
   webrick (= 1.8.1)

docs/README.md

Lines changed: 5 additions & 6 deletions
@@ -30,12 +30,13 @@ whichever version of Spark you currently have checked out of revision control.
 
 The Spark documentation build uses a number of tools to build HTML docs and API docs in Scala, Java, Python, R, and SQL.
 
-You need to have [Ruby](https://www.ruby-lang.org/en/documentation/installation/) and
-[Python](https://docs.python.org/2/using/unix.html#getting-and-installing-the-latest-version-of-python)
-installed. Make sure the `bundle` command is available, if not install the Gem containing it:
+You need to have [Ruby][ruby] and [Python][python] installed. Make sure the `bundle` command is available. If not, install it as follows:
+
+[ruby]: https://www.ruby-lang.org/en/documentation/installation/
+[python]: https://www.python.org/downloads/
 
 ```sh
-$ sudo gem install bundler
+$ gem install bundler
 ```
 
 After this all the required ruby dependencies can be installed from the `docs/` directory via the Bundler:
@@ -45,8 +46,6 @@ $ cd docs
 $ bundle install
 ```
 
-Note: If you are on a system with both Ruby 1.9 and Ruby 2.0 you may need to replace gem with gem2.0.
-
 To generate the Python or R docs, you'll need to [install Pandoc](https://pandoc.org/installing.html).
 
 ### SQL and Python API Documentation (Optional)

pom.xml

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@
   <url>https://spark.apache.org/</url>
   <licenses>
     <license>
-      <name>Apache 2.0 License</name>
+      <name>Apache-2.0</name>
       <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
       <distribution>repo</distribution>
     </license>
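
`Apache-2.0` is the SPDX identifier for the Apache License 2.0, replacing the free-form `Apache 2.0 License` string so that tooling parsing the POM metadata can match the license mechanically.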

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 3 additions & 1 deletion
@@ -3379,7 +3379,9 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
       ctx: ExpressionPropertyListContext): OptionList = {
     val options = ctx.expressionProperty.asScala.map { property =>
       val key: String = visitPropertyKey(property.key)
-      val value: Expression = Option(property.value).map(expression).orNull
+      val value: Expression = Option(property.value).map(expression).getOrElse {
+        operationNotAllowed(s"A value must be specified for the key: $key.", ctx)
+      }
       key -> value
     }.toSeq
     OptionList(options)
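
Previously an options key with no value (e.g. `OPTIONS (password)`) slipped through as a null `Expression`; it now fails at parse time via `operationNotAllowed`. The new `DDLParserSuite` test further down exercises exactly this path.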

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XmlOptions.scala

Lines changed: 1 addition & 1 deletion
@@ -95,7 +95,7 @@ class XmlOptions(
   val nullValue = parameters.getOrElse(NULL_VALUE, XmlOptions.DEFAULT_NULL_VALUE)
   val columnNameOfCorruptRecord =
     parameters.getOrElse(COLUMN_NAME_OF_CORRUPT_RECORD, defaultColumnNameOfCorruptRecord)
-  val ignoreSurroundingSpaces = getBool(IGNORE_SURROUNDING_SPACES, false)
+  val ignoreSurroundingSpaces = getBool(IGNORE_SURROUNDING_SPACES, true)
   val parseMode = ParseMode.fromString(parameters.getOrElse(MODE, PermissiveMode.name))
   val inferSchema = getBool(INFER_SCHEMA, true)
   val rowValidationXSDPath = parameters.get(ROW_VALIDATION_XSD_PATH).orNull
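
This flips the default of `ignoreSurroundingSpaces` to `true`, so the XML reader now trims whitespace around element text by default. A minimal sketch of opting back into the old behavior; the `people.xml` path and `person` row tag are hypothetical, only the option name comes from this commit:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("xml-whitespace").getOrCreate()

// With the new default, " bob " is read back as "bob"; setting the option
// to false restores the old behavior of keeping surrounding whitespace.
val df = spark.read
  .option("rowTag", "person")                  // hypothetical row tag
  .option("ignoreSurroundingSpaces", "false")  // restores the pre-change default
  .xml("people.xml")                           // hypothetical input path
```

The `XmlSuite` updates below show the effect of the new default: expectations that previously contained padded strings such as `" lorem "` now expect the trimmed `"lorem"`.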

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 12 additions & 0 deletions
@@ -2421,6 +2421,18 @@ class DDLParserSuite extends AnalysisTest {
       stop = 42))
   }
 
+  test("SPARK-46610: throw exception when no value for a key in create table options") {
+    val createTableSql = "create table test_table using my_data_source options (password)"
+    checkError(
+      exception = parseException(createTableSql),
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "A value must be specified for the key: password."),
+      context = ExpectedContext(
+        fragment = createTableSql,
+        start = 0,
+        stop = 62))
+  }
+
   test("UNCACHE TABLE") {
     comparePlans(
       parsePlan("UNCACHE TABLE a.b.c"),

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala

Lines changed: 7 additions & 7 deletions
@@ -761,7 +761,7 @@ class XmlSuite
       .collect()
 
     assert(results(0) === Row("alice", "35"))
-    assert(results(1) === Row("bob", " "))
+    assert(results(1) === Row("bob", ""))
     assert(results(2) === Row("coc", "24"))
   }
 
@@ -847,7 +847,7 @@ class XmlSuite
     assert(result(0) === Row(Row(null)))
     assert(result(1) === Row(Row(Row(null, null))))
    assert(result(2) === Row(Row(Row("E", null))))
-    assert(result(3) === Row(Row(Row("E", " "))))
+    assert(result(3) === Row(Row(Row("E", ""))))
     assert(result(4) === Row(Row(Row("E", ""))))
   }
 
@@ -1177,18 +1177,18 @@ class XmlSuite
       .option("inferSchema", true)
       .xml(getTestResourcePath(resDir + "mixed_children.xml"))
     val mixedRow = mixedDF.head()
-    assert(mixedRow.getAs[Row](0) === Row(List(" issue ", " text ignored "), " lorem "))
-    assert(mixedRow.getString(1) === " ipsum ")
+    assert(mixedRow.getAs[Row](0) === Row(List("issue", "text ignored"), "lorem"))
+    assert(mixedRow.getString(1) === "ipsum")
   }
 
   test("test mixed text and complex element children") {
     val mixedDF = spark.read
       .option("rowTag", "root")
       .option("inferSchema", true)
       .xml(getTestResourcePath(resDir + "mixed_children_2.xml"))
-    assert(mixedDF.select("foo.bar").head().getString(0) === " lorem ")
+    assert(mixedDF.select("foo.bar").head().getString(0) === "lorem")
     assert(mixedDF.select("foo.baz.bing").head().getLong(0) === 2)
-    assert(mixedDF.select("missing").head().getString(0) === " ipsum ")
+    assert(mixedDF.select("missing").head().getString(0) === "ipsum")
   }
 
   test("test XSD validation") {
@@ -1752,7 +1752,7 @@ class XmlSuite
     assert(result(1).getAs[String]("_attr") == "attr1"
       && result(1).getAs[String]("_VALUE") == "value2")
     // comments aren't included in valueTag
-    assert(result(2).getAs[String]("_VALUE") == "\n value3\n ")
+    assert(result(2).getAs[String]("_VALUE") == "value3")
   }
 }
