diff --git a/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java b/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java
index 7376d1ddc481..06a8e1740d6a 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java
@@ -203,6 +203,11 @@ public long transfered() {
       return transferred;
     }
 
+    @Override
+    public long transferred() {
+      return transferred;
+    }
+
     @Override
     public long transferTo(WritableByteChannel target, long position) throws IOException {
       Preconditions.checkArgument(position == transfered(), "Invalid position.");
@@ -232,7 +237,7 @@ private void encryptMore() throws IOException {
         int copied = byteRawChannel.write(buf.nioBuffer());
         buf.skipBytes(copied);
       } else {
-        region.transferTo(byteRawChannel, region.transfered());
+        region.transferTo(byteRawChannel, region.transferred());
       }
       cos.write(byteRawChannel.getData(), 0, byteRawChannel.length());
       cos.flush();
@@ -241,6 +246,28 @@ private void encryptMore() throws IOException {
         0, byteEncChannel.length());
     }
 
+    @Override
+    public FileRegion retain() {
+      super.retain();
+      return this;
+    }
+
+    @Override
+    public FileRegion retain(int increment) {
+      super.retain(increment);
+      return this;
+    }
+
+    @Override
+    public FileRegion touch() {
+      return this;
+    }
+
+    @Override
+    public FileRegion touch(Object o) {
+      return this;
+    }
+
     @Override
     protected void deallocate() {
       byteRawChannel.reset();
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java
index 4f8781b42a0e..7e76a07cac93 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java
@@ -95,6 +95,11 @@ public long transfered() {
     return totalBytesTransferred;
   }
 
+  @Override
+  public long transferred() {
+    return totalBytesTransferred;
+  }
+
   /**
    * This code is more complicated than you would think because we might require multiple
    * transferTo invocations in order to transfer a single MessageWithHeader to avoid busy waiting.
@@ -127,6 +132,28 @@ public long transferTo(final WritableByteChannel target, final long position) th
     return writtenHeader + writtenBody;
   }
 
+  @Override
+  public FileRegion touch(Object msg) {
+    return this;
+  }
+
+  @Override
+  public FileRegion retain() {
+    super.retain();
+    return this;
+  }
+
+  @Override
+  public FileRegion retain(int increment) {
+    super.retain(increment);
+    return this;
+  }
+
+  @Override
+  public FileRegion touch() {
+    return this;
+  }
+
   @Override
   protected void deallocate() {
     header.release();
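The hunks above, together with the SaslEncryption, test, and DiskStore changes that follow, all add the same Netty 4.1 boilerplate: `FileRegion.transfered()` is deprecated in favour of the correctly spelled `transferred()`, and the `ReferenceCounted` methods `retain()`/`touch()` are now overridden with covariant `FileRegion` returns. A minimal standalone sketch of that pattern (the class name is invented for illustration; this is not Spark code):

```java
import io.netty.channel.FileRegion;
import io.netty.util.AbstractReferenceCounted;

// Illustration of the override set a custom FileRegion needs on Netty 4.1.
abstract class ExampleFileRegion extends AbstractReferenceCounted implements FileRegion {
  private long transferred;

  @Override
  @SuppressWarnings("deprecation")
  public long transfered() {           // Netty 4.0 spelling, kept but deprecated in 4.1
    return transferred;
  }

  @Override
  public long transferred() {          // Netty 4.1 replacement
    return transferred;
  }

  @Override
  public FileRegion retain() {
    super.retain();                    // delegate the ref-count bump, preserve the FileRegion type
    return this;
  }

  @Override
  public FileRegion retain(int increment) {
    super.retain(increment);
    return this;
  }

  @Override
  public FileRegion touch() {          // new in 4.1; a no-op is fine when no leak hints are kept
    return this;
  }

  @Override
  public FileRegion touch(Object hint) {
    return this;
  }
}
```

Concrete subclasses still supply position(), count(), transferTo() and deallocate(), which is why the classes patched here only need the additions shown in the hunks.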
diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java
index 3d71ebaa7ea0..ec3829e88232 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java
@@ -187,6 +187,11 @@ public long transfered() {
       return transferred;
     }
 
+    @Override
+    public long transferred() {
+      return transferred;
+    }
+
     /**
      * Transfers data from the original message to the channel, encrypting it in the process.
      *
@@ -262,7 +267,7 @@ private void nextChunk() throws IOException {
         int copied = byteChannel.write(buf.nioBuffer());
         buf.skipBytes(copied);
       } else {
-        region.transferTo(byteChannel, region.transfered());
+        region.transferTo(byteChannel, region.transferred());
       }
 
       byte[] encrypted = backend.wrap(byteChannel.getData(), 0, byteChannel.length());
@@ -272,6 +277,28 @@ private void nextChunk() throws IOException {
       this.unencryptedChunkSize = byteChannel.length();
     }
 
+    @Override
+    public FileRegion touch(Object o) {
+      return this;
+    }
+
+    @Override
+    public FileRegion retain() {
+      super.retain();
+      return this;
+    }
+
+    @Override
+    public FileRegion retain(int increment) {
+      super.retain(increment);
+      return this;
+    }
+
+    @Override
+    public FileRegion touch() {
+      return this;
+    }
+
     @Override
     protected void deallocate() {
       if (currentHeader != null) {
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
index 047c5f3f1f09..744e3bdb1b96 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
@@ -140,11 +140,12 @@ public void close() {
       channelFuture.channel().close().awaitUninterruptibly(10, TimeUnit.SECONDS);
       channelFuture = null;
     }
-    if (bootstrap != null && bootstrap.group() != null) {
-      bootstrap.group().shutdownGracefully();
+    if (bootstrap != null && bootstrap.config() != null && bootstrap.config().group() != null) {
+      bootstrap.config().group().shutdownGracefully();
     }
-    if (bootstrap != null && bootstrap.childGroup() != null) {
-      bootstrap.childGroup().shutdownGracefully();
+    if (bootstrap != null && bootstrap.config() != null
+        && bootstrap.config().childGroup() != null) {
+      bootstrap.config().childGroup().shutdownGracefully();
     }
     bootstrap = null;
   }
diff --git a/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java b/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java
index bb1c40c4b0e0..6fb44fea8c5a 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java
@@ -56,7 +56,7 @@ private void testServerToClient(Message msg) {
       NettyUtils.createFrameDecoder(), MessageDecoder.INSTANCE);
 
     while (!serverChannel.outboundMessages().isEmpty()) {
-      clientChannel.writeInbound(serverChannel.readOutbound());
+      clientChannel.writeOneInbound(serverChannel.readOutbound());
     }
 
     assertEquals(1, clientChannel.inboundMessages().size());
@@ -72,7 +72,7 @@ private void testClientToServer(Message msg) {
       NettyUtils.createFrameDecoder(), MessageDecoder.INSTANCE);
 
     while (!clientChannel.outboundMessages().isEmpty()) {
-      serverChannel.writeInbound(clientChannel.readOutbound());
+      serverChannel.writeOneInbound(clientChannel.readOutbound());
     }
 
     assertEquals(1, serverChannel.inboundMessages().size());
@@ -116,8 +116,8 @@ public void encode(ChannelHandlerContext ctx, FileRegion in, List<Object> out)
         throws Exception {
 
       ByteArrayWritableChannel channel = new ByteArrayWritableChannel(Ints.checkedCast(in.count()));
-      while (in.transfered() < in.count()) {
-        in.transferTo(channel, in.transfered());
+      while (in.transferred() < in.count()) {
+        in.transferTo(channel, in.transferred());
       }
       out.add(Unpooled.wrappedBuffer(channel.getData()));
     }
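The TransportServer hunk reflects a separate Netty 4.1 deprecation: the boss and worker `EventLoopGroup`s are now read from the immutable `ServerBootstrapConfig` returned by `config()` rather than from the bootstrap itself. A rough sketch of the resulting shutdown idiom (the helper class and method names here are illustrative, not Spark's):

```java
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.EventLoopGroup;

final class BootstrapShutdown {
  // Mirrors the close() logic above: stop the acceptor group, then the worker group.
  static void shutdownGroups(ServerBootstrap bootstrap) {
    if (bootstrap == null || bootstrap.config() == null) {
      return;
    }
    EventLoopGroup bossGroup = bootstrap.config().group();        // was bootstrap.group()
    EventLoopGroup workerGroup = bootstrap.config().childGroup(); // was bootstrap.childGroup()
    if (bossGroup != null) {
      bossGroup.shutdownGracefully();
    }
    if (workerGroup != null) {
      workerGroup.shutdownGracefully();
    }
  }
}
```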
diff --git a/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java b/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java
index b341c5681e00..04b9ab62f55b 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java
@@ -134,6 +134,11 @@ public long transfered() {
       return 8 * written;
     }
 
+    @Override
+    public long transferred() {
+      return 8 * written;
+    }
+
     @Override
     public long transferTo(WritableByteChannel target, long position) throws IOException {
       for (int i = 0; i < writesPerCall; i++) {
@@ -148,6 +153,28 @@ public long transferTo(WritableByteChannel target, long position) throws IOExcep
       return 8 * writesPerCall;
     }
 
+    @Override
+    public FileRegion retain() {
+      super.retain();
+      return this;
+    }
+
+    @Override
+    public FileRegion retain(int increment) {
+      super.retain(increment);
+      return this;
+    }
+
+    @Override
+    public FileRegion touch(Object o) {
+      return this;
+    }
+
+    @Override
+    public FileRegion touch() {
+      return this;
+    }
+
     @Override
     protected void deallocate() {
     }
diff --git a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
index cfbf6a8a9a61..39cc7336051b 100644
--- a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
+++ b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
@@ -22,7 +22,6 @@
 import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
 import java.util.List;
 import java.util.Map;
diff --git a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
index 86cb07ae711a..d2e18531bf48 100644
--- a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
+++ b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
@@ -53,7 +53,8 @@ public void getMetrics(MetricsCollector collector, boolean all) {
   }
 
   @VisibleForTesting
-  public static void collectMetric(MetricsRecordBuilder metricsRecordBuilder, String name, Metric metric) {
+  public static void collectMetric(
+      MetricsRecordBuilder metricsRecordBuilder, String name, Metric metric) {
 
     // The metric types used in ExternalShuffleBlockHandler.ShuffleMetrics
     if (metric instanceof Timer) {
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index c6656341fcd1..160d1d62f9b6 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -266,6 +266,22 @@ private class ReadableChannelFileRegion(source: ReadableByteChannel, blockSize:
   }
 
   override def deallocate(): Unit = source.close()
+
+  override def transferred(): Long = _transferred
+
+  override def touch(o: scala.Any): FileRegion = this
+
+  override def retain(): FileRegion = {
+    super.retain()
+    this
+  }
+
+  override def retain(increment: Int): FileRegion = {
+    super.retain(increment)
+    this
+  }
+
+  override def touch(): FileRegion = this
 }
 
 private class CountingWritableChannel(sink: WritableByteChannel) extends WritableByteChannel {
diff --git a/dev/deps/spark-deps-hadoop-palantir b/dev/deps/spark-deps-hadoop-palantir
index 33ae78ff3bee..dc05fb8fb913 100644
--- a/dev/deps/spark-deps-hadoop-palantir
+++ b/dev/deps/spark-deps-hadoop-palantir
@@ -1,13 +1,13 @@
 JavaEWAH-0.3.2.jar
-RoaringBitmap-0.5.11.jar
+RoaringBitmap-0.6.43.jar
 ST4-4.0.4.jar
 activation-1.1.1.jar
 animal-sniffer-annotation-1.0.jar
 antlr-2.7.7.jar
 antlr-runtime-3.4.jar
-antlr4-runtime-4.5.3.jar
+antlr4-runtime-4.7.jar
 aopalliance-1.0.jar
-aopalliance-repackaged-2.4.0-b34.jar
+aopalliance-repackaged-2.5.0-b32.jar
 apache-log4j-extras-1.2.17.jar
 apacheds-i18n-2.0.0-M15.jar
 apacheds-kerberos-codec-2.0.0-M15.jar
@@ -89,12 +89,12 @@ hadoop-yarn-client-2.8.0-palantir3.jar
 hadoop-yarn-common-2.8.0-palantir3.jar
 hadoop-yarn-server-common-2.8.0-palantir3.jar
 hadoop-yarn-server-web-proxy-2.8.0-palantir3.jar
-hk2-api-2.4.0-b34.jar
-hk2-locator-2.4.0-b34.jar
-hk2-utils-2.4.0-b34.jar
+hk2-api-2.5.0-b32.jar
+hk2-locator-2.5.0-b32.jar
+hk2-utils-2.5.0-b32.jar
 htrace-core4-4.0.1-incubating.jar
-httpclient-4.5.2.jar
-httpcore-4.4.4.jar
+httpclient-4.5.3.jar
+httpcore-4.4.6.jar
 ion-java-1.0.1.jar
 ivy-2.4.0.jar
 jackson-annotations-2.6.5.jar
@@ -113,24 +113,24 @@ jackson-module-scala_2.11-2.6.5.jar
 jackson-xc-1.9.13.jar
 janino-3.0.0.jar
 java-xmlbuilder-1.0.jar
-javassist-3.18.1-GA.jar
+javassist-3.20.0-GA.jar
 javax.annotation-api-1.2.jar
 javax.inject-1.jar
-javax.inject-2.4.0-b34.jar
+javax.inject-2.5.0-b32.jar
 javax.servlet-api-3.1.0.jar
 javax.ws.rs-api-2.0.1.jar
 javolution-5.5.1.jar
 jaxb-api-2.2.2.jar
 jcip-annotations-1.0.jar
-jcl-over-slf4j-1.7.16.jar
+jcl-over-slf4j-1.7.25.jar
 jdo-api-3.0.1.jar
-jersey-client-2.22.2.jar
-jersey-common-2.22.2.jar
-jersey-container-servlet-2.22.2.jar
-jersey-container-servlet-core-2.22.2.jar
-jersey-guava-2.22.2.jar
-jersey-media-jaxb-2.22.2.jar
-jersey-server-2.22.2.jar
+jersey-client-2.25.1.jar
+jersey-common-2.25.1.jar
+jersey-container-servlet-2.25.1.jar
+jersey-container-servlet-core-2.25.1.jar
+jersey-guava-2.25.1.jar
+jersey-media-jaxb-2.25.1.jar
+jersey-server-2.25.1.jar
 jets3t-0.9.3.jar
 jetty-6.1.26.jar
 jetty-sslengine-6.1.26.jar
@@ -144,10 +144,10 @@ json4s-ast_2.11-3.2.11.jar
 json4s-core_2.11-3.2.11.jar
 json4s-jackson_2.11-3.2.11.jar
 jsp-api-2.1.jar
-jsr305-1.3.9.jar
+jsr305-3.0.1.jar
 jta-1.1.jar
 jtransforms-2.4.0.jar
-jul-to-slf4j-1.7.16.jar
+jul-to-slf4j-1.7.25.jar
 kryo-shaded-3.0.3.jar
 kubernetes-client-2.2.13.jar
 kubernetes-model-1.0.67.jar
@@ -166,13 +166,13 @@ metrics-json-3.1.2.jar
 metrics-jvm-3.1.2.jar
 minlog-1.3.0.jar
 mx4j-3.0.2.jar
-netty-3.9.9.Final.jar
-netty-all-4.0.43.Final.jar
+netty-3.10.6.Final.jar
+netty-all-4.1.9.Final.jar
 nimbus-jose-jwt-3.9.jar
 objenesis-2.1.jar
-okhttp-2.4.0.jar
-okhttp-3.6.0.jar
-okio-1.11.0.jar
+okhttp-2.7.5.jar
+okhttp-3.7.0.jar
+okio-1.12.0.jar
 opencsv-2.3.jar
 oro-2.0.8.jar
 osgi-resource-locator-1.0.1.jar
@@ -196,8 +196,8 @@ scala-reflect-2.11.8.jar
 scala-xml_2.11-1.0.2.jar
 scalap-2.11.8.jar
 shapeless_2.11-2.3.2.jar
-slf4j-api-1.7.16.jar
-slf4j-log4j12-1.7.16.jar
+slf4j-api-1.7.25.jar
+slf4j-log4j12-1.7.25.jar
 snakeyaml-1.15.jar
 snappy-0.2.jar
 snappy-java-1.1.2.6.jar
diff --git a/pom.xml b/pom.xml
index c0547d369577..55e3acce98de 100644
--- a/pom.xml
+++ b/pom.xml
@@ -116,9 +116,9 @@
     UTF-8
     UTF-8
     1.8
-    3.3.9
+    3.5.0
     spark
-    1.7.16
+    1.7.25
     1.2.17
     2.6.5
     2.5.0
@@ -134,8 +134,11 @@
     10.12.1.1
     1.9.1-palantir1
     8.18.0
+    3.7.0
+    2.7.5
+    1.12.0
     1.54
-    9.3.11.v20160721
+    9.4.5.v20170502
     3.1.0
     0.8.0
     2.4.0
@@ -150,8 +153,8 @@
     0.10.2
-    4.5.2
-    4.4.4
+    4.5.3
+    4.4.6
     3.1
     3.4.1
@@ -172,12 +175,12 @@
     3.5
     3.2.10
     3.0.0
-    2.22.2
+    2.25.1
     2.9.9
     3.5.2
-    1.3.9
+    3.0.1
     0.9.3
-    4.5.3
+    4.7
     1.1
     2.52.0
     2.6
@@ -325,6 +328,22 @@
       ${chill.version}
+
+      com.squareup.okio
+      okio
+      ${okio.version}
+
+
+      com.squareup.okhttp3
+      okhttp
+      ${okhttp3.version}
+
+
+      com.squareup.okhttp
+      okhttp
+      ${okhttp.version}
+
+
       com.netflix.feign
       feign-core
@@ -594,7 +613,7 @@
       org.roaringbitmap
       RoaringBitmap
-      0.5.11
+      0.6.43
       commons-net
@@ -604,12 +623,12 @@
       io.netty
       netty-all
-      4.0.43.Final
+      4.1.9.Final
       io.netty
       netty
-      3.9.9.Final
+      3.10.6.Final
       org.apache.derby
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 057cdf98737e..04ec67a4f448 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -491,6 +491,7 @@ object OldDeps {
 
 object Catalyst {
   lazy val settings = antlr4Settings ++ Seq(
+    antlr4Version in Antlr4 := "4.7",
    antlr4PackageName in Antlr4 := Some("org.apache.spark.sql.catalyst.parser"),
    antlr4GenListener in Antlr4 := true,
    antlr4GenVisitor in Antlr4 := true
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index ed5450b494cc..a2ccd995bec5 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -64,6 +64,10 @@ singleDataType
     : dataType EOF
     ;
 
+standaloneColTypeList
+    : colTypeList EOF
+    ;
+
 statement
     : query                                                            #statementDefault
     | USE db=identifier                                                #use
@@ -968,7 +972,7 @@ CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP';
 
 STRING
     : '\'' ( ~('\''|'\\') | ('\\' .) )* '\''
-    | '\"' ( ~('\"'|'\\') | ('\\' .) )* '\"'
+    | '"' ( ~('"'|'\\') | ('\\' .) )* '"'
     ;
 
 BIGINT_LITERAL
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 046ea65d454a..3e9ad1367df5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -86,6 +86,11 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     visitSparkDataType(ctx.dataType)
   }
 
+  override def visitStandaloneColTypeList(ctx: StandaloneColTypeListContext): Seq[StructField] =
+    withOrigin(ctx) {
+      visitColTypeList(ctx.colTypeList)
+    }
+
   /* ********************************************************************************************
    * Plan parsing
    * ******************************************************************************************** */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
index dcccbd0ed8d6..71b199bdf921 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
@@ -60,7 +60,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging {
    * definitions which will preserve the correct Hive metadata.
    */
   override def parseTableSchema(sqlText: String): StructType = parse(sqlText) { parser =>
-    StructType(astBuilder.visitColTypeList(parser.colTypeList()))
+    StructType(astBuilder.visitStandaloneColTypeList(parser.standaloneColTypeList()))
   }
 
   /** Creates LogicalPlan for a given SQL string. */
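The grammar and parser hunks above pair the ANTLR 4.7 bump with a new EOF-anchored entry rule, so `parseTableSchema` now drives `standaloneColTypeList` instead of the bare `colTypeList` rule. A rough sketch of the generated API this produces (class names follow the grammar; the real `ParseDriver` additionally installs its own char stream and error listeners, which are omitted here):

```java
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;

// Sketch only: exercises the classes ANTLR 4.7 generates from SqlBase.g4.
class StandaloneColTypeListSketch {
  static void parse(String schema) {
    SqlBaseLexer lexer = new SqlBaseLexer(CharStreams.fromString(schema));
    SqlBaseParser parser = new SqlBaseParser(new CommonTokenStream(lexer));
    // The trailing EOF in standaloneColTypeList forces the parser to consume the whole
    // input, so trailing tokens are reported as syntax errors rather than silently dropped.
    SqlBaseParser.StandaloneColTypeListContext ctx = parser.standaloneColTypeList();
    System.out.println(ctx.toStringTree(parser));
  }
}
```

CharStreams.fromString is itself new surface area: ANTLR 4.7 introduces the CharStreams factories as the replacement for the older ANTLRInputStream constructors.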
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index cf137cfdf96e..4d26b9d77348 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -507,7 +507,7 @@ class PlanParserSuite extends PlanTest {
     val m = intercept[ParseException] {
       parsePlan("SELECT /*+ HINT() */ * FROM t")
     }.getMessage
-    assert(m.contains("no viable alternative at input"))
+    assert(m.contains("mismatched input ')' expecting"))
 
     // Hive compatibility: No database.
     val m2 = intercept[ParseException] {
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
index 9851f8b62bd2..2d4b72aea841 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
@@ -154,7 +154,8 @@ public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptCont
     ReadSupport.ReadContext readContext = readSupport.init(new InitContext(
       taskAttemptContext.getConfiguration(), toSetMultiMap(fileMetadata), fileSchema));
     this.requestedSchema = readContext.getRequestedSchema();
-    this.reader = ParquetFileReader.open(configuration, file, new ParquetMetadata(footer.getFileMetaData(), blocks));
+    this.reader = ParquetFileReader.open(
+      configuration, file, new ParquetMetadata(footer.getFileMetaData(), blocks));
     this.reader.setRequestedSchema(requestedSchema);
     String sparkRequestedSchemaString =
       configuration.get(ParquetReadSupport$.MODULE$.SPARK_ROW_REQUESTED_SCHEMA());
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index 8f2a54f7c24e..992f39cb5fd8 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -161,7 +161,7 @@ struct<>
 -- !query 13 output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-mismatched input '<EOF>' expecting 'LIKE'(line 1, pos 19)
+mismatched input '<EOF>' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 19)
 
 == SQL ==
 SHOW TABLE EXTENDED
@@ -184,7 +184,7 @@ struct<>
 -- !query 15 output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-mismatched input 'PARTITION' expecting 'LIKE'(line 1, pos 20)
+mismatched input 'PARTITION' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 20)
 
 == SQL ==
 SHOW TABLE EXTENDED PARTITION(c='Us', d=1)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
index bf1fd160704f..b317bc5b5264 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.jdbc
 
-import java.sql.{Date, DriverManager, Timestamp}
+import java.sql.DriverManager
 import java.util.Properties
 
 import scala.collection.JavaConverters.propertiesAsScalaMapConverter
@@ -466,7 +466,7 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
.option("createTableColumnTypes", "`name char(20)") // incorrectly quoted column .jdbc(url1, "TEST.USERDBTYPETEST", properties) }.getMessage() - assert(msg.contains("no viable alternative at input")) + assert(msg.contains("extraneous input '`' expecting")) } test("SPARK-10849: jdbc CreateTableColumnTypes duplicate columns") {