Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -2909,7 +2909,15 @@ object SQLConf {
.booleanConf
.createWithDefault(sys.env.get("SPARK_ANSI_SQL_MODE").contains("true"))

val DOUBLE_QUOTED_IDENTIFIERS = buildConf("spark.sql.ansi.double_quoted_identifiers")
// ANSI reserved-keyword enforcement flag. Only effective when ANSI_ENABLED is
// also true (see `enforceReservedKeywords` in class SQLConf, which ANDs the two).
// Defaults to false so existing queries using reserved words as identifiers keep working.
val ENFORCE_RESERVED_KEYWORDS = buildConf("spark.sql.ansi.enforceReservedKeywords")
.doc(s"When true and '${ANSI_ENABLED.key}' is true, the Spark SQL parser enforces the ANSI " +
"reserved keywords and forbids SQL queries that use reserved keywords as alias names " +
"and/or identifiers for table, view, function, etc.")
.version("3.3.0")
.booleanConf
.createWithDefault(false)

val DOUBLE_QUOTED_IDENTIFIERS = buildConf("spark.sql.ansi.doubleQuotedIdentifiers")
.doc("When true, Spark SQL reads literals enclosed in double quoted (\") as identifiers. " +
"When false they are read as string literals.")
.version("3.4.0")
Expand Down Expand Up @@ -2964,14 +2972,6 @@ object SQLConf {
.booleanConf
.createWithDefault(false)

val ENFORCE_RESERVED_KEYWORDS = buildConf("spark.sql.ansi.enforceReservedKeywords")
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not related to this PR, but I moved this code next to the ANSI_ENABLED flag to group the ANSI SQL related flags together.

.doc(s"When true and '${ANSI_ENABLED.key}' is true, the Spark SQL parser enforces the ANSI " +
"reserved keywords and forbids SQL queries that use reserved keywords as alias names " +
"and/or identifiers for table, view, function, etc.")
.version("3.3.0")
.booleanConf
.createWithDefault(false)

val SORT_BEFORE_REPARTITION =
buildConf("spark.sql.execution.sortBeforeRepartition")
.internal()
Expand Down Expand Up @@ -4592,7 +4592,7 @@ class SQLConf extends Serializable with Logging {

// Effective only under ANSI mode: requires both ANSI_ENABLED and
// ENFORCE_RESERVED_KEYWORDS to be true.
def enforceReservedKeywords: Boolean = ansiEnabled && getConf(ENFORCE_RESERVED_KEYWORDS)

def doubleQuotedIdentifiers: Boolean = getConf(DOUBLE_QUOTED_IDENTIFIERS)
// Gated on ANSI mode, mirroring enforceReservedKeywords: setting
// DOUBLE_QUOTED_IDENTIFIERS alone is not sufficient to enable the behavior.
def doubleQuotedIdentifiers: Boolean = ansiEnabled && getConf(DOUBLE_QUOTED_IDENTIFIERS)

def timestampType: AtomicType = getConf(TIMESTAMP_TYPE) match {
case "TIMESTAMP_LTZ" =>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- Re-run the shared double-quoted-identifiers suite with the flag disabled:
-- double-quoted text is read as string literals.
--SET spark.sql.ansi.doubleQuotedIdentifiers=false
--IMPORT double-quoted-identifiers.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
-- Re-run the shared double-quoted-identifiers suite with the flag enabled:
-- double-quoted text is read as identifiers (NOTE(review): appears to take
-- effect only when ANSI mode is also on — confirm against SQLConf).
--SET spark.sql.ansi.doubleQuotedIdentifiers=true
--IMPORT double-quoted-identifiers.sql

Original file line number Diff line number Diff line change
@@ -1,8 +1,3 @@
-- test cases for spark.sql.ansi.double_quoted_identifiers

-- Base line
SET spark.sql.ansi.double_quoted_identifiers = false;

-- All these should error out in the parser
SELECT 1 FROM "not_exist";

Expand Down Expand Up @@ -45,51 +40,6 @@ DROP VIEW v;

SELECT INTERVAL "1" YEAR;

-- Now turn on the config.
SET spark.sql.ansi.double_quoted_identifiers = true;

-- All these should error out in analysis now
SELECT 1 FROM "not_exist";

USE SCHEMA "not_exist";

ALTER TABLE "not_exist" ADD COLUMN not_exist int;

ALTER TABLE not_exist ADD COLUMN "not_exist" int;

SELECT 1 AS "not_exist" FROM not_exist;

SELECT 1 FROM not_exist AS X("hello");

SELECT "not_exist"();

SELECT "not_exist".not_exist();

SELECT "hello";

-- Back ticks still work
SELECT 1 FROM `hello`;

USE SCHEMA `not_exist`;

ALTER TABLE `not_exist` ADD COLUMN not_exist int;

ALTER TABLE not_exist ADD COLUMN `not_exist` int;

SELECT 1 AS `not_exist` FROM `not_exist`;

SELECT 1 FROM not_exist AS X(`hello`);

SELECT `not_exist`();

SELECT `not_exist`.not_exist();

-- These fail in the parser now
CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1;
DROP VIEW v;

SELECT INTERVAL "1" YEAR;

-- Single ticks still work
SELECT 'hello';

Expand Down
Loading