34 changes: 18 additions & 16 deletions sql/core/src/test/resources/sql-tests/inputs/postgreSQL/date.sql
@@ -7,23 +7,25 @@

CREATE TABLE DATE_TBL (f1 date) USING parquet;

INSERT INTO DATE_TBL VALUES ('1957-04-09');
INSERT INTO DATE_TBL VALUES ('1957-06-13');
INSERT INTO DATE_TBL VALUES ('1996-02-28');
INSERT INTO DATE_TBL VALUES ('1996-02-29');
INSERT INTO DATE_TBL VALUES ('1996-03-01');
INSERT INTO DATE_TBL VALUES ('1996-03-02');
INSERT INTO DATE_TBL VALUES ('1997-02-28');
-- PostgreSQL implicitly casts string literals to data with date types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO DATE_TBL VALUES (date('1957-04-09'));
INSERT INTO DATE_TBL VALUES (date('1957-06-13'));
INSERT INTO DATE_TBL VALUES (date('1996-02-28'));
INSERT INTO DATE_TBL VALUES (date('1996-02-29'));
INSERT INTO DATE_TBL VALUES (date('1996-03-01'));
INSERT INTO DATE_TBL VALUES (date('1996-03-02'));
INSERT INTO DATE_TBL VALUES (date('1997-02-28'));
-- [SPARK-27923] Skip invalid date: 1997-02-29
-- INSERT INTO DATE_TBL VALUES ('1997-02-29');
INSERT INTO DATE_TBL VALUES ('1997-03-01');
INSERT INTO DATE_TBL VALUES ('1997-03-02');
INSERT INTO DATE_TBL VALUES ('2000-04-01');
INSERT INTO DATE_TBL VALUES ('2000-04-02');
INSERT INTO DATE_TBL VALUES ('2000-04-03');
INSERT INTO DATE_TBL VALUES ('2038-04-08');
INSERT INTO DATE_TBL VALUES ('2039-04-09');
INSERT INTO DATE_TBL VALUES ('2040-04-10');
-- INSERT INTO DATE_TBL VALUES (date('1997-02-29'));
INSERT INTO DATE_TBL VALUES (date('1997-03-01'));
INSERT INTO DATE_TBL VALUES (date('1997-03-02'));
INSERT INTO DATE_TBL VALUES (date('2000-04-01'));
INSERT INTO DATE_TBL VALUES (date('2000-04-02'));
INSERT INTO DATE_TBL VALUES (date('2000-04-03'));
INSERT INTO DATE_TBL VALUES (date('2038-04-08'));
INSERT INTO DATE_TBL VALUES (date('2039-04-09'));
INSERT INTO DATE_TBL VALUES (date('2040-04-10'));

SELECT f1 AS `Fifteen` FROM DATE_TBL;
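For illustration (not part of the patch): the comment added above is the reason for every change in this file. PostgreSQL coerces a quoted literal to the date column's type on insert, while Spark SQL expects the conversion to be written out; the date() call used here is equivalent to a standard CAST. A minimal sketch of the accepted and rejected forms:

-- Accepted by Spark SQL: explicit cast, either spelling
INSERT INTO DATE_TBL VALUES (date('1957-04-09'));
INSERT INTO DATE_TBL VALUES (CAST('1957-04-09' AS date));
-- Accepted by PostgreSQL, rejected by Spark SQL (relies on implicit string-to-date coercion):
-- INSERT INTO DATE_TBL VALUES ('1957-04-09');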

sql/core/src/test/resources/sql-tests/inputs/postgreSQL/float4.sql
@@ -7,11 +7,13 @@

CREATE TABLE FLOAT4_TBL (f1 float) USING parquet;

INSERT INTO FLOAT4_TBL VALUES (' 0.0');
INSERT INTO FLOAT4_TBL VALUES ('1004.30 ');
INSERT INTO FLOAT4_TBL VALUES (' -34.84 ');
INSERT INTO FLOAT4_TBL VALUES ('1.2345678901234e+20');
INSERT INTO FLOAT4_TBL VALUES ('1.2345678901234e-20');
-- PostgreSQL implicitly casts string literals to data with floating point types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO FLOAT4_TBL VALUES (float(' 0.0'));
INSERT INTO FLOAT4_TBL VALUES (float('1004.30 '));
INSERT INTO FLOAT4_TBL VALUES (float(' -34.84 '));
INSERT INTO FLOAT4_TBL VALUES (float('1.2345678901234e+20'));
INSERT INTO FLOAT4_TBL VALUES (float('1.2345678901234e-20'));

-- [SPARK-28024] Incorrect numeric values when out of range
-- test for over and under flow
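For illustration (not part of the patch): the same workaround applied to the single-precision column, where float() spells out the conversion PostgreSQL would perform implicitly. A minimal sketch, reusing FLOAT4_TBL from above:

INSERT INTO FLOAT4_TBL VALUES (float('1004.30 '));
INSERT INTO FLOAT4_TBL VALUES (CAST('1004.30 ' AS float));
-- PostgreSQL-only form, relies on implicit coercion of the string literal:
-- INSERT INTO FLOAT4_TBL VALUES ('1004.30 ');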
24 changes: 14 additions & 10 deletions sql/core/src/test/resources/sql-tests/inputs/postgreSQL/float8.sql
@@ -7,11 +7,13 @@

CREATE TABLE FLOAT8_TBL(f1 double) USING parquet;

INSERT INTO FLOAT8_TBL VALUES (' 0.0 ');
INSERT INTO FLOAT8_TBL VALUES ('1004.30 ');
INSERT INTO FLOAT8_TBL VALUES (' -34.84');
INSERT INTO FLOAT8_TBL VALUES ('1.2345678901234e+200');
INSERT INTO FLOAT8_TBL VALUES ('1.2345678901234e-200');
-- PostgreSQL implicitly casts string literals to data with floating point types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO FLOAT8_TBL VALUES (double(' 0.0 '));
INSERT INTO FLOAT8_TBL VALUES (double('1004.30 '));
INSERT INTO FLOAT8_TBL VALUES (double(' -34.84'));
INSERT INTO FLOAT8_TBL VALUES (double('1.2345678901234e+200'));
INSERT INTO FLOAT8_TBL VALUES (double('1.2345678901234e-200'));

-- [SPARK-28024] Incorrect numeric values when out of range
-- test for underflow and overflow handling
@@ -227,15 +229,17 @@ SELECT atanh(double('NaN'));

TRUNCATE TABLE FLOAT8_TBL;

INSERT INTO FLOAT8_TBL VALUES ('0.0');
-- PostgreSQL implicitly casts string literals to data with floating point types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO FLOAT8_TBL VALUES (double('0.0'));

INSERT INTO FLOAT8_TBL VALUES ('-34.84');
INSERT INTO FLOAT8_TBL VALUES (double('-34.84'));

INSERT INTO FLOAT8_TBL VALUES ('-1004.30');
INSERT INTO FLOAT8_TBL VALUES (double('-1004.30'));

INSERT INTO FLOAT8_TBL VALUES ('-1.2345678901234e+200');
INSERT INTO FLOAT8_TBL VALUES (double('-1.2345678901234e+200'));

INSERT INTO FLOAT8_TBL VALUES ('-1.2345678901234e-200');
INSERT INTO FLOAT8_TBL VALUES (double('-1.2345678901234e-200'));

SELECT '' AS five, * FROM FLOAT8_TBL;
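For illustration (not part of the patch): the double-precision file follows the same pattern, with double() as the explicit conversion; both forms below should be equivalent in Spark SQL:

INSERT INTO FLOAT8_TBL VALUES (double('-1.2345678901234e+200'));
INSERT INTO FLOAT8_TBL VALUES (CAST('-1.2345678901234e+200' AS double));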

14 changes: 9 additions & 5 deletions sql/core/src/test/resources/sql-tests/inputs/postgreSQL/int2.sql
@@ -8,19 +8,23 @@
CREATE TABLE INT2_TBL(f1 smallint) USING parquet;

-- [SPARK-28023] Trim the string when cast string type to other types
INSERT INTO INT2_TBL VALUES (trim('0 '));
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT2_TBL VALUES (smallint(trim('0 ')));

INSERT INTO INT2_TBL VALUES (trim(' 1234 '));
INSERT INTO INT2_TBL VALUES (smallint(trim(' 1234 ')));

INSERT INTO INT2_TBL VALUES (trim(' -1234'));
INSERT INTO INT2_TBL VALUES (smallint(trim(' -1234')));

-- [SPARK-27923] Invalid input syntax for type short throws exception at PostgreSQL
-- INSERT INTO INT2_TBL VALUES ('34.5');

-- largest and smallest values
INSERT INTO INT2_TBL VALUES ('32767');
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT2_TBL VALUES (smallint('32767'));

INSERT INTO INT2_TBL VALUES ('-32767');
INSERT INTO INT2_TBL VALUES (smallint('-32767'));

-- bad input values -- should give errors
-- INSERT INTO INT2_TBL VALUES ('100000');
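For illustration (not part of the patch): for the smallint column the patch keeps the existing trim() calls (see [SPARK-28023] above) and only wraps them in an explicit smallint() conversion, which is the shorthand for a CAST:

INSERT INTO INT2_TBL VALUES (smallint(trim(' 1234 ')));
INSERT INTO INT2_TBL VALUES (CAST(trim(' 1234 ') AS smallint));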
14 changes: 9 additions & 5 deletions sql/core/src/test/resources/sql-tests/inputs/postgreSQL/int4.sql
@@ -9,19 +9,23 @@
CREATE TABLE INT4_TBL(f1 int) USING parquet;

-- [SPARK-28023] Trim the string when cast string type to other types
INSERT INTO INT4_TBL VALUES (trim(' 0 '));
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT4_TBL VALUES (int(trim(' 0 ')));

INSERT INTO INT4_TBL VALUES (trim('123456 '));
INSERT INTO INT4_TBL VALUES (int(trim('123456 ')));

INSERT INTO INT4_TBL VALUES (trim(' -123456'));
INSERT INTO INT4_TBL VALUES (int(trim(' -123456')));

-- [SPARK-27923] Invalid input syntax for integer: "34.5" at PostgreSQL
-- INSERT INTO INT4_TBL(f1) VALUES ('34.5');

-- largest and smallest values
INSERT INTO INT4_TBL VALUES ('2147483647');
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT4_TBL VALUES (int('2147483647'));

INSERT INTO INT4_TBL VALUES ('-2147483647');
INSERT INTO INT4_TBL VALUES (int('-2147483647'));

-- [SPARK-27923] Spark SQL insert these bad inputs to NULL
-- bad input values
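For illustration (not part of the patch): the int column gets the same treatment with int(), i.e. an explicit cast of the trimmed string:

INSERT INTO INT4_TBL VALUES (int(trim('123456 ')));
INSERT INTO INT4_TBL VALUES (CAST(trim('123456 ') AS int));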
12 changes: 7 additions & 5 deletions sql/core/src/test/resources/sql-tests/inputs/postgreSQL/int8.sql
@@ -8,11 +8,13 @@
--
CREATE TABLE INT8_TBL(q1 bigint, q2 bigint) USING parquet;

INSERT INTO INT8_TBL VALUES(trim(' 123 '),trim(' 456'));
INSERT INTO INT8_TBL VALUES(trim('123 '),'4567890123456789');
INSERT INTO INT8_TBL VALUES('4567890123456789','123');
INSERT INTO INT8_TBL VALUES(+4567890123456789,'4567890123456789');
INSERT INTO INT8_TBL VALUES('+4567890123456789','-4567890123456789');
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT8_TBL VALUES(bigint(trim(' 123 ')),bigint(trim(' 456')));
INSERT INTO INT8_TBL VALUES(bigint(trim('123 ')),bigint('4567890123456789'));
INSERT INTO INT8_TBL VALUES(bigint('4567890123456789'),bigint('123'));
INSERT INTO INT8_TBL VALUES(+4567890123456789,bigint('4567890123456789'));
INSERT INTO INT8_TBL VALUES(bigint('+4567890123456789'),bigint('-4567890123456789'));

-- [SPARK-27923] Spark SQL insert there bad inputs to NULL
-- bad inputs
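For illustration (not part of the patch): in the bigint file every quoted value is wrapped in bigint(), while the bare literal +4567890123456789 is left untouched, presumably because it already parses as a bigint literal. Equivalent explicit forms:

INSERT INTO INT8_TBL VALUES (bigint('4567890123456789'), bigint('123'));
INSERT INTO INT8_TBL VALUES (CAST('4567890123456789' AS bigint), CAST('123' AS bigint));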