Skip to content

Commit 8bb4f3a

Browse files
committed
Run the test in different time zones
1 parent 0b61076 commit 8bb4f3a

File tree

1 file changed

+29
-22
lines changed

1 file changed

+29
-22
lines changed

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 29 additions & 22 deletions
Original file line number · Diff line number · Diff line change
@@ -27,7 +27,7 @@ import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
2727
import org.apache.spark.SparkException
2828
import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
2929
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
30 - import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
30 + import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeTestUtils}
3131
import org.apache.spark.sql.execution.DataSourceScanExec
3232
import org.apache.spark.sql.execution.command.ExplainCommand
3333
import org.apache.spark.sql.execution.datasources.LogicalRelation
@@ -1525,27 +1525,34 @@ class JDBCSuite extends QueryTest
15251525
}
15261526

15271527
test("parsing timestamp bounds") {
  // Run the same bound-parsing checks under several distinct session time zones to
  // catch any partition-boundary computation that implicitly depends on the JVM's
  // default zone. NOTE(review): assumes DateTimeTestUtils.outstandingTimezonesIds
  // covers UTC plus zones with non-trivial offsets — confirm against that utility.
  DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
    withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
      // Each tuple is (lowerBound, expectedMiddle, upperBound): the midpoint is the
      // value JDBCRelation should pick as the split point for numPartitions = 2.
      // Cases cover: space-separated timestamps, sub-second precision, the ISO 8601
      // 'T' separator with microseconds, and a pre-Gregorian-cutover year (1500).
      Seq(
        ("1972-07-04 03:30:00", "1972-07-15 20:50:32.5", "1972-07-27 14:11:05"),
        ("2019-01-20 12:00:00.502", "2019-01-20 12:00:00.751", "2019-01-20 12:00:01.000"),
        (
          "2019-01-20T00:00:00.123456",
          "2019-01-20 00:05:00.123456",
          "2019-01-20T00:10:00.123456"),
        ("1500-01-20T00:00:00.123456", "1500-01-20 00:05:00.123456", "1500-01-20T00:10:00.123456")
      ).foreach { case (lower, middle, upper) =>
        val df = spark.read.format("jdbc")
          .option("url", urlWithUserAndPass)
          .option("dbtable", "TEST.DATETIME")
          .option("partitionColumn", "t")
          .option("lowerBound", lower)
          .option("upperBound", upper)
          .option("numPartitions", 2)
          .load()

        df.logicalPlan match {
          case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) =>
            // Two partitions: rows below the midpoint (plus NULLs) and rows at or
            // above it. A plan of any other shape throws MatchError, failing the test.
            val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet
            assert(whereClauses === Set(
              s""""T" < '$middle' or "T" is null""",
              s""""T" >= '$middle'"""))
        }
      }
    }
  }
}

0 commit comments

Comments
 (0)