Skip to content

Commit d199f7b

Browse files
committed
fix.
1 parent 2b2dd08 commit d199f7b

File tree

3 files changed

+26
-4
lines changed

3 files changed

+26
-4
lines changed

sql/core/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,7 @@
115115
<dependency>
116116
<groupId>com.h2database</groupId>
117117
<artifactId>h2</artifactId>
118-
<version>1.4.183</version>
118+
<version>1.4.195</version>
119119
<scope>test</scope>
120120
</dependency>
121121
<dependency>

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.sql.execution.datasources.jdbc
1919

20-
import java.sql.{Connection, Driver, DriverManager, PreparedStatement, ResultSet, ResultSetMetaData, SQLException}
20+
import java.sql.{Connection, Driver, DriverManager, JDBCType, PreparedStatement, ResultSet, ResultSetMetaData, SQLException}
2121
import java.util.Locale
2222

2323
import scala.collection.JavaConverters._
@@ -230,7 +230,9 @@ object JdbcUtils extends Logging {
230230
// scalastyle:on
231231
}
232232

233-
if (answer == null) throw new SQLException("Unsupported type " + sqlType)
233+
if (answer == null) {
234+
throw new SQLException("Unsupported type " + JDBCType.valueOf(sqlType).getName)
235+
}
234236
answer
235237
}
236238

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 21 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package org.apache.spark.sql.jdbc
1919

2020
import java.math.BigDecimal
21-
import java.sql.{Date, DriverManager, Timestamp}
21+
import java.sql.{Date, DriverManager, SQLException, Timestamp}
2222
import java.util.{Calendar, GregorianCalendar, Properties}
2323

2424
import org.h2.jdbc.JdbcSQLException
@@ -141,6 +141,15 @@ class JDBCSuite extends SparkFunSuite
141141
|OPTIONS (url '$url', dbtable 'TEST.TIMETYPES', user 'testUser', password 'testPass')
142142
""".stripMargin.replaceAll("\n", " "))
143143

144+
conn.prepareStatement("CREATE TABLE test.timezone (tz TIMESTAMP WITH TIME ZONE) " +
145+
"AS SELECT '1999-01-08 04:05:06.543543543 GMT-08:00'")
146+
.executeUpdate()
147+
conn.commit()
148+
149+
conn.prepareStatement("CREATE TABLE test.array (ar ARRAY) " +
150+
"AS SELECT '(1, 2, 3)'")
151+
.executeUpdate()
152+
conn.commit()
144153

145154
conn.prepareStatement("create table test.flttypes (a DOUBLE, b REAL, c DECIMAL(38, 18))"
146155
).executeUpdate()
@@ -919,6 +928,17 @@ class JDBCSuite extends SparkFunSuite
919928
assert(res === (foobarCnt, 0L, foobarCnt) :: Nil)
920929
}
921930

931+
test("unsupported types") {
932+
var e = intercept[SQLException] {
933+
spark.read.jdbc(urlWithUserAndPass, "TEST.TIMEZONE", new Properties()).collect()
934+
}.getMessage
935+
assert(e.contains("Unsupported type TIMESTAMP_WITH_TIMEZONE"))
936+
e = intercept[SQLException] {
937+
spark.read.jdbc(urlWithUserAndPass, "TEST.ARRAY", new Properties()).collect()
938+
}.getMessage
939+
assert(e.contains("Unsupported type ARRAY"))
940+
}
941+
922942
test("SPARK-19318: Connection properties keys should be case-sensitive.") {
923943
def testJdbcOptions(options: JDBCOptions): Unit = {
924944
// Spark JDBC data source options are case-insensitive

0 commit comments

Comments (0)