
Commit 0c9af99

cloud-fan authored and dongjoon-hyun committed
[SPARK-51585][SQL][FOLLOWUP] Turn on ANSI mode in DockerJDBCIntegrationV2Suite
### What changes were proposed in this pull request?

This is a followup of #50353 to fix tests in the non-ANSI CI job. We should follow `JDBCV2JoinPushdownIntegrationSuiteBase` and always enable ANSI in JDBC v2 tests. The DS v2 pushdown framework can only translate ANSI expressions, so it doesn't make sense to test them with ANSI off. This PR reverts #52012 to simplify the tests.

### Why are the changes needed?

Make the tests more stable.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Existing tests.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #52146 from cloud-fan/follow.

Authored-by: Wenchen Fan <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 51b5f30 commit 0c9af99
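
The whole change hinges on Spark's ANSI flag. As a minimal, standalone sketch (not part of this patch), the same switch can be flipped on a local session via the public `spark.sql.ansi.enabled` key that backs `SQLConf.ANSI_ENABLED`; the object and app name below are illustrative:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

// Illustrative only: enable ANSI mode the way the test base class now does,
// but on a plain local SparkSession. "ansi-jdbc-v2-demo" is just a sample app name.
object AnsiEnabledSessionExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("ansi-jdbc-v2-demo")
      .set("spark.sql.ansi.enabled", "true") // string key behind SQLConf.ANSI_ENABLED

    val spark = SparkSession.builder().config(conf).getOrCreate()
    // With ANSI on, datetime predicates such as month(<date column>) keep a form
    // the DS v2 pushdown framework can translate for JDBC sources.
    println(spark.conf.get("spark.sql.ansi.enabled")) // prints "true"
    spark.stop()
  }
}
```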

File tree

2 files changed, +37 −44 lines changed

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DockerJDBCIntegrationV2Suite.scala

Lines changed: 7 additions & 0 deletions
```diff
@@ -19,10 +19,17 @@ package org.apache.spark.sql.jdbc.v2
 
 import java.sql.Connection
 
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite
 
 abstract class DockerJDBCIntegrationV2Suite extends DockerJDBCIntegrationSuite {
 
+  override def sparkConf: SparkConf = super.sparkConf
+    // DS V2 relies on ANSI mode to translate expressions, we should always
+    // run JDBC v2 tests with ANSI on.
+    .set(SQLConf.ANSI_ENABLED, true)
+
   /**
    * Prepare databases and tables for testing.
    */
```
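
One note on the hunk above: because the ANSI flag is now set in the base class's `sparkConf`, a suite that adds its own settings should chain onto `super.sparkConf` rather than build a fresh `SparkConf`, or the flag is lost. A small self-contained sketch of that pattern; `BaseV2Suite`, `MyDialectSuite`, and the timezone setting are illustrative stand-ins, not code from this commit:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.sql.internal.SQLConf

// Hypothetical stand-ins for the real test hierarchy, showing only the
// sparkConf chaining pattern used in the patch above.
abstract class BaseV2Suite {
  def sparkConf: SparkConf = new SparkConf()
    .set(SQLConf.ANSI_ENABLED.key, "true") // same flag the base suite now forces on
}

class MyDialectSuite extends BaseV2Suite {
  // Chain onto super.sparkConf so the inherited ANSI setting is preserved.
  override def sparkConf: SparkConf = super.sparkConf
    .set(SQLConf.SESSION_LOCAL_TIMEZONE.key, "UTC") // illustrative extra setting
}
```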

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala

Lines changed: 30 additions & 44 deletions
```diff
@@ -22,10 +22,8 @@ import java.util.Locale
 
 import org.apache.spark.{SparkConf, SparkRuntimeException}
 import org.apache.spark.sql.{AnalysisException, Row}
-import org.apache.spark.sql.catalyst.expressions.EvalMode
 import org.apache.spark.sql.catalyst.util.CharVarcharUtils.CHAR_VARCHAR_TYPE_STRING_METADATA_KEY
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
-import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.jdbc.OracleDatabaseOnDocker
 import org.apache.spark.sql.types._
 import org.apache.spark.tags.DockerTest
@@ -205,10 +203,6 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
     }
   }
 
-  // Oracle only supports TimestampType so `month(date)` will be analyzed to
-  // `month(cast(date) as date)` and cast is not pushdownable in non-ansi mode
-  private def ansiMode: Boolean = EvalMode.fromSQLConf(SQLConf.get) == EvalMode.ANSI
-
   override def testDatetime(tbl: String): Unit = {
     val df1 = sql(s"SELECT name FROM $tbl WHERE " +
       "dayofyear(date1) > 100 AND dayofmonth(date1) > 10 ")
@@ -225,14 +219,12 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
     assert(rows2(0).getString(0) === "amy")
     assert(rows2(1).getString(0) === "alex")
 
-    if (ansiMode) {
-      val df3 = sql(s"SELECT name FROM $tbl WHERE month(date1) = 5")
-      checkFilterPushed(df3)
-      val rows3 = df3.collect()
-      assert(rows3.length === 2)
-      assert(rows3(0).getString(0) === "amy")
-      assert(rows3(1).getString(0) === "alex")
-    }
+    val df3 = sql(s"SELECT name FROM $tbl WHERE month(date1) = 5")
+    checkFilterPushed(df3)
+    val rows3 = df3.collect()
+    assert(rows3.length === 2)
+    assert(rows3(0).getString(0) === "amy")
+    assert(rows3(1).getString(0) === "alex")
 
     val df4 = sql(s"SELECT name FROM $tbl WHERE hour(time1) = 0 AND minute(time1) = 0")
     checkFilterPushed(df4)
@@ -304,30 +296,26 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
       assert(rows(0).getString(0) === "amy")
     }
 
-    if (ansiMode) {
-      withClue("dayofmonth") {
-        val dom = sql(s"SELECT dayofmonth(date1) FROM $tbl WHERE name = 'amy'")
-          .collect().head.getInt(0)
-        val df = sql(s"SELECT name FROM $tbl WHERE dayofmonth(date1) = $dom")
-        checkFilterPushed(df)
-        val rows = df.collect()
-        assert(rows.length === 1)
-        assert(rows(0).getString(0) === "amy")
-      }
+    withClue("dayofmonth") {
+      val dom = sql(s"SELECT dayofmonth(date1) FROM $tbl WHERE name = 'amy'")
+        .collect().head.getInt(0)
+      val df = sql(s"SELECT name FROM $tbl WHERE dayofmonth(date1) = $dom")
+      checkFilterPushed(df)
+      val rows = df.collect()
+      assert(rows.length === 1)
+      assert(rows(0).getString(0) === "amy")
     }
 
-    if (ansiMode) {
-      withClue("year") {
-        val year = sql(s"SELECT year(date1) FROM $tbl WHERE name = 'amy'")
-          .collect().head.getInt(0)
-        val df = sql(s"SELECT name FROM $tbl WHERE year(date1) = $year")
-        checkFilterPushed(df)
-        val rows = df.collect()
-        assert(rows.length === 3)
-        assert(rows(0).getString(0) === "amy")
-        assert(rows5(1).getString(0) === "alex")
-        assert(rows5(2).getString(0) === "tom")
-      }
+    withClue("year") {
+      val year = sql(s"SELECT year(date1) FROM $tbl WHERE name = 'amy'")
+        .collect().head.getInt(0)
+      val df = sql(s"SELECT name FROM $tbl WHERE year(date1) = $year")
+      checkFilterPushed(df)
+      val rows = df.collect()
+      assert(rows.length === 3)
+      assert(rows(0).getString(0) === "amy")
+      assert(rows5(1).getString(0) === "alex")
+      assert(rows5(2).getString(0) === "tom")
     }
 
     withClue("second") {
@@ -346,13 +334,11 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
     assert(rows9.length === 1)
    assert(rows9(0).getString(0) === "alex")
 
-    if (ansiMode) {
-      val df10 = sql(s"SELECT name FROM $tbl WHERE trunc(date1, 'week') = date'2022-05-16'")
-      checkFilterPushed(df10)
-      val rows10 = df10.collect()
-      assert(rows10.length === 2)
-      assert(rows10(0).getString(0) === "amy")
-      assert(rows10(1).getString(0) === "alex")
-    }
+    val df10 = sql(s"SELECT name FROM $tbl WHERE trunc(date1, 'week') = date'2022-05-16'")
+    checkFilterPushed(df10)
+    val rows10 = df10.collect()
+    assert(rows10.length === 2)
+    assert(rows10(0).getString(0) === "amy")
+    assert(rows10(1).getString(0) === "alex")
   }
 }
```
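
For context on what `checkFilterPushed` is asserting in the now-unconditional branches: once a predicate such as `month(date1) = 5` has been translated and handed off to Oracle, the filtering no longer needs to happen on the Spark side. A rough sketch of that kind of check, assuming pushed predicates leave no `Filter` node in the optimized logical plan (this approximates the shared helper, it is not its actual source):

```scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.plans.logical.Filter

// Sketch only: an approximation of the kind of assertion checkFilterPushed makes.
// If a predicate was handed off to the JDBC source, no Filter node should remain
// in the optimized logical plan.
object PushdownCheckSketch {
  def assertFilterPushed(df: DataFrame): Unit = {
    val remaining = df.queryExecution.optimizedPlan.collect { case f: Filter => f }
    assert(remaining.isEmpty, s"expected the filter to be pushed down, found: $remaining")
  }
}
```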
