Skip to content

Commit bc05ef8

Browse files
authored
test: extract conditional expression tests (#2807)
1 parent 07f95b9 commit bc05ef8

File tree

6 files changed: +226 additions, −92 deletions

.github/workflows/pr_build_linux.yml

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -133,28 +133,33 @@ jobs:
133133
org.apache.comet.exec.CometExecSuite
134134
org.apache.comet.exec.CometWindowExecSuite
135135
org.apache.comet.exec.CometJoinSuite
136-
org.apache.comet.CometArrayExpressionSuite
137-
org.apache.comet.CometCastSuite
138-
org.apache.comet.CometExpressionSuite
139-
org.apache.comet.CometExpressionCoverageSuite
140-
org.apache.comet.CometMathExpressionSuite
141136
org.apache.comet.CometNativeSuite
142137
org.apache.comet.CometSparkSessionExtensionsSuite
143-
org.apache.comet.CometStringExpressionSuite
144138
org.apache.spark.CometPluginsSuite
145139
org.apache.spark.CometPluginsDefaultSuite
146140
org.apache.spark.CometPluginsNonOverrideSuite
147141
org.apache.spark.CometPluginsUnifiedModeOverrideSuite
148-
org.apache.comet.CometTemporalExpressionSuite
149142
org.apache.spark.sql.CometTPCDSQuerySuite
150143
org.apache.spark.sql.CometTPCDSQueryTestSuite
151144
org.apache.spark.sql.CometTPCHQuerySuite
152145
org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite
153146
org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite
154147
org.apache.spark.sql.comet.CometTaskMetricsSuite
148+
org.apache.comet.objectstore.NativeConfigSuite
149+
- name: "expressions"
150+
value: |
151+
org.apache.comet.CometExpressionSuite
152+
org.apache.comet.CometExpressionCoverageSuite
153+
org.apache.comet.CometTemporalExpressionSuite
154+
org.apache.comet.CometArrayExpressionSuite
155+
org.apache.comet.CometCastSuite
156+
org.apache.comet.CometMathExpressionSuite
157+
org.apache.comet.CometStringExpressionSuite
155158
org.apache.comet.CometBitwiseExpressionSuite
156159
org.apache.comet.CometMapExpressionSuite
157-
org.apache.comet.objectstore.NativeConfigSuite
160+
org.apache.comet.expressions.conditional.CometIfSuite
161+
org.apache.comet.expressions.conditional.CometCoalesceSuite
162+
org.apache.comet.expressions.conditional.CometCaseWhenSuite
158163
- name: "sql"
159164
value: |
160165
org.apache.spark.sql.CometToPrettyStringSuite

.github/workflows/pr_build_macos.yml

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -98,28 +98,33 @@ jobs:
9898
org.apache.comet.exec.CometExecSuite
9999
org.apache.comet.exec.CometWindowExecSuite
100100
org.apache.comet.exec.CometJoinSuite
101-
org.apache.comet.CometArrayExpressionSuite
102-
org.apache.comet.CometCastSuite
103-
org.apache.comet.CometExpressionSuite
104-
org.apache.comet.CometExpressionCoverageSuite
105-
org.apache.comet.CometMathExpressionSuite
106101
org.apache.comet.CometNativeSuite
107102
org.apache.comet.CometSparkSessionExtensionsSuite
108-
org.apache.comet.CometStringExpressionSuite
109103
org.apache.spark.CometPluginsSuite
110104
org.apache.spark.CometPluginsDefaultSuite
111105
org.apache.spark.CometPluginsNonOverrideSuite
112106
org.apache.spark.CometPluginsUnifiedModeOverrideSuite
113-
org.apache.comet.CometTemporalExpressionSuite
114107
org.apache.spark.sql.CometTPCDSQuerySuite
115108
org.apache.spark.sql.CometTPCDSQueryTestSuite
116109
org.apache.spark.sql.CometTPCHQuerySuite
117110
org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite
118111
org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite
119112
org.apache.spark.sql.comet.CometTaskMetricsSuite
113+
org.apache.comet.objectstore.NativeConfigSuite
114+
- name: "expressions"
115+
value: |
116+
org.apache.comet.CometExpressionSuite
117+
org.apache.comet.CometExpressionCoverageSuite
118+
org.apache.comet.CometTemporalExpressionSuite
119+
org.apache.comet.CometArrayExpressionSuite
120+
org.apache.comet.CometCastSuite
121+
org.apache.comet.CometMathExpressionSuite
122+
org.apache.comet.CometStringExpressionSuite
120123
org.apache.comet.CometBitwiseExpressionSuite
121124
org.apache.comet.CometMapExpressionSuite
122-
org.apache.comet.objectstore.NativeConfigSuite
125+
org.apache.comet.expressions.conditional.CometIfSuite
126+
org.apache.comet.expressions.conditional.CometCoalesceSuite
127+
org.apache.comet.expressions.conditional.CometCaseWhenSuite
123128
- name: "sql"
124129
value: |
125130
org.apache.spark.sql.CometToPrettyStringSuite

spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala

Lines changed: 0 additions & 76 deletions
Original file line numberDiff line numberDiff line change
@@ -156,19 +156,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
156156
}
157157
}
158158

159-
test("coalesce should return correct datatype") {
160-
Seq(true, false).foreach { dictionaryEnabled =>
161-
withTempDir { dir =>
162-
val path = new Path(dir.toURI.toString, "test.parquet")
163-
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = dictionaryEnabled, 10000)
164-
withParquetTable(path.toString, "tbl") {
165-
checkSparkAnswerAndOperator(
166-
"SELECT coalesce(cast(_18 as date), cast(_19 as date), _20) FROM tbl")
167-
}
168-
}
169-
}
170-
}
171-
172159
test("decimals divide by zero") {
173160
Seq(true, false).foreach { dictionary =>
174161
withSQLConf(
@@ -470,18 +457,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
470457
}
471458
}
472459

473-
test("test coalesce lazy eval") {
474-
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
475-
val data = Seq((9999999999999L, 0))
476-
withParquetTable(data, "t1") {
477-
val res = spark.sql("""
478-
|SELECT coalesce(_1, CAST(_1 AS TINYINT)) from t1;
479-
| """.stripMargin)
480-
checkSparkAnswerAndOperator(res)
481-
}
482-
}
483-
}
484-
485460
test("dictionary arithmetic") {
486461
// TODO: test ANSI mode
487462
withSQLConf(SQLConf.ANSI_ENABLED.key -> "false", "parquet.enable.dictionary" -> "true") {
@@ -522,15 +497,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
522497
}
523498
}
524499

525-
test("string with coalesce") {
526-
withParquetTable(
527-
(0 until 10).map(i => (i.toString, if (i > 5) None else Some((i + 100).toString))),
528-
"tbl") {
529-
checkSparkAnswerAndOperator(
530-
"SELECT coalesce(_1), coalesce(_1, 1), coalesce(null, _1), coalesce(null, 1), coalesce(_2, _1), coalesce(null) FROM tbl")
531-
}
532-
}
533-
534500
test("substring with dictionary") {
535501
val data = (0 until 1000)
536502
.map(_ % 5) // reduce value space to trigger dictionary encoding
@@ -1635,30 +1601,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
16351601
}
16361602
}
16371603

1638-
test("case_when") {
1639-
Seq(false, true).foreach { dictionary =>
1640-
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
1641-
val table = "test"
1642-
withTable(table) {
1643-
sql(s"create table $table(id int) using parquet")
1644-
sql(s"insert into $table values(1), (NULL), (2), (2), (3), (3), (4), (5), (NULL)")
1645-
checkSparkAnswerAndOperator(
1646-
s"SELECT CASE WHEN id > 2 THEN 3333 WHEN id > 1 THEN 2222 ELSE 1111 END FROM $table")
1647-
checkSparkAnswerAndOperator(
1648-
s"SELECT CASE WHEN id > 2 THEN NULL WHEN id > 1 THEN 2222 ELSE 1111 END FROM $table")
1649-
checkSparkAnswerAndOperator(
1650-
s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE 3333 END FROM $table")
1651-
checkSparkAnswerAndOperator(
1652-
s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE NULL END FROM $table")
1653-
checkSparkAnswerAndOperator(
1654-
s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 WHEN 3 THEN 3333 WHEN 4 THEN 4444 END FROM $table")
1655-
checkSparkAnswerAndOperator(
1656-
s"SELECT CASE id WHEN NULL THEN 0 WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE 3333 END FROM $table")
1657-
}
1658-
}
1659-
}
1660-
}
1661-
16621604
test("not") {
16631605
Seq(false, true).foreach { dictionary =>
16641606
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
@@ -1685,24 +1627,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
16851627
}
16861628
}
16871629

1688-
test("conditional expressions") {
1689-
Seq(false, true).foreach { dictionary =>
1690-
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
1691-
val table = "test1"
1692-
withTable(table) {
1693-
sql(s"create table $table(c1 int, c2 string, c3 int) using parquet")
1694-
sql(
1695-
s"insert into $table values(1, 'comet', 1), (2, 'comet', 3), (null, 'spark', 4)," +
1696-
" (null, null, 4), (2, 'spark', 3), (2, 'comet', 3)")
1697-
checkSparkAnswerAndOperator(s"SELECT if (c1 < 2, 1111, 2222) FROM $table")
1698-
checkSparkAnswerAndOperator(s"SELECT if (c1 < c3, 1111, 2222) FROM $table")
1699-
checkSparkAnswerAndOperator(
1700-
s"SELECT if (c2 == 'comet', 'native execution', 'non-native execution') FROM $table")
1701-
}
1702-
}
1703-
}
1704-
}
1705-
17061630
test("basic arithmetic") {
17071631
withSQLConf("parquet.enable.dictionary" -> "false") {
17081632
withParquetTable((1 until 10).map(i => (i, i + 1)), "tbl", false) {
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing,
13+
* software distributed under the License is distributed on an
14+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
* KIND, either express or implied. See the License for the
16+
* specific language governing permissions and limitations
17+
* under the License.
18+
*/
19+
20+
package org.apache.comet.expressions.conditional
21+
22+
import org.scalactic.source.Position
23+
import org.scalatest.Tag
24+
25+
import org.apache.spark.sql.CometTestBase
26+
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
27+
28+
import org.apache.comet.CometConf
29+
30+
class CometCaseWhenSuite extends CometTestBase with AdaptiveSparkPlanHelper {
31+
32+
override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
33+
pos: Position): Unit = {
34+
super.test(testName, testTags: _*) {
35+
withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_AUTO) {
36+
testFun
37+
}
38+
}
39+
}
40+
41+
test("case_when") {
42+
Seq(false, true).foreach { dictionary =>
43+
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
44+
val table = "test"
45+
withTable(table) {
46+
sql(s"create table $table(id int) using parquet")
47+
sql(s"insert into $table values(1), (NULL), (2), (2), (3), (3), (4), (5), (NULL)")
48+
checkSparkAnswerAndOperator(
49+
s"SELECT CASE WHEN id > 2 THEN 3333 WHEN id > 1 THEN 2222 ELSE 1111 END FROM $table")
50+
checkSparkAnswerAndOperator(
51+
s"SELECT CASE WHEN id > 2 THEN NULL WHEN id > 1 THEN 2222 ELSE 1111 END FROM $table")
52+
checkSparkAnswerAndOperator(
53+
s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE 3333 END FROM $table")
54+
checkSparkAnswerAndOperator(
55+
s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE NULL END FROM $table")
56+
checkSparkAnswerAndOperator(
57+
s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 WHEN 3 THEN 3333 WHEN 4 THEN 4444 END FROM $table")
58+
checkSparkAnswerAndOperator(
59+
s"SELECT CASE id WHEN NULL THEN 0 WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE 3333 END FROM $table")
60+
}
61+
}
62+
}
63+
}
64+
65+
}
Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing,
13+
* software distributed under the License is distributed on an
14+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
* KIND, either express or implied. See the License for the
16+
* specific language governing permissions and limitations
17+
* under the License.
18+
*/
19+
20+
package org.apache.comet.expressions.conditional
21+
22+
import org.scalactic.source.Position
23+
import org.scalatest.Tag
24+
25+
import org.apache.hadoop.fs.Path
26+
import org.apache.spark.sql.CometTestBase
27+
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
28+
import org.apache.spark.sql.internal.SQLConf
29+
30+
import org.apache.comet.CometConf
31+
32+
class CometCoalesceSuite extends CometTestBase with AdaptiveSparkPlanHelper {
33+
34+
override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
35+
pos: Position): Unit = {
36+
super.test(testName, testTags: _*) {
37+
withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_AUTO) {
38+
testFun
39+
}
40+
}
41+
}
42+
43+
test("coalesce should return correct datatype") {
44+
Seq(true, false).foreach { dictionaryEnabled =>
45+
withTempDir { dir =>
46+
val path = new Path(dir.toURI.toString, "test.parquet")
47+
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = dictionaryEnabled, 10000)
48+
withParquetTable(path.toString, "tbl") {
49+
checkSparkAnswerAndOperator(
50+
"SELECT coalesce(cast(_18 as date), cast(_19 as date), _20) FROM tbl")
51+
}
52+
}
53+
}
54+
}
55+
56+
test("test coalesce lazy eval") {
57+
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
58+
val data = Seq((9999999999999L, 0))
59+
withParquetTable(data, "t1") {
60+
val res = spark.sql("""
61+
|SELECT coalesce(_1, CAST(_1 AS TINYINT)) from t1;
62+
| """.stripMargin)
63+
checkSparkAnswerAndOperator(res)
64+
}
65+
}
66+
}
67+
68+
test("string with coalesce") {
69+
withParquetTable(
70+
(0 until 10).map(i => (i.toString, if (i > 5) None else Some((i + 100).toString))),
71+
"tbl") {
72+
checkSparkAnswerAndOperator(
73+
"SELECT coalesce(_1), coalesce(_1, 1), coalesce(null, _1), coalesce(null, 1), coalesce(_2, _1), coalesce(null) FROM tbl")
74+
}
75+
}
76+
77+
}
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing,
13+
* software distributed under the License is distributed on an
14+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
* KIND, either express or implied. See the License for the
16+
* specific language governing permissions and limitations
17+
* under the License.
18+
*/
19+
20+
package org.apache.comet.expressions.conditional
21+
22+
import org.scalactic.source.Position
23+
import org.scalatest.Tag
24+
25+
import org.apache.spark.sql.CometTestBase
26+
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
27+
28+
import org.apache.comet.CometConf
29+
30+
class CometIfSuite extends CometTestBase with AdaptiveSparkPlanHelper {
31+
32+
override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
33+
pos: Position): Unit = {
34+
super.test(testName, testTags: _*) {
35+
withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_AUTO) {
36+
testFun
37+
}
38+
}
39+
}
40+
41+
test("if expression") {
42+
Seq(false, true).foreach { dictionary =>
43+
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
44+
val table = "test1"
45+
withTable(table) {
46+
sql(s"create table $table(c1 int, c2 string, c3 int) using parquet")
47+
sql(
48+
s"insert into $table values(1, 'comet', 1), (2, 'comet', 3), (null, 'spark', 4)," +
49+
" (null, null, 4), (2, 'spark', 3), (2, 'comet', 3)")
50+
checkSparkAnswerAndOperator(s"SELECT if (c1 < 2, 1111, 2222) FROM $table")
51+
checkSparkAnswerAndOperator(s"SELECT if (c1 < c3, 1111, 2222) FROM $table")
52+
checkSparkAnswerAndOperator(
53+
s"SELECT if (c2 == 'comet', 'native execution', 'non-native execution') FROM $table")
54+
}
55+
}
56+
}
57+
}
58+
}

0 commit comments

Comments (0)