@@ -116,13 +116,13 @@ class CometTemporalExpressionSuite extends CometTestBase with AdaptiveSparkPlanH
116116
// to_date on a date-formatted string literal must run natively in Comet.
// Constant folding is disabled so the literal expression actually reaches
// the Comet operator instead of being evaluated during planning.
test("to_date parses date literal") {
  withoutConstantFolding {
    checkSparkAnswerAndOperator("SELECT to_date('2026-01-30')")
  }
}
122122
// Same as the plain-literal case, but with an explicit datetime pattern
// ('yyyy/MM/dd'); verifies Comet handles the two-argument to_date natively.
test("to_date parses date literal with explicit format") {
  withoutConstantFolding {
    checkSparkAnswerAndOperator("SELECT to_date('2026/01/30', 'yyyy/MM/dd')")
  }
}
128128
@@ -136,7 +136,7 @@ class CometTemporalExpressionSuite extends CometTestBase with AdaptiveSparkPlanH
136136 .createDataFrame(spark.sparkContext.parallelize(data), schema)
137137 .createOrReplaceTempView(" string_tbl" )
138138
139- checkSparkAnswer (" SELECT dt_str, to_date(dt_str) FROM string_tbl" )
139+ checkSparkAnswerAndOperator (" SELECT dt_str, to_date(dt_str) FROM string_tbl" )
140140 }
141141 }
142142
@@ -151,14 +151,15 @@ class CometTemporalExpressionSuite extends CometTestBase with AdaptiveSparkPlanH
151151 .createDataFrame(spark.sparkContext.parallelize(data), schema)
152152 .createOrReplaceTempView(" string_tbl" )
153153
154- checkSparkAnswer(" SELECT dt_str, to_date(dt_str, 'yyyy/MM/dd') FROM string_tbl" )
154+ checkSparkAnswerAndOperator(
155+ " SELECT dt_str, to_date(dt_str, 'yyyy/MM/dd') FROM string_tbl" )
155156 }
156157 }
157158 }
158159
// A timestamp-shaped literal ('yyyy-MM-dd HH:mm:ss') should be truncated
// to its date part by to_date; assert Comet executes it natively and
// matches Spark's answer.
test("to_date parses timestamp literal string") {
  withoutConstantFolding {
    checkSparkAnswerAndOperator("SELECT to_date('2026-01-30 04:17:52')")
  }
}
164165
@@ -176,7 +177,49 @@ class CometTemporalExpressionSuite extends CometTestBase with AdaptiveSparkPlanH
176177 .createDataFrame(spark.sparkContext.parallelize(data), schema)
177178 .createOrReplaceTempView(" string_tbl" )
178179
179- checkSparkAnswer(" SELECT dt_str, to_date(dt_str) FROM string_tbl" )
180+ checkSparkAnswerAndOperator(" SELECT dt_str, to_date(dt_str) FROM string_tbl" )
181+ }
182+ }
183+
// With ANSI mode off, Spark's to_date yields NULL for unparseable input
// rather than failing the query. The temp view mixes a valid date, a
// malformed string, and a NULL to cover all three paths; Comet must
// execute natively and agree with Spark on every row.
test("to_date returns null for malformed input when ANSI is disabled") {
  withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
    withTempView("string_tbl") {
      val schema =
        StructType(Seq(StructField("dt_str", DataTypes.StringType, nullable = true)))
      val rows = Seq(Row("2026-01-30"), Row("malformed"), Row(null))

      spark
        .createDataFrame(spark.sparkContext.parallelize(rows), schema)
        .createOrReplaceTempView("string_tbl")

      checkSparkAnswerAndOperator("SELECT dt_str, to_date(dt_str) FROM string_tbl")
    }
  }
}
199+
// With ANSI mode on, malformed input must make to_date fail. Both engines
// are expected to throw; we only require that each error message mentions
// "date", since the exact exception text differs between Spark and Comet.
test("to_date throws for malformed input when ANSI is enabled") {
  withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
    withTempView("string_tbl") {
      val schema =
        StructType(Seq(StructField("dt_str", DataTypes.StringType, nullable = true)))
      val rows = Seq(Row("2026-01-30"), Row("malformed"), Row(null))

      spark
        .createDataFrame(spark.sparkContext.parallelize(rows), schema)
        .createOrReplaceTempView("string_tbl")

      val (sparkErr, cometErr) =
        checkSparkAnswerMaybeThrows(sql("SELECT dt_str, to_date(dt_str) FROM string_tbl"))
      (sparkErr, cometErr) match {
        case (Some(se), Some(ce)) =>
          // Both threw, as required in ANSI mode; sanity-check the messages.
          assert(se.getMessage.toLowerCase.contains("date"))
          assert(ce.getMessage.toLowerCase.contains("date"))
        case (Some(_), None) =>
          fail("Expected Comet to throw when Spark throws")
        case (None, Some(ce)) =>
          // Comet failed where Spark succeeded — surface the real error.
          throw ce
        case _ =>
          fail("Expected both Spark and Comet to throw in ANSI mode")
      }
    }
  }
}
182225
0 commit comments