@@ -931,30 +931,28 @@ def eval(
                 yield result_table

         # Test with DataFrame API using named arguments
-        # TODO(SPARK-53426): Support named table argument with DataFrame API
-        # input_df = self.spark.range(3)  # [0, 1, 2]
-        # result_df = NamedArgsUDTF(table_data=input_df.asTable(), multiplier=lit(5))
+        input_df = self.spark.range(3)  # [0, 1, 2]
+        result_df = NamedArgsUDTF(table_data=input_df.asTable(), multiplier=lit(5))
         expected_df = self.spark.createDataFrame(
             [(0, 5), (5, 5), (10, 5)],
             "result_id bigint, multiplier_used int"
         )
-        # assertDataFrameEqual(result_df, expected_df)
+        assertDataFrameEqual(result_df, expected_df)

         # Test with DataFrame API using different named argument order
-        # TODO(SPARK-53426): Support named table argument with DataFrame API
-        # result_df2 = NamedArgsUDTF(multiplier=lit(3), table_data=input_df.asTable())
+        result_df2 = NamedArgsUDTF(multiplier=lit(3), table_data=input_df.asTable())
         expected_df2 = self.spark.createDataFrame(
             [(0, 3), (3, 3), (6, 3)],
             "result_id bigint, multiplier_used int"
         )
-        # assertDataFrameEqual(result_df2, expected_df2)
+        assertDataFrameEqual(result_df2, expected_df2)

         # Test SQL registration and usage with named arguments
         self.spark.udtf.register("test_named_args_udtf", NamedArgsUDTF)

         sql_result_df = self.spark.sql("""
             SELECT * FROM test_named_args_udtf(
-                table_data => TABLE(SELECT id FROM range(0, 3))
+                table_data => TABLE(SELECT id FROM range(0, 3)),
                 multiplier => 5
             )
         """)