You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
[SPARK-52590][SQL][TESTS] Add SQL query tests for SQL functions without explicit return types
### What changes were proposed in this pull request?
SQL UDFs support implicit return types. This PR adds more tests on this.
### Why are the changes needed?
To improve test coverage
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Test only
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #51296 from allisonwang-db/spark-52590-opt-return-type-test.
Authored-by: Allison Wang <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
Project [spark_catalog.default.foo1_11b() AS spark_catalog.default.foo1_11b()#x]
1720
+
+- Project
1721
+
+- OneRowRelation
1722
+
1723
+
1724
+
-- !query
1725
+
CREATE OR REPLACE FUNCTION foo1_11c(a INT, b INT) RETURN a + b
1726
+
-- !query analysis
1727
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11c, a INT, b INT, , a + b, false, false, false, true
1728
+
1729
+
1730
+
-- !query
1731
+
SELECT foo1_11c(3, 5)
1732
+
-- !query analysis
1733
+
Project [spark_catalog.default.foo1_11c(a#x, b#x) AS spark_catalog.default.foo1_11c(3, 5)#x]
1734
+
+- Project [cast(3 as int) AS a#x, cast(5 as int) AS b#x]
1735
+
+- OneRowRelation
1736
+
1737
+
1738
+
-- !query
1739
+
CREATE OR REPLACE FUNCTION foo1_11d(a DOUBLE, b INT) RETURN a * b + 1.5
1740
+
-- !query analysis
1741
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11d, a DOUBLE, b INT, , a * b + 1.5, false, false, false, true
1742
+
1743
+
1744
+
-- !query
1745
+
SELECT foo1_11d(3.0, 5)
1746
+
-- !query analysis
1747
+
Project [spark_catalog.default.foo1_11d(a#x, b#x) AS spark_catalog.default.foo1_11d(3.0, 5)#x]
1748
+
+- Project [cast(3.0 as double) AS a#x, cast(5 as int) AS b#x]
1749
+
+- OneRowRelation
1750
+
1751
+
1752
+
-- !query
1753
+
CREATE OR REPLACE FUNCTION foo1_11e(a INT) RETURN a > 10
1754
+
-- !query analysis
1755
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11e, a INT, , a > 10, false, false, false, true
1756
+
1757
+
1758
+
-- !query
1759
+
SELECT foo1_11e(15), foo1_11e(5)
1760
+
-- !query analysis
1761
+
Project [spark_catalog.default.foo1_11e(a#x) AS spark_catalog.default.foo1_11e(15)#x, spark_catalog.default.foo1_11e(a#x) AS spark_catalog.default.foo1_11e(5)#x]
1762
+
+- Project [cast(15 as int) AS a#x, cast(5 as int) AS a#x]
1763
+
+- OneRowRelation
1764
+
1765
+
1766
+
-- !query
1767
+
CREATE OR REPLACE FUNCTION foo1_11f(d DATE) RETURN d + INTERVAL '1' DAY
1768
+
-- !query analysis
1769
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11f, d DATE, , d + INTERVAL '1' DAY, false, false, false, true
1770
+
1771
+
1772
+
-- !query
1773
+
SELECT foo1_11f(DATE '2024-01-01')
1774
+
-- !query analysis
1775
+
[Analyzer test output redacted due to nondeterminism]
1776
+
1777
+
1778
+
-- !query
1779
+
CREATE OR REPLACE FUNCTION foo1_11g(n INT) RETURN ARRAY(1, 2, n)
Project [spark_catalog.default.foo1_11g(n#x) AS spark_catalog.default.foo1_11g(5)#x]
1788
+
+- Project [cast(5 as int) AS n#x]
1789
+
+- OneRowRelation
1790
+
1791
+
1792
+
-- !query
1793
+
CREATE OR REPLACE FUNCTION foo1_11h(a INT, b STRING) RETURN STRUCT(a, b)
1794
+
-- !query analysis
1795
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11h, a INT, b STRING, , STRUCT(a, b), false, false, false, true
1796
+
1797
+
1798
+
-- !query
1799
+
SELECT foo1_11h(1, 'test')
1800
+
-- !query analysis
1801
+
Project [spark_catalog.default.foo1_11h(a#x, b#x) AS spark_catalog.default.foo1_11h(1, test)#x]
1802
+
+- Project [cast(1 as int) AS a#x, cast(test as string) AS b#x]
1803
+
+- OneRowRelation
1804
+
1805
+
1806
+
-- !query
1807
+
CREATE OR REPLACE FUNCTION foo1_11i(x INT) RETURN (SELECT x * 2)
1808
+
-- !query analysis
1809
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11i, x INT, , (SELECT x * 2), false, false, false, true
1810
+
1811
+
1812
+
-- !query
1813
+
SELECT foo1_11i(5)
1814
+
-- !query analysis
1815
+
Project [spark_catalog.default.foo1_11i(x#x) AS spark_catalog.default.foo1_11i(5)#x]
1816
+
+- Project [cast(5 as int) AS x#x]
1817
+
+- OneRowRelation
1818
+
1819
+
1820
+
-- !query
1821
+
CREATE OR REPLACE FUNCTION foo1_11j(s STRING) RETURN UPPER(s)
1822
+
-- !query analysis
1823
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11j, s STRING, , UPPER(s), false, false, false, true
1824
+
1825
+
1826
+
-- !query
1827
+
SELECT foo1_11j('hello')
1828
+
-- !query analysis
1829
+
Project [spark_catalog.default.foo1_11j(s#x) AS spark_catalog.default.foo1_11j(hello)#x]
1830
+
+- Project [cast(hello as string) AS s#x]
1831
+
+- OneRowRelation
1832
+
1833
+
1834
+
-- !query
1835
+
CREATE OR REPLACE FUNCTION foo1_11k(a INT, b STRING) RETURN CONCAT(CAST(a AS STRING), '_', b)
1836
+
-- !query analysis
1837
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11k, a INT, b STRING, , CONCAT(CAST(a AS STRING), '_', b), false, false, false, true
1838
+
1839
+
1840
+
-- !query
1841
+
SELECT foo1_11k(123, 'test')
1842
+
-- !query analysis
1843
+
Project [spark_catalog.default.foo1_11k(a#x, b#x) AS spark_catalog.default.foo1_11k(123, test)#x]
1844
+
+- Project [cast(123 as int) AS a#x, cast(test as string) AS b#x]
1845
+
+- OneRowRelation
1846
+
1847
+
1848
+
-- !query
1849
+
CREATE OR REPLACE FUNCTION foo1_11l() RETURNS TABLE RETURN SELECT 1 as id, 'hello' as name
1850
+
-- !query analysis
1851
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11l, TABLE, SELECT 1 as id, 'hello' as name, true, false, false, true
1852
+
1853
+
1854
+
-- !query
1855
+
SELECT * FROM foo1_11l()
1856
+
-- !query analysis
1857
+
Project [id#x, name#x]
1858
+
+- SQLFunctionNode spark_catalog.default.foo1_11l
1859
+
+- SubqueryAlias foo1_11l
1860
+
+- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS name#x]
1861
+
+- Project [1 AS id#x, hello AS name#x]
1862
+
+- OneRowRelation
1863
+
1864
+
1865
+
-- !query
1866
+
CREATE OR REPLACE FUNCTION foo1_11m(a INT, b STRING) RETURNS TABLE RETURN SELECT a * 2 as doubled, UPPER(b) as upper_name
1867
+
-- !query analysis
1868
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11m, a INT, b STRING, TABLE, SELECT a * 2 as doubled, UPPER(b) as upper_name, true, false, false, true
1869
+
1870
+
1871
+
-- !query
1872
+
SELECT * FROM foo1_11m(5, 'world')
1873
+
-- !query analysis
1874
+
Project [doubled#x, upper_name#x]
1875
+
+- SQLFunctionNode spark_catalog.default.foo1_11m
1876
+
+- SubqueryAlias foo1_11m
1877
+
+- Project [cast(doubled#x as int) AS doubled#x, cast(upper_name#x as string) AS upper_name#x]
1878
+
+- Project [(cast(5 as int) * 2) AS doubled#x, upper(cast(world as string)) AS upper_name#x]
1879
+
+- OneRowRelation
1880
+
1881
+
1882
+
-- !query
1883
+
CREATE OR REPLACE FUNCTION foo1_11n(arr ARRAY<INT>) RETURNS TABLE RETURN SELECT size(arr) as array_size, arr[0] as first_element
1884
+
-- !query analysis
1885
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11n, arr ARRAY<INT>, TABLE, SELECT size(arr) as array_size, arr[0] as first_element, true, false, false, true
1886
+
1887
+
1888
+
-- !query
1889
+
SELECT * FROM foo1_11n(ARRAY(1, 2, 3))
1890
+
-- !query analysis
1891
+
Project [array_size#x, first_element#x]
1892
+
+- SQLFunctionNode spark_catalog.default.foo1_11n
1893
+
+- SubqueryAlias foo1_11n
1894
+
+- Project [cast(array_size#x as int) AS array_size#x, cast(first_element#x as int) AS first_element#x]
1895
+
+- Project [size(cast(array(1, 2, 3) as array<int>), false) AS array_size#x, cast(array(1, 2, 3) as array<int>)[0] AS first_element#x]
1896
+
+- OneRowRelation
1897
+
1898
+
1899
+
-- !query
1900
+
CREATE OR REPLACE FUNCTION foo1_11o(id INT, name STRING) RETURNS TABLE RETURN SELECT STRUCT(id, name) as person_info, id + 100 as modified_id
1901
+
-- !query analysis
1902
+
CreateSQLFunctionCommand spark_catalog.default.foo1_11o, id INT, name STRING, TABLE, SELECT STRUCT(id, name) as person_info, id + 100 as modified_id, true, false, false, true
1903
+
1904
+
1905
+
-- !query
1906
+
SELECT * FROM foo1_11o(1, 'Alice')
1907
+
-- !query analysis
1908
+
Project [person_info#x, modified_id#x]
1909
+
+- SQLFunctionNode spark_catalog.default.foo1_11o
1910
+
+- SubqueryAlias foo1_11o
1911
+
+- Project [cast(person_info#x as struct<id:int,name:string>) AS person_info#x, cast(modified_id#x as int) AS modified_id#x]
1912
+
+- Project [struct(id, cast(1 as int), name, cast(Alice as string)) AS person_info#x, (cast(1 as int) + 100) AS modified_id#x]
1913
+
+- OneRowRelation
1914
+
1915
+
1696
1916
-- !query
1697
1917
CREATE FUNCTION foo2_1a(a INT) RETURNS INT RETURN a
0 commit comments