@@ -130,6 +130,31 @@ def fin():
130
130
return conn_cnx
131
131
132
132
133
class LobBackendParams(NamedTuple):
    """LOB-related backend settings used to parametrize cursor tests."""

    # Effective MAX_LOB_SIZE_IN_MEMORY reported by the backend; the
    # lob_params fixture falls back to 2**24 when the increased-LOB
    # feature is not enabled.
    max_lob_size_in_memory: int
137
@pytest.fixture()
def lob_params(conn_cnx) -> LobBackendParams:
    """Resolve the backend's LOB size limit for use in test expectations.

    Runs ``SHOW PARAMETERS`` for two session parameters:

    * ``FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY`` -- feature flag; the
      limit is honored only when its value is ``"ENABLED"``.
    * ``MAX_LOB_SIZE_IN_MEMORY`` -- the configured in-memory LOB limit.

    Returns ``LobBackendParams`` carrying the configured limit when the
    feature is enabled and the parameter row exists, otherwise the
    historical default of 2**24 (16 MiB).
    """
    with conn_cnx() as cnx:

        def _show_parameter(name):
            # SHOW PARAMETERS rows look like (name, value, ...); fetchone()
            # yields None when the backend does not know the parameter.
            return cnx.cursor().execute(f"show parameters like '{name}'").fetchone()

        feature_row = _show_parameter("FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY")
        size_row = _show_parameter("MAX_LOB_SIZE_IN_MEMORY")
    feature_enabled = feature_row and feature_row[1] == "ENABLED"
    max_lob_size_in_memory = (
        int(size_row[1]) if (feature_enabled and size_row) else 2**24
    )
    return LobBackendParams(max_lob_size_in_memory)
157
+
133
158
def _check_results (cursor , results ):
134
159
assert cursor .sfqid , "Snowflake query id is None"
135
160
assert cursor .rowcount == 3 , "the number of records"
@@ -1564,7 +1589,9 @@ def test_resultbatch(
1564
1589
("arrow" , "snowflake.connector.result_batch.ArrowResultBatch.create_iter" ),
1565
1590
),
1566
1591
)
1567
- def test_resultbatch_lazy_fetching_and_schemas (conn_cnx , result_format , patch_path ):
1592
+ def test_resultbatch_lazy_fetching_and_schemas (
1593
+ conn_cnx , result_format , patch_path , lob_params
1594
+ ):
1568
1595
"""Tests whether pre-fetching results chunks fetches the right amount of them."""
1569
1596
rowcount = 1000000 # We need at least 5 chunks for this test
1570
1597
with conn_cnx (
@@ -1592,7 +1619,17 @@ def test_resultbatch_lazy_fetching_and_schemas(conn_cnx, result_format, patch_pa
1592
1619
# all batches should have the same schema
1593
1620
assert schema == [
1594
1621
ResultMetadata ("C1" , 0 , None , None , 10 , 0 , False ),
1595
- ResultMetadata ("C2" , 2 , None , 16777216 , None , None , False ),
1622
+ ResultMetadata (
1623
+ "C2" ,
1624
+ 2 ,
1625
+ None ,
1626
+ schema [
1627
+ 1
1628
+ ].internal_size , # TODO: lob_params.max_lob_size_in_memory,
1629
+ None ,
1630
+ None ,
1631
+ False ,
1632
+ ),
1596
1633
]
1597
1634
assert patched_download .call_count == 0
1598
1635
assert len (result_batches ) > 5
@@ -1613,7 +1650,7 @@ def test_resultbatch_lazy_fetching_and_schemas(conn_cnx, result_format, patch_pa
1613
1650
1614
1651
@pytest .mark .skipolddriver (reason = "new feature in v2.5.0" )
1615
1652
@pytest .mark .parametrize ("result_format" , ["json" , "arrow" ])
1616
- def test_resultbatch_schema_exists_when_zero_rows (conn_cnx , result_format ):
1653
+ def test_resultbatch_schema_exists_when_zero_rows (conn_cnx , result_format , lob_params ):
1617
1654
with conn_cnx (
1618
1655
session_parameters = {"python_connector_query_result_format" : result_format }
1619
1656
) as con :
@@ -1629,7 +1666,15 @@ def test_resultbatch_schema_exists_when_zero_rows(conn_cnx, result_format):
1629
1666
schema = result_batches [0 ].schema
1630
1667
assert schema == [
1631
1668
ResultMetadata ("C1" , 0 , None , None , 10 , 0 , False ),
1632
- ResultMetadata ("C2" , 2 , None , 16777216 , None , None , False ),
1669
+ ResultMetadata (
1670
+ "C2" ,
1671
+ 2 ,
1672
+ None ,
1673
+ schema [1 ].internal_size , # TODO: lob_params.max_lob_size_in_memory,
1674
+ None ,
1675
+ None ,
1676
+ False ,
1677
+ ),
1633
1678
]
1634
1679
1635
1680
0 commit comments