 from clickhouse_connect.driver.exceptions import ProgrammingError, OperationalError
 from clickhouse_connect.driver.external import ExternalData
 from clickhouse_connect.driver.insert import InsertContext
+from clickhouse_connect.driver.options import check_arrow, check_pandas, check_numpy
 from clickhouse_connect.driver.summary import QuerySummary
 from clickhouse_connect.driver.models import ColumnDef, SettingDef, SettingStatus
 from clickhouse_connect.driver.query import QueryResult, to_arrow, to_arrow_batches, QueryContext, arrow_buffer
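The newly imported check_arrow / check_pandas / check_numpy helpers appear to be optional-dependency guards called before the methods that need pyarrow, pandas, or numpy. A minimal sketch of that pattern, assuming the guard only verifies the package is importable and raises a descriptive error otherwise (the exception type and message below are assumptions, not the library's actual code):

    # Hypothetical guard in the spirit of clickhouse_connect.driver.options (a sketch, not the real module)
    try:
        import numpy as np
    except ImportError:
        np = None

    def check_numpy():
        # Fail fast, before any query work, if the optional dependency is missing
        if np is None:
            raise ImportError('numpy is required for query_np/query_np_stream; install it first')
        return np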
@@ -68,7 +69,7 @@ def __init__(self,
         self.uri = uri
         self._init_common_settings(apply_server_timezone)

-    def _init_common_settings(self, apply_server_timezone:Optional[Union[str, bool]] ):
+    def _init_common_settings(self, apply_server_timezone: Optional[Union[str, bool]]):
         self.server_tz, dst_safe = pytz.UTC, True
         self.server_version, server_tz = \
             tuple(self.command('SELECT version(), timezone()', use_database=False))
@@ -122,14 +123,16 @@ def _validate_settings(self, settings: Optional[Dict[str, Any]]) -> Dict[str, str]:
         return validated

     def _validate_setting(self, key: str, value: Any, invalid_action: str) -> Optional[str]:
-        new_value = str(value)
+        str_value = str(value)
         if value is True:
-            new_value = '1'
+            str_value = '1'
         elif value is False:
-            new_value = '0'
+            str_value = '0'
         if key not in self.valid_transport_settings:
             setting_def = self.server_settings.get(key)
-            if setting_def is None or (setting_def.readonly and setting_def.value != new_value):
+            if setting_def and setting_def.value == str_value:
+                return None  # don't send settings that are already the expected value
+            if setting_def is None or setting_def.readonly:
                 if key in self.optional_transport_settings:
                     return None
                 if invalid_action == 'send':
@@ -139,7 +142,7 @@ def _validate_setting(self, key: str, value: Any, invalid_action: str) -> Optional[str]:
                     return None
                 else:
                     raise ProgrammingError(f'Setting {key} is unknown or readonly') from None
-        return new_value
+        return str_value

     def _setting_status(self, key: str) -> SettingStatus:
         comp_setting = self.server_settings.get(key)
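A minimal standalone illustration of the revised skip rule above, with simplified names and the invalid_action branches collapsed into a plain error (a sketch for clarity, not the client class itself):

    from collections import namedtuple

    SettingDef = namedtuple('SettingDef', 'value readonly')
    server_settings = {'max_threads': SettingDef(value='8', readonly=False),
                       'readonly': SettingDef(value='1', readonly=True)}

    def validate(key, value):
        str_value = str(value)
        if value is True:
            str_value = '1'
        elif value is False:
            str_value = '0'
        setting_def = server_settings.get(key)
        if setting_def and setting_def.value == str_value:
            return None  # already the expected value, so nothing needs to be sent
        if setting_def is None or setting_def.readonly:
            raise ValueError(f'Setting {key} is unknown or readonly')
        return str_value

    assert validate('max_threads', 8) is None   # matches the server value: skipped
    assert validate('max_threads', 4) == '4'    # different value: sent as a string
    assert validate('readonly', True) is None   # readonly but already equal: skipped rather than sent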
@@ -342,6 +345,7 @@ def query_np(self,
         create_query_context method
         :return: Numpy array representing the result set
         """
+        check_numpy()
         return self._context_query(locals(), use_numpy=True).np_result

     # pylint: disable=duplicate-code,too-many-arguments,unused-argument
@@ -361,6 +365,7 @@ def query_np_stream(self,
         create_query_context method
         :return: Generator that yields a numpy array per block representing the result set
         """
+        check_numpy()
         return self._context_query(locals(), use_numpy=True, streaming=True).np_stream

     # pylint: disable=duplicate-code,unused-argument
@@ -384,6 +389,7 @@ def query_df(self,
         create_query_context method
         :return: Pandas dataframe representing the result set
         """
+        check_pandas()
         return self._context_query(locals(), use_numpy=True, as_pandas=True).df_result

     # pylint: disable=duplicate-code,unused-argument
@@ -407,6 +413,7 @@ def query_df_stream(self,
         create_query_context method
         :return: Generator that yields a Pandas dataframe per block representing the result set
         """
+        check_pandas()
         return self._context_query(locals(), use_numpy=True,
                                    as_pandas=True,
                                    streaming=True).df_stream
@@ -519,6 +526,7 @@ def query_arrow(self,
         :param external_data: ClickHouse "external data" to send with query
         :return: PyArrow.Table
         """
+        check_arrow()
         settings = self._update_arrow_settings(settings, use_strings)
         return to_arrow(self.raw_query(query,
                                        parameters,
@@ -541,6 +549,7 @@ def query_arrow_stream(self,
         :param external_data: ClickHouse "external data" to send with query
         :return: Generator that yields a PyArrow.Table per block representing the result set
         """
+        check_arrow()
         settings = self._update_arrow_settings(settings, use_strings)
         return to_arrow_batches(self.raw_stream(query,
                                                 parameters,
@@ -661,6 +670,7 @@ def insert_df(self, table: str = None,
           different data batches
         :return: QuerySummary with summary information, throws exception if insert fails
         """
+        check_pandas()
         if context is None:
             if column_names is None:
                 column_names = df.columns
@@ -686,6 +696,7 @@ def insert_arrow(self, table: str,
         :param settings: Optional dictionary of ClickHouse settings (key/string values)
         :return: QuerySummary with summary information, throws exception if insert fails
         """
+        check_arrow()
         full_table = table if '.' in table or not database else f'{database}.{table}'
         compression = self.write_compression if self.write_compression in ('zstd', 'lz4') else None
         column_names, insert_block = arrow_buffer(arrow_table, compression)
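As a quick usage check of the guarded entry points touched above (the host, table name, and queries below are placeholders, not part of this change):

    import clickhouse_connect

    client = clickhouse_connect.get_client(host='localhost')                # placeholder connection
    df = client.query_df('SELECT number FROM system.numbers LIMIT 10')      # fails fast if pandas is missing
    tbl = client.query_arrow('SELECT number FROM system.numbers LIMIT 10')  # fails fast if pyarrow is missing
    client.insert_arrow('my_table', tbl)                                    # hypothetical target table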