Commit b6a60a9

Gg/handle stream exception (#431)

* handle low level http error as stream end
* fix deprecation warnings

1 parent 2aee21e · commit b6a60a9

File tree: 7 files changed (+18 / -13 lines)
CHANGELOG.md
Lines changed: 6 additions & 1 deletion

@@ -11,11 +11,16 @@ release (0.9.0), unrecognized arguments/keywords for these methods of creating a
 instead of being passed as ClickHouse server settings. This is in conjunction with some refactoring in Client construction.
 The supported method of passing ClickHouse server settings is to prefix such arguments/query parameters with`ch_`.
 
+## 0.8.8, 2024-11-27
+### Improvement
+- Handle low level HTTP errors as "Stream Complete". This provides better compatibility with the most recent
+ClickHouse version when the HTTP stream is abruptly closed after a server error.
+
 ## 0.8.7, 2024-11-21
 ### Improvement
 - Added basic support for ClickHouse geometric types Ring, Polygon, MultiPolygon, LineString, and MultiLineString.
 Closes https://github.com/ClickHouse/clickhouse-connect/issues/427
-
+
 ### Bug Fix
 - Settings/parameters from one Client will no longer leak into later client instantiations. Fixes
 https://github.com/ClickHouse/clickhouse-connect/issues/426

clickhouse_connect/__version__.py
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-version = '0.8.7'
+version = '0.8.8'

clickhouse_connect/datatypes/temporal.py
Lines changed: 1 addition & 2 deletions

@@ -79,7 +79,6 @@ def _read_column_binary(self, source: ByteSource, num_rows: int, ctx: QueryConte
         return data_conv.read_date32_col(source, num_rows)
 
 
-from_ts_naive = datetime.utcfromtimestamp
 from_ts_tz = datetime.fromtimestamp
 
 
@@ -193,7 +192,7 @@ def _read_binary_tz(self, column: Sequence, tz_info: tzinfo):
     def _read_binary_naive(self, column: Sequence):
         new_col = []
         app = new_col.append
-        dt_from = datetime.utcfromtimestamp
+        dt_from = datetime.fromtimestamp
         prec = self.prec
         for ticks in column:
             seconds = ticks // prec
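A note on the deprecation side of this change: datetime.utcfromtimestamp has been deprecated since Python 3.12, which is why the from_ts_naive alias was dropped and the naive reader now calls datetime.fromtimestamp directly. A minimal illustration of the replacement call styles (the timestamp value is arbitrary):

from datetime import datetime, timezone

ts = 1732752000  # arbitrary example epoch seconds

# Deprecated since Python 3.12 (DeprecationWarning; returned a naive UTC datetime):
# datetime.utcfromtimestamp(ts)

# Non-deprecated equivalents:
naive_local = datetime.fromtimestamp(ts)                 # naive datetime in the local timezone
aware_utc = datetime.fromtimestamp(ts, tz=timezone.utc)  # timezone-aware UTC datetime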

clickhouse_connect/driver/dataconv.py
Lines changed: 0 additions & 3 deletions

@@ -28,9 +28,6 @@ def read_ipv4_col(source: ByteSource, num_rows: int):
 
 def read_datetime_col(source: ByteSource, num_rows: int, tz_info: Optional[tzinfo]):
     src_array = source.read_array('I', num_rows)
-    if tz_info is None:
-        fts = datetime.utcfromtimestamp
-        return [fts(ts) for ts in src_array]
     fts = datetime.fromtimestamp
     return [fts(ts, tz_info) for ts in src_array]
 
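The tz_info is None branch could be removed because datetime.fromtimestamp accepts an optional tzinfo argument: passing None yields a naive datetime (interpreted in the local timezone, whereas utcfromtimestamp produced naive UTC). A small standalone sketch, with a hypothetical list of decoded UInt32 values standing in for the column data:

from datetime import datetime, timezone

timestamps = [0, 1700000000]  # hypothetical decoded epoch seconds

naive = [datetime.fromtimestamp(ts, None) for ts in timestamps]          # tz=None -> naive, local time
aware = [datetime.fromtimestamp(ts, timezone.utc) for ts in timestamps]  # tz given -> timezone-aware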

clickhouse_connect/driver/httputil.py
Lines changed: 6 additions & 1 deletion

@@ -228,7 +228,12 @@ def buffered():
        read_gen = response.stream(chunk_size, decompress is None)
        while True:
            while not done:
-                chunk = next(read_gen, None)  # Always try to read at least one chunk if there are any left
+                try:
+                    chunk = next(read_gen, None)  # Always try to read at least one chunk if there are any left
+                except Exception:  # pylint: disable=broad-except
+                    # By swallowing an unexpected exception reading the stream, we will let consumers decide how to
+                    # handle the unexpected end of stream
+                    pass
                if not chunk:
                    done = True
                    break
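This hunk is the core of the commit. When a recent ClickHouse server hits an error mid-response it can close the HTTP connection abruptly, and urllib3's chunk generator then raises (for example a ProtocolError) instead of simply ending. Per the inline comment, swallowing that exception leaves it to consumers to decide how to handle the unexpected end of stream. Below is a standalone sketch of the same idea, not the library's actual buffered() implementation; read_all_chunks and truncated_stream are hypothetical names, and the fake generator stands in for response.stream():

def read_all_chunks(chunk_gen):
    # Collect chunks until the generator is exhausted; treat a low level read
    # error the same as a normal end of stream and leave it to the caller to
    # decide what a short result means.
    chunks = []
    while True:
        try:
            chunk = next(chunk_gen, None)
        except Exception:  # e.g. urllib3.exceptions.ProtocolError on an abrupt close
            chunk = None
        if not chunk:
            return b''.join(chunks)
        chunks.append(chunk)


def truncated_stream():
    yield b'partial data'
    raise OSError('connection closed unexpectedly')


print(read_all_chunks(truncated_stream()))  # b'partial data' rather than an exception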

clickhouse_connect/tools/datagen.py
Lines changed: 3 additions & 4 deletions

@@ -18,8 +18,7 @@
 from clickhouse_connect.datatypes.temporal import Date, Date32, DateTime, DateTime64
 from clickhouse_connect.driver.common import array_sizes
 
-dt_from_ts = datetime.utcfromtimestamp
-dt_from_ts_tz = datetime.fromtimestamp
+dt_from_ts = datetime.fromtimestamp
 epoch_date = date(1970, 1, 1)
 date32_start_date = date(1925, 1, 1)
 

@@ -138,7 +137,7 @@ def random_datetime():
 
 
 def random_datetime_tz(timezone: tzinfo):
-    return dt_from_ts_tz(int(random() * 2 ** 32), timezone).replace(microsecond=0)
+    return dt_from_ts(int(random() * 2 ** 32), timezone).replace(microsecond=0)
 
 
 def random_ascii_str(max_len: int = 200, min_len: int = 0):

@@ -172,7 +171,7 @@ def random_datetime64_tz(prec: int, timezone: tzinfo):
        u_sec = int(random() * 1000) * 1000
    else:
        u_sec = int(random() * 1000000)
-    return dt_from_ts_tz(int(random() * 4294967296), timezone).replace(microsecond=u_sec)
+    return dt_from_ts(int(random() * 4294967296), timezone).replace(microsecond=u_sec)
 
 
 def random_ipv6():
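Because datetime.fromtimestamp takes an optional tzinfo, the single dt_from_ts alias can serve both the naive and the timezone-aware random generators, so the separate dt_from_ts_tz alias was dropped. A quick illustration (values are random, as in the generators above):

from datetime import datetime, timezone
from random import random

dt_from_ts = datetime.fromtimestamp

naive_dt = dt_from_ts(int(random() * 2 ** 32))                # naive, local time
aware_dt = dt_from_ts(int(random() * 2 ** 32), timezone.utc)  # timezone-aware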

tests/integration_tests/test_pandas.py
Lines changed: 1 addition & 1 deletion

@@ -257,7 +257,7 @@ def test_pandas_row_df(test_client: Client, table_context:Callable):
     source_df = df.copy()
     test_client.insert_df('test_pandas_row_df', df)
     result_df = test_client.query_df('SELECT * FROM test_pandas_row_df', column_formats={'fs': 'string'})
-    assert str(result_df.dtypes[2]) == 'string'
+    assert str(result_df.dtypes.iloc[2]) == 'string'
     assert result_df.iloc[0]['key'] == 2
     assert result_df.iloc[0]['dt'] == pd.Timestamp(2023, 10, 15, 14, 50, 2, 4038)
     assert result_df.iloc[0]['fs'] == 'bit'
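The test tweak addresses a pandas deprecation rather than a behavior change: result_df.dtypes is a Series indexed by column name, and indexing such a Series with an integer via [] falls back to positional access, which recent pandas versions flag with a FutureWarning. .iloc is the explicit positional accessor. A tiny illustration with a hypothetical frame:

import pandas as pd

df = pd.DataFrame({'key': [2], 'dt': [pd.Timestamp(2023, 10, 15)], 'fs': ['bit']})

# df.dtypes[2] relies on the deprecated positional fallback; .iloc[2] is explicit.
assert str(df.dtypes.iloc[2]) == 'object'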
