Skip to content

Commit 00e47d4

Browse files
authored
Merge pull request #790 from jettero/onto-4.0
s3-fix merge into 4.0 branch
2 parents 765c16b + d12652c commit 00e47d4

File tree

2 files changed

+22
-5
lines changed

2 files changed

+22
-5
lines changed

hubblestack/extmods/fileserver/s3fs.py

+9-2
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@
105105

106106
log = logging.getLogger(__name__)
107107

108-
S3_CACHE_EXPIRE = 30 # cache for 30 seconds
108+
S3_CACHE_EXPIRE = 1800 # cache for 30 minutes
109109
S3_SYNC_ON_UPDATE = True # sync cache on update rather than jit
110110

111111

@@ -336,6 +336,7 @@ def _get_s3_key():
336336
'service_url': None,
337337
'keyid': None,
338338
'key': None,
339+
'cache_expire': S3_CACHE_EXPIRE,
339340
}
340341

341342
ret = dict()
@@ -351,14 +352,17 @@ def _get_s3_key():
351352

352353
return ret
353354

355+
354356
def _init():
355357
"""
356358
Connect to S3 and download the metadata for each file in all buckets
357359
specified and cache the data to disk.
358360
"""
359361
cache_file = _get_buckets_cache_filename()
360-
exp = time.time() - S3_CACHE_EXPIRE
362+
cache_expire_time = float(_get_s3_key().get('cache_expire'))
363+
exp = time.time() - cache_expire_time
361364

365+
log.debug('S3 cache expire time is %ds', cache_expire_time)
362366
# check mtime of the buckets files cache
363367
metadata = None
364368
try:
@@ -443,6 +447,9 @@ def __get_s3_meta(bucket, key=s3_key_kwargs['key'], keyid=s3_key_kwargs['keyid']
443447
path_style=s3_key_kwargs['path_style'],
444448
https_enable=s3_key_kwargs['https_enable'],
445449
params={'marker': marker})
450+
if not tmp:
451+
return None
452+
446453
headers = []
447454
for header in tmp:
448455
if 'Key' in header:

hubblestack/extmods/utils/s3.py

+13-3
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
HAS_REQUESTS = False # pylint: disable=W0612
1818

1919
# Import Salt libs
20+
import os
2021
import salt.utils.aws
2122
import salt.utils.files
2223
import salt.utils.hashutils
@@ -203,6 +204,11 @@ def query(key, keyid, method='GET', params=None, headers=None,
203204
err_code = 'http-{0}'.format(result.status_code)
204205
err_msg = err_text
205206

207+
if os.environ.get('MOCK_SLOW_DOWN'):
208+
result.status_code = 503
209+
err_code = 'SlowDown'
210+
err_msg = 'MOCK_SLOW_DOWN environment variable set. All S3 queries will fail for testing purposes.'
211+
206212
log.debug('S3 Response Status Code: %s', result.status_code)
207213

208214
if method == 'PUT':
@@ -219,7 +225,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
219225
log.debug('Uploaded from %s to %s', local_file, path)
220226
else:
221227
log.debug('Created bucket %s', bucket)
222-
return
228+
return None
223229

224230
if method == 'DELETE':
225231
if not six.text_type(result.status_code).startswith('2'):
@@ -235,7 +241,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
235241
log.debug('Deleted %s from bucket %s', path, bucket)
236242
else:
237243
log.debug('Deleted bucket %s', bucket)
238-
return
244+
return None
239245

240246
# This can be used to save a binary object to disk
241247
if local_file and method == 'GET':
@@ -250,6 +256,10 @@ def query(key, keyid, method='GET', params=None, headers=None,
250256
return 'Saved to local file: {0}'.format(local_file)
251257

252258
if result.status_code < 200 or result.status_code >= 300:
259+
if err_code in ['SlowDown', 'ServiceUnavailable', 'RequestTimeTooSkewed',
260+
'RequestTimeout', 'OperationAborted', 'InternalError']:
261+
log.error('Failed s3 operation: %s, %s', err_code, err_msg)
262+
return None
253263
raise CommandExecutionError(
254264
'Failed s3 operation. {0}: {1}'.format(err_code, err_msg))
255265

@@ -268,7 +278,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
268278
return ret, requesturl
269279
else:
270280
if result.status_code != requests.codes.ok:
271-
return
281+
return None
272282
ret = {'headers': []}
273283
if full_headers:
274284
ret['headers'] = dict(result.headers)

0 commit comments

Comments (0)