Skip to content

Commit 7f95af6

Browse files
authored
bump dependencies (#58)
1 parent 1c5e3a1 commit 7f95af6

File tree

7 files changed

+62
-60
lines changed

7 files changed

+62
-60
lines changed

CHANGELOG.md

+7
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
55
The format is based on [Keep a Changelog](http://keepachangelog.com/)
66
and this project adheres to [Semantic Versioning](http://semver.org/).
77

8+
## [1.1.1] - 2019-08-14
9+
10+
### Changed
11+
12+
- Bumped FS to 2.4
13+
- Bumped Boto to 1.9
14+
815
## [1.1.0] - 2018-01-01
916

1017
### Changed

fs_s3fs/_s3fs.py

-1
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313
import tempfile
1414
import threading
1515
import mimetypes
16-
import json
1716

1817
import boto3
1918
from botocore.exceptions import ClientError, EndpointConnectionError

fs_s3fs/_version.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "1.1.0"
1+
__version__ = "1.1.1"

fs_s3fs/opener.py

+12-14
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
# coding: utf-8
2-
"""Defines the S3FSpener."""
2+
"""Defines the S3FS Opener."""
33

44
from __future__ import absolute_import
55
from __future__ import print_function
66
from __future__ import unicode_literals
77

8-
__all__ = ['S3FSOpener']
8+
__all__ = ["S3FSOpener"]
99

1010
from fs.opener import Opener
1111
from fs.opener.errors import OpenerError
@@ -14,27 +14,25 @@
1414

1515

1616
class S3FSOpener(Opener):
17-
protocols = ['s3']
17+
protocols = ["s3"]
1818

1919
def open_fs(self, fs_url, parse_result, writeable, create, cwd):
20-
bucket_name, _, dir_path = parse_result.resource.partition('/')
20+
bucket_name, _, dir_path = parse_result.resource.partition("/")
2121
if not bucket_name:
22-
raise OpenerError(
23-
"invalid bucket name in '{}'".format(fs_url)
24-
)
22+
raise OpenerError("invalid bucket name in '{}'".format(fs_url))
2523
strict = (
26-
parse_result.params['strict'] == '1'
27-
if 'strict' in parse_result.params
24+
parse_result.params["strict"] == "1"
25+
if "strict" in parse_result.params
2826
else True
2927
)
3028
s3fs = S3FS(
3129
bucket_name,
32-
dir_path=dir_path or '/',
30+
dir_path=dir_path or "/",
3331
aws_access_key_id=parse_result.username or None,
3432
aws_secret_access_key=parse_result.password or None,
35-
endpoint_url=parse_result.params.get('endpoint_url', None),
36-
acl=parse_result.params.get('acl', None),
37-
cache_control=parse_result.params.get('cache_control', None),
38-
strict=strict
33+
endpoint_url=parse_result.params.get("endpoint_url", None),
34+
acl=parse_result.params.get("acl", None),
35+
cache_control=parse_result.params.get("cache_control", None),
36+
strict=strict,
3937
)
4038
return s3fs

fs_s3fs/tests/test_s3fs.py

+38-40
Original file line numberDiff line numberDiff line change
@@ -5,77 +5,75 @@
55
from nose.plugins.attrib import attr
66

77
from fs.test import FSTestCases
8-
98
from fs_s3fs import S3FS
109

1110
import boto3
1211

1312

1413
class TestS3FS(FSTestCases, unittest.TestCase):
1514
"""Test S3FS implementation from dir_path."""
16-
bucket_name = 'fsexample'
17-
s3 = boto3.resource('s3')
18-
client = boto3.client('s3')
15+
16+
bucket_name = "fsexample"
17+
s3 = boto3.resource("s3")
18+
client = boto3.client("s3")
1919

2020
def make_fs(self):
2121
self._delete_bucket_contents()
2222
return S3FS(self.bucket_name)
2323

2424
def _delete_bucket_contents(self):
25-
response = self.client.list_objects(
26-
Bucket=self.bucket_name
27-
)
25+
response = self.client.list_objects(Bucket=self.bucket_name)
2826
contents = response.get("Contents", ())
2927
for obj in contents:
30-
self.client.delete_object(
31-
Bucket=self.bucket_name,
32-
Key=obj["Key"]
33-
)
28+
self.client.delete_object(Bucket=self.bucket_name, Key=obj["Key"])
3429

3530

36-
@attr('slow')
31+
@attr("slow")
3732
class TestS3FSSubDir(FSTestCases, unittest.TestCase):
3833
"""Test S3FS implementation from dir_path."""
39-
bucket_name = 'fsexample'
40-
s3 = boto3.resource('s3')
41-
client = boto3.client('s3')
34+
35+
bucket_name = "fsexample"
36+
s3 = boto3.resource("s3")
37+
client = boto3.client("s3")
4238

4339
def make_fs(self):
4440
self._delete_bucket_contents()
45-
self.s3.Object(self.bucket_name, 'subdirectory').put()
46-
return S3FS(self.bucket_name, dir_path='subdirectory')
41+
self.s3.Object(self.bucket_name, "subdirectory").put()
42+
return S3FS(self.bucket_name, dir_path="subdirectory")
4743

4844
def _delete_bucket_contents(self):
49-
response = self.client.list_objects(
50-
Bucket=self.bucket_name
51-
)
45+
response = self.client.list_objects(Bucket=self.bucket_name)
5246
contents = response.get("Contents", ())
5347
for obj in contents:
54-
self.client.delete_object(
55-
Bucket=self.bucket_name,
56-
Key=obj["Key"]
57-
)
48+
self.client.delete_object(Bucket=self.bucket_name, Key=obj["Key"])
5849

5950

6051
class TestS3FSHelpers(unittest.TestCase):
61-
6252
def test_path_to_key(self):
63-
s3 = S3FS('foo')
64-
self.assertEqual(s3._path_to_key('foo.bar'), 'foo.bar')
65-
self.assertEqual(s3._path_to_key('foo/bar'), 'foo/bar')
53+
s3 = S3FS("foo")
54+
self.assertEqual(s3._path_to_key("foo.bar"), "foo.bar")
55+
self.assertEqual(s3._path_to_key("foo/bar"), "foo/bar")
6656

6757
def test_path_to_key_subdir(self):
68-
s3 = S3FS('foo', '/dir')
69-
self.assertEqual(s3._path_to_key('foo.bar'), 'dir/foo.bar')
70-
self.assertEqual(s3._path_to_key('foo/bar'), 'dir/foo/bar')
58+
s3 = S3FS("foo", "/dir")
59+
self.assertEqual(s3._path_to_key("foo.bar"), "dir/foo.bar")
60+
self.assertEqual(s3._path_to_key("foo/bar"), "dir/foo/bar")
7161

7262
def test_upload_args(self):
73-
s3 = S3FS('foo', acl='acl', cache_control='cc')
74-
self.assertDictEqual(s3._get_upload_args('test.jpg'),
75-
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'image/jpeg'})
76-
self.assertDictEqual(s3._get_upload_args('test.mp3'),
77-
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'audio/mpeg'})
78-
self.assertDictEqual(s3._get_upload_args('test.json'),
79-
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'application/json'})
80-
self.assertDictEqual(s3._get_upload_args('unknown.unknown'),
81-
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'binary/octet-stream'})
63+
s3 = S3FS("foo", acl="acl", cache_control="cc")
64+
self.assertDictEqual(
65+
s3._get_upload_args("test.jpg"),
66+
{"ACL": "acl", "CacheControl": "cc", "ContentType": "image/jpeg"},
67+
)
68+
self.assertDictEqual(
69+
s3._get_upload_args("test.mp3"),
70+
{"ACL": "acl", "CacheControl": "cc", "ContentType": "audio/mpeg"},
71+
)
72+
self.assertDictEqual(
73+
s3._get_upload_args("test.json"),
74+
{"ACL": "acl", "CacheControl": "cc", "ContentType": "application/json"},
75+
)
76+
self.assertDictEqual(
77+
s3._get_upload_args("unknown.unknown"),
78+
{"ACL": "acl", "CacheControl": "cc", "ContentType": "binary/octet-stream"},
79+
)

setup.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
with open("README.rst", "rt") as f:
2424
DESCRIPTION = f.read()
2525

26-
REQUIREMENTS = ["boto3~=1.7", "fs~=2.2", "six~=1.10"]
26+
REQUIREMENTS = ["boto3~=1.9", "fs~=2.4", "six~=1.10"]
2727

2828
setup(
2929
name="fs-s3fs",

tox.ini

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
[tox]
2-
envlist = py27,py34,py35,py36,pypy
2+
envlist = py27,py34,py35,py36,py37,pypy
33
sitepackages = False
44

55
[testenv]
66
deps = nose
7-
boto3==1.7.64
8-
fs==2.1.0
7+
boto3==1.9.207
8+
fs==2.4.10
99

1010
passenv = *
1111
#changedir=.tox

0 commit comments

Comments (0)