Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 10 additions & 8 deletions s3fs/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -463,6 +463,8 @@ def bulk_delete(self, pathlist):
pathlist : list of strings
The keys to remove, must all be in the same bucket.
"""
if not pathlist:
return
buckets = {split_path(path)[0] for path in pathlist}
if len(buckets) > 1:
raise ValueError("Bulk delete files should refer to only one bucket")
Expand Down Expand Up @@ -643,8 +645,8 @@ def __init__(self, s3, path, mode='rb', block_size=5 * 2 ** 20):
else:
try:
self.mpu = s3.s3.create_multipart_upload(Bucket=bucket, Key=key)
except (ClientError, ParamValidationError):
raise IOError('Open for write failed', path)
except (ClientError, ParamValidationError) as e:
raise IOError('Open for write failed', path, e)
self.loc = self.size
out = self.s3.s3.upload_part_copy(Bucket=self.bucket, Key=self.key,
PartNumber=1, UploadId=self.mpu['UploadId'],
Expand Down Expand Up @@ -824,8 +826,8 @@ def flush(self, force=False, retries=10):
try:
self.mpu = self.mpu or self.s3.s3.create_multipart_upload(
Bucket=self.bucket, Key=self.key)
except (ClientError, ParamValidationError):
raise IOError('Initating write failed: %s' % self.path)
except (ClientError, ParamValidationError) as e:
raise IOError('Initating write failed: %s' % self.path, e)

while True:
try:
Expand All @@ -842,8 +844,8 @@ def flush(self, force=False, retries=10):
else:
raise IOError('Write failed after %i retries' % retries,
self)
except:
raise IOError('Write failed', self)
except Exception as e:
raise IOError('Write failed', self, e)
self.parts.append({'PartNumber': part, 'ETag': out['ETag']})
self.buffer = io.BytesIO()

Expand All @@ -870,8 +872,8 @@ def close(self):
try:
self.s3.s3.put_object(Bucket=self.bucket, Key=self.key,
Body=self.buffer.read())
except (ClientError, ParamValidationError):
raise IOError('Write failed: %s' % self.path)
except (ClientError, ParamValidationError) as e:
raise IOError('Write failed: %s' % self.path, e)
self.s3.invalidate_cache(self.bucket)
self.closed = True

Expand Down
11 changes: 9 additions & 2 deletions s3fs/mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from collections import MutableMapping
import os

from .core import S3FileSystem
from .core import S3FileSystem, split_path


class S3Map(MutableMapping):
Expand Down Expand Up @@ -30,12 +30,19 @@ class S3Map(MutableMapping):
b'Hello World'
"""

def __init__(self, root, s3=None, check=False, create=False):
    """Create a mutable mapping rooted at *root*.

    Parameters
    ----------
    root : str
        Bucket (and optional key prefix) under which keys live.
    s3 : S3FileSystem, optional
        Filesystem instance to use; defaults to the current one.
    check : bool
        If True, verify the root is writable by touching and then
        removing a throw-away key.
    create : bool
        If True (and ``check`` is False), create the bucket if it
        does not already exist.

    Raises
    ------
    ValueError
        If the bucket does not exist and ``create`` is False.
    """
    self.s3 = s3 or S3FileSystem.current()
    self.root = root
    if check:
        # Probe writability with a temporary key.
        self.s3.touch(root + '/a')
        self.s3.rm(root + '/a')
    else:
        bucket = split_path(root)[0]
        if create:
            self.s3.mkdir(bucket)
        elif not self.s3.exists(bucket):
            # BUG FIX: the original never applied the % operator, so the
            # literal "%s" leaked into the error message instead of the
            # bucket name.
            raise ValueError("Bucket %s does not exist."
                             " Create bucket with the ``create=True``"
                             " keyword" % bucket)

def clear(self):
"""Remove all keys below root - empties out mapping
Expand Down
14 changes: 14 additions & 0 deletions s3fs/tests/test_mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,3 +91,17 @@ def test_bytearray(s3):
d['x'] = bytearray(b'123')

assert d['x'] == b'123'


def test_new_bucket(s3):
    """S3Map must refuse an absent bucket unless ``create=True`` is given."""
    # Missing bucket without create=True -> ValueError whose message
    # tells the user about the create keyword.
    try:
        # Fix: the original bound the (never-created) mapping to an unused
        # variable; the constructor call alone is what should raise.
        S3Map('new-bucket', s3)
        assert False, "expected ValueError for a missing bucket"
    except ValueError as e:
        assert 'create=True' in str(e)

    # create=True makes the bucket; a fresh mapping is empty.
    d = S3Map('new-bucket', s3, create=True)
    assert not d

    # A sub-directory inside the now-existing bucket needs no create flag.
    d = S3Map('new-bucket/new-directory', s3)
    assert not d