Skip to content

Commit

Permalink
Merge pull request #209 from Azure/Uploader-SingleFile-Issue
Browse files Browse the repository at this point in the history
Fixed upload issue for a directory containing a single file
  • Loading branch information
milanchandna authored Mar 13, 2018
2 parents 923cc64 + 1541144 commit b88749a
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 3 deletions.
4 changes: 4 additions & 0 deletions HISTORY.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@
Release History
===============

0.0.19 (2018-03-14)
-------------------
* Fixed upload issue where the destination filename was wrong when uploading a directory containing a single file #208

0.0.18 (2018-02-05)
-------------------
* Fixed read issue where whole file was cached while doing positional reads #198
Expand Down
2 changes: 1 addition & 1 deletion azure/datalake/store/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
# license information.
# --------------------------------------------------------------------------

__version__ = "0.0.18"
__version__ = "0.0.19"

from .core import AzureDLFileSystem
from .multithread import ADLDownloader
Expand Down
4 changes: 3 additions & 1 deletion azure/datalake/store/multithread.py
Original file line number Diff line number Diff line change
Expand Up @@ -441,17 +441,19 @@ def hash(self):
def _setup(self):
""" Create set of parameters to loop over
"""
is_path_walk_empty = False
if "*" not in self.lpath:
out = os.walk(self.lpath)
lfiles = sum(([os.path.join(dir, f) for f in fnames] for
(dir, _, fnames) in out), [])
if (not lfiles and os.path.exists(self.lpath) and
not os.path.isdir(self.lpath)):
lfiles = [self.lpath]
is_path_walk_empty = True
else:
lfiles = glob.glob(self.lpath)

if len(lfiles) > 1:
if len(lfiles) > 0 and not is_path_walk_empty:
local_rel_lpath = str(AzureDLPath(self.lpath).globless_prefix)
file_pairs = [(f, self.rpath / AzureDLPath(f).relative_to(local_rel_lpath)) for f in lfiles]
elif lfiles:
Expand Down
18 changes: 17 additions & 1 deletion tests/test_multithread.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,6 @@ def local_files(tempdir):
f.write(b'0123456789')
yield filenames


@my_vcr.use_cassette
def test_upload_one(local_files, azure):
with azure_teardown(azure):
Expand All @@ -253,6 +252,23 @@ def test_upload_one(local_files, azure):

azure.rm(test_dir / 'bigfile')

@my_vcr.use_cassette
def test_upload_single_file_in_dir(tempdir, azure):
    """Regression test for #208.

    Uploading a local directory that contains exactly one file must place
    that file *under* the remote destination directory
    (``<rpath>/<filename>``), not treat the destination as the file itself.
    """
    with azure_teardown(azure):
        # Build a local directory holding exactly one 10-byte file.
        lpath_dir = tempdir
        lfilename = os.path.join(lpath_dir, 'singlefile')
        with open(lfilename, 'wb') as f:
            f.write(b'0123456789')

        # Upload the directory (not the file) and verify the file lands at
        # <remote dir>/<filename> with the expected length.
        # NOTE: the original test also constructed an ADLTransferClient that
        # was never passed to ADLUploader — dead code, removed here.
        ADLUploader(azure, test_dir / 'singlefiledir', lpath_dir, nthreads=1,
                    overwrite=True)
        assert azure.info(test_dir / 'singlefiledir' / 'singlefile')['length'] == 10
        azure.rm(test_dir / 'singlefiledir' / 'singlefile')

@my_vcr.use_cassette
def test_upload_one_empty_file(local_files, azure):
Expand Down

0 comments on commit b88749a

Please sign in to comment.