@@ -158,7 +158,7 @@ def storage_file_download_batch(cmd, client, source, destination, pattern=None,
 
     from azure.cli.command_modules.storage.util import glob_files_remotely, mkdir_p
 
-    source_files = glob_files_remotely(cmd, client, source, pattern)
+    source_files = glob_files_remotely(cmd, client, source, pattern, snapshot=snapshot)
 
     if dryrun:
         source_files_list = list(source_files)
@@ -117,7 +117,9 @@ def test_storage_blob_batch_upload_scenarios(self, test_dir, storage_account_inf
     @StorageTestFilesPreparer()
     def test_storage_file_batch_download_scenarios(self, test_dir, storage_account_info):
         src_share = self.create_share(storage_account_info)
 
         # Prepare files
+        snapshot = self.storage_cmd('storage share snapshot -n {} ',
+                                    storage_account_info, src_share).get_output_in_json()["snapshot"]
         self.storage_cmd('storage file upload-batch -s "{}" -d {} --max-connections 3', storage_account_info,
                          test_dir, src_share)
@@ -146,6 +148,25 @@ def test_storage_file_batch_download_scenarios(self, test_dir, storage_account_i
                          src_share, local_folder)
         self.assertEqual(0, sum(len(f) for r, d, f in os.walk(local_folder)))
 
+        # download with snapshot
+        local_folder = self.create_temp_dir()
+        self.storage_cmd('storage file download-batch -s {} -d "{}" --snapshot {}', storage_account_info,
+                         src_share, local_folder, snapshot)
+        self.assertEqual(0, sum(len(f) for r, d, f in os.walk(local_folder)))
+
+        snapshot = self.storage_cmd('storage share snapshot -n {} ',
+                                    storage_account_info, src_share).get_output_in_json()["snapshot"]
+        self.storage_cmd('storage file download-batch -s {} -d "{}" --snapshot {}', storage_account_info,
+                         src_share, local_folder, snapshot)
+        self.assertEqual(41, sum(len(f) for r, d, f in os.walk(local_folder)))
+
+        local_folder = self.create_temp_dir()
+        share_url = self.storage_cmd('storage file url -s {} -p \'\' -otsv', storage_account_info,
+                                     src_share).output.strip()[:-1]
+        self.storage_cmd('storage file download-batch -s {} -d "{}" --pattern apple/* --snapshot {} ',
+                         storage_account_info, share_url, local_folder, snapshot)
+        self.assertEqual(10, sum(len(f) for r, d, f in os.walk(local_folder)))
+
     @ResourceGroupPreparer()
     @StorageAccountPreparer()
     @StorageTestFilesPreparer()
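For context, the --snapshot flag exercised by these tests can also be driven programmatically through azure-cli-core. The snippet below is an illustrative sketch only: the account name, share name, destination path, and snapshot timestamp are placeholders, and a real run additionally needs storage credentials (for example AZURE_STORAGE_KEY) in the environment.

# Illustrative sketch (not part of this PR): drive the new --snapshot flag via azure-cli-core.
# All names and the snapshot timestamp below are placeholders.
from azure.cli.core import get_default_cli

cli = get_default_cli()
exit_code = cli.invoke([
    'storage', 'file', 'download-batch',
    '--source', 'myshare',
    '--destination', './restored',
    '--pattern', 'apple/*',
    '--snapshot', '2019-08-01T00:00:00.0000000Z',
    '--account-name', 'mystorageaccount',
])
print('download-batch exit code:', exit_code)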
src/azure-cli/azure/cli/command_modules/storage/util.py (2 additions & 2 deletions)
@@ -87,15 +87,15 @@ def glob_files_locally(folder_path, pattern):
                 yield (full_path, full_path[len_folder_path:])
 
 
-def glob_files_remotely(cmd, client, share_name, pattern):
+def glob_files_remotely(cmd, client, share_name, pattern, snapshot=None):
     """glob the files in remote file share based on the given pattern"""
     from collections import deque
     t_dir, t_file = cmd.get_models('file.models#Directory', 'file.models#File')
 
     queue = deque([""])
     while queue:
         current_dir = queue.pop()
-        for f in client.list_directories_and_files(share_name, current_dir):
+        for f in client.list_directories_and_files(share_name, current_dir, snapshot=snapshot):
             if isinstance(f, t_file):
                 if not pattern or _match_path(os.path.join(current_dir, f.name), pattern):
                     yield current_dir, f.name
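A minimal, self-contained sketch of the traversal this change extends: a queue-driven walk of the share that now forwards an optional snapshot identifier to every listing call. MockFileClient and glob_files_remotely_sketch are hypothetical illustrations, not the real SDK client or the CLI helper itself.

# Minimal, self-contained sketch (assumptions, not the real SDK): the queue-driven
# walk that glob_files_remotely performs, now threading an optional snapshot
# identifier into every listing call. MockFileClient is a hypothetical stand-in
# for the file service client used by the CLI.
import fnmatch
from collections import deque


class MockFileClient:
    """Hypothetical client mapping (share_name, snapshot) to a nested dict tree."""

    def __init__(self, trees):
        self._trees = trees  # {(share, snapshot): {"dir": {...}, "file.txt": None}}

    def list_directories_and_files(self, share_name, directory, snapshot=None):
        node = self._trees[(share_name, snapshot)]
        for part in filter(None, directory.split('/')):
            node = node[part]
        return node.items()  # (name, subtree-or-None) pairs


def glob_files_remotely_sketch(client, share_name, pattern, snapshot=None):
    """Yield (directory, file_name) pairs matching pattern, honoring the snapshot."""
    queue = deque([""])
    while queue:
        current_dir = queue.pop()
        for name, subtree in client.list_directories_and_files(
                share_name, current_dir, snapshot=snapshot):
            path = '{}/{}'.format(current_dir, name) if current_dir else name
            if subtree is None:  # a file: emit it if the pattern matches
                if not pattern or fnmatch.fnmatch(path, pattern):
                    yield current_dir, name
            else:  # a directory: queue it for a later listing call
                queue.appendleft(path)


if __name__ == '__main__':
    live = {'apple': {'a.txt': None, 'b.txt': None}, 'readme.txt': None}
    snap = {'apple': {'a.txt': None}}
    ts = '2019-08-01T00:00:00.0000000Z'
    client = MockFileClient({('share1', None): live, ('share1', ts): snap})
    print(list(glob_files_remotely_sketch(client, 'share1', 'apple/*', snapshot=ts)))
    # -> [('apple', 'a.txt')]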