Skip to content

Commit

Permalink
Adopt large block blob (#2698)
Browse files Browse the repository at this point in the history
1. Maximize the upload block size and single put size.
2. Remove the mitigation for storage Python SDK issue #190.
3. Update blob storage upload/download tests
4. Add live only tests for large blob download
5. Add live test base class
  • Loading branch information
troydai authored Apr 3, 2017
1 parent 8829361 commit ce75078
Show file tree
Hide file tree
Showing 35 changed files with 2,007 additions and 4,843 deletions.
1 change: 1 addition & 0 deletions .coveragerc
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ concurrency = multiprocessing
omit =
*/env/*
*/tests/*
doc/*
cover.py
source =
src/
Expand Down
8 changes: 5 additions & 3 deletions src/azure-cli-testsdk/azure/cli/testsdk/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,16 @@
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from .base import ScenarioTest
from .base import ScenarioTest, LiveTest
from .preparers import StorageAccountPreparer, ResourceGroupPreparer
from .exceptions import CliTestError
from .checkers import JMESPathCheck, JMESPathCheckExists, NoneCheck
from .decorators import live_only
from .utilities import get_sha1_hash

__all__ = ['ScenarioTest',
__all__ = ['ScenarioTest', 'LiveTest',
'ResourceGroupPreparer', 'StorageAccountPreparer',
'CliTestError',
'JMESPathCheck', 'JMESPathCheckExists', 'NoneCheck', 'live_only']
'JMESPathCheck', 'JMESPathCheckExists', 'NoneCheck', 'live_only',
'get_sha1_hash']
__version__ = '0.1.0+dev'
135 changes: 76 additions & 59 deletions src/azure-cli-testsdk/azure/cli/testsdk/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,84 @@
GeneralNameReplacer, LargeRequestBodyProcessor,
LargeResponseBodyProcessor, LargeResponseBodyReplacer)
from .utilities import create_random_name
from .decorators import live_only


class ScenarioTest(unittest.TestCase): # pylint: disable=too-many-instance-attributes
class IntegrationTestBase(unittest.TestCase):
    """Common base class for CLI integration tests.

    Provides command execution with optional diagnostics, temp file/dir
    helpers that clean themselves up, and environment variable helpers.
    """

    def __init__(self, method_name):
        super(IntegrationTestBase, self).__init__(method_name)
        # Behavior toggles are read from the environment once per test case.
        self.diagnose = os.environ.get(ENV_TEST_DIAGNOSE, None) == 'True'
        self.skip_assert = os.environ.get(ENV_SKIP_ASSERT, None) == 'True'

    def cmd(self, command, checks=None):
        """Execute a CLI command and run the given checks against the result.

        :param command: the command line to execute.
        :param checks: a single callable or a list of callables, each invoked
            with the execution result. Skipped when ENV_SKIP_ASSERT is set.
        :return: the execution result object.
        """
        if self.diagnose:
            begin = datetime.datetime.now()
            print('\nExecuting command: {}'.format(command))

        result = execute(command)

        if self.diagnose:
            duration = datetime.datetime.now() - begin
            print('\nCommand accomplished in {} s. Exit code {}.\n{}'.format(
                duration.total_seconds(), result.exit_code, result.output))

        # Normalize checks to a list so a single callable is also accepted.
        if not checks:
            checks = []
        elif not isinstance(checks, list):
            checks = [checks]

        if not self.skip_assert:
            for c in checks:
                c(result)

        return result

    def create_random_name(self, prefix, length):  # for override pylint: disable=no-self-use
        """Return a random resource name; subclasses may override."""
        return create_random_name(prefix, length)

    def create_temp_file(self, size_kb, full_random=False):
        """
        Create a temporary file for testing. The test harness will delete the file during tearing
        down.

        :param size_kb: file size in kilobytes.
        :param full_random: when True, fill the file with random bytes;
            otherwise fill it with zeros.
        :return: path of the created file.
        """
        fd, path = tempfile.mkstemp()
        # mkstemp leaves the descriptor open; close it so it is not leaked.
        os.close(fd)
        self.addCleanup(lambda: os.remove(path))

        with open(path, mode='r+b') as f:
            zero_chunk = bytearray(1024)
            for _ in range(size_kb):
                # Regenerate the random chunk on every iteration so the whole
                # file is random, not a single random kilobyte repeated.
                f.write(os.urandom(1024) if full_random else zero_chunk)

        return path

    def create_temp_dir(self):
        """
        Create a temporary directory for testing. The test harness will delete the directory during
        tearing down.
        """
        temp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(temp_dir, ignore_errors=True))

        return temp_dir

    @classmethod
    def set_env(cls, key, val):
        """Set an environment variable for the duration of the test."""
        os.environ[key] = val

    @classmethod
    def pop_env(cls, key):
        """Remove and return an environment variable, or None if unset."""
        return os.environ.pop(key, None)


@live_only()
class LiveTest(IntegrationTestBase):
    """Base class for tests that run only in live mode.

    The live_only decorator skips the whole class unless the live-test
    environment variable is set — presumably because these tests bypass
    the vcrpy recording machinery entirely (NOTE(review): confirm against
    decorators.py).
    """
    pass


class ScenarioTest(IntegrationTestBase): # pylint: disable=too-many-instance-attributes
FILTER_HEADERS = [
'authorization',
'client-request-id',
Expand Down Expand Up @@ -67,8 +142,6 @@ def __init__(self, method_name):
if live_test and os.path.exists(self.recording_file):
os.remove(self.recording_file)

self.diagnose = os.environ.get(ENV_TEST_DIAGNOSE, None) == 'True'
self.skip_assert = os.environ.get(ENV_SKIP_ASSERT, None) == 'True'
self.in_recording = live_test or not os.path.exists(self.recording_file)
self.test_resources_count = 0
self.original_env = os.environ.copy()
Expand Down Expand Up @@ -103,62 +176,6 @@ def create_random_name(self, prefix, length):
else:
return moniker

def cmd(self, command, checks=None):
if self.diagnose:
begin = datetime.datetime.now()
print('\nExecuting command: {}'.format(command))

result = execute(command)

if self.diagnose:
duration = datetime.datetime.now() - begin
print('\nCommand accomplished in {} s. Exit code {}.\n{}'.format(
duration.total_seconds(), result.exit_code, result.output))

if not checks:
checks = []
elif not isinstance(checks, list):
checks = [checks]

if not self.skip_assert:
for c in checks:
c(result)

return result

def create_temp_file(self, size_kb):
"""
Create a temporary file for testing. The test harness will delete the file during tearing
down.
"""
_, path = tempfile.mkstemp()
self.addCleanup(lambda: os.remove(path))

with open(path, mode='r+b') as f:
chunk = bytearray([0] * 1024)
for _ in range(size_kb):
f.write(chunk)

return path

def create_temp_dir(self):
"""
Create a temporary directory for testing. The test harness will delete the directory during
tearing down.
"""
temp_dir = tempfile.mkdtemp()
self.addCleanup(lambda: shutil.rmtree(temp_dir, ignore_errors=True))

return temp_dir

@classmethod
def set_env(cls, key, val):
os.environ[key] = val

@classmethod
def pop_env(cls, key):
return os.environ.pop(key, None)

def _process_request_recording(self, request):
if self.in_recording:
for processor in self.recording_processors:
Expand Down
2 changes: 1 addition & 1 deletion src/azure-cli-testsdk/azure/cli/testsdk/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,4 +11,4 @@
def live_only():
return unittest.skipUnless(
os.environ.get(ENV_LIVE_TEST, False),
'This test is designed to live test only.')
'This is a live only test. A live test will bypass all vcrpy components.')
9 changes: 4 additions & 5 deletions src/azure-cli-testsdk/azure/cli/testsdk/preparers.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,17 +22,16 @@ def __init__(self, name_prefix, name_len):
self.resource_moniker = None
self.resource_random_name = None
self.test_class_instance = None
self.live_test = False

def __call__(self, fn):
def _preparer_wrapper(test_class_instance, **kwargs):
if not isinstance(test_class_instance, ScenarioTest):
raise CliTestError('The preparer decorator can be only used on the methods of '
'class derived from {}'.format(ScenarioTest.__name__))
self.live_test = not isinstance(test_class_instance, ScenarioTest)
self.test_class_instance = test_class_instance

if test_class_instance.in_recording:
if self.live_test or test_class_instance.in_recording:
resource_name = self.random_name
if isinstance(self, RecordingProcessor):
if not self.live_test and isinstance(self, RecordingProcessor):
test_class_instance.recording_processors.append(self)
else:
resource_name = self.moniker
Expand Down
12 changes: 12 additions & 0 deletions src/azure-cli-testsdk/azure/cli/testsdk/utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,15 @@ def create_random_name(prefix='clitest', length=24):
datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')).encode('utf-8')
prefix += str(hashlib.sha256(identity).hexdigest())
return prefix[:length]


def get_sha1_hash(file_path):
    """Return the hexadecimal SHA-1 digest of the file at file_path.

    Reads the file in 64 KiB chunks so arbitrarily large files can be
    hashed without loading them into memory at once.
    """
    # Bug fix: the original constructed hashlib.sha256() despite the
    # function's name and contract promising a SHA-1 digest.
    sha1 = hashlib.sha1()
    with open(file_path, 'rb') as f:
        while True:
            data = f.read(65536)
            if not data:
                break
            sha1.update(data)

    return sha1.hexdigest()
Original file line number Diff line number Diff line change
Expand Up @@ -347,10 +347,6 @@ def register_source_uri_arguments(scope):
group.reg_arg('source_share')
group.reg_arg('prefix', validator=process_blob_copy_batch_namespace)

# TODO: Remove workaround when Python storage SDK issue #190 is fixed.
for item in ['upload', 'upload-batch']:
register_cli_argument('storage blob {}'.format(item), 'max_connections', type=int, help='Maximum number of parallel connections to use when the blob size exceeds 64MB.', default=1)

# FILE UPLOAD-BATCH PARAMETERS
with CommandContext('storage file upload-batch') as c:
c.reg_arg('source', options_list=('--source', '-s'), validator=process_file_upload_batch_parameters)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,9 @@ def upload_append_blob():
timeout=timeout)

def upload_block_blob():
client.MAX_BLOCK_SIZE = 100 * 1024 * 1024
client.MAX_SINGLE_PUT_SIZE = 256 * 1024 * 1024

return client.create_blob_from_path(
container_name=container_name,
blob_name=blob_name,
Expand Down
Loading

0 comments on commit ce75078

Please sign in to comment.