From fde9bd49b8178097a401382a7f6e67ca6b83a542 Mon Sep 17 00:00:00 2001
From: Daniel Miranda
Date: Mon, 13 May 2019 20:29:59 -0300
Subject: [PATCH] hooks: lambda: allow uploading pre-built payloads (#564)

* hooks: lambda: rewrite tests using pytest
* hooks: lambda: add support for prebuilt payloads
* tests: hooks: lambda: fix Python 2.7 and 3.5 compat
---
 stacker/hooks/aws_lambda.py            |  98 ++-
 stacker/tests/hooks/test_aws_lambda.py | 864 +++++++++++++------------
 2 files changed, 533 insertions(+), 429 deletions(-)

diff --git a/stacker/hooks/aws_lambda.py b/stacker/hooks/aws_lambda.py
index 4b388f40c..5832559e5 100644
--- a/stacker/hooks/aws_lambda.py
+++ b/stacker/hooks/aws_lambda.py
@@ -100,6 +100,18 @@ def _calculate_hash(files, root):
     return file_hash.hexdigest()
 
 
+def _calculate_prebuilt_hash(f):
+    file_hash = hashlib.md5()
+    while True:
+        chunk = f.read(4096)
+        if not chunk:
+            break
+
+        file_hash.update(chunk)
+
+    return file_hash.hexdigest()
+
+
 def _find_files(root, includes, excludes, follow_symlinks):
     """List files inside a directory based on include and exclude rules.
 
@@ -272,6 +284,38 @@ def _check_pattern_list(patterns, key, default=None):
                      'list of strings'.format(key))
 
 
+def _upload_prebuilt_zip(s3_conn, bucket, prefix, name, options, path,
+                         payload_acl):
+    logger.debug('lambda: using prebuilt ZIP %s', path)
+
+    with open(path, 'rb') as zip_file:
+        # Default to the MD5 of the ZIP if no explicit version is provided
+        version = options.get('version')
+        if not version:
+            version = _calculate_prebuilt_hash(zip_file)
+            zip_file.seek(0)
+
+        return _upload_code(s3_conn, bucket, prefix, name, zip_file,
+                            version, payload_acl)
+
+
+def _build_and_upload_zip(s3_conn, bucket, prefix, name, options, path,
+                          follow_symlinks, payload_acl):
+    includes = _check_pattern_list(options.get('include'), 'include',
+                                   default=['**'])
+    excludes = _check_pattern_list(options.get('exclude'), 'exclude',
+                                   default=[])
+
+    # os.path.join will ignore other parameters if the right-most one is an
+    # absolute path, which is exactly what we want.
+    zip_contents, zip_version = _zip_from_file_patterns(
+        path, includes, excludes, follow_symlinks)
+    version = options.get('version') or zip_version
+
+    return _upload_code(s3_conn, bucket, prefix, name, zip_contents, version,
+                        payload_acl)
+
+
 def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks,
                      payload_acl):
     """Builds a Lambda payload from user configuration and uploads it to S3.
@@ -309,30 +353,27 @@ def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks,
     through.
     """
     try:
-        root = os.path.expanduser(options['path'])
+        path = os.path.expanduser(options['path'])
     except KeyError as e:
         raise ValueError(
             "missing required property '{}' in function '{}'".format(
                 e.args[0], name))
 
-    includes = _check_pattern_list(options.get('include'), 'include',
-                                   default=['**'])
-    excludes = _check_pattern_list(options.get('exclude'), 'exclude',
-                                   default=[])
+    if not os.path.isabs(path):
+        path = os.path.abspath(os.path.join(get_config_directory(), path))
 
-    logger.debug('lambda: processing function %s', name)
+    if path.endswith('.zip') and os.path.isfile(path):
+        logger.debug('lambda: using prebuilt zip: %s', path)
 
-    # os.path.join will ignore other parameters if the right-most one is an
-    # absolute path, which is exactly what we want.
-    if not os.path.isabs(root):
-        root = os.path.abspath(os.path.join(get_config_directory(), root))
-    zip_contents, content_hash = _zip_from_file_patterns(root,
-                                                         includes,
-                                                         excludes,
-                                                         follow_symlinks)
+        return _upload_prebuilt_zip(s3_conn, bucket, prefix, name, options,
+                                    path, payload_acl)
+    elif os.path.isdir(path):
+        logger.debug('lambda: building from directory: %s', path)
 
-    return _upload_code(s3_conn, bucket, prefix, name, zip_contents,
-                        content_hash, payload_acl)
+        return _build_and_upload_zip(s3_conn, bucket, prefix, name, options,
+                                     path, follow_symlinks, payload_acl)
+    else:
+        raise ValueError('Path must be an existing ZIP file or directory')
 
 
 def select_bucket_region(custom_bucket, hook_region, stacker_bucket_region,
@@ -400,14 +441,16 @@ def upload_lambda_functions(context, provider, **kwargs):
 
             * path (str):
 
-                Base directory of the Lambda function payload content.
+                Base directory or path of a ZIP file of the Lambda function
+                payload content.
+
                 If it is not an absolute path, it will be considered relative
                 to the directory containing the stacker configuration file
                 in use.
 
-                Files in this directory will be added to the payload ZIP,
-                according to the include and exclude patterns. If not
-                patterns are provided, all files in this directory
+                When a directory, the files it contains will be added to the
+                payload ZIP, according to the include and exclude patterns.
+                If no patterns are provided, all files in the directory
                 (respecting default exclusions) will be used.
 
                 Files are stored in the archive with path names relative to
@@ -415,6 +458,12 @@ def upload_lambda_functions(context, provider, **kwargs):
                 directly under this directory will be added to the root of
                 the ZIP file.
 
+                When a ZIP file, it will be uploaded directly to S3.
+                The hash of the whole ZIP file will be used as the version
+                key by default, which may cause spurious re-uploads when the
+                ZIP is built in different environments. To avoid that,
+                explicitly provide a `version` option.
+
             * include(str or list[str], optional):
 
                 Pattern or list of patterns of files to include in the
@@ -433,6 +482,15 @@ def upload_lambda_functions(context, provider, **kwargs):
                 such as ``.git``, ``.svn``, ``__pycache__``, ``*.pyc``,
                 ``.gitignore``, etc.
 
+            * version(str, optional):
+                Value to use as the version for the current function, which
+                will be used to determine if a payload already exists in
+                S3. The value can be any string, such as a version number
+                or a git commit.
+
+                Note that when setting this value, you must change it
+                manually to force a payload to be re-built and re-uploaded.
+
     Examples:
         .. Hook configuration.
        .. code-block:: yaml

diff --git a/stacker/tests/hooks/test_aws_lambda.py b/stacker/tests/hooks/test_aws_lambda.py
index 67acc934d..6c2bc948f 100644
--- a/stacker/tests/hooks/test_aws_lambda.py
+++ b/stacker/tests/hooks/test_aws_lambda.py
@@ -6,7 +6,6 @@ from builtins import range
 import os.path
 import os
-import unittest
 import mock
 import random
 from io import BytesIO as StringIO
 
@@ -14,481 +13,528 @@
 import boto3
 import botocore
-from troposphere.awslambda import Code
+import pytest
 from moto import mock_s3
-from testfixtures import TempDirectory, ShouldRaise, compare
+from troposphere.awslambda import Code
 
-from stacker.context import Context
-from stacker.config import Config
 from stacker.hooks.aws_lambda import (
-    upload_lambda_functions,
     ZIP_PERMS_MASK,
     _calculate_hash,
     select_bucket_region,
+    upload_lambda_functions,
 )
-from ..factories import mock_provider
+from ..factories import mock_context, mock_provider
 
 REGION = "us-east-1"
 
 
-ALL_FILES = (
-    'f1/f1.py',
-    'f1/f1.pyc',
-    'f1/__init__.py',
-    'f1/test/__init__.py',
-    'f1/test/f1.py',
-    'f1/test/f1.pyc',
-    'f1/test2/test.txt',
-    'f2/f2.js'
-)
-F1_FILES = [p[3:] for p in ALL_FILES if p.startswith('f1')]
-F2_FILES = [p[3:] for p in ALL_FILES if p.startswith('f2')]
-
-
-class TestLambdaHooks(unittest.TestCase):
-    @classmethod
-    def temp_directory_with_files(cls, files=ALL_FILES):
-        d = TempDirectory()
-        for f in files:
-            d.write(f, b'')
-        return d
-
-    @property
-    def s3(self):
-        if not hasattr(self, '_s3'):
-            self._s3 = boto3.client('s3', region_name=REGION)
-        return self._s3
-
-    def assert_s3_zip_file_list(self, bucket, key, files):
-        object_info = self.s3.get_object(Bucket=bucket, Key=key)
-        zip_data = StringIO(object_info['Body'].read())
-
-        found_files = set()
-        with ZipFile(zip_data, 'r') as zip_file:
-            for zip_info in zip_file.infolist():
-                perms = (zip_info.external_attr & ZIP_PERMS_MASK) >> 16
-                self.assertIn(perms, (0o755, 0o644),
-                              'ZIP member permission must be 755 or 644')
-                found_files.add(zip_info.filename)
-
-        compare(found_files, set(files))
-
-    def assert_s3_bucket(self, bucket, present=True):
-        try:
-            self.s3.head_bucket(Bucket=bucket)
-            if not present:
-                self.fail('s3: bucket {} should not exist'.format(bucket))
-        except botocore.exceptions.ClientError as e:
-            if e.response['Error']['Code'] == '404':
-                if present:
-                    self.fail('s3: bucket {} does not exist'.format(bucket))
-
-    def setUp(self):
-        self.context = Context(
-            config=Config({'namespace': 'test', 'stacker_bucket': 'test'}))
-        self.provider = mock_provider(region="us-east-1")
-
-    def run_hook(self, **kwargs):
-        real_kwargs = {
-            'context': self.context,
-            'provider': self.provider,
-        }
-        real_kwargs.update(kwargs)
-        return upload_lambda_functions(**real_kwargs)
-
-    @mock_s3
-    def test_bucket_default(self):
-        self.assertIsNotNone(
-            self.run_hook(functions={}))
-
-        self.assert_s3_bucket('test')
-
-    @mock_s3
-    def test_bucket_custom(self):
-        self.assertIsNotNone(
-            self.run_hook(bucket='custom', functions={}))
-
-        self.assert_s3_bucket('test', present=False)
-        self.assert_s3_bucket('custom')
+@pytest.fixture
+def all_files(tmpdir):
+    files = (
+        'f1/f1.py',
+        'f1/f1.pyc',
+        'f1/__init__.py',
+        'f1/test/__init__.py',
+        'f1/test/f1.py',
+        'f1/test/f1.pyc',
+        'f1/test2/test.txt',
+        'f2/f2.js'
+    )
+
+    def create():
+        for file in files:
+            f = tmpdir.join(file)
+            f.write(b'', ensure=True)
+            yield f
+
+    return list(create())
+
+
+@pytest.fixture
+def f1_files(tmpdir, all_files):
+    return [p for p in all_files if p.relto(tmpdir).startswith('f1')]
+
+
+@pytest.fixture
+def f2_files(tmpdir, all_files):
+    return [p for p in all_files if p.relto(tmpdir).startswith('f2')]
+
+
+@pytest.fixture(scope='package')
+def prebuilt_zip(stacker_fixture_dir):
+    path = stacker_fixture_dir.join('test.zip')
+    content = path.read_binary()
+    md5 = 'c6fb602d9bde5a522856adabe9949f63'
+    return dict(path=path, md5=md5, contents=content)
+
+
+@pytest.fixture(autouse=True)
+def s3():
+    with mock_s3():
+        yield boto3.client('s3', region_name=REGION)
+
+
+def assert_s3_zip_file_list(s3, bucket, key, files, root=None):
+    object_info = s3.get_object(Bucket=bucket, Key=key)
+    zip_data = StringIO(object_info['Body'].read())
+
+    expected_files = set()
+    for f in files:
+        rel_path = os.path.relpath(str(f), str(root)) if root else str(f)
+        expected_files.add(rel_path)
+
+    found_files = set()
+    with ZipFile(zip_data, 'r') as zip_file:
+        for zip_info in zip_file.infolist():
+            perms = (zip_info.external_attr & ZIP_PERMS_MASK) >> 16
+            assert perms in (0o755, 0o644)
+            found_files.add(zip_info.filename)
+
+    assert found_files == set(expected_files)
+
+
+def assert_s3_zip_contents(s3, bucket, key, contents):
+    object_info = s3.get_object(Bucket=bucket, Key=key)
+    zip_data = object_info['Body'].read()
+
+    assert zip_data == contents
+
+
+def assert_s3_bucket(s3, bucket, present=True):
+    try:
+        s3.head_bucket(Bucket=bucket)
+    except botocore.exceptions.ClientError as e:
+        if e.response['Error']['Code'] == '404':
+            if present:
+                pytest.fail('s3: bucket {} does not exist'.format(bucket))
+        else:
+            raise
+    else:
+        if not present:
+            pytest.fail('s3: bucket {} should not exist'.format(bucket))
+
+
+@pytest.fixture
+def context():
+    return mock_context()
+
+
+@pytest.fixture
+def provider():
+    return mock_provider(region=REGION)
+
+
+@pytest.fixture
+def run_hook(context, provider):
+    def run(**kwargs):
+        return upload_lambda_functions(context=context, provider=provider,
+                                       **kwargs)
+
+    return run
+
+
+def test_bucket_default(s3, context, run_hook):
+    result = run_hook(functions={})
+    assert result is not None
+
+    assert_s3_bucket(s3, context.bucket_name, present=True)
+
+
+def test_bucket_custom(s3, context, run_hook):
+    result = run_hook(bucket='custom', functions={})
+    assert result is not None
+
+    assert_s3_bucket(s3, context.bucket_name, present=False)
+    assert_s3_bucket(s3, 'custom', present=True)
 
-    @mock_s3
-    def test_prefix(self):
-        with self.temp_directory_with_files() as d:
-            results = self.run_hook(prefix='cloudformation-custom-resources/',
-                                    functions={
-                                        'MyFunction': {
-                                            'path': d.path + '/f1'
-                                        }
-                                    })
-
-            self.assertIsNotNone(results)
-            code = results.get('MyFunction')
-            self.assertIsInstance(code, Code)
-            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, F1_FILES)
-            self.assertTrue(code.S3Key.startswith(
-                'cloudformation-custom-resources/lambda-MyFunction-'))
+
+def test_prefix(tmpdir, s3, all_files, f1_files, run_hook):
+    root = tmpdir.join('f1')
+    results = run_hook(
+        prefix='cloudformation-custom-resources/',
+        functions={
+            'MyFunction': {
+                'path': str(root)
+            }
+        })
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, f1_files, root=root)
+    assert code.S3Key.startswith(
+        'cloudformation-custom-resources/lambda-MyFunction-')
 
-    @mock_s3
-    def test_prefix_missing(self):
-        with self.temp_directory_with_files() as d:
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': d.path + '/f1'
-                }
-            })
-
-            self.assertIsNotNone(results)
-            code = results.get('MyFunction')
-            self.assertIsInstance(code, Code)
-            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, F1_FILES)
-            self.assertTrue(code.S3Key.startswith('lambda-MyFunction-'))
+
+def test_prefix_missing(tmpdir, s3, all_files, f1_files, run_hook):
+    root = tmpdir.join('f1')
+    results = run_hook(
+        functions={
+            'MyFunction': {
+                'path': str(root)
+            }
+        }
+    )
+
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, f1_files,
+                            root=root)
+    assert code.S3Key.startswith('lambda-MyFunction-')
 
-    @mock_s3
-    def test_path_missing(self):
-        msg = "missing required property 'path' in function 'MyFunction'"
-        with ShouldRaise(ValueError(msg)):
-            self.run_hook(functions={
-                'MyFunction': {
-                }
-            })
+
+def test_path_missing(run_hook):
+    msg = "missing required property 'path' in function 'MyFunction'"
+    with pytest.raises(ValueError, match=msg):
+        run_hook(
+            functions={
+                'MyFunction': {
+                }
+            }
+        )
+
+
+def test_path_non_zip_non_dir(tmpdir, all_files, run_hook):
+    root = tmpdir
+    msg = 'Path must be an existing ZIP file or directory'
+    with pytest.raises(ValueError, match=msg):
+        run_hook(
+            functions={
+                'MyFunction': {
+                    'path': str(root.join('test.txt'))
+                }
+            }
+        )
 
-    @mock_s3
-    def test_path_relative(self):
-        get_config_directory = 'stacker.hooks.aws_lambda.get_config_directory'
-        with self.temp_directory_with_files(['test/test.py']) as d, \
-                mock.patch(get_config_directory) as m1:
-            m1.return_value = d.path
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': 'test'
-                }
-            })
-
-            self.assertIsNotNone(results)
-
-            code = results.get('MyFunction')
-            self.assertIsInstance(code, Code)
-            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, ['test.py'])
+
+def test_path_relative(tmpdir, s3, run_hook):
+    root = tmpdir
+    root.join('test/test.py').write(b'', ensure=True)
+
+    get_config_directory = 'stacker.hooks.aws_lambda.get_config_directory'
+    with mock.patch(get_config_directory, return_value=str(root)):
+        results = run_hook(
+            functions={
+                'MyFunction': {
+                    'path': 'test'
+                }
+            }
+        )
+
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, ['test.py'])
 
-    @mock_s3
-    def test_path_home_relative(self):
-        test_path = '~/test'
-
-        orig_expanduser = os.path.expanduser
-        with self.temp_directory_with_files(['test.py']) as d, \
-                mock.patch('os.path.expanduser') as m1:
-            m1.side_effect = lambda p: (d.path if p == test_path
-                                        else orig_expanduser(p))
-
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': test_path
-                }
-            })
-
-            self.assertIsNotNone(results)
-
-            code = results.get('MyFunction')
-            self.assertIsInstance(code, Code)
-            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, ['test.py'])
+
+def test_path_home_relative(tmpdir, s3, run_hook):
+    root = tmpdir
+    test_path = '~/test'
+
+    orig_expanduser = os.path.expanduser
+    tmpdir.join('test.py').write(b'')
+
+    def expanduser(path):
+        return str(root) if path == test_path else orig_expanduser(path)
+
+    with mock.patch('os.path.expanduser', side_effect=expanduser):
+        results = run_hook(
+            functions={
+                'MyFunction': {
+                    'path': test_path
+                }
+            }
+        )
+
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, ['test.py'])
 
-    @mock_s3
-    def test_multiple_functions(self):
-        with self.temp_directory_with_files() as d:
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': d.path + '/f1'
-                },
-                'OtherFunction': {
-                    'path': d.path + '/f2'
-                }
-            })
-
-            self.assertIsNotNone(results)
-
-            f1_code = results.get('MyFunction')
-            self.assertIsInstance(f1_code, Code)
-            self.assert_s3_zip_file_list(f1_code.S3Bucket, f1_code.S3Key, F1_FILES)
-
-            f2_code = results.get('OtherFunction')
-            self.assertIsInstance(f2_code, Code)
-            self.assert_s3_zip_file_list(f2_code.S3Bucket, f2_code.S3Key, F2_FILES)
+
+def test_multiple_functions(tmpdir, s3, all_files, f1_files, f2_files,
+                            run_hook):
+    root1 = tmpdir.join('f1')
+    root2 = tmpdir.join('f2')
+    results = run_hook(
+        functions={
+            'MyFunction': {
+                'path': str(root1)
+            },
+            'OtherFunction': {
+                'path': str(root2)
+            }
+        }
+    )
+
+    assert results is not None
+
+    f1_code = results.get('MyFunction')
+    assert isinstance(f1_code, Code)
+    assert_s3_zip_file_list(s3, f1_code.S3Bucket, f1_code.S3Key, f1_files,
+                            root=root1)
+
+    f2_code = results.get('OtherFunction')
+    assert isinstance(f2_code, Code)
+    assert_s3_zip_file_list(s3, f2_code.S3Bucket, f2_code.S3Key, f2_files,
+                            root=root2)
 
-    @mock_s3
-    def test_patterns_invalid(self):
-        msg = ("Invalid file patterns in key 'include': must be a string or "
-               'list of strings')
-
-        with ShouldRaise(ValueError(msg)):
-            self.run_hook(functions={
-                'MyFunction': {
-                    'path': 'test',
-                    'include': {'invalid': 'invalid'}
-                }
-            })
+
+def test_patterns_invalid(tmpdir, run_hook):
+    root = tmpdir
+
+    msg = ("Invalid file patterns in key 'include': must be a string or "
+           'list of strings')
+    with pytest.raises(ValueError, match=msg):
+        run_hook(
+            functions={
+                'MyFunction': {
+                    'path': str(root),
+                    'include': {'invalid': 'invalid'}
+                }
+            }
+        )
 
-    @mock_s3
-    def test_patterns_include(self):
-        with self.temp_directory_with_files() as d:
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': d.path + '/f1',
-                    'include': ['*.py', 'test2/']
-                }
-            })
-
-            self.assertIsNotNone(results)
-
-            code = results.get('MyFunction')
-            self.assertIsInstance(code, Code)
-            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [
-                'f1.py',
-                '__init__.py',
-                'test/__init__.py',
-                'test/f1.py',
-                'test2/test.txt'
-            ])
+
+def test_patterns_include(tmpdir, s3, all_files, run_hook):
+    root = tmpdir.join('f1')
+    results = run_hook(
+        functions={
+            'MyFunction': {
+                'path': str(root),
+                'include': ['*.py', 'test2/']
+            }
+        }
+    )
+
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, [
+        'f1.py',
+        '__init__.py',
+        'test/__init__.py',
+        'test/f1.py',
+        'test2/test.txt'
+    ])
 
-    @mock_s3
-    def test_patterns_exclude(self):
-        with self.temp_directory_with_files() as d:
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': d.path + '/f1',
-                    'exclude': ['*.pyc', 'test/']
-                }
-            })
-
-            self.assertIsNotNone(results)
-
-            code = results.get('MyFunction')
-            self.assertIsInstance(code, Code)
-            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [
-                'f1.py',
-                '__init__.py',
-                'test2/test.txt'
-            ])
+
+def test_patterns_exclude(tmpdir, s3, all_files, run_hook):
+    root = tmpdir.join('f1')
+    results = run_hook(
+        functions={
+            'MyFunction': {
+                'path': str(root),
+                'exclude': ['*.pyc', 'test/']
+            }
+        }
+    )
+
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, [
+        'f1.py',
+        '__init__.py',
+        'test2/test.txt'
+    ])
 
-    @mock_s3
-    def test_patterns_include_exclude(self):
-        with self.temp_directory_with_files() as d:
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': d.path + '/f1',
-                    'include': '*.py',
-                    'exclude': 'test/'
-                }
-            })
-
-            self.assertIsNotNone(results)
-
-            code = results.get('MyFunction')
-            self.assertIsInstance(code, Code)
-            self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [
-                'f1.py',
-                '__init__.py'
-            ])
+
+@mock_s3
+def test_patterns_include_exclude(tmpdir, s3, all_files, run_hook):
+    root = tmpdir.join('f1')
+    results = run_hook(functions={
+        'MyFunction': {
+            'path': str(root),
+            'include': '*.py',
+            'exclude': 'test/'
+        }
+    })
+
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, [
+        'f1.py',
+        '__init__.py'
+    ])
 
-    @mock_s3
-    def test_patterns_exclude_all(self):
-        msg = ('Empty list of files for Lambda payload. Check your '
-               'include/exclude options for errors.')
-
-        with self.temp_directory_with_files() as d, \
-                ShouldRaise(RuntimeError(msg)):
-
-            results = self.run_hook(functions={
-                'MyFunction': {
-                    'path': d.path + '/f1',
-                    'exclude': ['**']
-                }
-            })
-
-            self.assertIsNone(results)
+
+def test_patterns_exclude_all(tmpdir, all_files, run_hook):
+    root = tmpdir.join('f1')
+
+    msg = ('Empty list of files for Lambda payload. Check your '
+           'include/exclude options for errors.')
+    with pytest.raises(RuntimeError, match=msg):
+        run_hook(
+            functions={
+                'MyFunction': {
+                    'path': str(root),
+                    'exclude': ['**']
+                }
+            }
+        )
 
-    @mock_s3
-    def test_idempotence(self):
-        bucket_name = 'test'
-
-        with self.temp_directory_with_files() as d:
-            functions = {
-                'MyFunction': {
-                    'path': d.path + '/f1'
-                }
-            }
-
-            self.s3.create_bucket(Bucket=bucket_name)
-
-            previous = None
-            for i in range(2):
-                results = self.run_hook(bucket=bucket_name,
-                                        functions=functions)
-                self.assertIsNotNone(results)
-
-                code = results.get('MyFunction')
-                self.assertIsInstance(code, Code)
-
-                if not previous:
-                    previous = code.S3Key
-                    continue
-
-                compare(previous, code.S3Key,
-                        prefix="zipfile name should not be modified in "
-                               "repeated runs.")
+
+def test_idempotence(tmpdir, s3, all_files, run_hook):
+    root = tmpdir.join('f1')
+
+    bucket_name = 'test'
+    functions = {
+        'MyFunction': {
+            'path': str(root)
+        }
+    }
+
+    s3.create_bucket(Bucket=bucket_name)
+
+    previous = None
+    for i in range(2):
+        results = run_hook(bucket=bucket_name, functions=functions)
+        assert results is not None
+
+        code = results.get('MyFunction')
+        assert isinstance(code, Code)
+
+        if not previous:
+            previous = code.S3Key
+            continue
+
+        assert previous == code.S3Key
 
-    def test_calculate_hash(self):
-        with self.temp_directory_with_files() as d1:
-            root = d1.path
-            hash1 = _calculate_hash(ALL_FILES, root)
-
-        with self.temp_directory_with_files() as d2:
-            root = d2.path
-            hash2 = _calculate_hash(ALL_FILES, root)
-
-        with self.temp_directory_with_files() as d3:
-            root = d3.path
-            with open(os.path.join(root, ALL_FILES[0]), "w") as fd:
-                fd.write("modified file data")
-            hash3 = _calculate_hash(ALL_FILES, root)
-
-        self.assertEqual(hash1, hash2)
-        self.assertNotEqual(hash1, hash3)
-        self.assertNotEqual(hash2, hash3)
+
+def test_calculate_hash(tmpdir, all_files, f1_files, f2_files):
+    root = tmpdir
+
+    all_hash_1 = _calculate_hash(map(str, all_files), str(root))
+    all_hash_2 = _calculate_hash(map(str, all_files), str(root))
+    f1_hash = _calculate_hash(map(str, f1_files), str(root))
+    f2_hash = _calculate_hash(map(str, f2_files), str(root))
+
+    assert all_hash_1 == all_hash_2
+    assert f1_hash != all_hash_1
+    assert f2_hash != all_hash_1
+    assert f1_hash != f2_hash
 
-    def test_calculate_hash_diff_filename_same_contents(self):
-        files = ["file1.txt", "f2/file2.txt"]
-        file1, file2 = files
-        with TempDirectory() as d:
-            root = d.path
-            for fname in files:
-                d.write(fname, b"data")
-            hash1 = _calculate_hash([file1], root)
-            hash2 = _calculate_hash([file2], root)
-        self.assertNotEqual(hash1, hash2)
+
+def test_calculate_hash_diff_filename_same_contents(tmpdir, all_files):
+    root = tmpdir
+
+    files = all_files[:2]
+    tmpdir.join(files[0]).write('data', ensure=True)
+    tmpdir.join(files[1]).write('data', ensure=True)
+
+    hash1 = _calculate_hash([str(files[0])], str(root))
+    hash2 = _calculate_hash([str(files[1])], str(root))
+
+    assert hash1 != hash2
 
-    def test_calculate_hash_different_ordering(self):
-        files1 = ALL_FILES
-        files2 = random.sample(ALL_FILES, k=len(ALL_FILES))
-        with TempDirectory() as d1:
-            root1 = d1.path
-            for fname in files1:
-                d1.write(fname, b"")
-            with TempDirectory() as d2:
-                root2 = d2.path
-                for fname in files2:
-                    d2.write(fname, b"")
-                hash1 = _calculate_hash(files1, root1)
-                hash2 = _calculate_hash(files2, root2)
-                self.assertEqual(hash1, hash2)
+
+def test_calculate_hash_different_ordering(tmpdir, all_files):
+    root = tmpdir
+
+    all_files_diff_order = random.sample(all_files, k=len(all_files))
+    hash1 = _calculate_hash(map(str, all_files), str(root))
+    hash2 = _calculate_hash(map(str, all_files_diff_order), str(root))
+    assert hash1 == hash2
 
-    def test_select_bucket_region(self):
-        tests = (
-            (("myBucket", "us-east-1", "us-west-1", "eu-west-1"), "us-east-1"),
-            (("myBucket", None, "us-west-1", "eu-west-1"), "eu-west-1"),
-            ((None, "us-east-1", "us-west-1", "eu-west-1"), "us-west-1"),
-            ((None, "us-east-1", None, "eu-west-1"), "eu-west-1"),
-        )
-
-        for args, result in tests:
-            self.assertEqual(select_bucket_region(*args), result)
+
+@pytest.mark.parametrize(
+    'case',
+    [
+        dict(
+            custom_bucket="myBucket",
+            hook_region="us-east-1",
+            stacker_bucket_region="us-west-1",
+            provider_region="eu-west-1",
+            result="us-east-1"),
+        dict(
+            custom_bucket="myBucket",
+            hook_region=None,
+            stacker_bucket_region="us-west-1",
+            provider_region="eu-west-1",
+            result="eu-west-1"),
+        dict(
+            custom_bucket=None,
+            hook_region="us-east-1",
+            stacker_bucket_region="us-west-1",
+            provider_region="eu-west-1",
+            result="us-west-1"),
+        dict(
+            custom_bucket=None,
+            hook_region="us-east-1",
+            stacker_bucket_region=None,
+            provider_region="eu-west-1",
+            result="eu-west-1")
+    ]
+)
+def test_select_bucket_region(case):
+    result = case.pop('result')
+    assert select_bucket_region(**case) == result
 
-    @mock_s3
-    def test_follow_symlink_nonbool(self):
-        msg = "follow_symlinks option must be a boolean"
-        with ShouldRaise(ValueError(msg)):
-            self.run_hook(follow_symlinks="raiseValueError", functions={
-                'MyFunction': {
-                }
-            })
+
+def test_follow_symlink_nonbool(run_hook):
+    msg = "follow_symlinks option must be a boolean"
+    with pytest.raises(ValueError, match=msg):
+        run_hook(
+            follow_symlinks="raiseValueError",
+            functions={
+                'MyFunction': {
+                }
+            }
+        )
+
+
+@pytest.fixture
+def linked_dir(tmpdir):
+    linked_dir = tmpdir.join('linked')
+    linked_dir.mksymlinkto(tmpdir.join('f1'))
+    return linked_dir
 
-    @mock_s3
-    def test_follow_symlink_true(self):
-        # Testing if symlinks are followed
-        with self.temp_directory_with_files() as d1:
-            root1 = d1.path
-            with self.temp_directory_with_files() as d2:
-                root2 = d2.path
-                os.symlink(root1 + "/f1", root2 + "/f3")
-                results = self.run_hook(follow_symlinks=True, functions={
-                    'MyFunction': {
-                        'path': root2}
-                })
-                self.assertIsNotNone(results)
-
-                code = results.get('MyFunction')
-                self.assertIsInstance(code, Code)
-                self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [
-                    'f1/f1.py',
-                    'f1/__init__.py',
-                    'f1/f1.pyc',
-                    'f1/test/__init__.py',
-                    'f1/test/f1.py',
-                    'f1/test/f1.pyc',
-                    'f1/test2/test.txt',
-                    'f2/f2.js',
-                    'f3/__init__.py',
-                    'f3/f1.py',
-                    'f3/f1.pyc',
-                    'f3/test/__init__.py',
-                    'f3/test/f1.py',
-                    'f3/test/f1.pyc',
-                    'f3/test2/test.txt'
-                ])
-
-    @mock_s3
-    def test_follow_symlink_false(self):
-        # testing if syminks are present and not folllowed
-        with self.temp_directory_with_files() as d1:
-            root1 = d1.path
-            with self.temp_directory_with_files() as d2:
-                root2 = d2.path
-                os.symlink(root1 + "/f1", root2 + "/f3")
-                results = self.run_hook(follow_symlinks=False, functions={
-                    'MyFunction': {
-                        'path': root2}
-                })
-                self.assertIsNotNone(results)
-
-                code = results.get('MyFunction')
-                self.assertIsInstance(code, Code)
-                self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [
-                    'f1/f1.py',
-                    'f1/__init__.py',
-                    'f1/f1.pyc',
-                    'f1/test/__init__.py',
-                    'f1/test/f1.py',
-                    'f1/test/f1.pyc',
-                    'f1/test2/test.txt',
-                    'f2/f2.js',
-                ])
-
-    @mock_s3
-    def test_follow_symlink_omitted(self):
-        # same as test_follow_symlink_false, but default behaivor
-        with self.temp_directory_with_files() as d1:
-            root1 = d1.path
-            with self.temp_directory_with_files() as d2:
-                root2 = d2.path
-                os.symlink(root1 + "/f1", root2 + "/f3")
-                results = self.run_hook(functions={
-                    'MyFunction': {
-                        'path': root2}
-                })
-                self.assertIsNotNone(results)
-
-                code = results.get('MyFunction')
-                self.assertIsInstance(code, Code)
-                self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [
-                    'f1/f1.py',
-                    'f1/__init__.py',
-                    'f1/f1.pyc',
-                    'f1/test/__init__.py',
-                    'f1/test/f1.py',
-                    'f1/test/f1.pyc',
-                    'f1/test2/test.txt',
-                    'f2/f2.js',
-                ])
+
+def test_follow_symlink_true(tmpdir, s3, all_files, f1_files, run_hook,
+                             linked_dir):
+    root = tmpdir
+    results = run_hook(
+        follow_symlinks=True,
+        functions={
+            'MyFunction': {
+                'path': str(root)
+            }
+        }
+    )
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+
+    linked_files = [p for p in linked_dir.visit() if p.check(file=1)]
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key,
+                            all_files + linked_files, root=tmpdir)
+
+
+def test_follow_symlink_false(tmpdir, s3, all_files, run_hook, linked_dir):
+    root = tmpdir
+    results = run_hook(
+        follow_symlinks=False,
+        functions={
+            'MyFunction': {
+                'path': str(root)
+            }
+        }
+    )
+    assert results is not None
+
+    code = results.get('MyFunction')
+    assert isinstance(code, Code)
+    assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, all_files,
+                            root=tmpdir)
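
For reference, a hook configuration exercising the pre-built payload support
added by this patch might look like the following, matching the format of the
``upload_lambda_functions`` docstring example (the bucket name, function name,
path and version string here are illustrative, not part of the patch):

    pre_build:
      - path: stacker.hooks.aws_lambda.upload_lambda_functions
        required: true
        args:
          bucket: custom-bucket
          functions:
            MyFunction:
              path: ./lambda_functions/payload.zip
              version: "1.0.0"

Pinning `version` as above avoids the spurious re-uploads that can occur when
the same ZIP is rebuilt in different environments.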
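When `version` is omitted for a pre-built ZIP, the hook derives it from the
payload bytes via _calculate_prebuilt_hash. A minimal standalone sketch of the
same computation, handy for predicting the version key outside stacker (the
helper name and file path are illustrative):

    import hashlib

    def prebuilt_zip_version(path, chunk_size=4096):
        # Same scheme as _calculate_prebuilt_hash: MD5 over the raw bytes of
        # the ZIP, read in chunks so large payloads stay out of memory.
        digest = hashlib.md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    print(prebuilt_zip_version('lambda_functions/payload.zip'))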