def test__get_local_file_list(self, mock_oswalk, _, __):
    """Check _get_local_file_list against the golden listing for canned os.walk data."""
    here = Path(__file__).parent
    with open(here / "data/s3sync/oswalk_output.json") as walk_fh:
        mock_oswalk.return_value = json.load(walk_fh)
    with open(here / "data/s3sync/s3sync_local_file_list_expected.json") as expected_fh:
        expected = json.load(expected_fh)
    client = mock.Mock()
    # Resolve the test-data root whether tests run from the repo root or tests/.
    rel = "./" if os.getcwd().endswith("/tests") else "./tests/"
    data_root = Path(rel + "data/").resolve()
    syncer = S3Sync(
        client,
        "test_bucket",
        "test_prefix",
        str(data_root / "lambda_build_with_submodules"),
        dry_run=True,
    )
    self.assertEqual(expected, syncer._get_local_file_list("/tmp/repo", False))
def _sync_wrap(bucket, project_name, project_root, dry_run):
    """Sync the project to *bucket*; S3Sync performs the work in its constructor."""
    sync_args = (
        bucket.s3_client,
        bucket.name,
        project_name,
        project_root,
        bucket.object_acl,
    )
    S3Sync(*sync_args, dry_run=dry_run)
def stage_in_s3(buckets, project_name, project_root):
    """Upload project artifacts once per distinct bucket referenced by the tests."""
    # Collapse the per-test mapping to one bucket object per unique bucket name
    # (the last bucket seen for a given name wins, as in a plain assignment loop).
    unique_buckets = {
        bucket.name: bucket
        for test in buckets.values()
        for bucket in test.values()
    }
    for bucket in unique_buckets.values():
        S3Sync(
            bucket.s3_client, bucket.name, project_name, project_root, bucket.object_acl
        )
def _sync_wrap(bucket, project_name, project_root, dry_run, exclude_prefix):
    """Sync the project to *bucket*, optionally widening S3Sync's exclusion lists.

    NOTE(review): the prefixes are appended to *class-level* lists on S3Sync,
    so they accumulate across calls within the same process — presumably
    intentional (shared configuration); confirm against callers.
    """
    if exclude_prefix:
        S3Sync.exclude_remote_path_prefixes += exclude_prefix
        S3Sync.exclude_path_prefixes += exclude_prefix
    sync_args = (
        bucket.s3_client,
        bucket.name,
        project_name,
        project_root,
        bucket.object_acl,
    )
    S3Sync(*sync_args, dry_run=dry_run)
def stage_in_s3(config):
    """
    Upload templates and other artifacts to s3.

    Creates each s3 bucket named in the config (falling back to a name derived
    from the project name when none is given), then uploads the templates and
    other artifacts to it.

    :param config: Taskcat config object.
    """
    # Multiple regions may share a bucket; a set de-duplicates them.
    buckets: set = {
        region.s3bucket
        for test in config.tests.values()
        for region in test.regions
    }
    for bucket in buckets:
        bucket.create()
    for bucket in buckets:
        S3Sync(bucket.client, bucket.name, config.name, config.project_root, bucket.acl)
def test_init(self):
    """Constructing S3Sync should list remote objects, delete stale ones, and upload."""
    client = mock.Mock()
    client.list_objects_v2.return_value = {
        "Contents": [{"Key": "test_prefix/test_object", "ETag": "test_etag"}]
    }
    client.delete_objects.return_value = {}
    client.upload_file.return_value = None
    # Resolve the test-data root whether tests run from the repo root or tests/.
    rel = "./" if os.getcwd().endswith("/tests") else "./tests/"
    data_root = Path(rel + "data/").resolve()
    S3Sync(
        client,
        "test_bucket",
        "test_prefix",
        str(data_root / "lambda_build_with_submodules"),
    )
    client.list_objects_v2.assert_called_once()
    client.delete_objects.assert_called_once()
    client.upload_file.assert_called()
#!/usr/bin/env python3
"""Ad-hoc CLI helper: sync a local directory into an S3 bucket via taskcat's S3Sync."""
import logging
from sys import argv

import boto3

from taskcat._s3_sync import LOG, S3Sync

LOG.setLevel(logging.INFO)

# Positional CLI arguments; no validation beyond order.
bucket_name = argv[1]
bucket_region = argv[2]
bucket_profile = argv[3]
key_prefix = argv[4]
source_path = argv[5]
object_acl = argv[6]

session = boto3.Session(profile_name=bucket_profile)
client = session.client('s3', region_name=bucket_region)

# S3Sync performs the sync as a side effect of construction.
S3Sync(client, bucket_name, key_prefix, source_path, object_acl)
def _sync_wrap(bucket, project_name, project_root):
    """Upload the project rooted at *project_root* to *bucket* via S3Sync."""
    acl = bucket.object_acl
    S3Sync(bucket.s3_client, bucket.name, project_name, project_root, acl)