def test_unicode_bytes_path(self):
    filename = self.create_config_file(b'aws_config_unicode\xe2\x9c\x93')
    with mock.patch('sys.getfilesystemencoding') as encoding:
        encoding.return_value = 'utf-8'
        loaded_config = load_config(filename)
    self.assertIn('default', loaded_config['profiles'])
    self.assertIn('personal', loaded_config['profiles'])
def load_aws_config(config_path, credentials_path, profile_name="default"):
    aws_config = load_config(config_path)
    if not aws_config:
        raise RuntimeError("Failed to load config: {}".format(config_path))

    aws_creds_provider = SharedCredentialProvider(
        credentials_path,
        profile_name=profile_name,
    )
    aws_creds_config = aws_creds_provider.load()
    if not aws_creds_config:
        raise RuntimeError("Failed to load aws credentials config: {}".format(
            credentials_path))

    (
        aws_access_key,
        aws_secret_key,
        aws_token,
    ) = aws_creds_config.get_frozen_credentials()

    profile_attributes = aws_config["profiles"][profile_name]
    aws_config = dict(
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key,
    )
    aws_config.update(profile_attributes)
    return aws_config
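A minimal call-site sketch for the helper above, assuming its module imports load_config and SharedCredentialProvider from botocore; the paths and profile name below are illustrative, not taken from the original snippet.

import os

# Hypothetical usage of load_aws_config(); paths and profile are examples only.
merged = load_aws_config(
    config_path=os.path.expanduser("~/.aws/config"),
    credentials_path=os.path.expanduser("~/.aws/credentials"),
    profile_name="default",
)
print(merged["aws_access_key_id"], merged.get("region"))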
def main_sync(skip, max, only_new):
    credentials = load_config("./.aws/credentials")
    configs = load_config("./.aws/configs")
    bucket_from = create_bucket(credentials['profiles']['from'],
                                configs['profiles']['from'])
    bucket_to = create_bucket(credentials['profiles']['to'],
                              configs['profiles']['to'])

    if skip:
        print("Skipping %d" % skip)

    i = 0
    total_size = 0
    for obj in bucket_from.objects.all():
        stdout.write("\rTotal files: %d, total size: %s" %
                     (i, format_bytes(total_size)))
        stdout.flush()
        if i < skip:
            i += 1
            continue

        key = obj.key
        object_to = bucket_to.Object(key)
        if only_new:
            # Only treat the object as existing if an exact key match is found.
            objs = list(bucket_to.objects.filter(Prefix=key))
            exists = len(objs) > 0 and objs[0].key == key
        else:
            exists = False

        if not exists:
            # Stream the object through memory from the source bucket to the target.
            object_from = bucket_from.Object(key)
            file_stream = io.BytesIO()
            object_from.download_fileobj(file_stream)
            file_size = file_stream.getbuffer().nbytes
            total_size += file_size
            if file_size > 0:
                file_stream.seek(0)
                object_to.upload_fileobj(file_stream)

        if max != 0 and i > max:
            return
        i += 1
def test_nested_hierarchy_parsing(self):
    filename = path('aws_config_nested')
    loaded_config = load_config(filename)
    config = loaded_config['profiles']['default']
    self.assertEqual(config['aws_access_key_id'], 'foo')
    self.assertEqual(config['region'], 'us-west-2')
    self.assertEqual(config['s3']['signature_version'], 's3v4')
    self.assertEqual(config['cloudwatch']['signature_version'], 'v4')
def test_bad_profiles_are_ignored(self):
    filename = path('aws_bad_profile')
    loaded_config = load_config(filename)
    self.assertEqual(len(loaded_config['profiles']), 3)
    profiles = loaded_config['profiles']
    self.assertIn('my profile', profiles)
    self.assertIn('personal1', profiles)
    self.assertIn('default', profiles)
def _load_aws_config_file(self, profile):
    from botocore.configloader import load_config

    # pylint: disable=attribute-defined-outside-init
    self._transfer_config = None
    config_path = os.environ.get("AWS_CONFIG_FILE", _AWS_CONFIG_PATH)
    if not os.path.exists(config_path):
        return {}
    config = load_config(config_path)
    profile_config = config["profiles"].get(profile or "default")
    if not profile_config:
        return {}
    s3_config = profile_config.get("s3", {})
    return self._split_s3_config(s3_config)
def _process_config(self):
    from boto3.s3.transfer import TransferConfig
    from botocore.configloader import load_config

    config_path = os.environ.get("AWS_CONFIG_FILE", _AWS_CONFIG_PATH)
    if not os.path.exists(config_path):
        return None
    config = load_config(config_path)
    profile = config["profiles"].get(self.profile or "default")
    if not profile:
        return None
    s3_config = profile.get("s3", {})
    s3_config, transfer_config = self._transform_config(s3_config)
    self._transfer_config = TransferConfig(**transfer_config)
    return s3_config
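Both methods above rely on load_config exposing a profile's s3 subsection as a nested dict of strings. A small illustration of that shape, with a hypothetical config path and example values:

from botocore.configloader import load_config

# Example ~/.aws/config contents (illustrative values, not from the snippets above):
#
#   [profile default]
#   region = us-west-2
#   s3 =
#       max_concurrent_requests = 20
#       multipart_chunksize = 16MB
#
config = load_config("/path/to/.aws/config")   # hypothetical path
profile = config["profiles"].get("default", {})
s3_config = profile.get("s3", {})
# s3_config is a dict of strings, e.g.
# {'max_concurrent_requests': '20', 'multipart_chunksize': '16MB'}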
def parse_config(filename):
    config = load_config(filename)
    return config
def test_nested_bad_config(self):
    filename = path('aws_config_nested_bad')
    with self.assertRaises(botocore.exceptions.ConfigParseError):
        loaded_config = load_config(filename)
def test_profile_map_conversion(self):
    loaded_config = load_config(path('aws_config'))
    self.assertIn('profiles', loaded_config)
    self.assertEqual(sorted(loaded_config['profiles'].keys()),
                     ['default', 'personal'])
def test_unicode_bytes_path_not_found(self):
    with self.assertRaises(botocore.exceptions.ConfigNotFound):
        with mock.patch('sys.getfilesystemencoding') as encoding:
            encoding.return_value = 'utf-8'
            load_config(path(b'\xe2\x9c\x93'))
def test_unicode_bytes_path_not_found_filesystem_encoding_none(self):
    with mock.patch('sys.getfilesystemencoding') as encoding:
        encoding.return_value = None
        with self.assertRaises(botocore.exceptions.ConfigNotFound):
            load_config(path(b'\xe2\x9c\x93'))
def test_nested_bad_config_filesystem_encoding_none(self):
    filename = path('aws_config_nested_bad')
    with mock.patch('sys.getfilesystemencoding') as encoding:
        encoding.return_value = None
        with self.assertRaises(botocore.exceptions.ConfigParseError):
            loaded_config = load_config(filename)
def main_async(skip, max, only_new, threads_count):
    credentials = load_config("./.aws/credentials")
    configs = load_config("./.aws/configs")
    bucket_from = create_bucket(credentials['profiles']['from'],
                                configs['profiles']['from'])
    bucket_to = create_bucket(credentials['profiles']['to'],
                              configs['profiles']['to'])

    if skip:
        print("Skipping %d" % skip)

    keys = []
    i = 0
    total_size = 0
    print("Start at %d threads" % threads_count)
    for obj in bucket_from.objects.all():
        if i < skip:
            i += 1
            continue
        keys.append(obj.key)

        # Copy objects in batches of threads_count concurrent coroutines.
        if len(keys) == threads_count:
            futures = []
            sizes = list(0 for _ in range(threads_count))
            for k in range(len(keys)):
                key = keys[k]

                # Bind the loop variables as default arguments so every
                # coroutine copies its own key rather than the last one seen.
                async def copy_func(l=k, url=key):
                    object_to = bucket_to.Object(url)
                    if only_new:
                        objs = list(bucket_to.objects.filter(Prefix=url))
                        exists = len(objs) > 0 and objs[0].key == url
                    else:
                        exists = False
                    if not exists:
                        object_from = bucket_from.Object(url)
                        file_stream = io.BytesIO()
                        object_from.download_fileobj(file_stream)
                        file_size = file_stream.getbuffer().nbytes
                        if file_size > 0:
                            file_stream.seek(0)
                            object_to.upload_fileobj(file_stream)
                        sizes[l] = file_size

                futures.append(copy_func())

            loop = asyncio.get_event_loop()
            loop.run_until_complete(asyncio.wait(futures))

            # Tally the finished batch and report progress.
            for size in sizes:
                total_size += size
                i += 1
            keys = []
            stdout.write("\rTotal files: %d, total size: %s " %
                         (i, format_bytes(total_size)))
            stdout.flush()
            if max != 0 and i > max:
                return
    i += 1
def parse_aws_config(filename="~/.aws/config"):
    real_filename = os.path.expanduser(filename)
    return configloader.load_config(real_filename)
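A short call-site sketch for the wrapper above; the profile iteration and the region field are illustrative, not part of the original snippet.

# Hypothetical usage of parse_aws_config(); defaults to ~/.aws/config.
config = parse_aws_config()
for name, profile in config.get("profiles", {}).items():
    print(name, profile.get("region"))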