def test_bucket(self):
    """Make a randomly named bucket, verify it appears in the bucket
    listing, then remove it and verify it is gone.
    """
    lead = random.randrange(5000)
    trail = random.randrange(5000)
    self.bucket = str(lead) + 'mybucket' + str(trail)
    make_info = FileInfo(
        src=self.bucket,
        operation_name='make_bucket',
        size=0,
        client=self.client,
        source_client=self.source_client
    )
    S3Handler(self.session, self.params).call([make_info])
    names = [entry['Name']
             for entry in self.client.list_buckets().get('Buckets', [])]
    self.assertIn(self.bucket, names)
    remove_info = FileInfo(
        src=self.bucket,
        operation_name='remove_bucket',
        size=0,
        client=self.client,
        source_client=self.source_client)
    S3Handler(self.session, self.params).call([remove_info])
    names = [entry['Name']
             for entry in self.client.list_buckets().get('Buckets', [])]
    self.assertNotIn(self.bucket, names)
def setUp(self):
    """Build the fake session/handler plus the S3 keys and local paths
    used by the move-from-S3-to-local tests.
    """
    super(S3HandlerTestMvS3Local, self).setUp()
    self.session = FakeSession()
    self.service = self.session.get_service('s3')
    self.endpoint = self.service.get_endpoint('us-east-1')
    self.s3_handler = S3Handler(self.session, {'region': 'us-east-1'})
    self.bucket = make_s3_files(self.session)
    self.s3_files = ['%s/%s' % (self.bucket, key)
                     for key in ('text1.txt',
                                 'another_directory/text2.txt')]
    # Local mirror of the S3 layout under ./some_directory/.
    base_dir = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
    nested_dir = base_dir + 'another_directory' + os.sep
    self.loc_files = [base_dir + 'text1.txt',
                      nested_dir + 'text2.txt']
def setUp(self):
    """Create single-threaded download handlers (one plain, one with a
    tiny multipart threshold) plus the S3 keys and local paths the
    download tests use.
    """
    super(S3HandlerTestDownload, self).setUp()
    params = {'region': 'us-east-1'}
    self.s3_handler = S3Handler(
        self.session, params,
        runtime_config=runtime_config(max_concurrent_requests=1))
    # Tiny threshold/chunksize force the multipart code path.
    self.s3_handler_multi = S3Handler(
        self.session, params,
        runtime_config=runtime_config(multipart_threshold=10,
                                      multipart_chunksize=5,
                                      max_concurrent_requests=1))
    self.bucket = 'mybucket'
    self.s3_files = ['%s/%s' % (self.bucket, key)
                     for key in ('text1.txt',
                                 'another_directory/text2.txt')]
    base_dir = self.file_creator.rootdir + os.sep + 'some_directory' \
        + os.sep
    nested_dir = base_dir + 'another_directory' + os.sep
    self.loc_files = [base_dir + 'text1.txt',
                      nested_dir + 'text2.txt']
def test_bucket(self):
    """Make a randomly named bucket (src carries a trailing slash),
    verify it is listed, then remove it and verify it is gone.
    """
    lead = random.randrange(5000)
    trail = random.randrange(5000)
    self.bucket = str(lead) + 'mybucket' + str(trail) + '/'
    make_info = FileInfo(
        src=self.bucket,
        operation_name='make_bucket',
        size=0,
        service=self.service,
        endpoint=self.endpoint,
    )
    S3Handler(self.session, self.params).call([make_info])
    names = [entry['Name'] for entry in list_buckets(self.session)]
    # Listing has no trailing slash, so strip it before comparing.
    self.assertIn(self.bucket[:-1], names)
    remove_info = FileInfo(
        src=self.bucket,
        operation_name='remove_bucket',
        size=0,
        service=self.service,
        endpoint=self.endpoint)
    S3Handler(self.session, self.params).call([remove_info])
    names = [entry['Name'] for entry in list_buckets(self.session)]
    self.assertNotIn(self.bucket[:-1], names)
def setUp(self):
    """Build a fake session, a private-ACL handler, and two buckets'
    worth of S3 keys for the S3-to-S3 move tests.
    """
    super(S3HandlerTestMvS3S3, self).setUp()
    self.session = FakeSession()
    self.s3_handler = S3Handler(
        self.session, {'region': 'us-east-1', 'acl': ['private']})
    self.bucket = make_s3_files(self.session)
    self.bucket2 = create_bucket(self.session)
    keys = ('text1.txt', 'another_directory/text2.txt')
    self.s3_files = ['%s/%s' % (self.bucket, key) for key in keys]
    self.s3_files2 = ['%s/%s' % (self.bucket2, key) for key in keys]
def setUp(self):
    """Build a real botocore session, a private-ACL handler, and two
    buckets' worth of S3 keys for the integration tests.
    """
    self.session = botocore.session.get_session(EnvironmentVariables)
    self.service = self.session.get_service('s3')
    self.endpoint = self.service.get_endpoint('us-east-1')
    self.s3_handler = S3Handler(
        self.session, {'region': 'us-east-1', 'acl': ['private']})
    self.bucket = make_s3_files(self.session)
    self.bucket2 = create_bucket(self.session)
    keys = ('text1.txt', 'another_directory/text2.txt')
    self.s3_files = ['%s/%s' % (self.bucket, key) for key in keys]
    self.s3_files2 = ['%s/%s' % (self.bucket2, key) for key in keys]
def setUp(self):
    """Build a fault-injecting fake session and a multipart handler with
    a tiny threshold so the multi-task error paths are exercised.
    """
    super(S3HandlerExceptionMultiTaskTest, self).setUp()
    # FakeSession(True, True) injects failures into the fake service.
    self.session = FakeSession(True, True)
    self.service = self.session.get_service('s3')
    self.endpoint = self.service.get_endpoint('us-east-1')
    self.s3_handler_multi = S3Handler(self.session,
                                      {'region': 'us-east-1'},
                                      multi_threshold=10, chunksize=2)
    self.bucket = create_bucket(self.session)
    self.loc_files = make_loc_files()
    self.s3_files = ['%s/%s' % (self.bucket, key)
                     for key in ('text1.txt',
                                 'another_directory/text2.txt')]
def setUp(self):
    """Build a botocore session with us-west-2 clients, a private-ACL
    handler, and two buckets' worth of S3 keys.
    """
    self.session = botocore.session.get_session(EnvironmentVariables)
    self.client = self.session.create_client('s3', 'us-west-2')
    self.source_client = self.session.create_client('s3', 'us-west-2')
    self.s3_handler = S3Handler(
        self.session, {'region': 'us-west-2', 'acl': ['private']})
    self.bucket = make_s3_files(self.session)
    self.bucket2 = create_bucket(self.session)
    keys = ('text1.txt', 'another_directory/text2.txt')
    self.s3_files = ['%s/%s' % (self.bucket, key) for key in keys]
    self.s3_files2 = ['%s/%s' % (self.bucket2, key) for key in keys]
def test_make_bucket_non_us_east_1(self):
    """make_bucket outside us-east-1 must send a
    CreateBucketConfiguration with the region as LocationConstraint.
    """
    region = 'us-west-2'
    self.params = {'region': region}
    self.client = self.session.create_client('s3', region)
    self.source_client = self.session.create_client('s3', region)
    file_info = FileInfo(
        src=self.bucket,
        operation_name='make_bucket',
        size=0,
        client=self.client,
        source_client=self.source_client)
    handler = S3Handler(self.session, self.params)
    expected_calls = [
        ('CreateBucket',
         {'Bucket': self.bucket,
          'CreateBucketConfiguration': {'LocationConstraint': region}})]
    self.assert_operations_for_s3_handler(handler, [file_info],
                                          expected_calls)
def setUp(self):
    """Create a single-threaded private-ACL handler and two buckets'
    worth of S3 keys for the S3-to-S3 move tests.
    """
    super(S3HandlerTestMvS3S3, self).setUp()
    self.s3_handler = S3Handler(
        self.session, {'region': 'us-east-1', 'acl': 'private'},
        runtime_config=runtime_config(max_concurrent_requests=1))
    self.bucket = 'mybucket'
    self.bucket2 = 'mybucket2'
    keys = ('text1.txt', 'another_directory/text2.txt')
    self.s3_files = ['%s/%s' % (self.bucket, key) for key in keys]
    self.s3_files2 = ['%s/%s' % (self.bucket2, key) for key in keys]
def run(self):
    """
    This function wires together all of the generators and completes
    the command. First a dictionary is created that is indexed first by
    the command name. Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the
    particular instruction for that command. To begin the wiring,
    either a ``FileFormat`` or ``TaskInfo`` object, depending on the
    command, is put into a list. Then the function enters a while loop
    that pops off an instruction. It then determines the object needed
    and calls the call function of the object using the list as the
    input. Depending on the number of objects in the input list and
    the number of components in the list corresponding to the
    instruction, the call method of the component can be called two
    different ways. If the number of inputs is equal to the number of
    components a 1:1 mapping of inputs to components is used when
    calling the call function. If the there are more inputs than
    components, then a 2:1 mapping of inputs to components is used
    where the component call method takes two inputs instead of one.
    Whatever files are yielded from the call function is appended to a
    list and used as the input for the next repetition of the while
    loop until there are no more instructions.

    :return: 0 when no tasks failed, 1 when at least one task failed.
    """
    src = self.parameters['src']
    dest = self.parameters['dest']
    paths_type = self.parameters['paths_type']
    # Forward and reverse path descriptions; the reverse direction is
    # only consumed by the sync comparator.
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)
    # Map (paths type, command name) to the operation each generated
    # FileInfo should perform.
    cmd_translation = {}
    cmd_translation['locals3'] = {
        'cp': 'upload',
        'sync': 'upload',
        'mv': 'move'
    }
    cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
    cmd_translation['s3local'] = {
        'cp': 'download',
        'sync': 'download',
        'mv': 'move'
    }
    cmd_translation['s3'] = {
        'rm': 'delete',
        'mb': 'make_bucket',
        'rb': 'remove_bucket'
    }
    operation_name = cmd_translation[paths_type][self.cmd]
    file_generator = FileGenerator(self._service, self._endpoint,
                                   operation_name,
                                   self.parameters['follow_symlinks'])
    # Reverse generator walks the destination side for sync comparison;
    # it performs no operation itself (empty operation name).
    rev_generator = FileGenerator(self._service, self._endpoint, '',
                                  self.parameters['follow_symlinks'])
    # Bucket-level commands (mb/rb) act on a single TaskInfo instead of
    # a generated stream of files.
    taskinfo = [
        TaskInfo(src=files['src']['path'], src_type='s3',
                 operation_name=operation_name, service=self._service,
                 endpoint=self._endpoint)
    ]
    s3handler = S3Handler(self.session, self.parameters)
    # For the current command, map each instruction to the component(s)
    # whose ``call`` should be invoked for it.
    command_dict = {}
    if self.cmd == 'sync':
        command_dict = {
            'setup': [files, rev_files],
            'file_generator': [file_generator, rev_generator],
            'filters': [
                create_filter(self.parameters),
                create_filter(self.parameters)
            ],
            'comparator': [Comparator(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'cp':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'rm':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mv':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    elif self.cmd == 'rb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    files = command_dict['setup']
    # Thread the output of each instruction's components into the next:
    # 1:1 when the counts match, many:1 (splat) when there are more
    # inputs than components.
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replaces the files attr with the return value of the
    # file_list. The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed. This means that files[0] now contains
    # the number of failed tasks. In terms of the RC, we're
    # keeping it simple and saying that > 0 failed tasks
    # will give a 1 RC.
    rc = 0
    if files[0] > 0:
        rc = 1
    return rc
def run(self):
    """
    This function wires together all of the generators and completes
    the command. First a dictionary is created that is indexed first by
    the command name. Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the
    particular instruction for that command. To begin the wiring,
    either a ``FileFormat`` or ``TaskInfo`` object, depending on the
    command, is put into a list. Then the function enters a while loop
    that pops off an instruction. It then determines the object needed
    and calls the call function of the object using the list as the
    input. Depending on the number of objects in the input list and
    the number of components in the list corresponding to the
    instruction, the call method of the component can be called two
    different ways. If the number of inputs is equal to the number of
    components a 1:1 mapping of inputs to components is used when
    calling the call function. If the there are more inputs than
    components, then a 2:1 mapping of inputs to components is used
    where the component call method takes two inputs instead of one.
    Whatever files are yielded from the call function is appended to a
    list and used as the input for the next repetition of the while
    loop until there are no more instructions.

    :return: 0 on success, 1 if any task failed, 2 if any task warned.
    """
    src = self.parameters['src']
    dest = self.parameters['dest']
    paths_type = self.parameters['paths_type']
    # Forward and reverse path descriptions; the reverse direction is
    # only consumed by the sync comparator.
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)
    # Map (paths type, command name) to the operation each generated
    # FileInfo should perform.
    cmd_translation = {}
    cmd_translation['locals3'] = {
        'cp': 'upload',
        'sync': 'upload',
        'mv': 'move'
    }
    cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
    cmd_translation['s3local'] = {
        'cp': 'download',
        'sync': 'download',
        'mv': 'move'
    }
    cmd_translation['s3'] = {
        'rm': 'delete',
        'mb': 'make_bucket',
        'rb': 'remove_bucket'
    }
    # Shared queue through which generators and handlers report results.
    result_queue = queue.Queue()
    operation_name = cmd_translation[paths_type][self.cmd]
    fgen_kwargs = {
        'client': self._source_client,
        'operation_name': operation_name,
        'follow_symlinks': self.parameters['follow_symlinks'],
        'page_size': self.parameters['page_size'],
        'result_queue': result_queue
    }
    # Reverse generator walks the destination side for sync comparison;
    # it performs no operation itself (empty operation name).
    rgen_kwargs = {
        'client': self._client,
        'operation_name': '',
        'follow_symlinks': self.parameters['follow_symlinks'],
        'page_size': self.parameters['page_size'],
        'result_queue': result_queue
    }
    fgen_request_parameters = {}
    fgen_head_object_params = {}
    fgen_request_parameters['HeadObject'] = fgen_head_object_params
    fgen_kwargs['request_parameters'] = fgen_request_parameters
    # SSE-C may be neaded for HeadObject for copies/downloads/deletes
    # If the operation is s3 to s3, the FileGenerator should use the
    # copy source key and algorithm. Otherwise, use the regular
    # SSE-C key and algorithm. Note the reverse FileGenerator does
    # not need any of these because it is used only for sync operations
    # which only use ListObjects which does not require HeadObject.
    RequestParamsMapper.map_head_object_params(fgen_head_object_params,
                                               self.parameters)
    if paths_type == 's3s3':
        RequestParamsMapper.map_head_object_params(
            fgen_head_object_params, {
                'sse_c': self.parameters.get('sse_c_copy_source'),
                'sse_c_key': self.parameters.get('sse_c_copy_source_key')
            })
    file_generator = FileGenerator(**fgen_kwargs)
    rev_generator = FileGenerator(**rgen_kwargs)
    # Bucket-level commands (mb/rb) act on a single TaskInfo instead of
    # a generated stream of files.
    taskinfo = [
        TaskInfo(src=files['src']['path'], src_type='s3',
                 operation_name=operation_name, client=self._client)
    ]
    # Streaming copies (stdin/stdout) bypass the generator pipeline and
    # use a single pre-built FileInfo.
    stream_dest_path, stream_compare_key = find_dest_path_comp_key(files)
    stream_file_info = [
        FileInfo(src=files['src']['path'], dest=stream_dest_path,
                 compare_key=stream_compare_key,
                 src_type=files['src']['type'],
                 dest_type=files['dest']['type'],
                 operation_name=operation_name, client=self._client,
                 is_stream=True)
    ]
    file_info_builder = FileInfoBuilder(self._client, self._source_client,
                                        self.parameters)
    s3handler = S3Handler(self.session, self.parameters,
                          runtime_config=self._runtime_config,
                          result_queue=result_queue)
    s3_stream_handler = S3StreamHandler(self.session, self.parameters,
                                        result_queue=result_queue)
    sync_strategies = self.choose_sync_strategies()
    # For the current command, map each instruction to the component(s)
    # whose ``call`` should be invoked for it.
    command_dict = {}
    if self.cmd == 'sync':
        command_dict = {
            'setup': [files, rev_files],
            'file_generator': [file_generator, rev_generator],
            'filters': [
                create_filter(self.parameters),
                create_filter(self.parameters)
            ],
            'comparator': [Comparator(**sync_strategies)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'cp' and self.parameters['is_stream']:
        command_dict = {
            'setup': [stream_file_info],
            's3_handler': [s3_stream_handler]
        }
    elif self.cmd == 'cp':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'rm':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mv':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    elif self.cmd == 'rb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    files = command_dict['setup']
    # Thread the output of each instruction's components into the next:
    # 1:1 when the counts match, many:1 (splat) when there are more
    # inputs than components.
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replaces the files attr with the return value of the
    # file_list. The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed and the number of tasks warned.
    # This means that files[0] now contains a namedtuple with
    # the number of failed tasks and the number of warned tasks.
    # In terms of the RC, we're keeping it simple and saying
    # that > 0 failed tasks will give a 1 RC and > 0 warned
    # tasks will give a 2 RC. Otherwise a RC of zero is returned.
    rc = 0
    if files[0].num_tasks_failed > 0:
        rc = 1
    if files[0].num_tasks_warned > 0:
        rc = 2
    return rc
def run(self):
    """
    This function wires together all of the generators and completes
    the command. First a dictionary is created that is indexed first by
    the command name. Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the
    particular instruction for that command. To begin the wiring,
    either a ``FileFormat`` or ``TaskInfo`` object, depending on the
    command, is put into a list. Then the function enters a while loop
    that pops off an instruction. It then determines the object needed
    and calls the call function of the object using the list as the
    input. Depending on the number of objects in the input list and
    the number of components in the list corresponding to the
    instruction, the call method of the component can be called two
    different ways. If the number of inputs is equal to the number of
    components a 1:1 mapping of inputs to components is used when
    calling the call function. If the there are more inputs than
    components, then a 2:1 mapping of inputs to components is used
    where the component call method takes two inputs instead of one.
    Whatever files are yielded from the call function is appended to a
    list and used as the input for the next repetition of the while
    loop until there are no more instructions.
    """
    src = self.parameters['src']
    dest = self.parameters['dest']
    paths_type = self.parameters['paths_type']
    # Forward and reverse path descriptions; the reverse direction is
    # only consumed by the sync comparator.
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)
    # Map (paths type, command name) to the operation each generated
    # FileInfo should perform.
    cmd_translation = {}
    cmd_translation['locals3'] = {
        'cp': 'upload',
        'sync': 'upload',
        'mv': 'move'
    }
    cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
    cmd_translation['s3local'] = {
        'cp': 'download',
        'sync': 'download',
        'mv': 'move'
    }
    cmd_translation['s3'] = {
        'rm': 'delete',
        'ls': 'list_objects',
        'mb': 'make_bucket',
        'rb': 'remove_bucket'
    }
    operation = cmd_translation[paths_type][self.cmd]
    file_generator = FileGenerator(self.session, operation,
                                   self.parameters)
    # Reverse generator walks the destination side for sync comparison;
    # it performs no operation itself (empty operation name).
    rev_generator = FileGenerator(self.session, '', self.parameters)
    # Bucket/listing commands (ls/mb/rb) act on a single TaskInfo
    # instead of a generated stream of files.
    taskinfo = [
        TaskInfo(src=files['src']['path'], src_type='s3',
                 operation=operation)
    ]
    s3handler = S3Handler(self.session, self.parameters)
    # For each command, map each instruction to the component(s) whose
    # ``call`` should be invoked for it.
    command_dict = {}
    command_dict['sync'] = {
        'setup': [files, rev_files],
        'file_generator': [file_generator, rev_generator],
        'filters': [Filter(self.parameters), Filter(self.parameters)],
        'comparator': [Comparator(self.parameters)],
        's3_handler': [s3handler]
    }
    command_dict['cp'] = {
        'setup': [files],
        'file_generator': [file_generator],
        'filters': [Filter(self.parameters)],
        's3_handler': [s3handler]
    }
    command_dict['rm'] = {
        'setup': [files],
        'file_generator': [file_generator],
        'filters': [Filter(self.parameters)],
        's3_handler': [s3handler]
    }
    command_dict['mv'] = {
        'setup': [files],
        'file_generator': [file_generator],
        'filters': [Filter(self.parameters)],
        's3_handler': [s3handler]
    }
    command_dict['ls'] = {'setup': [taskinfo], 's3_handler': [s3handler]}
    command_dict['mb'] = {'setup': [taskinfo], 's3_handler': [s3handler]}
    command_dict['rb'] = {'setup': [taskinfo], 's3_handler': [s3handler]}
    files = command_dict[self.cmd]['setup']
    # Thread the output of each instruction's components into the next:
    # 1:1 when the counts match, many:1 (splat) when there are more
    # inputs than components.
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[self.cmd][instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
def setUp(self):
    """Create the handler and bucket name used by the URL-encoded
    delete tests.
    """
    super(S3HandlerTestURLEncodeDeletes, self).setUp()
    self.bucket = 'mybucket'
    self.s3_handler = S3Handler(self.session, {'region': 'us-east-1'})
def setUp(self):
    """Build a botocore session and handler, then seed both S3 and
    local fixture files.
    """
    self.session = botocore.session.get_session(EnvironmentVariables)
    self.s3_handler = S3Handler(self.session, {'region': 'us-east-1'})
    self.bucket = make_s3_files(self.session)
    self.loc_files = make_loc_files()
def setUp(self):
    """Create a fake session and handler; each test makes its own
    bucket, so none is created here.
    """
    super(S3HandlerTestBucket, self).setUp()
    self.session = FakeSession()
    self.s3_handler = S3Handler(self.session, {'region': 'us-east-1'})
    self.bucket = None