def create_filter(self, filters=None, root=None, dst_root=None,
                  parameters=None):
    if root is None:
        root = os.getcwd()
    if filters is None:
        filters = {}
    if dst_root is None:
        dst_root = 'bucket'
    if parameters is not None:
        return create_filter(parameters)
    return Filter(filters, root, dst_root)
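# Usage sketch for create_filter (values are hypothetical).  When a
# ``parameters`` dict is supplied, the call is delegated to what is assumed
# to be a module-level create_filter(parameters) helper; otherwise a Filter
# is built directly from the explicit arguments, with the defaults filled in
# above:
#
#   flt = self.create_filter(root='/tmp/data')            # explicit root
#   flt = self.create_filter(parameters=self.parameters)  # delegate to helper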
def run(self):
    """
    This function wires together all of the generators and completes
    the command.  First a dictionary is created that is indexed first by
    the command name.  Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the particular
    instruction for that command.  To begin the wiring, either a
    ``FileFormat`` or ``TaskInfo`` object, depending on the command, is
    put into a list.  Then the function enters a while loop that pops
    off an instruction.  It then determines the object needed and calls
    the call function of the object using the list as the input.
    Depending on the number of objects in the input list and the number
    of components in the list corresponding to the instruction, the
    call method of the component can be called two different ways.  If
    the number of inputs is equal to the number of components, a 1:1
    mapping of inputs to components is used when calling the call
    function.  If there are more inputs than components, then a 2:1
    mapping of inputs to components is used where the component call
    method takes two inputs instead of one.  Whatever files are yielded
    from the call function are appended to a list and used as the input
    for the next repetition of the while loop until there are no more
    instructions.
    """
    src = self.parameters['src']
    dest = self.parameters['dest']
    paths_type = self.parameters['paths_type']
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)

    # Map (path type, command) to the operation name each task will carry.
    cmd_translation = {}
    cmd_translation['locals3'] = {
        'cp': 'upload',
        'sync': 'upload',
        'mv': 'move'
    }
    cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
    cmd_translation['s3local'] = {
        'cp': 'download',
        'sync': 'download',
        'mv': 'move'
    }
    cmd_translation['s3'] = {
        'rm': 'delete',
        'mb': 'make_bucket',
        'rb': 'remove_bucket'
    }
    operation_name = cmd_translation[paths_type][self.cmd]

    file_generator = FileGenerator(self._service, self._endpoint,
                                   operation_name,
                                   self.parameters['follow_symlinks'])
    rev_generator = FileGenerator(self._service, self._endpoint, '',
                                  self.parameters['follow_symlinks'])
    taskinfo = [TaskInfo(src=files['src']['path'],
                         src_type='s3',
                         operation_name=operation_name,
                         service=self._service,
                         endpoint=self._endpoint)]
    s3handler = S3Handler(self.session, self.parameters)

    command_dict = {}
    if self.cmd == 'sync':
        command_dict = {
            'setup': [files, rev_files],
            'file_generator': [file_generator, rev_generator],
            'filters': [
                create_filter(self.parameters),
                create_filter(self.parameters)
            ],
            'comparator': [Comparator(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'cp':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'rm':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mv':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    elif self.cmd == 'rb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}

    files = command_dict['setup']
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replace the files attr with the return value of the
    # file_list.  The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed.  This means that files[0] now contains
    # the number of failed tasks.  In terms of the RC, we're
    # keeping it simple and saying that > 0 failed tasks
    # will give a 1 RC.
    rc = 0
    if files[0] > 0:
        rc = 1
    return rc
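# The docstring above describes the instruction-dispatch loop in the middle
# of run().  The standalone sketch below models that wiring with plain
# callables instead of the FileGenerator/Filter/S3Handler objects and their
# .call methods (all names here are illustrative, not part of awscli): each
# component consumes either one input (1:1 mapping) or all inputs at once
# (fan-in), and the outputs become the inputs for the next instruction.

def _dispatch(instructions, command_dict):
    # Start from the 'setup' entries, exactly as run() does.
    files = command_dict['setup']
    while instructions:
        instruction = instructions.pop(0)
        components = command_dict[instruction]
        file_list = []
        for i in range(len(components)):
            if len(files) > len(components):
                # Fan-in: a single component receives every input.
                file_list.append(components[i](*files))
            else:
                # 1:1: the i-th component receives the i-th input.
                file_list.append(components[i](files[i]))
        files = file_list
    return files


if __name__ == '__main__':
    # Two setup inputs flow through two generators (1:1), then a single
    # comparator fans them in, mirroring the 'sync' wiring above.
    demo = {
        'setup': [[1, 2], [3, 4]],
        'file_generator': [lambda xs: [x * 10 for x in xs],
                           lambda xs: [x * 100 for x in xs]],
        'comparator': [lambda a, b: a + b],
    }
    print(_dispatch(['file_generator', 'comparator'], demo))
    # -> [[10, 20, 300, 400]]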
def run(self):
    """
    This function wires together all of the generators and completes
    the command.  First a dictionary is created that is indexed first by
    the command name.  Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the particular
    instruction for that command.  To begin the wiring, either a
    ``FileFormat`` or ``TaskInfo`` object, depending on the command, is
    put into a list.  Then the function enters a while loop that pops
    off an instruction.  It then determines the object needed and calls
    the call function of the object using the list as the input.
    Depending on the number of objects in the input list and the number
    of components in the list corresponding to the instruction, the
    call method of the component can be called two different ways.  If
    the number of inputs is equal to the number of components, a 1:1
    mapping of inputs to components is used when calling the call
    function.  If there are more inputs than components, then a 2:1
    mapping of inputs to components is used where the component call
    method takes two inputs instead of one.  Whatever files are yielded
    from the call function are appended to a list and used as the input
    for the next repetition of the while loop until there are no more
    instructions.
    """
    src = self.parameters['src']
    dest = self.parameters['dest']
    paths_type = self.parameters['paths_type']
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)

    cmd_translation = {}
    cmd_translation['locals3'] = {'cp': 'upload', 'sync': 'upload',
                                  'mv': 'move'}
    cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
    cmd_translation['s3local'] = {'cp': 'download', 'sync': 'download',
                                  'mv': 'move'}
    cmd_translation['s3'] = {
        'rm': 'delete',
        'mb': 'make_bucket',
        'rb': 'remove_bucket'
    }
    result_queue = queue.Queue()
    operation_name = cmd_translation[paths_type][self.cmd]

    file_generator = FileGenerator(self._source_client,
                                   operation_name,
                                   self.parameters['follow_symlinks'],
                                   self.parameters['page_size'],
                                   result_queue=result_queue)
    rev_generator = FileGenerator(self._client, '',
                                  self.parameters['follow_symlinks'],
                                  self.parameters['page_size'],
                                  result_queue=result_queue)
    taskinfo = [TaskInfo(src=files['src']['path'],
                         src_type='s3',
                         operation_name=operation_name,
                         client=self._client)]
    stream_dest_path, stream_compare_key = find_dest_path_comp_key(files)
    stream_file_info = [FileInfo(src=files['src']['path'],
                                 dest=stream_dest_path,
                                 compare_key=stream_compare_key,
                                 src_type=files['src']['type'],
                                 dest_type=files['dest']['type'],
                                 operation_name=operation_name,
                                 client=self._client,
                                 is_stream=True)]
    file_info_builder = FileInfoBuilder(
        self._client, self._source_client, self.parameters)
    s3handler = S3Handler(self.session, self.parameters,
                          runtime_config=self._runtime_config,
                          result_queue=result_queue)
    s3_stream_handler = S3StreamHandler(self.session, self.parameters,
                                        result_queue=result_queue)

    sync_strategies = self.choose_sync_strategies()

    command_dict = {}
    if self.cmd == 'sync':
        command_dict = {'setup': [files, rev_files],
                        'file_generator': [file_generator,
                                           rev_generator],
                        'filters': [create_filter(self.parameters),
                                    create_filter(self.parameters)],
                        'comparator': [Comparator(**sync_strategies)],
                        'file_info_builder': [file_info_builder],
                        's3_handler': [s3handler]}
    elif self.cmd == 'cp' and self.parameters['is_stream']:
        command_dict = {'setup': [stream_file_info],
                        's3_handler': [s3_stream_handler]}
    elif self.cmd == 'cp':
        command_dict = {'setup': [files],
                        'file_generator': [file_generator],
                        'filters': [create_filter(self.parameters)],
                        'file_info_builder': [file_info_builder],
                        's3_handler': [s3handler]}
    elif self.cmd == 'rm':
        command_dict = {'setup': [files],
                        'file_generator': [file_generator],
                        'filters': [create_filter(self.parameters)],
                        'file_info_builder': [file_info_builder],
                        's3_handler': [s3handler]}
    elif self.cmd == 'mv':
        command_dict = {'setup': [files],
                        'file_generator': [file_generator],
                        'filters': [create_filter(self.parameters)],
                        'file_info_builder': [file_info_builder],
                        's3_handler': [s3handler]}
    elif self.cmd == 'mb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    elif self.cmd == 'rb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}

    files = command_dict['setup']
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replace the files attr with the return value of the
    # file_list.  The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed and the number of tasks warned.
    # This means that files[0] now contains a namedtuple with
    # the number of failed tasks and the number of warned tasks.
    # In terms of the RC, we're keeping it simple and saying
    # that > 0 failed tasks will give a 1 RC and > 0 warned
    # tasks will give a 2 RC.  Otherwise a RC of zero is returned.
    rc = 0
    if files[0].num_tasks_failed > 0:
        rc = 1
    if files[0].num_tasks_warned > 0:
        rc = 2
    return rc
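# A minimal model of the return-code mapping described in the comment block
# at the end of run() above.  ``CommandResult`` and its field names are
# assumptions here (the handler's actual return type is not shown in this
# snippet); only the two counters that run() reads are modeled.
from collections import namedtuple

CommandResult = namedtuple('CommandResult',
                           ['num_tasks_failed', 'num_tasks_warned'])


def _return_code(result):
    # Failures set RC 1; warnings are checked last, so any warnings
    # override with RC 2, matching the order of the checks in run().
    rc = 0
    if result.num_tasks_failed > 0:
        rc = 1
    if result.num_tasks_warned > 0:
        rc = 2
    return rc

# _return_code(CommandResult(0, 0)) -> 0
# _return_code(CommandResult(2, 0)) -> 1
# _return_code(CommandResult(2, 1)) -> 2  (the warning check runs last)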
def run(self):
    """
    This function wires together all of the generators and completes
    the command.  First a dictionary is created that is indexed first by
    the command name.  Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the particular
    instruction for that command.  To begin the wiring, either a
    ``FileFormat`` or ``TaskInfo`` object, depending on the command, is
    put into a list.  Then the function enters a while loop that pops
    off an instruction.  It then determines the object needed and calls
    the call function of the object using the list as the input.
    Depending on the number of objects in the input list and the number
    of components in the list corresponding to the instruction, the
    call method of the component can be called two different ways.  If
    the number of inputs is equal to the number of components, a 1:1
    mapping of inputs to components is used when calling the call
    function.  If there are more inputs than components, then a 2:1
    mapping of inputs to components is used where the component call
    method takes two inputs instead of one.  Whatever files are yielded
    from the call function are appended to a list and used as the input
    for the next repetition of the while loop until there are no more
    instructions.
    """
    src = self.parameters['src']
    dest = self.parameters['dest']
    paths_type = self.parameters['paths_type']
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)

    cmd_translation = {}
    cmd_translation['locals3'] = {'cp': 'upload', 'sync': 'upload',
                                  'mv': 'move'}
    cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
    cmd_translation['s3local'] = {'cp': 'download', 'sync': 'download',
                                  'mv': 'move'}
    cmd_translation['s3'] = {
        'rm': 'delete',
        'mb': 'make_bucket',
        'rb': 'remove_bucket'
    }
    operation_name = cmd_translation[paths_type][self.cmd]

    file_generator = FileGenerator(self._service, self._endpoint,
                                   operation_name, self.parameters)
    rev_generator = FileGenerator(self._service, self._endpoint, '',
                                  self.parameters)
    taskinfo = [TaskInfo(src=files['src']['path'],
                         src_type='s3',
                         operation_name=operation_name,
                         service=self._service,
                         endpoint=self._endpoint)]
    s3handler = S3Handler(self.session, self.parameters)

    command_dict = {}
    if self.cmd == 'sync':
        command_dict = {'setup': [files, rev_files],
                        'file_generator': [file_generator,
                                           rev_generator],
                        'filters': [create_filter(self.parameters),
                                    create_filter(self.parameters)],
                        'comparator': [Comparator(self.parameters)],
                        's3_handler': [s3handler]}
    elif self.cmd == 'cp':
        command_dict = {'setup': [files],
                        'file_generator': [file_generator],
                        'filters': [create_filter(self.parameters)],
                        's3_handler': [s3handler]}
    elif self.cmd == 'rm':
        command_dict = {'setup': [files],
                        'file_generator': [file_generator],
                        'filters': [create_filter(self.parameters)],
                        's3_handler': [s3handler]}
    elif self.cmd == 'mv':
        command_dict = {'setup': [files],
                        'file_generator': [file_generator],
                        'filters': [create_filter(self.parameters)],
                        's3_handler': [s3handler]}
    elif self.cmd == 'mb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    elif self.cmd == 'rb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}

    files = command_dict['setup']
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replace the files attr with the return value of the
    # file_list.  The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed.  This means that files[0] now contains
    # the number of failed tasks.  In terms of the RC, we're
    # keeping it simple and saying that > 0 failed tasks
    # will give a 1 RC.
    rc = 0
    if files[0] > 0:
        rc = 1
    return rc
def run(self):
    """
    This function wires together all of the generators and completes
    the command.  First a dictionary is created that is indexed first by
    the command name.  Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the particular
    instruction for that command.  To begin the wiring, either a
    ``FileFormat`` or ``TaskInfo`` object, depending on the command, is
    put into a list.  Then the function enters a while loop that pops
    off an instruction.  It then determines the object needed and calls
    the call function of the object using the list as the input.
    Depending on the number of objects in the input list and the number
    of components in the list corresponding to the instruction, the
    call method of the component can be called two different ways.  If
    the number of inputs is equal to the number of components, a 1:1
    mapping of inputs to components is used when calling the call
    function.  If there are more inputs than components, then a 2:1
    mapping of inputs to components is used where the component call
    method takes two inputs instead of one.  Whatever files are yielded
    from the call function are appended to a list and used as the input
    for the next repetition of the while loop until there are no more
    instructions.
    """
    src = self.parameters['src']
    dest = self.parameters['dest']
    paths_type = self.parameters['paths_type']
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)

    cmd_translation = {}
    cmd_translation['locals3'] = {
        'cp': 'upload',
        'sync': 'upload',
        'mv': 'move'
    }
    cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
    cmd_translation['s3local'] = {
        'cp': 'download',
        'sync': 'download',
        'mv': 'move'
    }
    cmd_translation['s3'] = {
        'rm': 'delete',
        'mb': 'make_bucket',
        'rb': 'remove_bucket'
    }
    result_queue = queue.Queue()
    operation_name = cmd_translation[paths_type][self.cmd]

    fgen_kwargs = {
        'client': self._source_client,
        'operation_name': operation_name,
        'follow_symlinks': self.parameters['follow_symlinks'],
        'page_size': self.parameters['page_size'],
        'result_queue': result_queue
    }
    rgen_kwargs = {
        'client': self._client,
        'operation_name': '',
        'follow_symlinks': self.parameters['follow_symlinks'],
        'page_size': self.parameters['page_size'],
        'result_queue': result_queue
    }

    fgen_request_parameters = {}
    fgen_head_object_params = {}
    fgen_request_parameters['HeadObject'] = fgen_head_object_params
    fgen_kwargs['request_parameters'] = fgen_request_parameters

    # SSE-C may be needed for HeadObject for copies/downloads/deletes.
    # If the operation is s3 to s3, the FileGenerator should use the
    # copy source key and algorithm.  Otherwise, use the regular
    # SSE-C key and algorithm.  Note the reverse FileGenerator does
    # not need any of these because it is used only for sync operations,
    # which only use ListObjects, which does not require HeadObject.
    RequestParamsMapper.map_head_object_params(
        fgen_head_object_params, self.parameters)
    if paths_type == 's3s3':
        RequestParamsMapper.map_head_object_params(
            fgen_head_object_params, {
                'sse_c': self.parameters.get('sse_c_copy_source'),
                'sse_c_key': self.parameters.get('sse_c_copy_source_key')
            })

    file_generator = FileGenerator(**fgen_kwargs)
    rev_generator = FileGenerator(**rgen_kwargs)
    taskinfo = [TaskInfo(src=files['src']['path'],
                         src_type='s3',
                         operation_name=operation_name,
                         client=self._client)]
    stream_dest_path, stream_compare_key = find_dest_path_comp_key(files)
    stream_file_info = [FileInfo(src=files['src']['path'],
                                 dest=stream_dest_path,
                                 compare_key=stream_compare_key,
                                 src_type=files['src']['type'],
                                 dest_type=files['dest']['type'],
                                 operation_name=operation_name,
                                 client=self._client,
                                 is_stream=True)]
    file_info_builder = FileInfoBuilder(
        self._client, self._source_client, self.parameters)
    s3handler = S3Handler(self.session, self.parameters,
                          runtime_config=self._runtime_config,
                          result_queue=result_queue)
    s3_stream_handler = S3StreamHandler(self.session, self.parameters,
                                        result_queue=result_queue)

    sync_strategies = self.choose_sync_strategies()

    command_dict = {}
    if self.cmd == 'sync':
        command_dict = {
            'setup': [files, rev_files],
            'file_generator': [file_generator, rev_generator],
            'filters': [
                create_filter(self.parameters),
                create_filter(self.parameters)
            ],
            'comparator': [Comparator(**sync_strategies)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'cp' and self.parameters['is_stream']:
        command_dict = {
            'setup': [stream_file_info],
            's3_handler': [s3_stream_handler]
        }
    elif self.cmd == 'cp':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'rm':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mv':
        command_dict = {
            'setup': [files],
            'file_generator': [file_generator],
            'filters': [create_filter(self.parameters)],
            'file_info_builder': [file_info_builder],
            's3_handler': [s3handler]
        }
    elif self.cmd == 'mb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
    elif self.cmd == 'rb':
        command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}

    files = command_dict['setup']
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replace the files attr with the return value of the
    # file_list.  The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed and the number of tasks warned.
    # This means that files[0] now contains a namedtuple with
    # the number of failed tasks and the number of warned tasks.
    # In terms of the RC, we're keeping it simple and saying
    # that > 0 failed tasks will give a 1 RC and > 0 warned
    # tasks will give a 2 RC.  Otherwise a RC of zero is returned.
    rc = 0
    if files[0].num_tasks_failed > 0:
        rc = 1
    if files[0].num_tasks_warned > 0:
        rc = 2
    return rc
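# For reference, the request_parameters structure assembled above for the
# forward FileGenerator ends up shaped roughly like this for an s3->s3 copy
# with SSE-C copy-source options set (the exact key names placed under
# 'HeadObject' are produced by RequestParamsMapper and are shown here as an
# assumption, not verified output):
#
#   fgen_kwargs['request_parameters'] == {
#       'HeadObject': {'SSECustomerAlgorithm': <copy-source algorithm>,
#                      'SSECustomerKey': <copy-source key>}
#   }
#
# The reverse generator gets no request_parameters at all, since sync only
# lists objects and never issues HeadObject, as noted in the comment above.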
def run(self):
    """
    This function wires together all of the generators and completes
    the command.  First a dictionary is created that is indexed first by
    the command name.  Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the particular
    instruction for that command.  To begin the wiring, either a
    ``FileFormat`` or ``TaskInfo`` object, depending on the command, is
    put into a list.  Then the function enters a while loop that pops
    off an instruction.  It then determines the object needed and calls
    the call function of the object using the list as the input.
    Depending on the number of objects in the input list and the number
    of components in the list corresponding to the instruction, the
    call method of the component can be called two different ways.  If
    the number of inputs is equal to the number of components, a 1:1
    mapping of inputs to components is used when calling the call
    function.  If there are more inputs than components, then a 2:1
    mapping of inputs to components is used where the component call
    method takes two inputs instead of one.  Whatever files are yielded
    from the call function are appended to a list and used as the input
    for the next repetition of the while loop until there are no more
    instructions.
    """
    src = self.parameters["src"]
    dest = self.parameters["dest"]
    paths_type = self.parameters["paths_type"]
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)

    cmd_translation = {}
    cmd_translation["locals3"] = {"cp": "upload", "sync": "upload",
                                  "mv": "move"}
    cmd_translation["s3s3"] = {"cp": "copy", "sync": "copy", "mv": "move"}
    cmd_translation["s3local"] = {"cp": "download", "sync": "download",
                                  "mv": "move"}
    cmd_translation["s3"] = {"rm": "delete", "mb": "make_bucket",
                             "rb": "remove_bucket"}
    result_queue = queue.Queue()
    operation_name = cmd_translation[paths_type][self.cmd]

    fgen_kwargs = {
        "client": self._source_client,
        "operation_name": operation_name,
        "follow_symlinks": self.parameters["follow_symlinks"],
        "page_size": self.parameters["page_size"],
        "result_queue": result_queue,
    }
    rgen_kwargs = {
        "client": self._client,
        "operation_name": "",
        "follow_symlinks": self.parameters["follow_symlinks"],
        "page_size": self.parameters["page_size"],
        "result_queue": result_queue,
    }

    fgen_request_parameters = {}
    fgen_head_object_params = {}
    fgen_request_parameters["HeadObject"] = fgen_head_object_params
    fgen_kwargs["request_parameters"] = fgen_request_parameters

    # SSE-C may be needed for HeadObject for copies/downloads/deletes.
    # If the operation is s3 to s3, the FileGenerator should use the
    # copy source key and algorithm.  Otherwise, use the regular
    # SSE-C key and algorithm.  Note the reverse FileGenerator does
    # not need any of these because it is used only for sync operations,
    # which only use ListObjects, which does not require HeadObject.
    RequestParamsMapper.map_head_object_params(
        fgen_head_object_params, self.parameters)
    if paths_type == "s3s3":
        RequestParamsMapper.map_head_object_params(
            fgen_head_object_params,
            {
                "sse_c": self.parameters.get("sse_c_copy_source"),
                "sse_c_key": self.parameters.get("sse_c_copy_source_key"),
            },
        )

    file_generator = FileGenerator(**fgen_kwargs)
    rev_generator = FileGenerator(**rgen_kwargs)
    taskinfo = [
        TaskInfo(src=files["src"]["path"],
                 src_type="s3",
                 operation_name=operation_name,
                 client=self._client)
    ]
    stream_dest_path, stream_compare_key = find_dest_path_comp_key(files)
    stream_file_info = [
        FileInfo(
            src=files["src"]["path"],
            dest=stream_dest_path,
            compare_key=stream_compare_key,
            src_type=files["src"]["type"],
            dest_type=files["dest"]["type"],
            operation_name=operation_name,
            client=self._client,
            is_stream=True,
        )
    ]
    file_info_builder = FileInfoBuilder(self._client, self._source_client,
                                        self.parameters)
    s3handler = S3Handler(
        self.session,
        self.parameters,
        runtime_config=self._runtime_config,
        result_queue=result_queue,
    )
    s3_stream_handler = S3StreamHandler(self.session, self.parameters,
                                        result_queue=result_queue)

    sync_strategies = self.choose_sync_strategies()

    command_dict = {}
    if self.cmd == "sync":
        command_dict = {
            "setup": [files, rev_files],
            "file_generator": [file_generator, rev_generator],
            "filters": [create_filter(self.parameters),
                        create_filter(self.parameters)],
            "comparator": [Comparator(**sync_strategies)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "cp" and self.parameters["is_stream"]:
        command_dict = {
            "setup": [stream_file_info],
            "s3_handler": [s3_stream_handler],
        }
    elif self.cmd == "cp":
        command_dict = {
            "setup": [files],
            "file_generator": [file_generator],
            "filters": [create_filter(self.parameters)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "rm":
        command_dict = {
            "setup": [files],
            "file_generator": [file_generator],
            "filters": [create_filter(self.parameters)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "mv":
        command_dict = {
            "setup": [files],
            "file_generator": [file_generator],
            "filters": [create_filter(self.parameters)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "mb":
        command_dict = {"setup": [taskinfo], "s3_handler": [s3handler]}
    elif self.cmd == "rb":
        command_dict = {"setup": [taskinfo], "s3_handler": [s3handler]}

    files = command_dict["setup"]
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replace the files attr with the return value of the
    # file_list.  The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed and the number of tasks warned.
    # This means that files[0] now contains a namedtuple with
    # the number of failed tasks and the number of warned tasks.
    # In terms of the RC, we're keeping it simple and saying
    # that > 0 failed tasks will give a 1 RC and > 0 warned
    # tasks will give a 2 RC.  Otherwise a RC of zero is returned.
    rc = 0
    if files[0].num_tasks_failed > 0:
        rc = 1
    if files[0].num_tasks_warned > 0:
        rc = 2
    return rc
def run(self):
    """
    This function wires together all of the generators and completes
    the command.  First a dictionary is created that is indexed first by
    the command name.  Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the particular
    instruction for that command.  To begin the wiring, either a
    ``FileFormat`` or ``TaskInfo`` object, depending on the command, is
    put into a list.  Then the function enters a while loop that pops
    off an instruction.  It then determines the object needed and calls
    the call function of the object using the list as the input.
    Depending on the number of objects in the input list and the number
    of components in the list corresponding to the instruction, the
    call method of the component can be called two different ways.  If
    the number of inputs is equal to the number of components, a 1:1
    mapping of inputs to components is used when calling the call
    function.  If there are more inputs than components, then a 2:1
    mapping of inputs to components is used where the component call
    method takes two inputs instead of one.  Whatever files are yielded
    from the call function are appended to a list and used as the input
    for the next repetition of the while loop until there are no more
    instructions.
    """
    src = self.parameters["src"]
    dest = self.parameters["dest"]
    paths_type = self.parameters["paths_type"]
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)

    cmd_translation = {}
    cmd_translation["locals3"] = {"cp": "upload", "sync": "upload",
                                  "mv": "move"}
    cmd_translation["s3s3"] = {"cp": "copy", "sync": "copy", "mv": "move"}
    cmd_translation["s3local"] = {"cp": "download", "sync": "download",
                                  "mv": "move"}
    cmd_translation["s3"] = {"rm": "delete", "mb": "make_bucket",
                             "rb": "remove_bucket"}
    result_queue = queue.Queue()
    operation_name = cmd_translation[paths_type][self.cmd]

    file_generator = FileGenerator(
        self._service,
        self._source_endpoint,
        operation_name,
        self.parameters["follow_symlinks"],
        self.parameters["page_size"],
        result_queue=result_queue,
    )
    rev_generator = FileGenerator(
        self._service,
        self._endpoint,
        "",
        self.parameters["follow_symlinks"],
        self.parameters["page_size"],
        result_queue=result_queue,
    )
    taskinfo = [
        TaskInfo(
            src=files["src"]["path"],
            src_type="s3",
            operation_name=operation_name,
            service=self._service,
            endpoint=self._endpoint,
        )
    ]
    file_info_builder = FileInfoBuilder(self._service, self._endpoint,
                                        self._source_endpoint,
                                        self.parameters)
    s3handler = S3Handler(self.session, self.parameters,
                          result_queue=result_queue)

    command_dict = {}
    if self.cmd == "sync":
        command_dict = {
            "setup": [files, rev_files],
            "file_generator": [file_generator, rev_generator],
            "filters": [create_filter(self.parameters),
                        create_filter(self.parameters)],
            "comparator": [Comparator(self.parameters)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "cp":
        command_dict = {
            "setup": [files],
            "file_generator": [file_generator],
            "filters": [create_filter(self.parameters)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "rm":
        command_dict = {
            "setup": [files],
            "file_generator": [file_generator],
            "filters": [create_filter(self.parameters)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "mv":
        command_dict = {
            "setup": [files],
            "file_generator": [file_generator],
            "filters": [create_filter(self.parameters)],
            "file_info_builder": [file_info_builder],
            "s3_handler": [s3handler],
        }
    elif self.cmd == "mb":
        command_dict = {"setup": [taskinfo], "s3_handler": [s3handler]}
    elif self.cmd == "rb":
        command_dict = {"setup": [taskinfo], "s3_handler": [s3handler]}

    files = command_dict["setup"]
    while self.instructions:
        instruction = self.instructions.pop(0)
        file_list = []
        components = command_dict[instruction]
        for i in range(len(components)):
            if len(files) > len(components):
                file_list.append(components[i].call(*files))
            else:
                file_list.append(components[i].call(files[i]))
        files = file_list
    # This is kinda quirky, but each call through the instructions
    # will replace the files attr with the return value of the
    # file_list.  The very last call is a single list of
    # [s3_handler], and the s3_handler returns the number of
    # tasks failed and the number of tasks warned.
    # This means that files[0] now contains a namedtuple with
    # the number of failed tasks and the number of warned tasks.
    # In terms of the RC, we're keeping it simple and saying
    # that > 0 failed tasks will give a 1 RC and > 0 warned
    # tasks will give a 2 RC.  Otherwise a RC of zero is returned.
    rc = 0
    if files[0].num_tasks_failed > 0:
        rc = 1
    if files[0].num_tasks_warned > 0:
        rc = 2
    return rc