def cleanup_job_execution(self, job_exe): """See :meth:`job.execution.job_exe_cleaner.JobExecutionCleaner.cleanup_job_execution` """ logger.info('Cleaning up a non-system job') download_dir = get_job_exe_input_data_dir(job_exe.id) download_work_dir = get_job_exe_input_work_dir(job_exe.id) upload_dir = get_job_exe_output_data_dir(job_exe.id) upload_work_dir = get_job_exe_output_work_dir(job_exe.id) logger.info('Cleaning up download directory') ScaleFile.objects.cleanup_download_dir(download_dir, download_work_dir) logger.info('Cleaning up upload directories') workspace_ids = job_exe.job.get_job_data().get_output_workspace_ids() for workspace in Workspace.objects.filter(id__in=workspace_ids): logger.info('Cleaning up upload directory for workspace %s', workspace.name) ScaleFile.objects.cleanup_upload_dir(upload_dir, upload_work_dir, workspace) move_work_dir = os.path.join(upload_work_dir, 'move_source_file_in_workspace') if os.path.exists(move_work_dir): logger.info('Cleaning up work directory for moving parsed source files') ScaleFile.objects.cleanup_move_dir(move_work_dir) logger.info('Deleting %s', move_work_dir) os.rmdir(move_work_dir) delete_normal_job_exe_dir_tree(job_exe.id)
def setup_job_dir(self, data_files, job_exe_id): """Sets up the directory structure for a job execution and downloads the given files :param data_files: Dict with each file parameter name mapping to a bool indicating if the parameter accepts multiple files (True) and a relative directory path :type data_files: dict of str -> tuple(bool, str) :param job_exe_id: The job execution ID :type job_exe_id: int :returns: Dict with each file parameter name mapping to a list of absolute file paths of the written files :rtype: dict of str -> list of str """ download_dir = get_job_exe_input_data_dir(job_exe_id) download_work_dir = get_job_exe_input_work_dir(job_exe_id) upload_dir = get_job_exe_output_data_dir(job_exe_id) upload_work_dir = get_job_exe_output_work_dir(job_exe_id) # Download the job execution input files self.retrieve_input_data_files(download_dir, download_work_dir, data_files) # Set up upload directories for output workspace workspace_ids = self.get_output_workspace_ids() for workspace in Workspace.objects.filter(id__in=workspace_ids): ScaleFile.objects.setup_upload_dir(upload_dir, upload_work_dir, workspace) # If the upload dir did not get created (e.g. no output files), make sure it gets created for results manifests if not os.path.exists(upload_dir): logger.info("Creating %s", upload_dir) os.makedirs(upload_dir, mode=0755)
def cleanup_job_execution(self, job_exe): """See :meth:`job.execution.job_exe_cleaner.JobExecutionCleaner.cleanup_job_execution` """ logger.info('Cleaning up a non-system job') download_dir = get_job_exe_input_data_dir(job_exe.id) download_work_dir = get_job_exe_input_work_dir(job_exe.id) upload_dir = get_job_exe_output_data_dir(job_exe.id) upload_work_dir = get_job_exe_output_work_dir(job_exe.id) logger.info('Cleaning up download directory') ScaleFile.objects.cleanup_download_dir(download_dir, download_work_dir) logger.info('Cleaning up upload directories') workspace_ids = job_exe.job.get_job_data().get_output_workspace_ids() for workspace in Workspace.objects.filter(id__in=workspace_ids): logger.info('Cleaning up upload directory for workspace %s', workspace.name) ScaleFile.objects.cleanup_upload_dir(upload_dir, upload_work_dir, workspace) move_work_dir = os.path.join(upload_work_dir, 'move_source_file_in_workspace') if os.path.exists(move_work_dir): logger.info( 'Cleaning up work directory for moving parsed source files') ScaleFile.objects.cleanup_move_dir(move_work_dir) logger.info('Deleting %s', move_work_dir) os.rmdir(move_work_dir) delete_normal_job_exe_dir_tree(job_exe.id)
def cleanup_job_execution(self, job_exe):
    """See :meth:`job.execution.job_exe_cleaner.JobExecutionCleaner.cleanup_job_execution`
    """

    logger.info('Cleaning up a non-system job')

    download_dir = get_job_exe_input_data_dir(job_exe.id)
    download_work_dir = get_job_exe_input_work_dir(job_exe.id)
    upload_dir = get_job_exe_output_data_dir(job_exe.id)
    upload_work_dir = get_job_exe_output_work_dir(job_exe.id)

    logger.info('Cleaning up download directory')
    ScaleFile.objects.cleanup_download_dir(download_dir, download_work_dir)

    logger.info('Cleaning up upload directories')
    workspace_ids = job_exe.job.get_job_data().get_output_workspace_ids()
    for workspace in Workspace.objects.filter(id__in=workspace_ids):
        logger.info('Cleaning up upload directory for workspace %s', workspace.name)
        ScaleFile.objects.cleanup_upload_dir(upload_dir, upload_work_dir, workspace)

    save_job_exe_metrics(job_exe)
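# The docstrings above reference
# :meth:`job.execution.job_exe_cleaner.JobExecutionCleaner.cleanup_job_execution`.
# A minimal sketch of what such a base interface could look like, assuming it
# defines only the cleanup hook; this is an illustration, not the project's
# actual class.
class _ExampleJobExecutionCleaner(object):
    """Hypothetical sketch of the cleaner interface implemented above."""

    def cleanup_job_execution(self, job_exe):
        """Cleans up the directories for the given job execution (sketch only)

        :param job_exe: The job execution model to clean up
        """
        raise NotImplementedError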