def delete_files(request):
    """ Deletes multiple uploaded files. """
    logger, user, upload_folder, process_folder = get_user_and_folders_plus_logger(request)
    logger.info("Deleting files for user: %s", user)

    data = {}
    data['results'] = []
    data['success'] = False

    del_dict = QueryDict(request.body)
    target_fnames = del_dict.getlist('delete_file[]')
    file_type = 'upload'
    file_folder = os.path.join(settings.FILE_FOLDER_MAP.get(file_type), user)

    # Prevent files from being renamed/deleted if the workflow is running
    is_workflow_running = process_data.check_workflow_running(user, process_folder)
    if not is_workflow_running:
        for target_fname in target_fnames:
            logger.info("Deleting file %s", target_fname)
            target_file = os.path.join(file_folder, target_fname)
            data['results'].append(process_data.delete_file(target_file, target_fname, file_folder, logger))
        data['success'] = all(result['success'] for result in data['results']) if data['results'] else False
    else:
        data['success'] = False
        data['target_files'] = target_fnames
        data['error_msg'] = "Files cannot be deleted when a workflow is running."

    return JsonResponse(data)

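
# Usage sketch: the view above reads repeated 'delete_file[]' keys from the
# raw request body via QueryDict, so clients must send a form-encoded DELETE
# body. A minimal illustration with Django's test client follows; the URL
# name 'delete_files' and the file names are assumptions for illustration,
# not taken from this module's URL configuration.
def _example_bulk_delete(client):
    from django.urls import reverse
    body = 'delete_file[]=sample1.fastq.gz&delete_file[]=sample2.fastq.gz'
    # the content type must be form-encoded for QueryDict(request.body) to parse it
    return client.delete(reverse('delete_files'), data=body,
                         content_type='application/x-www-form-urlencoded')
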

def rename_file(request, file_name):
    """ Renames an uploaded file (when it is not being processed) or an archived file. """
    logger, user, upload_folder, process_folder = get_user_and_folders_plus_logger(request)
    logger.info("Renaming file for user: %s", user)

    data = {}

    # The URL provides the current file name; the request body provides the
    # new file name ('rename_file') and the category of file
    # ('type': uploaded, archived, md5sum, viz, data_product).
    put_dict = QueryDict(request.body)
    rename_fname = put_dict.get('rename_file')
    file_type = put_dict.get('type')
    logger.info('Variables passed in: %s %s %s', file_name, rename_fname, file_type)

    file_folder = os.path.join(settings.FILE_FOLDER_MAP.get(file_type), user)
    target_file = os.path.join(file_folder, file_name)
    renamed_file = os.path.join(file_folder, rename_fname)

    is_workflow_running = process_data.check_workflow_running(user, process_folder)
    if not is_workflow_running:
        data = process_data.rename_file(target_file, file_name, renamed_file, logger)
    else:
        data['success'] = False
        data['error_msg'] = "Files cannot be renamed when a workflow is running."

    data['renamed_file'] = rename_fname
    data['original_file'] = file_name

    return JsonResponse(data)

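
# For reference, a minimal sketch of the result shape this view expects back
# from process_data.rename_file: a dict with a 'success' flag and an
# 'error_msg' on failure. This is an assumption for illustration only; the
# actual helper lives in the process_data module and is not shown here.
def _sketch_rename_file(source, source_name, destination, logger):
    data = {'success': False}
    if not os.path.isfile(source):
        data['error_msg'] = "File to rename does not exist: %s" % source_name
    elif os.path.exists(destination):
        data['error_msg'] = "A file with the new name already exists."
    else:
        try:
            os.rename(source, destination)
            data['success'] = True
        except OSError as error:
            logger.error("Unable to rename file: %s", error)
            data['error_msg'] = "Unable to rename file."
    return data
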

def delete_file(request, file_name):
    """ Deletes a single uploaded file. """
    logger, user, upload_folder, process_folder = get_user_and_folders_plus_logger(request)
    logger.info("Deleting file %s for user: %s", file_name, user)

    data = {}
    file_type = 'upload'
    file_folder = os.path.join(settings.FILE_FOLDER_MAP.get(file_type), user)
    target_file = os.path.join(file_folder, file_name)

    # Check if any workflows are running. If so, prevent rename/delete
    # operations since they would interfere with the running workflows.
    is_workflow_running = process_data.check_workflow_running(user, process_folder)
    if not is_workflow_running:
        data = process_data.delete_file(target_file, file_name, file_folder, logger)
    else:
        data['success'] = False
        data['error_msg'] = "Files cannot be deleted when a workflow is running."

    return JsonResponse(data)

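
# For reference, a minimal sketch of the gating check used by the rename and
# delete views. The real process_data.check_workflow_running is defined
# elsewhere; the '*.lock' file convention below is purely an assumption for
# illustration.
def _sketch_check_workflow_running(user, process_folder):
    import glob
    # treat any lock file under the user's process folder as a running workflow
    pattern = os.path.join(process_folder, '**', '*.lock')
    return any(os.path.isfile(path) for path in glob.glob(pattern, recursive=True))
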

def process_files(request):
    # get the user, folders, and logger
    logger, user, user_full_name, user_email, upload_folder, process_folder = \
        get_user_and_folders_plus_logger(request, full_user_info=True)

    metadata_folder = os.path.join(upload_folder, process_data.METADATA_FOLDER)
    files_nonempty = process_data.get_recursive_files_nonempty(upload_folder, include_path=False, recursive=False)

    # Track the workflow status. Needed so we can clear out some cookies
    # on workflow success.
    success = False

    # set the default responses
    responses = {"message1": [], "message2": []}
    responses["raw_input"] = "The following raw files have been uploaded and are ready to verify:\n"
    responses["raw_input"] += "\n".join(files_nonempty) + "\n"

    if request.method == 'POST' and "refresh" not in request.POST:
        logger.info("Post from process page received")

        metadata_file = os.path.join(metadata_folder, settings.METADATA_FILE_NAME)
        study_file = os.path.join(metadata_folder, settings.METADATA_GROUP_FILE_NAME)
        logger.info("Metadata file: %s", metadata_file)

        process_folder = os.path.join(settings.PROCESS_FOLDER, user)
        logger.info("Process folder: %s", process_folder)

        if "verify" in request.POST:
            # check the metadata matches the raw uploads
            responses["message1"] = process_data.check_metadata_files_complete(
                user, upload_folder, metadata_file, study_file)
        elif "process" in request.POST:
            responses["message2"] = process_data.check_md5sum_and_process_data(
                user, user_full_name, user_email, upload_folder, process_folder,
                metadata_file, study_file)

    # log messages
    for message_name, message in responses.items():
        if message and message_name != "raw_input":
            logger.info("Error code: %s", message[0])
            logger.info("Message: %s", message[1])

    # check if the workflow is running
    responses["workflow_running"] = process_data.check_workflow_running(user, process_folder)

    # get the stdout for the processing workflows
    responses["md5sum_stdout"] = read_stdout_stderr(os.path.join(process_folder, process_data.WORKFLOW_MD5SUM_FOLDER))
    responses["data_products_stdout"] = read_stdout_stderr(os.path.join(process_folder, process_data.WORKFLOW_DATA_PRODUCTS_FOLDER))
    responses["visualizations_stdout"] = read_stdout_stderr(os.path.join(process_folder, process_data.WORFLOW_VISUALIZATIONS_FOLDER))

    # if the workflow just started, wait for a refresh before updating status
    if not responses["message2"]:
        workflow_stdouts = [responses["md5sum_stdout"], responses["data_products_stdout"], responses["visualizations_stdout"]]
        if not responses["workflow_running"]:
            if any("fail" in stdout.lower() or "error" in stdout.lower() for stdout in workflow_stdouts):
                # one of the workflows had a task that failed
                responses["message2"] = (1, "ERROR: The workflows have finished running. One of the tasks failed.")
            elif all("Finished" in stdout for stdout in workflow_stdouts):
                responses["message2"] = (0, "Success! All three workflows (md5sum check, data processing, and visualization) finished without error.")
                success = True
        elif (responses["md5sum_stdout"] and not (responses["data_products_stdout"] or responses["visualizations_stdout"])
              or responses["md5sum_stdout"] and responses["data_products_stdout"] and not responses["visualizations_stdout"]):
            # cover the case where we are in between workflows running
            responses["message2"] = (0, "The processing workflows (md5sum check, data processing, and visualization) are still running.")
        elif responses["md5sum_stdout"] or responses["data_products_stdout"] or responses["visualizations_stdout"]:
            responses["message2"] = (0, "The processing workflows (md5sum check, data processing, and visualization) are still running.")

    # determine which buttons should be active
    responses["verify_button"] = 1
    if (responses["message1"] and responses["message1"][0] == 0) or (responses["message2"] and responses["message2"][0] in [0, 1]):
        responses["verify_button"] = 0

    if responses["verify_button"] == 1:
        responses["process_button"] = 0
        responses["refresh_button"] = 0
    elif (responses["message1"] and responses["message1"][0] == 0) or (responses["message2"] and responses["message2"][0] == 1):
        responses["process_button"] = 1
        responses["refresh_button"] = 0
    else:
        responses["process_button"] = 0
        responses["refresh_button"] = 1

    response = render(request, 'process.html', responses)

    # expire the metadata cookies once processing has succeeded
    if success and request.COOKIES.get('sample_metadata', False):
        response.set_cookie('study_metadata', max_age=1)
        response.set_cookie('sample_metadata', max_age=1)

    return response

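
# For reference, a minimal sketch of the read_stdout_stderr helper used by
# process_files(): it should return a single string (empty when a workflow
# has not produced output yet) that can be searched for "fail", "error", and
# "Finished". The log file names below are assumptions for illustration.
def _sketch_read_stdout_stderr(workflow_folder):
    contents = []
    for name in ('stdout.log', 'stderr.log'):  # assumed log file names
        path = os.path.join(workflow_folder, name)
        if os.path.isfile(path):
            with open(path) as file_handle:
                contents.append(file_handle.read())
    return "\n".join(contents)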