Example 1
# QC statuses set by the Image Analyst after looking at the results:
PASSED_QA = 'Passed'
FAILED = 'Failed'
BAD = 'Bad'
POOR = 'Poor'
RERUN = 'Rerun' # will cause spider to delete results and rerun the processing
REPROC = 'Reproc' # will cause spider to zip the current results into OLD and then rerun the processing
DONOTRUN = 'Do Not Run' # Do not run this assessor anymore
FAILED_NEEDS_REPROC = 'Failed-needs reprocessing' # FS
PASSED_EDITED_QA = 'Passed with edits' # FS
OPEN_QA_LIST = [RERUN, REPROC]
BAD_QA_STATUS = [FAILED, BAD, POOR, DONOTRUN]
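
# --- Hypothetical helper, added for illustration only (qc_action is not part
# of dax): shows how the QC-status constants above might be consulted to
# decide what the spider should do with an assessor after QC review.
def qc_action(qc_status):
    if qc_status in OPEN_QA_LIST:
        # RERUN deletes the results; REPROC archives them into OLD first.
        return 'rerun' if qc_status == RERUN else 'reprocess'
    if qc_status in BAD_QA_STATUS:
        return 'flagged as bad - no automatic re-processing'
    return 'no action'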

# Other Constants
RESULTS_DIR = DAX_SETTINGS.get_results_dir()
DEFAULT_EMAIL_OPTS = DAX_SETTINGS.get_email_opts()
JOB_EXTENSION_FILE = DAX_SETTINGS.get_job_extension_file()

READY_TO_UPLOAD_FLAG_FILENAME = 'READY_TO_UPLOAD.txt'
OLD_RESOURCE = 'OLD'
EDITS_RESOURCE = 'EDITS'
REPROC_RES_SKIP_LIST = [OLD_RESOURCE, EDITS_RESOURCE]
INPUTS_DIRNAME = 'INPUTS'
BATCH_DIRNAME = 'BATCH'
OUTLOG_DIRNAME = 'OUTLOG'
PBS_DIRNAME = 'PBS'
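
# --- Hypothetical sketch, illustration only (resources_to_archive is not
# dax's implementation): per the comment on REPROC above, a 'Reproc' request
# archives the current results into OLD before reprocessing; resources named
# in REPROC_RES_SKIP_LIST (OLD, EDITS) are left untouched so earlier archives
# and manual edits survive the reset.
def resources_to_archive(resource_labels):
    return [label for label in resource_labels
            if label not in REPROC_RES_SKIP_LIST]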

def mkdirp(path):
    try:
        os.makedirs(path)
    except OSError:
        # ignore the error if the directory already exists
        if not os.path.isdir(path):
            raise
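
# --- Illustrative usage only (make_job_dirs and the layout it assumes are not
# confirmed by this fragment): lay out the standard sub-directories of a job
# under RESULTS_DIR for a given assessor label.
def make_job_dirs(assessor_label):
    job_dir = os.path.join(RESULTS_DIR, assessor_label)
    for dirname in (INPUTS_DIRNAME, BATCH_DIRNAME, OUTLOG_DIRNAME, PBS_DIRNAME):
        mkdirp(os.path.join(job_dir, dirname))
    return job_dir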
Example 2
File: task.py Project: Raab70/dax
""" Task object to generate / manage assessors and cluster """
import os
import time
import logging
from datetime import date

import cluster
from cluster import PBS

from dax_settings import DAX_Settings

DAX_SETTINGS = DAX_Settings()
RESULTS_DIR = DAX_SETTINGS.get_results_dir()
DEFAULT_EMAIL_OPTS = DAX_SETTINGS.get_email_opts()
JOB_EXTENSION_FILE = DAX_SETTINGS.get_job_extension_file()

# Logger to print logs
LOGGER = logging.getLogger("dax")

# Job Statuses
NO_DATA = "NO_DATA"  # assessor that has no data to run on (for a session assessor), e.g. dtiqa multi when no DTI is present.
NEED_TO_RUN = "NEED_TO_RUN"  # assessor ready to be launched on the cluster (ACCRE); all the input data for the process are there.
NEED_INPUTS = "NEED_INPUTS"  # assessor with input data missing from a scan, multiple scans or another assessor.
JOB_RUNNING = "JOB_RUNNING"  # the job has been submitted on the cluster and is running right now.
JOB_FAILED = "JOB_FAILED"  # the job failed on the cluster.
READY_TO_UPLOAD = "READY_TO_UPLOAD"  # job done, waiting for the Spider to upload the results.
UPLOADING = "UPLOADING"  # in the process of uploading the resources to XNAT.
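
# --- Hypothetical dispatch sketch, added for illustration only (next_step is
# not dax's Task implementation): shows how the job statuses above could gate
# what happens next for a task.
def next_step(status):
    if status == NO_DATA:
        return "nothing to do: no data to process"
    if status == NEED_INPUTS:
        return "wait: inputs missing from a scan or another assessor"
    if status == NEED_TO_RUN:
        return "submit the job to the cluster"
    if status == JOB_RUNNING:
        return "poll the cluster queue"
    if status == JOB_FAILED:
        return "report the failure"
    if status in (READY_TO_UPLOAD, UPLOADING):
        return "hand the results off to the upload process"
    return "unknown status: %s" % status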