Example #1
__copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'

import os
import sys
import logging
from datetime import datetime, timedelta

import processors
import modules
import XnatUtils
import task
import cluster
import bin
from task import Task, ClusterTask, XnatTask
from dax_settings import DAX_Settings
DAX_SETTINGS = DAX_Settings()

UPDATE_PREFIX = 'updated--'
UPDATE_FORMAT = "%Y-%m-%d %H:%M:%S"
BUILD_SUFFIX = 'BUILD_RUNNING.txt'
UPDATE_SUFFIX = 'UPDATE_RUNNING.txt'
LAUNCH_SUFFIX = 'LAUNCHER_RUNNING.txt'

# Logger to print logs
LOGGER = logging.getLogger('dax')


def str_to_timedelta(delta_str):
    if len(delta_str) <= 1:
        raise ValueError('invalid timedelta string value')
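    # The example is truncated here; a minimal sketch of how the parsing
    # might continue, assuming '<integer><unit>' strings with s/m/h/d
    # suffixes (hypothetical completion, not the project's verified code):
    val = int(delta_str[:-1])
    if delta_str.endswith('s'):
        return timedelta(seconds=val)
    elif delta_str.endswith('m'):
        return timedelta(minutes=val)
    elif delta_str.endswith('h'):
        return timedelta(hours=val)
    elif delta_str.endswith('d'):
        return timedelta(days=val)
    else:
        raise ValueError('invalid timedelta string value')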
Example #2
__copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'

import os
import sys
import logging
from datetime import datetime, timedelta

import processors
import modules
import XnatUtils
import task
import cluster
import bin
from task import Task
from dax_settings import DAX_Settings
DAX_SETTINGS = DAX_Settings()
RESULTS_DIR = DAX_SETTINGS.get_results_dir()
DEFAULT_ROOT_JOB_DIR = DAX_SETTINGS.get_root_job_dir()
DEFAULT_QUEUE_LIMIT = DAX_SETTINGS.get_queue_limit()
DEFAULT_MAX_AGE = DAX_SETTINGS.get_max_age()

UPDATE_PREFIX = 'updated--'
UPDATE_FORMAT = "%Y-%m-%d %H:%M:%S"
BUILD_SUFFIX = 'BUILD_RUNNING.txt'
UPDATE_SUFFIX = 'UPDATE_RUNNING.txt'
LAUNCH_SUFFIX = 'LAUNCHER_RUNNING.txt'

# Logger to print logs
LOGGER = logging.getLogger('dax')

class Launcher(object):
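    # Hypothetical sketch (the example cuts off at the class header):
    # a launcher of this kind would plausibly capture the settings-derived
    # defaults defined above; the signature and attribute names here are
    # assumptions, not the project's verified API.
    def __init__(self, project_process_dict, project_modules_dict,
                 queue_limit=DEFAULT_QUEUE_LIMIT,
                 root_job_dir=DEFAULT_ROOT_JOB_DIR,
                 max_age=DEFAULT_MAX_AGE):
        self.project_process_dict = project_process_dict
        self.project_modules_dict = project_modules_dict
        self.queue_limit = queue_limit
        self.root_job_dir = root_job_dir
        self.max_age = max_age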
Example #3
""" Task object to generate / manage assessors and cluster """
import os
import shutil
import errno
import time
import logging
from datetime import date

import cluster
from cluster import PBS

from dax_settings import DAX_Settings
DAX_SETTINGS = DAX_Settings()

# Logger to print logs
LOGGER = logging.getLogger('dax')

# Job Statuses
NO_DATA = 'NO_DATA'  # session assessor with no data to run (e.g. dtiqa_multi but no DTI present)
NEED_TO_RUN = 'NEED_TO_RUN'  # ready to be launched on the cluster (ACCRE); all input data for the process are there
NEED_INPUTS = 'NEED_INPUTS'  # input data are missing from a scan, multiple scans, or another assessor
JOB_RUNNING = 'JOB_RUNNING'  # the job has been submitted on the cluster and is currently running
JOB_FAILED = 'JOB_FAILED'  # the job failed on the cluster
READY_TO_UPLOAD = 'READY_TO_UPLOAD'  # job done; waiting for the Spider to upload the results
UPLOADING = 'UPLOADING'  # in the process of uploading the resources to XNAT
COMPLETE = 'COMPLETE'  # the job and upload are both done; the assessor contains all its files
READY_TO_COMPLETE = 'READY_TO_COMPLETE'  # the job finished and the upload is complete
DOES_NOT_EXIST = 'DOES_NOT_EXIST'
OPEN_STATUS_LIST = [NEED_TO_RUN, UPLOADING, JOB_RUNNING, READY_TO_COMPLETE, JOB_FAILED]
JOB_BUILT = 'JOB_BUILT'
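
# Illustrative helper (not part of the original file): with the constants
# above, checking whether an assessor still needs attention reduces to a
# membership test against OPEN_STATUS_LIST.
def is_open_status(procstatus):
    """Return True if the assessor is still moving through the pipeline."""
    return procstatus in OPEN_STATUS_LIST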
Example #4
#!/usr/bin/env python
# -*- coding: utf-8 -*-

""" Cluster functionality """

__copyright__ = 'Copyright 2013 Vanderbilt University. All Rights Reserved'

import os
import time
import logging
import subprocess
from datetime import datetime
from subprocess import CalledProcessError
from dax_settings import DAX_Settings
DAX_SETTINGS = DAX_Settings()
DEFAULT_EMAIL_OPTS = DAX_SETTINGS.get_email_opts()
JOB_TEMPLATE = DAX_SETTINGS.get_job_template()
CMD_SUBMIT = DAX_SETTINGS.get_cmd_submit()
CMD_COUNT_NB_JOBS = DAX_SETTINGS.get_cmd_count_nb_jobs()
CMD_GET_JOB_STATUS = DAX_SETTINGS.get_cmd_get_job_status()
CMD_GET_JOB_WALLTIME = DAX_SETTINGS.get_cmd_get_job_walltime()
CMD_GET_JOB_MEMORY = DAX_SETTINGS.get_cmd_get_job_memory()
CMD_GET_JOB_NODE = DAX_SETTINGS.get_cmd_get_job_node()
RUNNING_STATUS = DAX_SETTINGS.get_running_status()
QUEUE_STATUS = DAX_SETTINGS.get_queue_status()
COMPLETE_STATUS = DAX_SETTINGS.get_complete_status()
PREFIX_JOBID = DAX_SETTINGS.get_prefix_jobid()
SUFFIX_JOBID = DAX_SETTINGS.get_suffix_jobid()
MAX_TRACE_DAYS = 30
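
# Illustrative sketch (hypothetical, not the original module's function): the
# CMD_* values loaded above are shell command strings, so counting the
# user's queued jobs could look like this.
def count_jobs():
    """Return the number of jobs on the cluster, or -1 if the call fails."""
    try:
        output = subprocess.check_output(CMD_COUNT_NB_JOBS, shell=True)
        return int(output)
    except (CalledProcessError, ValueError):
        return -1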
Example #5
""" Module classes for Scan and Sessions """
import os
import shutil
import smtplib
import logging
import XnatUtils
from datetime import datetime
from email.mime.text import MIMEText
from dax_settings import DAX_Settings
DAX_SETTINGS = DAX_Settings()
SMTP_HOST = DAX_SETTINGS.get_smtp_host()
SMTP_FROM = DAX_SETTINGS.get_smtp_from()
SMTP_PASS = DAX_SETTINGS.get_smtp_pass()
# Logger to print logs
LOGGER = logging.getLogger('dax')

class Module(object):
    """ Object Module to create a module for DAX
        Module runs directly during a build on a session or scan
        to generate inputs data for scans/sessions
    """
    def __init__(self, mod_name, directory, email, text_report):
        """
        Entry point of the Base Module Class.
        
        :param mod_name: Name of the module
        :param directory: Temp directory to store data
        :param email: email address to send the report to
        :param text_report: string to write at the beginning of the report email
        :return: None
        """
Example #6
File: bin.py Project: MattVUIIS/dax
""" File containing functions called by dax executables """
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import imp
import redcap
import logging
from datetime import datetime

import log
import XnatUtils
from dax_settings import DAX_Settings
DAX_SETTINGS = DAX_Settings()
API_URL = DAX_SETTINGS.get_api_url()
API_KEY_DAX = DAX_SETTINGS.get_api_key_dax()
REDCAP_VAR = DAX_SETTINGS.get_dax_manager_data_dictionary()

def set_logger(logfile, debug):
    """
    Set the logging depth

    :param logfile: File to log output to
    :param debug: Should debug depth be used?
    :return: logger object

    """
    # Set up the logger at the requested depth
    if debug:
        logger = log.setup_debug_logger('dax', logfile)
    else:
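        # Assumed completion of the truncated branch: fall back to an
        # info-level logger (setup_info_logger is assumed to mirror
        # setup_debug_logger in the log module).
        logger = log.setup_info_logger('dax', logfile)
    return logger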
Example #7
File: task.py Project: Raab70/dax
""" Task object to generate / manage assessors and cluster """
import os
import time
import logging
from datetime import date

import cluster
from cluster import PBS

from dax_settings import DAX_Settings

DAX_SETTINGS = DAX_Settings()
RESULTS_DIR = DAX_SETTINGS.get_results_dir()
DEFAULT_EMAIL_OPTS = DAX_SETTINGS.get_email_opts()
JOB_EXTENSION_FILE = DAX_SETTINGS.get_job_extension_file()

# Logger to print logs
LOGGER = logging.getLogger("dax")

# Job Statuses
NO_DATA = "NO_DATA"  # session assessor with no data to run (e.g. dtiqa_multi but no DTI present)
NEED_TO_RUN = "NEED_TO_RUN"  # ready to be launched on the cluster (ACCRE); all input data for the process are there
NEED_INPUTS = "NEED_INPUTS"  # input data are missing from a scan, multiple scans, or another assessor
JOB_RUNNING = "JOB_RUNNING"  # the job has been submitted on the cluster and is currently running
JOB_FAILED = "JOB_FAILED"  # the job failed on the cluster
READY_TO_UPLOAD = "READY_TO_UPLOAD"  # job done; waiting for the Spider to upload the results
UPLOADING = "UPLOADING"  # in the process of uploading the resources to XNAT
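# The snippet is cut off here; following the same constants shown in
# Example #3, the remaining statuses would continue as:
COMPLETE = "COMPLETE"  # the job and upload are both done; the assessor contains all its files
READY_TO_COMPLETE = "READY_TO_COMPLETE"  # the job finished and the upload is complete
DOES_NOT_EXIST = "DOES_NOT_EXIST"
OPEN_STATUS_LIST = [NEED_TO_RUN, UPLOADING, JOB_RUNNING, READY_TO_COMPLETE, JOB_FAILED]
JOB_BUILT = "JOB_BUILT"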
Example #8
File: bin.py Project: Raab70/dax
""" File containing functions called by dax executables """
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import imp
import redcap
import logging
from datetime import datetime

import log
import XnatUtils
from dax_settings import DAX_Settings
DAX_SETTINGS = DAX_Settings()
API_URL = DAX_SETTINGS.get_api_url()
API_KEY_DAX = DAX_SETTINGS.get_api_key_dax()
REDCAP_VAR = DAX_SETTINGS.get_dax_manager_config()

def set_logger(logfile, debug):
    """
    Set the logging depth

    :param logfile: File to log output to
    :param debug: Should debug depth be used?
    :return: logger object

    """
    # Set up the logger at the requested depth
    if debug:
        logger = log.setup_debug_logger('dax', logfile)
    else:
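        # Truncated exactly as in Example #6; the assumed completion is the
        # same: an info-level fallback, then return the logger.
        logger = log.setup_info_logger('dax', logfile)
    return logger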