Example #1
0
    def __init__(self, pipe_template_dir, dbm, forTest=False):
        '''Read all pipeline definition files from a directory.

        Loads every ``*.json`` file in *pipe_template_dir*, tags each
        pipeline with the namespace derived from the directory name and
        collects the results in ``self.pipes``.

        Args:
            pipe_template_dir: Path to pipeline directory.
            dbm: Database manager (provides ``lostconfig``).
            forTest: Test-mode flag, stored as ``self.forTest``.
        '''
        self.forTest = forTest
        self.dbm = dbm
        self.file_man = AppFileMan(self.dbm.lostconfig)
        # Drop a single trailing slash so basename() yields the dir name.
        if pipe_template_dir.endswith('/'):
            pipe_template_dir = pipe_template_dir[:-1]
        self.src_pipe_template_path = pipe_template_dir
        base_name = os.path.basename(self.src_pipe_template_path)
        self.dst_pipe_template_path = os.path.join(
            self.file_man.pipe_path, base_name)
        self.json_files = glob(os.path.join(pipe_template_dir, '*.json'))
        self.namespace = base_name.strip('/')
        self.pipes = []
        for path in self.json_files:
            with open(path) as fh:
                pipe = json.load(fh)
            pipe['namespace'] = self.namespace
            file_stem = os.path.splitext(os.path.basename(path))[0]
            pipe['name'] = self._namespaced_name(file_stem)
            # Name every script element after its script file.
            for element in pipe['elements']:
                if 'script' in element:
                    element['script']['name'] = self._namespaced_name(
                        element['script']['path'])
            self.pipes.append(pipe)
        self.checker = PipeDefChecker(logging)
Example #2
0
File: cron_jobs.py  Project: l3p-cv/lost
def main():
    '''Entry point: start the LOST cron jobs.'''
    parser = argparse.ArgumentParser(description='Run LOST cronjobs')
    parser.add_argument('--debug',
                        action='store_true',
                        help='start cronjobs just once for debugging')
    args = parser.parse_args()

    lostconfig = config.LOSTConfig()
    fm = AppFileMan(lostconfig)
    log_name = 'cron_jobs'
    logger = get_file_logger(log_name, fm.get_app_log_path('cron_jobs.log'))
    logger.info('Starting cron jobs!')

    if args.debug:
        # Debug mode: keep the lifesign loop in the background and run
        # the pipe processing exactly once in the foreground.
        lifesign = threading.Thread(target=worker_lifesign_loop,
                                    args=(log_name, ),
                                    daemon=True)
        lifesign.start()
        client = Client(f'{lostconfig.scheduler_ip}:{lostconfig.scheduler_port}')
        process_pipes(log_name, client)
        return

    # Normal mode: run every job in its own daemon thread and wait.
    jobs = [
        process_pipes_loop, worker_lifesign_loop, release_annos_loop,
        remove_empty_annos_loop
    ]
    if lostconfig.worker_management == 'dynamic':
        jobs.append(dask_session.release_client_by_timeout_loop)
    jobs += lostconfig.extra_cron_jobs
    workers = []
    for job in jobs:
        thread = threading.Thread(target=job, args=(log_name, ), daemon=True)
        thread.start()
        workers.append(thread)
    for thread in workers:
        thread.join()
Example #3
0
def exec_script_in_subprocess(pipe_element_id):
    '''Run the script of a pipeline element as a subprocess.

    Writes a debug helper (``debug.sh``, pudb3) and a start script
    (``start.sh``, python3) into the element's debug folder, executes
    ``start.sh`` and waits for it to finish. On static worker management
    the script is registered with the current worker while it runs.
    Failures are reported on the pipeline element via
    ``script_api.report_script_err``.

    Args:
        pipe_element_id (int): ID of the pipeline element whose script
            should be executed.
    '''
    logger = logging
    # Pre-bind so the except/finally paths never hit a NameError when
    # the failure happens before these are assigned (the original used
    # pipe_e/dbm in the handler although they might be unbound).
    pipe_e = None
    dbm = None
    try:
        lostconfig = LOSTConfig()
        dbm = DBMan(lostconfig)
        pipe_e = dbm.get_pipe_element(pipe_e_id=pipe_element_id)
        if lostconfig.worker_management == 'static':
            worker = CurrentWorker(dbm, lostconfig)
            if not worker.enough_resources(pipe_e.script):
                raise Exception('Not enough resources')
        pipe_e.state = state.PipeElement.IN_PROGRESS
        dbm.save_obj(pipe_e)
        file_man = AppFileMan(lostconfig)
        pipe = pipe_e.pipe

        # Debug helper: same script, but run under pudb3.
        debug_script_path = os.path.join(
            file_man.get_debug_path(pipe_e), 'debug.sh')
        with open(debug_script_path, 'w') as sfile:
            sfile.write(gen_run_cmd("pudb3", pipe_e, lostconfig))

        # Actual start script.
        cmd = gen_run_cmd("python3", pipe_e, lostconfig)
        start_script_path = os.path.join(
            file_man.get_debug_path(pipe_e), 'start.sh')
        with open(start_script_path, 'w') as sfile:
            sfile.write(cmd)
        # Argument list instead of a shell string: no shell injection via
        # the script path and no extra shell process.
        p = subprocess.Popen(['bash', start_script_path],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        logger.info("{} ({}): Started script\n{}".format(
            pipe.name, pipe.idx, cmd))
        if lostconfig.worker_management == 'static':
            worker.add_script(pipe_e, pipe_e.script)
        out, err = p.communicate()
        if lostconfig.worker_management == 'static':
            worker.remove_script(pipe_e, pipe_e.script)
        if p.returncode != 0:
            raise Exception(err.decode('utf-8'))
        logger.info('{} ({}): Executed script successful: {}'.format(
            pipe.name, pipe.idx, pipe_e.script.path))
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate.
        msg = traceback.format_exc()
        if pipe_e is not None:
            pipe = pipe_e.pipe
            logger.info('{} ({}): Exception occurred in script: {}'.format(
                pipe.name, pipe.idx, pipe_e.script.path))
        logger.error(msg)
        if pipe_e is not None:
            # Report the failure on the element so the pipe engine can
            # surface it to the user.
            script_api.report_script_err(pipe_e, pipe, dbm, msg)
    finally:
        # Always release the DB session; the original leaked it when
        # error reporting itself raised.
        if dbm is not None:
            dbm.close_session()
Example #4
0
 def __init__(self, dbm, pipe, lostconfig, client, logger_name=''):
     '''Create a pipeline executor.

     :type dbm: lost.db.access.DBMan
     :type pipe: lost.db.model.Pipe
     '''
     super().__init__(dbm=dbm, pipe=pipe)
     self.lostconfig = lostconfig  # type: lost.logic.config.LOSTConfig
     self.file_man = AppFileMan(self.lostconfig)
     self.client = client
     # Child logger named "<logger_name>.<ClassName>" so log records
     # reveal which executor class emitted them.
     self.logger = logging.getLogger(
         '{}.{}'.format(logger_name, type(self).__name__))
Example #5
0
def update_version_log():
    '''Maintain the version log file and trigger DB patching on change.

    Creates the log file with the current version if it does not exist;
    otherwise compares the last recorded version with ``lost.__version__``
    and runs the DB patcher (appending the new version) when they differ.
    '''
    fm = AppFileMan(LOSTConfig())
    path = fm.get_version_log_path()
    if os.path.exists(path):
        with open(path) as fh:
            versions = json.load(fh)
            print("Versions: ", versions)
        if versions[-1] == lost.__version__:
            print('Patchsystem: No version change!')
            return
        print('Patchsystem: We maybe need to patch!')
        DBPatcher().patch()
        versions.append(lost.__version__)
        with open(path, 'w') as fh:
            json.dump(versions, fh)
    else:
        print('Patchsystem: Created version log file: {}'.format(path))
        with open(path, 'w') as fh:
            json.dump([lost.__version__], fh)
Example #6
0
 def __init__(self):
     # In-memory cache plus a handle to the app's file manager.
     self.mem = {}
     self.fm = AppFileMan(config)
Example #7
0
import shutil

logging.basicConfig(level=logging.INFO, format='(%(levelname)s): %(message)s')

if __name__ == "__main__":
    # CLI tool: import a pipeline project into the portal, optionally
    # copying it into the default pipe location first.
    parser = argparse.ArgumentParser(
        description='Import a pipeline into the portal')
    parser.add_argument('pipe_dir',
                        help='Path directory with pipeline definition files.')
    parser.add_argument('--copy',
                        default=False,
                        action='store_true',
                        help='Copy to default pipe location before import')
    args = parser.parse_args()

    lostconfig = config.LOSTConfig()
    if args.copy:
        file_man = AppFileMan(lostconfig)
        src = args.pipe_dir
        # Strip one trailing slash so basename() gives the project name.
        if src.endswith('/'):
            src = src[:-1]
        dst_path = os.path.join(file_man.get_pipe_project_path(),
                                os.path.basename(src))
        shutil.copytree(args.pipe_dir, dst_path, dirs_exist_ok=True)
    else:
        dst_path = args.pipe_dir
    dbm = access.DBMan(lostconfig)
    template_import.PipeImporter(dst_path, dbm).start_import()
    dbm.close_session()
Example #8
0
from flask import Flask
from lost import settings
# from lost.taskman import make_celery
from lost.logic.file_man import AppFileMan
from flask_mail import Mail
import os
import traceback

app = Flask(__name__)

import logging
from logging import FileHandler

# Route the Flask app's log output into an app-specific file.
file_man = AppFileMan(settings.LOST_CONFIG)
logfile_path = file_man.get_app_log_path('flask.log')
file_handler = FileHandler(logfile_path)
if settings.LOST_CONFIG.debug:
    # Debug deployments also surface INFO records on the root logger.
    logging.basicConfig(level=logging.INFO)
    file_handler.setLevel(logging.INFO)
else:
    file_handler.setLevel(logging.WARNING)
file_handler.setFormatter(logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
app.logger.addHandler(file_handler)