def check_status(ctx, projectid, snic_api_credentials=None, statusdb_config=None):
    """In grus delivery mode checks the status of an ongoing delivery.

    :param ctx: click context; parent params are forwarded to the deliverer
    :param projectid: iterable of project identifiers to check
    :param snic_api_credentials: open file with SNIC API credentials (required)
    :param statusdb_config: open file with StatusDB configuration (required)
    :returns: 1 when a required configuration file is missing, otherwise None
    """
    # Both configuration files are loop-invariant: validate and load them once
    # instead of re-checking and re-loading for every project id.
    # PEP 8: comparisons to None use 'is', not '=='.
    if statusdb_config is None:
        logger.error("--statusdb-config or env variable $STATUS_DB_CONFIG need to be set to perform GRUS delivery")
        return 1
    load_yaml_config(statusdb_config.name)
    if snic_api_credentials is None:
        logger.error("--snic-api-credentials or env variable $SNIC_API_STOCKHOLM need to be set to perform GRUS delivery")
        return 1
    load_yaml_config(snic_api_credentials.name)
    for pid in projectid:
        d = _deliver_grus.GrusProjectDeliverer(
            pid,
            **ctx.parent.params)
        d.check_mover_delivery_status()
def cli(ctx, config_file):
    """ Tool for the Automation of Storage and Analyses """
    ctx.obj = {}
    config = conf.load_yaml_config(config_file.name)
    # File logging is optional: enabled only when log.file is configured.
    log_section = config.get('log', {})
    log_file = log_section.get('file', None)
    if log_file:
        level = log_section.get('log_level', 'INFO')
        taca.log.init_logger_file(log_file, level)
    logger.debug('starting up CLI')
def cli(ctx, config_file):
    """ Tool for the Automation of Storage and Analyses """
    ctx.obj = {}
    config = conf.load_yaml_config(config_file)
    # Only set up file logging when a log file path is present in the config.
    log_file = config.get('log', {}).get('file', None)
    if log_file:
        taca.log.init_logger_file(log_file, config.get('log').get('log_level', 'INFO'))
    logger.debug('starting up CLI')
def test_load_yaml_config(self):
    """Load a yaml config file and compare it against the expected content.

    Also verifies that loading a non-existent file raises IOError.
    """
    got_config_data = config.load_yaml_config(
        'data/taca_test_cfg_minimal.yaml')
    expected_config_data = {
        'statusdb': {
            'url': 'url',
            'username': '******',
            'password': '******',
            'port': 'port'
        },
        'log': {
            'file': 'data/taca.log'
        }
    }
    self.assertEqual(expected_config_data, got_config_data)
    # Original path had a stray trailing ')' inside the string literal
    # ('data/missing_file.yaml)'); harmless (file missing either way) but
    # misleading, so the intended path is used here.
    with self.assertRaises(IOError):
        config.load_yaml_config('data/missing_file.yaml')
def project(ctx, projectid, snic_api_credentials=None, statusdb_config=None, order_portal=None, pi_email=None, sensitive=True, hard_stage_only=False, add_user=None, fc_delivery=False):
    """ Deliver the specified projects to the specified destination.

    :param ctx: click context; parent params carry the delivery settings
    :param projectid: iterable of project identifiers to deliver
    :param snic_api_credentials: SNIC API credentials file (required for grus)
    :param statusdb_config: StatusDB configuration file (required for grus)
    :param order_portal: order portal configuration file (required for grus)
    :param pi_email: PI email override for the delivery
    :param sensitive: whether the delivered data is sensitive
    :param hard_stage_only: stop after the hard-stage step
    :param add_user: extra users to grant access to; duplicates are removed
    :param fc_delivery: deliver a single run folder instead of the project
    :returns: 1 when a required GRUS configuration is missing, otherwise None
    """
    for pid in projectid:
        if not ctx.parent.params['cluster']:
            # Soft stage case
            d = _deliver.ProjectDeliverer(
                pid,
                **ctx.parent.params)
        elif ctx.parent.params['cluster'] == 'grus':
            # Hard stage and deliver: all three configuration sources are
            # mandatory in GRUS mode.  PEP 8: compare to None with 'is'.
            if statusdb_config is None:
                logger.error("--statusdb-config or env variable $STATUS_DB_CONFIG need to be set to perform GRUS delivery")
                return 1
            load_yaml_config(statusdb_config.name)
            if snic_api_credentials is None:
                logger.error("--snic-api-credentials or env variable $SNIC_API_STOCKHOLM need to be set to perform GRUS delivery")
                return 1
            load_yaml_config(snic_api_credentials.name)
            if order_portal is None:
                logger.error("--order-portal or env variable $ORDER_PORTAL need to be set to perform GRUS delivery")
                return 1
            load_yaml_config(order_portal.name)
            d = _deliver_grus.GrusProjectDeliverer(
                projectid=pid,
                pi_email=pi_email,
                sensitive=sensitive,
                hard_stage_only=hard_stage_only,
                add_user=list(set(add_user)),
                fcid=fc_delivery,
                **ctx.parent.params)
        # NOTE(review): if 'cluster' is set to an unknown value, 'd' is
        # unbound here — presumably click restricts the choices; verify.
        if fc_delivery:
            _exec_fn(d, d.deliver_run_folder)
        else:
            _exec_fn(d, d.deliver_project)
import shutil
import tempfile
import unittest
import csv
from datetime import datetime
from taca.analysis.analysis import *
from taca.illumina.Runs import Run
from taca.illumina.HiSeqX_Runs import HiSeqX_Run
from taca.utils import config as conf

# This is only run if TACA is called from the CLI, as this is a test, we need to
# call it explicitely
CONFIG = conf.load_yaml_config('data/taca_test_cfg.yaml')

def processing_status(run_dir):
    """Return the demultiplexing state of *run_dir*: 'TO_START' when no
    Demultiplexing directory exists, 'COMPLETED' when the stats XML file is
    present, otherwise 'IN_PROGRESS'."""
    # NOTE(review): 'os' is not imported explicitly here — presumably it comes
    # in via the star import from taca.analysis.analysis; verify.
    demux_dir = os.path.join(run_dir, 'Demultiplexing')
    if not os.path.exists(demux_dir):
        return 'TO_START'
    elif os.path.exists(os.path.join(demux_dir, 'Stats', 'DemultiplexingStats.xml')):
        return 'COMPLETED'
    else:
        return 'IN_PROGRESS'

class TestTracker(unittest.TestCase):
    """ analysis.py script tests """
    @classmethod
    def setUpClass(self):
        """unittest class-level setup hook; body continues beyond this excerpt."""
#!/usr/bin/env python import os import shutil import tempfile import unittest import mock from datetime import datetime from taca.cleanup import cleanup from taca.utils import config as conf CONFIG = conf.load_yaml_config('data/taca_test_cfg_cleanup.yaml') class TestCleanup(unittest.TestCase): """Tests for TACA Cleanup module.""" @mock.patch('taca.cleanup.cleanup.shutil.move') @mock.patch('taca.cleanup.cleanup.os.listdir') def test_cleanup_nas(self, mock_listdir, mock_move): """Locate and move old data on NAS.""" seconds = 1 run = '190201_A00621_0032_BHHFCFDSXX' mock_listdir.return_value = [run] cleanup.cleanup_nas(seconds) mock_move.assert_called_once_with(run, 'nosync') @mock.patch('taca.cleanup.cleanup.shutil.rmtree') @mock.patch('taca.cleanup.cleanup.os.listdir') def test_cleanup_processing(self, mock_listdir, mock_rmtree):
#!/usr/bin/env python import os import shutil import unittest from datetime import datetime from taca.analysis.analysis import * from taca.illumina import Run from taca.utils import config # This is only run if TACA is called from the CLI, as this is a test, we need to # call it explicitely config.load_yaml_config(os.path.join(os.environ.get('HOME'), '.taca/taca.yaml')) def processing_status(run_dir): demux_dir = os.path.join(run_dir, 'Demultiplexing') if not os.path.exists(demux_dir): return 'TO_START' elif os.path.exists(os.path.join(demux_dir, 'Stats', 'DemultiplexingStats.xml')): return 'COMPLETED' else: return 'IN_PROGRESS' class TestTracker(unittest.TestCase): """ run_tracker.py script tests """ @classmethod def setUpClass(self):