# Example #1
    def setup_and_test_container_executor_and_logging(self, gcc_mock):
        """Build an ArvCwlExecutor configured as if running inside a container.

        Configures the mocked get_current_container (gcc_mock) to report a
        container record *before* the executor is constructed — construction
        is what consults it — then verifies the executor selected the
        'containers' API and installed RuntimeStatusLoggingHandler on the
        root logger.  Returns the runner for the caller's further use.
        """
        api = mock.MagicMock()
        api._rootDesc = copy.deepcopy(get_rootDesc())

        # Make sure ArvCwlExecutor thinks it's running inside a container so it
        # adds the logging handler that will call runtime_status_update() mock.
        # The container record must be in place before ArvCwlExecutor(api) runs.
        self.assertFalse(gcc_mock.called)
        gcc_mock.return_value = {"uuid": "zzzzz-dz642-zzzzzzzzzzzzzzz"}
        runner = arvados_cwl.ArvCwlExecutor(api)
        self.assertEqual(runner.work_api, 'containers')
        root_logger = logging.getLogger('')
        handlerClasses = [h.__class__ for h in root_logger.handlers]
        self.assertTrue(arvados_cwl.RuntimeStatusLoggingHandler in handlerClasses)
        return runner
# Example #2
    def test_child_failure(self, col, reader, gcc_mock):
        """A child container that exits nonzero must fail the step.

        Drives ArvadosContainer.done() with a Complete-but-exit_code=1
        container record and asserts that (a) runtime_status_update() is
        invoked with the error log message and (b) the output callback is
        called with "permanentFail".
        """
        api = mock.MagicMock()
        api._rootDesc = copy.deepcopy(get_rootDesc())
        # Remove the jobs API's create method so work_api resolves to
        # 'containers'.
        del api._rootDesc.get('resources')['jobs']['methods']['create']

        # Set up runner with mocked runtime_status_update()
        self.assertFalse(gcc_mock.called)
        runtime_status_update = mock.MagicMock()
        arvados_cwl.ArvCwlExecutor.runtime_status_update = runtime_status_update

        # Make sure ArvCwlExecutor thinks it's running inside a container so it
        # adds the logging handler that will call runtime_status_update() mock.
        # The mock's return value must be set BEFORE the executor is
        # constructed, since construction is what calls get_current_container;
        # assigning it afterwards would leave the executor seeing only the
        # mock's default return value.
        gcc_mock.return_value = {"uuid": "zzzzz-dz642-zzzzzzzzzzzzzzz"}
        runner = arvados_cwl.ArvCwlExecutor(api)
        self.assertEqual(runner.work_api, 'containers')
        self.assertTrue(gcc_mock.called)
        root_logger = logging.getLogger('')
        handlerClasses = [h.__class__ for h in root_logger.handlers]
        self.assertTrue(
            arvados_cwl.RuntimeStatusLoggingHandler in handlerClasses)

        runner.num_retries = 0
        runner.ignore_docker_for_reuse = False
        runner.intermediate_output_ttl = 0
        runner.secret_store = cwltool.secrets.SecretStore()
        runner.label = mock.MagicMock()
        runner.label.return_value = '[container testjob]'

        # Container finished "Complete" but with a nonzero exit code not in
        # successCodes below, so the step must be treated as a failure.
        runner.api.containers().get().execute.return_value = {
            "state": "Complete",
            "output": "abc+123",
            "exit_code": 1,
            "log": "def+234"
        }

        # Empty log collection: done() should report "** log is empty **".
        col().open.return_value = []

        loadingContext, runtimeContext = self.helper(runner)

        arvjob = arvados_cwl.ArvadosContainer(runner, runtimeContext,
                                              mock.MagicMock(), {}, None, [],
                                              [], "testjob")
        arvjob.output_callback = mock.MagicMock()
        arvjob.collect_outputs = mock.MagicMock()
        arvjob.successCodes = [0]
        arvjob.outdir = "/var/spool/cwl"
        arvjob.output_ttl = 3600
        arvjob.collect_outputs.return_value = {"out": "stuff"}

        arvjob.done({
            "state": "Final",
            "log_uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz1",
            "output_uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2",
            "uuid": "zzzzz-xvhdp-zzzzzzzzzzzzzzz",
            "container_uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz",
            "modified_at": "2017-05-26T12:01:22Z"
        })

        runtime_status_update.assert_called_with(
            'error',
            'arvados.cwl-runner: [container testjob] (zzzzz-xvhdp-zzzzzzzzzzzzzzz) error log:',
            '  ** log is empty **')
        arvjob.output_callback.assert_called_with({"out": "stuff"},
                                                  "permanentFail")
# Example #3
def run():
    """Run a CWL workflow inside an Arvados Crunch ("jobs" API) job.

    Reads the workflow location and inputs from the current job's
    script_parameters, normalizes keep references in the job order,
    builds an ArvCwlExecutor, and executes the tool.  On any exception,
    the current job task is marked unsuccessful, recording the partial
    output collection if one was produced.
    """
    # Timestamps are added by crunch-job, so don't print redundant timestamps.
    arvados.log_handler.setFormatter(
        logging.Formatter('%(name)s %(levelname)s: %(message)s'))

    # Print package versions
    logger.info(arvados_cwl.versionstring())

    api = arvados.api("v1")

    arvados_cwl.add_arv_hints()

    runner = None
    try:
        # "cwl:tool" names the workflow file relative to the task's Keep
        # mount; the remaining script_parameters form the CWL job order.
        job_order_object = arvados.current_job()['script_parameters']
        toolpath = "file://%s/%s" % (os.environ['TASK_KEEPMOUNT'],
                                     job_order_object.pop("cwl:tool"))

        # Portable data hash (32-hex md5 + size), optionally followed by a
        # path inside the collection.
        pdh_path = re.compile(r'^[0-9a-f]{32}\+\d+(/.+)?$')

        def keeppath(v):
            # Rewrite a bare portable-data-hash path as a keep: URI;
            # anything else passes through unchanged.
            if pdh_path.match(v):
                return "keep:%s" % v
            else:
                return v

        def keeppathObj(v):
            # Apply keeppath() in place to a CWL File/Directory object's
            # "location" field, if present.
            if "location" in v:
                v["location"] = keeppath(v["location"])

        # Promote plain string keep locators in the job order to CWL File
        # objects so the workflow receives them as files.
        for k, v in viewitems(job_order_object):
            if isinstance(
                    v,
                    basestring) and arvados.util.keep_locator_pattern.match(v):
                job_order_object[k] = {
                    "class": "File",
                    "location": "keep:%s" % v
                }

        # Normalize every File/Directory location to keep: form.
        adjustFileObjs(job_order_object, keeppathObj)
        adjustDirObjs(job_order_object, keeppathObj)
        normalizeFilesDirs(job_order_object)

        # Runner options; each may be overridden by a reserved "arv:*"
        # parameter, which is stripped from the job order before execution.
        output_name = None
        output_tags = None
        enable_reuse = True
        on_error = "continue"
        debug = False

        if "arv:output_name" in job_order_object:
            output_name = job_order_object["arv:output_name"]
            del job_order_object["arv:output_name"]

        if "arv:output_tags" in job_order_object:
            output_tags = job_order_object["arv:output_tags"]
            del job_order_object["arv:output_tags"]

        if "arv:enable_reuse" in job_order_object:
            enable_reuse = job_order_object["arv:enable_reuse"]
            del job_order_object["arv:enable_reuse"]

        if "arv:on_error" in job_order_object:
            on_error = job_order_object["arv:on_error"]
            del job_order_object["arv:on_error"]

        if "arv:debug" in job_order_object:
            debug = job_order_object["arv:debug"]
            del job_order_object["arv:debug"]

        # Minimal argument namespace for the executor; this runner always
        # targets the legacy "jobs" API.
        arvargs = argparse.Namespace()
        arvargs.work_api = "jobs"
        arvargs.output_name = output_name
        arvargs.output_tags = output_tags
        arvargs.thread_count = 1
        arvargs.collection_cache_size = None

        runner = arvados_cwl.ArvCwlExecutor(
            api_client=arvados.safeapi.ThreadSafeApiCache(
                api_params={"model": OrderedJsonModel()},
                keep_params={"num_retries": 4}),
            arvargs=arvargs)

        make_fs_access = functools.partial(
            CollectionFsAccess, collection_cache=runner.collection_cache)

        t = load_tool(toolpath, runner.loadingContext)

        if debug:
            logger.setLevel(logging.DEBUG)
            logging.getLogger('arvados').setLevel(logging.DEBUG)
            logging.getLogger("cwltool").setLevel(logging.DEBUG)

        # Build the runtime context from arvargs, then fill in the values
        # that come from the current job / environment.
        args = ArvRuntimeContext(vars(arvargs))
        args.project_uuid = arvados.current_job()["owner_uuid"]
        args.enable_reuse = enable_reuse
        args.on_error = on_error
        args.submit = False
        args.debug = debug
        args.quiet = False
        args.ignore_docker_for_reuse = False
        args.basedir = os.getcwd()
        args.name = None
        # Identify the enclosing crunch job so the executor can associate
        # its work with it.
        args.cwl_runner_job = {
            "uuid": arvados.current_job()["uuid"],
            "state": arvados.current_job()["state"]
        }
        args.make_fs_access = make_fs_access
        args.trash_intermediate = False
        args.intermediate_output_ttl = 0
        args.priority = arvados_cwl.DEFAULT_PRIORITY
        args.do_validate = True
        args.disable_js_validation = False
        args.tmp_outdir_prefix = "tmp"

        runner.arv_executor(t, job_order_object, args, logger=logger)
    except Exception as e:
        # WorkflowException means the workflow itself failed (expected
        # failure mode); anything else is an unexpected bug worth a
        # traceback.
        if isinstance(e, WorkflowException):
            logging.info("Workflow error %s", e)
        else:
            logging.exception("Unhandled exception")
        # Record whatever partial output exists so it isn't lost.
        if runner and runner.final_output_collection:
            outputCollection = runner.final_output_collection.portable_data_hash(
            )
        else:
            outputCollection = None
        # Mark the current task finished-but-failed.
        api.job_tasks().update(uuid=arvados.current_task()['uuid'],
                               body={
                                   'output': outputCollection,
                                   'success': False,
                                   'progress': 1.0
                               }).execute()