def test_get_data_inputs_not_found(self):
        """v2 render returns CONTENT_NOT_EXIST when the runtime has no data inputs."""
        pipeline_instance = MagicMock()
        pipeline_instance.instance_id = "instance_id_token"

        # Runtime stub: context lookup succeeds, data-input lookup raises NotFoundError.
        runtime = MagicMock()
        runtime.get_context.return_value = "context_value_return"
        runtime.get_data_inputs.side_effect = exceptions.NotFoundError
        runtime_cls = MagicMock(return_value=runtime)

        with mock.patch(TASKFLOW_DISPATCHERS_TASK_BAMBOO_DJANGO_RUNTIME, runtime_cls):
            dispatcher = TaskCommandDispatcher(
                engine_ver=2,
                taskflow_id=1,
                pipeline_instance=pipeline_instance,
                project_id=1,
            )
            result = dispatcher.render_current_constants_v2()

        # Both runtime lookups are keyed by the pipeline instance id.
        runtime.get_context.assert_called_once_with(pipeline_instance.instance_id)
        runtime.get_data_inputs.assert_called_once_with(pipeline_instance.instance_id)
        self.assertEqual(
            result,
            {
                "result": False,
                "data": None,
                "code": err_code.CONTENT_NOT_EXIST.code,
                "message": "data not found, task is not running",
            },
        )
    def test_normal(self):
        """v2 render hydrates the context and flattens it into key/value records."""
        pipeline_instance = MagicMock()
        pipeline_instance.instance_id = "instance_id_token"

        # Runtime stub: one non-render input whose raw value feeds the context.
        runtime = MagicMock()
        runtime.get_context.return_value = "context_value_return"
        runtime.get_data_inputs.return_value = {
            "root_key": DataInput(need_render=False, value="root_val"),
        }

        # Hydration produces one SystemObject value and one plain value.
        context = MagicMock()
        context.hydrate.return_value = {
            "k1": SystemObject({"system_key": "system_val"}),
            "k2": "val2",
        }

        runtime_cls = MagicMock(return_value=runtime)
        context_cls = MagicMock(return_value=context)

        with mock.patch(TASKFLOW_DISPATCHERS_TASK_BAMBOO_DJANGO_RUNTIME, runtime_cls), \
                mock.patch(TASKFLOW_DISPATCHERS_TASK_CONTEXT, context_cls):
            dispatcher = TaskCommandDispatcher(
                engine_ver=2,
                taskflow_id=1,
                pipeline_instance=pipeline_instance,
                project_id=1,
            )
            result = dispatcher.render_current_constants_v2()

        runtime.get_context.assert_called_once_with(pipeline_instance.instance_id)
        runtime.get_data_inputs.assert_called_once_with(pipeline_instance.instance_id)
        # The context is built from the runtime, raw context, and unwrapped inputs.
        context_cls.assert_called_once_with(runtime, "context_value_return", {"root_key": "root_val"})
        context.hydrate.assert_called_once()

        self.assertEqual(
            result,
            {
                "result": True,
                "data": [
                    # SystemObject values are expanded to their underlying dict.
                    {"key": "k1", "value": {"system_key": "system_val"}},
                    {"key": "k2", "value": "val2"},
                ],
                "code": err_code.SUCCESS.code,
                "message": "",
            },
        )
    def test_is_not_started(self):
        """v1 render fails with INVALID_OPERATION when the task has not started."""
        pipeline_instance = MagicMock()
        pipeline_instance.is_started = False

        dispatcher = TaskCommandDispatcher(
            engine_ver=1,
            taskflow_id=1,
            pipeline_instance=pipeline_instance,
            project_id=1,
        )
        result = dispatcher.render_current_constants_v1()

        self.assertEqual(
            result,
            {
                "result": False,
                "data": None,
                "code": err_code.INVALID_OPERATION.code,
                "message": "task is not running"
            },
        )
    def test_hydrate_error(self):
        """v2 render maps a context.hydrate() exception to UNKNOWN_ERROR."""
        pipeline_instance = MagicMock()
        pipeline_instance.instance_id = "instance_id_token"

        runtime = MagicMock()
        runtime.get_context.return_value = "context_value_return"
        runtime.get_data_inputs.return_value = {
            "root_key": DataInput(need_render=False, value="root_val"),
        }

        # Hydration blows up; the dispatcher must surface the message, not raise.
        context = MagicMock()
        context.hydrate.side_effect = Exception("exception message")

        runtime_cls = MagicMock(return_value=runtime)
        context_cls = MagicMock(return_value=context)

        with mock.patch(TASKFLOW_DISPATCHERS_TASK_BAMBOO_DJANGO_RUNTIME, runtime_cls), \
                mock.patch(TASKFLOW_DISPATCHERS_TASK_CONTEXT, context_cls):
            dispatcher = TaskCommandDispatcher(
                engine_ver=2,
                taskflow_id=1,
                pipeline_instance=pipeline_instance,
                project_id=1,
            )
            result = dispatcher.render_current_constants_v2()

        runtime.get_context.assert_called_once_with(pipeline_instance.instance_id)
        runtime.get_data_inputs.assert_called_once_with(pipeline_instance.instance_id)
        context_cls.assert_called_once_with(runtime, "context_value_return", {"root_key": "root_val"})
        context.hydrate.assert_called_once()

        self.assertEqual(
            result,
            {
                "result": False,
                "data": None,
                "code": err_code.UNKNOWN_ERROR.code,
                "message": "context hydrate error: exception message",
            },
        )
# Ejemplo n.º 5
# 0
def pipeline_archive_statistics_task(instance_id):
    """Archive statistics for a finished taskflow instance.

    Updates the taskflow-level statistic row (start/finish/elapsed time),
    then rebuilds the per-component execution statistics from the task's
    status tree and execution data.

    :param instance_id: pipeline instance id of the taskflow to archive.
    :return: True on success, False if the status tree could not be fetched
             or saving the component statistics failed.
    """
    taskflow_instance = TaskFlowInstance.objects.get(
        pipeline_instance__instance_id=instance_id)

    # 更新taskflowinstance统计数据start_time finish_time elapsed_time
    taskflow_statistic = TaskflowStatistics.objects.filter(
        task_instance_id=taskflow_instance.id).first()
    if taskflow_statistic:
        start_time = taskflow_instance.pipeline_instance.start_time
        finish_time = taskflow_instance.pipeline_instance.finish_time
        taskflow_statistic.start_time = start_time
        taskflow_statistic.finish_time = finish_time
        taskflow_statistic.elapsed_time = calculate_elapsed_time(
            start_time, finish_time)
        taskflow_statistic.save()

    engine_ver = taskflow_instance.engine_ver

    # 获取任务实例执行树
    cmd_dispatcher = TaskCommandDispatcher(engine_ver, taskflow_instance.id,
                                           taskflow_instance.pipeline_instance,
                                           taskflow_instance.project.id)
    status_tree_result = cmd_dispatcher.get_task_status()
    if not status_tree_result["result"]:
        # No exception is in flight here, so logger.exception would append a
        # bogus "NoneType: None" traceback — use logger.error with lazy args.
        logger.error(
            "get task_status_result fail, taskflow_instace = %s.",
            taskflow_instance.id)
        return False

    # 删除原有标准插件执行数据
    TaskflowExecutedNodeStatistics.objects.filter(
        instance_id=taskflow_instance.pipeline_instance.id).delete()

    data = taskflow_instance.pipeline_instance.execution_data
    try:
        component_list = recursive_collect_components_execution(
            activities=data[PE.activities],
            status_tree=status_tree_result["data"]["children"],
            task_instance=taskflow_instance,
            engine_ver=engine_ver,
        )
        TaskflowExecutedNodeStatistics.objects.bulk_create(component_list)
    except Exception:
        # Here an exception IS active, so logger.exception correctly logs it.
        logger.exception(
            "pipeline_instance_handler save TaskflowExecuteNodeStatistics"
            "[instance_id=%s] raise error", instance_id)
        return False
    return True
# Ejemplo n.º 6
# 0
    def get(self, request, task_id, format=None):
        """Return the task's rendered current constants.

        Values that are not JSON-serializable are coerced to strings
        (object __dict__ when available) so the response always serializes.
        """
        # Fetch only the fields the dispatcher needs.
        # NOTE(review): [0] raises IndexError for an unknown task_id — presumably
        # handled upstream; confirm before changing to get_object_or_404.
        task = TaskFlowInstance.objects.filter(id=task_id).only(
            "id", "engine_ver", "pipeline_instance", "project_id")[0]

        # dispatch get constants command
        dispatcher = TaskCommandDispatcher(
            engine_ver=task.engine_ver,
            taskflow_id=task.id,
            pipeline_instance=task.pipeline_instance,
            project_id=task.project_id,
        )
        resp_data = dispatcher.render_current_constants()

        if resp_data["result"]:
            for var in resp_data["data"]:
                value = var["value"]
                if not safe_for_json(value):
                    # Prefer the attribute dict for rich objects; fall back to str().
                    var["value"] = str(value.__dict__) if hasattr(value, "__dict__") else str(value)

        return Response(resp_data)
    def test_normal(self):
        """v1 render resolves each pipeline context variable to a plain value.

        Covers: a Variable wrapping a TaskContext (expanded via __dict__),
        a Variable whose get() succeeds, a Variable whose get() raises
        (replaced with an error marker), and raw non-Variable values
        (passed through unchanged).
        """
        self.maxDiff = None
        pipeline_instance = MagicMock()
        pipeline_instance.is_started = True
        pipeline_instance.is_finished = False
        pipeline_instance.is_revoked = False
        pipeline_instance.instance_id = "instance_id_token"
        pipeline_model = MagicMock()

        # Mock that also passes isinstance(x, Variable) checks in the dispatcher.
        class VariableMagicMock(MagicMock, Variable):
            pass

        # Minimal TaskContext stand-in; only its __dict__ matters to the test.
        class TestTaskContext(TaskContext):
            def __init__(self):
                self.value = "TestTaskContext"

        # Minimal Variable stand-in carrying a raw .value attribute.
        class TestValueVar(Variable):
            def __init__(self, value):
                self.value = value

        # k1: Variable holding a TaskContext -> expected to render its __dict__.
        var1 = TestValueVar(value=TestTaskContext())

        # k2: Variable whose get() resolves normally.
        var2 = VariableMagicMock()
        var2.get = MagicMock(return_value="val2")

        # k4: Variable whose get() raises -> expected to render an error marker.
        var4 = VariableMagicMock()
        var4.get = MagicMock(side_effect=Exception)

        # k5/k6: non-Variable values pass through untouched.
        pipeline_model.process.root_pipeline.context.variables = {
            "k1": var1,
            "k2": var2,
            "k4": var4,
            "k5": "v5",
            "k6": 6,
        }

        pipeline_model_cls = MagicMock()
        pipeline_model_cls.objects.get = MagicMock(return_value=pipeline_model)

        with mock.patch(TASKFLOW_DISPATCHERS_TASK_PIPELINE_MODEL,
                        pipeline_model_cls):
            dispatcher = TaskCommandDispatcher(
                engine_ver=1,
                taskflow_id=1,
                pipeline_instance=pipeline_instance,
                project_id=1)
            result = dispatcher.render_current_constants_v1()

        # The pipeline model is looked up by the pipeline instance id.
        pipeline_model_cls.objects.get.assert_called_once_with(
            id=pipeline_instance.instance_id)
        self.assertEqual(
            result,
            {
                "result":
                True,
                "data": [
                    {
                        "key": "k1",
                        "value": {
                            "value": "TestTaskContext"
                        }
                    },
                    {
                        "key": "k2",
                        "value": "val2"
                    },
                    {
                        "key": "k4",
                        "value": "[ERROR]value resolve error"
                    },
                    {
                        "key": "k5",
                        "value": "v5"
                    },
                    {
                        "key": "k6",
                        "value": 6
                    },
                ],
                "code":
                err_code.SUCCESS.code,
                "message":
                "",
            },
        )