Code Example #1
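# NOTE: this snippet relies on objects imported from the surrounding project
# (task_service, PipelineInstance, pipeline_parser, library); their imports are
# omitted here.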
def form_for_activity(form):
    try:
        # ask the running engine for this activity's inputs and outputs
        inputs = task_service.get_inputs(form['act_id'])
        outputs = task_service.get_outputs(form['act_id'])
    except Exception:
        # fall back to the serialized execution data when the engine can no
        # longer provide the node's data; outputs are not recoverable here
        subprocess_stack = form['subprocess_stack']
        act_id = form['act_id']
        instance_data = PipelineInstance.objects.get(instance_id=form['instance_id']).execution_data
        inputs = pipeline_parser.WebPipelineAdapter(instance_data).get_act_inputs(act_id=act_id,
                                                                                  subprocess_stack=subprocess_stack)
        outputs = {}

    component = library.ComponentLibrary.get_component_class(form['component_code'])
    # append inputs
    inputs_table = inputs

    # append outputs
    outputs_table = []
    outputs_format = component.outputs_format()
    for outputs_item in outputs_format:
        value = outputs.get('outputs', {}).get(outputs_item['key'], '')
        outputs_table.append({
            'name': outputs_item['name'],
            'value': value
        })

    data = {
        'inputs': inputs_table,
        'outputs': outputs_table,
        'ex_data': outputs.get('ex_data', '')
    }
    return data
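A minimal usage sketch for form_for_activity. The dictionary keys are the ones the function reads above; every value shown is a hypothetical example.

form = {
    'act_id': 'node_f1f8c5b6',              # id of the activity node (example value)
    'instance_id': 'instance_9a2d',         # pipeline instance id (example value)
    'subprocess_stack': [],                 # ids of enclosing subprocesses, outermost first
    'component_code': 'example_component',  # component code registered in ComponentLibrary (example value)
}
detail = form_for_activity(form)
print(detail['inputs'], detail['outputs'], detail['ex_data'])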
Code Example #2
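import json

from django.http import JsonResponse

# NOTE: the remaining names used below (TaskFlowInstance, Status, ScheduleService,
# PipelineModel, Activity, Gateway, StartEvent, EndEvent, task_service,
# calculate_elapsed_time, hydrate_inputs, handle_plain_log, SERIALIZE_DATE_FORMAT)
# come from the surrounding project; their imports are omitted in this snippet.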
def get_taskflow_node_detail(request):
    task_id = request.GET.get("task_id")
    node_id = request.GET.get("node_id")
    subprocess_stack = json.loads(request.GET.get("subprocess_stack", "[]"))

    data = {
        "execution_info": {},
        "inputs": "pipeline has been destoryed",
        "outputs": "pipeline has been destoryed",
        "history": {},
        "log": "",
        "ex_data": "",
    }

    taskflow = TaskFlowInstance.objects.get(id=task_id)

    if not taskflow.pipeline_instance.is_started:
        return JsonResponse({"result": False, "message": f"task[{task_id}] is not start"})

    if not taskflow.has_node(node_id):
        return JsonResponse({"result": False, "message": f"task[{task_id}] does not have node[{node_id}]"})

    status = Status.objects.get(id=node_id)

    # collect execution info
    data["execution_info"] = {
        "name": status.name,
        "start_time": status.started_time.strftime(SERIALIZE_DATE_FORMAT) if status.started_time else None,
        "archive_time": status.archived_time.strftime(SERIALIZE_DATE_FORMAT) if status.archived_time else None,
        "elapsed_time": calculate_elapsed_time(status.started_time, status.archived_time),
        "skip": status.skip,
        "error_ignorable": status.error_ignorable,
        "retry_times": status.retry,
        "id": status.id,
        "state": status.state,
        "loop": status.loop,
        "create_time": status.created_time,
        "version": status.version,
        "schedule_id": None,
        "is_scheduling": False,
        "schedule_times": 0,
        "wait_callback": False,
        "is_finished": False,
        "schedule_version": None,
        "callback_data": None,
    }

    try:
        schedule = ScheduleService.objects.schedule_for(status.id, status.version)
    except ScheduleService.DoesNotExist:
        # the node has no schedule record; keep the default execution_info fields
        pass
    else:
        data["execution_info"].update(
            {
                "schedule_id": schedule.id,
                "is_scheduling": schedule.is_scheduling,
                "schedule_times": schedule.schedule_times,
                "wait_callback": schedule.wait_callback,
                "is_finished": schedule.is_finished,
                "schedule_version": schedule.version,
                "callback_data": schedule.callback_data,
            }
        )

    # collect inputs and outputs

    process = PipelineModel.objects.get(id=taskflow.pipeline_instance.instance_id).process

    # only collect inputs and outputs for activity-type nodes
    if process.root_pipeline:

        # walk down subprocess_stack to reach the pipeline that actually contains the node
        target_pipeline = process.root_pipeline
        for sub_id in subprocess_stack:
            subprocess_act = [x for x in target_pipeline.spec.activities if x.id == sub_id][0]
            target_pipeline = subprocess_act.pipeline

        node = target_pipeline.spec.objects[node_id]

        if isinstance(node, Activity):
            data["inputs"] = hydrate_inputs(node.data.inputs)
            data["outputs"] = node.data.outputs

        elif isinstance(node, Gateway):
            data["inputs"] = data["outputs"] = "gateway object does not have data"

        elif isinstance(node, StartEvent):
            data["inputs"] = data["outputs"] = "start event object does not have data"

        elif isinstance(node, EndEvent):
            data["inputs"] = node.data.inputs
            data["outputs"] = node.data.outputs

    elif taskflow.pipeline_instance.is_finished or taskflow.pipeline_instance.is_revoked:
        data["inputs"] = data["outputs"] = "pipeline had finished or had been revoked"

    # collect history
    data["history"] = task_service.get_activity_histories(node_id)

    # collect log
    data["log"] = handle_plain_log(task_service.get_plain_log_for_node(node_id))

    # set ex_data
    data["ex_data"] = task_service.get_outputs(node_id)["ex_data"]

    return JsonResponse({"result": True, "data": data})