Esempio n. 1
0
    def obj_create(self, bundle, **kwargs):
        """Create a pipeline instance resource.

        Validates ``template_id`` and the ``exec_data`` JSON payload, unfolds
        subprocesses, snapshots the execution data, then delegates creation
        to the parent resource with the resolved ids filled into ``kwargs``.
        """
        template_id = bundle.data.get('template_id')
        try:
            template = PipelineTemplate.objects.get(template_id=template_id)
        except Exception:
            raise_validation_error(self, bundle,
                                   'instances', 'template_id', _(u"模板不存在"))

        exec_data = bundle.data.get('exec_data')
        try:
            exec_data = json.loads(exec_data)
        # narrowed from bare Exception: json.loads raises only these here
        except (TypeError, ValueError):
            raise_validation_error(self, bundle,
                                   'instances', 'exec_data', _(u"JSON 格式不合法"))

        # unfold subprocess
        unfold_subprocess(exec_data)
        instance_id = node_uniqid()
        exec_data['id'] = instance_id
        # use `__` for the unused "created" flag so the gettext alias `_`
        # used for the error messages above is not shadowed
        exec_snapshot, __ = Snapshot.objects.create_or_get_snapshot(exec_data)
        kwargs['template_id'] = template.id
        kwargs['instance_id'] = instance_id
        kwargs['snapshot_id'] = template.snapshot.id
        kwargs['execution_snapshot_id'] = exec_snapshot.id
        bundle.data.pop('exec_data')
        return super(PipelineInstanceResource, self).obj_create(bundle, **kwargs)
Esempio n. 2
0
    def create_instance(self, template, exec_data, spread=False, inputs=None, **kwargs):
        """
        Create a pipeline instance object.

        @param template: pipeline template (may be None)
        @param exec_data: pipeline data used for execution
        @param spread: whether exec_data has already been unfolded
        @param inputs: custom input overrides applied to exec_data
        @param kwargs: extra fields forwarded to create()
        @return: the created instance object
        """
        if spread:
            PipelineTemplate.objects.replace_id(exec_data)
        else:
            PipelineTemplate.objects.unfold_subprocess(exec_data)

        # apply custom inputs, ignoring keys the pipeline does not declare
        data_inputs = exec_data['data']['inputs']
        for key, val in (inputs or {}).items():
            if key in data_inputs:
                data_inputs[key]['value'] = val

        instance_id = node_uniqid()
        exec_data['id'] = instance_id
        exec_snapshot, _ = Snapshot.objects.create_or_get_snapshot(exec_data)
        TreeInfo.objects.create()
        if template is not None:
            kwargs.update(template=template,
                          snapshot_id=template.snapshot.id)
        kwargs.update(instance_id=instance_id,
                      execution_snapshot_id=exec_snapshot.id)
        return self.create(**kwargs)
Esempio n. 3
0
    def fork_child(self, parent, current_node_id, destination_id):
        """
        Create a child process whose context mirrors that of ``parent``.

        :param parent: the process to fork from
        :param current_node_id: node at which the child starts
        :param destination_id: node at which the child should stop
        :return: the newly created child process
        """
        # drop pending context changes on the parent before snapshotting
        parent.top_pipeline.context.clear_change_keys()

        # capture the parent's runtime state for the child
        snapshot = ProcessSnapshot.objects.create_snapshot(
            pipeline_stack=parent.pipeline_stack,
            children=[],
            root_pipeline=parent.root_pipeline,
            subprocess_stack=parent.subprocess_stack,
        )

        child = self.create(
            id=node_uniqid(),
            root_pipeline_id=parent.root_pipeline.id,
            current_node_id=current_node_id,
            destination_id=destination_id,
            parent_id=parent.id,
            snapshot=snapshot,
        )

        # register the child with every subprocess the parent is inside of
        for subprocess_id in parent.subprocess_stack:
            SubProcessRelationship.objects.add_relation(subprocess_id, child.id)

        return child
Esempio n. 4
0
def recursive_replace_id(pipeline_data):
    """Replace every id in ``pipeline_data``, descending into subprocesses.

    Each subprocess pipeline keeps its enclosing activity's id as its own id.
    """
    pipeline_data[PE.id] = node_uniqid()
    replace_all_id(pipeline_data)
    for act_id, act in pipeline_data[PE.activities].items():
        if act[PE.type] != PE.SubProcess:
            continue
        subprocess_data = act[PE.pipeline]
        recursive_replace_id(subprocess_data)
        subprocess_data[PE.id] = act_id
Esempio n. 5
0
 def create_instance(self, template, exec_data, **kwargs):
     """Create a pipeline instance from ``template`` and ``exec_data``.

     Unfolds subprocesses, assigns a fresh instance id, snapshots the
     execution data and forwards everything to create().
     """
     unfold_subprocess(exec_data)
     instance_id = node_uniqid()
     exec_data['id'] = instance_id
     snapshot, _ = Snapshot.objects.create_or_get_snapshot(exec_data)
     kwargs.update(
         template=template,
         instance_id=instance_id,
         snapshot_id=template.snapshot.id,
         execution_snapshot_id=snapshot.id,
     )
     return self.create(**kwargs)
Esempio n. 6
0
def replace_all_id(pipeline_data):
    """Replace the ids of every node and flow in ``pipeline_data`` in place.

    Events, activities and gateways receive fresh node ids, flows receive
    fresh line ids, and the front-end layout data is remapped to match.
    """
    flows = pipeline_data['flows']
    node_map = {}
    flow_map = {}

    # step.1 replace nodes id

    # replace events id
    start_event_id = node_uniqid()
    end_event_id = node_uniqid()
    # NOTE(review): bare ID here (other variants use PE.id) — confirm the
    # module-level ID constant exists
    node_map[pipeline_data[PE.start_event][ID]] = start_event_id
    node_map[pipeline_data[PE.end_event][ID]] = end_event_id

    _replace_event_id(flows, pipeline_data[PE.start_event], start_event_id)
    _replace_event_id(flows, pipeline_data[PE.end_event], end_event_id)

    # replace activities id
    activities = pipeline_data[PE.activities]
    # materialize the keys: the helper re-keys the dict while we iterate,
    # which raises RuntimeError on a live keys() view in Python 3
    keys = list(activities.keys())
    for old_id in keys:
        substituted_id = node_uniqid()
        node_map[old_id] = substituted_id
        _replace_activity_id(flows, activities, old_id, substituted_id)

    # replace gateways id
    gateways = pipeline_data[PE.gateways]
    keys = list(gateways.keys())  # same live-view hazard as above
    for old_id in keys:
        substituted_id = node_uniqid()
        node_map[old_id] = substituted_id
        _replace_gateway_id(flows, gateways, old_id, substituted_id)

    # step.2 replace flows id
    keys = list(flows.keys())  # same live-view hazard as above
    for old_id in keys:
        substituted_id = line_uniqid()
        flow_map[old_id] = substituted_id
        _replace_flow_id(flows, old_id, substituted_id, pipeline_data)

    # step.3 replace front end data
    _replace_front_end_data_id(pipeline_data, node_map, flow_map)
Esempio n. 7
0
    def create_model(self, structure_data, **kwargs):
        """Create a template model backed by a snapshot of ``structure_data``.

        Subprocess references are validated first; a SubprocessRefError is
        raised when validation fails.
        """
        is_valid, message = self.subprocess_ref_validate(structure_data)
        if not is_valid:
            raise SubprocessRefError(message)

        snapshot, _ = Snapshot.objects.create_or_get_snapshot(structure_data)
        kwargs.update(snapshot=snapshot, template_id=node_uniqid())
        template = self.create(**kwargs)
        # version track
        # TemplateVersion.objects.track(obj)
        return template
Esempio n. 8
0
    def clone(self, creator):
        """Return a saved clone of this instance.

        The clone gets a timestamp-based name, fresh node/flow ids and a new
        execution snapshot; the template snapshot is shared with the original.

        @param creator: creator recorded on the clone
        """
        # name = self.name[10:] if len(self.name) >= MAX_LEN_OF_NAME - 10 else self.name
        # fixed: %M is minutes — the original "%m" repeated the month
        name = timezone.now().strftime('clone%Y%m%d%H%M%S')
        instance_id = node_uniqid()

        exec_data = self.execution_data
        self._replace_id(exec_data)
        # replace root id
        exec_data['id'] = instance_id
        new_snapshot, _ = Snapshot.objects.create_or_get_snapshot(exec_data)

        return self.__class__.objects.create(template=self.template, instance_id=instance_id,
                                             name=name, creator=creator,
                                             description=self.description, snapshot=self.snapshot,
                                             execution_snapshot=new_snapshot)
Esempio n. 9
0
 def prepare_for_pipeline(self, pipeline):
     """
     Create and initialize a process for ``pipeline``.

     :param pipeline: the pipeline to run
     :return: the created process, positioned at the pipeline's start event
     """
     # fresh runtime state: empty stacks, no children
     snapshot = ProcessSnapshot.objects.create_snapshot(
         pipeline_stack=utils.Stack(),
         children=[],
         root_pipeline=pipeline,
         subprocess_stack=utils.Stack(),
     )
     process = self.create(
         id=node_uniqid(),
         root_pipeline_id=pipeline.id,
         current_node_id=pipeline.start_event.id,
         snapshot=snapshot,
     )
     process.push_pipeline(pipeline)
     process.save()
     return process
Esempio n. 10
0
    def obj_create(self, bundle, **kwargs):
        """Create a pipeline template resource.

        Validates the ``data`` JSON payload plus its subprocess references
        and gateways, snapshots the structure data, then delegates creation
        to the parent resource.
        """
        json_data = bundle.data.get('data')
        try:
            data = json.loads(json_data)
        # narrowed from bare Exception: json.loads raises only these here
        except (TypeError, ValueError):
            raise_validation_error(self, bundle, 'templates', 'data', _(u"JSON 格式不合法"))

        self.subprocess_ref_validate(bundle, data)
        self.gateway_validate(bundle, data)

        # use `__` for the unused "created" flag so the gettext alias `_`
        # used for the error message above is not shadowed
        snapshot, __ = Snapshot.objects.create_or_get_snapshot(data)
        kwargs['snapshot_id'] = snapshot.id
        kwargs['template_id'] = node_uniqid()
        # must pop data field after the creation of snapshot is finished.
        bundle.data.pop('data')

        return super(PipelineTemplateResource, self).obj_create(bundle, **kwargs)
Esempio n. 11
0
    def clone(self, creator, **kwargs):
        """
        Return a saved clone of the current instance object.

        @param creator: creator recorded on the clone
        @param kwargs: optional overrides; ``name`` replaces the generated
                       timestamp-based name
        @return: the clone of the current instance object
        """
        # fixed: %M is minutes — the original "%m" repeated the month
        name = kwargs.get('name') or timezone.localtime(timezone.now()).strftime('clone%Y%m%d%H%M%S')
        instance_id = node_uniqid()

        exec_data = self.execution_data
        self._replace_id(exec_data)
        # replace root id
        exec_data['id'] = instance_id
        new_snapshot, _ = Snapshot.objects.create_or_get_snapshot(exec_data)

        return self.__class__.objects.create(template=self.template, instance_id=instance_id,
                                             name=name, creator=creator,
                                             description=self.description, snapshot=self.snapshot,
                                             execution_snapshot=new_snapshot)
Esempio n. 12
0
    def create_instance(self, template, exec_data, spread=False, **kwargs):
        """
        Create a pipeline instance object.

        @param template: pipeline template
        @param exec_data: pipeline data used for execution
        @param spread: whether exec_data has already been unfolded
        @param kwargs: extra fields forwarded to create()
        @return: the created instance object
        """
        if spread:
            PipelineTemplate.objects.replace_id(exec_data)
        else:
            PipelineTemplate.objects.unfold_subprocess(exec_data)

        instance_id = node_uniqid()
        exec_data['id'] = instance_id
        exec_snapshot, _ = Snapshot.objects.create_or_get_snapshot(exec_data)
        TreeInfo.objects.create()
        kwargs.update(
            template=template,
            instance_id=instance_id,
            snapshot_id=template.snapshot.id,
            execution_snapshot_id=exec_snapshot.id,
        )
        return self.create(**kwargs)
Esempio n. 13
0
def convert_atom_from_v2_step_to_v3_act(step, constants, biz_cc_id, stage_name):
    """Convert a v2 step dict into a v3 ServiceActivity node.

    Resolves the v3 component code for the step's tag code, adapts the
    step's tag data field by field (hooked fields are written back into
    ``constants``) and returns the assembled v3 activity dict.

    :param step: v2 step definition (dict)
    :param constants: v3 constants dict, mutated in place for hooked fields
    :param biz_cc_id: business CC id used by the ``ccadd_set`` adaptation
    :param stage_name: name of the stage the step belongs to
    :raises Exception: when the tag code has no v3 component mapping
    """
    act_id = node_uniqid()
    # skeleton of the v3 activity; component code/data are filled in below
    v3_act = {
        'id': act_id,
        'incoming': '',
        'outgoing': '',
        'name': step['step_name'],
        'error_ignorable': bool(step['is_ignore']),
        'optional': bool(step['is_adjust']),
        'type': 'ServiceActivity',
        'loop': 1,
        'stage_name': stage_name,
        'component': {
            'code': '',
            'data': {}
        }
    }

    tag_code = step['tag_code']
    component_code = component_code_v2_to_v3.get(tag_code)
    if not component_code:
        raise Exception("unknown tag code: %s" % tag_code)

    data = step['tag_data']['data']
    tag_data = {}
    mount_constant(act_id, tag_code, data, constants)

    if tag_code in ['requests', 'job_fast_execute_script', 'job_fast_push_file', 'timer_countdown', 'cchost_replace',
                    'job_execute_task', 'ccupdate_custom_property', 'ccupdate_host_module', 'ccdelete_set',
                    'ccreset_set',
                    'timer_eta']:
        for key, val in data.items():
            hook = True if val['hook'] == 'on' else False

            tmp_val = val['value']

            # select-widget data adaptation: unwrap the nested value
            if key in ['job_execute_tasks', 'cc_custom_property']:
                tmp_val = val['value']['value']

            # datatable data adaptation: rename per-row fields
            elif key == "job_source_files":
                tmp_val = []
                for item in val['value']:
                    tmp_val.append({
                        'ip': item.get('ip'),
                        'account': item.get('account'),
                        'files': item.get('file')
                    })
            elif key == "cc_replace_host_info":
                tmp_val = []
                for item in val['value']:
                    tmp_val.append({
                        'cc_fault_ip': item.get('fault_ip'),
                        'cc_new_ip': item.get('replace_ip')
                    })

            # tree-widget data adaptation
            elif key == "cc_module":
                tmp_val = [int(val['value']['value'])]

            # tree-widget keys that must be reset to empty
            elif key in ['cc_set_names', 'cc_set_name']:
                tmp_val = []

            if hook:
                # hooked field: push the value into the referenced constant
                tag_val = val['constant']
                constants[tag_val]['value'] = tmp_val

                # fields that must be filtered out entirely
                if key in ["cc_plat_id"]:
                    del constants['${%s}' % key]
                    continue
            else:
                tag_val = tmp_val

            tag_data[tag_v2_to_v3[tag_code][key]] = {
                'hook': hook,
                'value': tag_val
            }

    # fill in fields that v2 lacks but v3 requires
    if tag_code == 'ccupdate_host_module':
        tag_data['cc_is_increment'] = {
            'hook': False,
            'value': ""
        }
    if tag_code == 'ccmodify_set_property':
        tag_data.update({
            'cc_set_select': {
                'hook': False,
                'value': []
            },
            'cc_set_property': {
                'hook': False,
                'value': ""
            },
            'cc_set_prop_value': {
                'hook': False,
                'value': ""
            }
        })
    if tag_code == 'ccupdate_module_property':
        tag_data.update({
            'cc_module_select': {
                'hook': False,
                'value': []
            },
            'cc_module_property': {
                'hook': False,
                'value': ""
            },
            'cc_module_prop_value': {
                'hook': False,
                'value': ""
            }
        })
    if tag_code == 'ccadd_set':
        # collapse the separate v2 fields into a single cc_set_info row
        cc_set_info = {
            'hook': False,
            'value': [{}, ]
        }
        for key, val in data.items():
            if key == 'cc_set_name':
                cc_set_info['value'][0]['bk_set_name'] = val['value']
            elif key == 'cc_env_type':
                cc_set_info['value'][0]['bk_set_env'] = str(ENV_TYPE[val['value']])
            elif key == 'cc_service_type':
                cc_set_info['value'][0]['bk_service_status'] = str(SERVICE_TYPE[val['value']])
            elif key == 'cc_capacity':
                cc_set_info['value'][0]['bk_capacity'] = val['value']
            else:
                # any remaining field is treated as the set description
                cc_set_info['value'][0]['bk_set_desc'] = val['value']
        tag_data.update({
            'cc_set_parent_select': {
                'hook': False,
                'value': [biz_cc_id]
            },
            'cc_set_info': cc_set_info
        })
    if tag_code == "ccbiz_status":
        for key, val in data.items():
            hook = True if val['hook'] == 'on' else False
            tmp_val = val['value']

            # tree-widget keys that must be reset to empty
            if key == 'cc_set_names':
                tmp_val = []
            elif key == 'cc_service_type':
                tmp_val = str(val['value'])
                if val['value'] == '0':
                    tmp_val = "2"

            if hook:
                tag_val = val['constant']
                constants[tag_val]['value'] = tmp_val
            else:
                tag_val = tmp_val

            tag_data[tag_v2_to_v3[tag_code][key]] = {
                'hook': hook,
                'value': tag_val
            }

    # TODO another tag

    v3_act['component']['code'] = component_code
    v3_act['component']['data'] = tag_data
    return v3_act
Esempio n. 14
0
def convert_stage_and_params_from_v2_to_v3(stage_data, params, biz_cc_id):
    """Build a v3 pipeline tree from v2 stage data and params.

    Stages are chained sequentially; a parallel stage is wrapped in a
    ParallelGateway / ConvergeGateway pair with one branch per step.
    Returns the tree after automatic layout via draw_pipeline_automatic.

    :param stage_data: list of v2 stage dicts (each with 'steps',
        'stage_name' and optional 'is_parallel')
    :param params: v2 params, converted into v3 constants
    :param biz_cc_id: business CC id forwarded to the step converter
    """
    step_2_tag = get_step_tagcode(stage_data)
    constants = convert_params_from_v2_to_v3(params, step_2_tag)

    # empty skeleton: nodes and flows are inserted while walking the stages
    pipeline_tree = {
        'start_event': {
            'id': node_uniqid(),
            'incoming': '',
            'outgoing': '',
            'type': 'EmptyStartEvent',
            'name': '',
        },
        'end_event': {
            'id': node_uniqid(),
            'incoming': '',
            'outgoing': '',
            'type': 'EmptyEndEvent',
            'name': '',
        },
        'activities': {},
        'gateways': {},
        'flows': {},
        'constants': constants,
        'outputs': []
    }
    # last_node tracks the tail of the chain built so far
    last_node = pipeline_tree['start_event']

    for stage in stage_data:
        is_parallel = stage.get('is_parallel')
        step_data = stage['steps']
        stage_name = stage['stage_name']

        if is_parallel:
            # connect the previous tail to a new parallel gateway
            flow = {
                'id': line_uniqid(),
                'source': last_node['id'],
                'target': '',
                'is_default': False,
            }
            last_node['outgoing'] = flow['id']

            parallel_gateway = {
                'id': node_uniqid(),
                'incoming': flow['id'],
                'outgoing': [],
                'type': 'ParallelGateway',
                'name': '',
            }
            flow['target'] = parallel_gateway['id']
            converge_gateway = {
                'id': node_uniqid(),
                'incoming': [],
                'outgoing': '',
                'type': 'ConvergeGateway',
                'name': '',
            }

            pipeline_tree['gateways'].update({
                parallel_gateway['id']: parallel_gateway,
                converge_gateway['id']: converge_gateway,
            })
            pipeline_tree['flows'].update({
                flow['id']: flow
            })

            last_node = parallel_gateway

        for step in step_data:
            activity = convert_atom_from_v2_step_to_v3_act(step, constants, biz_cc_id, stage_name)
            flow = {
                'id': line_uniqid(),
                'source': last_node['id'],
                'target': activity['id'],
                'is_default': False,
            }
            activity['incoming'] = flow['id']

            if is_parallel:
                # parallel stage: fan out from the gateway and converge back
                parallel_gateway['outgoing'].append(flow['id'])

                flow2 = {
                    'id': line_uniqid(),
                    'source': activity['id'],
                    'target': converge_gateway['id'],
                    'is_default': False,
                }
                converge_gateway['incoming'].append(flow2['id'])
                activity['outgoing'] = flow2['id']

                pipeline_tree['flows'].update({
                    flow['id']: flow,
                    flow2['id']: flow2,
                })
            else:
                # sequential stage: activity becomes the new chain tail
                last_node['outgoing'] = flow['id']
                last_node = activity

                pipeline_tree['flows'].update({
                    flow['id']: flow
                })

            pipeline_tree['activities'].update({
                activity['id']: activity
            })

        if is_parallel:
            last_node = converge_gateway

    # connect the final tail to the end event
    flow = {
        'id': line_uniqid(),
        'source': last_node['id'],
        'target': pipeline_tree['end_event']['id'],
        'is_default': False,
    }
    pipeline_tree['flows'].update({
        flow['id']: flow
    })
    last_node['outgoing'] = flow['id']
    pipeline_tree['end_event']['incoming'] = flow['id']

    return draw_pipeline_automatic(pipeline_tree)
Esempio n. 15
0
 def create_model(self, structure_data, **kwargs):
     """Create a template model backed by a snapshot of ``structure_data``."""
     snapshot, _ = Snapshot.objects.create_or_get_snapshot(structure_data)
     kwargs.update(snapshot=snapshot, template_id=node_uniqid())
     return self.create(**kwargs)
Esempio n. 16
0
    def __init__(self):
        """Build a fixture pipeline: start -> ansible -> fabric -> end.

        id_list holds 10 pre-generated unique ids: [0] pipeline, [1] start
        event, [2] end event, [3]-[4] activities, [5]-[7] flows.
        """
        id_list = [node_uniqid() for i in range(10)]
        self.data = {
            'id': id_list[0],
            'name': 'name',
            'start_event': {
                'id': id_list[1],
                'name': 'start',
                'type': 'EmptyStartEvent',
                'incoming': None,
                'outgoing': id_list[5]
            },
            'end_event': {
                'id': id_list[2],
                'name': 'end',
                'type': 'EmptyEndEvent',
                'incoming': id_list[7],
                'outgoing': None
            },
            'activities': {
                id_list[3]: {
                    'id': id_list[3],
                    'type': 'ServiceActivity',
                    'name': 'ansible',
                    'incoming': id_list[5],
                    'outgoing': id_list[6],
                    'component': {
                        'code': 'ansible',
                        'inputs': {
                            'input_test': {
                                'type': 'plain',
                                'value': 'custom2',
                            },
                            'radio_test': {
                                'type': 'plain',
                                'value': '1',
                            },
                        },
                        'global_outputs': {
                            'key1': '${global_key1}',
                        }
                    }
                },
                id_list[4]: {
                    'id': id_list[4],
                    'type': 'ServiceActivity',
                    'name': 'fabric',
                    'incoming': id_list[6],
                    'outgoing': id_list[7],
                    'component': {
                        'code': 'fabric',
                        'inputs': {
                            'input_test': {
                                'type': 'plain',
                                'value': 'value1'
                            },
                            'radio_test': {
                                'type': 'splice',
                                'value': 'before_${global_key1}'
                            },
                        },
                        'global_outputs': {

                        }
                    }
                },
            },
            'flows': {  # all flows (lines) in this pipeline
                id_list[5]: {
                    'id': id_list[5],
                    'source': id_list[1],
                    'target': id_list[3]
                },
                id_list[6]: {
                    'id': id_list[6],
                    'source': id_list[3],
                    'target': id_list[4]
                },
                id_list[7]: {
                    'id': id_list[7],
                    'source': id_list[4],
                    'target': id_list[2]
                },
            },
            'gateways': {  # detailed gateway information lives here
            },
            'data': {
                'inputs': {
                    '${demo_input_test}': {
                        'type': 'plain',
                        'value': 'value1'
                    },
                    '${global_key1}': {
                        'type': 'splice',
                        'source_act': id_list[3],
                        'source_key': 'key1',
                        'value': '',
                    },
                    '${custom_key1}': {
                        'type': 'splice',
                        'value': 'aaa_${global_key1}',
                    },
                    '${custom_key2}': {
                        'type': 'plain',
                        'value': 'custom2'
                    },
                },
                'outputs': {
                    '${demo_input_test}': '${demo_input_test}',
                    '${global_key1}': '${global_key1}',
                },
            }
        }
Esempio n. 17
0
def replace_all_id(pipeline_data):
    """Replace the ids of every node and flow in ``pipeline_data`` in place.

    Nodes and flows receive fresh ids; id references in the data section and
    front-end layout are remapped. Returns a dict of old->new id maps keyed
    by 'start_event', 'end_event', 'activity', 'gateway' and 'flow'.
    """
    flows = pipeline_data[PE.flows]
    node_map = {}
    flow_map = {}

    # step.1 replace nodes id

    # replace events id
    start_event_id = node_uniqid()
    end_event_id = node_uniqid()
    node_map[pipeline_data[PE.start_event][PE.id]] = start_event_id
    node_map[pipeline_data[PE.end_event][PE.id]] = end_event_id

    start_event_id_maps = _replace_event_id(flows,
                                            pipeline_data[PE.start_event],
                                            start_event_id)
    end_event_id_maps = _replace_event_id(flows, pipeline_data[PE.end_event],
                                          end_event_id)

    # replace activities id
    activity_id_maps = {}
    activities = pipeline_data[PE.activities]
    # materialize the keys: the helper re-keys the dict while we iterate,
    # which raises RuntimeError on a live keys() view in Python 3
    keys = list(activities.keys())
    for old_id in keys:
        substituted_id = node_uniqid()
        node_map[old_id] = substituted_id
        _replace_activity_id(flows, activities, old_id, substituted_id)
        activity_id_maps[old_id] = substituted_id

    # replace gateways id
    gateway_id_maps = {}
    gateways = pipeline_data[PE.gateways]
    keys = list(gateways.keys())  # same live-view hazard as above
    for old_id in keys:
        substituted_id = node_uniqid()
        node_map[old_id] = substituted_id
        _replace_gateway_id(flows, gateways, old_id, substituted_id)
        gateway_id_maps[old_id] = substituted_id

    # step.2 replace flows id
    flow_id_maps = {}
    keys = list(flows.keys())  # same live-view hazard as above
    for old_id in keys:
        substituted_id = line_uniqid()
        flow_map[old_id] = substituted_id
        _replace_flow_id(flows, old_id, substituted_id, pipeline_data)
        flow_id_maps[old_id] = substituted_id

    # step.3 replace id in data
    _replace_id_in_data(pipeline_data, node_map)

    # step.4 try to replace front end data
    _replace_front_end_data_id(pipeline_data, node_map, flow_map)

    return {
        'start_event': start_event_id_maps,
        'end_event': end_event_id_maps,
        'activity': activity_id_maps,
        'gateway': gateway_id_maps,
        'flow': flow_id_maps
    }
Esempio n. 18
0
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa

import copy
from pipeline.utils.uniqid import node_uniqid

# 10 pre-generated node ids shared by the PIPELINE_DATA fixture below
# NOTE(review): xrange implies Python 2 — confirm the target interpreter
id_list = [node_uniqid() for i in xrange(10)]
PIPELINE_DATA = {
    'id': id_list[0],
    'name': 'name',
    'start_event': {
        'id': id_list[1],
        'name': 'start',
        'type': 'EmptyStartEvent',
        'incoming': None,
        'outgoing': id_list[5]
    },
    'end_event': {
        'id': id_list[2],
        'name': 'end',
        'type': 'EmptyEndEvent',
        'incoming': id_list[7],
        'outgoing': None
    },
    'activities': {
Esempio n. 19
0
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""

import copy
from pipeline.utils.uniqid import node_uniqid

# 10 pre-generated node ids shared by the PIPELINE_DATA fixture below
id_list = [node_uniqid() for i in range(10)]
PIPELINE_DATA = {
    'id': id_list[0],
    'name': 'name',
    'start_event': {
        'id': id_list[1],
        'name': 'start',
        'type': 'EmptyStartEvent',
        'incoming': None,
        'outgoing': id_list[5]
    },
    'end_event': {
        'id': id_list[2],
        'name': 'end',
        'type': 'EmptyEndEvent',
        'incoming': id_list[7],
Esempio n. 20
0
def replace_long_path_with_dummy(pipeline, ranks):
    """
    @summary: 使用虚拟节点和虚拟边替换长边
    @param pipeline:
    @param ranks:
    @return: real_flows_chain: 被替换掉的长边
    """
    real_flows_chain = {}
    for flow_id, flow in list(pipeline[PWE.flows].items()):
        flow_slack = slack(ranks, flow)
        if flow_slack > 0:
            real_flows_chain[flow_id] = flow
            dummy_nodes_ranks = range(ranks[flow[PWE.source]] + MIN_LEN,
                                      ranks[flow[PWE.target]], MIN_LEN)

            incoming_flow_id = line_uniqid()
            dummy_node_id = node_uniqid()
            dummy_flow = {
                PWE.id: incoming_flow_id,
                PWE.type: DUMMY_FLOW_TYPE,
                PWE.source: flow[PWE.source],
                PWE.target: dummy_node_id
            }
            # change outgoing of flow.source node
            delete_flow_id_from_node_io(
                pipeline['all_nodes'][flow[PWE.source]], flow_id, PWE.outgoing)
            add_flow_id_to_node_io(pipeline['all_nodes'][flow[PWE.source]],
                                   incoming_flow_id, PWE.outgoing)
            # delete long path flow from pipeline
            pipeline[PWE.flows].pop(flow_id)
            for node_rank in dummy_nodes_ranks:
                # 生成当前 dummy node 的 outgoing flow
                outgoing_flow_id = line_uniqid()
                dummy_node = {
                    PWE.id: dummy_node_id,
                    PWE.type: DUMMY_NODE_TYPE,
                    PWE.name: DUMMY_NODE_TYPE,
                    PWE.incoming: incoming_flow_id,
                    PWE.outgoing: outgoing_flow_id
                }

                # add dummy to pipeline
                pipeline['all_nodes'].update({dummy_node_id: dummy_node})
                pipeline[PWE.flows].update({incoming_flow_id: dummy_flow})

                # add dummy to ranks
                ranks.update({dummy_node_id: node_rank})

                # next loop init data
                incoming_flow_id = outgoing_flow_id
                dummy_node_id = node_uniqid()
                dummy_flow = {
                    PWE.id: incoming_flow_id,
                    PWE.type: DUMMY_FLOW_TYPE,
                    PWE.source: dummy_node[PWE.id],
                    PWE.target: dummy_node_id
                }

            # add last dummy flow to pipeline
            dummy_flow[PWE.target] = flow[PWE.target]
            pipeline[PWE.flows].update({incoming_flow_id: dummy_flow})
            # change incoming of flow.target node
            delete_flow_id_from_node_io(
                pipeline['all_nodes'][flow[PWE.target]], flow_id, PWE.incoming)
            add_flow_id_to_node_io(pipeline['all_nodes'][flow[PWE.target]],
                                   incoming_flow_id, PWE.incoming)
    return real_flows_chain
Esempio n. 21
0
from __future__ import absolute_import

from copy import deepcopy

from django.test import TestCase, Client

from pipeline.utils.uniqid import node_uniqid

from gcloud.tests.mock import *  # noqa
from gcloud.tests.mock_settings import *  # noqa
from gcloud.taskflow3 import api


TEST_BIZ_CC_ID = '2'  # do not change this to non number
# 10 pre-generated node ids shared by the TEST_PIPELINE_TREE fixture below
TEST_ID_LIST = [node_uniqid() for i in range(10)]
TEST_PIPELINE_TREE = {
    'id': TEST_ID_LIST[0],
    'name': 'name',
    'start_event': {
        'id': TEST_ID_LIST[1],
        'name': 'start',
        'type': 'EmptyStartEvent',
        'incoming': None,
        'outgoing': TEST_ID_LIST[5]
    },
    'end_event': {
        'id': TEST_ID_LIST[2],
        'name': 'end',
        'type': 'EmptyEndEvent',
        'incoming': TEST_ID_LIST[7],
def main_test():
    """Build and run a linear 25-activity pipeline through PipelineParser.

    id_list layout: [0] pipeline id, [1] start event, [53] end event; even
    indices 2..52 are flows and the following odd indices are activities,
    chained start -> act -> ... -> act -> end.
    NOTE(review): xrange implies Python 2 — confirm the target interpreter.
    """
    id_list = [node_uniqid() for i in xrange(100)]
    pipe1 = {
        'id': id_list[0],
        'name': 'name',
        'start_event': {
            'id': id_list[1],
            'name': 'start',
            'type': 'EmptyStartEvent',
            'incoming': None,
            'outgoing': id_list[2]
        },
        'end_event': {
            'id': id_list[53],
            'name': 'end',
            'type': 'EmptyEndEvent',
            'incoming': id_list[52],
            'outgoing': None
        },
        'activities': {
        },
        'flows': {  # all flows (lines) in this pipeline
        },
        'gateways': {  # detailed gateway information lives here
        },
        'data': {
            'inputs': {
            },
            'outputs': {
            },
        }
    }
    # chain flow[i] -> activity[i+1] for i = 2, 4, ..., 50
    for i in xrange(2, 51, 2):
        pipe1['flows'][id_list[i]] = {
            'id': id_list[i],
            'source': id_list[i - 1],
            'target': id_list[i + 1]
        }
        pipe1['activities'][id_list[i + 1]] = {
            'id': id_list[i + 1],
            'type': 'ServiceActivity',
            'name': 'first_task',
            'incoming': id_list[i],
            'outgoing': id_list[i + 2],
            'component': {
                'code': 'demo',
                'inputs': {
                    'input_test': {
                        'type': 'plain',
                        'value': '2',
                    },
                    'radio_test': {
                        'type': 'plain',
                        'value': '1',
                    },
                },
            }
        }
    # final flow connecting the last activity to the end event
    pipe1['flows'][id_list[52]] = {
         'id': id_list[52],
         'source': id_list[52 - 1],
         'target': id_list[52 + 1]
    }
    parser_obj = PipelineParser(pipe1)
    run_pipeline(parser_obj.parser())