# --- Example 1 ---
 def setUp(self):
     """Build a constant pool with chained references (key_c -> key_b ->
     key_a) and the expected fully-resolved mapping used by the assertions
     of this test case."""
     raw = {
         '${key_a}': {'value': 'haha'},
         '${key_b}': {'value': 'str_${key_a}'},
         '${key_c}': {'value': 'str_${key_b}'},
     }
     # pool under test
     self.pool = ConstantPool(raw)
     # what the pool should look like once every reference is expanded
     self.resolved_data = {
         '${key_a}': {'value': 'haha'},
         '${key_b}': {'value': 'str_haha'},
         '${key_c}': {'value': 'str_str_haha'},
     }
# --- Example 2 ---
def format_web_data_to_pipeline(web_pipeline, is_subprocess=False):
    """
    @summary: Convert a web-format pipeline description into the engine
        format: constants are classified, cross references between hidden
        global variables are resolved, and every activity (ServiceActivity
        or SubProcess) gets its inputs/outputs wired up.
    @param web_pipeline: web-format pipeline tree; it is deep-copied, the
        original is left untouched
    @param is_subprocess: whether this tree is a subprocess of another tree
    @return: engine-format pipeline tree
    @raise exceptions.FlowTypeError: on an unknown activity type
    """
    pipeline_tree = copy.deepcopy(web_pipeline)
    classification = classify_constants(pipeline_tree.pop('constants'),
                                        is_subprocess)

    # resolve cross references between hidden global variables
    pool_obj = ConstantPool(classification['constant_pool'])
    data_inputs = calculate_constants_type(pool_obj.pool,
                                           classification['data_inputs'])
    data_inputs = calculate_constants_type(classification['params'],
                                           data_inputs)
    classification['data_inputs'] = data_inputs

    pipeline_tree['data'] = {
        'inputs': data_inputs,
        'outputs': {key: key for key in pipeline_tree.pop('outputs')},
    }

    for act_id, act in pipeline_tree['activities'].items():
        if act['type'] == 'ServiceActivity':
            act_data = act['component'].pop('data')
            # keep only the inputs that belong to this component
            all_inputs = calculate_constants_type(act_data, data_inputs)
            act['component']['inputs'] = {
                key: value
                for key, value in all_inputs.items() if key in act_data
            }
            act['component']['global_outputs'] = \
                classification['acts_outputs'].get(act_id, {})
        elif act['type'] == 'SubProcess':
            # collect parent inputs referenced by the subprocess' exposed
            # ("show") constants so they can be passed down as params
            parent_params = {}
            for info in act['pipeline']['constants'].values():
                if info['show_type'] != 'show':
                    continue
                for ref_key in ConstantTemplate(
                        info['value']).get_reference():
                    formatted_key = format_constant_key(ref_key)
                    if formatted_key in data_inputs:
                        parent_params[formatted_key] = \
                            data_inputs[formatted_key]
            act['params'] = parent_params
            act['pipeline'] = format_web_data_to_pipeline(act['pipeline'],
                                                          is_subprocess=True)
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act['type'])

    return pipeline_tree
# --- Example 3 ---
 def test_data_for_execution(self):
     """data_for_execution should unwrap the {hook, value} descriptors into
     plain key -> value execution inputs."""
     raw_inputs = {
         'cc_host_ip': {'hook': False, 'value': '127.0.0.1'},
         'cc_plat_id': {'hook': False, 'value': '1'},
         'cc_module': {'hook': False, 'value': '2'},
     }
     component = self.component(raw_inputs)
     execution_data = component.data_for_execution(ConstantPool({}))
     expected = {
         'cc_host_ip': '127.0.0.1',
         'cc_plat_id': '1',
         'cc_module': '2',
     }
     self.assertEqual(expected, execution_data.get_inputs())
# --- Example 4 ---
    def _remove_useless_constants(exclude_task_nodes_id,
                                  pipeline_tree,
                                  remove_outputs_without_refs=True):
        """
        Drop constants that are no longer referenced anywhere in the flow
        (after the given task nodes were excluded) and rebuild the ``index``
        of the surviving constants so it stays dense. ``pipeline_tree`` is
        mutated in place.

        @param exclude_task_nodes_id: ids of the task nodes that were
            excluded from the pipeline
        @param pipeline_tree: pipeline tree to clean up (modified in place)
        @param remove_outputs_without_refs: whether to remove custom
            variables that are marked as outputs of this flow but are not
            referenced by any node in it
        @return: None
        """
        # pop unreferenced constant
        # gather every place a constant can be referenced from:
        # service-activity component data, exposed subprocess constants and
        # gateway conditions; keys are namespaced with the node id.
        data = {}
        for act_id, act in list(pipeline_tree[PE.activities].items()):
            if act["type"] == PE.ServiceActivity:
                node_data = {
                    ("%s_%s" % (act_id, key)): value
                    for key, value in list(act["component"]["data"].items())
                }
            # PE.SubProcess
            else:
                node_data = {
                    ("%s_%s" % (act_id, key)): value
                    for key, value in list(act.get("constants", {}).items())
                    if value["show_type"] == "show"
                }
            data.update(node_data)

        for gw_id, gw in list(pipeline_tree[PE.gateways].items()):
            if gw["type"] in [
                    PE.ExclusiveGateway, PE.ConditionalParallelGateway
            ]:
                # conditions only carry an expression; wrap it so it looks
                # like a regular constant value for the reference scan
                gw_data = {("%s_%s" % (gw_id, key)): {
                    "value": value["evaluate"]
                }
                           for key, value in list(gw["conditions"].items())}
                data.update(gw_data)

        # get all referenced constants in flow
        constants = pipeline_tree[PE.constants]

        # fix-point loop: a referenced constant may itself reference other
        # constants, so keep rescanning until no new key is discovered
        referenced_keys = []
        while True:
            last_count = len(referenced_keys)
            cons_pool = ConstantPool(data, lazy=True)
            refs = cons_pool.get_reference_info(strict=False)
            for keys in list(refs.values()):
                for key in keys:
                    # add outputs keys later
                    if key in constants and key not in referenced_keys:
                        referenced_keys.append(key)
                        data.update({key: constants[key]})
            if len(referenced_keys) == last_count:
                break

        # keep outputs constants
        outputs_keys = [
            key for key, value in list(constants.items())
            if value["source_type"] == "component_outputs"
        ]
        referenced_keys = list(set(referenced_keys + outputs_keys))
        init_outputs = pipeline_tree[PE.outputs]
        # keep only the declared outputs that survived the reference scan
        pipeline_tree[PE.outputs] = [
            key for key in init_outputs if key in referenced_keys
        ]
        # rebuild constants index
        referenced_keys.sort(key=lambda x: constants[x]["index"])
        new_constants = {}
        for index, key in enumerate(referenced_keys):
            value = constants[key]
            value["index"] = index
            # delete constant reference info to task node
            for act_id in exclude_task_nodes_id:
                if act_id in value["source_info"]:
                    value["source_info"].pop(act_id)
            new_constants[key] = value

        # optionally keep custom variables that were declared as outputs
        # even though nothing references them anymore
        if not remove_outputs_without_refs:
            for key, value in constants.items():
                if value[
                        "source_type"] == "custom" and key in init_outputs and key not in pipeline_tree[
                            PE.outputs]:
                    new_constants[key] = value
                    pipeline_tree[PE.outputs].append(key)

        pipeline_tree[PE.constants] = new_constants
# --- Example 5 ---
def format_web_data_to_pipeline(web_pipeline):
    """
    @summary: Convert a web-format pipeline description into the engine
        format: constants are split into the resolvable pool, lazy
        variables and component-output references, then every activity
        (ServiceActivity or SubProcess) is rewired with resolved
        inputs/outputs.
    @param web_pipeline: web-format pipeline tree; it is deep-copied, the
        original is left untouched
    @return: engine-format pipeline tree
    @raise exceptions.FlowTypeError: on an unknown activity type
    """
    pipeline_tree = copy.deepcopy(web_pipeline)
    constants = pipeline_tree.pop('constants')
    constant_pool = {}
    data_inputs = {}
    acts_outputs = {}
    # NOTE: .items() replaces the py2-only .iteritems() so this also runs
    # on Python 3; iteration behavior is identical.
    for key, info in constants.items():
        # reset per constant so a class looked up for a previous constant
        # can never leak into this iteration's checks
        var_cls = None
        if info['source_tag']:
            var_cls = library.VariableLibrary.get_var_class(
                info['source_tag'].split('.')[0])
        if info['source_type'] == 'component_outputs':
            # py3-safe replacement for .keys()[0] / .values()[0][0]:
            # take the first (step, keys) pair of source_info together so
            # the step and key always come from the same entry
            source_step, source_keys = next(iter(info['source_info'].items()))
            source_key = source_keys[0]
            data_inputs[key] = {
                'type': 'splice',
                'source_act': source_step,
                'source_key': source_key,
                'value': info['value'],
            }
            acts_outputs.setdefault(source_step, {}).update({
                source_key: key,
            })
        # custom Lazy-type variable
        elif info['source_tag'] and var_cls and issubclass(
                var_cls, var.LazyVariable):
            data_inputs[key] = {
                'type': 'lazy',
                'source_tag': info['source_tag'],
                'value': info['value'],
            }
        else:
            constant_pool[key] = info

    # resolve cross references inside the plain constant pool
    pool_obj = ConstantPool(constant_pool)
    data_inputs = calculate_constants_type(pool_obj.pool, data_inputs)
    pipeline_tree['data'] = {
        'inputs': data_inputs,
        'outputs': {key: key
                    for key in pipeline_tree.pop('outputs')},
    }

    for act_id, act in pipeline_tree['activities'].items():
        if act['type'] == 'ServiceActivity':
            act_data = act['component'].pop('data')
            # substitute pool constants referenced by the component data
            for info in act_data.values():
                info['value'] = pool_obj.resolve_value(info['value'])

            all_inputs = calculate_constants_type(act_data, data_inputs)
            act['component']['inputs'] = {
                key: value
                for key, value in all_inputs.items() if key in act_data
            }
            act['component']['global_outputs'] = acts_outputs.get(act_id, {})
        elif act['type'] == 'SubProcess':
            act_data = {}
            act_constants = {}
            for key, info in act['pipeline']['constants'].items():
                # only exposed ("show") constants are resolved against the
                # parent pool and re-exported on the subprocess
                if info['show_type'] == 'show':
                    info['value'] = pool_obj.resolve_value(info['value'])
                    act_data[key] = info
                act_constants[key] = info
            act['pipeline']['constants'] = act_constants
            # materialize as a list: a py3 dict view would be lazy and tied
            # to act_data
            act['exposed_constants'] = list(act_data.keys())
            all_inputs = calculate_constants_type(act_data, data_inputs)

            act['pipeline'] = format_web_data_to_pipeline(act['pipeline'])
            for key in act['exposed_constants']:
                act['pipeline']['data']['inputs'][key] = all_inputs[key]
        else:
            raise exceptions.FlowTypeError(u"Unknown Activity type: %s" %
                                           act['type'])

    return pipeline_tree
# --- Example 6 ---
    def preview_pipeline_tree_exclude_task_nodes(pipeline_tree,
                                                 exclude_task_nodes_id=None):
        """
        Remove the given optional task nodes from ``pipeline_tree`` in
        place: splice their incoming flow directly to the node that
        followed them, patch the web location/line data accordingly, then
        drop constants that are no longer referenced and rebuild the
        remaining constants' ``index``.

        @param pipeline_tree: pipeline tree to modify in place
        @param exclude_task_nodes_id: ids of task nodes to remove; every id
            must exist in the tree and the node must be marked optional
        @return: None
        @raise Exception: if an id is unknown or the node is not optional
        """
        if exclude_task_nodes_id is None:
            exclude_task_nodes_id = []

        # index web drawing data by id for quick patching below
        locations = {
            item['id']: item
            for item in pipeline_tree.get(PE.location, [])
        }
        lines = {item['id']: item for item in pipeline_tree.get(PE.line, [])}

        for task_node_id in exclude_task_nodes_id:
            if task_node_id not in pipeline_tree[PE.activities]:
                error = 'task node[id=%s] is not in template pipeline tree' % task_node_id
                raise Exception(error)

            task_node = pipeline_tree[PE.activities].pop(task_node_id)
            if not task_node['optional']:
                error = 'task node[id=%s] is not optional' % task_node_id
                raise Exception(error)

            # change next_node's incoming: task nodes and control nodes
            # store incoming differently
            # change incoming_flow's target to next node
            # delete outgoing_flow
            incoming_id, outgoing_id = task_node[PE.incoming], task_node[
                PE.outgoing]
            incoming_flow = pipeline_tree[PE.flows][incoming_id]
            outgoing_flow = pipeline_tree[PE.flows][outgoing_id]
            target_id = outgoing_flow[PE.target]

            if target_id in pipeline_tree[PE.activities]:
                # activities keep a single incoming flow id
                next_node = pipeline_tree[PE.activities][target_id]
                next_node[PE.incoming] = incoming_id
            elif target_id in pipeline_tree[PE.gateways]:
                next_node = pipeline_tree[PE.gateways][target_id]
                if next_node['type'] in [
                        PE.ExclusiveGateway, PE.ParallelGateway
                ]:
                    next_node[PE.incoming] = incoming_id
                # PE.ConvergeGateway
                else:
                    # converge gateways keep a list: swap the removed flow
                    # for the spliced one
                    next_node[PE.incoming].pop(
                        next_node[PE.incoming].index(outgoing_id))
                    next_node[PE.incoming].append(incoming_id)
            # PE.end_event
            else:
                next_node = pipeline_tree[PE.end_event]
                next_node[PE.incoming] = incoming_id

            incoming_flow[PE.target] = next_node['id']

            pipeline_tree[PE.flows].pop(outgoing_id)

            # web location data
            # best effort: drawing data may be absent or inconsistent, so
            # failures here are logged but never abort the preview
            try:
                locations.pop(task_node_id)
                lines.pop(outgoing_id)
                lines[incoming_id][PE.target]['id'] = next_node['id']
            except Exception as e:
                logger.exception(
                    'create_pipeline_instance_exclude_task_nodes adjust web data error:%s'
                    % e)

        # NOTE(review): on Python 3 .values() returns view objects, not
        # lists — confirm downstream consumers (e.g. JSON serialization)
        # accept that
        pipeline_tree[PE.line] = lines.values()
        pipeline_tree[PE.location] = locations.values()

        # pop unreferenced constant
        # collect every spot a constant can be referenced from, namespacing
        # keys with the owning node id
        data = {}
        for task_node_id, task_node in pipeline_tree[PE.activities].items():
            if task_node['type'] == PE.ServiceActivity:
                node_data = {
                    ('%s_%s' % (task_node_id, key)): value
                    for key, value in task_node['component']['data'].items()
                }
            # PE.SubProcess
            else:
                node_data = {('%s_%s' % (task_node_id, key)): value
                             for key, value in task_node['constants'].items()
                             if value['show_type'] == 'show'}
            data.update(node_data)

        for gw_id, gw in pipeline_tree[PE.gateways].items():
            if gw['type'] == PE.ExclusiveGateway:
                # wrap the condition expression so it looks like a constant
                # value for the reference scan
                gw_data = {('%s_%s' % (gw_id, key)): {
                    'value': value['evaluate']
                }
                           for key, value in gw['conditions'].items()}
                data.update(gw_data)

        # fix-point loop: referenced constants may themselves reference
        # further constants, so rescan until no new key appears
        constants = pipeline_tree[PE.constants]
        referenced_constants = []
        while True:
            last_count = len(referenced_constants)
            pool = ConstantPool(data)
            refs = pool.get_reference_info(strict=False)
            for _, keys in refs.items():
                for key in keys:
                    if key in constants and key not in referenced_constants:
                        referenced_constants.append(key)
                        data.update({key: constants[key]})
            if len(referenced_constants) == last_count:
                break
            # NOTE(review): redundant — last_count is reassigned at the top
            # of the loop before it is read again
            last_count = len(referenced_constants)

        # rebuild constants index
        referenced_constants.sort(key=lambda x: constants[x]['index'])
        new_constants = {}
        for index, key in enumerate(referenced_constants):
            value = constants[key]
            value['index'] = index
            # delete constant reference info  to task node
            for task_node_id in exclude_task_nodes_id:
                if task_node_id in value['source_info']:
                    value['source_info'].pop(task_node_id)
            new_constants[key] = value
        pipeline_tree[PE.constants] = new_constants

        return