def get_meta_json_file_dict(json_path):
    plLogger = PLLogger.GetLogger("methodology")
    plLogger.LogDebug("begin.get_meta_json_file_dict.RunMethodologyTestCommand")

    this_cmd = get_this_cmd()
    meta_json = None

    try:
        # Open the json file
        json_string = None
        file_path = os.path.abspath(json_path)
        if not os.path.exists(file_path):
            return None, "File {} does not exist".format(file_path)
        with open(file_path, "r") as jsonFile:
            json_string = jsonFile.read()
        if not json_string:
            return None, "Error reading methodology json file"
    except Exception:
        return None, "Invalid methodology JSON file: {}".format(json_path)

    # Validate against the schema
    res = json_utils.validate_json(json_string, this_cmd.Get("InputJsonSchema"))
    if res != "":
        err_str = "Methodology JSON is invalid or does not conform to the " + "schema: " + res
        return None, err_str

    # Load the json if it passes schema validation
    err_str, meta_json = json_utils.load_json(json_string)
    if err_str != "":
        return None, err_str

    plLogger.LogDebug("end.get_meta_json_file_dict.RunMethodologyTestCommand")
    return meta_json, ""
def parse_prop_val_data(prop_val_dict, row_idx):
    plLogger = PLLogger.GetLogger('methodology')
    plLogger.LogDebug('begin.processing_function_util.parse_prop_value_data')

    # Each object in the input dictionary corresponds to an
    # object in the interface list
    data_obj = {}

    # Validate the prop_val_dict against the schema
    res = json_utils.validate_json(json.dumps(prop_val_dict),
                                   get_property_dict_schema())
    if res != "":
        plLogger.LogError(res)
        return data_obj

    main_prop_val_list = []
    if "PropertyValueDict" in prop_val_dict:
        main_prop_val_obj = {}
        main_prop_val_obj["className"] = prop_val_dict["ClassName"]
        main_prop_val_obj["tagName"] = prop_val_dict["ParentTagName"]
        sub_prop_val_obj = {}
        for key, value in prop_val_dict["PropertyValueDict"].items():
            if isinstance(value, list):
                sub_prop_val_obj[key] = value[row_idx]
            else:
                sub_prop_val_obj[key] = value
        main_prop_val_obj["propertyValueList"] = sub_prop_val_obj
        main_prop_val_list.append(main_prop_val_obj)

        data_obj["propertyValueList"] = main_prop_val_list

    plLogger.LogDebug('end.processing_function_util.parse_prop_value_data')
    return data_obj
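
# A worked input/output sketch for parse_prop_val_data (values are illustrative).
# List-valued entries in PropertyValueDict are indexed by row_idx, scalar entries
# pass through unchanged:
#
#   prop_val_dict = {
#       "ClassName": "EmulatedDevice",
#       "ParentTagName": "East_Devices",
#       "PropertyValueDict": {
#           "DeviceCount": [10, 20, 30],    # one value per row
#           "RouterId": "192.0.2.1"         # same value for every row
#       }
#   }
#
#   parse_prop_val_data(prop_val_dict, 1) returns, assuming the schema check passes:
#
#   {"propertyValueList": [{"className": "EmulatedDevice",
#                           "tagName": "East_Devices",
#                           "propertyValueList": {"DeviceCount": 20,
#                                                 "RouterId": "192.0.2.1"}}]}
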
def parse_merge_list_data(merge_list_dict, row_idx):
    plLogger = PLLogger.GetLogger('methodology')
    plLogger.LogDebug('begin.processing_function_util.parse_merge_list_data')

    # Each object in the input dictionary corresponds to an
    # object in the interface list
    data_obj = {}

    # Validate the merge_list_dict against the schema
    res = json_utils.validate_json(json.dumps(merge_list_dict),
                                   get_merge_list_schema())
    if res != "":
        plLogger.LogError(res)
        return data_obj

    data_obj["mergeSourceTag"] = merge_list_dict["ParentTagName"]
    data_obj["mergeTargetTag"] = merge_list_dict["MergeTargetTag"]
    data_obj["mergeSourceTemplateFile"] = merge_list_dict["MergeSourceTemplateFile"]

    if "PropertyValueDict" in merge_list_dict:
        prop_val_list = parse_prop_val_data(merge_list_dict, row_idx)
        data_obj["propertyValueList"] = prop_val_list["propertyValueList"]

    if "StmPropertyModifierDict" in merge_list_dict:
        prop_mod_list = parse_prop_mod_data(merge_list_dict, row_idx)
        data_obj["stmPropertyModifierList"] = prop_mod_list["stmPropertyModifierList"]

    plLogger.LogDebug('end.processing_function_util.parse_merge_list_data')
    return data_obj
def get_txml_proc_dicts(txml_root):
    plLogger = PLLogger.GetLogger('methodology')
    plLogger.LogDebug('RunStmTestCaseCommand.get_txml_proc_dicts.begin')
    MetaMan = txml_utils.MetaManager
    input_dict_list = []
    proc_funcs_ele = txml_root.find('.//' + MetaMan.P_PROC_FUNCS)
    if proc_funcs_ele is None:
        return input_dict_list
    for proc_func_ele in proc_funcs_ele:
        if proc_func_ele.tag != MetaMan.P_PROC_DICT:
            plLogger.LogWarn('(get_txml_proc_dicts) Skipping element '
                             + proc_func_ele.tag +
                             ' in the TXML')
            continue

        input_dict = proc_func_ele.get(MetaMan.P_INPUT_DICT)
        if input_dict is not None and input_dict != "":
            # Validate the interface_dict against the schema
            res = json_utils.validate_json(input_dict,
                                           get_datamodel_dict_schema())
            if res != "":
                plLogger.LogError(res)
                return input_dict_list
            err_str, input_json = json_utils.load_json(input_dict)

            if err_str != "":
                # Skip entries whose JSON fails to load rather than
                # appending None to the result list
                plLogger.LogError(err_str)
                continue
            input_dict_list.append(input_json)

    plLogger.LogDebug('RunStmTestCaseCommand.get_txml_proc_dicts.end')
    return input_dict_list
def validate(MethodologyJson):
    plLogger = PLLogger.GetLogger("methodology")
    plLogger.LogDebug("CreateMethodologyTestCaseCommand.validate")
    this_cmd = get_this_cmd()
    res = json_utils.validate_json(MethodologyJson, this_cmd.Get("InputJsonSchema"))
    if res != "":
        return "MethodologyJson does not conform to the schema: " + res
    return ""
def parse_protocol_data(protocol_dict, row_idx):
    plLogger = PLLogger.GetLogger('methodology')
    plLogger.LogDebug('begin.processing_function_util.parse_protocol_data')

    # Each object in the input dictionary corresponds to an
    # object in the protocol list
    data_obj = {}

    # Validate the protocol_dict against the schema
    res = json_utils.validate_json(json.dumps(protocol_dict),
                                   get_protocol_dict_schema())
    if res != "":
        plLogger.LogError(res)
        return data_obj

    parent_tag_name = protocol_dict["ParentTagName"]
    data_obj["protocolSrcTag"] = parent_tag_name
    class_name = protocol_dict["ClassName"]
    main_prop_val_list = []
    main_prop_mod_list = []

    if "PropertyValueDict" in protocol_dict:
        main_prop_val_obj = {}
        main_prop_val_obj["className"] = class_name
        main_prop_val_obj["tagName"] = parent_tag_name
        sub_prop_val_obj = {}
        for key, value in protocol_dict["PropertyValueDict"].items():
            if isinstance(value, list):
                sub_prop_val_obj[key] = value[row_idx]
            else:
                sub_prop_val_obj[key] = value
        main_prop_val_obj["propertyValueList"] = sub_prop_val_obj
        main_prop_val_list.append(main_prop_val_obj)

        data_obj["propertyValueList"] = main_prop_val_list

    if "StmPropertyModifierDict" in protocol_dict:
        for key in protocol_dict["StmPropertyModifierDict"]:
            property_name = key
            property_info = protocol_dict["StmPropertyModifierDict"][key]
            prop_mod_obj = {}
            prop_mod_obj["className"] = class_name
            prop_mod_obj["tagName"] = parent_tag_name + "." + property_name
            prop_mod_obj["parentTagName"] = parent_tag_name
            prop_mod_obj["propertyName"] = property_name
            prop_val_list_obj = {}
            for prop, value in property_info.items():
                if isinstance(value, list):
                    prop_val_list_obj[prop] = value[row_idx]
                else:
                    prop_val_list_obj[prop] = value
            prop_mod_obj["propertyValueList"] = prop_val_list_obj
            main_prop_mod_list.append(prop_mod_obj)

        data_obj["stmPropertyModifierList"] = main_prop_mod_list

    plLogger.LogDebug('end.processing_function_util.parse_protocol_data')
    return data_obj
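
# A sketch of how StmPropertyModifierDict entries are flattened by
# parse_protocol_data (values are illustrative).  Each key is a property name whose
# modifier settings may be per-row lists selected by row_idx; the generated tagName
# is "<ParentTagName>.<propertyName>":
#
#   protocol_dict = {
#       "ParentTagName": "East_Bgp",
#       "ClassName": "BgpRouterConfig",
#       "StmPropertyModifierDict": {
#           "AsNum": {"Start": [100, 200], "Step": 1}
#       }
#   }
#
#   parse_protocol_data(protocol_dict, 0)["stmPropertyModifierList"] is, assuming
#   the schema check passes:
#
#   [{"className": "BgpRouterConfig",
#     "tagName": "East_Bgp.AsNum",
#     "parentTagName": "East_Bgp",
#     "propertyName": "AsNum",
#     "propertyValueList": {"Start": 100, "Step": 1}}]
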
def validate_input_dict(input_dict):
    # Validate the input_dict against the schema
    res = json_utils.validate_json(json.dumps(input_dict),
                                   get_datamodel_dict_schema())
    if res != "":
        return res

    weight_list = input_dict["input"]["customDict"]["Weight"]
    num_rows = len(weight_list)

    res = is_valid_rows(input_dict["input"]["customDict"]["EnableVlan"], num_rows, "EnableVlan")
    if res != "":
        return res

    dict_list = [input_dict["input"]["interfaceDict"], input_dict["input"]["protocolDict"]]
    # Loop through the interfaceDict and protocolDict entries in the input_dict
    for input_dict_list in dict_list:
        for entry in input_dict_list:
            if "EnableProperty" in entry:
                res = is_valid_rows(entry["EnableProperty"], num_rows, "EnableProperty")
                if res != "":
                    return res

            res = is_valid_rows(entry["ParentTagName"], num_rows, "ParentTagName")
            if res != "":
                return res

            res = is_valid_rows(entry["ClassName"], num_rows, "ClassName")
            if res != "":
                return res

            if "PropertyValueDict" in entry:
                for key in entry["PropertyValueDict"]:
                    res = is_valid_rows(entry["PropertyValueDict"][key], num_rows, key)
                    if res != "":
                        return res

            if "StmPropertyModifierDict" in entry:
                for key in entry["StmPropertyModifierDict"]:
                    property_info = entry["StmPropertyModifierDict"][key]
                    for prop in property_info:
                        res = is_valid_rows(property_info[prop], num_rows, prop)
                        if res != "":
                            return res
    return ""
def match_modifier_to_obj_and_prop_names(mod_ele, obj_name, prop_name):
    plLogger = PLLogger.GetLogger("methodology")
    plLogger.LogDebug(
        "match_modifier_to_obj_and_prop_names: " + str(mod_ele) + "  " + str(obj_name) + "  " + str(prop_name)
    )
    if mod_ele is None:
        return "Invalid ElementTree element", None
    mod_info = mod_ele.get("ModifierInfo")
    if mod_info is None:
        return "Missing ModifierInfo attribute", None
    res = json_utils.validate_json(mod_info, proc_func.get_range_modifier_json_schema())
    if res != "":
        t_err_str = "Failed to validate ModifierInfo JSON against " + "its schema: " + res
        return t_err_str, None
    err_str, mod_dict = json_utils.load_json(mod_info)
    if err_str != "":
        t_err_str = "Failed to load ModifierInfo JSON: " + err_str
        return t_err_str, None
    plLogger.LogInfo("mod_dict: " + str(mod_dict))
    if obj_name != "":
        mod_obj_name = mod_dict.get("objectName")
        plLogger.LogInfo("mod_obj_name: " + str(mod_obj_name))
        if mod_obj_name == obj_name:
            if prop_name != "":
                mod_prop_name = mod_dict.get("propertyName")
                plLogger.LogInfo("mod_prop_name: " + str(mod_prop_name))
                if mod_prop_name == prop_name:
                    return "", mod_ele
            else:
                # No property name was given; the object name match is enough
                return "", mod_ele
    elif prop_name != "":
        mod_prop_name = mod_dict.get("propertyName")
        if mod_prop_name == prop_name:
            return "", mod_ele
    else:
        # Neither an object name nor a property name was given; accept the element
        return "", mod_ele
    # Didn't match
    return "", None
def validate(TestCaseKey, StmTestCase, MethodologyKey, MethodologyJson, EnableResourceCheck):
    plLogger = PLLogger.GetLogger("methodology")
    plLogger.LogDebug("begin.validate.RunMethodologyTestCommand.")

    if TestCaseKey:
        # Check if test case key exists in installed methodologies
        test_case_handle, err_str = mm_utils.get_test_case_from_key(TestCaseKey)
        return err_str
    elif StmTestCase:
        hnd_reg = CHandleRegistry.Instance()
        test_case = hnd_reg.Find(StmTestCase)
        if test_case is None or not test_case.IsTypeOf("StmTestCase"):
            plLogger.LogError("Was unable to find StmTestCase with handle " + str(StmTestCase) + " in the system.")
            return "Could not find StmTestCase"
    else:
        # Must specify a key and json
        if not MethodologyKey or not MethodologyJson:
            return "Must specify a TestCaseKey, StmTestCase or MethodologyKey and MethodologyJson"

        # Validate against the schema
        this_cmd = get_this_cmd()
        res = json_utils.validate_json(MethodologyJson, this_cmd.Get("InputJsonSchema"))
        if res != "":
            return "Methodology JSON is invalid or does not conform to the schema: " + res

        # Load the json if it passes schema validation
        err_str, meth_json = json_utils.load_json(MethodologyJson)
        if err_str != "":
            return err_str

        # Check the MethodologyKey matches the meth key in the json
        if MethodologyKey != meth_json["methodology_key"]:
            return "Methodology Key does not match the methodology_key in the JSON"

    plLogger.LogDebug("end.validate.RunMethodologyTestCommand")
    return ""
def run(TargetObjectList, TargetObjectTagList, MixInfo, MixTagName, AutoExpandTemplateMix):
    # TODO: Add sample JSON

    plLogger = PLLogger.GetLogger("methodology")
    stc_sys = CStcSystem.Instance()
    project = stc_sys.GetObject("Project")
    this_cmd = get_this_cmd()
    ctor = CScriptableCreator()

    # Validate the input MixInfo against its schema
    res = json_utils.validate_json(MixInfo, this_cmd.Get('MixInfoJsonSchema'))
    if res != '':
        plLogger.LogError(res)
        this_cmd.Set("Status", res)
        return False

    # Validate the hierarchy against our current command list...
    msg = mix_utils.run_validate_hierarchy(this_cmd, [hierarchy()])
    if msg != '':
        err_str = 'Invalid Sequence: ' + msg
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False
    # Tag the commands in our command list according to our hierarchy information...
    tag_dict = mix_utils.run_tag_hierarchy(this_cmd, [hierarchy()])

    # Setup for property chaining / outputting the tag dictionary...
    this_cmd.Set('GroupCommandTagInfo', json.dumps(tag_dict))
    plLogger.LogInfo('GroupCommandTagInfo: ' + this_cmd.Get('GroupCommandTagInfo'))

    # Create the StmTemplateMix object...
    plLogger.LogInfo('Creating the route mix object...')
    mix = ctor.Create('StmTemplateMix', project)
    this_cmd.Set('StmTemplateMix', mix.GetObjectHandle())
    # If a MixTagName was specified, then tag the mix with it...
    if MixTagName:
        tag_utils.add_tag_to_object(mix, MixTagName)
    # Copy the entire MixInfo into StmTemplateMix object...
    mix.Set('MixInfo', MixInfo)

    # Load the MixInfo...
    err_str, mix_info = json_utils.load_json(MixInfo)
    if err_str != "":
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False

    # Directly configure all tagged commands in our hierarchy...
    plLogger.LogDebug('setting up commands in the group based on ' + str(tag_dict))

    plLogger.LogDebug('loading json from MixInfo: ' + str(MixInfo))
    num_rows = len(mix_info.get('components', []))
    plLogger.LogDebug('Number of Rows: ' + str(num_rows))

    # Pass table to configurator - this guy will configure other commands per iteration...
    conf_cmd = tag_utils.get_tagged_objects_from_string_names([tag_dict['rowConfigurator']])[0]
    conf_cmd.Set('StmTemplateMix', mix.GetObjectHandle())
    conf_cmd.Set('TagData', this_cmd.Get('GroupCommandTagInfo'))

    # Pass the StmTemplateMix to the CreateTemplateConfigCommand...
    ctc_cmd = tag_utils.get_tagged_objects_from_string_names([tag_dict['templateConfigurator']])[0]
    ctc_cmd.Set('StmTemplateMix', mix.GetObjectHandle())
    ctc_cmd.Set('AutoExpandTemplate', False)

    # Setup Row Iterator (pass in number of rows) - the While command's expression command...
    iter_cmd = tag_utils.get_tagged_objects_from_string_names([tag_dict['rowIterator']])[0]
    iter_cmd.Set('IterMode', 'STEP')
    iter_cmd.Set('StepVal', '1')
    iter_cmd.Set('ValueType', 'RANGE')
    iter_cmd.Set('MinVal', 0.0)
    iter_cmd.Set('MaxVal', (float(num_rows) - 1.0))

    return True
def validate(ChartTemplateJsonFileName, Title,
             XAxisTitle, XAxisCategories,
             YAxisTitle, YAxisCategories,
             Series, TemplateModifier, SrcDatabase,
             ReportGroup):
    global SQL_RE

    logger = PLLogger.GetLogger('methodology')
    logger.LogInfo('CreateMethodologyChartCommand validate')

    # Validate ChartTemplateJsonFileName
    file_name, found = find_template_file(ChartTemplateJsonFileName)
    if not found:
        return "Invalid Chart Template: %s" % ChartTemplateJsonFileName

    # Validate Series
    if not Series:
        return "Invalid Series specified: %s" % Series

    # Check for empty values
    if not SrcDatabase:
        return 'Empty SrcDatabase property is not allowed'
    if not ReportGroup:
        return 'Empty ReportGroup property is not allowed'

    # Validate property types
    schema = {
        "type": "object",
        "properties": {
            "title": {"type": "string"},
            "xAxis": {"type": "string"},
            "x_categories": {"type": "array",
                             "items": {"allOf": [{"type": "string"}]}},
            "yAxis": {"type": "string"},
            "y_categories": {"type": "array",
                             "items": {"allOf": [{"type": "string"}]}},
            "series_data": {"type": "array", "minItems": 1,
                            "items": {"allOf": [{"type": "string"}]}}
        },
    }

    json_dict = {
        "title": Title,
        "xAxis": XAxisTitle,
        "x_categories": XAxisCategories,
        "yAxis": YAxisTitle,
        "y_categories": YAxisCategories,
        "series_data": Series
    }

    try:
        res = json_utils.validate_json(
            json.dumps(json_dict), json.dumps(schema))
        if res != "":
            logger.LogInfo(res)
            return res
        # Before parsing, neutralize the embedded SQL fragments by putting
        # quotes around them so json.loads can handle the modifier
        if TemplateModifier != "":
            mod_safe = SQL_RE.sub(r'"\1"', TemplateModifier)
            json.loads(mod_safe)
    except ValueError as ve:
        return ("Value Error: " + str(ve))
    except TypeError as te:
        return ("Type Error: " + str(te))
    except Exception as e:
        return ("Error: " + str(e))

    return ""
def run(StmTrafficMix, TrafficMixTagName, Load, LoadUnit):
    plLogger = PLLogger.GetLogger('AllocateTrafficMixLoad2Command')
    obj_list = []
    this_cmd = get_this_cmd()
    if StmTrafficMix:
        obj_list = CCommandEx.ProcessInputHandleVec('StmTrafficMix', [StmTrafficMix])
    if TrafficMixTagName:
        obj_list = obj_list + tag_utils.get_tagged_objects_from_string_names([TrafficMixTagName])
    if len(obj_list) == 0:
        err_str = "Neither StmTrafficMix nor TrafficMixTagName " + \
            "specified a valid StmTrafficMix Object"
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False

    obj_dict = {obj.GetObjectHandle(): obj for obj in obj_list}
    for mix in obj_dict.values():
        mix_info_s = mix.Get('MixInfo')

        # Validate the input MixInfo against its schema
        res = json_utils.validate_json(mix_info_s,
                                       this_cmd.Get('MixInfoJsonSchema'))
        if res != '':
            plLogger.LogError(res)
            this_cmd.Set("Status", res)
            return False

        err_str, mix_info = json_utils.load_json(mix_info_s)
        if err_str != "":
            plLogger.LogError(err_str)
            this_cmd.Set("Status", err_str)
            return False

        component_list = mix_info['components']

        # Record what we will use MIX wide...
        mix_info['load'] = Load
        mix_info['loadUnits'] = LoadUnit

        # Aggregate the individual streamblock information...
        static_list = []
        percent_list = []
        use_percent_list = []

        for component in component_list:
            weight = component["weight"]
            is_percent, act_val, err_str = \
                weight_ops.parse_weight_string(weight)
            if err_str != "":
                err_str = "Failed to parse component weight " \
                    "'{}': {}".format(weight, err_str)
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

            if is_percent:
                static_list.append(0)
                percent_list.append(act_val)
            else:
                static_list.append(act_val)
                percent_list.append(0)
            use_percent_list.append(is_percent)

        total_static_load = sum(static_list)
        total_percent = sum(percent_list)

        # Don't allow the aggregate of static loads to exceed the MIX wide load...
        if total_static_load > Load:
            err_str = 'Total static load ({}) exceeds the ' \
                'configured mix load ({}).'.format(total_static_load, Load)
            plLogger.LogError(err_str)
            this_cmd.Set('Status', err_str)
            return False

        # Don't allow an invalid aggregate of streamblock weights...
        if total_percent > 100:
            err_str = "Total weights ({}) exceed 100%.".format(total_percent)
            plLogger.LogError(err_str)
            this_cmd.Set("Status", err_str)
            return False

        # Warn if there is no MIX wide load left to divide amongst the
        # weighted streamblocks...
        if total_percent > 0 and total_static_load == Load:
            err_str = 'No mix load available for weight distribution'
            plLogger.LogWarn(err_str)
            this_cmd.Set("Status", err_str)

        # Fractional loads are not supported for these load units,
        # so only allow fractions for the others
        allow = (LoadUnit not in ['FRAMES_PER_SECOND', 'INTER_BURST_GAP'])

        # Calculate how much of the load is left for the weighted streamblocks...
        total_weighted_load = Load - total_static_load

        # Calculate the weighted loads from the weights...
        weighted_loads = weight_ops.allocate_weighted_list(total_weighted_load,
                                                           percent_list,
                                                           allow_fraction=allow)

        # Get all of the StmTemplateConfig objects for this MIX...
        templates = mix.GetObjects('StmTemplateConfig')

        # Apply the loads across each streamblock according to their individual preferences...
        # Yes, we are ASSUMING creation order to map components to templates...
        for component, template, weight_load in zip(component_list, templates, weighted_loads):
            is_percent, act_val, err_str = \
                weight_ops.parse_weight_string(component['weight'])
            if not is_percent:
                applied_load = act_val
            else:
                applied_load = weight_load

            # Note what we chose to apply and then apply it...
            component['appliedValue'] = applied_load
            allocate_weighted_load(applied_load, template, LoadUnit)
            config_generator(template, LoadUnit)

        # Update the MIX with our changes...
        mix.Set('MixInfo', json.dumps(mix_info))
    return True
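
# A worked example of the allocation above (numbers are illustrative, and the
# weight-string format is an assumption: a trailing '%' marks a weighted component,
# anything else is a static load).  With Load = 1000 and component weights
# ["300", "20%", "30%"]:
#
#   static_list         = [300, 0, 0]   -> total_static_load = 300
#   percent_list        = [0, 20, 30]   -> total_percent     = 50
#   total_weighted_load = 1000 - 300    = 700
#   weighted_loads      = allocate_weighted_list(700, [0, 20, 30], ...)
#                         # divides the remaining 700 across the weighted
#                         # components according to their weights
#
# Static components keep their own value as appliedValue; weighted components
# receive their share of whatever load is left after the static ones.
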
def run(StmTemplateMix, TrafficMixTagName, Load, LoadUnit):
    '''
    StmTemplateMix is the handle to a StmTrafficMix object (just one object)
    TrafficMixTagName is the name of the tag associated with the StmTrafficMix
    object(s) Load is the traffic load for the entire Mix as the aggregate
    (however, consider how that load is applied to various components and how
    many streams are defined by a given component).
    LoadUnit defines the units context that the Load parameter belongs.

    Within the StmTrafficMix' MixInfo property is the information that
    describes the contents of the Mix. The format for this information is
    JSON. Examples follow:


    Simple east-west traffic with weighted count:
    --------------------------------------------
    {
        "load": 250,
        "loadUnits": "FRAMES_PER_SECOND",
        "components": [
            {
                "baseTemplateFile": "Ipv4_Stream.xml",
                "weight": 75,
                "staticValue": 0,
                "useStaticValue": false,
                "postExpandModify": [
                    {
                        "streamBlockExpand": {
                            "endpointMapping": {
                                "srcBindingTagList": ["East_Ipv4If"],
                                "dstBindingTagList": ["West_Ipv4If"]
                            }
                        }
                    }
                ]
            }
        ]
    }


    Bidirectional east-west traffic with static count:
    -------------------------------------------------
    {
        "load": 250,
        "loadUnits": "FRAMES_PER_SECOND",
        "components": [
            {
                "baseTemplateFile": "Ipv4_Stream.xml",
                "weight": 0,
                "staticValue": 80,
                "useStaticValue": true,
                "postExpandModify": [
                    {
                        "streamBlockExpand": {
                            "endpointMapping": {
                                "srcBindingTagList": ["East_Ipv4If"],
                                "dstBindingTagList": ["West_Ipv4If"],
                                "bidirectional": true
                            }
                        }
                    }
                ]
            }
        ]
    }


    Mesh (for say four devices) on one port:
    (Note that dstBindingTagList is used,
     but is the same as srcBindingTagList.)
    ---------------------------------------
    {
        "load": 250,
        "loadUnits": "FRAMES_PER_SECOND",
        "components": [
            {
                "baseTemplateFile": "AMeshTemplate.xml",
                "weight": 75,
                "staticValue": 0,
                "useStaticValue": false,
                "postExpandModify": [
                    {
                        "streamBlockExpand": {
                            "endpointMapping": {
                                "srcBindingTagList": ["East Ipv4If"],
                                "dstBindingTagList": ["East Ipv4If"]
                            }
                        }
                    }
                ]
            }
        ]
    }

    '''
    plLogger = PLLogger.GetLogger("methodology")
    hnd_reg = CHandleRegistry.Instance()
    ctor = CScriptableCreator()
    this_cmd = get_this_cmd()

    obj_list = []
    if StmTemplateMix:
        obj_list = CCommandEx.ProcessInputHandleVec('StmTrafficMix',
                                                    [StmTemplateMix])
    if TrafficMixTagName:
        obj_list = obj_list + tag_utils.get_tagged_objects_from_string_names(
            [TrafficMixTagName])
    if len(obj_list) == 0:
        err = "Neither StmTrafficMix nor TrafficMixTagName " \
              "specified a valid StmTrafficMix object."
        plLogger.LogError(err)
        this_cmd.Set('Status', err)
        return False

    for trf_mix in obj_list:
        tmi = trf_mix.Get('MixInfo')
        if tmi == '':
            err = "MixInfo is empty"
            plLogger.LogError(err)
            this_cmd.Set('Status', err)
            return False
        err_str = json_utils.validate_json(
            tmi, this_cmd.Get('MixInfoJsonSchema'))
        if err_str != "":
            err = "MixInfo in the StmTrafficMix does not conform to the " \
                  "schema " + this_cmd.Get('MixInfoJsonSchema')
            plLogger.LogError(err)
            this_cmd.Set('Status', err)
            return False
        err_str, mix_data = json_utils.load_json(tmi)
        if err_str != "":
            plLogger.LogError(err_str)
            this_cmd.Set("Status", err_str)
            return False
        mix_comp_set = mix_data.get("components")
        if mix_comp_set is None:
            err = "Invalid JSON in MixInfo: MixInfo does not " \
                  "contain components."
            plLogger.LogError(err)
            this_cmd.Set('Status', err)
            return False

        # Expand the StmTemplateConfig objects
        template_list = trf_mix.GetObjects('StmTemplateConfig')

        for template, mix_comp in zip(template_list, mix_comp_set):

            expand_obj = mix_comp.get('expand')
            target_tag_list = \
                expand_obj.get('targetTagList') if expand_obj else None
            copies_per_parent = \
                expand_obj.get('copiesPerParent') if expand_obj else None
            src_tag_list = \
                expand_obj.get('srcTagList') if expand_obj else None

            # Should be validated by schema, but leaving it just in case
            if expand_obj is not None and target_tag_list is None:
                err = 'Error in {}: expand does not ' \
                    'have required targetTagList'.format(template.Get('Name'))
                plLogger.LogError(err)
                this_cmd.Set('Status', err)
                return False

            # At this point, target_tag_list is None if we didn't find a valid
            # expand tag (for raw stream blocks)

            # Note that the streamblock generated for a raw streamblock will
            # not be project-based.

            proj_sb = template.GetObject("StreamBlock",
                                         RelationType("GeneratedObject"))
            if proj_sb is not None:
                err_str = "Cannot expand StreamBlock template, " + \
                    "StreamBlock already exists. Use " + \
                    PKG + "DeleteTemplatesAndGenerated" + \
                    "ObjectsCommand to clean up generated " + \
                    "objects before expanding."
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

            cmd = ctor.CreateCommand(PKG + ".ExpandTemplateCommand")
            cmd.SetCollection("StmTemplateConfigList",
                              [template.GetObjectHandle()])
            # If it is a raw stream block, the target tag list is set
            if target_tag_list is not None:
                cmd.SetCollection('TargetTagList', target_tag_list)
            if copies_per_parent is not None:
                cmd.Set('CopiesPerParent', copies_per_parent)
            if src_tag_list is not None:
                cmd.SetCollection('SrcTagList', src_tag_list)
            cmd.Execute()
            pf_state = cmd.Get('PassFailState')
            status = cmd.Get('Status')
            cmd.MarkDelete()
            if pf_state == 'FAILED':
                err = '{}.ExpandTemplateCommand failed: {}' \
                    .format(PKG, status)
                plLogger.LogError(err)
                this_cmd.Set('Status', err)
                return False

            all_sb_list = template.GetObjects('StreamBlock',
                                              RelationType('GeneratedObject'))
            if not all_sb_list:
                err = 'Template {} ({}) did not contain stream blocks' \
                    .format(template.Get('Name'), template.GetObjectHandle())
                plLogger.LogError(err)
                this_cmd.Set('Status', err)
                return False
            proj_sb_list = [x for x in all_sb_list
                            if x.GetParent().IsTypeOf('Project')]
            if len(proj_sb_list) > 1:
                err_str = "Handling of more than one Project-level streamblock by the " + \
                    "ExpandTrafficMixCommand is not supported."
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False
            for sb in all_sb_list:

                # Retrieve all tags for this streamblock
                sb_tag_list = sb.GetObjects('Tag', RelationType('UserTag'))

                plLogger.LogDebug(show_prop_values("sb_tag_list", sb_tag_list))

                # Gather the endpoints and set up the project-level
                # streamblock. If there aren't any endpoints specified, this
                # is probably a raw streamblock.

                # Perform each expand op...
                for post_expand in mix_comp.get('postExpandModify', []):

                    # Get the streamBlockExpand entry...
                    sb_expand = post_expand.get('streamBlockExpand')
                    if sb_expand is None:
                        continue

                    # Configure the relations and keep track of
                    # src and dst bindings.  Need to handle SrcBinding
                    # and DstBinding depending on the trafficPattern and
                    # the number of bound endpoints.
                    ep_map = sb_expand.get('endpointMapping', None)
                    if ep_map is None:
                        continue
                    bidirectional = ep_map.get("bidirectional", False)
                    src_ep_tag_list = ep_map.get("srcBindingTagList", [])
                    dst_ep_tag_list = ep_map.get("dstBindingTagList", [])

                    if bidirectional:
                        src_ep_tag_list_list = [src_ep_tag_list, dst_ep_tag_list]
                        dst_ep_tag_list_list = [dst_ep_tag_list, src_ep_tag_list]
                    else:
                        src_ep_tag_list_list = [src_ep_tag_list]
                        dst_ep_tag_list_list = [dst_ep_tag_list]

                    src_tagged = []
                    dst_tagged = []
                    for src_ep_tag_list, dst_ep_tag_list in \
                            zip(src_ep_tag_list_list, dst_ep_tag_list_list):
                        src_tagged = tag_utils.get_tagged_objects_from_string_names(
                            src_ep_tag_list)
                        dst_tagged = tag_utils.get_tagged_objects_from_string_names(
                            dst_ep_tag_list)

                        relation_list = ["ParentChild", "GeneratedObject"]

                        src_list = []
                        src_bind_list = ep_map.get("srcBindingClassList", None)
                        if src_bind_list:
                            for src in src_tagged:
                                data_utils.rsearch(src, src_bind_list, relation_list, src_list)
                        else:
                            src_list = src_tagged

                        dst_list = []
                        dst_bind_list = ep_map.get("dstBindingClassList", None)
                        if dst_bind_list:
                            for dst in dst_tagged:
                                data_utils.rsearch(dst, dst_bind_list, relation_list, dst_list)
                        else:
                            dst_list = dst_tagged

                        src_count = len(src_list)
                        dst_count = len(dst_list)
                        plLogger.LogDebug(show_prop_values('src_ep', src_list))
                        plLogger.LogDebug(show_prop_values('dst_ep', dst_list))

                        if src_count == 0 or dst_count == 0:
                            err = "No tagged source or destination " \
                                  "endpoints were found for the " \
                                  "streamblock in template: {}; cannot " \
                                  "bind the endpoints.".format(template.Get("Name"))
                            plLogger.LogError(err)
                            get_this_cmd().Set('Status', err)
                            return False

                        # Update the Project-level streamblock's endpoint
                        # bindings based on the src_list and dst_list and the
                        # trafficPattern

                        if sb.Get("TrafficPattern") == "PAIR":
                            min_count = min(src_count, dst_count)
                            if src_count == 1:
                                plLogger.LogDebug("Streamblocks are configured between " +
                                                  "a single src endpoint and all dst " +
                                                  "endpoints.  src_list has " +
                                                  str(src_count) + " dst_list has " +
                                                  str(dst_count))
                                for ep in dst_list:
                                    sb.AddObject(src_list[0],
                                                 RelationType("SrcBinding"))
                                    sb.AddObject(ep, RelationType("DstBinding"))
                            elif dst_count == 1:
                                plLogger.LogDebug("Streamblocks are configured between " +
                                                  "all src endpoints and a single dst " +
                                                  "endpoint.  src_list has " +
                                                  str(src_count) + " dst_list has " +
                                                  str(dst_count))
                                for ep in src_list:
                                    sb.AddObject(ep, RelationType("SrcBinding"))
                                    sb.AddObject(dst_list[0],
                                                 RelationType("DstBinding"))
                            else:
                                if src_count != dst_count:
                                    plLogger.LogDebug("Streamblocks are configured " +
                                                      "between endpoint pairs until " +
                                                      "one side runs out of endpoints: " +
                                                      "src_list has " + str(src_count) +
                                                      " dst_list has " + str(dst_count))
                                index = 0
                                for ep in src_list:
                                    if index < min_count:
                                        sb.AddObject(ep, RelationType("SrcBinding"))
                                        index = index + 1
                                index = 0
                                for ep in dst_list:
                                    if index < min_count:
                                        sb.AddObject(ep, RelationType("DstBinding"))
                                        index = index + 1
                        else:
                            # BACKBONE or FULLMESH
                            for ep in src_list:
                                sb.AddObject(ep, RelationType("SrcBinding"))
                            for ep in dst_list:
                                sb.AddObject(ep, RelationType("DstBinding"))

                    parent = sb.GetParent()
                    if parent.IsTypeOf('Project'):
                        # Call StreamBlockExpandCommand
                        exp_cmd = ctor.CreateCommand("StreamBlockExpandCommand")
                        exp_cmd.SetCollection("StreamBlockList",
                                              [sb.GetObjectHandle()])
                        exp_cmd.Execute()
                        if exp_cmd.Get("State") == "FAILED":
                            err_str = "Failed to expand Project-level streamblock: " + \
                                sb.Get("Name") + " with handle: " + \
                                str(sb.GetObjectHandle())
                            plLogger.LogError(err_str)
                            this_cmd.Set("Status", err_str)
                            exp_cmd.MarkDelete()
                            return False
                        # Fetch the expanded streamblocks before discarding the command
                        sb_hdl_list = exp_cmd.GetCollection("ExpandedStreamBlockList")
                        exp_cmd.MarkDelete()
                        gen_sb_list = [hnd_reg.Find(x) for x in sb_hdl_list]
                    else:
                        # For the case of port-based, just return the one
                        gen_sb_list = [sb]
                    for sb in gen_sb_list:
                        if sb is None:
                            continue
                        plLogger.LogDebug("adding gen sb: " + sb.Get("Name") +
                                          " with handle: " + str(sb))
                        template.AddObject(sb, RelationType("GeneratedObject"))

                        # Tag the duplicated port-level streamblock with
                        # original's tags
                        if parent.IsTypeOf('Project'):
                            for tag in sb_tag_list:
                                sb.AddObject(tag, RelationType('UserTag'))
                        dst_list = sb.GetObjects('Scriptable',
                                                 RelationType('DstBinding'))
                        dst_port_map = {}

                        # Go through each destination endpoint and find the port
                        for dst_ep in dst_list:
                            walk = dst_ep
                            # If we've walked to Project we've run out of parents
                            while not walk.IsTypeOf('Project'):
                                if walk.IsTypeOf('EmulatedDevice'):
                                    port = walk.GetObject('Port',
                                                          RelationType('AffiliationPort'))
                                    if port.GetObjectHandle() not in dst_port_map:
                                        dst_port_map[port.GetObjectHandle()] = port
                                    break
                                else:
                                    walk = walk.GetParent()
                        for port in dst_port_map.values():
                            sb.AddObject(port, RelationType('ExpectedRx'))

        # Allocate the Load based on the Weight
        cmd = ctor.CreateCommand(PKG_TRF + ".AllocateTrafficMixLoad")
        cmd.Set("Load", float(Load))
        cmd.Set("LoadUnit", LoadUnit)
        cmd.Set("StmTrafficMix", trf_mix.GetObjectHandle())
        cmd.Execute()
        cmd.MarkDelete()

    return True
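
# A summary of the PAIR endpoint-binding rules implemented in run() above (counts
# are illustrative):
#
#   src_count == 1, dst_count == 3  -> the single source is bound against each of
#                                      the three destinations
#   src_count == 3, dst_count == 1  -> every source is bound against the single
#                                      destination
#   src_count == 4, dst_count == 2  -> endpoints are bound index-by-index until the
#                                      shorter list runs out (two pairs)
#
# For BACKBONE or FULLMESH traffic patterns every source and every destination is
# simply added as a SrcBinding / DstBinding on the streamblock, and a bidirectional
# endpointMapping runs the whole binding pass twice with src and dst swapped.
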
def validate(ChartTemplateJsonFileName, Title,
             XAxisTitle, XAxisCategories,
             YAxisTitle, YAxisCategories,
             Series, SeriesDataType, CustomModifier,
             UseMultipleResultsDatabases, UseSummary, ReportGroup):
    global SQL_RE

    logger = PLLogger.GetLogger('ExportDbChartCommand')
    logger.LogInfo(' ExportDbChartCommand validate')

    # Validate ChartTemplateJsonFileName
    file_name, found = find_template_file(ChartTemplateJsonFileName)
    if not found:
        return "Invalid Chart Template: %s" % ChartTemplateJsonFileName

    if Series is None or Series == []:
        return "Invalid Series specified: %s" % Series

    # Validate property types
    schema = {
        "type": "object",
        "properties": {
            "title": {"type": "string"},
            "xAxis": {"type": "string"},
            "x_categories": {"type": "array",
                             "items": {"allOf": [{"type": "string"}]}},
            "yAxis": {"type": "string"},
            "y_categories": {"type": "array",
                             "items": {"allOf": [{"type": "string"}]}},
            "series_data": {"type": "array", "minItems": 1,
                            "items": {"allOf": [{"type": "string"}]}},
            "series_data_type": {"enum": ["SINGLE", "PAIR"]}
        },
    }
    json_dict = {
        "title": Title,
        "xAxis": XAxisTitle,
        "x_categories": XAxisCategories,
        "yAxis": YAxisTitle,
        "y_categories": YAxisCategories,
        "series_data": Series,
        "series_data_type": SeriesDataType
    }

    try:
        res = json_utils.validate_json(
            json.dumps(json_dict), json.dumps(schema))
        if res != "":
            logger.LogInfo(res)
            return res
        # Before parsing, neutralize the embedded SQL fragments by putting
        # quotes around them so json.loads can handle the modifier
        if CustomModifier != "":
            mod_safe = SQL_RE.sub(r'"\1"', CustomModifier)
            json.loads(mod_safe)
    except ValueError as ve:
        return ("Value Error: " + str(ve))
    except TypeError as te:
        return ("Type Error: " + str(te))
    except Exception as e:
        return ("Error: " + str(e))

    # Validate Series given as pair values if SeriesDataType is set to PAIR
    if SeriesDataType == "PAIR":
        for s in Series:
            if not is_sql_query(s):
                if len(s.split(',')) != 2:
                    return ("Series data must be pair values")

    return ""
def run(StmTemplateMix, TagData, ObjectList,
        IgnoreEmptyTags, TagList, CurrVal, Iteration):
    '''
        StmTemplateMix: Handle of StmTemplateMix Object
        TagData: JSON String of Tags Needed in Sequence
        ObjectList: (from Base Class)
        IgnoreEmptyTags: (from Base Class)
        TagList: (from Base Class)
        CurrVal: Current Row Index (from Base Class)
        Iteration: (from Base Class)
    '''
    hnd_reg = CHandleRegistry.Instance()
    plLogger = PLLogger.GetLogger("Methodology")

    # Check Mandatory Arguments
    if (StmTemplateMix is None) or (StmTemplateMix == ""):
        plLogger.LogError("StmTemplateMix is a mandatory argument for " +
                          "IteratorConfigMixParamsCommand.")
        return False
    if (TagData is None) or (TagData == ""):
        plLogger.LogError("TagData is a mandatory argument for " +
                          "IteratorConfigMixParamsCommand.")
        return False

    err_str, tag_data = json_utils.load_json(TagData)
    if err_str != "":
        plLogger.LogError(err_str)
        return False

    # MixInfo in StmTemplateMix contains JSON String of Input Table
    mix_obj = hnd_reg.Find(StmTemplateMix)
    if mix_obj is None:
        plLogger.LogError("No objects with handle, " +
                          str(StmTemplateMix) +
                          ", found.")
        return False
    elif not mix_obj.IsTypeOf("StmTemplateMix"):
        plLogger.LogError(str(StmTemplateMix) +
                          " does not refer to a StmTemplateMix object.")
        return False

    # Fetch the json string from the MIX object...
    mix_info_json = mix_obj.Get("MixInfo")

    # Validate the json against its schema...
    res = json_utils.validate_json(mix_info_json, get_this_cmd().Get('MixJsonSchema'))
    if res != '':
        plLogger.LogError(res)
        return False

    # Load the json information...
    err_str, mix_info = json_utils.load_json(mix_info_json)
    if err_str != "":
        plLogger.LogError(err_str)
        return False
    components = mix_info["components"]

    # Get row by matching index (CurrVal) with TableData
    row = components[int(CurrVal)]
    # DEBUG PRINT
    plLogger.LogDebug("component " + str(CurrVal) + ":")
    for prop in row:
        plLogger.LogDebug("   " + str(prop) + " = " + str(row[prop]))

    # Setup CreateTemplateConfigCommand
    temp_conf_tag_name = tag_data["templateConfigurator"]
    tagged_obj_list = tag_utils.get_tagged_objects_from_string_names([temp_conf_tag_name])
    if len(tagged_obj_list) == 0:
        plLogger.LogError("No objects tagged with " +
                          str(temp_conf_tag_name) +
                          " tag name.")
        return False

    create_temp_cfg_cmd = tagged_obj_list[0]
    create_temp_cfg_cmd.Set("StmTemplateMix", StmTemplateMix)
    create_temp_cfg_cmd.Set("InputJson", json.dumps(row))
    create_temp_cfg_cmd.Set("AutoExpandTemplate", False)

    return True
def run(TargetObjectList, TargetObjectTagList, SrcObjectList,
        SrcObjectTagList, RouteCount):
    plLogger = PLLogger.GetLogger("methodology")
    this_cmd = get_this_cmd()

    # Process targets for RouterConfigs
    routers_dict = get_routers(TargetObjectList, TargetObjectTagList)
    # If we get empty dictionary here, error has already been created
    if not routers_dict:
        return False

    # Process sources for RouteConfig XMLs
    rmix_list = []
    if SrcObjectList:
        rmix_list = CCommandEx.ProcessInputHandleVec('StmTemplateMix',
                                                     SrcObjectList)
    if SrcObjectTagList:
        rmix_list = rmix_list + \
            tag_utils.get_tagged_objects_from_string_names(SrcObjectTagList)
    rmix_list = dm_utils.remove_dup_scriptable(rmix_list)
    if len(rmix_list) == 0:
        err = "Neither SrcObjectList nor SrcObjectTagList " \
            "specified a valid SrcObjectList object."
        plLogger.LogError(err)
        this_cmd.Set('Status', err)
        return False

    # Each RouteConfig we find in the StmTemplateMix'es StmTemplateConfigs is
    # a source point to use in a ExpandTemplateCommand (with targets being
    # routers from the StmProtocolMix)
    for rmix in rmix_list:
        r_mi = rmix.Get('MixInfo')
        if r_mi == '':
            err = "MixInfo is empty for {}".format(rmix.Get('Name'))
            plLogger.LogError(err)
            this_cmd.Set('Status', err)
            return False
        err_str = json_utils.validate_json(r_mi,
                                           this_cmd.Get('MixInfoJsonSchema'))
        if err_str != '':
            err = "MixInfo in the StmTemplateMix does not conform to the " \
                  "schema " + this_cmd.Get('MixInfoJsonSchema')
            plLogger.LogError(err)
            this_cmd.Set('Status', err)
            return False
        err_str, mix_data = json_utils.load_json(r_mi)
        if err_str != "":
            plLogger.LogError(err_str)
            this_cmd.Set("Status", err_str)
            return False
        mix_comp_list = mix_data.get('components')
        if mix_comp_list is None:
            err = "Invalid JSON in MixInfo: MixInfo does not " \
                  "contain components."
            plLogger.LogError(err)
            this_cmd.Set('Status', err)
            return False
        tmpl_cfg_list = rmix.GetObjects('StmTemplateConfig')
        for tmpl_cfg, mix_comp in zip(tmpl_cfg_list, mix_comp_list):
            # At this point, we have the template and associated component
            # entry, and we need to determine what kind of template file is
            # being loaded

            # First check the expand
            exp_list = mix_comp.get('postExpandModify', [])
            wiz_list = []
            for exp in exp_list:
                wiz = exp.get('bllWizardExpand')
                if wiz is None:
                    continue
                wiz_list.append(wiz)
            if wiz_list:
                if not process_wizard_args(tmpl_cfg, wiz_list):
                    # Error message handled in called functions
                    return False
            else:
                if not process_route_args(tmpl_cfg, routers_dict):
                    # Error message handled in called function
                    return False

    # After the routes are created, call allocate for each argument
    with AutoCommand(PKG_RTG + '.AllocateRouteMixCountCommand') as cmd:
        cmd.SetCollection('RouteMixList', [r.GetObjectHandle() for r in rmix_list])
        cmd.Set('RouteCount', RouteCount)
        cmd.Execute()
        pf_state = cmd.Get('PassFailState')
        status = cmd.Get('Status')
    if pf_state == 'FAILED':
        err = '{}.AllocateRouteMixCountCommand failed: {}' \
            .format(PKG_RTG, status)
        plLogger.LogError(err)
        this_cmd.Set('Status', err)
        return False
    return True
def run(StmTemplateMix, InputJson, AutoExpandTemplate,
        CopiesPerParent, SrcTagList, TargetTagList):
    plLogger = PLLogger.GetLogger('methodology')
    plLogger.LogDebug("CreateTemplateConfigCommand.run")

    hnd_reg = CHandleRegistry.Instance()
    ctor = CScriptableCreator()
    this_cmd = get_this_cmd()
    project = CStcSystem.Instance().GetObject("Project")

    if InputJson == "":
        err_str = "InputJson is an empty string."
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False

    # Validate the InputJson against the schema
    res = json_utils.validate_json(InputJson,
                                   this_cmd.Get("InputJsonSchema"))
    if res != "":
        err_str = "InputJson is invalid or does not conform to the " + \
            "schema: " + res
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False

    if StmTemplateMix != "" and StmTemplateMix != 0:
        mix = hnd_reg.Find(StmTemplateMix)
        if mix is None:
            err_str = "StmTemplateMix with given handle: " + \
                str(StmTemplateMix) + " is invalid."
            plLogger.LogError(err_str)
            this_cmd.Set("Status", err_str)
            return False
        elif not mix.IsTypeOf("StmTemplateMix"):
            err_str = "Object with given handle: " + \
                str(StmTemplateMix) + " is a " + \
                mix.GetType() + ".  If StmTemplateMix is " + \
                "specified, object must be an StmTemplateMix."
            plLogger.LogError(err_str)
            this_cmd.Set("Status", err_str)
            return False
        parent = mix
    else:
        parent = project

    template = ctor.Create("StmTemplateConfig", parent)
    this_cmd.Set("StmTemplateConfig", template.GetObjectHandle())

    # Breakdown the json
    err_str, conf_data = json_utils.load_json(InputJson)
    if err_str != "":
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False

    plLogger.LogDebug("conf_data: " + str(conf_data))

    # Do the load first
    if "baseTemplateFile" not in conf_data:
        err_str = "InputJson is missing a baseTemplateFile."
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False
    xml_file = conf_data["baseTemplateFile"]

    xml_val = xml_utils.load_xml_from_file(xml_file)
    if xml_val is None:
        err_str = "Was unable to load template XML from " + xml_file
        plLogger.LogError(err_str)
        this_cmd.Set("Status", err_str)
        return False

    # Update the prefixes
    tag_prefix = ""
    if "tagPrefix" in conf_data and conf_data["tagPrefix"] != "":
        tag_prefix = conf_data["tagPrefix"]
    plLogger.LogDebug("using tag_prefix: " + tag_prefix)

    xml_val = xml_utils.add_prefix_to_tags(tag_prefix, xml_val)
    template.Set("TemplateXml", xml_val)
    plLogger.LogDebug("conf_data: " + str(conf_data))

    # Iterate over the objects in the array and apply the appropriate
    # template modification.  Order is determined by the list order.
    for mod_data in conf_data.get("modifyList", []):
        plLogger.LogDebug("mod_data: " + str(mod_data))
        plLogger.LogDebug("mod_data.keys(): " + str(mod_data.keys()))
        # Apply each modification type in turn, aborting on the first failure

        # Merge stuff in mergeList
        for merge_data in mod_data.get("mergeList", []):
            if not run_merge(template, tag_prefix, merge_data):
                err_str = "Failed to merge XML into the StmTemplateConfig " + \
                    "given JSON specified as: " + str(merge_data)
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

        # Process objects in the addObjectList
        for obj_data in mod_data.get("addObjectList", []):
            if not run_objectlist(template, tag_prefix, obj_data):
                err_str = "Failed to add object into the StmTemplateConfig " + \
                    "given JSON specified as: " + str(obj_data)
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

        # Modify the stuff in the PropertyValueList
        for prop_set in mod_data.get('propertyValueList', []):
            if not run_modify(template, tag_prefix, prop_set):
                err_str = "Failed to modify properties in the " + \
                    "StmTemplateConfig given JSON specified as: " + \
                    str(prop_set)
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

        # Modify the stuff in the StmPropertyModifierList
        for prop_set in mod_data.get("stmPropertyModifierList", []):
            if not run_config_prop_modifier(template, tag_prefix, prop_set):
                err_str = "Failed to add or configure " + \
                    "StmPropertyModifier objects in the StmTemplateConfig " + \
                    "given JSON specified as: " + str(prop_set)
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

        # Modify PDUs
        for pdu_mod in mod_data.get("pduModifierList", []):
            if not run_config_pdu(template, tag_prefix, pdu_mod):
                err_str = "Failed to modify PDU data in a streamblock's " + \
                    "FrameConfig in the StmTemplateConfig given JSON " + \
                    "specified as: " + str(pdu_mod)
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

        # Modify the stuff in the RelationList
        for rel_mod in mod_data.get("relationList", []):
            if not run_config_relation(template, tag_prefix, rel_mod):
                err_str = "Failed to add or remove a relation in the " + \
                    "StmTemplateConfig given JSON specified as " + str(rel_mod)
                plLogger.LogError(err_str)
                this_cmd.Set("Status", err_str)
                return False

    # Handle Expand if necessary
    if AutoExpandTemplate:
        res = run_expand(template, TargetTagList,
                         SrcTagList, CopiesPerParent)
        if not res:
            err_str = "Failed to expand the StmTemplateConfig."
            plLogger.LogError(err_str)
            this_cmd.Set("Status", err_str)
            return False

    this_cmd.Set("Status", "")
    return True
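
# A minimal InputJson sketch for the command above (file name, tag, and property
# values are illustrative; the authoritative shape is the command's
# InputJsonSchema):
#
#   {
#       "baseTemplateFile": "Ipv4_Stream.xml",
#       "tagPrefix": "East_",
#       "modifyList": [
#           {"propertyValueList": [
#               {"className": "EmulatedDevice",
#                "tagName": "East_Devices",
#                "propertyValueList": {"DeviceCount": 10}}]}
#       ]
#   }
#
# Entries in modifyList are applied in list order: mergeList, addObjectList,
# propertyValueList, stmPropertyModifierList, pduModifierList, relationList.
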
def test_validate_json(stc):
    plLogger = PLLogger.GetLogger("test_validate_json")
    plLogger.LogInfo("start")

    # Build a simple schema
    s_dict = {}
    s_dict["type"] = "object"
    s_dict["required"] = ["devTag", "weight", "modList"]
    s_dict["properties"] = {}
    s_dict["properties"]["devTag"] = {"type": "string"}
    s_dict["properties"]["age"] = {"type": "number"}
    s_dict["properties"]["weight"] = {"type": "number"}

    mod_dict = {}
    mod_dict["type"] = "array"
    mod_dict["items"] = {}
    mod_dict["items"]["type"] = "object"
    mod_dict["items"]["properties"] = {}
    mod_dict["items"]["properties"]["key"] = {"type": "string"}
    mod_dict["items"]["properties"]["name"] = {"type": "string"}
    mod_dict["items"]["required"] = ["key"]

    s_dict["properties"]["modList"] = mod_dict

    # Build sample JSON
    j_dict = {}
    j_dict["devTag"] = "devTag Name"
    j_dict["weight"] = 100.0
    j_dict["modList"] = []
    j_dict["modList"].append({"name": "me", "key": "me.123"})
    j_dict["modList"].append({"name": "you", "key": "you.123"})

    schema_str = json.dumps(s_dict)
    json_str = json.dumps(j_dict)
    plLogger.LogInfo("schema_str: " + schema_str)
    plLogger.LogInfo("json_str: " + json_str)

    # Positive Test (no schema)
    res = json_utils.validate_json(json_str)
    assert res == ""

    # Positive Test (with schema)
    res = json_utils.validate_json(json_str, schema_str)
    assert res == ""

    # Negative Test (invalid JSON)
    res = json_utils.validate_json("invalid_json_str")
    assert res.find("is not valid JSON.") != -1

    # Negative Test (valid JSON, invalid schema)
    res = json_utils.validate_json(json_str, "invalid_schema")
    assert res.find("Schema string: ") != -1

    # Negative Test (invalid JSON, valid schema)
    res = json_utils.validate_json("invalid_json", schema_str)
    assert res.find("is not valid JSON.") != -1

    # Build a different schema
    s_dict2 = {}
    s_dict2["type"] = "object"
    s_dict2["required"] = ["devTag", "weight", "yada"]
    s_dict2["properties"] = {}
    s_dict2["properties"]["devTag"] = {"type": "string"}
    s_dict2["properties"]["age"] = {"type": "number"}
    s_dict2["properties"]["weight"] = {"type": "number"}
    s_dict2["properties"]["yada"] = {"type": "number"}
    schema_str2 = json.dumps(s_dict2)

    # Validate schema
    res = json_utils.validate_json(schema_str2)
    assert res == ""

    # Negative Test (valid JSON, wrong schema)
    res = json_utils.validate_json(json_str, schema_str2)
    assert res.find("JSON object does not conform to given schema") != -1