Example #1
def generate_snapshots_from_mastersnapshot_file(mastersnapshot_file):
    """
    Each master snapshot file from the filesystem is loaded as a JSON data structure,
    and snapshots are generated for all the nodes in this data structure.
    """
    mastersnapshot_file_name = '%s.json' % mastersnapshot_file if mastersnapshot_file and not \
        mastersnapshot_file.endswith('.json') else mastersnapshot_file
    mastersnapshot_json_data = json_from_file(mastersnapshot_file_name)
    if not mastersnapshot_json_data:
        logger.error("masterSnapshot file %s looks to be empty, next!...",
                     mastersnapshot_file)
        return {}, {}

    if "connector" in mastersnapshot_json_data and "remoteFile" in mastersnapshot_json_data and mastersnapshot_json_data[
            "connector"] and mastersnapshot_json_data["remoteFile"]:
        _, pull_response = pull_json_data(mastersnapshot_json_data)
        if not pull_response:
            return {}, {}
    logger.debug(json.dumps(mastersnapshot_json_data, indent=2))
    parts = mastersnapshot_file_name.rsplit('.', 1)
    snapshot_file_name = '%s_gen.%s' % (parts[0], parts[1])
    snapshot_json_data = json_from_file(snapshot_file_name)
    if not snapshot_json_data:
        snapshot_json_data = {}
    snapshot_data = generate_mastersnapshots_from_json(
        mastersnapshot_json_data, snapshot_json_data)
    # save_json_to_file(mastersnapshot_json_data, mastersnapshot_file)
    if exists_file(snapshot_file_name):
        remove_file(snapshot_file_name)

    save_json_to_file(snapshot_json_data, snapshot_file_name)
    return snapshot_data, mastersnapshot_json_data
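Every example below leans on the same json_from_file helper. Its implementation is not part of this listing, but the test in Example #18 pins down its contract: it returns None for a None path, a missing file, or a file that does not contain valid JSON, and an OrderedDict for valid JSON. The following is a minimal sketch consistent with that contract; the escape_chars handling is an assumption inferred from call sites such as json_from_file(parameter, escape_chars=['$']).

import collections
import json
import os

def json_from_file(json_file, escape_chars=None,
                   object_pairs_hook=collections.OrderedDict):
    """Load JSON from a file; return None on any failure instead of raising."""
    json_data = None
    if json_file and os.path.exists(json_file):
        try:
            with open(json_file) as infile:
                data = infile.read()
                for char in escape_chars or []:
                    # Assumption: escape special characters (e.g. '$') so that
                    # later template processing does not interpolate them.
                    data = data.replace(char, '\\\\' + char)
                json_data = json.loads(data, object_pairs_hook=object_pairs_hook)
        except (ValueError, OSError):
            json_data = None
    return json_data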
Example #2
 def generate_template_json(self):
     """
     generate the template json from template and parameter files
     """
     stars = '*' * 25
     template_json = json_from_file(self.get_template())
     self.replace_spacial_characters(template_json)
     gen_template_json = None
     if template_json:
         gen_template_json = copy.deepcopy(template_json)
         if 'parameters' not in template_json:
             template_json['parameters'] = {}
         self.gparams = template_json['parameters']
         for param in self.get_parameter():
             param_json = json_from_file(param)
             if param_json and 'parameters' in param_json and param_json['parameters']:
                 for key, value in param_json['parameters'].items():
                     # if key in template_json['parameters']:
                     if "value" in value:
                         if key not in template_json['parameters']:
                             template_json['parameters'][key] = {'value': value['value']}
                         else:
                             template_json['parameters'][key]['value'] = value['value']
                     else:
                         logger.error("From parameter %s was not replaced.", key)
         gen_template_json['parameters'] = self.gparams
         # print('%s Updated Parameters %s' % (stars, stars))
         # print(json.dumps(template_json['parameters'], indent=2))
         if 'variables' in template_json:
             self.gvariables = template_json['variables']
             new_resource = self.process_resource(template_json['variables'])
             # print('%s Original Variables %s' % (stars, stars))
             # print(json.dumps(template_json['variables'], indent=2))
             # print('%s Updated Variables %s' % (stars, stars))
             # print(json.dumps(new_resource, indent=2))
             # Second pass, because some variables are defined in terms of parameters and functions like concat, substr
             self.gvariables = self.process_resource(new_resource)
             gen_template_json['variables'] = self.gvariables
         if 'resources' in template_json:
             new_resources = []
             for resource in template_json['resources']:
                 is_copy, copy_resources = self.handle_copy(resource)
                 if is_copy:
                     for copy_resource in copy_resources:
                         new_resource = self.process_resource(copy_resource)
                         new_resources.append(new_resource)
                 else:
                     new_resource = self.process_resource(resource)
                     new_resources.append(new_resource)
                 # print('%s Original Resource %s' % (stars, stars))
                 # print(json.dumps(resource, indent=2))
                 # print('%s Updated Resource %s' % (stars, stars))
                 # print(json.dumps(new_resource, indent=2))
             gen_template_json['resources'] = new_resources
     return gen_template_json
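The parameter merge in the loop above takes each {key: {'value': ...}} entry from a parameter file and either creates the parameter in the template or overwrites its value. A small self-contained illustration with hypothetical data:

# Hypothetical data mirroring the merge loop in generate_template_json.
template_params = {'location': {'type': 'string', 'value': 'eastus'}}
param_file = {'parameters': {'location': {'value': 'westus'},
                             'sku': {'value': 'Standard_LRS'}}}
for key, value in param_file['parameters'].items():
    if 'value' in value:
        if key not in template_params:
            template_params[key] = {'value': value['value']}
        else:
            template_params[key]['value'] = value['value']
print(template_params)
# {'location': {'type': 'string', 'value': 'westus'}, 'sku': {'value': 'Standard_LRS'}}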
Example #3
def container_snapshots_filesystem(container):
    """
    Get snapshot and mastersnapshot list used in all test/mastertest files of a container from the filesystem.
    This gets the list of all the snapshots/mastersnapshots used in the container.
    The list is used to avoid populating the snapshots/mastersnapshots multiple times when the same
    snapshots/mastersnapshots are used in different test/mastertest files of a container.
    The configuration of the default path is configured in config.ini.
    """
    snapshots = []
    logger.info("Starting to get list of snapshots")
    reporting_path = config_value('REPORTING', 'reportOutputFolder')
    json_dir = '%s/%s/%s' % (framework_dir(), reporting_path, container)
    logger.info(json_dir)
    singletest = get_from_currentdata(SINGLETEST)
    test_files = get_json_files(json_dir, JSONTEST)
    logger.info('\n'.join(test_files))
    for test_file in test_files:
        test_json_data = json_from_file(test_file)
        if test_json_data:
            snapshot = test_json_data['snapshot'] if 'snapshot' in test_json_data else ''
            if snapshot:
                file_name = snapshot if snapshot.endswith('.json') else '%s.json' % snapshot
                if singletest:
                    testsets = get_field_value_with_default(test_json_data, 'testSet', [])
                    for testset in testsets:
                        for testcase in testset['cases']:
                            if ('testId' in testcase and testcase['testId'] == singletest) or \
                                    ('masterTestId' in testcase and testcase['masterTestId'] == singletest):
                                if file_name not in snapshots:
                                    snapshots.append(file_name)
                else:
                    snapshots.append(file_name)

    test_files = get_json_files(json_dir, MASTERTEST)
    logger.info('\n'.join(test_files))
    for test_file in test_files:
        test_json_data = json_from_file(test_file)
        if test_json_data:
            snapshot = test_json_data['masterSnapshot'] if 'masterSnapshot' in test_json_data else ''
            if snapshot:
                file_name = snapshot if snapshot.endswith('.json') else '%s.json' % snapshot
                parts = file_name.rsplit('.', 1)
                file_name = '%s_gen.%s' % (parts[0], parts[-1])
                if singletest:
                    testsets = get_field_value_with_default(test_json_data, 'testSet', [])
                    for testset in testsets:
                        for testcase in testset['cases']:
                            if ('testId' in testcase and testcase['testId'] == singletest) or \
                                    ('masterTestId' in testcase and testcase['masterTestId'] == singletest):
                                if file_name not in snapshots:
                                    snapshots.append(file_name)
                else:
                    snapshots.append(file_name)
    return list(set(snapshots))
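Note the naming convention shared with Example #1: a master snapshot file maps to its generated snapshot file by inserting _gen before the extension. A quick sketch:

# Hypothetical masterSnapshot name and its generated counterpart.
file_name = 'masterconfig.json'
parts = file_name.rsplit('.', 1)
print('%s_gen.%s' % (parts[0], parts[-1]))  # masterconfig_gen.json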
Example #4
    def generate_template_json(self):
        """
        generate the template json from template and parameter files
        """
        gen_template_json = None
        template_json = None
        if self.get_template().endswith(".yaml") and exists_file(
                self.get_template()):
            template_json = self.yaml_to_json(self.get_template())
        elif self.get_template().endswith(".json"):
            template_json = json_from_file(self.get_template(),
                                           object_pairs_hook=None)
            # template_json = self.json_from_file(self.get_template())

        logger.info(self.get_template())
        if not template_json:
            logger.error("Invalid path! No file found at : %s",
                         self.get_template())
            return gen_template_json

        if "AWSTemplateFormatVersion" not in template_json:
            logger.error(
                "Invalid file content : file does not contains 'AWSTemplateFormatVersion' field."
            )
            return gen_template_json

        if template_json:
            gen_template_json = copy.deepcopy(template_json)
            if 'Parameters' in template_json:
                self.gparams = template_json['Parameters']
                if self.parameter_file:
                    parameters = json_from_file(self.parameter_file,
                                                object_pairs_hook=None)
                    # parameters = self.json_from_file(self.parameter_file)
                    if parameters:
                        for param in parameters:
                            if "ParameterKey" in param and "ParameterValue" in param:
                                self.gparams[param["ParameterKey"]] = {
                                    "Default": param["ParameterValue"]
                                }
                        logger.info(self.gparams)
            if 'Mappings' in template_json:
                self.mappings = template_json['Mappings']
            if 'Resources' in template_json:
                new_resources = []
                for key, resource in template_json['Resources'].items():
                    new_resource = self.process_resource(resource)
                    new_resources.append(new_resource)
                gen_template_json['Resources'] = new_resources
        return gen_template_json
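The parameter file consumed above follows the standard AWS CLI format: a JSON list of objects carrying ParameterKey and ParameterValue, each of which is folded into the template's Parameters section as a Default. A hypothetical file would decode to:

# Hypothetical decoded parameter file; each entry becomes
# self.gparams[param['ParameterKey']] = {'Default': param['ParameterValue']}.
parameters = [
    {'ParameterKey': 'InstanceType', 'ParameterValue': 't3.micro'},
    {'ParameterKey': 'KeyName', 'ParameterValue': 'my-keypair'},
]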
Example #5
    def get_snaphotid_doc_old(self, sid, container):
        doc = None
        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            fname = '%s/%s/snapshots/%s' % (json_dir, container, sid)
            if exists_file(fname):
                json_data = json_from_file(fname)
                if json_data and 'json' in json_data:
                    doc = json_data['json']
                    snapshot = {
                        'id': json_data['snapshotId'],
                        'structure': json_data['structure'],
                        'reference': json_data['reference'],
                        'source': json_data['source'],
                        'collection': json_data['collection'],
                        'type': json_data.get("node", {}).get('type'),
                        'region' : json_data.get('region', "")
                    }
                    if 'paths' in json_data:
                        snapshot['paths'] = json_data['paths']
                    else:
                        snapshot['path'] = json_data['path']

                    self.snapshots.append(snapshot)
        return doc
Example #6
 def get_paramter_json_list(self):
     """
     process parameter files and return the parameters JSON list
     """
     parameter_json_list = []
     for parameter in self.parameter_file:
         if len(parameter.split(".")) > 0 and parameter.split(".")[-1] in [
                 "tf", "tfvars"
         ]:
             json_data = hcl_to_json(parameter)
             if json_data:
                 parameter_json_list.append(
                     {parameter.split(".")[-1]: json_data})
         elif len(parameter.split(".")) > 1 and [
                 ele for ele in [".tfvars.json", ".tf.json"]
                 if (ele in parameter)
         ]:
             json_data = json_from_file(parameter, escape_chars=['$'])
             if json_data:
                 splited_list = parameter.split(".")
                 parameter_json_list.append({
                     '.'.join(splited_list[len(splited_list) - 2:]):
                     json_data
                 })
     return parameter_json_list
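The two branches classify parameter files by suffix: bare .tf/.tfvars files go through the HCL converter, while .tf.json/.tfvars.json files are read as JSON with '$' escaped. A quick sketch of the dispatch for a few hypothetical names:

# Hypothetical filenames and the branch each one takes in get_paramter_json_list.
for name in ['vars.tfvars', 'main.tf', 'vars.tfvars.json', 'main.tf.json']:
    if name.split('.')[-1] in ('tf', 'tfvars'):
        print(name, '-> hcl_to_json')
    elif any(suffix in name for suffix in ('.tfvars.json', '.tf.json')):
        print(name, '-> json_from_file')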
Example #7
def get_call_kwargs(node):
    """Get argument names and their values in kwargs"""
    kwargs = {"params": {}}
    logger.info("Get node's kwargs")
    params_source = config_value('GOOGLE', 'params')
    paramsversions = None
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = params_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Google Params versions: %s', len(docs))
        if docs and len(docs):
            paramsversions = docs[0]['json']
    else:
        paramsversions_file = '%s/%s' % (framework_dir(), params_source)
        logger.info(paramsversions_file)
        if exists_file(paramsversions_file):
            paramsversions = json_from_file(paramsversions_file)

    path = node['path']
    if paramsversions and "queryprameters" in paramsversions:
        if node['type'] in paramsversions["queryprameters"]:
            for param, parameter_type in paramsversions["queryprameters"][
                    node['type']].items():
                add_argument_parameter(path, kwargs, param, parameter_type)

    return kwargs
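The structure document loaded above is expected to carry a queryprameters mapping (spelled exactly that way in the code) from node type to the query parameters each call takes. A plausible shape, with hypothetical node type and parameter names:

# Hypothetical structure content matching the lookups in get_call_kwargs:
# paramsversions['queryprameters'][node_type] maps parameter name -> type.
paramsversions = {
    'queryprameters': {
        'compute.instances.list': {
            'project': 'string',
            'zone': 'string',
        }
    }
}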
Example #8
def mastersnapshots_used_in_mastertests_filesystem(container):
    """
    Get mastersnapshot list used in all mastertest files of a container from the filesystem.
    This gets the list of all the mastersnapshots used in the container.
    The list is used to make sure the snapshots are not generated multiple times when the same
    mastersnapshots are used in different mastertest files of a container.
    The configuration of the default path is configured in config.ini.
    """
    snapshots = []
    # logger.info("Starting to get list of mastersnapshots used in test files.")
    reporting_path = config_value('REPORTING', 'reportOutputFolder')
    json_dir = '%s/%s/%s' % (framework_dir(), reporting_path, container)
    # logger.info(json_dir)
    # Only get list of mastertest files.
    test_files = get_json_files(json_dir, MASTERTEST)
    # logger.info('\n'.join(test_files))
    for test_file in test_files:
        logger.info('\tMASTERTEST:%s', test_file)
        test_json_data = json_from_file(test_file)
        if test_json_data:
            snapshot = test_json_data[
                'masterSnapshot'] if 'masterSnapshot' in test_json_data else ''
            if snapshot:
                file_name = snapshot if snapshot.endswith(
                    '.json') else '%s.json' % snapshot
                snapshots.append(file_name)
    return list(
        set(snapshots))  # set so that a unique list of files is returned.
Example #9
def get_currentdata():
    """Get the current run data if present, else an empty JSON object."""
    runctx = framework_currentdata()
    curr_data = json_from_file(runctx)
    if not curr_data:
        curr_data = {}
    return curr_data
Example #10
def get_azure_data(snapshot_source):
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        azure_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Azure source: %s', azure_source)
        if exists_file(azure_source):
            sub_data = json_from_file(azure_source)
    return sub_data
Example #11
def get_version_for_type(node):
    """API version of the resource, used in the request URL."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = api_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs and len(docs):
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions:
        if node and 'type' in node and node['type'] in apiversions:
            version = apiversions[node['type']]['version']
    return version
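The API-version structure consulted here maps an Azure resource type to the api-version used when building the request URL; the code reads only apiversions[node['type']]['version']. A hypothetical fragment:

# Hypothetical apiversions content; only the 'version' field is read.
apiversions = {
    'Microsoft.Compute/virtualMachines': {'version': '2021-07-01'},
    'Microsoft.Storage/storageAccounts': {'version': '2021-04-01'},
}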
Example #12
def get_google_data(snapshot_source):
    """
    The Google source object is fetched from the database or the filesystem.
    The initial configuration for the database is 'validator' and the collection
    is 'structures', whereas for the filesystem the path to fetch the
    'structures' is $SOLUTIONDIR/realm/<structure>.json.
    """
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Google structure Documents: %d', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        google_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Google source: %s', google_source)
        if exists_file(google_source):
            sub_data = json_from_file(google_source)

    if not sub_data:
        logger.error("Google connector file %s does not exist, or it does not contains the valid JSON.", snapshot_source)
    return sub_data
Example #13
    def get_connector_data(self):
        """ get connector data from snapshot """
        connector_data = {}
        if self.snapshots:
            isdb_fetch = get_dbtests()
            if isdb_fetch:
                connectors = get_documents(
                    "structures",
                    query={
                        "name" : self.snapshots[0].get("source"),
                        "type" : "structure",
                        "container": self.container
                    },
                    dbname=self.dbname,
                    limit=1
                )
                connector_data = connectors[0].get("json", {}) if connectors else {}
            else:
                json_test_dir = get_test_json_dir()
                snapshot_source = self.snapshots[0].get("source")
                file_name = '%s.json' % snapshot_source if snapshot_source and not \
                    snapshot_source.endswith('.json') else snapshot_source
                connector_path = '%s/../%s' % (json_test_dir, file_name)
                if exists_file(connector_path):
                    connector_data = json_from_file(connector_path)

        return connector_data
Example #14
def get_node_version(node, snapshot):
    """Url version of the resource."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if snapshot.isDb:
        parts = api_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        docs = get_documents(snapshot.collection(STRUCTURE),
                             dbname=snapshot.dbname,
                             sort=snapshot.sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs and len(docs):
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions:
        if node and 'type' in node and node['type'] in apiversions:
            version = apiversions[node['type']]['version']
    return version
Example #15
 def process_files(self, test_files, doctype, replace=False):
     """ Process Test or masterTest json files."""
     snapshots = []
     for test_file in test_files:
         test_json_data = json_from_file(test_file)
         if test_json_data:
             snapshot = test_json_data[
                 doctype] if doctype in test_json_data else ''
             if snapshot:
                 file_name = snapshot if snapshot.endswith(
                     '.json') else '%s.json' % snapshot
                 if replace:
                     file_name = file_name.replace('.json', '_gen.json')
                 if self.singleTest:
                     testsets = get_field_value_with_default(
                         test_json_data, 'testSet', [])
                     for testset in testsets:
                         for testcase in testset['cases']:
                             if ('testId' in testcase and testcase['testId'] == self.singleTest) or \
                                     ('masterTestId' in testcase and testcase['masterTestId'] == self.singleTest):
                                 if file_name not in snapshots:
                                     snapshots.append(file_name)
                 else:
                     snapshots.append(file_name)
     return snapshots
Example #16
 def is_template_file(self, filename):
     if filename and filename.endswith('_gen.json'):
         return False
     json_data = json_from_file(filename)
     if json_data and '$schema' in json_data and json_data['$schema']:
          match = re.match(r'.*deploymentTemplate.json#$', json_data['$schema'], re.I)
         return True if match else False
     return None
Example #17
 def is_template_file(self, file_path):
     """
     check whether the file is a valid ARM template file to parse
     """
     if len(file_path.split(".")) > 0 and file_path.split(
             ".")[-1] == "json":
         json_data = json_from_file(file_path)
         return True if (json_data) else False
     return False
Example #18
def test_json_from_file(create_temp_dir, create_temp_json, create_temp_text):
    newpath = create_temp_dir()
    fname = create_temp_text(newpath)
    fullpath = '%s/%s' % (newpath, fname)
    file_exists = os.path.exists(fullpath)
    assert True == file_exists
    json_data = json_from_file(fullpath)
    assert json_data is None
    fname = create_temp_json(newpath)
    fullpath = '%s/%s' % (newpath, fname)
    file_exists = os.path.exists(fullpath)
    assert True == file_exists
    json_data = json_from_file(fullpath)
    assert json_data is not None
    assert isinstance(json_data, collections.OrderedDict)
    json_data = json_from_file(None)
    assert json_data is None
    json_data = json_from_file('/tmp/xyza.json')
    assert json_data is None
Example #19
 def process_rego_test_case(self):
     inputjson = {}
     result = False
     opa_exe = opa_binary()
     if not opa_exe:
         # print('*' * 50)
         return result
     rule_expr = get_field_value(self.testcase, 'eval')
     if not rule_expr:
         rule_expr = 'data.rule.rulepass'
     if len(self.testcase['snapshotId']) == 1:
         sid = self.testcase['snapshotId'][0]
         inputjson = self.get_snaphotid_doc(sid)
     else:
         ms_id = dict(
             zip(self.testcase['snapshotId'],
                 self.testcase['masterSnapshotId']))
         for sid in self.testcase['snapshotId']:
             inputjson.update({ms_id[sid]: self.get_snaphotid_doc(sid)})
     if inputjson:
         save_json_to_file(inputjson, '/tmp/input.json')
         rego_rule = self.rule
         rego_match = re.match(r'^file\((.*)\)$', rego_rule, re.I)
         if rego_match:
             # rego_file = get_rego_rule_filename(rego_match.groups()[0], self.container)
              rego_file = self.rego_rule_filename(rego_match.groups()[0],
                                                  self.container)
             # rego_file1 = self.rego_rule_filename(rego_match.groups()[0], "google_crawler_container")
             # rego_file1 = self.rego_rule_filename('google_crawler.rego', "google_crawler_container")
             # print(rego_file1)
         else:
             rego_txt = [
                 "package rule", "default rulepass = false",
                 "rulepass = true{",
                 "   %s" % rego_rule, "}", ""
             ]
             rego_file = '/tmp/input.rego'
              with open(rego_file, 'w') as rego_fp:
                  rego_fp.write('\n'.join(rego_txt))
         if rego_file:
             os.system(
                 '%s eval -i /tmp/input.json -d %s "%s" > /tmp/a.json' %
                 (opa_exe, rego_file, rule_expr))
             resultval = json_from_file('/tmp/a.json')
             if resultval:
                  resultbool = resultval['result'][0]['expressions'][0][
                      'value']  # get_field_value(resultval, 'result[0].expressions[0].value')
                 if resultbool:
                     result = parsebool(resultbool)
         else:
             result = False
     return result
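When the rule is an inline expression rather than a file(...) reference, the code above wraps it in a throwaway Rego module at /tmp/input.rego. Reproducing the wrapper for a hypothetical inline rule shows the generated file:

# Hypothetical inline rule and the Rego module the wrapper produces.
rego_rule = 'input.storage.encrypted == true'
rego_txt = [
    "package rule", "default rulepass = false",
    "rulepass = true{",
    "   %s" % rego_rule, "}", ""
]
print('\n'.join(rego_txt))
# package rule
# default rulepass = false
# rulepass = true{
#    input.storage.encrypted == true
# }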
Example #20
 def is_template_file(self, file_path):
     """
     check whether the file is a valid Terraform template file to parse
     """
     if len(file_path.split(".")) > 0 and file_path.split(".")[-1] == "tf":
         json_data = hcl_to_json(file_path)
         return True if (json_data and ("resource" in json_data or "module" in json_data)) else False
     elif len(file_path.split(".")) > 0 and file_path.split(".")[-1] == "json":
         json_data = json_from_file(file_path, escape_chars=['$'])
         return True if (json_data and ("resource" in json_data or "module" in json_data)) else False
     return False
Example #21
 def is_parameter_file(self, file_path):
     """
     check whether the file is a valid Terraform variable file to parse
     """
     if len(file_path.split(".")) > 0 and file_path.split(".")[-1] in ["tf", "tfvars"]:
         json_data = hcl_to_json(file_path)
         return True if (json_data and not "resource" in json_data) else False
     elif len(file_path.split(".")) > 1 and [ele for ele in [".tfvars.json", ".tf.json"] if(ele in file_path)]:
         json_data = json_from_file(file_path, escape_chars=['$'])
         return True if (json_data and not "resource" in json_data) else False
     return False
Example #22
 def is_parameter_file(self, file_path):
     """
     check whether the file is a valid ARM deployment parameter file to parse
     """
     if len(file_path.split(".")) > 0 and file_path.split(
             ".")[-1] == "json":
         json_data = json_from_file(file_path)
         if json_data and '$schema' in json_data and json_data['$schema']:
             match = re.match(r'.*deploymentParameters.json#',
                              json_data['$schema'], re.I)
             return True if match else False
     return False
Example #23
def convert_to_json(file_path, node_type):
    json_data = {}
    if node_type == 'json':
        json_data = json_from_file(file_path, escape_chars=['$'])
    elif node_type == 'terraform':
        with open(file_path, 'r') as fp:
            json_data = hcl.load(fp)
    elif node_type == 'yaml' or node_type == 'yml':
        json_data = yaml_from_file(file_path)
    else:
        logger.error("Snapshot error type:%s and file: %s", node_type, file_path)
    return json_data
Example #24
 def is_template_file(self, file_path):
     """
     check whether the file is a valid ARM template file to parse
     """
     if len(file_path.split(".")) > 0 and file_path.split(
             ".")[-1] == "json":
         json_data = json_from_file(file_path)
         if json_data and '$schema' in json_data and json_data['$schema']:
             match = re.match(r'.*deploymentTemplate.json#$',
                              json_data['$schema'], re.I)
             return True if match else False
     return False
Example #25
 def get_template(self):
     """
     return the template json data
     """
     json_data = None
     if len(self.template_file.split(".")) > 0 and self.template_file.split(
             ".")[-1] == "tf":
         json_data = hcl_to_json(self.template_file)
     elif len(self.template_file.split(
             ".")) > 1 and ".tf.json" in self.template_file:
         json_data = json_from_file(self.template_file, escape_chars=['$'])
     return json_data
Example #26
    def get_snaphotid_doc(self, sid):
        doc = None
        isdb_fetch = get_dbtests()
        if isdb_fetch:
            dbname = self.dbname
            coll = self.collection_data[sid] if sid in self.collection_data else COLLECTION
            docs = get_documents(coll, {'snapshotId': sid}, dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)], limit=1)
            logger.debug('Number of Snapshot Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                snapshot = {
                    'id': docs[0]['snapshotId'],
                    'structure': docs[0]['structure'],
                    'reference': docs[0]['reference'],
                    'source': docs[0]['source'],
                    'collection': docs[0]['collection'],
                    'type': docs[0].get("node", {}).get('type'),
                    'region' : docs[0].get('region', "")
                }
                if 'paths' in docs[0]:
                    snapshot['paths'] = docs[0]['paths']
                else:
                    snapshot['path'] = docs[0]['path']
                self.snapshots.append(snapshot)
        else:
            json_dir = '%s%s' % (get_test_json_dir(), self.container)
            if exists_dir(json_dir):
                fname = '%s/snapshots/%s' % (json_dir, sid)
                if exists_file(fname):
                    json_data = json_from_file(fname)
                    if json_data and 'json' in json_data:
                        doc = json_data['json']
                        snapshot_val = {
                            'id': json_data['snapshotId'],
                            'structure': json_data['structure'],
                            'reference': json_data['reference'],
                            'source': json_data['source'],
                            'collection': json_data['collection'],
                            'type': json_data.get("node", {}).get('type'),
                            'region' : json_data.get('region', "")
                        }
                        if 'paths' in json_data:
                            snapshot_val['paths'] = json_data['paths']
                        else:
                            snapshot_val['path'] = json_data['path']

                        singletest = get_from_currentdata(SINGLETEST)
                        if singletest:
                            snapshot_val['json'] = doc
                        self.snapshots.append(snapshot_val)
        return doc
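Both branches of get_snaphotid_doc expect snapshot documents of the same shape, whether fetched from the database collection or read from <container>/snapshots/<sid> on disk; the returned doc is the document's 'json' payload. A hypothetical minimal document:

# Hypothetical snapshot document; the field names below are the ones the
# method reads, the values are made up.
json_data = {
    'snapshotId': 'AZRSNP_101',
    'structure': 'azure',
    'reference': 'dev-subscription',
    'source': 'azureStructure',
    'collection': 'microsoftcompute',
    'node': {'type': 'Microsoft.Compute/virtualMachines'},
    'path': '/resource/path/to/vm1',
    'json': {'name': 'vm1', 'location': 'eastus'},
}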
Example #27
 def get_structure_data(self, snapshot_object):
     """ Get the structure from the filesystem."""
     structure_data = {}
     json_test_dir = get_test_json_dir()
     snapshot_source = get_field_value(snapshot_object, "source")
     file_name = '%s.json' % snapshot_source if snapshot_source and not \
         snapshot_source.endswith('.json') else snapshot_source
     custom_source = '%s/../%s' % (json_test_dir, file_name)
     logger.info('%s structure file is %s', Snapshot.LOGPREFIX,
                 custom_source)
     if exists_file(custom_source):
         structure_data = json_from_file(custom_source)
     return structure_data
Example #28
def get_custom_data(snapshot_source):
    """
    Get source JSON data
    """
    sub_data = {}
    json_test_dir = get_test_json_dir()
    file_name = '%s.json' % snapshot_source if snapshot_source and not \
        snapshot_source.endswith('.json') else snapshot_source
    custom_source = '%s/../%s' % (json_test_dir, file_name)
    logger.info('Custom source: %s', custom_source)
    if exists_file(custom_source):
        sub_data = json_from_file(custom_source)
    return sub_data
Example #29
def main(arg_vals=None):
    """Main driver utility for running validator tests."""
    logger.info("Comand: '%s %s'",
                sys.executable.rsplit('/', 1)[-1], ' '.join(sys.argv))
    cmd_parser = argparse.ArgumentParser("Comparator functional tests.")
    cmd_parser.add_argument('container',
                            action='store',
                            help='Container tests directory.')
    cmd_parser.add_argument('testfile',
                            action='store',
                            help='test file in the container')

    args = cmd_parser.parse_args(arg_vals)
    # Delete the rundata at the end of the script.
    atexit.register(delete_currentdata)
    logger.info(args)
    init_currentdata()
    init_db()
    snapshot_dir, snapshot_files = get_container_snapshot_json_files(
        args.container)
    if not snapshot_files:
        logger.info("No Snapshot files in %s, exiting!...", snapshot_dir)
        return False
    logger.info('Snapshot files: %s', snapshot_files)
    dbname = config_value(DATABASE, DBNAME)
    snapshot_ids = {}
    for fl in snapshot_files:
        snapshot_ids.update(populate_snapshots_from_file(fl))
    logger.debug(snapshot_ids)
    for sid, coll in snapshot_ids.items():
        docs = get_documents(coll, {'snapshotId': sid},
                             dbname,
                             sort=[('timestamp', pymongo.DESCENDING)],
                             limit=1)
        logger.debug('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            doc = docs[0]['json']
            logger.info('#' * 80)
            logger.info(json.dumps(doc, indent=2))
    test6 = '%s/%s' % (get_container_dir(args.container), args.testfile)
    test_json = json_from_file(test6)
    if not test_json:
        return
    logger.debug(test_json)
    otherdata = {'dbname': dbname, 'snapshots': snapshot_ids}
    # for testcase in test_json['testSet'][0]['cases']:
    for testset in test_json['testSet']:
        for testcase in testset['cases']:
            rulestr = get_field_value(testcase, 'rule')
            if rulestr:
                main_comparator(rulestr, otherdata)
Example #30
 def is_parameter_file(self, file_path):
     """
     check whether the file is a valid CloudFormation parameter file to parse
     """
     if len(file_path.split(".")) > 0 and file_path.split(".")[-1] in [
             "json", "yaml"
     ]:
         json_data = json_from_file(file_path)
         if json_data and isinstance(json_data, list):
             parameter = json_data[0]
             if isinstance(
                     parameter, dict
             ) and "ParameterKey" in parameter and "ParameterValue" in parameter:
                 return True
     return False