def populate_container_snapshots_database(container):
    """
    Get the snapshot files from the container with storage system as database.
    The table or collection and database is configured in the config.ini, for the default
    location configuration is "validator" database with "snapshots" as its collections.
    """
    snapshots_status = {}
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, _id=True)
    if docs and len(docs):
        logger.info('Number of Snapshot Documents: %s', len(docs))
        snapshots = container_snapshots_database(container)
        populated = []
        for doc in docs:
            if doc['json']:
                snapshot = doc['name']
                if snapshot in snapshots and snapshot not in populated:
                    # Take the snapshot and record whether it was successful or not,
                    # then pass that back to the validation tests so that only the
                    # tests whose snapshots were successfully fetched are executed.
                    snapshot_file_data = populate_snapshots_from_json(doc['json'], container)
                    update_one_document(doc, collection, dbname)
                    populated.append(snapshot)
                    snapshots_status[snapshot] = snapshot_file_data
    return snapshots_status
def get_version_for_type(node):
    """Url version of the resource."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = api_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs and len(docs):
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions:
        if node and 'type' in node and node['type'] in apiversions:
            version = apiversions[node['type']]['version']
    return version
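For reference, the lookup above assumes an API-versions structure keyed by resource type, each entry carrying a 'version' field. A minimal sketch, with hypothetical resource types and version strings:

# Hypothetical shape of the Azure API-versions data read by
# get_version_for_type(); types and versions below are illustrative only.
apiversions = {
    "Microsoft.Compute/virtualMachines": {"version": "2021-07-01"},
    "Microsoft.Storage/storageAccounts": {"version": "2021-06-01"},
}

node = {"type": "Microsoft.Compute/virtualMachines"}
version = apiversions[node["type"]]["version"]  # -> "2021-07-01"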
def get_call_kwargs(node):
    """Get argument names and their values in kwargs"""
    kwargs = {"params": {}}
    logger.info("Get node's kwargs")
    params_source = config_value('GOOGLE', 'params')
    paramsversions = None
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = params_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Google Params versions: %s', len(docs))
        if docs and len(docs):
            paramsversions = docs[0]['json']
    else:
        paramsversions_file = '%s/%s' % (framework_dir(), params_source)
        logger.info(paramsversions_file)
        if exists_file(paramsversions_file):
            paramsversions = json_from_file(paramsversions_file)

    path = node['path']
    if paramsversions and "queryprameters" in paramsversions:
        if node['type'] in paramsversions["queryprameters"]:
            for param, parameter_type in paramsversions["queryprameters"][
                    node['type']].items():
                add_argument_parameter(path, kwargs, param, parameter_type)

    return kwargs
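The parameter lookup assumes a params structure with a "queryprameters" mapping (the key is spelled exactly as the code expects it) from node type to parameter names and types. A hypothetical sketch:

# Hypothetical params structure consumed by get_call_kwargs(); the node type
# and parameter names are made up for illustration.
paramsversions = {
    "queryprameters": {
        "compute.instances.get": {
            "project": "string",   # parameter name -> parameter type
            "zone": "string",
            "instance": "string",
        }
    }
}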
Example #4
def add_new_container(container_name, dbname):
    container_struture_list = get_documents('structures',
                                            {'type': 'container'}, dbname)
    container_struture = container_struture_list[0]
    container_json = container_struture['json']
    container_list = container_json['containers']

    filtered_list = list(
        filter(lambda i: i['name'] == container_name, container_list))
    if filtered_list:
        return

    if container_list:
        container = dict(container_list[-1])
        containerId = container['containerId'] + 1
    else:
        containerId = 1

    new_container = {
        'containerId': containerId,
        'status': 'active',
        'name': container_name,
        'masterSnapshots': [],
        'Snapshots': [],
        'masterTests': [],
        'Tests': [],
        'others': []
    }

    container_list.append(new_container)
    container_json['containers'] = container_list
    container_struture['json'] = container_json
    update_one_document(container_struture, container_struture['collection'],
                        dbname)
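For reference, a sketch of the container structure document this function reads and appends to; all values below are hypothetical:

# Hypothetical 'structures' document of type 'container' as read and updated
# by add_new_container(); only the fields the function touches are shown.
container_structure = {
    "type": "container",
    "collection": "structures",
    "json": {
        "containers": [{
            "containerId": 1,
            "status": "active",
            "name": "container1",
            "masterSnapshots": [],
            "Snapshots": [],
            "masterTests": [],
            "Tests": [],
            "others": []
        }]
    }
}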
def get_custom_data(snapshot_source):
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Custom Documents: %d', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        custom_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Custom source: %s', custom_source)
        if exists_file(custom_source):
            sub_data = json_from_file(custom_source)
    return sub_data
def get_google_data(snapshot_source):
    """
    The Google source object to be fetched from database or the filesystem
    The initial configuration for database is 'validator' and collection
    is 'structures', whereas for the filesystem the path to fetch the
    'structures' is  $SOLUTIONDIR/realm/<structure>.json
    """
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Google structure Documents: %d', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        google_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Google source: %s', google_source)
        if exists_file(google_source):
            sub_data = json_from_file(google_source)
    return sub_data
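Both get_custom_data and get_google_data resolve a snapshot source name to a JSON file one level above the test JSON directory. A small sketch of that resolution, assuming a hypothetical directory layout and source name:

# Hypothetical filesystem resolution performed by get_google_data();
# 'gcpStructure' and the directory are invented for illustration.
json_test_dir = "/solution/realm/validation/"   # as if from get_test_json_dir()
snapshot_source = "gcpStructure"
file_name = "%s.json" % snapshot_source          # .json appended if missing
google_source = "%s/../%s" % (json_test_dir, file_name)
# -> "/solution/realm/validation//../gcpStructure.json",
#    i.e. /solution/realm/gcpStructure.json after normalization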
Example #7
    def get_connector_data(self):
        """ get connector data from snapshot """
        connector_data = {}
        if self.snapshots:
            isdb_fetch = get_dbtests()
            if isdb_fetch:
                connectors = get_documents(
                    "structures",
                    query={
                        "name" : self.snapshots[0].get("source"),
                        "type" : "structure",
                        "container": self.container
                    },
                    dbname=self.dbname,
                    limit=1
                )
                connector_data = connectors[0].get("json", {}) if connectors else {}
            else:
                json_test_dir = get_test_json_dir()
                snapshot_source = self.snapshots[0].get("source")
                file_name = '%s.json' % snapshot_source if snapshot_source and not \
                    snapshot_source.endswith('.json') else snapshot_source
                connector_path = '%s/../%s' % (json_test_dir, file_name)
                if exists_file(connector_path):
                    connector_data = json_from_file(connector_path)

        return connector_data
Example #8
def mastersnapshots_used_in_mastertests_database(container):
    """
    Get mastersnapshot list used in mastertest files of a container from the database.
    The mastersnapshots list are read from database. The default configuration of database and
    snapshot collections is configured in config.ini file.
    """
    snapshots = []
    logger.info("Starting to get list of mastersnapshots from database")
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[MASTERTEST])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    logger.info('Number of mastertest Documents: %s', len(docs))
    if docs and len(docs):
        for doc in docs:
            if doc['json']:
                snapshot = doc['json'][
                    'masterSnapshot'] if 'masterSnapshot' in doc['json'] else ''
                if snapshot:
                    if snapshot.endswith('.json'):
                        parts = snapshot.split('.')
                        snapshots.append(parts[0])
                    else:
                        snapshots.append(snapshot)
    return list(set(snapshots))
Example #9
def get_node_version(node, snapshot):
    """Url version of the resource."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if snapshot.isDb:
        parts = api_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        docs = get_documents(snapshot.collection(STRUCTURE),
                             dbname=snapshot.dbname,
                             sort=snapshot.sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs and len(docs):
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions:
        if node and 'type' in node and node['type'] in apiversions:
            version = apiversions[node['type']]['version']
    return version
Example #10
def test_mongoconnection(monkeypatch):
    monkeypatch.setattr('processor.database.database.config_value', mock_config_value)
    monkeypatch.setattr('processor.database.database.get_dburl', mock_get_dburl)
    monkeypatch.setattr('processor.database.database.MongoClient', MyMongoClient)
    from processor.database.database import mongoconnection, mongodb, init_db,\
        get_collection, collection_names, insert_one_document, insert_documents,\
        check_document, get_documents, count_documents, index_information, distinct_documents
    # assert MONGO is None
    mongo = mongoconnection()
    assert mongo is not None
    dbname = 'abcd'
    testdb = mongodb(dbname)
    assert testdb is not None
    testdb = mongodb()
    assert testdb is not None
    val = testdb['abcd']
    assert val is not None
    init_db()
    coll = get_collection(dbname, 'a1')
    assert coll is not None
    colls = collection_names(dbname)
    assert colls is not None
    val = insert_one_document({'a':'b'}, 'a1', dbname)
    assert val is not None
    val = distinct_documents('a1', 'a', dbname)
    assert val is not None
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        val = insert_one_document({'a': 'b'}, 'a1', dbname, False)
        assert val is not None
    val = '123456789012345678901234' # 24 character string
    doc = check_document('a1', val, dbname)
    assert doc is not None
    vals = insert_documents([{'a': 'b'}, {'c': 'd'}], 'a1', dbname)
    assert len(vals) == 2
    vals = get_documents('a1', dbname=dbname, sort=None)
    assert vals is not None
    vals = get_documents('a1', dbname=dbname, sort='abcd')
    assert vals is not None
    count = count_documents('a1', dbname=dbname)
    assert type(count) is int
    assert count > 0
    info = index_information('a1', dbname)
    assert info is not None
    mongo.drop_database(dbname)
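The test relies on mock_config_value, mock_get_dburl and MyMongoClient, which are not shown here. A plausible sketch of such helpers, assuming the mongomock package is available as an in-memory MongoClient substitute (the real project defines its own versions):

# Hypothetical stand-ins for the mocks referenced by test_mongoconnection().
import mongomock

def mock_config_value(section, key, default=None):
    # Return only the configuration values the database module reads.
    values = {
        ('MONGODB', 'dbname'): 'validator',
        ('MONGODB', 'dburl'): 'mongodb://localhost:27017',
    }
    return values.get((section, key), default)

def mock_get_dburl():
    return 'mongodb://localhost:27017'

MyMongoClient = mongomock.MongoClient  # drop-in, in-memory MongoClient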
Example #11
    def get_snaphotid_doc(self, sid):
        doc = None
        isdb_fetch = get_dbtests()
        if isdb_fetch:
            dbname = self.dbname
            coll = self.collection_data[sid] if sid in self.collection_data else COLLECTION
            docs = get_documents(coll, {'snapshotId': sid}, dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)], limit=1)
            logger.debug('Number of Snapshot Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                snapshot = {
                    'id': docs[0]['snapshotId'],
                    'structure': docs[0]['structure'],
                    'reference': docs[0]['reference'],
                    'source': docs[0]['source'],
                    'collection': docs[0]['collection'],
                    'type': docs[0].get("node", {}).get('type'),
                    'region' : docs[0].get('region', "")
                }
                if 'paths' in docs[0]:
                    snapshot['paths'] = docs[0]['paths']
                else:
                    snapshot['path'] = docs[0]['path']
                self.snapshots.append(snapshot)
        else:
            json_dir = '%s%s' % (get_test_json_dir(), self.container)
            if exists_dir(json_dir):
                fname = '%s/snapshots/%s' % (json_dir, sid)
                if exists_file(fname):
                    json_data = json_from_file(fname)
                    if json_data and 'json' in json_data:
                        doc = json_data['json']
                        snapshot_val = {
                            'id': json_data['snapshotId'],
                            'structure': json_data['structure'],
                            'reference': json_data['reference'],
                            'source': json_data['source'],
                            'collection': json_data['collection'],
                            'type': json_data.get("node", {}).get('type'),
                            'region' : json_data.get('region', "")
                        }
                        if 'paths' in json_data:
                            snapshot_val['paths'] = json_data['paths']
                        else:
                            snapshot_val['path'] = json_data['path']

                        singletest = get_from_currentdata(SINGLETEST)
                        if singletest:
                            snapshot_val['json'] = doc
                        self.snapshots.append(snapshot_val)
        return doc
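The snapshot document this method reads back carries at least the fields copied into the snapshot dict above; a hypothetical example:

# Hypothetical snapshot document as stored in the snapshots collection and
# consumed by get_snaphotid_doc(); all values are illustrative.
snapshot_doc = {
    "snapshotId": "SNAPSHOT_1",
    "structure": "azure",
    "reference": "dev",
    "source": "azureStructure",
    "collection": "microsoftcompute",
    "node": {"type": "Microsoft.Compute/virtualMachines"},
    "region": "eastus",
    "path": "/subscriptions/sub1/resourceGroups/rg1/vm1",  # or "paths": [...]
    "timestamp": 1609459200000,
    "json": {"id": "vm1", "properties": {}},
}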
Example #12
def main(arg_vals=None):
    """Main driver utility for running validator tests."""
    logger.info("Comand: '%s %s'",
                sys.executable.rsplit('/', 1)[-1], ' '.join(sys.argv))
    cmd_parser = argparse.ArgumentParser("Comparator functional tests.")
    cmd_parser.add_argument('container',
                            action='store',
                            help='Container tests directory.')
    cmd_parser.add_argument('testfile',
                            action='store',
                            help='test file in the container')

    args = cmd_parser.parse_args(arg_vals)
    # Delete the rundata at the end of the script.
    atexit.register(delete_currentdata)
    logger.info(args)
    init_currentdata()
    init_db()
    snapshot_dir, snapshot_files = get_container_snapshot_json_files(
        args.container)
    if not snapshot_files:
        logger.info("No Snapshot files in %s, exiting!...", snapshot_dir)
        return False
    logger.info('Snapshot files: %s', snapshot_files)
    dbname = config_value(DATABASE, DBNAME)
    snapshot_ids = {}
    for fl in snapshot_files:
        # Merge snapshot ids from every file instead of keeping only the last file's.
        snapshot_ids.update(populate_snapshots_from_file(fl))
    logger.debug(snapshot_ids)
    for sid, coll in snapshot_ids.items():
        docs = get_documents(coll, {'snapshotId': sid},
                             dbname,
                             sort=[('timestamp', pymongo.DESCENDING)],
                             limit=1)
        logger.debug('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            doc = docs[0]['json']
            logger.info('#' * 80)
            logger.info(json.dumps(doc, indent=2))
    test6 = '%s/%s' % (get_container_dir(args.container), args.testfile)
    test_json = json_from_file(test6)
    if not test_json:
        return
    logger.debug(test_json)
    otherdata = {'dbname': dbname, 'snapshots': snapshot_ids}
    # for testcase in test_json['testSet'][0]['cases']:
    for testset in test_json['testSet']:
        for testcase in testset['cases']:
            rulestr = get_field_value(testcase, 'rule')
            if rulestr:
                main_comparator(rulestr, otherdata)
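A hypothetical invocation of this driver, with made-up container and test file names:

# Hypothetical usage; 'container1' and 'test1.json' are illustrative names.
if __name__ == '__main__':
    main(['container1', 'test1.json'])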
Example #13
    def get_snapshots(self):
        """Populate the used snapshots in test and mastertest for this container."""
        snapshots_status = {}
        docs = get_documents(self.collection(SNAPSHOT),
                             dbname=self.dbname,
                             sort=self.sort,
                             query=self.qry,
                             _id=True)
        if docs and len(docs):
            logger.info('%s fetched %s number of documents: %s',
                        Snapshot.LOGPREFIX, SNAPSHOT, len(docs))
            used_snapshots = self.get_used_snapshots_in_tests()
            if not used_snapshots:
                raise SnapshotsException(
                    "No snapshots for this container: %s, add and run again!..."
                    % self.container)
            populated = []
            for doc in docs:
                if doc['json']:
                    snapshot = doc['name']
                    try:
                        pull_response, git_connector_json = self.check_and_fetch_remote_snapshots(
                            doc['json'])
                        if git_connector_json and not pull_response:
                            logger.info('%s Fetching remote snapshots failed.',
                                        Snapshot.LOGPREFIX)
                            break

                        if snapshot in used_snapshots and snapshot not in populated:
                            # Take the snapshot and record whether it was successful or not,
                            # then pass that back to the validation tests so that only the
                            # tests whose snapshots were successfully fetched are executed.
                            snapshot_file_data = self.populate_snapshots(
                                doc['json'])

                            if not git_connector_json:
                                update_one_document(doc,
                                                    self.collection(SNAPSHOT),
                                                    self.dbname)

                            populated.append(snapshot)
                            snapshots_status[snapshot] = snapshot_file_data
                    except Exception as e:
                        dump_output_results([], self.container, "-", snapshot,
                                            False)
                        raise e
        if not snapshots_status:
            raise SnapshotsException(
                "No snapshots for this container: %s, add and run again!..." %
                self.container)
        return snapshots_status
Example #14
    def get_snaphotid_doc(self, sid):
        doc = None
        isdb_fetch = get_dbtests()
        if isdb_fetch:
            dbname = self.kwargs['dbname']
            coll = self.kwargs['snapshots'][sid] if sid in self.kwargs['snapshots'] else COLLECTION
            docs = get_documents(coll, {'snapshotId': sid},
                                 dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)],
                                 limit=1)
            logger.debug('Number of Snapshot Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                self.snapshots.append({
                    'id': docs[0]['snapshotId'],
                    'path': docs[0]['path'],
                    'structure': docs[0]['structure'],
                    'reference': docs[0]['reference'],
                    'source': docs[0]['source']
                })
        else:
            json_dir = '%s%s' % (get_test_json_dir(), self.kwargs['container'])
            if exists_dir(json_dir):
                fname = '%s/snapshots/%s' % (json_dir, sid)
                if exists_file(fname):
                    json_data = json_from_file(fname)
                    if json_data and 'json' in json_data:
                        doc = json_data['json']
                        snapshot_val = {
                            'id': json_data['snapshotId'],
                            'path': json_data['path'],
                            'structure': json_data['structure'],
                            'reference': json_data['reference'],
                            'source': json_data['source']
                        }
                        singletest = get_from_currentdata(SINGLETEST)
                        if singletest:
                            snapshot_val['json'] = doc
                        self.snapshots.append(snapshot_val)
        return doc
Example #15
    def get_used_snapshots_in_tests(self):
        """Get the snapshots used in the test and mastertest files of the container."""
        snapshots = []
        logger.info("%s Fetching documents for %s", Snapshot.LOGPREFIX,
                    self.container)
        for collection, snapshotType, suffix in ((TEST, SNAPSHOT, ''),
                                                 (MASTERTEST, MASTERSNAPSHOT, '_gen')):
            docs = get_documents(self.collection(collection),
                                 dbname=self.dbname,
                                 sort=self.sort,
                                 query=self.qry)
            logger.info('%s fetched %s number of documents: %s',
                        Snapshot.LOGPREFIX, collection, len(docs))
            snapshots.extend(self.process_docs(docs, snapshotType, suffix))
        return list(set(snapshots))
Example #16
    def get_structure_data(self, snapshot_object):
        """Return the structure from the database."""
        structure_data = {}
        snapshot_source = get_field_value(snapshot_object, "source")
        snapshot_source = snapshot_source.replace(
            '.json', '') if snapshot_source else ''
        qry = {'name': snapshot_source}
        structure_docs = get_documents(self.collection(STRUCTURE),
                                       dbname=self.dbname,
                                       sort=self.sort,
                                       query=qry,
                                       limit=1)
        logger.info('%s fetched %s number of documents: %s',
                    Snapshot.LOGPREFIX, STRUCTURE, len(structure_docs))
        if structure_docs and len(structure_docs):
            structure_data = structure_docs[0]['json']
        return structure_data
Example #17
def _get_snapshot_type_map(container):
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
    qry = {'container': container}
    docs = get_documents(collection, dbname=dbname, query=qry)
    mappings = {}
    if docs and len(docs):
        for doc in docs:
            given_data = doc['json']
            if given_data:
                snapshots = given_data.get("snapshots", [])
                for snapshot in snapshots:
                    given_type = snapshot.get("type","")
                    if given_type == "aws":
                        nodes = snapshot.get("nodes",[])
                        for node in nodes:
                            mappings[node['snapshotId']] = node['type']
    return mappings
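The mapping is built from snapshot documents whose 'json' holds AWS snapshots with typed nodes. Given the hypothetical document below, the function would return {'SNAPSHOT_1': 'ec2'}:

# Hypothetical input document and resulting mapping for _get_snapshot_type_map();
# the ids and types are illustrative.
doc = {
    "json": {
        "snapshots": [
            {
                "type": "aws",
                "nodes": [{"snapshotId": "SNAPSHOT_1", "type": "ec2"}],
            }
        ]
    }
}
# mappings == {"SNAPSHOT_1": "ec2"}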
Example #18
def get_snapshot_id_to_collection_dict(snapshot_file,
                                       container,
                                       dbname,
                                       filesystem=True):
    snapshot_data = {}
    snapshot_json_data = {}
    if filesystem:
        file_name = '%s.json' % snapshot_file if snapshot_file and not \
            snapshot_file.endswith('.json') else snapshot_file
        snapshot_file = '%s/%s/%s' % (get_test_json_dir(), container,
                                      file_name)
        snapshot_json_data = json_from_file(snapshot_file)
    else:
        parts = snapshot_file.split('.')
        collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
        qry = {'container': container, 'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            snapshot_json_data = docs[0]['json']
    snapshots = get_field_value(snapshot_json_data, 'snapshots')
    if not snapshots:
        logger.info("Snapshot does not contain snapshots...")
        return snapshot_data
    for snapshot in snapshots:
        nodes = get_field_value(snapshot, 'nodes')
        if not nodes:
            logger.info("No nodes in snapshot, continuing to next!...")
            continue
        for node in nodes:
            sid = get_field_value(node, 'snapshotId')
            coll = node['collection'] if 'collection' in node else COLLECTION
            collection = coll.replace('.', '').lower()
            snapshot_data[sid] = collection
            if get_dbtests():
                create_indexes(collection, dbname,
                               [('timestamp', pymongo.TEXT)])
    return snapshot_data
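Each node's 'collection' value is normalized (dots stripped, lowercased) before being recorded, for example:

# Hypothetical normalization performed per node; names are illustrative.
COLLECTION = 'resources'  # hypothetical default collection name
node = {"snapshotId": "SNAPSHOT_2", "collection": "Microsoft.Compute"}
coll = node['collection'] if 'collection' in node else COLLECTION
collection = coll.replace('.', '').lower()   # -> "microsoftcompute"
# snapshot_data == {"SNAPSHOT_2": "microsoftcompute"}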
Example #19
    def rego_rule_filename(self, rego_file, container):
        rego_file_name = None
        if 'dirpath' in self.testcase and self.testcase['dirpath']:
            rego_file_name = '%s/%s' % (self.testcase['dirpath'], rego_file)
            if not exists_file(rego_file_name):
                rego_file_name = None
            return rego_file_name
        isdb_fetch = get_dbtests()
        # DB and SNAPSHOT modes report the same value here, so for SNAPSHOT we
        # check the database first and fall back to the file path if the file
        # is not found there.

        if isdb_fetch:
            dbname = self.dbname
            coll = 'structures'
            docs = get_documents(coll, { 'type': 'others', "container" : container}, dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)], limit=1)
            # print('Number of other Documents: %s' % len(docs))
            logger.debug('Number of other Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                if doc and 'file' in doc and isinstance(doc['file'], list):
                    for file_doc in doc['file']:
                        name = get_field_value(file_doc, 'name')
                        # print(name, rego_file)
                        if name == rego_file:
                            content = get_field_value(file_doc, 'container_file')
                            if content:
                                rego_file_name = '/tmp/%s' % rego_file
                                with open(rego_file_name, 'w', encoding="utf-8") as f:
                                    f.write(content)
                                return rego_file_name
                # print(doc)

        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            rego_file_name = '%s/%s/%s' % (json_dir, container, rego_file)
            if not exists_file(rego_file_name):
                rego_file_name = None
        return rego_file_name
def get_service_name(node_type):
    """
    Get service name for init compute function
    """
    service = None
    params_source = config_value('GOOGLE', 'params')
    paramsversions = None
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = params_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Google Params versions: %s', len(docs))
        if docs and len(docs):
            paramsversions = docs[0]['json']
    else:
        paramsversions_file = '%s/%s' % (framework_dir(), params_source)
        logger.info(paramsversions_file)
        if exists_file(paramsversions_file):
            paramsversions = json_from_file(paramsversions_file)

    check_node_type = node_type
    node_type_list = node_type.split(".")
    if len(node_type_list) > 1:
        del node_type_list[-1]
        check_node_type = ".".join(node_type_list)

    if paramsversions and "serviceName" in paramsversions:
        for service_name, resource_list in paramsversions['serviceName'].items(
        ):
            if check_node_type in resource_list:
                service = service_name

    return service
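The lookup expects a 'serviceName' mapping from service to the resource types it serves; a node type like "compute.instances.get" is first trimmed to "compute.instances". A hypothetical structure:

# Hypothetical serviceName structure read by get_service_name(); the service
# and resource-type names are illustrative.
paramsversions = {
    "serviceName": {
        "compute": ["compute.instances", "compute.disks"],
        "storage": ["storage.buckets"],
    }
}
# get_service_name("compute.instances.get") trims the last segment, matches
# "compute.instances" in the "compute" list, and returns "compute".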
Example #21
def get_kubernetes_structure_data(snapshot_source):
    """
    get_kubernetes_structure_data going to get structure data from connector 
    file which specified in snapshot as source field.
    Return structure data as json dictionary
    """
    kubernetes_structure_data = {}
    if json_source():
        qry = {'name': snapshot_source}
        dbname = config_value('MONGODB', 'dbname')
        sort = [sort_field('timestamp', False)]
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        structure_docs = get_documents(collection=collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('%s fetched %s number of documents: %s', Snapshot.LOGPREFIX, STRUCTURE, len(structure_docs))
        if structure_docs and len(structure_docs):
            kubernetes_structure_data = structure_docs[0]['json']
    else:
        kubernetes_structure_path = get_kubernetes_structure_path(snapshot_source)
        kubernetes_structure_data = json_from_file(kubernetes_structure_path)

    return kubernetes_structure_data
Example #22
def get_api_versions():
    """ get api versions dict """
    global apiversions
    if not apiversions:
        api_source = config_value('AZURE', 'api')
        if json_source():
            dbname = config_value(DATABASE, DBNAME)
            collection = config_value(DATABASE, collectiontypes[STRUCTURE])
            parts = api_source.rsplit('/')
            name = parts[-1].split('.')
            qry = {'name': name[0]}
            sort = [sort_field('timestamp', False)]
            docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
            logger.info('Number of Azure API versions: %s', len(docs))
            if docs and len(docs):
                apiversions = docs[0]['json']
        else:
            apiversions_file = '%s/%s' % (framework_dir(), api_source)
            # logger.info(apiversions_file)
            if exists_file(apiversions_file):
                apiversions = json_from_file(apiversions_file)
    return apiversions
def get_call_kwargs_for_crawler(node, project_id):
    """Get argument names and their values in kwargs for Crawler"""
    kwargs = {}
    logger.info("Get node's kwargs")
    params_source = config_value('GOOGLE', 'params')
    paramsversions = None
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = params_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Google Params versions: %s', len(docs))
        if docs and len(docs):
            paramsversions = docs[0]['json']
    else:
        paramsversions_file = '%s/%s' % (framework_dir(), params_source)
        logger.info(paramsversions_file)
        if exists_file(paramsversions_file):
            paramsversions = json_from_file(paramsversions_file)
    if paramsversions:
        if node and 'type' in node and "crawler_queryprameters" in paramsversions:
            for prameter in paramsversions["crawler_queryprameters"]:
                if node['type'] in prameter['services']:
                    for param in prameter['params']:
                        if param == "project":
                            kwargs['project'] = project_id
                        elif param == "projectId":
                            kwargs['projectId'] = project_id
                        elif param == "zone":
                            kwargs['zone'] = "-"

    return kwargs
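The crawler variant walks a "crawler_queryprameters" list, where each entry names the services it applies to and the parameters to fill in. A hypothetical entry:

# Hypothetical crawler_queryprameters structure consumed by
# get_call_kwargs_for_crawler(); service and parameter names are illustrative.
paramsversions = {
    "crawler_queryprameters": [
        {
            "services": ["compute.instances", "compute.disks"],
            "params": ["project", "zone"],
        }
    ]
}
# For a node of type "compute.instances" this would yield
# kwargs == {"project": project_id, "zone": "-"}.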
Example #24
def get_google_parameters():
    """
    Return the google parameter object read from database or the filesystem
    """
    global google_parameters
    if not google_parameters:
        params_source = config_value('GOOGLE', 'params')
        if json_source():
            dbname = config_value(DATABASE, DBNAME)
            collection = config_value(DATABASE, collectiontypes[STRUCTURE])
            parts = params_source.rsplit('/')
            name = parts[-1].split('.')
            qry = {'name': name[0]}
            sort = [sort_field('timestamp', False)]
            docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
            logger.info('Number of Google Params versions: %s', len(docs))
            if docs and len(docs):
                google_parameters = docs[0]['json']
        else:
            params_file = '%s/%s' % (framework_dir(), params_source)
            logger.info(params_file)
            if exists_file(params_file):
                google_parameters = json_from_file(params_file)
    return google_parameters
    def validate(self):
        result_val = {"result": "failed"}
        if self.format == TESTCASEV1:
            if self.snapshot_id:
                docs = get_documents(self.collection,
                                     dbname=self.dbname,
                                     sort=[('timestamp', pymongo.DESCENDING)],
                                     query={'snapshotId': self.snapshot_id},
                                     limit=1)
                logger.info('Number of Snapshot Documents: %s', len(docs))
                if docs and len(docs):
                    self.data = docs[0]['json']
                    if self.op in OPERATORS and OPERATORS[self.op]:
                        result = OPERATORS[self.op](self.data, self.loperand,
                                                    self.roperand, self.is_not,
                                                    self.extras)
                        result_val["result"] = "passed" if result else "failed"
                        result_val["snapshots"] = [{
                            'id': docs[0]['snapshotId'],
                            'path': docs[0]['path'],
                            'structure': docs[0]['structure'],
                            'reference': docs[0]['reference'],
                            'source': docs[0]['source']
                        }]
                else:
                    result_val.update({
                        "result": "skipped",
                        "reason": "Missing documents for the snapshot"
                    })
            else:
                result_val.update({
                    "result": "skipped",
                    "reason": "Missing snapshotId for testcase"
                })
        elif self.format == TESTCASEV2:
            if self.type == 'rego':
                result = self.process_rego_test_case()
                result_val["result"] = "passed" if result else "failed"
                result_val['snapshots'] = self.snapshots
            else:
                logger.info('#' * 75)
                logger.info('Actual Rule: %s', self.rule)
                input_stream = InputStream(self.rule)
                lexer = comparatorLexer(input_stream)
                stream = CommonTokenStream(lexer)
                parser = comparatorParser(stream)
                tree = parser.expression()
                children = []
                for child in tree.getChildren():
                    children.append(child.getText())
                logger.info('*' * 50)
                logger.debug("All the parsed tokens: %s", children)
                otherdata = {
                    'dbname': self.dbname,
                    'snapshots': self.collection_data,
                    'container': self.container
                }
                r_i = RuleInterpreter(children, **otherdata)
                result = r_i.compare()
                result_val["result"] = "passed" if result else "failed"
                result_val['snapshots'] = r_i.get_snapshots()
        else:
            result_val.update({
                "result": "skipped",
                "reason": "Unsupported testcase format"
            })
        return result_val
Example #26
def run_container_validation_tests_database(container, snapshot_status=None):
    """ Get the test files from the database"""
    dirpath = None
    dbname = config_value(DATABASE, DBNAME)
    test_files_found = True
    mastertest_files_found = True
    # For test files
    collection = config_value(DATABASE, collectiontypes[TEST])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    finalresult = True
    if docs and len(docs):
        logger.info('Number of test Documents: %s', len(docs))
        for doc in docs:
            if doc['json']:
                try:
                    snapshot = doc['json']['snapshot'] if 'snapshot' in doc['json'] else ''
                    if "connector" in doc['json'] and "remoteFile" in doc['json'] and doc['json']["connector"] and doc['json']["remoteFile"]:
                        dirpath, pull_response = pull_json_data(doc['json'])
                        if not pull_response:
                            return {}
                    resultset = run_json_validation_tests(doc['json'], container, False, dirpath=dirpath)
                    if resultset:
                        test_file = doc['name'] if 'name' in doc else ''
                        dump_output_results(resultset, container, test_file, snapshot, False)
                        for result in resultset:
                            if 'result' in result:
                                if not re.match(r'passed', result['result'], re.I):
                                    finalresult = False
                                    break
                except Exception as e:
                    dump_output_results([], container, "-", snapshot, False)
                    raise e
    else:
        logger.info('No test Documents found!')
        test_files_found = False
        finalresult = False
    # For mastertest files
    collection = config_value(DATABASE, collectiontypes[MASTERTEST])
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    # snapshots_details_map = _get_snapshot_type_map(container)
    if docs and len(docs):
        logger.info('Number of mastertest Documents: %s', len(docs))
        for doc in docs:
            test_json_data = doc['json']
            if test_json_data:
                snapshot = doc['json']['snapshot'] if 'snapshot' in doc['json'] else ''
                test_file = doc['name'] if 'name' in doc else '-'
                try:
                    if "connector" in test_json_data and "remoteFile" in test_json_data and test_json_data["connector"] and test_json_data["remoteFile"]:
                        dirpath, pull_response = pull_json_data(test_json_data)
                        if not pull_response:
                            return {}
                    snapshot_key = '%s_gen' % test_json_data['masterSnapshot']
                    mastersnapshots = defaultdict(list)
                    snapshot_data = snapshot_status[snapshot_key] if snapshot_key in snapshot_status else {}
                    for snapshot_id, mastersnapshot_id in snapshot_data.items():
                        if isinstance(mastersnapshot_id, list):
                            for msnp_id in mastersnapshot_id:
                                mastersnapshots[msnp_id].append(snapshot_id)    
                        else:
                            mastersnapshots[mastersnapshot_id].append(snapshot_id)
                    test_json_data['snapshot'] = snapshot_key
                    testsets = get_field_value_with_default(test_json_data, 'testSet', [])
                    for testset in testsets:
                        testcases = get_field_value_with_default(testset, 'cases', [])
                        testset['cases'] = _get_new_testcases(testcases, mastersnapshots)
                    # print(json.dumps(test_json_data, indent=2))
                    resultset = run_json_validation_tests(test_json_data, container, False, snapshot_status, dirpath=dirpath)
                    if resultset:
                        dump_output_results(resultset, container, test_file, snapshot, False)
                        for result in resultset:
                            if 'result' in result:
                                if not re.match(r'passed', result['result'], re.I):
                                    finalresult = False
                                    break
                except Exception as e:
                    dump_output_results([], container, test_file, snapshot, False)
                    raise e
    else:
        logger.info('No mastertest Documents found!')
        mastertest_files_found = False
        finalresult = False
    if not test_files_found and not mastertest_files_found:
        raise Exception("No complaince tests for this container: %s, add and run!", container)
    return finalresult
Example #27
def populate_json_files(args):
    dbname = config_value(DATABASE, DBNAME)
    containerId = None
    if args.container:
        container_struture_list = get_documents('structures',
                                                {'type': 'container'}, dbname)
        if not container_struture_list:
            # create container_json
            create_container_json_to_db(dbname)
            container_struture_list = get_documents('structures',
                                                    {'type': 'container'},
                                                    dbname)
        container_json = container_struture_list[0]['json']
        container_list = container_json['containers']

        filtered_list = list(
            filter(lambda i: i['name'] == args.container, container_list))
        if not filtered_list:
            # add new container if container not exist
            add_new_container(args.container, dbname)
            container_struture_list = get_documents('structures',
                                                    {'type': 'container'},
                                                    dbname)
            container_json = container_struture_list[0]['json']
            container_list = container_json['containers']
            filtered_list = list(
                filter(lambda i: i['name'] == args.container, container_list))
        containerId = filtered_list[0]['containerId']
    # return containerId

    # if args.dir:
    #     logger.info("Checking this directory: %s for json files", args.dir)
    #     json_dir = args.dir
    #     if exists_dir(args.dir):
    #         for filename in glob.glob('%s/*.json' % json_dir.replace('//', '/')):
    #             json_data = json_from_file(filename)
    #             if json_data and 'fileType' in json_data:
    #                 filetype = json_data['fileType']
    #             else:
    #                 filetype = 'structure'
    #             logger.info('Storing file:%s from directory: %s', json_dir, filename)
    #             db_record = json_record(args.container, filetype, filename, json_data)
    #             if validate_json_data(db_record['json'], db_record['type']):
    #                 insert_one_document(db_record, db_record['collection'], dbname, False)
    #                 logger.debug('DB Record: %s', json.dumps(db_record, indent=2))
    #             else:
    #                 logger.info('Invalid json for type:%s', db_record['type'])
    #             logger.info('*' * 80)
    if args.file:
        logger.info("Populating %s json file.", args.file)
        json_file = args.file
        if exists_file(json_file):
            if json_file.endswith('.json'):
                json_data = json_from_file(json_file)
                if json_data and 'fileType' in json_data:
                    filetype = json_data['fileType']
                # elif args.type:
                #     filetype = args.type
                else:
                    filetype = 'structure'
                logger.info('Storing file:%s', json_file)
                db_record = json_record(args.container, filetype, json_file,
                                        json_data)
                if validate_json_data(db_record['json'], db_record['type']):
                    docId = insert_one_document(db_record,
                                                db_record['collection'],
                                                dbname, False)
                    data = {
                        'object_id': ObjectId(docId),
                        'name': db_record['name']
                    }
                    if filetype == 'masterSnapshot':
                        save_container_object(args.container,
                                              'masterSnapshots', data, dbname)
                    elif filetype == 'mastertest':
                        save_container_object(args.container, 'masterTests',
                                              data, dbname)
                    elif filetype == 'snapshot':
                        save_container_object(args.container, 'Snapshots',
                                              data, dbname)
                    elif filetype == 'test':
                        save_container_object(args.container, 'Tests', data,
                                              dbname)
                    logger.debug('DB Record: %s',
                                 json.dumps(db_record, indent=2))
                else:
                    logger.info('Invalid json for type:%s', db_record['type'])
                logger.info('*' * 80)
            elif json_file.endswith('.rego'):
                with open(json_file) as f:
                    file_content = f.read()
                    content_type = 'application/octet-stream'
                    save_container_to_db(args.container, containerId,
                                         json_file, content_type, file_content,
                                         dbname)
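Files are routed by their 'fileType' field; a hypothetical snapshot file that would be stored and registered under the container's Snapshots list:

# Hypothetical input JSON file for populate_json_files(); the fileType field
# selects which container list the stored record is attached to.
snapshot_file = {
    "fileType": "snapshot",
    "snapshots": [
        {"type": "azure", "nodes": []}
    ]
}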
Example #28
def save_container_object(container_name, object_type, data, dbname):
    """
    container_name : Name of container in which new object will store
    object_type: Type of the object must be in masterSnapshots, Snapshots, masterTests or Tests
    data: container
        1. object_id: table Id of object
        2. name: Display name of that object
    """

    container_struture_list = get_documents('structures',
                                            {'type': 'container'}, dbname)
    if not container_struture_list:
        # create container_json
        create_container_json_to_db(dbname)
        container_struture_list = get_documents('structures',
                                                {'type': 'container'}, dbname)
    container_json = container_struture_list[0]['json']
    container_list = container_json['containers']

    filtered_list = list(
        filter(lambda i: i['name'] == container_name, container_list))
    if not filtered_list:
        # add new container if container not exist
        add_new_container(container_name, dbname)
        container_struture_list = get_documents('structures',
                                                {'type': 'container'}, dbname)
        container_json = container_struture_list[0]['json']
        container_list = container_json['containers']
        filtered_list = list(
            filter(lambda i: i['name'] == container_name, container_list))

    container = filtered_list[0]

    if object_type == 'others':
        exist = False
        for obj in container[object_type]:
            if obj['name'] == data['name']:
                exist = True

        if not exist:
            container[object_type].append({
                'id': data['object_id'],
                'name': data['name']
            })
            container_struture_list[0]['json'] = container_json
            update_one_document(container_struture_list[0],
                                container_struture_list[0]['collection'],
                                dbname)
    else:
        exist = False
        for obj in container[object_type]:
            if obj['id'] == data['object_id']:
                exist = True

        if not exist:
            container[object_type].append({
                'id': data['object_id'],
                'name': data['name']
            })
            container_struture_list[0]['json'] = container_json
            update_one_document(container_struture_list[0],
                                container_struture_list[0]['collection'],
                                dbname)
Example #29
def run_container_validation_tests_database(container, snapshot_status=None):
    """ Get the test files from the database"""
    dbname = config_value(DATABASE, DBNAME)
    # For test files
    collection = config_value(DATABASE, collectiontypes[TEST])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    finalresult = True
    if docs and len(docs):
        logger.info('Number of test Documents: %s', len(docs))
        for doc in docs:
            if doc['json']:
                resultset = run_json_validation_tests(doc['json'], container,
                                                      False)
                if resultset:
                    snapshot = doc['json']['snapshot'] if 'snapshot' in doc[
                        'json'] else ''
                    test_file = doc['name'] if 'name' in doc else ''
                    dump_output_results(resultset, container, test_file,
                                        snapshot, False)
                    for result in resultset:
                        if 'result' in result:
                            if not re.match(r'passed', result['result'], re.I):
                                finalresult = False
                                break
    else:
        logger.info('No test Documents found!')
        finalresult = False
    # For mastertest files
    collection = config_value(DATABASE, collectiontypes[MASTERTEST])
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    # snapshots_details_map = _get_snapshot_type_map(container)
    if docs and len(docs):
        logger.info('Number of mastertest Documents: %s', len(docs))
        for doc in docs:
            test_json_data = doc['json']
            if test_json_data:
                snapshot_key = '%s_gen' % test_json_data['masterSnapshot']
                mastersnapshots = defaultdict(list)
                snapshot_data = snapshot_status[
                    snapshot_key] if snapshot_key in snapshot_status else {}
                for snapshot_id, mastersnapshot_id in snapshot_data.items():
                    if isinstance(mastersnapshot_id, list):
                        for msnp_id in mastersnapshot_id:
                            mastersnapshots[msnp_id].append(snapshot_id)
                    else:
                        mastersnapshots[mastersnapshot_id].append(snapshot_id)
                test_json_data['snapshot'] = snapshot_key
                testsets = get_field_value_with_default(
                    test_json_data, 'testSet', [])
                for testset in testsets:
                    testcases = get_field_value_with_default(
                        testset, 'cases', [])
                    testset['cases'] = _get_new_testcases(
                        testcases, mastersnapshots)
                # print(json.dumps(test_json_data, indent=2))
                resultset = run_json_validation_tests(test_json_data,
                                                      container, False,
                                                      snapshot_status)
                if resultset:
                    snapshot = doc['json']['snapshot'] if 'snapshot' in doc[
                        'json'] else ''
                    test_file = doc['name'] if 'name' in doc else ''
                    dump_output_results(resultset, container, test_file,
                                        snapshot, False)
                    for result in resultset:
                        if 'result' in result:
                            if not re.match(r'passed', result['result'], re.I):
                                finalresult = False
                                break
    else:
        logger.info('No mastertest Documents found!')
        finalresult = False
    return finalresult
Example #30
def generate_container_mastersnapshots_database(container):
    """
    Get the mastersnapshot files from the container with storage system as database.
    The table or collection and database is configured in the config.ini, for the default
    location configuration is "validator" database with "mastersnapshots" as its collections.
    """
    snapshots_status = {}
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[MASTERSNAPSHOT])
    snp_collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    try:
        if docs and len(docs):
            logger.info('Number of mastersnapshot Documents: %s', len(docs))
            snapshots = mastersnapshots_used_in_mastertests_database(container)
            populated = []
            for doc in docs:
                if doc['json']:
                    snapshot = doc['name']
                    if "connector" in doc['json'] and "remoteFile" in doc[
                            'json'] and doc['json']["connector"] and doc[
                                'json']["remoteFile"]:
                        _, pull_response = pull_json_data(doc['json'])
                        if not pull_response:
                            logger.info(
                                "Failed to populate master snapshot json from the git repository"
                            )
                            break

                    if snapshot in snapshots:
                        if snapshot not in populated:
                            snp_collection = config_value(
                                DATABASE, collectiontypes[SNAPSHOT])
                            snp_name = '%s_gen' % snapshot
                            snp_qry = {
                                'container': container,
                                'name': snp_name
                            }
                            snp_sort = [sort_field('timestamp', False)]
                            snp_docs = get_documents(snp_collection,
                                                     dbname=dbname,
                                                     sort=snp_sort,
                                                     query=snp_qry,
                                                     _id=True)
                            snp_json_data = {}
                            if snp_docs and len(snp_docs):
                                logger.info('Number of snapshot Documents: %s',
                                            len(snp_docs))
                                snp_json_data = snp_docs[0]
                            # Take the mastersnapshot and populate the mastersnapshot
                            snapshot_file_data = generate_mastersnapshots_from_json(
                                doc['json'], snp_json_data)
                            # Insert or update the new generated snapshot document with name='*_gen' and same container name.
                            generate_snapshot(doc['json'], snapshot_file_data)
                            if snp_json_data:
                                set_snapshot_activate_and_validate_data(
                                    doc['json'], snp_json_data['json'])
                                snp_json_data['json'] = doc['json']
                                snp_json_data["timestamp"] = int(time.time() *
                                                                 1000)
                                update_one_document(
                                    snp_json_data, snp_json_data['collection'],
                                    dbname)
                            else:
                                db_record = {
                                    "timestamp": int(time.time() * 1000),
                                    "container": container,
                                    "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
                                    "type": "snapshot",
                                    "name": snp_name,
                                    "collection": "snapshots",
                                    "json": doc['json']
                                }
                                insert_one_document(db_record,
                                                    db_record['collection'],
                                                    dbname, False)
                            populated.append(snapshot)
                            snapshots_status[snapshot] = snapshot_file_data
                    else:
                        logger.error("No master testcase found for %s", snapshot)
    except Exception as e:
        generate_crawler_run_output(container)
        raise e
    generate_crawler_run_output(container)
    return snapshots_status