Example #1
def get_google_data(snapshot_source):
    """
    Fetch the Google source object from the database or the filesystem.
    The default database name is 'validator' and the collection is
    'structures'; on the filesystem the 'structures' are read from
    $SOLUTIONDIR/realm/<structure>.json
    """
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Google structure Documents: %d', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        google_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Google source: %s', google_source)
        if exists_file(google_source):
            sub_data = json_from_file(google_source)

    if not sub_data:
        logger.error("Google connector file %s does not exist, or it does not contain valid JSON.", snapshot_source)
    return sub_data
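A minimal usage sketch for the function above; the snapshot source name is a made-up placeholder and the function is assumed to be in scope:

# Resolves the connector either from the 'structures' collection or from
# <test_json_dir>/../google_structure.json; returns {} when nothing is found.
google_config = get_google_data('google_structure')
if not google_config:
    raise ValueError('No Google connector configuration found')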
Example #2
    def get_snaphotid_doc_old(self, sid, container):
        doc = None
        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            fname = '%s/%s/snapshots/%s' % (json_dir, container, sid)
            if exists_file(fname):
                json_data = json_from_file(fname)
                if json_data and 'json' in json_data:
                    doc = json_data['json']
                    snapshot = {
                        'id': json_data['snapshotId'],
                        'structure': json_data['structure'],
                        'reference': json_data['reference'],
                        'source': json_data['source'],
                        'collection': json_data['collection'],
                        'type': json_data.get("node", {}).get('type'),
                        'region' : json_data.get('region', "")
                    }
                    if 'paths' in json_data:
                        snapshot['paths'] = json_data['paths']
                    else:
                        snapshot['path'] = json_data['path']

                    self.snapshots.append(snapshot)
        return doc
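For context, a hedged sketch of calling this method; the instance name, snapshot id, and container are placeholders:

# 'snapshotter' stands in for an instance of the class that defines the method above.
doc = snapshotter.get_snaphotid_doc_old('SNAPSHOT_1', 'container1')
if doc is None:
    print('snapshot file missing or it carries no "json" field')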
Example #3
def get_azure_data(snapshot_source):
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        azure_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Azure source: %s', azure_source)
        if exists_file(azure_source):
            sub_data = json_from_file(azure_source)
    return sub_data
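The filesystem branch above resolves the connector file one level above the test JSON directory. A small, self-contained illustration of that path arithmetic (the directory value is a placeholder for get_test_json_dir()):

import os

json_test_dir = '/data/realm/validation'  # placeholder path
azure_source = '%s/../%s' % (json_test_dir, 'azure_structure.json')
print(os.path.normpath(azure_source))     # -> /data/realm/azure_structure.json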
Example #4
    def get_connector_data(self):
        """ get connector data from snapshot """
        connector_data = {}
        if self.snapshots:
            isdb_fetch = get_dbtests()
            if isdb_fetch:
                connectors = get_documents(
                    "structures",
                    query={
                        "name" : self.snapshots[0].get("source"),
                        "type" : "structure",
                        "container": self.container
                    },
                    dbname=self.dbname,
                    limit=1
                )
                connector_data = connectors[0].get("json", {}) if connectors else {}
            else:
                json_test_dir = get_test_json_dir()
                snapshot_source = self.snapshots[0].get("source")
                file_name = '%s.json' % snapshot_source if snapshot_source and not \
                    snapshot_source.endswith('.json') else snapshot_source
                connector_path = '%s/../%s' % (json_test_dir, file_name)
                if exists_file(connector_path):
                    connector_data = json_from_file(connector_path)

        return connector_data
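A hedged usage sketch; 'snapshot_obj' is a placeholder for an instance of the class above with self.snapshots already populated:

# Returns {} when no snapshots are loaded or no matching structure document/file exists.
connector = snapshot_obj.get_connector_data()
if not connector:
    print('No connector data found for the first snapshot source')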
Example #5
def make_snapshots_dir(container):
    snapshot_dir = None
    json_dir = '%s%s' % (get_test_json_dir(), container)
    if exists_dir(json_dir):
        snapshot_dir = '%s/snapshots' % json_dir
        mkdir_path(snapshot_dir)
    return snapshot_dir
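A short sketch of how the returned directory might be used to persist a snapshot file; the container name and payload are placeholders:

import json

snapshot_dir = make_snapshots_dir('container1')  # hypothetical container
if snapshot_dir:
    with open('%s/SNAPSHOT_1' % snapshot_dir, 'w') as snap_file:
        json.dump({'snapshotId': 'SNAPSHOT_1', 'json': {}}, snap_file, indent=2)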
Example #6
def test_get_test_json_dir():
    test_dir = os.getenv('TESTDIR', '/realm/validation/')
    val_dir = '%s/%s' % (TESTSDIR, test_dir)
    os.chdir(val_dir)
    tests_curdir = os.getcwd()
    os.chdir(get_test_json_dir())
    prod_curdir = os.getcwd()
    assert tests_curdir == prod_curdir
Example #7
def get_container_dir(container, tabs=1):
    """Translate container name to container directory"""
    json_test_dir = get_test_json_dir()
    logger.info('%s LOCATION: %s', '\t' * tabs, json_test_dir)
    container_dir = '%s/%s' % (json_test_dir, container)
    container_dir = container_dir.replace('//', '/')
    logger.info('%s COLLECTION: %s', '\t' * tabs, container_dir)
    return container_dir
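A quick hedged usage sketch; the container name is a placeholder, and tabs only controls how far the log lines are indented:

container_dir = get_container_dir('container1', tabs=2)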
Example #8
def get_rego_rule_filename(rego_file, container):
    rego_file_name = None
    json_dir = get_test_json_dir()
    if exists_dir(json_dir):
        rego_file_name = '%s/%s/%s' % (json_dir, container, rego_file)
        if not exists_file(rego_file_name):
            rego_file_name = None
    return rego_file_name
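A hedged usage sketch for the helper above; the rego file and container names are placeholders:

rego_path = get_rego_rule_filename('rule.rego', 'container1')
if rego_path is None:
    print('rego file not found under the container directory')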
Example #9
    def get_snaphotid_doc(self, sid):
        doc = None
        isdb_fetch = get_dbtests()
        if isdb_fetch:
            dbname = self.dbname
            coll = self.collection_data[sid] if sid in self.collection_data else COLLECTION
            docs = get_documents(coll, {'snapshotId': sid}, dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)], limit=1)
            logger.debug('Number of Snapshot Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                snapshot = {
                    'id': docs[0]['snapshotId'],
                    'structure': docs[0]['structure'],
                    'reference': docs[0]['reference'],
                    'source': docs[0]['source'],
                    'collection': docs[0]['collection'],
                    'type': docs[0].get("node", {}).get('type'),
                    'region' : docs[0].get('region', "")
                }
                if 'paths' in docs[0]:
                    snapshot['paths'] = docs[0]['paths']
                else:
                    snapshot['path'] = docs[0]['path']
                self.snapshots.append(snapshot)
        else:
            json_dir = '%s%s' % (get_test_json_dir(), self.container)
            if exists_dir(json_dir):
                fname = '%s/snapshots/%s' % (json_dir, sid)
                if exists_file(fname):
                    json_data = json_from_file(fname)
                    if json_data and 'json' in json_data:
                        doc = json_data['json']
                        snapshot_val = {
                            'id': json_data['snapshotId'],
                            'structure': json_data['structure'],
                            'reference': json_data['reference'],
                            'source': json_data['source'],
                            'collection': json_data['collection'],
                            'type': json_data.get("node", {}).get('type'),
                            'region' : json_data.get('region', "")
                        }
                        if 'paths' in json_data:
                            snapshot_val['paths'] = json_data['paths']
                        else:
                            snapshot_val['path'] = json_data['path']

                        singletest = get_from_currentdata(SINGLETEST)
                        if singletest:
                            snapshot_val['json'] = doc
                        self.snapshots.append(snapshot_val)
        return doc
Example #10
    def get_structure_data(self, snapshot_object):
        """ Get the structure from the filesystem."""
        structure_data = {}
        json_test_dir = get_test_json_dir()
        snapshot_source = get_field_value(snapshot_object, "source")
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        custom_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('%s structure file is %s', Snapshot.LOGPREFIX,
                    custom_source)
        if exists_file(custom_source):
            structure_data = json_from_file(custom_source)
        return structure_data
Example #11
def get_custom_data(snapshot_source):
    """
    Get source JSON data
    """
    sub_data = {}
    json_test_dir = get_test_json_dir()
    file_name = '%s.json' % snapshot_source if snapshot_source and not \
        snapshot_source.endswith('.json') else snapshot_source
    custom_source = '%s/../%s' % (json_test_dir, file_name)
    logger.info('Custom source: %s', custom_source)
    if exists_file(custom_source):
        sub_data = json_from_file(custom_source)
    return sub_data
Example #12
    def get_snaphotid_doc(self, sid):
        doc = None
        isdb_fetch = get_dbtests()
        if isdb_fetch:
            dbname = self.kwargs['dbname']
            coll = self.kwargs['snapshots'][sid] if sid in self.kwargs[
                'snapshots'] else COLLECTION
            docs = get_documents(coll, {'snapshotId': sid},
                                 dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)],
                                 limit=1)
            logger.debug('Number of Snapshot Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                self.snapshots.append({
                    'id': docs[0]['snapshotId'],
                    'path': docs[0]['path'],
                    'structure': docs[0]['structure'],
                    'reference': docs[0]['reference'],
                    'source': docs[0]['source']
                })
        else:
            json_dir = '%s%s' % (get_test_json_dir(), self.kwargs['container'])
            if exists_dir(json_dir):
                fname = '%s/snapshots/%s' % (json_dir, sid)
                if exists_file(fname):
                    json_data = json_from_file(fname)
                    if json_data and 'json' in json_data:
                        doc = json_data['json']
                        # self.snapshots.append({
                        #     'id': json_data['snapshotId'],
                        #     'path': json_data['path'],
                        #     'structure': json_data['structure'],
                        #     'reference': json_data['reference'],
                        #     'source': json_data['source']
                        # })
                        snapshot_val = {
                            'id': json_data['snapshotId'],
                            'path': json_data['path'],
                            'structure': json_data['structure'],
                            'reference': json_data['reference'],
                            'source': json_data['source']
                        }
                        singletest = get_from_currentdata(SINGLETEST)
                        if singletest:
                            snapshot_val['json'] = doc
                        self.snapshots.append(snapshot_val)
        return doc
    def get_snaphotid_doc_old(self, sid, container):
        doc = None
        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            fname = '%s/%s/snapshots/%s' % (json_dir, container, sid)
            if exists_file(fname):
                json_data = json_from_file(fname)
                if json_data and 'json' in json_data:
                    doc = json_data['json']
                    self.snapshots.append({
                        'id': json_data['snapshotId'],
                        'path': json_data['path'],
                        'structure': json_data['structure'],
                        'reference': json_data['reference'],
                        'source': json_data['source']
                    })
        return doc
Example #14
def get_snapshot_id_to_collection_dict(snapshot_file,
                                       container,
                                       dbname,
                                       filesystem=True):
    snapshot_data = {}
    snapshot_json_data = {}
    if filesystem:
        file_name = '%s.json' % snapshot_file if snapshot_file and not \
            snapshot_file.endswith('.json') else snapshot_file
        snapshot_file = '%s/%s/%s' % (get_test_json_dir(), container,
                                      file_name)
        snapshot_json_data = json_from_file(snapshot_file)
    else:
        parts = snapshot_file.split('.')
        collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
        qry = {'container': container, 'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            snapshot_json_data = docs[0]['json']
    snapshots = get_field_value(snapshot_json_data, 'snapshots')
    if not snapshots:
        logger.info("Snapshot does not contain snapshots...")
        return snapshot_data
    for snapshot in snapshots:
        nodes = get_field_value(snapshot, 'nodes')
        if not nodes:
            logger.info("No nodes in snapshot, continuing to next!...")
            continue
        for node in nodes:
            sid = get_field_value(node, 'snapshotId')
            coll = node['collection'] if 'collection' in node else COLLECTION
            collection = coll.replace('.', '').lower()
            snapshot_data[sid] = collection
            if get_dbtests():
                create_indexes(collection, dbname,
                               [('timestamp', pymongo.TEXT)])
    return snapshot_data
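A hedged sketch of calling the function above in filesystem mode; all names are placeholders. Each key in the result is a snapshotId and each value is that node's collection name, lowercased with dots removed (e.g. 'Microsoft.Compute' becomes 'microsoftcompute'):

snapshot_map = get_snapshot_id_to_collection_dict('snapshot.json', 'container1',
                                                  'validator', filesystem=True)
for sid, coll in snapshot_map.items():
    print(sid, '->', coll)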
Example #15
    def rego_rule_filename(self, rego_file, container):
        rego_file_name = None
        if 'dirpath' in self.testcase and self.testcase['dirpath']:
            rego_file_name = '%s/%s' % (self.testcase['dirpath'], rego_file)
            if not exists_file(rego_file_name):
                rego_file_name = None
            return rego_file_name
        isdb_fetch = get_dbtests()
        # DB and SNAPSHOT give the same value here, so for SNAPSHOT we check the
        # database first and fall back to the file path if the file is not there.

        if isdb_fetch:
            dbname = self.dbname
            coll = 'structures'
            docs = get_documents(coll, { 'type': 'others', "container" : container}, dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)], limit=1)
            # print('Number of other Documents: %s' % len(docs))
            logger.debug('Number of other Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                if doc and 'file' in doc and isinstance(doc['file'], list):
                    for file_doc in doc['file']:
                        name = get_field_value(file_doc, 'name')
                        # print(name, rego_file)
                        if name == rego_file:
                            content = get_field_value(file_doc, 'container_file')
                            if content:
                                rego_file_name = '/tmp/%s' % rego_file
                                with open(rego_file_name, 'w', encoding="utf-8") as rego_fp:
                                    rego_fp.write(content)
                                return rego_file_name
                # print(doc)

        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            rego_file_name = '%s/%s/%s' % (json_dir, container, rego_file)
            if not exists_file(rego_file_name):
                rego_file_name = None
        return rego_file_name
Example #16
def get_custom_data(snapshot_source, tabs=2):
    sub_data = {}
    if json_source():
        container = get_from_currentdata('container')
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0], 'container' : container }
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Custom Documents: %d', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        custom_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('\t\tCUSTOM CONNECTOR: %s ', custom_source)
        # logger.info('Custom source: %s', custom_source)
        if exists_file(custom_source):
            sub_data = json_from_file(custom_source)
    return sub_data
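A final hedged usage sketch; the snapshot source name is a placeholder:

# Resolves from the database when json_source() is true, otherwise from
# <test_json_dir>/../gitConnector.json; returns {} when nothing is found.
custom_config = get_custom_data('gitConnector', tabs=2)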