def populate_container_snapshots_database(container):
    """
    Get the snapshot files from the container with storage system as database.
    The table or collection and database is configured in the config.ini, for the default
    location configuration is "validator" database with "snapshots" as its collections.
    """
    snapshots_status = {}
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, _id=True)
    if docs and len(docs):
        logger.info('Number of Snapshot Documents: %s', len(docs))
        snapshots = container_snapshots_database(container)
        populated = []
        for doc in docs:
            if doc['json']:
                snapshot = doc['name']
                if snapshot in snapshots and snapshot not in populated:
                    # Take the snapshot and record whether it was populated successfully.
                    # The status is passed back to the validation tests, so that only tests
                    # for snapshots that were successfully fetched are executed.
                    snapshot_file_data = populate_snapshots_from_json(doc['json'], container)
                    update_one_document(doc, collection, dbname)
                    populated.append(snapshot)
                    snapshots_status[snapshot] = snapshot_file_data
    return snapshots_status
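
A brief usage sketch, assuming it runs in the same module as the function above and that config.ini is set up as described in the docstring; the container name is illustrative:

# Hedged usage sketch: 'mycontainer' is an illustrative container name.
status = populate_container_snapshots_database('mycontainer')
for name, file_data in status.items():
    # Each entry maps a snapshot name to the populate result for that snapshot.
    logger.info('Snapshot %s populated: %s', name, file_data)
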
Example 2
def add_new_container(container_name, dbname):
    container_struture_list = get_documents('structures',
                                            {'type': 'container'}, dbname)
    container_struture = container_struture_list[0]
    container_json = container_struture['json']
    container_list = container_json['containers']

    filtered_list = list(
        filter(lambda i: i['name'] == container_name, container_list))
    if filtered_list:
        return

    if container_list:
        # The next id is one past the most recently added container.
        containerId = container_list[-1]['containerId'] + 1
    else:
        containerId = 1

    new_container = {
        'containerId': containerId,
        'status': 'active',
        'name': container_name,
        'masterSnapshots': [],
        'Snapshots': [],
        'masterTests': [],
        'Tests': [],
        'others': []
    }

    container_list.append(new_container)
    container_json['containers'] = container_list
    container_struture['json'] = container_json
    update_one_document(container_struture, container_struture['collection'],
                        dbname)
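
A minimal usage sketch, assuming the same module scope and an existing 'structures' document of type 'container'; the container and database names are illustrative:

# Hedged sketch: 'crawler-tests' and 'validator' are illustrative values.
# The call is a no-op if a container with that name already exists.
add_new_container('crawler-tests', 'validator')
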
Example 3
    def get_snapshots(self):
        """Populate the used snapshots in test and mastertest for this container."""
        snapshots_status = {}
        docs = get_documents(self.collection(SNAPSHOT),
                             dbname=self.dbname,
                             sort=self.sort,
                             query=self.qry,
                             _id=True)
        if docs and len(docs):
            logger.info('%s fetched %s documents: %s',
                        Snapshot.LOGPREFIX, SNAPSHOT, len(docs))
            used_snapshots = self.get_used_snapshots_in_tests()
            if not used_snapshots:
                raise SnapshotsException(
                    "No snapshots for this container: %s, add and run again!..."
                    % self.container)
            populated = []
            for doc in docs:
                if doc['json']:
                    snapshot = doc['name']
                    try:
                        pull_response, git_connector_json = self.check_and_fetch_remote_snapshots(
                            doc['json'])
                        if git_connector_json and not pull_response:
                            logger.info('%s Fetching remote snapshots failed.',
                                        Snapshot.LOGPREFIX)
                            break

                        if snapshot in used_snapshots and snapshot not in populated:
                            # Take the snapshot and record whether it was populated successfully.
                            # The status is passed back to the validation tests, so that only
                            # tests for snapshots that were successfully fetched are executed.
                            snapshot_file_data = self.populate_snapshots(
                                doc['json'])

                            if not git_connector_json:
                                update_one_document(doc,
                                                    self.collection(SNAPSHOT),
                                                    self.dbname)

                            populated.append(snapshot)
                            snapshots_status[snapshot] = snapshot_file_data
                    except Exception as e:
                        dump_output_results([], self.container, "-", snapshot,
                                            False)
                        raise e
        if not snapshots_status:
            raise SnapshotsException(
                "No snapshots for this container: %s, add and run again!..." %
                self.container)
        return snapshots_status
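
A small wrapper sketch showing how the result and failure mode might be handled, assuming the same module scope; run_snapshot_population and its argument are hypothetical names, and the constructor of the class above is not shown:

def run_snapshot_population(snapshot_obj):
    # Hedged sketch: snapshot_obj stands for an already constructed instance
    # of the class that defines get_snapshots().
    try:
        return snapshot_obj.get_snapshots()
    except SnapshotsException as ex:
        # Raised when the container has no usable snapshots.
        logger.error('No snapshots to process: %s', ex)
        return {}
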
Example 4
def generate_container_mastersnapshots_database(container):
    """
    Get the mastersnapshot files from the container with storage system as database.
    The table or collection and database is configured in the config.ini, for the default
    location configuration is "validator" database with "mastersnapshots" as its collections.
    """
    snapshots_status = {}
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[MASTERSNAPSHOT])
    snp_collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    try:
        if docs and len(docs):
            logger.info('Number of mastersnapshot Documents: %s', len(docs))
            snapshots = mastersnapshots_used_in_mastertests_database(container)
            populated = []
            for doc in docs:
                if doc['json']:
                    snapshot = doc['name']
                    if "connector" in doc['json'] and "remoteFile" in doc[
                            'json'] and doc['json']["connector"] and doc[
                                'json']["remoteFile"]:
                        _, pull_response = pull_json_data(doc['json'])
                        if not pull_response:
                            logger.info(
                                "Failed to populate master snapshot json from the git repository"
                            )
                            break

                    if snapshot in snapshots:
                        if snapshot not in populated:
                            snp_name = '%s_gen' % snapshot
                            snp_qry = {
                                'container': container,
                                'name': snp_name
                            }
                            snp_sort = [sort_field('timestamp', False)]
                            snp_docs = get_documents(snp_collection,
                                                     dbname=dbname,
                                                     sort=snp_sort,
                                                     query=snp_qry,
                                                     _id=True)
                            snp_json_data = {}
                            if snp_docs and len(snp_docs):
                                logger.info('Number of snapshot Documents: %s',
                                            len(snp_docs))
                                snp_json_data = snp_docs[0]
                            # Generate snapshots from the mastersnapshot definition.
                            snapshot_file_data = generate_mastersnapshots_from_json(
                                doc['json'], snp_json_data)
                            # Insert or update the new generated snapshot document with name='*_gen' and same container name.
                            generate_snapshot(doc['json'], snapshot_file_data)
                            if snp_json_data:
                                set_snapshot_activate_and_validate_data(
                                    doc['json'], snp_json_data['json'])
                                snp_json_data['json'] = doc['json']
                                snp_json_data["timestamp"] = int(time.time() *
                                                                 1000)
                                update_one_document(
                                    snp_json_data, snp_json_data['collection'],
                                    dbname)
                            else:
                                db_record = {
                                    "timestamp": int(time.time() * 1000),
                                    "container": container,
                                    "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
                                    "type": "snapshot",
                                    "name": snp_name,
                                    "collection": "snapshots",
                                    "json": doc['json']
                                }
                                insert_one_document(db_record,
                                                    db_record['collection'],
                                                    dbname, False)
                            populated.append(snapshot)
                            snapshots_status[snapshot] = snapshot_file_data
                    else:
                        logger.error("No master testcase found for %s " %
                                     snapshot)
    except Exception as e:
        generate_crawler_run_output(container)
        raise e
    generate_crawler_run_output(container)
    return snapshots_status
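
A usage sketch, assuming the same module scope; the container name is illustrative:

# Hedged sketch: 'mycontainer' is illustrative. Generates '*_gen' snapshot
# documents for each mastersnapshot referenced by the container's master tests.
gen_status = generate_container_mastersnapshots_database('mycontainer')
for master_name, file_data in gen_status.items():
    logger.info('Generated snapshots from %s: %s', master_name, file_data)
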
Example 5
def save_container_object(container_name, object_type, data, dbname):
    """
    container_name : Name of container in which new object will store
    object_type: Type of the object must be in masterSnapshots, Snapshots, masterTests or Tests
    data: container
        1. object_id: table Id of object
        2. name: Display name of that object
    """

    container_struture_list = get_documents('structures',
                                            {'type': 'container'}, dbname)
    if not container_struture_list:
        # create container_json
        create_container_json_to_db(dbname)
        container_struture_list = get_documents('structures',
                                                {'type': 'container'}, dbname)
    container_json = container_struture_list[0]['json']
    container_list = container_json['containers']

    filtered_list = list(
        filter(lambda i: i['name'] == container_name, container_list))
    if not filtered_list:
        # add new container if container not exist
        add_new_container(container_name, dbname)
        container_struture_list = get_documents('structures',
                                                {'type': 'container'}, dbname)
        container_json = container_struture_list[0]['json']
        container_list = container_json['containers']
        filtered_list = list(
            filter(lambda i: i['name'] == container_name, container_list))

    container = filtered_list[0]

    # 'others' entries are de-duplicated by name; all other object types by id.
    if object_type == 'others':
        exist = any(obj['name'] == data['name'] for obj in container[object_type])
    else:
        exist = any(obj['id'] == data['object_id'] for obj in container[object_type])

    if not exist:
        container[object_type].append({
            'id': data['object_id'],
            'name': data['name']
        })
        container_struture_list[0]['json'] = container_json
        update_one_document(container_struture_list[0],
                            container_struture_list[0]['collection'],
                            dbname)
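
A sketch of the expected data shape, assuming the same module scope; the id, names and database are illustrative:

# Hedged sketch: all values are illustrative. Registers an already stored test
# document under the container's 'Tests' list, skipping duplicates by id.
data = {'object_id': ObjectId('5f43c1d2e0a9b1c2d3e4f5a6'), 'name': 'crawler test'}
save_container_object('mycontainer', 'Tests', data, 'validator')
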
Example 6
def save_container_to_db(container_name, container_id, file_name, content_type,
                         file_content, dbname):
    file_content_list = []
    structure_model_obj = get_documents('structures',
                                        {'json.containerId': container_id, 'type': 'others'},
                                        dbname,
                                        _id=True)

    file_obj = {
        'name': file_name,
        'container_file': file_content,
        'content_type': content_type
    }

    if structure_model_obj:
        # Update the stored file in place when an entry with this file name
        # already exists, otherwise append it as a new entry.
        exist = False
        for file_data in structure_model_obj[0]['json']['file']:
            if file_name in file_data.values():
                file_data['container_file'] = file_content
                exist = True

        if not exist:
            structure_model_obj[0]['json']['file'].append(file_obj)
        update_one_document(structure_model_obj[0], 'structures', dbname)
        data = {'object_id': structure_model_obj[0]['_id'], 'name': file_name}
    else:
        # No structure document exists for this container yet; create one that
        # holds the uploaded file.
        file_content_list.append(file_obj)

        container_json = {
            'name': container_name,
            'containerId': container_id,
            'file': file_content_list
        }
        structure_model_obj = {
            'checksum': '',
            'collection': 'structures',
            'container': container_name,
            'name': 'file_upload',
            'timestamp': int(datetime.datetime.now().timestamp() * 1000),
            'type': 'others',
            'json': container_json
        }
        docId = insert_one_document(structure_model_obj,
                                    structure_model_obj['collection'], dbname,
                                    False)
        data = {'object_id': ObjectId(docId), 'name': file_name}

    save_container_object(container_name, 'others', data, dbname)
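
A usage sketch, assuming the same module scope; names, ids, content type and file content are all illustrative:

# Hedged sketch: all argument values are illustrative. Stores an uploaded file
# under the container's 'others' structure document and registers it on the container.
save_container_to_db('mycontainer', 1, 'connector.json', 'application/json',
                     '{"fileType": "structure"}', 'validator')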