Example #1
def get_orig(userId, bucket, archiveid):
    global archive_initialized, data_volume, use_db

    if not archive_initialized:
        raise Exception("archive not initialized")

    ret = {}

    if use_db:
        try:
            with db.session_scope() as dbsession:
                result = db_archivedocument.get(userId,
                                                bucket,
                                                archiveid,
                                                session=dbsession)
            if result and 'jsondata' in result:
                ret = json.loads(result['jsondata'])
                del result
            else:
                raise Exception("no archive record JSON data found in DB")
        except Exception as err:
            logger.debug("cannot get data: exception - " + str(err))
            raise err
    else:
        try:
            with open(os.path.join(data_volume, bucket, archiveid + ".json"),
                      'r') as FH:
                ret = json.loads(FH.read())
        except Exception as err:
            logger.debug("cannot get data: exception - " + str(err))
            raise err

    return ret
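
A minimal, hypothetical caller-side sketch (the user, bucket, and document id values are illustrative only; initialize() from Example #5 is assumed to have run during service startup):

initialize()
document = get_orig(userId='admin',
                    bucket='analysis_data',
                    archiveid='mydocument')   # returns the decoded JSON document as a dict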
Example #2
def get(userId, bucket, archiveid):
    global archive_initialized, data_volume, use_db

    if not archive_initialized:
        raise Exception("archive not initialized")

    ret = {}

    try:
        with db.session_scope() as dbsession:
            result = db_archivedocument.get(userId,
                                            bucket,
                                            archiveid,
                                            session=dbsession)
        # the DB record is always consulted; the document body comes either from the
        # record's jsondata column (db driver) or from the archive file on disk
        if result:
            if use_db:
                if 'jsondata' in result:
                    ret = json.loads(result['jsondata'])
                    del result
                else:
                    raise Exception("no archive record JSON data found in DB")
            else:
                ret = read_archive_file(userId, bucket, archiveid)

    except Exception as err:
        logger.debug("cannot get data: exception - " + str(err))
        raise err

    return ret
Example #3
def db_upgrade_003_004():
    engine = anchore_engine.db.entities.common.get_engine()

    from anchore_engine.db import db_catalog_image, db_archivedocument, session_scope
    import anchore_engine.common

    newcolumns = [
        Column('arch', String, primary_key=False),
        Column('distro', String, primary_key=False),
        Column('distro_version', String, primary_key=False),
        Column('dockerfile_mode', String, primary_key=False),
        Column('image_size', BigInteger, primary_key=False),
        Column('layer_count', Integer, primary_key=False)
    ]
    for column in newcolumns:
        try:
            table_name = 'catalog_image'
            cn = column.compile(dialect=engine.dialect)
            ct = column.type.compile(engine.dialect)
            engine.execute('ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s' % (table_name, cn, ct))
        except Exception as e:
            log.err('failed to perform DB upgrade on catalog_image adding column - exception: {}'.format(str(e)))
            raise Exception('failed to perform DB upgrade on catalog_image adding column - exception: {}'.format(str(e)))

    with session_scope() as dbsession:
        image_records = db_catalog_image.get_all(session=dbsession)

    for image_record in image_records:
        userId = image_record['userId']
        imageDigest = image_record['imageDigest']

        log.err("upgrade: processing image " + str(imageDigest) + " : " + str(userId))
        try:

            # get the image analysis data from archive
            image_data = None
            with session_scope() as dbsession:
                result = db_archivedocument.get(userId, 'analysis_data', imageDigest, session=dbsession)
            if result and 'jsondata' in result:
                image_data = json.loads(result['jsondata'])['document']
                
            if image_data:
                # update the record and store
                anchore_engine.common.helpers.update_image_record_with_analysis_data(image_record, image_data)
                with session_scope() as dbsession:
                    db_catalog_image.update_record(image_record, session=dbsession)
            else:
                raise Exception("upgrade: no analysis data found in archive for image: " + str(imageDigest))
        except Exception as err:
            log.err("upgrade: failed to populate new columns with existing data for image (" + str(imageDigest) + "), record may be incomplete: " + str(err))

    return True
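
For reference, the column-compilation idiom used in the ALTER TABLE loop above can be reproduced in isolation. This is a hypothetical, self-contained sketch in SQLAlchemy 1.x style; the in-memory SQLite engine and table are stand-ins, not taken from the source:

from sqlalchemy import Column, MetaData, String, Table, create_engine

engine = create_engine('sqlite://')                      # throwaway in-memory DB for the sketch
meta = MetaData()
Table('catalog_image', meta, Column('imageDigest', String, primary_key=True))
meta.create_all(engine)

new_col = Column('distro', String)
cn = new_col.compile(dialect=engine.dialect)             # quoted column name for this dialect
ct = new_col.type.compile(engine.dialect)                # dialect-specific type, e.g. VARCHAR
engine.execute('ALTER TABLE %s ADD COLUMN %s %s' % ('catalog_image', cn, ct))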
Example #4
    def get_by_uri(self, uri: str) -> bytes:
        userId, bucket, key = self._parse_uri(uri)

        try:
            with db.session_scope() as dbsession:
                result = db_archivedocument.get(userId, bucket, key, session=dbsession)
            if result:
                return utils.ensure_bytes(self._decode(result))
            else:
                raise ObjectKeyNotFoundError(userId, bucket, key, caused_by=None)
        except Exception as err:
            logger.debug("cannot get data: exception - " + str(err))
            raise err
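
The _parse_uri helper is not part of this excerpt. A purely hypothetical implementation, assuming URIs of the form '<driver>://<userId>/<bucket>/<key>', might look like:

from urllib.parse import urlparse

def _parse_uri(uri: str):
    # hypothetical URI layout: '<driver>://<userId>/<bucket>/<key>'
    parsed = urlparse(uri)
    userId = parsed.netloc
    _, bucket, key = parsed.path.split('/', 2)
    return userId, bucket, key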
Example #5
def initialize():
    global archive_initialized, data_volume, use_db, archive_driver

    localconfig = anchore_engine.configuration.localconfig.get_config()
    myconfig = localconfig['services']['catalog']

    try:
        data_volume = None
        if 'archive_data_dir' in myconfig:
            data_volume = myconfig['archive_data_dir']

        archive_driver = 'db'
        if 'archive_driver' in myconfig:
            archive_driver = myconfig['archive_driver']

        if 'use_db' in myconfig and myconfig['use_db']:
            archive_driver = 'db'

        # driver specific initializations here
        if archive_driver == 'db':
            use_db = True
        else:
            use_db = False
            initialize_archive_file(myconfig)

    except Exception as err:
        raise err

    logger.debug("archive initialization config: " +
                 str([archive_driver, use_db, data_volume]))

    # this section is for conversion on initialization between db driver and other driver
    with db.session_scope() as dbsession:
        logger.debug("running archive driver converter")

        if use_db:
            # need to check if any archive records do not have the document field populated, and if so try to import from localfs
            dbfilter = {'jsondata': '{}'}
            archive_matches = db_archivedocument.list_all(session=dbsession,
                                                          **dbfilter)
            for archive_match in archive_matches:
                userId = archive_match['userId']
                bucket = archive_match['bucket']
                archiveid = archive_match['archiveId']
                try:
                    fs_data = read_archive_file(userId,
                                                bucket,
                                                archiveid,
                                                driver_override='localfs')
                except Exception as err:
                    logger.debug("no data: " + str(err))
                    fs_data = None

                if fs_data:
                    logger.debug("document data - converting driver->DB: " +
                                 str([userId, bucket, archiveid]))
                    with db.session_scope() as subdbsession:
                        db_archivedocument.add(
                            userId,
                            bucket,
                            archiveid,
                            archiveid + ".json",
                            {'jsondata': json.dumps(fs_data)},
                            session=subdbsession)
                    delete_archive_file(userId,
                                        bucket,
                                        archiveid,
                                        driver_override='localfs')

        else:
            # need to check if any archive records DO have the document field populated, and if so try to export to localfs
            archive_matches = db_archivedocument.list_all_notempty(
                session=dbsession)
            for archive_match in archive_matches:
                userId = archive_match['userId']
                bucket = archive_match['bucket']
                archiveid = archive_match['archiveId']
                archive_record = db_archivedocument.get(userId,
                                                        bucket,
                                                        archiveid,
                                                        session=dbsession)
                db_data = json.loads(archive_record['jsondata'])

                logger.debug("document data - converting DB->driver: " +
                             str([userId, bucket, archiveid]))
                dataref = write_archive_file(userId,
                                             bucket,
                                             archiveid,
                                             db_data,
                                             driver_override='localfs')
                with db.session_scope() as subdbsession:
                    db_archivedocument.add(userId,
                                           bucket,
                                           archiveid,
                                           archiveid + ".json",
                                           {'jsondata': "{}"},
                                           session=subdbsession)

        if False:  # disabled conversion path; kept in the source but never executes
            for archive_record in db_archivedocument.get_all_iter(
                    session=dbsession):
                userId = archive_record['userId']
                bucket = archive_record['bucket']
                archiveid = archive_record['archiveId']
                dataref = archive_record['documentName']
                if archive_record['jsondata']:
                    if archive_record['jsondata'] == "{}":
                        db_data = None
                    else:
                        try:
                            db_data = json.loads(archive_record['jsondata'])
                        except Exception:
                            logger.warn(
                                "could not load jsondata for archive record: " +
                                str([userId, bucket, archiveid]))
                            db_data = None
                else:
                    db_data = None

                if use_db and not db_data:
                    try:
                        fs_data = read_archive_file(userId,
                                                    bucket,
                                                    archiveid,
                                                    driver_override='localfs')
                    except Exception as err:
                        logger.debug("no data: " + str(err))
                        fs_data = None
                    if fs_data:
                        logger.debug(
                            "document data not in DB but is on FS - converting: "
                            + str([userId, bucket, archiveid]))
                        with db.session_scope() as subdbsession:
                            db_archivedocument.add(
                                userId,
                                bucket,
                                archiveid,
                                archiveid + ".json",
                                {'jsondata': json.dumps(fs_data)},
                                session=subdbsession)
                        delete_archive_file(userId,
                                            bucket,
                                            archiveid,
                                            driver_override='localfs')

                elif not use_db and db_data:
                    logger.debug(
                        "document data not on FS but is in DB - converting: " +
                        str([userId, bucket, archiveid]))
                    dataref = write_archive_file(userId,
                                                 bucket,
                                                 archiveid,
                                                 db_data,
                                                 driver_override='localfs')
                    with db.session_scope() as subdbsession:
                        db_archivedocument.add(userId,
                                               bucket,
                                               archiveid,
                                               archiveid + ".json",
                                               {'jsondata': "{}"},
                                               session=subdbsession)

        logger.debug("archive driver converter complete")
    archive_initialized = True
    return True
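
The catalog-service configuration that initialize() inspects might be shaped as follows (the keys are the ones read in the code above; the values are illustrative only):

myconfig = {
    'archive_driver': 'localfs',                  # any value other than 'db' turns off use_db
    'archive_data_dir': '/var/lib/anchore/archive',
    # 'use_db': True,                             # if present and truthy, forces archive_driver back to 'db'
}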