Example #1
def put_orig(userId, bucket, archiveid, data):
    global archive_initialized, data_volume, use_db

    if not archive_initialized:
        raise Exception("archive not initialized")

    if use_db:
        try:
            with db.session_scope() as dbsession:
                blarg = {'jsondata': json.dumps(data)}
                db_archivedocument.add(userId,
                                       bucket,
                                       archiveid,
                                       archiveid + ".json",
                                       blarg,
                                       session=dbsession)
        except Exception as err:
            logger.debug("cannot put data: exception - " + str(err))
            raise
    else:
        try:
            # create the bucket directory on first write; exist_ok avoids a
            # race between the existence check and the directory creation
            os.makedirs(os.path.join(data_volume, bucket), exist_ok=True)

            with open(os.path.join(data_volume, bucket, archiveid + ".json"),
                      'w') as OFH:
                OFH.write(json.dumps(data))

        except Exception as err:
            logger.debug("cannot put data: exception - " + str(err))
            raise

    return True
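
For context, a minimal usage sketch of the module-level API above. The call is hypothetical: the argument values are illustrative, and it assumes initialize() (Example #5) has already run so the module globals are set.

# hypothetical usage; argument values are illustrative only
initialize()                     # defined in Example #5
put_orig('admin', 'policy_bundles', 'bundle-1', {'policy': 'data'})
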
Example #2
def put(userId, bucket, archiveid, data):
    global archive_initialized, data_volume, use_db

    if not archive_initialized:
        raise Exception("archive not initialized")

    try:
        with db.session_scope() as dbsession:
            if use_db:
                dbdata = {'jsondata': json.dumps(data)}
            else:
                # payload goes to the external driver; keep a DB row with a
                # placeholder document so the record is still indexed
                dbdata = {'jsondata': '{}', 'last_updated': int(time.time())}
                write_archive_file(userId, bucket, archiveid, data)

            db_archivedocument.add(userId,
                                   bucket,
                                   archiveid,
                                   archiveid + ".json",
                                   dbdata,
                                   session=dbsession)
    except Exception as err:
        logger.debug("cannot put data: exception - " + str(err))
        raise

    return True
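
Note the difference from put_orig in Example #1: this variant always writes a row to db_archivedocument. When use_db is false, the real payload goes through write_archive_file and the row only carries the placeholder document '{}' plus a last_updated timestamp; the converter in Example #5 matches exactly that '{}' placeholder to find records whose documents live outside the DB.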
Example #3
    def put(self, userId: str, bucket: str, key: str, data: bytes) -> str:
        if not self.initialized:
            raise Exception("archive not initialized")

        try:
            with db.session_scope() as dbsession:
                str_data, is_b64 = self._encode(data)
                dbdata = {'jsondata': str_data, 'b64_encoded': is_b64}

                db_archivedocument.add(userId, bucket, key, key + ".json", inobj=dbdata, session=dbsession)
                return self.uri_for(userId, bucket, key)
        except Exception as err:
            logger.debug("cannot put data: exception - " + str(err))
            raise
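
A hedged usage sketch for the class-based API above; the excerpt does not show the driver class, its constructor, or how self.initialized gets set, so all three are assumed here.

# hypothetical: class name, constructor, and initialize() are assumptions
archive = SomeArchiveDriver(config)   # assumed constructor
archive.initialize()                  # assumed to set self.initialized = True
uri = archive.put('admin', 'policy_bundles', 'bundle-1', b'{"policy": []}')
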
Example #4
    def put(self, userId, bucket, key, data):
        if not self.initialized:
            raise Exception("archive not initialized")

        try:
            with db.session_scope() as dbsession:
                # assumes the payload decodes as UTF-8; unlike Example #3,
                # binary data is not base64-encoded here
                dbdata = {'jsondata': data.decode('utf-8')}
                db_archivedocument.add(userId,
                                       bucket,
                                       key,
                                       key + ".json",
                                       inobj=dbdata,
                                       session=dbsession)
                return self.uri_for(userId, bucket, key)
        except Exception as err:
            logger.debug("cannot put data: exception - " + str(err))
            raise
Example #5
def initialize():
    global archive_initialized, data_volume, use_db, archive_driver

    localconfig = anchore_engine.configuration.localconfig.get_config()
    myconfig = localconfig['services']['catalog']

    data_volume = None
    if 'archive_data_dir' in myconfig:
        data_volume = myconfig['archive_data_dir']

    archive_driver = 'db'
    if 'archive_driver' in myconfig:
        archive_driver = myconfig['archive_driver']

    # the 'use_db' flag forces the db driver regardless of 'archive_driver'
    if 'use_db' in myconfig and myconfig['use_db']:
        archive_driver = 'db'

    # driver specific initializations here
    if archive_driver == 'db':
        use_db = True
    else:
        use_db = False
        initialize_archive_file(myconfig)

    logger.debug("archive initialization config: " +
                 str([archive_driver, use_db, data_volume]))
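
For reference, a sketch of the catalog-service configuration fragment that initialize() reads. The key names come from the lookups above; the values are illustrative assumptions only.

# illustrative values only; key names match the lookups in initialize()
localconfig = {
    'services': {
        'catalog': {
            'archive_data_dir': '/var/lib/anchore/archive',  # optional
            'archive_driver': 'localfs',                     # defaults to 'db'
            'use_db': False,                                 # True forces the db driver
        }
    }
}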

    # this section is for conversion on initialization between db driver and other driver
    with db.session_scope() as dbsession:
        logger.debug("running archive driver converter")

        if use_db:
            # need to check if any archive records do not have the document field populated, and if so try to import from localfs
            dbfilter = {'jsondata': '{}'}
            archive_matches = db_archivedocument.list_all(session=dbsession,
                                                          **dbfilter)
            for archive_match in archive_matches:
                userId = archive_match['userId']
                bucket = archive_match['bucket']
                archiveid = archive_match['archiveId']
                try:
                    fs_data = read_archive_file(userId,
                                                bucket,
                                                archiveid,
                                                driver_override='localfs')
                except Exception as err:
                    logger.debug("no data: " + str(err))
                    fs_data = None

                if fs_data:
                    logger.debug("document data - converting driver->DB: " +
                                 str([userId, bucket, archiveid]))
                    with db.session_scope() as subdbsession:
                        db_archivedocument.add(
                            userId,
                            bucket,
                            archiveid,
                            archiveid + ".json",
                            {'jsondata': json.dumps(fs_data)},
                            session=subdbsession)
                    delete_archive_file(userId,
                                        bucket,
                                        archiveid,
                                        driver_override='localfs')

        else:
            # need to check if any archive records DO have the document field populated, and if so try to export to localfs
            archive_matches = db_archivedocument.list_all_notempty(
                session=dbsession)
            for archive_match in archive_matches:
                userId = archive_match['userId']
                bucket = archive_match['bucket']
                archiveid = archive_match['archiveId']
                archive_record = db_archivedocument.get(userId,
                                                        bucket,
                                                        archiveid,
                                                        session=dbsession)
                db_data = json.loads(archive_record['jsondata'])

                logger.debug("document data - converting DB->driver: " +
                             str([userId, bucket, archiveid]))
                write_archive_file(userId,
                                   bucket,
                                   archiveid,
                                   db_data,
                                   driver_override='localfs')
                with db.session_scope() as subdbsession:
                    db_archivedocument.add(userId,
                                           bucket,
                                           archiveid,
                                           archiveid + ".json",
                                           {'jsondata': "{}"},
                                           session=subdbsession)

        # dead code: the combined converter below is disabled and never runs
        if False:
            for archive_record in db_archivedocument.get_all_iter(
                    session=dbsession):
                userId = archive_record['userId']
                bucket = archive_record['bucket']
                archiveid = archive_record['archiveId']
                dataref = archive_record['documentName']
                if archive_record['jsondata']:
                    if archive_record['jsondata'] == "{}":
                        db_data = None
                    else:
                        try:
                            db_data = json.loads(archive_record['jsondata'])
                        except Exception:
                            logger.warn(
                                "could not load jsondata for archive record: " +
                                str([userId, bucket, archiveid]))
                            db_data = None
                else:
                    db_data = None

                if use_db and not db_data:
                    try:
                        fs_data = read_archive_file(userId,
                                                    bucket,
                                                    archiveid,
                                                    driver_override='localfs')
                    except Exception as err:
                        logger.debug("no data: " + str(err))
                        fs_data = None
                    if fs_data:
                        logger.debug(
                            "document data not in DB but is on FS - converting: "
                            + str([userId, bucket, archiveid]))
                        with db.session_scope() as subdbsession:
                            db_archivedocument.add(
                                userId,
                                bucket,
                                archiveid,
                                archiveid + ".json",
                                {'jsondata': json.dumps(fs_data)},
                                session=subdbsession)
                        delete_archive_file(userId,
                                            bucket,
                                            archiveid,
                                            driver_override='localfs')

                elif not use_db and db_data:
                    logger.debug(
                        "document data not on FS but is in DB - converting: " +
                        str([userId, bucket, archiveid]))
                    dataref = write_archive_file(userId,
                                                 bucket,
                                                 archiveid,
                                                 db_data,
                                                 driver_override='localfs')
                    with db.session_scope() as subdbsession:
                        db_archivedocument.add(userId,
                                               bucket,
                                               archiveid,
                                               archiveid + ".json",
                                               {'jsondata': "{}"},
                                               session=subdbsession)

        logger.debug("archive driver converter complete")
    archive_initialized = True
    return True