Example 1
def run_test():
    """
    Common test path for all configs to test against
    :return:
    """
    mgr = get_manager()
    logger.info('Basic string operations using get/put/delete')
    resp = mgr.put(userId=test_user_id, bucket=test_bucket_id, archiveid='document_1', data=document_1)
    logger.info('Document 1 PUT: {}'.format(resp))

    resp = mgr.get(userId=test_user_id, bucket=test_bucket_id, archiveid='document_1')
    assert document_1 == resp

    assert mgr.exists(test_user_id, test_bucket_id, 'document_1')
    assert not mgr.exists(test_user_id, test_bucket_id, 'document_10')

    logger.info('Document operations')
    resp = mgr.put_document(userId=test_user_id, bucket=test_bucket_id, archiveId='document_json', data=document_json)
    logger.info('Document JSON PUT Doc: {}'.format(resp))

    resp = mgr.get_document(userId=test_user_id, bucket=test_bucket_id, archiveId='document_json')
    logger.info('Document JSON GET Doc: {}'.format(resp))
    assert document_json == resp

    logger.info('Document string operations')
    resp = mgr.put_document(userId=test_user_id, bucket=test_bucket_id, archiveId='document_json', data=document_1.decode('utf-8'))
    logger.info('Document string PUT Doc: {}'.format(resp))

    resp = mgr.get_document(userId=test_user_id, bucket=test_bucket_id, archiveId='document_json')
    logger.info('Document string GET Doc: {}'.format(resp))
    assert document_1.decode('utf-8') == resp
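The test above depends on module-level fixtures the excerpt does not show. A minimal sketch of plausible values, assuming document_1 is raw bytes (it is decoded with .decode('utf-8') later) and document_json is a JSON-serializable dict; the names match the example, the values are hypothetical:

# Hypothetical fixtures for run_test(); values are illustrative only.
test_user_id = 'testuser'
test_bucket_id = 'testbucket'
document_1 = b'{"key": "value"}'   # put()/get() round-trip raw bytes
document_json = {'key': 'value'}   # put_document()/get_document() round-trip JSON values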
Example 2
    def _execute(self):
        # if image record already exists, exit.
        with session_scope() as session:
            if db_catalog_image.get(self.image_digest, self.account, session):
                logger.info('Image archive restore found existing image records already. Aborting restore.')
                raise ImageConflict('Conflict: Image already exists in system. No restore possible')

        dest_obj_mgr = object_store.get_manager()

        # Load the archive manifest
        m = self.fileobj.read()

        if m:
            tf = tempfile.NamedTemporaryFile(prefix='analysis_archive_{}'.format(self.image_digest), dir=localconfig.get_config()['tmp_dir'], delete=False)
            try:
                tf.write(ensure_bytes(m))
                tf.close()

                # Load the archive from the temp file
                with ImageArchive.for_reading(tf.name) as img_archive:

                    logger.debug('Using manifest: {}'.format(img_archive.manifest))

                    self.restore_artifacts(img_archive, dest_obj_mgr)
                    self.restore_records(img_archive.manifest)
                    self._reload_policy_engine(img_archive.manifest)
            finally:
                os.remove(tf.name)

        else:
            raise Exception('No archive manifest found in archive record. Cannot restore')        
Example 3
def flush_data():
    logger.info('Flushing data')
    mgr = object_store.get_manager()
    for i in range(0, 100):
        archiveId = 'doc-{}'.format(i)
        logger.info('Deleting document: {}'.format(archiveId))
        mgr.delete_document(userId='test1',
                            bucket='testing',
                            archiveid=archiveId)
Example 4
def list_evals_impl(dbsession, userId, policyId=None, imageDigest=None, tag=None, evalId=None, newest_only=False, interactive=False):
    logger.debug("looking up eval record: " + userId)

    object_store_mgr = object_store.get_manager()

    # set up the filter based on input
    dbfilter = {}
    latest_eval_record = latest_eval_result = None

    if policyId is not None:
        dbfilter['policyId'] = policyId

    if imageDigest is not None:
        dbfilter['imageDigest'] = imageDigest

    if tag is not None:
        dbfilter['tag'] = tag

    if evalId is not None:
        dbfilter['evalId'] = evalId

    # perform an interactive eval to get/install the latest
    try:
        logger.debug("performing eval refresh: " + str(dbfilter))
        imageDigest = dbfilter['imageDigest']
        if 'tag' in dbfilter:
            evaltag = dbfilter['tag']
        else:
            evaltag = None

        if 'policyId' in dbfilter:
            policyId = dbfilter['policyId']
        else:
            policyId = None

        latest_eval_record, latest_eval_result = catalog_impl.perform_policy_evaluation(userId, imageDigest, dbsession, evaltag=evaltag, policyId=policyId, interactive=interactive, newest_only=newest_only)
    except Exception as err:
        logger.error("interactive eval failed - exception: {}".format(err))

    records = []
    if interactive or newest_only:
        try:
            latest_eval_record['result'] = latest_eval_result
            records = [latest_eval_record]
        except:
            raise Exception("interactive or newest_only eval requested, but unable to perform eval at this time")
    else:
        records = db_policyeval.tsget_byfilter(userId, session=dbsession, **dbfilter)
        for record in records:
            try:
                result = object_store_mgr.get_document(userId, 'policy_evaluations', record['evalId'])
                record['result'] = result
            except:
                record['result'] = {}

    return records
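One subtlety in the refresh block above: dbfilter['imageDigest'] raises KeyError when no digest was supplied, and the broad except turns that into a logged error and a silently skipped refresh. A more explicit guard, as a sketch using only the calls already shown:

# Sketch: make the "no digest, skip refresh" path explicit instead of
# relying on KeyError being swallowed by the broad except clause.
imageDigest = dbfilter.get('imageDigest')
if imageDigest is not None:
    latest_eval_record, latest_eval_result = catalog_impl.perform_policy_evaluation(
        userId, imageDigest, dbsession,
        evaltag=dbfilter.get('tag'),
        policyId=dbfilter.get('policyId'),
        interactive=interactive,
        newest_only=newest_only)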
Example 5
def list_policies(active=None):
    """
    GET /policies?active=true|false
    :return:
    """

    # set up the filter based on input
    try:

        object_storage_mgr = object_store.get_manager()
        request_inputs = anchore_engine.apis.do_request_prep(connexion.request,
                                                             default_params={})
        user_id = request_inputs["userId"]

        with db.session_scope() as dbsession:
            if active is not None:
                records = db_policybundle.get_byfilter(user_id,
                                                       session=dbsession,
                                                       active=active)
            else:
                records = db_policybundle.get_byfilter(user_id,
                                                       session=dbsession)

        if records:
            for record in records:
                record["policybundle"] = {}
                try:
                    policybundle = object_storage_mgr.get_document(
                        user_id, "policy_bundles", record["policyId"])
                    if policybundle:
                        record["policybundle"] = policybundle

                        record["policybundlemeta"] = {}
                        meta = object_storage_mgr.get_document_meta(
                            user_id, "policy_bundles", record["policyId"])
                        if meta:
                            record["policybundlemeta"] = meta

                except Exception as err:
                    logger.warn(
                        "failed to fetch policy bundle from archive - exception: "
                        + str(err))
                    raise anchore_engine.common.helpers.make_anchore_exception(
                        err,
                        input_message=
                        "failed to fetch policy bundle from archive",
                        input_httpcode=500,
                    )
        else:
            records = []

        return records, 200
    except Exception as err:
        logger.exception("Uncaught exception")
        return str(err), 500
Example 6
def add_data():
    logger.info('Adding data')
    mgr = object_store.get_manager()
    for i in range(0, 100):
        archiveId = 'doc-{}'.format(i)
        logger.info('Adding document: {}'.format(archiveId))
        mgr.put_document(userId='test1',
                         bucket='testing',
                         archiveId=archiveId,
                         data='TESTINGBUCKETDATASMALL'.join(
                             [str(x) for x in range(100)]))
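Examples 3 and 6 are two halves of one fixture lifecycle: add_data() writes doc-0 through doc-99 and flush_data() deletes them. A driver might pair them like this (a sketch, assuming object_store.initialize() has already been run for the process):

def exercise_store():
    # Populate the test bucket, verify one document, then clean up.
    add_data()
    try:
        mgr = object_store.get_manager()
        assert mgr.exists('test1', 'testing', 'doc-0')
    finally:
        flush_data()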
Example 7
def get_policy(policyId):
    """
    GET /policies/{policyId}

    :param policyId:
    :return:
    """
    try:
        object_storage_mgr = object_store.get_manager()

        request_inputs = anchore_engine.apis.do_request_prep(connexion.request,
                                                             default_params={})
        user_id = request_inputs["userId"]

        with db.session_scope() as dbsession:
            record = db_policybundle.get(user_id,
                                         policyId=policyId,
                                         session=dbsession)

        if record:
            record["policybundle"] = {}
            try:
                policybundle = object_storage_mgr.get_document(
                    user_id, "policy_bundles", record["policyId"])
                if policybundle:
                    record["policybundle"] = policybundle

                    record["policybundlemeta"] = {}
                    meta = object_storage_mgr.get_document_meta(
                        user_id, "policy_bundles", record["policyId"])
                    if meta:
                        record["policybundlemeta"] = meta

            except Exception as err:
                logger.warn(
                    "failed to fetch policy bundle from archive - exception: "
                    + str(err))
                raise anchore_engine.common.helpers.make_anchore_exception(
                    err,
                    input_message="failed to fetch policy bundle from archive",
                    input_httpcode=500,
                )
            return record, 200
        else:
            return (
                anchore_engine.common.helpers.make_response_error(
                    "Policy bundle {} not found in DB".format(policyId),
                    in_httpcode=404),
                404,
            )
    except Exception as err:
        logger.exception("Uncaught exception")
        return str(err), 500
Example 8
def run_test():
    """
    Common test path for all configs to test against
    :return:
    """
    mgr = get_manager()
    logger.info("Basic string operations using get/put/delete")
    resp = mgr.put(
        userId=test_user_id,
        bucket=test_bucket_id,
        archiveid="document_1",
        data=document_1,
    )
    logger.info("Document 1 PUT: {}".format(resp))

    resp = mgr.get(userId=test_user_id,
                   bucket=test_bucket_id,
                   archiveid="document_1")
    assert document_1 == resp

    assert mgr.exists(test_user_id, test_bucket_id, "document_1")
    assert not mgr.exists(test_user_id, test_bucket_id, "document_10")

    logger.info("Document operations")
    resp = mgr.put_document(
        userId=test_user_id,
        bucket=test_bucket_id,
        archiveId="document_json",
        data=document_json,
    )
    logger.info("Document JSON PUT Doc: {}".format(resp))

    resp = mgr.get_document(userId=test_user_id,
                            bucket=test_bucket_id,
                            archiveId="document_json")
    logger.info("Document JSON GET Dock: {}".format(resp))
    assert document_json == resp

    logger.info("Document operations")
    resp = mgr.put_document(
        userId=test_user_id,
        bucket=test_bucket_id,
        archiveId="document_json",
        data=document_1.decode("utf-8"),
    )
    logger.info("Document string PUT Doc: {}".format(resp))

    resp = mgr.get_document(userId=test_user_id,
                            bucket=test_bucket_id,
                            archiveId="document_json")
    logger.info("Document string GET Dock: {}".format(resp))
    assert document_1.decode("utf-8") == resp
Example 9
def save_policy(user_id, policyId, active, policy_bundle, dbsession):
    """
    Do the save, outside the context of an API call itself.

    :param user_id: str - requesting user
    :param policyId: str - the id for policy
    :param active: boolean - is active or not
    :param policy_bundle: dict - bundle content
    :return:
    """

    object_store_mgr = object_store.get_manager()
    try:
        if object_store_mgr.put_document(user_id, 'policy_bundles', policyId,
                                         policy_bundle):
            rc = db_policybundle.update(policyId,
                                        user_id,
                                        active,
                                        policy_bundle,
                                        session=dbsession)
        else:
            rc = False
    except Exception as err:
        raise anchore_engine.common.helpers.make_anchore_exception(
            err,
            input_message="cannot add policy, failed to update archive/DB",
            input_httpcode=500)
    if not rc:
        raise Exception("DB update failed")
    else:
        if active:
            try:
                rc = db_policybundle.set_active_policy(policyId,
                                                       user_id,
                                                       session=dbsession)
            except Exception as err:
                raise Exception(
                    "could not set policy as active - exception: " + str(err))

        record = db_policybundle.get(user_id, policyId, session=dbsession)
        record['policybundle'] = policy_bundle

        return record
Example 10
    def _execute(self):
        # if image record already exists, exit.

        with session_scope() as session:
            if db_catalog_image.get(self.image_digest, self.account, session):
                logger.info('Image archive restore found existing image records already. Aborting restore.')
                raise Exception('Conflict: Image already exists in system. No restore possible')

            #rec = db_archived_images.get(session, self.account, self.image_digest)
            #if not rec:
            #    raise MetadataNotFound('/'.join([str(self.account), str(self.image_digest)]))

            #self.archive_record = rec.to_dict()
            #self.archive_detail_records = [x.to_dict() for x in rec.tags()]

        #src_archive_mgr = archive.get_manager()
        dest_obj_mgr = object_store.get_manager()

        # Load the archive manifest
        #m = src_archive_mgr.get(self.account, self.archive_record['manifest_bucket'], self.archive_record['manifest_key'])
        m = self.fileobj.read()

        if m:
            tf = tempfile.NamedTemporaryFile(prefix='analysis_archive_{}'.format(self.image_digest), dir=localconfig.get_config()['tmp_dir'], delete=False)
            try:
                tf.write(ensure_bytes(m))
                tf.close()

                # Load the archive from the temp file
                with ImageArchive.for_reading(tf.name) as img_archive:

                    logger.debug('Using manifest: {}'.format(img_archive.manifest))

                    self.restore_artifacts(img_archive, dest_obj_mgr)
                    self.restore_records(img_archive.manifest)
                    self._reload_policy_engine(img_archive.manifest)
            finally:
                os.remove(tf.name)

        else:
            raise Exception('No archive manifest found in archive record. Cannot restore')        
Example 11
def save_policy(user_id, policyId, active, policy_bundle, dbsession):
    """
    Do the save, outside the context of an API call itself.

    :param user_id: str - requesting user
    :param policyId: str - the id for policy
    :param active: boolean - is active or not
    :param policy_bundle: dict - bundle content
    :return:
    """

    try:
        active_record = db_policybundle.get_active_policy(user_id,
                                                          session=dbsession)
        last_active_policyId = active_record.get('policyId', None)
    except:
        last_active_policyId = None

    object_store_mgr = object_store.get_manager()

    # if update is to currently active bundle, check if the bundle content is to be changed
    if last_active_policyId == policyId:
        try:
            last_policy_bundle_content = object_store_mgr.get_document(
                user_id, 'policy_bundles', policyId)
            if last_policy_bundle_content:
                last_policy_bundle_digest = hashlib.sha256(
                    anchore_engine.utils.ensure_bytes(
                        json.dumps(last_policy_bundle_content,
                                   sort_keys=True))).hexdigest()
                new_policy_bundle_digest = hashlib.sha256(
                    anchore_engine.utils.ensure_bytes(
                        json.dumps(policy_bundle,
                                   sort_keys=True))).hexdigest()
                if last_policy_bundle_digest != new_policy_bundle_digest:
                    event = anchore_engine.subsys.events.ActivePolicyBundleContentChange(
                        user_id=user_id,
                        data={
                            'policy_id':
                            policyId,
                            'last_policy_bundle_digest':
                            last_policy_bundle_digest,
                            'current_policy_bundle_digest':
                            new_policy_bundle_digest
                        })
                    try:
                        anchore_engine.services.catalog.catalog_impl._add_event(
                            event, dbsession)
                    except:
                        logger.warn(
                            'Ignoring error creating active policy content change event'
                        )
        except Exception as err:
            logger.warn(
                "Could not evaluate bundle content different on save for active bundle - exception: {}"
                .format(err))

    try:
        if object_store_mgr.put_document(user_id, 'policy_bundles', policyId,
                                         policy_bundle):
            rc = db_policybundle.update(policyId,
                                        user_id,
                                        active,
                                        policy_bundle,
                                        session=dbsession)
        else:
            rc = False
    except Exception as err:
        raise anchore_engine.common.helpers.make_anchore_exception(
            err,
            input_message="cannot add policy, failed to update archive/DB",
            input_httpcode=500)
    if not rc:
        raise Exception("DB update failed")
    else:
        if active:
            try:
                rc = db_policybundle.set_active_policy(policyId,
                                                       user_id,
                                                       session=dbsession)
                if rc:
                    if policyId != last_active_policyId:
                        # a new policy is now active
                        event = anchore_engine.subsys.events.ActivePolicyBundleIdChange(
                            user_id=user_id,
                            data={
                                'last_policy_bundle_id': last_active_policyId,
                                'current_policy_bundle_id': policyId
                            })
                        try:
                            anchore_engine.services.catalog.catalog_impl._add_event(
                                event, dbsession)
                        except:
                            logger.warn(
                                'Ignoring error creating active policy id change event'
                            )

            except Exception as err:
                raise Exception(
                    "could not set policy as active - exception: " + str(err))

        record = db_policybundle.get(user_id, policyId, session=dbsession)
        record['policybundle'] = policy_bundle

        return record
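The content-change detection in the example above hinges on hashing a canonical JSON serialization: json.dumps(..., sort_keys=True) is deterministic, so two semantically identical bundles produce the same digest regardless of key order. The technique in isolation:

import hashlib
import json

def bundle_digest(bundle):
    # sort_keys=True yields a canonical serialization, so dict key
    # order does not affect the resulting digest.
    return hashlib.sha256(
        json.dumps(bundle, sort_keys=True).encode('utf-8')).hexdigest()

assert bundle_digest({'a': 1, 'b': 2}) == bundle_digest({'b': 2, 'a': 1})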
Example 12
def check(configfile, analysis_archive):
    """
    Test the configuration in the expected anchore-engine config location or override that and use the configuration file provided as an option.

    To test, the system will read and write a very small data document to the driver and then delete it on completion.

    :param configfile:
    :return:
    """

    logger.info('Using config file {}'.format(configfile))
    sys_config = load_config(configfile=configfile)

    if sys_config:
        service_config = sys_config['services']['catalog']
    else:
        service_config = None

    if not service_config:
        logger.info(
            'No configuration file or content available. Cannot test archive driver configuration'
        )
        utils.doexit(2)

    if analysis_archive:
        try:
            object_store.initialize(service_config,
                                    manager_id=ANALYSIS_ARCHIVE_MANAGER_ID,
                                    config_keys=[ANALYSIS_ARCHIVE_MANAGER_ID])
        except:
            logger.error(
                'No "analysis_archive" configuration section found in the configuration. To check a config that uses the default backend for analysis archive data, use the regular object storage check'
            )
            utils.doexit(2)

        mgr = object_store.get_manager(ANALYSIS_ARCHIVE_MANAGER_ID)
    else:
        object_store.initialize(service_config,
                                manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
                                config_keys=[
                                    DEFAULT_OBJECT_STORE_MANAGER_ID,
                                    ALT_OBJECT_STORE_CONFIG_KEY
                                ])
        mgr = object_store.get_manager()

    test_user_id = 'test'
    test_bucket = 'anchorecliconfigtest'
    test_archive_id = 'cliconfigtest'
    test_data = 'clitesting at {}'.format(
        datetime.datetime.utcnow().isoformat())

    logger.info(
        'Checking existence of test document with user_id = {}, bucket = {} and archive_id = {}'
        .format(test_user_id, test_bucket, test_archive_id))
    if mgr.exists(test_user_id, test_bucket, test_archive_id):
        test_archive_id = 'cliconfigtest2'
        if mgr.exists(test_user_id, test_bucket, test_archive_id):
            logger.error(
                'Found existing records for archive doc to test, aborting test to avoid overwriting any existing data'
            )
            utils.doexit(1)

    logger.info(
        'Creating test document with user_id = {}, bucket = {} and archive_id = {}'
        .format(test_user_id, test_bucket, test_archive_id))
    result = mgr.put(test_user_id,
                     test_bucket,
                     test_archive_id,
                     data=test_data)
    if not result:
        logger.warn(
            'Got empty response from archive PUT operation: {}'.format(result))

    logger.info('Checking document fetch')
    loaded = str(mgr.get(test_user_id, test_bucket, test_archive_id), 'utf-8')
    if not loaded:
        logger.error(
            'Failed retrieving the written document. Got: {}'.format(loaded))
        utils.doexit(5)

    if str(loaded) != test_data:
        logger.error(
            'Failed retrieving the written document. Got something other than expected. Expected: "{}" Got: "{}"'
            .format(test_data, loaded))
        utils.doexit(5)

    logger.info('Removing test object')
    mgr.delete(test_user_id, test_bucket, test_archive_id)

    if mgr.exists(test_user_id, test_bucket, test_archive_id):
        logger.error('Found archive object after it should have been removed')
        utils.doexit(5)

    logger.info('Archive config check completed successfully')
Example 13
    def __init__(self, object_manager_id):
        self.obj_manager = object_store.get_manager(object_manager_id)
        logger.debug(
            "Archive manager initialized using object storage driver: {}".
            format(self.obj_manager.primary_client.__config_name__))
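The constructor above resolves a named manager at init time, so a caller passes one of the configured manager ids. A sketch, assuming the enclosing class is named ArchiveManager (the real class name is not shown) and that object_store.initialize() has already registered ANALYSIS_ARCHIVE_MANAGER_ID as in Examples 12 and 15:

# Hypothetical usage; ArchiveManager is an assumed name for the class above.
archive_mgr = ArchiveManager(ANALYSIS_ARCHIVE_MANAGER_ID)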
Example 14
def import_image(
    dbsession,
    account: str,
    operation_id: str,
    import_manifest: ImportManifest,
    force: bool = False,
    annotations: dict = None,
) -> dict:
    """
    Process the image import finalization, creating the new 'image' record and setting the proper state for queueing

    :param dbsession:
    :param account:
    :param operation_id:
    :param import_manifest:
    :param force:
    :param annotations:
    :return:
    """

    logger.debug(
        "Processing import image request with source operation_id = %s, annotations = %s",
        operation_id,
        annotations,
    )

    # Add annotation indicating this is an import
    annotations = add_import_annotations(import_manifest, annotations)

    # Import analysis for a new digest, or re-load analysis for existing image
    logger.debug("Loading image info using import operation id %s",
                 operation_id)
    image_references = []
    for t in import_manifest.tags:
        r = DockerImageReference.from_string(t)
        r.digest = import_manifest.digest

        if import_manifest.local_image_id:
            r.image_id = import_manifest.local_image_id
        else:
            r.image_id = import_manifest.digest

        image_references.append(r)

    if not (image_references and image_references[0].has_digest()):
        raise ValueError("Must have image digest in image reference")

    # Check for dockerfile updates to an existing image
    found_img = db_catalog_image.get(imageDigest=import_manifest.digest,
                                     userId=account,
                                     session=dbsession)

    # Removed this to align processing with how analysis works: the status is updated *after* the add call
    # if the record already had an older status it will get reset
    if (found_img and found_img["analysis_status"]
            not in taskstate.fault_state("analyze") and not force):
        # Load the existing manifest since we aren't going to use the import manifest for analysis
        obj_mgr = get_manager()
        manifest = obj_mgr.get_document(account, "manifest_data",
                                        found_img["imageDigest"])
        parent_manifest = obj_mgr.get_document(account, "parent_manifest_data",
                                               found_img["imageDigest"])

        # Don't allow a dockerfile update via import path
        dockerfile_content = None
        dockerfile_mode = None

        # Finalize the import, go straight to complete
        finalize_import_operation(
            dbsession,
            account,
            operation_id,
            import_manifest,
            final_state=ImportState.complete,
        )

        # raise BadRequest(
        #     "Cannot reload image that already exists unless using force=True for re-analysis",
        #     detail={"digest": import_manifest.digest},
        # )
    else:
        # Finalize the import
        internal_import_manifest = finalize_import_operation(
            dbsession, account, operation_id, import_manifest)

        # Get the dockerfile content if available
        if import_manifest.contents.dockerfile:
            rec = [
                ref for ref in internal_import_manifest.contents
                if ref.content_type == ImportTypes.dockerfile.value
            ][0]
            obj_mgr = get_manager()
            dockerfile_content = obj_mgr.get_document(
                userId=account,
                bucket=rec.bucket,
                archiveId=rec.key,
            )
            dockerfile_mode = "Actual"
        else:
            dockerfile_content = ""
            dockerfile_mode = "Guessed"

        # Set the manifest to the import manifest. This is swapped out for the real manifest during the import operation on
        # the analyzer
        manifest = internal_import_manifest.to_json()

        parent_manifest = ""

    # Update the db for the image record
    image_records = add_or_update_image(
        dbsession,
        account,
        image_references[0].image_id,
        tags=[x.tag_pullstring() for x in image_references],
        digests=[x.digest_pullstring() for x in image_references],
        parentdigest=import_manifest.parent_digest
        if import_manifest.parent_digest else import_manifest.digest,
        dockerfile=dockerfile_content,
        dockerfile_mode=dockerfile_mode,
        manifest=manifest,  # For now use the import manifest as the image manifest. This will get set to the actual manifest on the analyzer
        parent_manifest=parent_manifest,
        annotations=annotations,
    )
    if image_records:
        image_record = image_records[0]
    else:
        raise Exception("No record updated/inserted")

    return image_record
Example 15
def check(configfile, analysis_archive):
    """
    Test the configuration in the expected anchore-engine config location or override that and use the configuration file provided as an option.

    To test, the system will read and write a very small data document to the driver and then delete it on completion.
    """

    db_conf = db_context()
    db_preflight(db_conf["params"], db_conf["retries"])

    logger.info("Using config file {}".format(configfile))
    sys_config = load_config(configfile=configfile)

    if sys_config:
        service_config = sys_config["services"]["catalog"]
    else:
        service_config = None

    if not service_config:
        logger.info(
            "No configuration file or content available. Cannot test archive driver configuration"
        )
        fail_exit()

    if analysis_archive:
        try:
            object_store.initialize(
                service_config,
                manager_id=ANALYSIS_ARCHIVE_MANAGER_ID,
                config_keys=[ANALYSIS_ARCHIVE_MANAGER_ID],
            )
        except:
            logger.error(
                'No "analysis_archive" configuration section found in the configuration. To check a config that uses the default backend for analysis archive data, use the regular object storage check'
            )
            fail_exit()

        mgr = object_store.get_manager(ANALYSIS_ARCHIVE_MANAGER_ID)
    else:
        object_store.initialize(
            service_config,
            manager_id=DEFAULT_OBJECT_STORE_MANAGER_ID,
            config_keys=[
                DEFAULT_OBJECT_STORE_MANAGER_ID, ALT_OBJECT_STORE_CONFIG_KEY
            ],
        )
        mgr = object_store.get_manager()

    test_user_id = "test"
    test_bucket = "anchorecliconfigtest"
    test_archive_id = "cliconfigtest"
    test_data = "clitesting at {}".format(
        datetime.datetime.utcnow().isoformat())

    logger.info(
        "Checking existence of test document with user_id = {}, bucket = {} and archive_id = {}"
        .format(test_user_id, test_bucket, test_archive_id))
    if mgr.exists(test_user_id, test_bucket, test_archive_id):
        test_archive_id = "cliconfigtest2"
        if mgr.exists(test_user_id, test_bucket, test_archive_id):
            logger.error(
                "Found existing records for archive doc to test, aborting test to avoid overwritting any existing data"
            )
            doexit(1)

    logger.info(
        "Creating test document with user_id = {}, bucket = {} and archive_id = {}"
        .format(test_user_id, test_bucket, test_archive_id))
    result = mgr.put(test_user_id,
                     test_bucket,
                     test_archive_id,
                     data=test_data)
    if not result:
        logger.warn(
            "Got empty response form archive PUT operation: {}".format(result))

    logger.info("Checking document fetch")
    loaded = str(mgr.get(test_user_id, test_bucket, test_archive_id), "utf-8")
    if not loaded:
        logger.error(
            "Failed retrieving the written document. Got: {}".format(loaded))
        doexit(ExitCode.obj_store_failed)

    if str(loaded) != test_data:
        logger.error(
            'Failed retrieving the written document. Got something other than expected. Expected: "{}" Got: "{}"'
            .format(test_data, loaded))
        doexit(ExitCode.obj_store_failed)

    logger.info("Removing test object")
    mgr.delete(test_user_id, test_bucket, test_archive_id)

    if mgr.exists(test_user_id, test_bucket, test_archive_id):
        logger.error("Found archive object after it should have been removed")
        doexit(ExitCode.obj_store_failed)

    logger.info("Archive config check completed successfully")
Example 16
    def _execute(self):
        """
        Do the archiving of data
        :return:
        """

        src_obj_mgr = object_store.get_manager()
        dest_archive_mgr = archive.get_manager()
        data_written = False

        with session_scope() as session:
            record = db_archived_images.get(session, self.account, self.image_digest)

            if not record:
                raise Exception('No analysis archive record found to track state')

            try:
                with tempfile.TemporaryDirectory(dir=localconfig.get_config().get('tmp_dir')) as tempdir:
                    with ImageArchive.for_writing(os.path.join(tempdir, 'analysis_archive.tar.gz')) as img_archive:
                        img_archive.account = self.account
                        img_archive.image_digest = self.image_digest

                        if self._catalog_record.get('image_detail'):
                            image_id = self._catalog_record.get('image_detail')[0]['imageId']
                        else:
                            image_id = None

                        img_archive.manifest.metadata = {
                            'versions': localconfig.get_versions(),
                            'image_id': image_id,
                            'image_record': json.dumps(self._catalog_record, sort_keys=True)
                        }

                        self.archive_required(src_obj_mgr, self.required_artifacts, img_archive)

                        try:
                            vuln_artifacts = self.archive_vuln_history(img_archive)
                        except:
                            logger.exception('Error saving vuln history')
                            raise

                        try:
                            eval_artifacts = self.archive_policy_evaluations(src_obj_mgr, img_archive, session)
                        except:
                            logger.exception('Error saving policy evals')
                            raise

                        self.manifest = img_archive.manifest

                    # Closed tarball, now write it.

                    archive_bucket = self.__archive_bucket__
                    archive_key = '{}.tar.gz'.format(self.image_digest)
                    record.manifest_bucket = archive_bucket
                    record.manifest_key = archive_key

                    # Write the archive out to object store
                    with open(img_archive.backing_file_path, 'r+b') as tb:
                        tarball_data = tb.read()
                        size = len(tarball_data)

                    if not dest_archive_mgr.put(self.account, bucket=archive_bucket, archiveId=archive_key, data=tarball_data):
                        raise Exception("Could not write archive manifest")

                    data_written = True
                    record.archive_size_bytes = size
                    record.status = 'archived'

                    add_event(ImageArchived(self.account, self.image_digest, self.id))
                    return record.status, 'Completed successfully'
            except Exception as ex:
                record.status = 'error'

                if data_written:
                    logger.info('Cleaning up after failed analysis archive task for {}/{}'.format(self.account,
                                                                                                  self.image_digest))
                    try:
                        resp = dest_archive_mgr.delete(self.account, record.manifest_bucket, record.manifest_key)
                    except Exception as ex:
                        logger.warn('Could not delete the analysis archive tarball in storage. May have leaked. Err: {}'.format(ex))

                session.delete(record)
                add_event(ImageArchivingFailed(self.account, self.image_digest, self.id, err=str(ex)))
                return 'error', str(ex)