Example #1
    def _execute(self):
        """
        Run the deletion of the archived data. This is irreversible.

        The final record deletion (the ArchivedImage records themselves) happens later via gc passes, which leaves them in the 'deleted' state
        for a while so users can see the transition.

        :return: (status, msg) tuple
        """
        with session_scope() as session:
            rec = db_archived_images.get(session, self.account, self.image_digest)
            if not rec:
                raise MetadataNotFound('/'.join([self.account, self.image_digest]))

            self.archive_record = rec.to_dict()
            self.archive_detail_records = [x.to_dict() for x in rec.tags()]

        dest_archive_mgr = archive.get_manager()

        try:
            logger.debug('Deleting archive object: {}/{}'.format(self.archive_record['manifest_bucket'], self.archive_record['manifest_key']))
            dest_archive_mgr.delete(self.account, self.archive_record['manifest_bucket'], self.archive_record['manifest_key'])
            logger.debug('Image analysis archive deletion complete')
        except Exception:
            logger.exception('Failure deleting archive content')
            raise

        with session_scope() as session:
            logger.debug('Deleting archive records for {}/{}'.format(self.account, self.image_digest))
            db_archived_images.delete(session, self.account, [self.image_digest])

        return 'deleted', 'Archive deleted successfully'
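
All of these examples lean on a session_scope() context manager for transactional database access. A minimal sketch of such a helper, assuming a SQLAlchemy sessionmaker (the engine URL and setup below are placeholders, not the project's actual configuration):

from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# Placeholder engine/session factory for illustration only
engine = create_engine('sqlite:///:memory:')
Session = sessionmaker(bind=engine)

@contextmanager
def session_scope():
    """Provide a transactional scope: commit on success, roll back on error."""
    session = Session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()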
Example #2
    def run(self, merge=False):
        """

        :param merge: if True and an archive record already exists, merge/update it instead of aborting
        :return: (str, str) tuple, with status as first element and detail msg as second
        """
        logger.debug("Starting archiving process for image {}".format(self.image_digest))

        self.started = datetime.datetime.utcnow()

        try:
            with session_scope() as session:
                found = db_archived_images.get(session, self.account, self.image_digest)
                if found and not merge:
                    # Short-circuit, since already exists
                    return found.status, 'Existing record found, archiving aborted'

                catalog_img_dict = db_catalog_image.get(self.image_digest, self.account, session)

                if not catalog_img_dict:
                    raise Exception('Could not locate an image with digest {} in account {}'.format(self.image_digest, self.account))
                else:
                    self._catalog_record = catalog_img_dict

                if catalog_img_dict.get('image_status') != 'active' or catalog_img_dict.get('analysis_status') != 'analyzed':
                    raise Exception('Invalid image record state. Image must have "analysis_status"="analyzed" and "image_status"="active". Found {} and {}'.format(catalog_img_dict.get('analysis_status'), catalog_img_dict.get('image_status')))

                # Add the new record
                img = ArchivedImage.from_catalog_image(catalog_img_dict, cascade=True)
                if merge and found:
                    img = session.merge(img)
                else:
                    session.add(img)

        except Exception as ex:
            add_event(ImageArchivingFailed(self.account, self.image_digest, self.id, err=str(ex)))
            return 'error', str(ex)

        try:
            return self._execute()
        except Exception as ex:
            logger.exception('Error executing image archive task')
            return 'error', str(ex)
        finally:
            self.stopped = datetime.datetime.utcnow()
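
The run() method above follows a common task pattern: record a start timestamp, fail fast inside a session, delegate the real work to _execute(), and always record the stop time. A stripped-down, self-contained sketch of that control flow (all names here are illustrative, not the project's API):

import datetime

def run_task(work_fn):
    # work_fn is expected to return a (status, detail_msg) tuple
    started = datetime.datetime.utcnow()
    try:
        return work_fn()
    except Exception as ex:
        return 'error', str(ex)
    finally:
        stopped = datetime.datetime.utcnow()
        print('task ran from {} to {}'.format(started, stopped))

print(run_task(lambda: ('archived', 'Completed successfully')))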
Example #3
def get_archived_analysis(imageDigest):
    """
    GET /archives/images/{digest}

    :param imageDigest: digest of the image whose archived analysis record to fetch
    :return: (response object, http status code) tuple
    """

    try:
        with db.session_scope() as session:
            return_object = db_archived_images.get(session, ApiRequestContextProxy.namespace(), imageDigest)

            if not return_object:
                return make_response_error('Not found in archive', in_httpcode=404), 404

            return archived_img_to_msg(return_object), 200
    except Exception as err:
        logger.exception('Error getting archived analysis')
        return make_response_error(str(err), in_httpcode=500), 500
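
A handler like this is reached over HTTP at the route named in the docstring. A hypothetical client call against a locally running service; the base URL and credentials below are made up for illustration only:

import requests

BASE_URL = 'http://localhost:8228/v1'       # assumed service address
DIGEST = 'sha256:0123456789abcdef'          # placeholder digest

resp = requests.get(
    '{}/archives/images/{}'.format(BASE_URL, DIGEST),
    auth=('admin', 'foobar'),               # assumed credentials
)
if resp.status_code == 404:
    print('Not found in archive')
else:
    resp.raise_for_status()
    print(resp.json())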
Example #4
    def _execute(self):
        # if image record already exists, exit.

        with session_scope() as session:
            if db_catalog_image.get(self.image_digest, self.account, session):
                logger.info('Image archive restore found existing image records already. Aborting restore.')
                raise ImageConflict('Conflict: Image already exists in system. No restore possible')

            rec = db_archived_images.get(session, self.account, self.image_digest)
            if not rec:
                raise MetadataNotFound('/'.join([str(self.account), str(self.image_digest)]))

            self.archive_record = rec.to_dict()
            self.archive_detail_records = [x.to_dict() for x in rec.tags()]

        src_archive_mgr = archive.get_manager()
        dest_obj_mgr = object_store.get_manager()

        # Load the archive manifest
        m = src_archive_mgr.get(self.account, self.archive_record['manifest_bucket'], self.archive_record['manifest_key'])

        if m:
            tf = tempfile.NamedTemporaryFile(prefix='analysis_archive_{}'.format(self.image_digest), dir=localconfig.get_config()['tmp_dir'], delete=False)
            try:
                tf.write(ensure_bytes(m))
                tf.close()

                # Load the archive from the temp file
                with ImageArchive.for_reading(tf.name) as img_archive:

                    logger.debug('Using manifest: {}'.format(img_archive.manifest))

                    self.restore_artifacts(img_archive, dest_obj_mgr)
                    self.restore_records(img_archive.manifest)
                    self._reload_policy_engine(img_archive.manifest)
            finally:
                os.remove(tf.name)

        else:
            raise Exception('No archive manifest found in archive record. Cannot restore')
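
The restore path above spills the fetched manifest to a named temp file so downstream readers can work from a real file path, then removes it in a finally block. A self-contained sketch of that pattern using only the standard library (the payload is a stand-in for the fetched archive bytes):

import os
import tempfile

payload = b'{"artifacts": []}'   # stand-in for the archive manifest bytes

tf = tempfile.NamedTemporaryFile(prefix='analysis_archive_', delete=False)
try:
    tf.write(payload)
    tf.close()

    # Hand tf.name to anything that needs a real on-disk path
    with open(tf.name, 'rb') as fh:
        assert fh.read() == payload
finally:
    os.remove(tf.name)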
Example #5
    def _execute(self):
        """
        Perform the archiving of the analysis data.

        :return: (status, msg) tuple
        """

        src_obj_mgr = object_store.get_manager()
        dest_archive_mgr = archive.get_manager()
        data_written = False

        with session_scope() as session:
            record = db_archived_images.get(session, self.account, self.image_digest)

            if not record:
                raise Exception('No analysis archive record found to track state')

            try:
                with tempfile.TemporaryDirectory(dir=localconfig.get_config().get('tmp_dir')) as tempdir:
                    with ImageArchive.for_writing(os.path.join(tempdir, 'analysis_archive.tar.gz')) as img_archive:
                        img_archive.account = self.account
                        img_archive.image_digest = self.image_digest

                        if self._catalog_record.get('image_detail'):
                            image_id = self._catalog_record.get('image_detail')[0]['imageId']
                        else:
                            image_id = None

                        img_archive.manifest.metadata = {
                            'versions': localconfig.get_versions(),
                            'image_id': image_id,
                            'image_record': json.dumps(self._catalog_record, sort_keys=True)
                        }

                        self.archive_required(src_obj_mgr, self.required_artifacts, img_archive)

                        try:
                            vuln_artifacts = self.archive_vuln_history(img_archive)
                        except Exception:
                            logger.exception('Error saving vuln history')
                            raise

                        try:
                            eval_artifacts = self.archive_policy_evaluations(src_obj_mgr, img_archive, session)
                        except Exception:
                            logger.exception('Error saving policy evals')
                            raise

                        self.manifest = img_archive.manifest

                    # Closed tarball, now write it.

                    archive_bucket = self.__archive_bucket__
                    archive_key = '{}.tar.gz'.format(self.image_digest)
                    record.manifest_bucket = archive_bucket
                    record.manifest_key = archive_key

                    # Write the archive out to object store
                    with open(img_archive.backing_file_path, 'r+b') as tb:
                        tarball_data = tb.read()
                        size = len(tarball_data)

                    if not dest_archive_mgr.put(self.account, bucket=archive_bucket, archiveId=archive_key, data=tarball_data):
                        raise Exception("Could not write archive manifest")

                    data_written = True
                    record.archive_size_bytes = size
                    record.status = 'archived'

                    add_event(ImageArchived(self.account, self.image_digest, self.id))
                    return record.status, 'Completed successfully'
            except Exception as ex:
                record.status = 'error'

                if data_written:
                    logger.info('Cleaning up after failed analysis archive task for {}/{}'.format(self.account,
                                                                                                  self.image_digest))
                    try:
                        dest_archive_mgr.delete(self.account, record.manifest_bucket, record.manifest_key)
                    except Exception as delete_ex:
                        logger.warn('Could not delete the analysis archive tarball in storage. May have leaked. Err: {}'.format(delete_ex))

                session.delete(record)
                add_event(ImageArchivingFailed(self.account, self.image_digest, self.id, err=str(ex)))
                return 'error', str(ex)
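
The write-out step above builds a tarball in a temporary directory, reads it back as bytes, and hands those bytes to the archive store. A minimal standalone sketch of that round trip using only the standard library (file names are illustrative):

import os
import tarfile
import tempfile

with tempfile.TemporaryDirectory() as tempdir:
    tarball_path = os.path.join(tempdir, 'analysis_archive.tar.gz')

    # Build the archive with a single member for illustration
    member_path = os.path.join(tempdir, 'manifest.json')
    with open(member_path, 'w') as fh:
        fh.write('{}')
    with tarfile.open(tarball_path, 'w:gz') as tar:
        tar.add(member_path, arcname='manifest.json')

    # Read the finished tarball back as bytes for upload to an object store
    with open(tarball_path, 'rb') as tb:
        tarball_data = tb.read()

    print('tarball size: {} bytes'.format(len(tarball_data)))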