def create_archive(bucket, archiveid, bodycontent):
    """
    Store a JSON-serializable document in the archive under bucket/archiveid for the caller's account.

    :param bucket: str bucket name within the account's archive namespace
    :param archiveid: str id of the document within the bucket
    :param bodycontent: JSON-serializable object to store
    :return: (response_object, http_status_code) tuple; on success the response object is the
             resource URL of the stored document (or "N/A" when no service record is available)
    """
    httpcode = 500
    try:
        account_name = ApiRequestContextProxy.namespace()
        archive_sys = archive.get_manager()
        json_bytes = anchore_utils.ensure_bytes(json.dumps(bodycontent))

        # Build the resource URL for the response from this service's own record, if one exists
        my_svc = ApiRequestContextProxy.get_service()
        if my_svc is not None:
            resource_url = my_svc.service_record['base_url'] + "/" + my_svc.service_record['version'] + "/archive/" + bucket + "/" + archiveid
        else:
            resource_url = "N/A"

        archive_sys.put(account_name, bucket, archiveid, json_bytes)
        return_object = resource_url
        httpcode = 200
    except Exception as err:
        # Map any failure into the standard error-response envelope
        return_object = anchore_engine.common.helpers.make_response_error(err, in_httpcode=httpcode)
    return return_object, httpcode
def _execute(self):
    """
    Run the deletion of the archived data. This is irreversable. The final record delete (the ArchivedImage records) are deleted later with gc passes
    to leave them in the 'deleted' state for a while so users can see the transition.

    :return: (status, msg) tuple
    """
    # Load the archive record and its tag details; fail fast if there is nothing to delete
    with session_scope() as session:
        rec = db_archived_images.get(session, self.account, self.image_digest)
        if not rec:
            raise MetadataNotFound('/'.join([self.account, self.image_digest]))

        self.archive_record = rec.to_dict()
        self.archive_detail_records = [x.to_dict() for x in rec.tags()]

    dest_archive_mgr = archive.get_manager()

    try:
        logger.debug('Deleting archive object: {}/{}'.format(self.archive_record['manifest_bucket'], self.archive_record['manifest_key']))
        dest_archive_mgr.delete(self.account, self.archive_record['manifest_bucket'], self.archive_record['manifest_key'])
        logger.debug('Image analysis archive deletion complete')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are not caught here
        logger.exception('Failure deleting archive content')
        raise

    # Remove the tag/detail records; the ArchivedImage record itself is left for later gc passes
    with session_scope() as session:
        logger.debug('Deleting archive records for {}/{}'.format(self.account, self.image_digest))
        db_archived_images.delete(session, self.account, [self.image_digest])

    return 'deleted', 'Archive deleted successfully'
def delete_archive(bucket, archiveid):
    """
    Delete the archive document at bucket/archiveid for the caller's account.

    :param bucket: str bucket name within the account's archive namespace
    :param archiveid: str id of the document within the bucket
    :return: (response_object, http_status_code) tuple; response object is None on success
    """
    httpcode = 500
    try:
        archive_sys = archive.get_manager()
        account_name = ApiRequestContextProxy.namespace()
        # Session scope retained for transactional behavior of the delete;
        # the session object itself is not referenced directly
        with db.session_scope():
            archive_sys.delete(account_name, bucket, archiveid)
            httpcode = 200
            return_object = None
    except Exception as err:
        return_object = anchore_engine.common.helpers.make_response_error(err, in_httpcode=httpcode)
    return return_object, httpcode
def get_archive(bucket, archiveid):
    """
    Fetch and JSON-decode the archive document at bucket/archiveid for the caller's account.

    :param bucket: str bucket name within the account's archive namespace
    :param archiveid: str id of the document within the bucket
    :return: (response_object, http_status_code) tuple; the decoded document with 200 on
             success, or an error-response envelope with 404 on lookup/decode failure
    """
    httpcode = 500
    try:
        mgr = archive.get_manager()
        account = ApiRequestContextProxy.namespace()
        try:
            raw = mgr.get(account, bucket, archiveid)
            return_object = json.loads(anchore_utils.ensure_str(raw))
            httpcode = 200
        except Exception as err:
            # Any fetch/decode failure is surfaced as a 404 via the outer handler
            httpcode = 404
            raise err
    except Exception as err:
        return_object = anchore_engine.common.helpers.make_response_error(err, in_httpcode=httpcode)
    return return_object, httpcode
def _execute(self):
    """
    Restore an archived image analysis back into the working system.

    Aborts with ImageConflict if the image already has live catalog records, and with
    MetadataNotFound if no archive record exists. Otherwise pulls the archive manifest,
    stages the tarball in a temp file, and restores artifacts and records from it.
    """
    # Guard: refuse to restore over an image that already exists in the system
    with session_scope() as session:
        if db_catalog_image.get(self.image_digest, self.account, session):
            logger.info('Image archive restore found existing image records already. Aborting restore.')
            raise ImageConflict('Conflict: Image already exists in system. No restore possible')

        rec = db_archived_images.get(session, self.account, self.image_digest)
        if not rec:
            raise MetadataNotFound('/'.join([str(self.account), str(self.image_digest)]))

        self.archive_record = rec.to_dict()
        self.archive_detail_records = [x.to_dict() for x in rec.tags()]

    src_archive_mgr = archive.get_manager()
    dest_obj_mgr = object_store.get_manager()

    # Load the archive manifest; nothing to restore without it
    manifest_data = src_archive_mgr.get(self.account, self.archive_record['manifest_bucket'], self.archive_record['manifest_key'])
    if not manifest_data:
        raise Exception('No archive manifest found in archive record. Cannot restore')

    # Stage the manifest in a named temp file so ImageArchive can open it by path;
    # delete=False because the file is re-opened after close, removed in the finally
    tmp = tempfile.NamedTemporaryFile(prefix='analysis_archive_{}'.format(self.image_digest),
                                      dir=localconfig.get_config()['tmp_dir'],
                                      delete=False)
    try:
        tmp.write(ensure_bytes(manifest_data))
        tmp.close()

        # Load the archive from the temp file and restore its contents
        with ImageArchive.for_reading(tmp.name) as img_archive:
            logger.debug('Using manifest: {}'.format(img_archive.manifest))
            self.restore_artifacts(img_archive, dest_obj_mgr)
            self.restore_records(img_archive.manifest)
            self._reload_policy_engine(img_archive.manifest)
    finally:
        os.remove(tmp.name)
def _execute(self):
    """
    Do the archiving of data

    Builds the analysis archive tarball in a temp dir, writes it to the archive object
    store, and updates the tracking record. On failure, cleans up any written tarball,
    deletes the tracking record, and emits an ImageArchivingFailed event.

    :return: (status, msg) tuple
    """
    src_obj_mgr = object_store.get_manager()
    dest_archive_mgr = archive.get_manager()
    data_written = False

    with session_scope() as session:
        record = db_archived_images.get(session, self.account, self.image_digest)
        if not record:
            raise Exception('No analysis archive record found to track state')

        try:
            with tempfile.TemporaryDirectory(dir=localconfig.get_config().get('tmp_dir')) as tempdir:
                with ImageArchive.for_writing(os.path.join(tempdir, 'analysis_archive.tar.gz')) as img_archive:
                    img_archive.account = self.account
                    img_archive.image_digest = self.image_digest

                    # image_id comes from the first image_detail entry when present
                    if self._catalog_record.get('image_detail'):
                        image_id = self._catalog_record.get('image_detail')[0]['imageId']
                    else:
                        image_id = None

                    img_archive.manifest.metadata = {
                        'versions': localconfig.get_versions(),
                        'image_id': image_id,
                        'image_record': json.dumps(self._catalog_record, sort_keys=True)
                    }

                    self.archive_required(src_obj_mgr, self.required_artifacts, img_archive)

                    try:
                        self.archive_vuln_history(img_archive)
                    except Exception:
                        logger.exception('Error saving vuln history')
                        raise

                    try:
                        self.archive_policy_evaluations(src_obj_mgr, img_archive, session)
                    except Exception:
                        logger.exception('Error saving policy evals')
                        raise

                    self.manifest = img_archive.manifest

                # Closed tarball, now write it.
                archive_bucket = self.__archive_bucket__
                archive_key = '{}.tar.gz'.format(self.image_digest)
                record.manifest_bucket = archive_bucket
                record.manifest_key = archive_key

                # Write the archive out to object store
                with open(img_archive.backing_file_path, 'r+b') as tb:
                    tarball_data = tb.read()
                    size = len(tarball_data)

                if not dest_archive_mgr.put(self.account, bucket=archive_bucket, archiveId=archive_key, data=tarball_data):
                    raise Exception("Could not write archive manifest")

                data_written = True
                record.archive_size_bytes = size
                record.status = 'archived'

            add_event(ImageArchived(self.account, self.image_digest, self.id))
            return record.status, 'Completed successfully'
        except Exception as ex:
            record.status = 'error'

            if data_written:
                logger.info('Cleaning up after failed analysis archive task for {}/{}'.format(self.account, self.image_digest))
                try:
                    dest_archive_mgr.delete(self.account, record.manifest_bucket, record.manifest_key)
                except Exception as cleanup_err:
                    # Distinct name here: re-using `ex` would unbind it when this inner
                    # handler exits, making the str(ex) calls below raise NameError
                    logger.warn('Could not delete the analysis archive tarball in storage. May have leaked. Err: {}'.format(cleanup_err))

            session.delete(record)
            add_event(ImageArchivingFailed(self.account, self.image_digest, self.id, err=str(ex)))
            return 'error', str(ex)