def get_archive():
    # Lazily initialise the storage layer and cache it on the settings
    # module so it is created only once per process.
    if not hasattr(settings, '_aleph_archive'):
        archive = storagelayer.init(settings.ARCHIVE_TYPE,
                                    path=settings.ARCHIVE_PATH,
                                    aws_key_id=settings.ARCHIVE_AWS_KEY_ID,
                                    aws_secret=settings.ARCHIVE_AWS_SECRET,
                                    aws_region=settings.ARCHIVE_AWS_REGION,
                                    bucket=settings.ARCHIVE_BUCKET)
        settings._aleph_archive = archive
    return settings._aleph_archive
def get_archive():
    # Flask variant: cache the storage layer on the application object
    # instead of the settings module, reading its configuration from
    # app.config (requires `from flask import current_app`).
    app = current_app._get_current_object()
    if not hasattr(app, '_aleph_archive'):
        archive = storagelayer.init(app.config.get('ARCHIVE_TYPE'),
                                    path=app.config.get('ARCHIVE_PATH'),
                                    aws_key_id=app.config.get('ARCHIVE_AWS_KEY_ID'),
                                    aws_secret=app.config.get('ARCHIVE_AWS_SECRET'),
                                    aws_region=app.config.get('ARCHIVE_AWS_REGION'),
                                    bucket=app.config.get('ARCHIVE_BUCKET'))
        app._aleph_archive = archive
    return app._aleph_archive
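# Usage sketch (an assumption, not shown in the excerpts): aleph-style
# code commonly wraps such a cached getter in werkzeug's LocalProxy,
# the same class imported in the memorious module below, so callers can
# treat the archive like a plain module-level object.
from werkzeug.local import LocalProxy

archive = LocalProxy(get_archive)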
import logging

import redis
import storagelayer
from sqlalchemy.pool import NullPool
from werkzeug.local import LocalProxy

from memorious import settings

log = logging.getLogger(__name__)

# Connection pool for the redis backend.
redis_pool = redis.ConnectionPool(host=settings.REDIS_HOST,
                                  port=settings.REDIS_PORT,
                                  decode_responses=True)

# File storage layer for blobs on local file system or S3
storage = storagelayer.init(settings.ARCHIVE_TYPE,
                            path=settings.ARCHIVE_PATH,
                            aws_key_id=settings.ARCHIVE_AWS_KEY_ID,
                            aws_secret=settings.ARCHIVE_AWS_SECRET,
                            aws_region=settings.ARCHIVE_AWS_REGION,
                            bucket=settings.ARCHIVE_BUCKET)


def load_manager():
    # Cache a single CrawlerManager, loading crawler definitions
    # from CONFIG_PATH on first use.
    if not hasattr(settings, '_manager'):
        from memorious.logic.manager import CrawlerManager
        settings._manager = CrawlerManager()
        if settings.CONFIG_PATH:
            settings._manager.load_path(settings.CONFIG_PATH)
    return settings._manager


def load_datastore():
    if not hasattr(settings, '_datastore'):
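# The body of load_datastore() is truncated above. A hedged completion,
# inferred from the NullPool import rather than taken from the excerpt:
# connect to the datastore through the dataset library with connection
# pooling disabled. The DATASTORE_URI setting name is an assumption.
import dataset


def load_datastore():
    if not hasattr(settings, '_datastore'):
        settings._datastore = dataset.connect(
            settings.DATASTORE_URI,
            engine_kwargs={'poolclass': NullPool})
    return settings._datastore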
def setUp(self):
    # Run the S3 backend against a moto mock bucket.
    self.mock = mock_s3()
    self.mock.start()
    self.archive = storagelayer.init('s3', bucket='foo')
    self.file = os.path.abspath(__file__)
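# Assumed counterpart to setUp(), not shown in the excerpt: moto's
# standalone mocks are started with .start() and must be stopped in
# tearDown() so mocked S3 state does not leak between test cases.
def tearDown(self):
    self.mock.stop()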
def setUp(self):
    # Run the file backend against a temporary directory.
    self.path = os.path.join(tempfile.gettempdir(), 'storagelayer_test')
    self.archive = storagelayer.init('file', path=self.path)
    self.file = os.path.abspath(__file__)
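# Illustrative round-trip test that could sit alongside either setUp()
# above. It assumes the archive exposes archive_file() and load_file(),
# as storagelayer archives do in the aleph codebase; the assertions
# themselves are a sketch, not part of the original tests.
def test_archive_roundtrip(self):
    content_hash = self.archive.archive_file(self.file)
    self.assertIsNotNone(content_hash)
    path = self.archive.load_file(content_hash)
    self.assertTrue(os.path.exists(path))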