def setUp(self):
    """Wire up an ingest Manager backed entirely by fake/local services."""
    # Force tests to use fake configuration: no Redis, file archive and
    # LevelDB store both rooted in fresh temporary directories.
    service_settings.REDIS_URL = None
    service_settings.ARCHIVE_TYPE = 'file'
    service_settings.ARCHIVE_PATH = mkdtemp()
    balkhash_settings.BACKEND = 'LEVELDB'
    balkhash_settings.LEVELDB_PATH = mkdtemp()
    redis_conn = get_fakeredis()
    self.queue = ServiceQueue(redis_conn, ServiceQueue.OP_INGEST, 'test')
    manager = Manager(self.queue, {})
    manager.entities = []
    # Patch the emit/queue hooks onto this instance so emitted entities are
    # captured locally instead of being dispatched to real services.
    manager.emit_entity = types.MethodType(emit_entity, manager)
    manager.queue_entity = types.MethodType(queue_entity, manager)  # noqa
    self.manager = manager
    self.archive = init_archive()
    self.manager._archive = self.archive
def setUp(self):
    """Wire up an ingest Manager against fake Redis, sqlite and a temp archive."""
    # Force tests to use fake configuration throughout the service layer.
    ingestors_settings.TESTING = True
    service_settings.REDIS_URL = None
    service_settings.ARCHIVE_TYPE = 'file'
    service_settings.ARCHIVE_PATH = mkdtemp()
    ftmstore_settings.DATABASE_URI = 'sqlite://'
    redis_conn = get_fakeredis()
    job = Job.create(redis_conn, 'test')
    stage = Stage(job, OP_INGEST)
    dataset = get_dataset(job.dataset.name, OP_INGEST)
    manager = Manager(dataset, stage, {})
    manager.entities = []
    # Patch the emit/queue hooks onto this instance so emitted entities are
    # captured locally instead of being dispatched to real services.
    manager.emit_entity = types.MethodType(emit_entity, manager)
    manager.queue_entity = types.MethodType(queue_entity, manager)  # noqa
    self.manager = manager
    self.archive = init_archive()
    self.manager._archive = self.archive
def setUp(self):
    """Start a mocked S3 backend and create an archive in a fake bucket."""
    self.file = ensure_path(__file__)
    self.mock = mock_s3()
    self.mock.start()
    self.archive = init_archive('s3', bucket='foo')
def get_archive():
    """Return the process-wide archive, creating and caching it on first use."""
    if hasattr(settings, '_archive'):
        return settings._archive
    # First call: build the archive once and memoise it on settings.
    settings._archive = init_archive()
    return settings._archive
def archive(self):
    """Return the shared archive instance, lazily initialised on settings."""
    if hasattr(settings, "_archive"):
        return settings._archive
    # Not built yet: create it once and cache for subsequent calls.
    settings._archive = init_archive()
    return settings._archive
settings._datastore = dataset.connect(settings.DATASTORE_URI, engine_kwargs=engine_kwargs) # Use bigint to store integers by default settings._datastore.types.integer = settings._datastore.types.bigint return settings._datastore def is_sync_mode(): if settings.TESTING or settings.DEBUG: return True return sls.REDIS_URL is None def connect_redis(): if settings.TESTING: return get_fakeredis() return get_redis() manager = LocalProxy(load_manager) datastore = LocalProxy(load_datastore) conn = LocalProxy(connect_redis) # File storage layer for blobs on local file system or S3 storage = init_archive() def init_memorious(): for func in get_extensions('memorious.plugins'): func()
def setUp(self):
    """Start a mocked S3 backend and create an archive with a publication bucket."""
    self.file = ensure_path(__file__)
    self.mock = mock_s3()
    self.mock.start()
    self.archive = init_archive("s3", bucket="foo", publication_bucket="foo")
def __init__(self, archive=None):
    """Keep the given archive, or fall back to a freshly initialised one."""
    if archive:
        self.archive = archive
    else:
        # No (truthy) archive supplied: build the default one.
        self.archive = init_archive()
def get_archive():
    """Return the cached Aleph archive, building it from settings on first use."""
    if hasattr(settings, '_aleph_archive'):
        return settings._aleph_archive
    # First call: construct the archive from the configured backend settings
    # and memoise it for the lifetime of the process.
    archive = init_archive(archive_type=settings.ARCHIVE_TYPE,  # noqa
                           bucket=settings.ARCHIVE_BUCKET,
                           path=settings.ARCHIVE_PATH)
    settings._aleph_archive = archive
    return archive
def get_archive():
    """Lazily initialise and memoise the blob archive configured in settings."""
    if not hasattr(settings, '_aleph_archive'):
        # Gather the backend configuration and build the archive exactly once.
        config = {
            'archive_type': settings.ARCHIVE_TYPE,
            'bucket': settings.ARCHIVE_BUCKET,
            'path': settings.ARCHIVE_PATH,
        }
        settings._aleph_archive = init_archive(**config)  # noqa
    return settings._aleph_archive
def setUp(self):
    """Create a file-system archive rooted under the system temp directory."""
    base = ensure_path(tempfile.gettempdir())
    # Fixed subdirectory name 'sltest' — shared across test runs.
    self.path = base.joinpath('sltest')
    self.archive = init_archive('file', path=self.path)
    self.file = ensure_path(__file__)