def setUp(self):
    # Create the source (old) blob db first; it must be the globally
    # installed db until the migrating db replaces it below.
    db1 = TemporaryFilesystemBlobDB()
    assert get_blob_db() is db1, (get_blob_db(), db1)
    data = b'binary data not valid utf-8 \xe4\x94'
    self.not_founds = set()
    self.blob_metas = []
    for type_code in [CODES.form_xml, CODES.multimedia, CODES.data_export]:
        # one meta with real blob content in db1 ...
        meta = db1.put(BytesIO(data), meta=new_meta(type_code=type_code))
        # ... and one "lost" meta whose blob bytes were never written
        # (content_length is faked), so a migration should report it missing
        lost = new_meta(type_code=type_code, content_length=42)
        self.blob_metas.append(meta)
        self.blob_metas.append(lost)
        lost.save()
        self.not_founds.add((
            lost.id,
            lost.domain,
            lost.type_code,
            lost.parent_id,
            lost.key,
        ))
    self.test_size = len(self.blob_metas)
    # db2 is the destination (new) db; the migrating db routes between them
    db2 = TemporaryFilesystemBlobDB()
    self.db = TemporaryMigratingBlobDB(db2, db1)
    assert get_blob_db() is self.db, (get_blob_db(), self.db)
    # NOTE(review): bare call, unlike BaseMigrationTest.discard_migration_state
    # used by sibling fixtures — presumably a module-level helper; confirm.
    discard_migration_state(self.slug)
def setUpClass(cls):
    """Build a migrating blob db backed by S3 (new) and filesystem (old)."""
    # Skip (rather than fail) the test when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        s3_settings = settings.S3_BLOB_DB_SETTINGS
    cls.s3db = TemporaryS3BlobDB(s3_settings)
    cls.fsdb = TemporaryFilesystemBlobDB()
    cls.db = mod.MigratingBlobDB(cls.s3db, cls.fsdb)
def setup(self):
    # Bail out early when test setup is disabled for this run.
    if self.should_skip_test_setup():
        return
    from corehq.blobs.tests.util import TemporaryFilesystemBlobDB
    self.blob_db = TemporaryFilesystemBlobDB()
    # get/verify list of apps with databases to be deleted on teardown
    databases = getattr(settings, "COUCHDB_DATABASES", [])
    if isinstance(databases, (list, tuple)):
        # Convert old style to new style
        databases = {app_name: uri for app_name, uri in databases}
    self.apps = [self.verify_test_db(*item) for item in databases.items()]
    if self.skip_setup_for_reuse_db:
        from django.db import connections
        old_names = []
        for connection in connections.all():
            db = connection.settings_dict
            # safety net: only ever reuse databases named as test databases
            assert db["NAME"].startswith(TEST_DATABASE_PREFIX), db["NAME"]
            try:
                connection.ensure_connection()
            except OperationalError:
                break  # cannot connect; resume normal setup
            old_names.append((connection, db["NAME"], True))
        else:
            # for/else: every connection succeeded, so all test databases
            # exist and can be reused without being recreated
            self.old_names = old_names, []
            return  # skip remaining setup
    sys.__stdout__.write("\n")  # newline for creating database message
    if "REUSE_DB" in os.environ:
        sys.__stdout__.write("REUSE_DB={REUSE_DB!r} ".format(**os.environ))
    super(HqdbContext, self).setup()
def setup(self):
    # Bail out early when test setup is disabled for this run.
    if self.should_skip_test_setup():
        return
    from corehq.blobs.tests.util import TemporaryFilesystemBlobDB
    self.blob_db = TemporaryFilesystemBlobDB()
    self.old_names = self._get_databases()
    if self.skip_setup_for_reuse_db and self._databases_ok():
        # existing test databases are usable; optionally refresh them
        if self.reuse_db == "migrate":
            call_command('migrate_multi', interactive=False)
        if self.reuse_db == "flush":
            flush_databases()
        return  # skip remaining setup
    if self.reuse_db == "reset":
        self.reset_databases()
    print("", file=sys.__stdout__)  # newline for creating database message
    if self.reuse_db:
        print("REUSE_DB={} ".format(self.reuse_db), file=sys.__stdout__, end="")
    if self.skip_setup_for_reuse_db:
        # pass this on to the Django runner to avoid creating databases
        # that already exist
        self.runner.keepdb = True
    super(HqdbContext, self).setup()
def setUp(self):
    """Record total system memory and install a temporary blob db."""
    # psutil lives in dev-requirements only; import lazily so merely
    # importing this module does not require it when the test is skipped.
    from psutil import virtual_memory
    mem_info = virtual_memory()
    self.memory = mem_info.total
    self.db = TemporaryFilesystemBlobDB()
    assert get_blob_db() is self.db, (get_blob_db(), self.db)
    self.blob_metas = []
def import_and_verify(self, filename):
    """Run the blob import command on *filename* and verify that every
    expected blob for this test's domain is present in a fresh db with
    the original content."""
    from ..management.commands.run_blob_import import Command as ImportCommand
    # metas we expect the import to restore: in the test domain and not
    # deliberately missing
    expected = set()
    for blob_meta in self.blob_metas:
        if blob_meta.key in self.not_found:
            continue
        if blob_meta.domain == self.domain_name:
            expected.add(blob_meta)
    # at least one fixture should exercise the compressed-blob path
    self.assertTrue(any(item.is_compressed for item in expected))
    with TemporaryFilesystemBlobDB() as target_db:
        assert get_blob_db() is target_db, (get_blob_db(), target_db)
        ImportCommand.handle(None, filename)
        for blob_meta in expected:
            with target_db.get(meta=blob_meta) as blob_file:
                self.assertEqual(blob_file.read(), self.data, blob_meta.type_code)
def setUp(self):
    """Create saved-export docs in a filesystem blob db, then install a
    migrating db so the test can migrate their payloads to S3."""
    # Skip (rather than fail) when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        config = settings.S3_BLOB_DB_SETTINGS
    fsdb = TemporaryFilesystemBlobDB()
    assert get_blob_db() is fsdb, (get_blob_db(), fsdb)
    docs = []
    self.migrate_docs = docs
    for n in range(self.test_size):
        export = SavedBasicExport(configuration=_mk_config("config-%s" % n))
        export.save()
        # payload is written through the currently-installed (old) db
        export.set_payload(("content %s" % n).encode('utf-8'))
        docs.append(export)
    s3db = TemporaryS3BlobDB(config)
    self.db = TemporaryMigratingBlobDB(s3db, fsdb)
    assert get_blob_db() is self.db, (get_blob_db(), self.db)
    BaseMigrationTest.discard_migration_state(self.slug)
def setUpClass(cls):
    """Create blob fixtures in two domains (the test domain and a throwaway
    one), each with one real blob and one "lost" meta whose bytes were
    never written."""
    super().setUpClass()
    cls.db = TemporaryFilesystemBlobDB()
    assert get_blob_db() is cls.db, (get_blob_db(), cls.db)
    data = b'binary data not valid utf-8 \xe4\x94'
    cls.blob_metas = []
    cls.not_found = set()
    # Fix: call uuid4. The original `str(uuid.uuid4)` stringified the
    # function object itself ("<function uuid4 at 0x...>") instead of
    # generating a UUID, unlike the correct `str(uuid.uuid4())` below.
    cls.domain_name = str(uuid.uuid4())
    for type_code in [CODES.form_xml, CODES.multimedia, CODES.data_export]:
        # one fixture in the test domain, one in an unrelated domain
        for domain in (cls.domain_name, str(uuid.uuid4())):
            meta = cls.db.put(BytesIO(data), meta=new_meta(domain=domain, type_code=type_code))
            # meta with no backing blob (content_length faked): operations
            # that read it should report it as not found
            lost = new_meta(domain=domain, type_code=type_code, content_length=42)
            cls.blob_metas.append(meta)
            cls.blob_metas.append(lost)
            lost.save()
            cls.not_found.add(lost.key)
def setup(self):
    # Bail out early when test setup is disabled for this run.
    if self.should_skip_test_setup():
        return
    from corehq.blobs.tests.util import TemporaryFilesystemBlobDB
    self.blob_db = TemporaryFilesystemBlobDB()
    if self.skip_setup_for_reuse_db and self._databases_ok():
        # existing test databases are usable; optionally refresh them
        if self.reuse_db == "migrate":
            call_command('migrate_multi', interactive=False)
        if self.reuse_db == "flush":
            flush_databases()
        return  # skip remaining setup
    if self.reuse_db == "reset":
        self.delete_couch_databases()
    print("", file=sys.__stdout__)  # newline for creating database message
    if self.reuse_db:
        print("REUSE_DB={} ".format(self.reuse_db), file=sys.__stdout__, end="")
    super(HqdbContext, self).setup()
def setup(self):
    # Bail out early when test setup is disabled for this run.
    if self.should_skip_test_setup():
        return
    from corehq.blobs.tests.util import TemporaryFilesystemBlobDB
    self.blob_db = TemporaryFilesystemBlobDB()
    # get/verify list of apps with databases to be deleted on teardown
    databases = getattr(settings, "COUCHDB_DATABASES", [])
    if isinstance(databases, (list, tuple)):
        # Convert old style to new style
        databases = {app_name: uri for app_name, uri in databases}
    self.apps = [self.verify_test_db(*item) for item in databases.items()]
    if self.skip_setup_for_reuse_db and self._databases_ok():
        # existing test databases are usable; optionally run migrations
        if self.run_migrations_for_reuse_db:
            call_command('migrate_multi', interactive=False)
        return  # skip remaining setup
    sys.__stdout__.write("\n")  # newline for creating database message
    if "REUSE_DB" in os.environ:
        sys.__stdout__.write("REUSE_DB={REUSE_DB!r} ".format(**os.environ))
    super(HqdbContext, self).setup()
def setUpClass(cls):
    """Install a temporary filesystem blob db for blob download tests."""
    super(TestBlobDownload, cls).setUpClass()
    cls.db = TemporaryFilesystemBlobDB()
def setUpClass(cls):
    """Install a temporary filesystem blob db for the duration of the class."""
    super().setUpClass()
    cls.db = TemporaryFilesystemBlobDB()
def setUpClass(cls):
    """Replace the base class's S3 blob db with a migrating db that routes
    between S3 (new) and a temporary filesystem db (old)."""
    super(TestMigratingBlobDB, cls).setUpClass()
    # the base class is expected to have installed a TemporaryS3BlobDB
    assert isinstance(cls.db, TemporaryS3BlobDB), cls.db
    cls.s3db = cls.db
    cls.fsdb = TemporaryFilesystemBlobDB()
    cls.db = TemporaryMigratingBlobDB(cls.s3db, cls.fsdb)
def setUp(self):
    """Create couch and SQL documents with blobs in the old db, plus
    matching "lost" records whose blobs are unreachable, for migration
    tests."""
    from io import BytesIO  # local import, matching this file's style
    lost_db = TemporaryFilesystemBlobDB()  # must be created before other dbs
    db1 = TemporaryFilesystemBlobDB()
    assert get_blob_db() is db1, (get_blob_db(), db1)
    missing = "found.not"
    name = "blob.bin"
    data = b'binary data not valid utf-8 \xe4\x94'
    self.not_founds = set()
    self.couch_docs = []
    with lost_db:
        for doc_type, model_class in self.couch_doc_types.items():
            item = model_class()
            item.doc_type = doc_type
            item.save()
            item.put_attachment(data, name)
            # write the second attachment into lost_db, which is closed when
            # the outer `with` exits, making that blob unreachable
            with install_blob_db(lost_db):
                item.put_attachment(data, missing)
                self.not_founds.add((
                    doc_type,
                    item._id,
                    item.external_blobs[missing].id,
                    item._blobdb_bucket(),
                ))
            item.save()
            self.couch_docs.append(item)

    def create_obj(rex):
        # Build and save a model instance for the given reindex accessor,
        # returning it with its random blob identifier.
        ident = random_url_id(8)
        args = {rex.blob_helper.id_attr: ident}
        fields = {getattr(f, "attname", "") for f in rex.model_class._meta.get_fields()}
        if "content_length" in fields:
            args["content_length"] = len(data)
        elif "length" in fields:
            args["length"] = len(data)
        item = rex.model_class(**args)
        # allow per-model save hooks named <ModelClass>_save on the test
        save_attr = rex.model_class.__name__ + "_save"
        if hasattr(self, save_attr):
            getattr(self, save_attr)(item, rex)
        else:
            item.save()
        return item, ident

    self.sql_docs = []
    for rex in (x() for x in self.sql_reindex_accessors):
        item, ident = create_obj(rex)
        helper = rex.blob_helper({"_obj_not_json": item})
        # Fix: data is bytes, so it must be wrapped in BytesIO; the original
        # StringIO(data) raises TypeError on Python 3.
        db1.put(BytesIO(data), ident, helper._blobdb_bucket())
        self.sql_docs.append(item)
        # a second object whose blob is never written at all
        lost, lost_blob_id = create_obj(rex)
        self.sql_docs.append(lost)
        self.not_founds.add((
            rex.model_class.__name__,
            lost.id,
            lost_blob_id,
            rex.blob_helper({"_obj_not_json": lost})._blobdb_bucket(),
        ))

    self.test_size = len(self.couch_docs) + len(self.sql_docs)
    db2 = TemporaryFilesystemBlobDB()
    self.db = TemporaryMigratingBlobDB(db2, db1)
    assert get_blob_db() is self.db, (get_blob_db(), self.db)
    BaseMigrationTest.discard_migration_state(self.slug)
def setUpClass(cls):
    """Install a temporary filesystem blob db for blob expiry tests."""
    super(BlobExpireTest, cls).setUpClass()
    cls.db = TemporaryFilesystemBlobDB()
def setUpClass(cls):
    # intentional call to super super setUpClass
    # NOTE(review): naming TestBlobMixinWithMigratingDbBeforeCopyToNew (not
    # this class) in super() deliberately skips that class's own setUpClass
    # and runs its parent's instead — confirm against the class hierarchy.
    super(TestBlobMixinWithMigratingDbBeforeCopyToNew, cls).setUpClass()
    # wrap the inherited db; presumably puts go to the old db and are then
    # copied to the new filesystem db (per the wrapper's name) — verify
    cls.db = PutInOldCopyToNewBlobDB(cls.db, TemporaryFilesystemBlobDB())
def setUpClass(cls):
    """Wrap the inherited blob db so writes target the old db (per the
    wrapper's name — confirm against PutInOldBlobDB's implementation)."""
    super(TestBlobMixinWithMigratingDbBeforeCopyToNew, cls).setUpClass()
    cls.db = PutInOldBlobDB(cls.db, TemporaryFilesystemBlobDB())
def setUpClass(cls):
    """Install a temporary filesystem blob db shared by the test class."""
    super(BaseTestCase, cls).setUpClass()
    cls.db = TemporaryFilesystemBlobDB()
def setUp(self):
    """Install a temporary filesystem blob db and reset collected metas."""
    self.db = TemporaryFilesystemBlobDB()
    # sanity check: the temporary db installed itself as the global blob db
    assert get_blob_db() is self.db, (get_blob_db(), self.db)
    self.blob_metas = []
def setUp(self):
    """Install a temporary filesystem blob db before each test."""
    super(DeleteAttachmentsFSDBTests, self).setUp()
    self.db = TemporaryFilesystemBlobDB()
def setUpClass(cls):
    """Install a temporary filesystem blob db for metadata-db tests."""
    super(TestMetaDB, cls).setUpClass()
    cls.db = TemporaryFilesystemBlobDB()
def setUpClass(cls):
    """Install a temporary filesystem blob db for atomic-blob tests."""
    super(TestAtomicBlobs, cls).setUpClass()
    cls.db = TemporaryFilesystemBlobDB()
def setUp(self):
    """Install a temporary filesystem blob db before each GDPR scrub test."""
    super(GDPRScrubUserFromFormsCouchTests, self).setUp()
    self.db = TemporaryFilesystemBlobDB()
def setUpClass(cls):
    """Install a temporary filesystem blob db for the test class."""
    # NOTE(review): unlike sibling fixtures, this does not call
    # super().setUpClass() — confirm the base class does not require it.
    cls.db = TemporaryFilesystemBlobDB()