def django_connection_engine():
    """Return a SQLAlchemy engine bound to Django's default database.

    SQLite databases use ``SharingPool`` so the same underlying connection
    can be shared (needed for in-memory/test databases); every other
    backend uses ``NullPool`` so no connections are held open between uses.
    """
    # Hoist the connection string: the original called
    # get_default_db_string() twice on the sqlite path, which could in
    # principle yield inconsistent values.
    db_string = get_default_db_string()
    if db_string.startswith("sqlite"):
        return create_engine(db_string, poolclass=SharingPool, convert_unicode=True)
    return create_engine(db_string, poolclass=NullPool, convert_unicode=True)
def handle(self, *args, **options):
    """Reflect the content app's tables and dump schema + fixture data.

    Pickles the reflected SQLAlchemy ``MetaData`` for the current schema
    version (and additionally for ``options["version"]`` when it differs),
    then loads the test fixture through Django and serializes every row of
    every automapped table to a JSON data file.
    """
    # When the requested version IS the current schema version there is no
    # separate export schema to write — only the current pickle is updated.
    no_export_schema = options["version"] == CURRENT_SCHEMA_VERSION
    engine = create_engine(get_default_db_string(), convert_unicode=True)
    metadata = MetaData()
    app_config = apps.get_app_config("content")
    # Exclude channelmetadatacache in case we are reflecting an older
    # version of Kolibri that does not have that table.
    table_names = [
        model._meta.db_table
        for name, model in app_config.models.items()
        if name != "channelmetadatacache"
    ]
    metadata.reflect(bind=engine, only=table_names)
    Base = automap_base(metadata=metadata)
    # TODO map relationship backreferences using the django names
    Base.prepare()
    session = sessionmaker(bind=engine, autoflush=False)()
    # Always update the current schema. protocol=2 keeps the pickle
    # readable from Python 2.
    with open(SCHEMA_PATH_TEMPLATE.format(name=CURRENT_SCHEMA_VERSION), "wb") as f:
        pickle.dump(metadata, f, protocol=2)
    # Only do this if we are generating a new export schema version
    if not no_export_schema:
        with open(SCHEMA_PATH_TEMPLATE.format(name=options["version"]), "wb") as f:
            pickle.dump(metadata, f, protocol=2)
    # Load fixture data into the test database with Django
    call_command("loaddata", "content_import_test.json", interactive=False)
    data = {}
    for table_name, record in Base.classes.items():
        data[table_name] = [get_dict(r) for r in session.query(record).all()]
    data_path = DATA_PATH_TEMPLATE.format(name=options["version"])
    # Handle Python 2 unicode issue by opening the file in binary mode
    # with no encoding as the data has already been encoded.
    # Fix: use sys.version_info instead of slicing the free-form
    # sys.version string — equivalent result, robust idiom.
    if sys.version_info[0] == 2:
        with io.open(data_path, mode="wb") as f:
            json.dump(data, f)
    else:
        with io.open(data_path, mode="w", encoding="utf-8") as f:
            json.dump(data, f)
def handle(self, *args, **options):
    """Generate the SQLAlchemy classes module for a content schema version.

    When no version is supplied, the next content schema version is
    assumed. For a non-current version, the default database is swapped
    for an in-memory SQLite instance, the contentschema app is migrated
    into it, and the reflected schema is rendered to a generated classes
    module; the fixture data is also dumped to JSON and the current
    schema is regenerated via a fresh subprocess.
    """
    version = options["version"]
    if not version:
        # Default to the next schema version after the current one.
        version = str(int(CONTENT_SCHEMA_VERSION) + 1)
    no_export_schema = version == CURRENT_SCHEMA_VERSION
    app_name = KolibriContentConfig.label
    if not no_export_schema:
        # Point Django at a throwaway in-memory SQLite database.
        settings.DATABASES["default"] = {
            "ENGINE": "django.db.backends.sqlite3",
            "NAME": ":memory:",
        }
        # Force a reload of the default connection after changing settings.
        del connections["default"]
        settings.INSTALLED_APPS = ("kolibri.core.content.contentschema",)
        # Reset the Django app registry so that only the contentschema
        # app is loaded before migrating.
        apps.app_configs = OrderedDict()
        apps.apps_ready = apps.models_ready = apps.loading = apps.ready = False
        apps.all_models = defaultdict(OrderedDict)
        apps.clear_cache()
        apps.populate(settings.INSTALLED_APPS)
        call_command("makemigrations", app_name, interactive=False)
        call_command("migrate", app_name)
    engine = create_engine(
        get_default_db_string(), poolclass=SharingPool, convert_unicode=True
    )
    metadata = MetaData()
    app_config = apps.get_app_config(app_name)
    # Exclude channelmetadatacache in case we are reflecting an older
    # version of Kolibri that does not have that table.
    table_names = [
        model._meta.db_table
        for name, model in app_config.models.items()
        if name != "channelmetadatacache"
    ]
    metadata.reflect(bind=engine, only=table_names)
    Base = prepare_base(metadata, name=version)
    # TODO map relationship backreferences using the django names
    session = sessionmaker(bind=engine, autoflush=False)()
    metadata.bind = engine
    generator = CodeGenerator(
        metadata, False, False, True, True, False, nocomments=False
    )
    with io.open(
        SQLALCHEMY_CLASSES_PATH_TEMPLATE.format(
            name=coerce_version_name_to_valid_module_path(version)
        ),
        "w",
    ) as f:
        generator.render(f)
    # Only do this if we are generating a new export schema version
    if not no_export_schema:
        # Load fixture data into the test database with Django
        call_command("loaddata", "content_import_test.json", interactive=False)
        data = {}
        for table_name, record in Base.classes.items():
            data[table_name] = [get_dict(r) for r in session.query(record).all()]
        data_path = DATA_PATH_TEMPLATE.format(name=version)
        # Handle Python 2 unicode issue by opening the file in binary mode
        # with no encoding as the data has already been encoded.
        # Fix: sys.version_info instead of slicing the free-form
        # sys.version string — equivalent result, robust idiom.
        if sys.version_info[0] == 2:
            with io.open(data_path, mode="wb") as f:
                json.dump(data, f)
        else:
            with io.open(data_path, mode="w", encoding="utf-8") as f:
                json.dump(data, f)
        # Remove the temporary migrations and regenerate the pickled
        # current schema in a subprocess (a fresh process gets a clean
        # app registry; the child hits the no_export_schema branch so it
        # cannot recurse).
        # NOTE(review): os.system passes a shell string; the argument is
        # an internal constant so injection is unlikely, but
        # subprocess.run([...], shell=False) would be safer — confirm
        # before changing.
        shutil.rmtree(
            os.path.join(
                os.path.dirname(__file__), "../../contentschema/migrations"
            )
        )
        os.system("kolibri manage generate_schema " + CURRENT_SCHEMA_VERSION)
def get_engine(self, connection_string):
    """Return the engine to use for *connection_string*.

    The default Django database is served through the shared
    Django-backed engine; any other connection string falls back to the
    content engine held on this instance.
    """
    if connection_string != get_default_db_string():
        return self.content_engine
    return django_connection_engine()