def main():
    """Create or upgrade the docassemble database schema.

    When the 'use alembic' config option is enabled (the default),
    locates the packaged alembic configuration, stamps a fresh
    database at head, and runs migrations against an existing one.
    Then creates any missing tables (retrying twice, since table
    creation has been observed to fail transiently), seeds them, and
    disposes of the engine's connection pool.

    Exits the process if the alembic directory cannot be located.
    """
    with app.app_context():
        if daconfig.get('use alembic', True):
            # Locate the packaged alembic.ini and migration scripts.
            packagedir = pkg_resources.resource_filename(
                pkg_resources.Requirement.parse('docassemble.webapp'),
                'docassemble/webapp')
            if not os.path.isdir(packagedir):
                sys.exit("path for running alembic could not be found")
            from alembic.config import Config
            from alembic import command
            alembic_cfg = Config(os.path.join(packagedir, 'alembic.ini'))
            alembic_cfg.set_main_option("sqlalchemy.url",
                                        alchemy_connection_string())
            alembic_cfg.set_main_option("script_location",
                                        os.path.join(packagedir, 'alembic'))
            # A fresh database has no alembic_version table; stamp it at
            # head so future upgrades start from a known revision.
            if not db.engine.has_table(dbtableprefix + 'alembic_version'):
                command.stamp(alembic_cfg, "head")
            # Only run migrations when this is an existing installation
            # (the user table already exists).
            if db.engine.has_table(dbtableprefix + 'user'):
                command.upgrade(alembic_cfg, "head")
        # db.drop_all()
        try:
            sys.stderr.write("Trying to create tables\n")
            db.create_all()
        # was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt
        # are not swallowed by the retry logic
        except Exception:
            sys.stderr.write(
                "Error trying to create tables; trying a second time.\n")
            try:
                db.create_all()
            except Exception:
                sys.stderr.write(
                    "Error trying to create tables; trying a third time.\n")
                db.create_all()
        populate_tables()
        db.engine.dispose()
def main():
    """Build the database schema and seed data inside the app context.

    Runs alembic stamping/upgrades first when 'use alembic' is enabled
    (the default), then creates any missing tables, populates seed
    data, and releases the engine's pooled connections.
    """
    with app.app_context():
        if daconfig.get('use alembic', True):
            # Find the installed docassemble.webapp package, which
            # ships alembic.ini and the migration scripts.
            migrations_root = pkg_resources.resource_filename(
                pkg_resources.Requirement.parse('docassemble.webapp'),
                'docassemble/webapp')
            if not os.path.isdir(migrations_root):
                sys.exit("path for running alembic could not be found")
            from alembic.config import Config
            from alembic import command
            cfg = Config(os.path.join(migrations_root, 'alembic.ini'))
            cfg.set_main_option("sqlalchemy.url", alchemy_connection_string())
            cfg.set_main_option("script_location",
                                os.path.join(migrations_root, 'alembic'))
            # No version table yet means a brand-new database: stamp it
            # at head rather than replaying every migration.
            version_table_missing = not db.engine.has_table(
                dbtableprefix + 'alembic_version')
            if version_table_missing:
                command.stamp(cfg, "head")
            # An existing installation (user table present) gets the
            # full upgrade path.
            if db.engine.has_table(dbtableprefix + 'user'):
                command.upgrade(cfg, "head")
        # db.drop_all()
        db.create_all()
        populate_tables()
        db.engine.dispose()
def main():
    """Create or upgrade the docassemble database schema.

    Steps (all inside the Flask app context):
      1. Unless skipped, delete rows whose values would exceed the new
         VARCHAR column limits, so the TEXT -> VARCHAR migration can't
         fail on overlong data.
      2. Run alembic: stamp a fresh database at head, or upgrade an
         existing one.
      3. Create any missing tables (with two retries), seed them, and
         dispose of the engine's connection pool.

    Progress and timing information is written to stderr.  Exits the
    process if the alembic directory cannot be located.
    """
    from docassemble.webapp.database import dbprefix
    # On PostgreSQL the overlong-row cleanup is skipped unless
    # explicitly forced by configuration; other backends always get it.
    if dbprefix.startswith('postgresql') and not daconfig.get(
            'force text to varchar upgrade', False):
        do_varchar_upgrade = False
    else:
        do_varchar_upgrade = True
    with app.app_context():
        if daconfig.get('use alembic', True):
            if do_varchar_upgrade:
                # (table-name suffix, model, [(column name, new limit)])
                # — replaces nine near-identical copy-pasted stanzas.
                varchar_limits = [
                    ('userdict', UserDict, [('filename', 255)]),
                    ('userdictkeys', UserDictKeys, [('filename', 255)]),
                    ('chatlog', ChatLog, [('filename', 255)]),
                    ('uploads', Uploads,
                     [('filename', 255), ('yamlfile', 255)]),
                    ('objectstorage', ObjectStorage, [('key', 1024)]),
                    ('speaklist', SpeakList, [('filename', 255)]),
                    ('shortener', Shortener,
                     [('filename', 255), ('key', 255)]),
                    ('machinelearning', MachineLearning,
                     [('key', 1024), ('group_id', 1024)]),
                    ('globalobjectstorage', GlobalObjectStorage,
                     [('key', 1024)]),
                ]
                changed = False
                for suffix, model, columns in varchar_limits:
                    if not db.engine.has_table(dbtableprefix + suffix):
                        continue
                    for column_name, limit in columns:
                        db.session.query(model).filter(
                            db.func.length(getattr(model, column_name)) > limit
                        ).delete(synchronize_session=False)
                    # Matches the original behavior: commit whenever any
                    # of the tables exists, even if no rows matched.
                    changed = True
                if changed:
                    db.session.commit()
            # Locate the packaged alembic.ini and migration scripts.
            packagedir = pkg_resources.resource_filename(
                pkg_resources.Requirement.parse('docassemble.webapp'),
                'docassemble/webapp')
            if not os.path.isdir(packagedir):
                sys.exit("path for running alembic could not be found")
            from alembic.config import Config
            from alembic import command
            alembic_cfg = Config(os.path.join(packagedir, 'alembic.ini'))
            alembic_cfg.set_main_option("sqlalchemy.url",
                                        alchemy_connection_string())
            alembic_cfg.set_main_option("script_location",
                                        os.path.join(packagedir, 'alembic'))
            # Fresh database: stamp at head instead of replaying history.
            if not db.engine.has_table(dbtableprefix + 'alembic_version'):
                start_time = time.time()
                sys.stderr.write("Creating alembic stamp\n")
                command.stamp(alembic_cfg, "head")
                sys.stderr.write("Done creating alembic stamp after " +
                                 str(time.time() - start_time) + " seconds\n")
            # Existing installation: run migrations.  (A stray
            # "Creating alembic stamp" message copy-pasted from the
            # branch above has been removed.)
            if db.engine.has_table(dbtableprefix + 'user'):
                start_time = time.time()
                sys.stderr.write("Running alembic upgrade\n")
                command.upgrade(alembic_cfg, "head")
                sys.stderr.write("Done running alembic upgrade after " +
                                 str(time.time() - start_time) + " seconds\n")
        # db.drop_all()
        start_time = time.time()
        try:
            sys.stderr.write("Trying to create tables\n")
            db.create_all()
        # was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt
        # are not swallowed by the retry logic
        except Exception:
            sys.stderr.write(
                "Error trying to create tables; trying a second time.\n")
            try:
                db.create_all()
            except Exception:
                sys.stderr.write(
                    "Error trying to create tables; trying a third time.\n")
                db.create_all()
        sys.stderr.write("Finished creating tables after " +
                         str(time.time() - start_time) + " seconds.\n")
        populate_tables()
        db.engine.dispose()
def main():
    """Create or upgrade the docassemble database schema.

    Steps (all inside the Flask app context):
      1. Unless skipped, delete rows whose values would exceed the new
         VARCHAR column limits, so the TEXT -> VARCHAR migration can't
         fail on overlong data.
      2. Run alembic: stamp a fresh database at head, or upgrade an
         existing one.
      3. Create any missing tables (with two retries), run a
         best-effort error check on PostgreSQL, seed the tables, and
         dispose of the engine's connection pool.

    Progress and timing information is reported via logmessage().
    Exits the process if the alembic directory cannot be located.
    """
    logmessage("create_tables.main: starting")
    start_time = time.time()
    # On PostgreSQL the overlong-row cleanup is skipped unless
    # explicitly forced by configuration; other backends always get it.
    if dbprefix.startswith('postgresql') and not daconfig.get(
            'force text to varchar upgrade', False):
        do_varchar_upgrade = False
    else:
        do_varchar_upgrade = True
    with app.app_context():
        logmessage("create_tables.main: inside app context after " +
                   str(time.time() - start_time) + " seconds.")
        if daconfig.get('use alembic', True):
            logmessage("create_tables.main: running alembic after " +
                       str(time.time() - start_time) + " seconds.")
            insp = inspect(db.engine)
            if do_varchar_upgrade:
                # (table-name suffix, model, [(column name, new limit)])
                # — replaces nine near-identical copy-pasted stanzas.
                varchar_limits = [
                    ('userdict', UserDict, [('filename', 255)]),
                    ('userdictkeys', UserDictKeys, [('filename', 255)]),
                    ('chatlog', ChatLog, [('filename', 255)]),
                    ('uploads', Uploads,
                     [('filename', 255), ('yamlfile', 255)]),
                    ('objectstorage', ObjectStorage, [('key', 1024)]),
                    ('speaklist', SpeakList, [('filename', 255)]),
                    ('shortener', Shortener,
                     [('filename', 255), ('key', 255)]),
                    ('machinelearning', MachineLearning,
                     [('key', 1024), ('group_id', 1024)]),
                    ('globalobjectstorage', GlobalObjectStorage,
                     [('key', 1024)]),
                ]
                changed = False
                for suffix, model, columns in varchar_limits:
                    if not insp.has_table(dbtableprefix + suffix):
                        continue
                    for column_name, limit in columns:
                        db.session.execute(
                            delete(model).where(
                                db.func.length(getattr(model, column_name))
                                > limit
                            ).execution_options(synchronize_session=False))
                    # Matches the original behavior: commit whenever any
                    # of the tables exists, even if no rows matched.
                    changed = True
                if changed:
                    db.session.commit()
            # Locate the packaged alembic.ini and migration scripts.
            packagedir = pkg_resources.resource_filename(
                pkg_resources.Requirement.parse('docassemble.webapp'),
                'docassemble/webapp')
            if not os.path.isdir(packagedir):
                sys.exit("path for running alembic could not be found")
            alembic_cfg = Config(os.path.join(packagedir, 'alembic.ini'))
            alembic_cfg.set_main_option("sqlalchemy.url",
                                        alchemy_connection_string())
            alembic_cfg.set_main_option("script_location",
                                        os.path.join(packagedir, 'alembic'))
            # Fresh database: stamp at head instead of replaying history.
            if not insp.has_table(dbtableprefix + 'alembic_version'):
                logmessage("create_tables.main: creating alembic stamp")
                command.stamp(alembic_cfg, "head")
                logmessage(
                    "create_tables.main: done creating alembic stamp after " +
                    str(time.time() - start_time) + " seconds")
            # Existing installation: run migrations.  (A stray
            # "creating alembic stamp" log line copy-pasted from the
            # branch above has been removed.)
            if insp.has_table(dbtableprefix + 'user'):
                logmessage("create_tables.main: running alembic upgrade")
                command.upgrade(alembic_cfg, "head")
                logmessage(
                    "create_tables.main: done running alembic upgrade after "
                    + str(time.time() - start_time) + " seconds")
        # db.drop_all()
        try:
            logmessage("create_tables.main: trying to create tables")
            db.create_all()
        # was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt
        # are not swallowed by the retry logic
        except Exception:
            logmessage(
                "create_tables.main: error trying to create tables; trying a second time."
            )
            try:
                db.create_all()
            except Exception:
                logmessage(
                    "create_tables.main: error trying to create tables; trying a third time."
                )
                db.create_all()
        logmessage("create_tables.main: finished creating tables after " +
                   str(time.time() - start_time) + " seconds.")
        if dbprefix.startswith('postgresql'):
            # Best-effort check; failure here must not abort setup.
            try:
                test_for_errors(start_time=start_time)
            except Exception:
                logmessage(
                    "create_tables.main: unable to test for errors after " +
                    str(time.time() - start_time) + " seconds.")
        logmessage("create_tables.main: populating tables after " +
                   str(time.time() - start_time) + " seconds.")
        populate_tables(start_time=start_time)
        logmessage("create_tables.main: disposing engine after " +
                   str(time.time() - start_time) + " seconds.")
        db.engine.dispose()
        logmessage("create_tables.main: finishing after " +
                   str(time.time() - start_time) + " seconds.")
from __future__ import with_statement from alembic import context from sqlalchemy import engine_from_config, pool from logging.config import fileConfig from docassemble.webapp.database import alchemy_connection_string, connect_args # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config config.set_main_option("sqlalchemy.url", alchemy_connection_string()) # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata target_metadata = None # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL
from __future__ import with_statement from alembic import context from sqlalchemy import engine_from_config, pool from logging.config import fileConfig from docassemble.webapp.database import alchemy_connection_string # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config config.set_main_option("sqlalchemy.url", alchemy_connection_string()) # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata target_metadata = None # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL