def clean_db(ctx: click.Context):
    """Remove the Postgres database named in ``ctx.obj['db_uri']`` if it exists."""
    db_uri = make_url(ctx.obj['db_uri'])
    # str() is the supported way to render a URL; calling the private
    # __to_string__() dunder directly is fragile across SQLAlchemy versions.
    db_uri_str = str(db_uri)
    if database_exists(db_uri):
        logging.info(f'Cleaning "{db_uri_str}" database.')
        drop_database(db_uri)
def create_ctfd(ctf_name="CTFd", name="admin", email="*****@*****.**", password="******", setup=True):
    """Build a CTFd application configured for testing on a fresh database.

    When *setup* is true, the /setup flow is driven through a test client
    so an admin account exists afterwards.
    """
    app = create_app('CTFd.config.TestingConfig')

    url = make_url(app.config['SQLALCHEMY_DATABASE_URI'])
    # Normalize the legacy 'postgres' scheme to 'postgresql'.
    if url.drivername == 'postgres':
        url.drivername = 'postgresql'
    if database_exists(url):
        drop_database(url)
    create_database(url)

    with app.app_context():
        app.db.create_all()

    if setup:
        with app.app_context():
            with app.test_client() as client:
                # GET /setup seeds the session with a CSRF nonce.
                client.get('/setup')
                with client.session_transaction() as sess:
                    form = {
                        "ctf_name": ctf_name,
                        "name": name,
                        "email": email,
                        "password": password,
                        "nonce": sess.get('nonce'),
                    }
                client.post('/setup', data=form)
    return app
def dropDatabase(dbName):
    """Drop the named database if it exists.

    Server location and credentials come from CONFIG_DB.
    """
    cfg = CONFIG_DB
    connect_string = "postgresql://{}:{}@{}:{}/{}".format(
        cfg["username"], cfg["password"], cfg["host"], cfg["port"], dbName
    )
    if sqlalchemy_utils.database_exists(connect_string):
        sqlalchemy_utils.drop_database(connect_string)
def init_data():
    """Fish data for project: recreate the database and seed a default admin user."""
    from imports import (
        Widget, Article, Page,
        User, Setting, Type,
        Template, Tag, Role,
        Category, Block, Profile,
        ContactMessage)
    if prompt_bool('Do you want to kill your db?'):
        if squ.database_exists(db.engine.url):
            squ.drop_database(db.engine.url)
        # Best-effort teardown/rebuild: errors are still ignored, but
        # `except Exception` (not a bare `except:`) lets
        # KeyboardInterrupt/SystemExit propagate.
        try:
            db.drop_all()
        except Exception:
            pass
        try:
            squ.create_database(db.engine.url)
            db.create_all()
        except Exception:
            pass
        user = User.query.filter(User.email == '*****@*****.**').first()
        if user is None:
            user = User(username='******', email='*****@*****.**', password='******')
            user.save()
def gen_engine():
    """Yield a test database engine, dropping the database afterwards.

    The try/finally guarantees the database is dropped even when the
    consuming test fails.
    """
    _engine = create_db_test()
    try:
        yield _engine
    finally:
        drop_database(url_db)
def stop_fixture(self):
    """Clean up the config fixture and storage artifacts."""
    # Stop the background metricd worker first so nothing touches
    # storage while it is being torn down.
    if hasattr(self, 'metricd_thread'):
        self.metricd_thread.stop()
        self.metricd_thread.join()
    # Undo nested fixtures in reverse setup order.
    if hasattr(self, 'fixtures'):
        for f in reversed(self.fixtures):
            f.cleanUp()
    if hasattr(self, 'index'):
        self.index.disconnect()
    # Swallow noise from missing tables when dropping
    # database.
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', module='sqlalchemy.engine.default')
        sqlalchemy_utils.drop_database(self.conf.indexer.url)
    if self.tmp_dir:
        shutil.rmtree(self.tmp_dir)
    if hasattr(self, 'coord'):
        self.coord.stop()
    self.conf.reset()
    # Keep captured output around when debugging is requested.
    if not os.getenv("GNOCCHI_TEST_DEBUG"):
        self.output.cleanUp()
def setup():
    """(Re)create the application database and install the fuzzystrmatch extension."""
    uri = app.config['SQLALCHEMY_DATABASE_URI']
    print(uri)
    engine = create_engine(uri)
    if database_exists(engine.url):
        drop_database(engine.url)
    create_database(engine.url)
    # fuzzystrmatch provides levenshtein/soundex functions.
    engine.execute('create extension if not exists fuzzystrmatch')
def drop_db(app):
    """Drop the project database if it exists, printing the outcome."""
    from project.core.db import db
    from sqlalchemy_utils import database_exists, drop_database
    if database_exists(db.engine.url):
        # Parenthesized single-argument print is valid on both Python 2 and 3
        # (the original `print '...'` statement is a SyntaxError on Python 3).
        print('====> Drop database')
        drop_database(db.engine.url)
    else:
        print('====> database not exist')
def createdb():
    """Create the application database, optionally dropping the old one first.

    When settings.DROP_DB_ON_RESTART is true, any existing database is
    dropped so each restart begins from a clean slate.
    """
    # Parenthesized single-argument print works on both Python 2 and 3
    # (the original `print "..."` statement is a SyntaxError on Python 3).
    print("Connecting to %s" % settings.SQLALCHEMY_DATABASE_URI)
    engine = create_engine(settings.SQLALCHEMY_DATABASE_URI)
    if settings.DROP_DB_ON_RESTART and database_exists(engine.url):
        print("Dropping old database... (because DROP_DB_ON_RESTART=True)")
        drop_database(engine.url)
    if not database_exists(engine.url):
        print("Creating databases...")
        create_database(engine.url)
def book_uri(request):
    """Yield a book URI, ensuring no database exists at it before or after the test."""
    uri = request.param

    def _wipe():
        if uri and database_exists(uri):
            drop_database(uri)

    _wipe()
    yield uri
    _wipe()
def app():
    """ Create test database and application """
    application = create_app(testing=True)
    uri = application.config['SQLALCHEMY_DATABASE_URI']
    if database_exists(uri):
        drop_database(uri)
    create_database(uri)
    yield application
def create_sweography_db():
    """Recreate the sweography database and build every table on the declarative Base."""
    engine = create_engine(SQLALCHEMY_DATABASE_URI)
    if database_exists(engine.url):
        drop_database(engine.url)
    create_database(engine.url)
    # Materialize all tables registered on Base.
    Base.metadata.create_all(engine)
def setUp(self):
    """Recreate the test database and its schema before each test."""
    self.app = self.create_app()
    self.db = DB(engine, session, meta)
    import sqlalchemy_utils as squ
    url = self.db.engine.url
    if squ.database_exists(url):
        squ.drop_database(url)
    squ.create_database(url)
    meta.bind = self.db.engine
    meta.create_all()
def drop():
    """
    Drop the database if it exists; log an error when it does not.

    Bug fix: the "does not exist" error used to be logged on every call,
    even immediately after a successful drop. It is now only emitted
    when the database is actually missing.
    :return:
    """
    app.logger.debug('Dropping the database!')
    if database_exists(db.engine.url):
        drop_database(db.engine.url)
    else:
        app.logger.error('Database does not exists!')
def new_book_USD(request):
    """Yield a fresh USD book, wiping any database at the URI before and after."""
    uri = request.param
    if uri and database_exists(uri):
        drop_database(uri)
    with create_book(uri_conn=uri, currency="USD", keep_foreign_keys=False) as book:
        yield book
    if uri and database_exists(uri):
        drop_database(uri)
def setUp(self):
    """Reset the test database to a clean schema before each test."""
    self.app = self.create_app()
    self.db = BaseModel
    import sqlalchemy_utils as squ
    url = self.db.engine.url
    if squ.database_exists(url):
        squ.drop_database(url)
    squ.create_database(url)
    meta.bind = self.db.engine
    meta.create_all()
def drop_db():
    '''
    Drops the database named by scrapper.config.default.SQLALCHEMY_DATABASE_URI.

    Exits with status 1 when the database does not exist.
    '''
    from scrapper.config.default import SQLALCHEMY_DATABASE_URI as db_url
    if not database_exists(db_url):
        print("Database doesn't exist")
        exit(1)
    drop_database(db_url)
    print("Done")
def stop_fixture(self):
    """Clean up the config fixture and storage artifacts."""
    self.conf.reset()
    # A "null://" indexer means no real database was created, so there
    # is nothing to drop.
    if not self.conf.indexer.url.startswith("null://"):
        # Swallow noise from missing tables when dropping
        # database.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', module='sqlalchemy.engine.default')
            sqlalchemy_utils.drop_database(self.conf.indexer.url)
    if self.tmp_dir:
        shutil.rmtree(self.tmp_dir)
def db_connection(db_connection_string):
    """
    Create one test database for all database tests.

    Yields an open connection; the connection, engine and database are
    torn down afterwards. The try/finally guarantees cleanup even when
    a test using the connection fails.
    """
    engine = create_engine(db_connection_string)
    if not database_exists(engine.url):
        create_database(engine.url)
    connection = engine.connect()
    try:
        yield connection
    finally:
        connection.close()
        engine.dispose()
        drop_database(engine.url)
def book_db_config(request):
    """Yield a DB config dict, wiping any matching database before and after.

    In-memory sqlite backends have nothing on disk, so they are skipped.
    """
    from piecash.core.session import build_uri
    sql_backend, db_config = request.param
    uri = build_uri(**db_config)
    on_disk = sql_backend != "sqlite_in_mem"
    if on_disk and database_exists(uri):
        drop_database(uri)
    yield db_config
    if on_disk and database_exists(uri):
        drop_database(uri)
def setUp(self):
    """Recreate the test database schema before each test."""
    self.app = self.create_app()
    self.db = self.app.extensions['sqlalchemy'].db
    # Import models so their tables are registered on the metadata.
    from imports import (Page, User, Setting)
    import sqlalchemy_utils as squ
    url = self.db.engine.url
    if squ.database_exists(url):
        squ.drop_database(url)
    squ.create_database(url)
    meta.bind = self.db.engine
    meta.create_all()
def app(request):
    """Build a 'testing' application bound to a freshly created database."""
    test_app = ApplicationFactory.create_application('testing')
    test_app.app_context().push()
    if database_exists(db.engine.url):
        drop_database(db.engine.url)
    create_database(db.engine.url)
    db.create_all()

    def teardown():
        # Detach ORM state before the database disappears.
        db.session.expunge_all()
        db.session.remove()
        drop_database(db.engine.url)
        db.engine.dispose()

    request.addfinalizer(teardown)
    return test_app
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Tables are dropped in this order so foreign-key dependents go
    # before the tables they reference.
    op.drop_table('categories')
    op.drop_table('types')
    op.drop_table('blocks')
    op.drop_table('settings')
    op.drop_table('users')
    op.drop_table('roles')
    op.drop_table('tags')
    op.drop_table('pages_blocks')
    op.drop_table('widgets')
    op.drop_table('templates_blocks')
    op.drop_table('templates')
    op.drop_table('articles')
    op.drop_table('pages')
    # Finally remove the database itself.
    from local_settings import LocalConfig
    url = LocalConfig.SQLALCHEMY_DATABASE_URI
    sau.drop_database(url)
def create_db(username, dbname, dbpassword):
    '''
    Returns a tuple (<bool>, database_engine_handle), so the caller can
    check whether the database was created successfully and, if so,
    access the sqlalchemy engine via the database_engine_handle.
    '''
    # Here, we're using postgres, but sqlalchemy can connect to other things too.
    engine = create_engine('postgres://%s:%s@localhost/%s' % (username, dbpassword, dbname))
    # print() form is valid on both Python 2 and 3 (the original
    # `print ...` statements are SyntaxErrors on Python 3).
    print("Connecting to", engine.url)
    if not database_exists(engine.url):
        create_database(engine.url)
    else:
        # Already present: recreate from scratch.
        drop_database(engine.url)
        create_database(engine.url)
    database_exists_check = database_exists(engine.url)
    print("Database created successfully?:", database_exists_check)
    return (database_exists_check, engine)
def clear(**kwargs):
    """Clear the specified names from the specified databases.

    This can be highly destructive as it destroys tables and when all
    names are removed from a database, the database itself.
    """
    # When truthy, also drop the database itself, not just its tables.
    database = kwargs.pop('database', False)
    # Per-table operation: drop the table on the target engine.
    expression = lambda target, table: table.drop(target)
    # Skip targets whose database or table is already gone.
    test = lambda x, tab: not database_exists(x.url) or not tab.exists(x)
    # TODO: Iterate through all engines in name set.
    if database and database_exists(engine['default'].url):
        drop_database(engine['default'].url)
        clear_cache()
    op(expression, reversed(metadata.sorted_tables), test=test,
       primary='clear', secondary='drop', **kwargs)
def book_basic(request):
    """Yield a basic piecash book with a small account tree (asset/broker/exp/inc)."""
    name = request.param
    # Start from a clean slate if a database already exists at this URI.
    if name and database_exists(name):
        drop_database(name)
    # create new book
    with create_book(uri_conn=name, currency="EUR", keep_foreign_keys=False) as b:
        # create some accounts
        curr = b.currencies[0]
        # Commodity with non-ASCII fields to exercise unicode handling.
        cdty = Commodity(namespace=u"échange", mnemonic=u"ïoà", fullname=u"Example of unicode déta")
        a = Account(name="asset", type="ASSET", commodity=curr, parent=b.root_account)
        Account(name="broker", type="STOCK", commodity=cdty, parent=a)
        Account(name="exp", type="EXPENSE", commodity=curr, parent=b.root_account)
        Account(name="inc", type="INCOME", commodity=curr, parent=b.root_account)
        yield b
    if name and database_exists(name):
        drop_database(name)
def init_data():
    """Recreate the application database and seed it with a default admin user."""
    from auth.models import User
    if prompt_bool('Do you want to kill your db?'):
        uri = app.config['SQLALCHEMY_DATABASE_URI']
        if squ.database_exists(uri):
            squ.drop_database(uri)
        # Best-effort teardown/rebuild: `except Exception` (not a bare
        # `except:`) lets KeyboardInterrupt/SystemExit propagate.
        try:
            drop_db()
        except Exception:
            pass
        try:
            squ.create_database(uri)
            create_db()
        except Exception:
            pass
        seed_db()
        user = User().query.filter(User.email == '*****@*****.**').first()
        if user is None:
            user = User(username='******', email='*****@*****.**', password='******')
            user.save()
def init_data():
    """Fish data for project"""
    if prompt_bool('Do you want to kill your db?'):
        if squ.database_exists(db.engine.url):
            squ.drop_database(db.engine.url)
        # Rebuild errors are still ignored, but `except Exception`
        # (not a bare `except:`) lets KeyboardInterrupt/SystemExit
        # propagate.
        try:
            db.metadata.drop_all()
        except Exception:
            pass
        try:
            squ.create_database(db.engine.url)
            db.metadata.create_all()
        except Exception:
            pass
        user = User.query.filter(User.email == '*****@*****.**').first()
        if user is None:
            user = User(username='******', email='*****@*****.**', password='******')
            user.save()
def tearDownClass(cls):
    """Drop the DAO database and delete files/directories created by the tests."""
    drop_database(cls.dao_url)
    # Remove every file under cls.path that was not part of the fixture.
    for root, _, filenames in os.walk(cls.path):
        for filename in filenames:
            full_path = os.path.join(root, filename)
            if full_path not in cls.test_files:
                os.remove(full_path)
    # os.rmdir only removes empty directories, so delete the deepest
    # (longest) paths first.
    subdirs = [os.path.join(root, d)
               for root, dirnames, _ in os.walk(cls.path)
               for d in dirnames]
    subdirs.sort(key=len, reverse=True)
    for d in subdirs:
        if d not in cls.test_drs:
            os.rmdir(d)
def create_db(request):
    """Create a clean test database with the full schema; drop it when the session ends."""
    print("\n===> Create and synchro database")
    url = engine.url
    if database_exists(url):
        drop_database(url)
    create_database(url)
    # Build every table registered on the declarative Base.
    Base.metadata.create_all(engine)

    def _teardown():
        print("\n===> Drop database")
        drop_database(engine.url)

    request.addfinalizer(_teardown)
def stop_fixture(self):
    """Clean up the config fixture and storage artifacts."""
    # Stop the metricd worker before tearing down its storage.
    if hasattr(self, 'metricd_thread'):
        self.metricd_thread.stop()
        self.metricd_thread.join()
    if hasattr(self, 'index'):
        self.index.disconnect()
    # A "null://" indexer has no backing database to drop.
    if not self.conf.indexer.url.startswith("null://"):
        # Swallow noise from missing tables when dropping
        # database.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', module='sqlalchemy.engine.default')
            sqlalchemy_utils.drop_database(self.conf.indexer.url)
    if self.tmp_dir:
        shutil.rmtree(self.tmp_dir)
    self.conf.reset()
def main():
    """Interactively recreate (or create) the database and output directories."""
    if database_exists(engine.url):
        ans = input("[+] Database found, do you want to delete it? (y/n) ")
        if ans.lower() == "y":
            ans = input("[!] Are you sure? (y/n) ")
            if ans.lower() == "y":
                print("[+] Deleting the database...")
                drop_database(engine.url)
                os.system("rm -rf output")
                # Typo fix: message previously read "databse".
                print("[+] Creating the database...")
                create_database(engine.url)
                Base.metadata.create_all(engine)
                os.system("mkdir -p output/bin")
                os.system("mkdir -p output/models")
    else:
        ans = input("[+] Database not found, do you want to create it? (y/n) ")
        if ans.lower() == "y":
            print("[+] Creating the database...")
            create_database(engine.url)
            Base.metadata.create_all(engine)
def __enter__(self):
    """Reset the backing store (MongoDB or PostgreSQL) and initialize it.

    Returns self so the object can be used as a context manager.
    """
    # MongoDB: drop the database named in the URI.
    if self.database_uri is not None and self.database_uri.startswith(
            "mongodb://"):
        with pymongo.MongoClient(
                self.database_uri,
                serverSelectionTimeoutMS=5000) as mongo_client:
            mongo_client.drop_database(mongo_client.get_database())
    # PostgreSQL: drop any existing database, then recreate it empty.
    if self.database_uri is not None and self.database_uri.startswith(
            "postgresql://"):
        if sqlalchemy_utils.database_exists(self.database_uri):
            sqlalchemy_utils.drop_database(self.database_uri)
        sqlalchemy_utils.create_database(self.database_uri)
    # Run schema initialization for real (simulate=False).
    if self.database_uri is not None:
        with self.database_administration_factory() as database_administration:
            database_administration.initialize(simulate=False)
    return self
def init_db(monte_carlo_file, force=False):
    """Initialize the database and import the Monte Carlo file.

    With force=True an existing database is dropped (after a 5-second
    grace period) before being rebuilt. Without force, an existing
    database is left untouched.
    """
    engine = db_connect()
    db_url = engine.url
    if force and database_exists(db_url):
        logging.info("Dropping existing database in 5 seconds...")
        time.sleep(5)
        drop_database(db_url)
        logging.info("Dropped.")
    if database_exists(db_url):
        logging.info(
            "Database already exists, skipping database initialization...")
        return
    logging.info("Creating database...")
    create_database(db_url)
    create_postgis_extension(engine)
    run_migrations()
    import_monte_carlo(monte_carlo_file)
def setup_database():
    """Session fixture: build the base test database via migrations.

    A second database (DATABASE_URI) is also created for tests needing a
    separate target. Both are dropped afterwards unless SAVE_DB is set.
    """
    # Create database specific for tests
    if not database_exists(DATABASE_URI_BASE):
        create_database(DATABASE_URI_BASE)
        app = create_app(testing=True, database_uri=DATABASE_URI_BASE)
        with app.test_request_context():
            with app.test_client():
                # Apply migrations and seed baseline data into the new DB.
                flask_migrate.upgrade()
                fill_basic_db(db)
        db.engine.dispose()
    if not database_exists(DATABASE_URI):
        create_database(DATABASE_URI)
    try:
        yield None
    finally:
        # Drop database to save container space
        if not os.environ.get("SAVE_DB", False):
            drop_database(DATABASE_URI)
            drop_database(DATABASE_URI_BASE)
def drop_db(db_name=None): """ Drops the default database. Arguments: db_name - name of the new database Returns: success - success flag error - error message """ # if db_name is not specified, use the default value if db_name is None: del_db_sql_con_string = SQL_CONNECTION_STRING_DB else: del_db_sql_con_string = "%s/%s" % (SQL_CONNECTION_STRING, db_name) drop_database(del_db_sql_con_string) return True, None
def before_scenario(context, scenario):
    """Give each scenario a pristine database (postgis + hstore) and clean state."""
    # create new database
    if database_exists(context.test_db):
        drop_database(context.test_db)
    create_database(context.test_db)
    context.engine = create_engine(context.test_db, echo='ECHO_SQL' in os.environ)
    with context.engine.begin() as conn:
        conn.execute("CREATE EXTENSION postgis")
        conn.execute("CREATE EXTENSION hstore")
    # Remove a stale nodestore file; `except Exception` instead of a
    # bare `except:` so KeyboardInterrupt/SystemExit still propagate.
    try:
        os.remove(context.nodestore_file)
    except Exception:
        pass  # possibly not there
    context.tables = {}
    context.tagsets = {}
def stop_fixture(self):
    """Clean up the config fixture and storage artifacts."""
    # Stop the metricd worker before tearing down its storage.
    if hasattr(self, 'metricd_thread'):
        self.metricd_thread.stop()
        self.metricd_thread.join()
    # Undo nested fixtures in reverse setup order.
    if hasattr(self, 'fixtures'):
        for f in reversed(self.fixtures):
            f.cleanUp()
    if hasattr(self, 'index'):
        self.index.disconnect()
    # Swallow noise from missing tables when dropping
    # database.
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', module='sqlalchemy.engine.default')
        sqlalchemy_utils.drop_database(self.conf.indexer.url)
    # Ceph storage needs its pool removed out-of-band via the CLI.
    if self.conf.storage.driver == 'ceph':
        with open(os.devnull, 'w') as f:
            ceph_rmpool_command = "ceph -c %s osd pool delete %s %s \
--yes-i-really-really-mean-it" % (os.getenv("CEPH_CONF"),
                                  self.ceph_pool_name,
                                  self.ceph_pool_name)
            subprocess.call(ceph_rmpool_command, shell=True,
                            stdout=f, stderr=subprocess.STDOUT)
    if self.tmp_dir:
        shutil.rmtree(self.tmp_dir)
    if hasattr(self, 'coord'):
        self.coord.stop()
    self.conf.reset()
    # Keep captured output around when debugging is requested.
    if not os.getenv("GNOCCHI_TEST_DEBUG"):
        self.output.cleanUp()
def drop_db(conn_string, db_name):
    """
    Function to drop db

    What it doesn't do: drop individual table/column/values

    -conn_string: the string that holds the connection to postgres
    -db_name: name of the database

    return: True, None if the db is dropped.
    """
    # Connection string
    db_conn_string = "{}/{}".format(conn_string, db_name)
    if database_exists(db_conn_string):
        # Connect to the db
        _, _, engine = connect_db(conn_string, db_name)
        # Disconnect all users from the db we want to drop, then drop it.
        try:
            connection = engine.connect()
            connection.connection.set_isolation_level(
                ISOLATION_LEVEL_AUTOCOMMIT)
            # pg_stat_activity's pid column was named "procpid" before 9.2.
            version = connection.dialect.server_version_info
            pid_column = "pid" if (version >= (9, 2)) else "procpid"
            text = """
            SELECT pg_terminate_backend(pg_stat_activity.%(pid_column)s)
            FROM pg_stat_activity
            WHERE pg_stat_activity.datname = '%(database)s'
            AND %(pid_column)s <> pg_backend_pid();
            """ % {
                "pid_column": pid_column,
                "database": db_name,
            }
            connection.execute(text)
            # Drops db
            drop_database(db_conn_string)
        except Exception:
            # `except Exception` instead of a bare `except:` so
            # KeyboardInterrupt/SystemExit still propagate.
            return False, "cannot drop db %s" % db_name
    return True, None
def init_data():
    """Fish data for project"""
    if prompt_bool('Do you want to kill your db?'):
        if squ.database_exists(db.engine.url):
            squ.drop_database(db.engine.url)
        # `except Exception` (not a bare `except:`) keeps
        # KeyboardInterrupt/SystemExit propagating.
        try:
            db.metadata.drop_all()
        except Exception:
            pass
        try:
            squ.create_database(db.engine.url)
            db.metadata.create_all()
        except Exception:
            pass
        user = User.query.filter(User.email == '*****@*****.**').first()
        if user is None:
            user = User(username='******', email='*****@*****.**', password='******')
            user.save()
def initialize_database():
    """Create the feeds database (or recreate it under FORCE_INIT_NEW),
    then drop any leftover tables so migrations start clean."""
    from sqlalchemy import create_engine
    from sqlalchemy_utils import (database_exists, create_database, drop_database)

    engine = create_engine(f'postgresql://{PG_USER}:{PG_PASS}@{PG_URI}/feeds')
    if not database_exists(engine.url):
        create_database(engine.url)
    elif FORCE_INIT_NEW:
        drop_database(engine.url)
        create_database(engine.url)
    else:
        # Database already present and no force flag: nothing to do.
        return
    print(f'Database created: {database_exists(engine.url)}')

    # Drop existing tables
    for table_name in ['feeds', 'items', 'tags', 'alembic_version']:
        sql = f'DROP TABLE IF EXISTS {table_name} CASCADE;'
        result = engine.execute(sql)
        print(f'Drop table {table_name}: result={result}')
def _setup_base_app(db_uri: str = DSN_LIST[0]):
    """Recreate the database at db_uri and return (app, engine, Base, session).

    `session` is a generator dependency that commits on success and
    always closes its session.
    """
    if database_exists(db_uri):
        drop_database(db_uri)
    create_database(db_uri)

    app = FastAPI()
    engine = create_engine(db_uri)
    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    Base = declarative_base()

    def session():
        db_session = SessionLocal()
        try:
            yield db_session
            db_session.commit()
        finally:
            db_session.close()

    return app, engine, Base, session
def tearDownClass(cls):
    """Remove the DAO database plus any files and directories the tests created."""
    drop_database(cls.dao_url)
    created_files = [
        os.path.join(root, name)
        for root, _, names in os.walk(cls.path)
        for name in names
    ]
    for path in created_files:
        if path not in cls.test_files:
            os.remove(path)
    created_dirs = [
        os.path.join(root, names, name)[: None] if False else os.path.join(root, name)
        for root, names, _ in os.walk(cls.path)
        for name in names
    ]
    # os.rmdir deletes only empty directories, so remove the deepest
    # (longest) paths first.
    created_dirs.sort(key=len, reverse=True)
    for path in created_dirs:
        if path not in cls.test_drs:
            os.rmdir(path)
def session_local():
    """Override the default database with our testing database, and make
    sure to run migrations"""
    settings = api.environment.ApplicationSettings()
    dsn = (
        f"postgresql+psycopg2://{settings.postgres_user}:{settings.postgres_password}"
        f"@{settings.postgres_host}:{settings.postgres_port}/test"
    )
    test_engine = create_engine(dsn, echo=False)

    # Drop database and recreate to ensure tests are always run against a clean slate
    if database_exists(test_engine.url):
        drop_database(test_engine.url)
    create_database(test_engine.url)

    TestSessionLocal = sessionmaker(bind=test_engine)
    # Install necessary pgcrypto extension (for database-level default UUIDs)
    test_engine.execute("create extension pgcrypto")
    # Create all tables
    db.AlchemyBase.metadata.create_all(bind=test_engine)
    try:
        yield TestSessionLocal
    finally:
        drop_database(test_engine.url)
def delete_database() -> None:
    """Delete the database file holding imported Hattrick match data.

    If the database file exists under the 'db' folder it is removed and
    a confirmation dialog is shown; otherwise an error dialog is shown.
    Takes no parameters and returns nothing. (Dialog texts are
    intentionally in Romanian.)
    """
    if os.path.exists(global_library.database_file_path):
        drop_database(global_library.database_file_uri)
        dw.show_info_window_in_thread(title='Succes!',
                                      message='Baza de date a fost stearsa.')
    else:
        dw.show_error_window_in_thread(title='Esec!',
                                       message='Baza de date nu exista')
def setUpModule():
    """Recreate the test database and load the initial SQL schema/data."""
    engine = create_engine(TEST_DB_PATH)
    if database_exists(engine.url):
        drop_database(engine.url)
    create_database(engine.url)
    sql_path = os.path.join(
        os.path.dirname(__file__),
        "../task_wg_forge_backend/wg_forge_init.sql")
    with open(sql_path) as f:
        initial_sql = f.read()
    # try/finally + cursor context manager guarantee the connection is
    # closed even when executing the init SQL fails (the original left
    # them open on error).
    conn = psycopg2.connect(dbname=DB_NAME, user=USER,
                            password=DB_PASSWORD, host=DB_HOST)
    try:
        with conn.cursor() as cursor:
            cursor.execute(initial_sql)
        conn.commit()
    finally:
        conn.close()
def init_data():
    """Recreate the application database, run the seeders, and ensure a
    default admin user exists."""
    from auth.models import User
    if prompt_bool('Do you want to kill your db?'):
        if squ.database_exists(app.config['SQLALCHEMY_DATABASE_URI']):
            squ.drop_database(app.config['SQLALCHEMY_DATABASE_URI'])
        # `except Exception` (not a bare `except:`) keeps
        # KeyboardInterrupt/SystemExit propagating.
        try:
            drop_db()
        except Exception:
            pass
        try:
            squ.create_database(app.config['SQLALCHEMY_DATABASE_URI'])
            create_db()
        except Exception:
            pass
        seed_db()
        user = User().query.filter(User.email == '*****@*****.**').first()
        if user is None:
            user = User(username='******', email='*****@*****.**', password='******')
            user.save()
def test_postgres_already_exist(self):
    """Creating the same Postgres database twice must raise
    DatabaseAlreadyExistException on the second attempt."""
    db_name = "postgres_already_exist"
    app.config[POSTGRESQL_USERNAME] = POSTGRESQL
    app.config[POSTGRESQL_PASSWORD] = POSTGRESQL
    app.config[POSTGRESQL_HOST] = "localhost"
    error_message = f"The database {db_name} already exists"
    try:
        self.importer._create_database(db_name, POSTGRESQLFLAVOR)
        with self.assertRaisesRegex(DatabaseAlreadyExistException,
                                    error_message):
            self.importer._create_database(db_name, POSTGRESQLFLAVOR)
    finally:
        # Always drop the database so reruns start from a clean state.
        url = (
            "postgresql://"
            + app.config[POSTGRESQL_USERNAME]
            + ":"
            + app.config[POSTGRESQL_PASSWORD]
            + "@localhost/"
            + db_name
        )
        if sqlalchemy_utils.database_exists(url):
            sqlalchemy_utils.drop_database(url)
def __init__(self, request):
    """Unpack the (settings, del_db, kwargs) request tuple and, when
    del_db is True, reset the test database (SQL server via
    SQLAlchemy, or the sqlite file)."""
    self.request = request
    self.settings = request[0]
    self.del_db = request[1]
    self.kwargs = request[2]
    os.environ["CRAX_SETTINGS"] = self.settings
    test_mode = os.environ["CRAX_TEST_MODE"]
    if self.del_db is not True:
        return
    if test_mode != "sqlite":
        engine = create_engine(get_db_engine())
        if database_exists(engine.url):
            drop_database(engine.url)
        create_database(engine.url)
    else:
        # Recreate an empty sqlite database file.
        if os.path.isfile("test_crax.sqlite"):
            os.remove("test_crax.sqlite")
        open("test_crax.sqlite", "w").close()
def __create_test_database(self): """ テストDB作成""" # テストDBが削除されずに残ってしまっている場合は削除 if database_exists(get_env().test_database_url): drop_database(get_env().test_database_url) # テストDB作成 _con = \ psycopg2.connect('host=db user=postgres password=postgres') _con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) _cursor = _con.cursor() _cursor.execute('CREATE DATABASE test_db_fastapi_sample') # テストDBにExtension追加 _test_db_con = psycopg2.connect( 'host=db dbname=test_db_fastapi_sample user=postgres password=postgres' ) _test_db_con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) # テストDBにテーブル追加 Base.metadata.create_all(bind=test_db_connection)
def main():
    """Interactive entry point: create the DaaS database, or recreate it
    after double confirmation when one already exists."""
    if database_exists(engine.url):
        answer = input("[+] DaaS database found, delete? (y/n) ")
        if answer.lower() == "y":
            answer = input("[!] Are you sure? (y/n) ")
            if answer.lower() == "y":
                print("[+] Deleting database...")
                drop_database(engine.url)
                print("[+] Creating new database...")
                create_database(engine.url)
                Base.metadata.create_all(engine)
                add_character_classes()
    else:
        answer = input("[+] DaaS database not found, create it? (y/n) ")
        if answer.lower() == "y":
            print("[+] Creating new database...")
            create_database(engine.url)
            Base.metadata.create_all(engine)
            print("[+] Filling database")
            add_character_classes()
def __get_config(self, config_name: str, resume, **kwargs):
    """Load a study config from _configs/<name>.json, backed by a study DB.

    On resume, a config previously stored in the study database (row
    id=0 of the `configs` table) takes precedence. Otherwise the study
    database is recreated and the fresh config persisted into it.
    """
    config_file = '_configs/' + config_name + '.json'
    # commentjson tolerates //-style comments inside the JSON config.
    with open(config_file) as f:
        config = commentjson.load(f)
    if 'name' in config['study'] and config['study']['name']:
        study_name = config['study']['name'].replace(' ', '_')
    else:
        study_name = config_name
    db_string = config['study']['output_db_location'] + '/' + study_name
    engine = create_engine(db_string)
    if resume:
        # An explicit db_string kwarg overrides the config-derived one.
        if 'db_string' in kwargs:
            db_string = kwargs['db_string']
        # look for existing db. if one exists, return its stored config
        if database_exists(db_string):
            if engine.dialect.has_table(engine, 'configs'):
                db = dataset.connect(db_string)
                configs_table = db['configs']
                configs = configs_table.find_one(id=0)['data']
                configs['study']['resume'] = resume
                return configs
    config['study']['name'] = study_name
    if 'output_database' not in config['study'] or not config['study']['output_database']:
        config['study']['output_database'] = db_string
    # Fresh run: recreate the study database and persist this config.
    if database_exists(db_string):
        drop_database(db_string)
    if not database_exists(db_string):
        db_utils.create_db(db_string)
    self.__create_configs_table(db_string)
    db = dataset.connect(db_string)
    configs_table = db['configs']
    configs_table.insert({'id': 0, 'data': config})
    config['study']['resume'] = resume
    return config
def main():
    """Scrape GBFS bikeshare feeds: build one MySQL database per provider,
    record each provider's feed URLs, and create a table per feed."""
    password = open("/home/kyle/.config/mysql_kyle_passwd", 'r').read().splitlines()[0]
    # NOTE(review): this join produces the RELATIVE path "home/kyle/..."
    # (no leading separator) — confirm whether an absolute path was intended.
    systems = pd.read_csv(
        os.path.join("home", "kyle", "Documents", "research", "personal",
                     "bikeshare-scrape", "data", "gbfs_systems.csv"))
    # Create a database for each bikeshare provider within MySQL
    for i in systems['System ID']:
        engine = create_engine('mysql+pymysql://kyle:' + password + '@localhost/' + i)
        if database_exists(engine.url):
            drop_database(engine.url)
            create_database(engine.url)
        if not database_exists(engine.url):
            create_database(engine.url)
    # Create a data frame of urls for every bikeshare provider
    # NOTE(review): range(1, ...) skips the first row — confirm this is
    # deliberate and not an off-by-one.
    url_df = pd.DataFrame()
    for i in range(1, len(systems)):
        # First go to the system's gbfs site
        gbfs = requests.get(systems['Auto-Discovery URL'][i]).json()
        gbfs_urls = pd.io.json.json_normalize(gbfs['data']['en']['feeds'])
        gbfs_urls['System ID'] = systems['System ID'][i]
        url_df = url_df.append(gbfs_urls)
    # Remove all the rows with 'name' == 'gbfs' to prevent infinite recursion
    url_df = url_df[url_df['name'] != 'gbfs']
    url_df = url_df[url_df['System ID'] != 'curtin_university']
    url_df.to_csv(os.path.join('..', 'data', 'url_list.csv'))
    # Create tables
    for i in range(1, len(url_df)):
        # Make connection
        engine = create_engine('mysql+pymysql://kyle:' + password +
                               '@localhost/' + url_df['System ID'].tolist()[i])
        if not engine.dialect.has_table(engine, url_df['name'].tolist()[i]):
            create_table(url_df['name'].tolist()[i], password,
                         url_df['System ID'].tolist()[i])
def mysql_database(tmpdir_factory, mysql_instance, mysql_credentials, faker):
    """Populate the MySQL test database with randomized articles (authors,
    tags, misc, images) plus CrazyName rows; drop the database afterwards."""
    temp_image_dir = tmpdir_factory.mktemp("images")
    db = Database(
        "mysql+mysqldb://{user}:{password}@{host}:{port}/{database}".format(
            user=mysql_credentials.user,
            password=mysql_credentials.password,
            host=mysql_credentials.host,
            port=mysql_credentials.port,
            database=mysql_credentials.database,
        ))
    with Helpers.session_scope(db) as session:
        # 12-24 articles, each with one author/tag/misc and 1-4 images
        # under a faked date-based path.
        for _ in range(faker.pyint(min_value=12, max_value=24)):
            article = ArticleFactory()
            article.authors.append(AuthorFactory())
            article.tags.append(TagFactory())
            article.misc.append(MiscFactory())
            for _ in range(faker.pyint(min_value=1, max_value=4)):
                article.images.append(
                    ImageFactory(path=join(
                        str(temp_image_dir),
                        faker.year(),
                        faker.month(),
                        faker.day_of_month(),
                        faker.file_name(extension="jpg"),
                    )))
            session.add(article)
        for _ in range(faker.pyint(min_value=12, max_value=24)):
            session.add(CrazyNameFactory())
        try:
            session.commit()
        except IntegrityError:
            # Random data may collide on unique constraints; roll back
            # rather than fail fixture setup.
            session.rollback()
    yield db
    if database_exists(db.engine.url):
        drop_database(db.engine.url)
def db(create, drop, drop_tables, create_tables, recreate_tables):
    """ Create/Drop database or database tables """
    if not any([create, drop, drop_tables, create_tables, recreate_tables]):
        # No flags given: show the command help instead of silently doing nothing.
        ctx = click.get_current_context()
        click.echo(ctx.get_help())
        return
    db_url = flask_app.config['SQLALCHEMY_DATABASE_URI']
    db_name = db_url.database
    if drop:
        click.echo(f'Dropping {db_name} database')
        sqlalchemy_utils.drop_database(db_url)
    if create:
        click.echo(f'Creating {db_name} database')
        sqlalchemy_utils.create_database(db_url, encoding='utf8')
    if drop_tables or recreate_tables:
        click.echo('Drop DB tables')
        drop_schemas()
    if create or create_tables or recreate_tables:
        click.echo('Creating DB tables')
        flask_app.db.create_all()
class DropDB():
    # NOTE(review): everything below executes at class-definition
    # (import) time, not when DropDB is instantiated — confirm this
    # side effect is intended; a function or __init__ would be the
    # conventional shape.
    url = app.config['SQLALCHEMY_DATABASE_URI']
    basedir = os.path.abspath(os.path.dirname(__file__))
    folder = os.path.join(basedir, 'migrations')
    if not database_exists(url):
        print("Database does not exist!")
    else:
        drop_database(url)
        # Also remove the Alembic migrations folder so a fresh init is possible.
        if os.path.isdir(folder):
            shutil.rmtree(folder)
            print('Migrations has been removed!')
        print('Database has been removed!')
def create_test_database():
    """Create the test database from the postgis template, run Alembic
    migrations, seed initial data, and drop the database afterwards."""
    try:
        if database_exists(engine.url):
            drop_database(engine.url)
        create_database(
            engine.url, template="template_postgis"
        )  # Create the test database.
        p = os.path.join(os.getcwd(), "alembic.ini")
        m = os.path.join(os.getcwd(), "alembic")
        alembic_config = Config(p)  # Run the migrations.
        alembic_config.set_main_option("script_location", m)
        alembic_config.attributes["configure_logger"] = False
        with engine.begin() as session:
            # Share the open transaction/connection with Alembic.
            alembic_config.attributes["connection"] = session
            command.upgrade(alembic_config, "head")
        # Seed data through the app's overridden DB dependency.
        session = next(app.dependency_overrides[get_db]())
        init_db(session)
        yield  # Run the tests.
    finally:
        drop_database(engine.url)  # Drop the test database.
def test_apply_schema_dict_postgresql_foreign_key(self):
    """Applying a schema dict with a foreign key must materialize exactly
    one FK on table1.column1 referencing table2.id."""
    temp_name = ''.join(random.choices(string.ascii_lowercase, k=6))
    url = "postgresql://localhost/" + temp_name
    dbu = DatabaseUtil()
    db = dbu.create_db(url)
    # try/finally: drop the temporary database even when an assertion
    # fails, so failed runs do not leak throwaway databases.
    try:
        test_schema = deepcopy(TEST_SCHEMA)
        test_schema["table1"]["columns"]["column1"]["foreign_key"] = {
            "reference_table": "table2",
            "reference_column": "id"
        }
        dbu.apply_schema_dict(db, schema_dict=test_schema)
        table1 = db.get_table("table1")
        table1_column1 = table1.table.columns['column1']
        fk = table1_column1.foreign_keys
        self.assertEqual(len(fk), 1)
    finally:
        drop_database(url)
def recreate(args: List[str] = None):  # nocover
    """Drop and recreate the configured database, then run migrations.

    Refuses to run unless ENV is a development environment; the refusal
    now exits with a non-zero status (was exit(0)) so callers and
    scripts can detect the failure.
    """
    if conf.ENV not in ["dev", "development"]:
        logger.error(
            f"""Cant recreate database when not in development mode. Set ENV=development as an environment variable to enable this feature."""  # noqa
        )
        sys.exit(1)
    else:
        from sqlalchemy_utils import create_database, database_exists, drop_database
        url = conf.ALEMBIC_CONFIG.url
        # Log only the host/db portion so credentials never hit the logs.
        short_url = str(url).split("@")[-1]
        if database_exists(url):
            logger.warning(f"Dropping existing database: {short_url}")
            drop_database(url)
            logger.warning(f"Recreating database at: {short_url}")
            create_database(url)
        else:
            logger.warning(f"Creating new database at: {short_url}")
            create_database(url)
        upgrade()
        logger.warning("Database recreation complete")
def _setup_base_app():
    """Recreate the SQLite test database and return (app, engine, Base, session).

    `session` is a generator dependency that commits on success and
    always closes its session.
    """
    if database_exists(SQLALCHEMY_DATABASE_URL):
        drop_database(SQLALCHEMY_DATABASE_URL)
    create_database(SQLALCHEMY_DATABASE_URL)

    app = FastAPI()
    # check_same_thread=False lets FastAPI's threadpool share the sqlite connection.
    engine = create_engine(SQLALCHEMY_DATABASE_URL,
                           connect_args={"check_same_thread": False})
    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    Base = declarative_base()

    def session():
        db_session = SessionLocal()
        try:
            yield db_session
            db_session.commit()
        finally:
            db_session.close()

    return app, engine, Base, session