def restore_airspace(date_time):
    """Restore the airspace database from a copied-down dump file."""
    _handle_copy_down(LOCAL_AIRSPACE_FILE_NAME, date_time)
    psql_args = (
        '-h', ctx.geo_db_hostname,
        '-U', 'postgres',
        '-f', LOCAL_AIRSPACE_FILE_NAME,
        ctx.CONTEXT[ctx.DB_NAME],
    )
    psql(*psql_args)
def restore_db():
    """Download a gzipped SQL dump from Azure blob storage and pipe it into psql.

    Connection settings come from the environment (ACCOUNT_NAME, ACCOUNT_KEY,
    CONTAINER_NAME, POSTGRESQL_HOST, DB_USER); the blob/file name comes from
    the command line.
    """
    try:
        # Define block_blob_service
        block_blob_service = BlockBlobService(
            account_name=os.environ['ACCOUNT_NAME'],
            account_key=os.environ['ACCOUNT_KEY'])
        container_name = os.environ['CONTAINER_NAME']

        # Use argparse to grab the file name to pull from Azure.
        parser = argparse.ArgumentParser()
        parser.add_argument("file_name",
                            help="File you want to restore from Azure")
        args = parser.parse_args()

        # Download the blob into the current working directory.
        # (os.path.join instead of manual '/' concatenation.)
        local_path = os.path.join(os.getcwd(), args.file_name)
        block_blob_service.get_blob_to_path(container_name, args.file_name,
                                            local_path)

        # Stream the decompressed dump straight into psql.
        with gzip.open(args.file_name, 'rb') as f:
            psql('-h', os.environ['POSTGRESQL_HOST'],
                 '-U', os.environ['DB_USER'], _in=f)
    except Exception as e:
        # Best-effort CLI behavior: report the failure instead of a traceback.
        print(e)
def restore_reference(date_time):
    """Restore the reference database from a copied-down dump file."""
    _handle_copy_down(LOCAL_REF_FILE_NAME, date_time)
    target_db = ctx.CONTEXT[ctx.DB_NAME]
    psql('-h', ctx.ref_db_hostname, '-U', 'postgres',
         '-f', LOCAL_REF_FILE_NAME, target_db)
def load_sql_file(filename):
    """Feed the named SQL file from the test data folder into psql."""
    sql_path = join(TEST_FOLDER, 'data', filename)
    with open(sql_path, 'r') as sql_file:
        sh.psql('postgres', '--no-psqlrc', _in=sql_file,
                host='localhost', port='5432', username='******')
def migrate_table(self, db_settings, old_db, old_table, verbose):
    """Copy one table from old_db into the configured database via pg_dump | psql."""
    conn = dict(h=db_settings['HOST'], p=db_settings['PORT'],
                U=db_settings['USER'])
    with ShVerbose(verbose=verbose):
        dump = sh.pg_dump(old_db, t=old_table, _piped=True, **conn)
        sh.psql(dump, db_settings['NAME'], **conn)
def run(self):
    """Load the station shapefile into lyon.raw_station and log the import."""
    table = "lyon.raw_station"
    zip_dir = os.path.abspath(os.path.dirname(self.input()['zip'].path))
    shapefile = os.path.join(zip_dir, self.typename + '.shp')
    loader_args = iodb.shp2pgsql_args(self.projection, shapefile, table)
    conn_args = iodb.psql_args()
    with self.output().open('w') as log_file:
        sh.psql(sh.shp2pgsql(loader_args), conn_args)
        log_file.write("shp2pgsql {} at {}\n".format(shapefile, datetime.now()))
        log_file.write("Create lyon.raw_station\n")
def create_user(name, pw):
    """Create a PostgreSQL role `name` with password `pw`.

    Returns False (and echoes a message) if the role already exists,
    True after creating it.
    """
    # Escape embedded single quotes so a quote in the name or password
    # cannot break out of the SQL string literal. NOTE: `name` is still
    # interpolated as an unquoted identifier in CREATE USER — callers must
    # pass a trusted, valid identifier.
    safe_name = name.replace("'", "''")
    sql = "SELECT 1 FROM pg_roles WHERE rolname='%s'" % safe_name
    if '1' in psql('-Upostgres', '-tAc', sql):
        click.echo("User already exists: " + name)
        return False
    click.echo("Creating user")
    psql('-Upostgres', "-c CREATE USER {} WITH PASSWORD '{}';".format(
        name, pw.replace("'", "''")))
    return True
def load_sql_file(filename):
    """Load SQL file into PostgreSQL."""
    with open(join(TEST_FOLDER, 'data', filename), 'r') as handle:
        sh.psql('postgres',
                '--no-psqlrc',
                host='localhost',
                port='5432',
                username='******',
                _in=handle)
def run(self):
    """Import the shapefile into <city>.<table> and record what was done."""
    table = '{}.{}'.format(self.city, self.table)
    src_dir = os.path.abspath(os.path.dirname(self.input()['zip'].path))
    shapefile = os.path.join(src_dir, self.typename + '.shp')
    loader_args = shp2pgsql_args(self.projection, shapefile, table)
    conn_args = psql_args()
    with self.output().open('w') as fobj:
        sh.psql(sh.shp2pgsql(loader_args), conn_args)
        fobj.write("shp2pgsql {} at {}\n".format(shapefile, dt.now()))
        fobj.write("Create {}.{}\n".format(self.city, self.table))
def destroy():
    """Drop the local ooiuidev/ooiuitest databases after confirmation.

    Refuses to act unless the SQLAlchemy engine URL matches the known
    local-development connection string, to avoid dropping anything else.
    """
    from sh import psql
    db_check = str(db.engine)
    if prompt_bool("Are you sure you want to do drop %s" % db_check):
        # Safety guard: only the well-known local dev engine may be dropped.
        if (db_check == 'Engine(postgres://postgres:***@localhost/ooiuidev)'):
            psql('-c', 'drop database ooiuidev')
            psql('-c', 'drop database ooiuitest')
            app.logger.info(
                'ooiuidev and ooiuitest databases have been dropped.')
        else:
            # Python 2 print statement — this module predates Python 3.
            print 'Must be working on LOCAL_DEVELOPMENT to destroy db'
def run(self):
    """Load the shapefile into the <city>.<table> relation and log it."""
    target = self.city + '.' + self.table
    src_dir = os.path.abspath(os.path.dirname(self.input()['zip'].path))
    shapefile = os.path.join(src_dir, self.typename + '.shp')
    loader = shp2pgsql_args(self.projection, shapefile, target)
    conn = psql_args()
    with self.output().open('w') as log_file:
        sh.psql(sh.shp2pgsql(loader), conn)
        log_file.write("shp2pgsql {} at {}\n".format(shapefile, dt.now()))
        log_file.write("Create {schema}.{table}\n".format(schema=self.city,
                                                          table=self.table))
def run(self):
    """Import the shapefile into <schema>.<table>."""
    relation = self.schema + '.' + self.table
    folder = os.path.abspath(os.path.dirname(self.input()['zip'].path))
    shapefile = os.path.join(folder, self.fname + '.shp')
    loader_args = shp2pgsql_args(self.projection, shapefile, relation)
    conn_args = psql_args()
    # check if the schema exist. raise if this is not the case
    with self.output().open('w') as fobj:
        sh.psql(sh.shp2pgsql(loader_args), conn_args)
        fobj.write("shp2pgsql {} at {}\n".format(shapefile, dt.now()))
        fobj.write("Create {schema}.{table}\n".format(schema=self.schema,
                                                      table=self.table))
def migrate_table(self, db_settings, old_db, old_table, verbose):
    """Stream a single-table dump from old_db into the target database."""
    host = db_settings['HOST']
    port = db_settings['PORT']
    user = db_settings['USER']
    with ShVerbose(verbose=verbose):
        sh.psql(
            sh.pg_dump(old_db, h=host, p=port, U=user,
                       t=old_table, _piped=True),
            db_settings['NAME'],
            h=host, p=port, U=user)
def destroy():
    """Drop the local ooiuidev/ooiuitest databases after confirmation.

    Only acts when the SQLAlchemy engine URL matches the known
    local-development connection string.
    """
    from sh import psql
    db_check = str(db.engine)
    if prompt_bool(
            "Are you sure you want to do drop %s" % db_check
    ):
        # Safety guard: only the well-known local dev engine may be dropped.
        if (db_check == 'Engine(postgres://postgres:***@localhost/ooiuidev)'):
            psql('-c', 'drop database ooiuidev')
            psql('-c', 'drop database ooiuitest')
            app.logger.info('ooiuidev and ooiuitest databases have been dropped.')
        else:
            # Python 2 print statement — this module predates Python 3.
            print 'Must be working on LOCAL_DEVELOPMENT to destroy db'
def run_psql_cmd(postgresql_uri, command):
    """Run one SQL command via psql; exit with status 2 if stderr is non-empty."""
    captured_err = io.StringIO()
    # PGOPTIONS suppresses NOTICE chatter so stderr only carries real problems.
    sh.psql('-qX', d=postgresql_uri, c=command, _err=captured_err,
            _env={'PGOPTIONS': '--client-min-messages=warning'})
    problems = captured_err.getvalue()
    if problems:
        logger.warning("errors while executing %s: %s", command, problems)
        sys.exit(2)
def load_sql_file(filename):
    """Load SQL file into PostgreSQL."""
    sql_path = join(TEST_FOLDER, "data", filename)
    with open(sql_path, "r") as sql_handle:
        sh.psql("postgres", "--no-psqlrc", _in=sql_handle,
                host="localhost", port="5432", username="******")
def __enter__(self):
    """Create a throwaway test database, load its schema, and seed test data.

    Order matters: createdb -> initial.sql schema -> seed script, with the
    seed run from inside the backend tree under MIX_ENV=test and the
    database URL pointed at the freshly created db.
    """
    printc("CREATE DB %s" % self.dbname, color="blue")
    sh.createdb(self.dbname, _out="log/create-tmpdb.txt")
    sh.psql(self.dbname, "-f",
            "backend/apps/serverboards/priv/repo/initial.sql",
            _out="log/create-tmpdb.txt")
    # Credentials redacted in source; URL targets the temporary database.
    databaseurl = ("postgresql://*****:*****@localhost/%s" % self.dbname)
    with envset(MIX_ENV="test", SERVERBOARDS_DATABASE_URL=databaseurl), \
            chdir("backend"):
        # Log path is relative to backend/, hence the ../ prefix.
        sh.mix("run", "apps/serverboards/priv/repo/test_seeds.exs",
               _out="../log/create-tmpdb.txt")
def run_sql_test(filename):
    """Run a SQL test file and return its trimmed tuples-only output."""
    test_path = join(TEST_FOLDER, 'test', filename)
    with open(test_path, 'r') as handle:
        with io.StringIO() as captured:
            sh.psql('postgres', '--no-psqlrc', '--tuples-only',
                    host='localhost', port='5432', username='******',
                    quiet=True, _in=handle, _out=captured)
            result = captured.getvalue()
    return result.strip()
def main():
    """Copy each schema listed in $DUMP_SCHEMAS from prod to QA via pg_dump | psql."""
    # Default to '' so an unset variable means "no schemas" instead of an
    # AttributeError on None.split().
    schemas = os.getenv('DUMP_SCHEMAS', '')
    # split() with no argument tolerates repeated whitespace and yields []
    # for an empty string (split(" ") would yield ['']).
    for schema in schemas.split():
        psql(
            pg_dump('-h', prod_pg_opts['host'],
                    '-U', prod_pg_opts['username'],
                    '-d', prod_pg_opts['database'],
                    '--schema', schema,
                    '-O', '-v', '--clean',
                    _piped=True),
            '-h', qa_pg_opts['host'],
            '-U', qa_pg_opts['username'],
            '-d', qa_pg_opts['database'],
        )
def resetdb():
    """Create the tables."""
    import annotator.models  # noqa
    click.echo('Resetting database...')
    target = db.engine.url.database
    # Kick any open sessions off the database first so dropdb is not blocked.
    terminate_sql = '''
    SELECT pg_terminate_backend(pid)
    FROM pg_stat_activity
    WHERE datname = '{}'
    '''.format(target)
    psql('--command', terminate_sql)
    dropdb('--if-exists', target)
    createdb(target)
    _createtables()
def restore_database():
    """Restores the database via pg_restore.

    Creates the 'dispatch' database and loads dispatch-backup.dump into it,
    authenticating with the credentials from dispatch.config.
    """
    from sh import psql, createdb
    from dispatch.config import DATABASE_HOSTNAME, DATABASE_PORT, DATABASE_CREDENTIALS

    # Split on the FIRST ':' only, so passwords containing colons survive.
    username, password = str(DATABASE_CREDENTIALS).split(":", 1)
    print(
        createdb(
            "-h", DATABASE_HOSTNAME,
            "-p", DATABASE_PORT,
            "-U", username,
            "dispatch",
            _env={"PGPASSWORD": password},
        )
    )
    print(
        psql(
            "-h", DATABASE_HOSTNAME,
            "-p", DATABASE_PORT,
            "-U", username,
            "-f", "dispatch-backup.dump",
            _env={"PGPASSWORD": password},
        )
    )
def create_db(name, owner):
    """Create database `name` owned by `owner`.

    Returns False (and echoes a message) if the database already exists,
    True after creating it.
    """
    # `psql -lqtA` prints one 'name|owner|encoding|...' line per database.
    # Compare the name field exactly; the original substring test would also
    # match e.g. 'foobar' when asked about 'foo'.
    existing = [line.split('|', 1)[0]
                for line in str(psql('-Upostgres', '-lqtA')).splitlines()]
    if name in existing:
        click.echo("Database already exists: " + name)
        return False
    click.echo("Creating database")
    pgcreatedb('-Upostgres', '-O' + owner, name, encoding='utf-8')
    return True
def run_script(filepath, postgresql_uri):
    """Execute a SQL script file; abort with exit code 2 on any stderr output."""
    logger.info("running script %s", filepath)
    stderr_capture = io.StringIO()
    sh.psql(
        '-qX',
        d=postgresql_uri,
        f=filepath,
        _err=stderr_capture,
        # prevents NOTICE: lines from being treated as errors
        _env={'PGOPTIONS': '--client-min-messages=warning'},
    )
    stderr_text = stderr_capture.getvalue()
    if stderr_text:
        logger.warning("errors while executing %s: %s", filepath, stderr_text)
        sys.exit(2)
def flushall(psql):
    """Flush PostgreSQL (and OpenLDAP fixtures) back to a pristine state."""
    statements = [
        "DROP DATABASE IF EXISTS app0;",
        "DROP DATABASE IF EXISTS app1;",
        "DELETE FROM pg_catalog.pg_auth_members;",
        "DELETE FROM pg_catalog.pg_authid "
        "WHERE rolname != 'postgres' AND rolname NOT LIKE 'pg_%';",
    ]
    for statement in statements:
        psql('-tc', statement)
def run_sql_test(filename):
    """Run SQL test file, returning its (stdout, stderr) stripped."""
    test_path = join(TEST_FOLDER, "test", filename)
    with open(test_path, "r") as handle:
        with io.StringIO() as captured_out, io.StringIO() as captured_err:
            sh.psql(
                "postgres",
                "--no-psqlrc",
                "--tuples-only",
                host="localhost",
                port="5432",
                username="******",
                quiet=True,
                _in=handle,
                _out=captured_out,
                _err=captured_err,
            )
            return (captured_out.getvalue().strip(),
                    captured_err.getvalue().strip())
def create_user(database_url, token, logfile="log/createuser.txt"):
    """Seed a test user directly via SQL: account, password, token, groups.

    Each statement is formatted with the (redacted) username and the given
    token, then piped into psql against `database_url`; all psql output goes
    to `logfile`. The interleaved SELECTs only echo state into the log.
    """
    sqls = [
        """
        INSERT INTO auth_user (email, name, is_active, inserted_at, updated_at)
            VALUES ('{username}', 'Test User', true, NOW(), NOW());
        """,
        """
        SELECT * FROM auth_user;
        """,
        # Pre-hashed bcrypt password so no hashing code is needed here.
        """
        INSERT INTO auth_user_password (user_id, password, inserted_at, updated_at)
            VALUES ((SELECT id FROM auth_user WHERE email='{username}'),
                    '$bcrypt$$2b$12$mFXChDI63yh1WPR./gJjk.vq7U3Q/r1xjtgmLJhDhPoaZd650pAny',
                    NOW(), NOW());
        SELECT * FROM auth_user_password
        """,
        # Token is valid for 30 minutes, with NULL (unrestricted) perms.
        """
        INSERT INTO auth_user_token (user_id, token, perms, time_limit, inserted_at, updated_at)
            VALUES ((SELECT id FROM auth_user WHERE email='{username}'),
                    '{token}', NULL, NOW() + '30 minutes'::interval, NOW(), NOW());
        """,
        """
        SELECT * FROM auth_user_token;
        """,
        # Membership in both the admin and user groups.
        """
        INSERT INTO auth_user_group (user_id, group_id)
            VALUES ((SELECT id FROM auth_user WHERE email='{username}'),
                    (SELECT id FROM auth_group WHERE name='admin')),
                   ((SELECT id FROM auth_user WHERE email='{username}'),
                    (SELECT id FROM auth_group WHERE name='user'));
        SELECT * FROM auth_user_group;
        """
    ]
    for sql in sqls:
        sql = sql.format(username="******", token=token)
        # printc(sql, color="blue")
        sh.psql(database_url, _in=sql, _out=logfile, _err=logfile)
def restore_database(dump_file):
    """Restores the database via pg_restore.

    Creates the configured database (tolerating "already exists") and then
    loads `dump_file` into it with psql.
    """
    import sh
    from sh import psql, createdb
    from dispatch.config import (
        DATABASE_HOSTNAME,
        DATABASE_NAME,
        DATABASE_PORT,
        DATABASE_CREDENTIALS,
    )

    username, password = str(DATABASE_CREDENTIALS).split(":")
    # NOTE(review): the credentials parsed above are immediately overwritten
    # by redacted placeholders — this looks like a sanitization artifact;
    # confirm the intended values before relying on this function.
    username = "******"
    password = "******"
    try:
        print(
            createdb(
                "-h", DATABASE_HOSTNAME,
                "-p", DATABASE_PORT,
                "-U", username,
                DATABASE_NAME,
                _env={"PGPASSWORD": password},
            )
        )
    except sh.ErrorReturnCode_1:
        # createdb exits 1 when the database already exists; treat as OK.
        print("Database already exists.")

    print(
        psql(
            "-h", DATABASE_HOSTNAME,
            "-p", DATABASE_PORT,
            "-U", username,
            "-d", DATABASE_NAME,
            "-f", dump_file,
            _env={"PGPASSWORD": password},
        )
    )
    click.secho("Success.", fg="green")
def init_schema(org_code, db_session): dump_file = "/Users/qiyangduan/git/kandbox/dispatch/duan/org_template.sql" # import sh from sh import psql from dispatch.config import ( DATABASE_HOSTNAME, DATABASE_NAME, DATABASE_PORT, # DATABASE_CREDENTIALS, ) # db_session.execute( "CREATE USER org_template WITH ENCRYPTED PASSWORD 'org_template'") db_session.execute( "CREATE SCHEMA org_template AUTHORIZATION org_template ") db_session.commit() log.debug("USER org_template created ...") # DATABASE_CREDENTIALS= # username, password = str(DATABASE_CREDENTIALS).split(":") username = "******" password = None # This should fail. raise Exception("Not implemented!") print( psql( "-h", DATABASE_HOSTNAME, "-p", DATABASE_PORT, "-U", username, "-d", DATABASE_NAME, "-f", dump_file, _env={"PGPASSWORD": password}, )) db_session.execute(f"ALTER USER org_template rename to {org_code} ") db_session.execute(f"ALTER USER {org_code} PASSWORD '{org_code}' ") db_session.execute(f"ALTER SCHEMA org_template rename to {org_code} ") db_session.commit() log.debug(f"USER org_template is renamed to {org_code}...")
def migrate():
    """Run all of the unapplied migrations"""
    next_migration_num = get_next_to_run()
    migration_folder = get_migration_folder()
    # Select migrations at or beyond the next number, in numeric order.
    pending = sorted(
        (path for path in get_local_migrations(migration_folder)
         if get_migration_num(path) >= next_migration_num),
        key=get_migration_num)
    if not pending:
        print(u"No new migrations to run in {}".format(migration_folder))
    dbname = get_dbname()
    username = get_username()
    print(u"Running migrations against db: {}".format(dbname))
    for name in pending:
        print(u"Running migration:", name)
        filename = os.path.join(migration_folder, name)
        print(psql(
            '--no-psqlrc',                 # Don't read the local psqlrc
            '--quiet',                     # Hide some unnecessary output
            '--echo-all',                  # Print all commands to stdout as they are read
            '--single-transaction',        # Wrap the script in BEGIN/COMMIT
            '--set', 'ON_ERROR_STOP=1',    # Stop on errors
            '--pset', 'pager=off',         # Don't try and page output
            '--dbname', dbname,
            '--file', filename,
            '-U', username,
            _env={'PGOPTIONS': '--client-min-messages=warning'},
        ))
        # Record that this migration has been applied.
        session = get_session()
        applied = MigrationHistory(
            migration_id=get_migration_num(name),
            name=name,
        )
        session.add(applied)
        session.commit()
        session.close()
def initdb():
    """Create the gis user, the application databases, and their extensions."""
    environ['PGUSER'] = "******"
    click.echo("Creating user")
    psql("-c CREATE USER " + DB_USER + " WITH PASSWORD '" + DB_PASS + "';")
    click.echo("Creating databases")
    for dbname in (DB_NAME, "maps"):
        createdb("-Ogis", dbname, encoding='utf-8')
    click.echo("Creating extensions")
    psql("-d" + DB_NAME,
         "-c CREATE EXTENSION postgis; CREATE EXTENSION hstore;")
    psql("-dmaps", "-c CREATE EXTENSION postgis")
def destroy():
    """Drop the prod/dev/test databases after interactive confirmation.

    Each drop is attempted independently so a missing database does not
    stop the others from being removed.
    """
    from sh import psql
    db_check = str(db.engine)
    if prompt_bool(
            "Are you sure you want to do drop %s" % db_check
    ):
        # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; single-argument print() works on both
        # Python 2 and 3.
        try:
            psql('-c', 'drop database ooiuiprod', '-U', 'postgres')
        except Exception:
            print('prod db not found')
        try:
            psql('-c', 'drop database ooiuidev', '-U', 'postgres')
        except Exception:
            print('dev db not found')
        try:
            psql('-c', 'drop database ooiuitest', '-U', 'postgres')
        except Exception:
            print('test db not found')
        app.logger.info('Databases have been dropped.')
def initdb(name, owner):
    """Create the database if needed and ensure the postgis extension exists."""
    create_db(name, owner)
    psql('-Upostgres',
         "-d" + name,
         "-c CREATE EXTENSION IF NOT EXISTS postgis")
def __exit__(self, *args):
    """Tear down the temporary database and its users on context exit."""
    cleanup_sql = DESTROY_DB_USERS.format(DBNAME=self.dbname)
    sh.psql("template1", _in=cleanup_sql)
def deploy(password, bulkload):
    """Create and populate the local OOI UI dev and test databases.

    Builds both databases with the ooiui schema and postgis extension,
    creates the ORM tables, optionally bulk-loads seed data, and (outside
    Travis CI) seeds the default admin user with its scopes.
    """
    from flask.ext.migrate import upgrade
    from ooiservices.app.models import User, UserScope, UserScopeLink, Array, Organization
    from ooiservices.app.models import PlatformDeployment, InstrumentDeployment, Stream, StreamParameterLink
    from sh import psql
    # Create the local database
    app.logger.info('Creating DEV and TEST Databases')
    psql('-c', 'create database ooiuidev;', '-U', 'postgres')
    psql('ooiuidev', '-c', 'create schema ooiui')
    psql('ooiuidev', '-c', 'create extension postgis')
    # Create the local test database
    psql('-c', 'create database ooiuitest;', '-U', 'postgres')
    psql('ooiuitest', '-c', 'create schema ooiui')
    psql('ooiuitest', '-c', 'create extension postgis')
    # Make sure all mappers are configured before create_all().
    from sqlalchemy.orm.mapper import configure_mappers
    configure_mappers()
    db.create_all()
    if bulkload:
        with open('db/ooiui_schema_data.sql') as f:
            psql('ooiuidev', _in=f)
        app.logger.info('Bulk test data loaded.')
    # migrate database to latest revision
    #upgrade()
    if not os.getenv('TRAVIS'):
        # Seed the default organization, scopes, and admin user.
        Organization.insert_org()
        UserScope.insert_scopes()
        app.logger.info('Insert default user, name: admin')
        User.insert_user(password=password)
        admin = User.query.first()
        admin.scopes.append(UserScope.query.filter_by(scope_name='user_admin').first())
        admin.scopes.append(UserScope.query.filter_by(scope_name='redmine').first())
        db.session.add(admin)
        db.session.commit()
    if bulkload:
        with open('db/ooiui_schema_data_notifications.sql') as f:
            psql('ooiuidev', _in=f)
        app.logger.info('Bulk test data loaded for notifications.')
def deploy(password, bulkload):
    """Create and populate the local OOI UI dev and test databases.

    Builds both databases with the ooiui schema and postgis extension,
    creates the ORM tables, optionally bulk-loads seed data, and seeds the
    default admin user with the user_admin scope.
    """
    from flask.ext.migrate import upgrade
    from ooiservices.app.models import User, UserScope, UserScopeLink, Array
    from ooiservices.app.models import PlatformDeployment, InstrumentDeployment, Stream, StreamParameterLink
    from sh import psql
    # Create the local database
    app.logger.info('Creating DEV and TEST Databases')
    psql('-c', 'create database ooiuidev;', '-U', 'postgres')
    psql('ooiuidev', '-c', 'create schema ooiui')
    psql('ooiuidev', '-c', 'create extension postgis')
    # Create the local test database
    psql('-c', 'create database ooiuitest;', '-U', 'postgres')
    psql('ooiuitest', '-c', 'create schema ooiui')
    psql('ooiuitest', '-c', 'create extension postgis')
    db.create_all()
    if bulkload:
        with open('db/ooiui_schema_data.sql') as f:
            psql('ooiuidev', _in=f)
        app.logger.info('Bulk test data loaded.')
    # migrate database to latest revision
    #upgrade()
    # Seed scopes and the default admin user.
    UserScope.insert_scopes()
    app.logger.info('Insert default user, name: admin')
    User.insert_user(password=password)
    admin = User.query.first()
    admin.scopes.append(UserScope.query.filter_by(scope_name='user_admin').first())
    db.session.add(admin)
    db.session.commit()
def copy_schema(log, sql_path, from_config, from_schema, to_config, to_schema,
                structure_only=False):
    """Copy a schema between PostgreSQL databases via pg_dump + psql.

    Dumps `from_schema` out of the source database into a file under
    sql_path/"schemas", rewrites schema and role names inside the dump,
    then loads it into the target database as `to_schema`. With
    structure_only=True only the DDL is copied (pg_dump -s).

    Side effects: sets PGPASSWORD in os.environ for both connections and
    leaves a modified dump file on disk.
    """
    log.info(
        f'{from_config["name"]}.{from_schema} -> {to_config["name"]}.{to_schema}'
    )

    log.debug("Dump")
    os.environ["PGPASSWORD"] = from_config["password"]
    if structure_only:
        schema_dump = (
            sql_path / "schemas" /
            f'{from_config["name"]}_{from_schema}_structure_only.sql')
        dump_extra = ("-s",)  # structure (DDL) only
    else:
        schema_dump = (
            sql_path / "schemas" /
            f'{from_config["name"]}_{from_schema}.sql')
        dump_extra = ()
    print(
        sh.pg_dump(
            "-U", from_config["user"],
            "-n", from_schema,
            "-h", from_config["host"],
            "-p", from_config["port"],
            *dump_extra,
            from_config["name"],
            _err_to_out=True,
            _out=str(schema_dump),
        ),
        end="",
    )

    log.debug("Modify")
    # Sequential in-place rewrite passes over the dump: rename the schema,
    # replace CREATE SCHEMA with a drop/recreate preamble, and retarget
    # ownership to the destination role. Passed as str(...) consistently
    # (the original mixed Path and str arguments).
    replacements = [
        (from_schema, to_schema),
        (
            "CREATE SCHEMA " + to_schema,
            f"DROP SCHEMA IF EXISTS {to_schema} CASCADE; CREATE SCHEMA {to_schema}; "
            f'set search_path to {to_schema},public; CREATE EXTENSION IF NOT EXISTS "uuid-ossp"',
        ),
        # change role name
        (f'TO {from_config["user"]}', f'TO {to_config["user"]}'),
        (f'Owner: {from_config["user"]}', f'Owner: {to_config["user"]}'),
    ]
    for old, new in replacements:
        with fileinput.FileInput(str(schema_dump), inplace=True) as file:
            for line in file:
                print(line.replace(old, new), end="")
    log.debug(f"Saved to file: {schema_dump}")

    log.debug("Import")
    os.environ["PGPASSWORD"] = to_config["password"]
    # Use a context manager so the dump file handle is closed
    # (the original opened it and never closed it).
    with open(schema_dump, "r") as schemafile:
        print(
            sh.psql(
                "-U", to_config["user"],
                "-h", to_config["host"],
                "-p", to_config["port"],
                "--quiet",
                "-o", "/dev/null",
                to_config["name"],
                _err_to_out=True,
                _in=schemafile,
            ),
            end="",
        )
def deploy(password, production, psqluser):
    """Create and populate the OOI UI databases.

    With production=True creates/loads ooiuiprod; otherwise creates and
    loads the local dev and test databases. Afterwards (outside Travis CI)
    seeds the default admin user with its scopes.
    """
    from flask.ext.migrate import upgrade
    from ooiservices.app.models import User, UserScope, UserScopeLink, Array, Organization
    from ooiservices.app.models import PlatformDeployment, InstrumentDeployment, Stream, StreamParameterLink
    from sh import psql

    if production:
        app.logger.info('Creating PRODUCTION Database')
        # The role may already exist; narrowed from bare `except:` so
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        try:
            psql('-c', 'CREATE ROLE postgres LOGIN SUPERUSER')
        except Exception:
            pass
        psql('-c', 'create database ooiuiprod;', '-U', psqluser)
        psql('ooiuiprod', '-c', 'create schema ooiui', '-U', psqluser)
        psql('ooiuiprod', '-c', 'create extension postgis', '-U', psqluser)
    else:
        try:
            psql('-c', 'CREATE ROLE postgres LOGIN SUPERUSER')
        except Exception:
            pass
        # Create the local database
        app.logger.info('Creating DEV and TEST Databases')
        psql('-c', 'create database ooiuidev;', '-U', psqluser)
        psql('ooiuidev', '-c', 'create schema ooiui', '-U', psqluser)
        psql('ooiuidev', '-c', 'create extension postgis', '-U', psqluser)
        # Create the local test database
        psql('-c', 'create database ooiuitest;', '-U', psqluser)
        psql('ooiuitest', '-c', 'create schema ooiui', '-U', psqluser)
        psql('ooiuitest', '-c', 'create extension postgis', '-U', psqluser)

    # Make sure all mappers are configured before create_all().
    from sqlalchemy.orm.mapper import configure_mappers
    configure_mappers()
    db.create_all()

    if production:
        app.logger.info('Populating Production Database . . .')
        with open('db/ooiui_schema_data.sql') as f:
            psql('-U', psqluser, 'ooiuiprod', _in=f)
        with open('db/ooiui_params_streams_data.sql') as h:
            psql('-U', psqluser, 'ooiuiprod', _in=h)
        app.logger.info('Production Database loaded.')
    else:
        app.logger.info('Populating Dev Database . . .')
        with open('db/ooiui_schema_data.sql') as f:
            psql('-U', psqluser, 'ooiuidev', _in=f)
        with open('db/ooiui_params_streams_data.sql') as h:
            psql('-U', psqluser, 'ooiuidev', _in=h)
        app.logger.info('Dev Database loaded.')

    # migrate database to latest revision
    #upgrade()
    if not os.getenv('TRAVIS'):
        # Seed scopes and the default admin user.
        UserScope.insert_scopes()
        app.logger.info('Insert default user, name: admin')
        User.insert_user(password=password)
        admin = User.query.first()
        admin.scopes.append(UserScope.query.filter_by(scope_name='user_admin').first())
        admin.scopes.append(UserScope.query.filter_by(scope_name='sys_admin').first())
        admin.scopes.append(UserScope.query.filter_by(scope_name='data_manager').first())
        admin.scopes.append(UserScope.query.filter_by(scope_name='redmine').first())
        db.session.add(admin)
        db.session.commit()