def pending(write_to_file=False):
    with temporary_db() as CURRENT_DB_URL, temporary_db() as TARGET_DB_URL:
        load_current_production_state(CURRENT_DB_URL)
        load_current_staging_state(TARGET_DB_URL)

        with S(CURRENT_DB_URL) as s_current, S(TARGET_DB_URL) as s_target:
            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()

            print('-- Pending (prod -> staging):\n\n{}'.format(m.sql))

            if write_to_file:
                with io.open('DB/migration/pending/pending.sql', 'w') as w:
                    w.write(m.sql)

    with temporary_db() as CURRENT_DB_URL, temporary_db() as TARGET_DB_URL:
        load_current_staging_state(CURRENT_DB_URL)
        load_from_app_model(TARGET_DB_URL)

        with S(CURRENT_DB_URL) as s_current, S(TARGET_DB_URL) as s_target:
            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()

            print('-- Pending (staging -> models):\n\n{}'.format(m.sql))

            if write_to_file:
                with io.open('DB/migration/pending/pending.sql', 'w') as w:
                    w.write(m.sql)
def databases_are_equal(dburl_a, dburl_b):
    with S(dburl_a) as s0, S(dburl_b) as s1:
        m = Migration(s0, s1)
        m.set_safety(False)
        m.add_all_changes()
        if m.statements:
            print('DIFFERENCES FOUND:')
            print(m.sql)
        return not m.statements
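
# Hedged usage sketch (not from the original sources): a pytest-style check built
# on databases_are_equal() above. `temporary_database` and `load_sql_from_file`
# are real sqlbag helpers; `create_app_schema` is a hypothetical stand-in for
# whatever loads the application's model-defined schema.
def test_schema_file_matches_models():
    from sqlbag import S, temporary_database, load_sql_from_file

    with temporary_database() as url_a, temporary_database() as url_b:
        with S(url_a) as s:
            load_sql_from_file(s, 'schema.sql')  # hand-written schema file
        create_app_schema(url_b)  # hypothetical, e.g. Base.metadata.create_all(engine)
        assert databases_are_equal(url_a, url_b)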
def staging_errors():
    with temporary_db() as CURRENT_DB_URL, temporary_db() as TARGET_DB_URL:
        load_current_staging_state(CURRENT_DB_URL)
        load_current_production_state(TARGET_DB_URL)

        with S(CURRENT_DB_URL) as s_current, S(TARGET_DB_URL) as s_target:
            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()
            print('Differences:\n{}'.format(m.sql))
def check_migration_result():
    with temporary_db() as CURRENT_DB_URL, temporary_db() as TARGET_DB_URL:
        load_post_migration_state(CURRENT_DB_URL)
        load_from_app_model(TARGET_DB_URL)

        with S(CURRENT_DB_URL) as s_current, S(TARGET_DB_URL) as s_target:
            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()
            print('Differences:\n{}'.format(m.sql))
def sync(database):
    if os.getenv('HOSTINGENV') == 'DEV':
        db_url_format = 'postgresql://*****:*****@postgres:5432/%s'
    else:
        db_url_format = 'postgresql://*****:*****@/%s?host=/var/run/postgresql'

    temp_db_url = db_url_format % f'{database}temp'
    db_url = db_url_format % database

    with temp_db(temp_db_url) as s_target_temp:
        create_database(db_url)

        with S(db_url) as s_current, S(s_target_temp) as s_target:
            run_all(f'{database}/migrations', s_current)
            run_all(f'{database}/schemas', s_target)
            run_all(f'{database}/tables', s_target)

            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()

            if m.statements:
                print('THE FOLLOWING CHANGES ARE PENDING:', end='\n\n')
                print(m.sql)
                print()
                if (len(sys.argv) > 1 and sys.argv[1] == 'noconfirm') or input('Apply these changes? (y/n) ') == 'y':
                    print('Applying...')
                    m.apply()
                else:
                    print('Not applying.')
            else:
                print('Already synced.')
def sync(DB_URL: str = "postgresql://orflaedi:@localhost/orflaedi"):
    from sqlbag import S, temporary_database as temporary_db

    with temporary_db() as TEMP_DB_URL:
        os.environ["DATABASE_URL"] = TEMP_DB_URL

        from orflaedi.database import engine, Base
        from orflaedi.models import Model, Retailer, VehicleClassEnum, TagEnum

        Base.metadata.create_all(engine)

        with S(DB_URL) as s_current, S(TEMP_DB_URL) as s_target:
            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()

            if m.statements:
                print("THE FOLLOWING CHANGES ARE PENDING:", end="\n\n")
                print(m.sql)
                print()
                if input("Apply these changes?") == "yes":
                    print("Applying...")
                    m.apply()
                else:
                    print("Not applying.")
            else:
                print("Already synced.")
def sync(args: List[str]) -> None:
    """
    Compare live database to application schema & apply changes to database.

    Uses running database specified for application via `DB_[USER|PASS|HOST|NAME]`
    environment variables & compares to application schema defined at
    `./src/models/**/*.sql`.
    """
    # define if prompts are needed or not
    no_prompt = False
    if 'noprompt' in args:
        no_prompt = True

    # create temp database for app schema
    with _temp_db(host=DB_HOST, user=DB_USER, password=DB_PASS) as temp_db_url:
        print(f'db url: {DB_URL}')
        print(f'temp url: {temp_db_url}')

        # create sessions for current db state & target schema
        with S(DB_URL) as from_schema_session, \
                S(temp_db_url) as target_schema_session:
            # load target schema to temp db
            _load_from_app(target_schema_session)

            # diff target db & current db
            migration = Migration(from_schema_session, target_schema_session)
            migration.set_safety(False)
            migration.add_all_changes()

            # handle changes
            if migration.statements:
                print('\nTHE FOLLOWING CHANGES ARE PENDING:', end='\n\n')
                print(migration.sql)
                if no_prompt:
                    print('Applying...')
                    migration.apply()
                    print('Changes applied.')
                else:
                    if _prompt('Apply these changes?'):
                        print('Applying...')
                        migration.apply()
                        print('Changes applied.')
                    else:
                        print('Not applying.')
            else:
                print('Already synced.')
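
# Hedged sketch (an assumption, not taken from the original sources): one plausible
# implementation of the _load_from_app helper used by sync() above, executing every
# SQL file under ./src/models/ against the given session, as the docstring describes.
import glob

from sqlbag import load_sql_from_file


def _load_from_app(session) -> None:
    # run model files in a deterministic order
    for path in sorted(glob.glob('./src/models/**/*.sql', recursive=True)):
        load_sql_from_file(session, path)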
def dump():
    from sqlbag import S, temporary_database as temporary_db, load_sql_from_file
    from migra import Migration

    DB_URL_FUTURE = 'postgresql:///cuedev'

    with temporary_db() as DB_URL_CURRENT:
        with S(DB_URL_FUTURE) as s_current, S(DB_URL_CURRENT) as s_target:
            load_sql_from_file(s_target, './host.sql')

            m = Migration(s_target, s_current)
            m.set_safety(False)
            m.add_all_changes()

            if m.statements:
                print('THE FOLLOWING CHANGES ARE NOT IMMORTALISED:', end='\n\n')
                print(m.sql)
                print()
                if input('Dump Dev to host.sql?') == 'yes':
                    print('Dumping...')
                    os.system(
                        'pg_dump -s --no-comments --no-owner --no-acl --no-tablespaces postgresql:///cuedev | sed -e "/^--/d" > host.sql'
                    )
                else:
                    print('Not applying.')
            else:
                print('Already synced.')
def db_differences(db_url_a, db_url_b, **kwargs):
    with S(db_url_a, poolclass=NullPool) as a, S(db_url_b, poolclass=NullPool) as b:
        m = Migration(a, b, **kwargs)
        m.set_safety(False)
        m.add_all_changes()
        return m.sql
def sync_db_structure_to_target_db(db_url, target_db_url, confirm=True, create_extensions_only=False, **kwargs):
    log.info(f"syncing: {db_url} to {target_db_url}")

    with S(db_url, poolclass=NullPool) as s_current, S(target_db_url, poolclass=NullPool) as s_target:
        m = Migration(s_current, s_target, **kwargs)
        m.set_safety(False)

        if create_extensions_only:
            log.info("Syncing extension creation only...")
            m.add_extension_changes(creates=True, drops=False)
        else:
            m.add_all_changes()

        if m.statements:
            if confirm:  # pragma: no cover
                print("THE FOLLOWING CHANGES ARE PENDING:", end="\n\n")
                print(m.sql)
                print()
            if not confirm or prompt("Apply these changes?"):
                log.info("Applying...")
                m.apply()
                log.info("Applied.")
            else:
                if confirm:  # pragma: no cover
                    print("Not applying.")
        else:
            if confirm:  # pragma: no cover
                print("Already synced.")

    current_schema_hash = schema_hash(db_url)
    if confirm:  # pragma: no cover
        print(f"Schema hash: {current_schema_hash}")
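
# Hedged sketch (an assumption, not taken from the original sources): one way the
# schema_hash() helper referenced above could work, hashing the full creation DDL
# that migra generates for the database (empty "from" side against the live schema).
import hashlib


def schema_hash(db_url):
    with S(db_url, poolclass=NullPool) as s:
        m = Migration(None, s)
        m.set_safety(False)
        m.add_all_changes()
        ddl = m.sql
    return hashlib.sha256(ddl.encode()).hexdigest()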
def _get_schema_diff(
        from_db_url: str,
        target_db_url: str) -> Generator[Tuple[str, Migration], Any, Any]:
    """Get schema diff between two databases using djrobstep/migra."""
    with S(from_db_url) as from_schema_session, \
            S(target_db_url) as target_schema_session:
        migration = Migration(from_schema_session, target_schema_session)
        migration.set_safety(False)
        migration.add_all_changes()
        yield migration.sql, migration
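
# Hedged usage sketch (not from the original sources): _get_schema_diff() above
# yields exactly once, so it can be wrapped with contextlib.contextmanager and
# consumed as a context manager. The database URLs below are placeholders.
import contextlib


def print_schema_diff() -> None:
    get_schema_diff = contextlib.contextmanager(_get_schema_diff)
    with get_schema_diff('postgresql:///app_live', 'postgresql:///app_models') as (sql, migration):
        if migration.statements:
            print(sql)
        else:
            print('No differences found.')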
def sync():
    DB_URL = get_current_app_db_url()

    with temporary_db() as TEMP_DB_URL:
        load_from_app_model(TEMP_DB_URL)

        with S(DB_URL) as s_current, S(TEMP_DB_URL) as s_target:
            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()

            if m.statements:
                print('THE FOLLOWING CHANGES ARE PENDING:', end='\n\n')
                print(m.sql)
                print('Applying...')
                m.apply()
            else:
                print('Already synced.')
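
# Hedged sketch (an assumption, not taken from the original sources): a plausible
# load_from_app_model helper for the sync()/pending() functions above, assuming the
# application schema lives in a SQLAlchemy MetaData object; `app.models` is hypothetical.
def load_from_app_model(db_url):
    from sqlalchemy import create_engine
    from app.models import metadata  # hypothetical module exposing MetaData

    engine = create_engine(db_url)
    try:
        metadata.create_all(engine)
    finally:
        engine.dispose()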
def init(db: str = None, schema: str = "schema.sql", overwrite: bool = False):
    """
    Create an initial schema SQL file, optionally from an existing database.

    :param db: An optional database to create the schema from.
    :param schema: An optional file to write schema to. Default: schema.sql
    :param overwrite: Overwrite existing file.
    """
    if os.path.exists(schema) and not overwrite:
        print(cf.bold_red("Error:"), f'File "{schema}" already exists.', file=sys.stderr)
        print("Run again with", cf.bold("--overwrite"), "to replace.")
        sys.exit(os.EX_OSFILE)

    if not db:
        with open(schema, "w") as f:
            f.write("")
        print(cf.bold("All done! ✨"))
        print(f'Created blank file "{schema}"')
        sys.exit()

    base_uri = copy_url(db)
    target_exists = database_exists(base_uri, test_can_select=True)

    if not target_exists:
        print(
            cf.bold_red("Error:"),
            f'Database "{base_uri.database}" does not exist.',
        )
        sys.exit(os.EX_NOHOST)

    sql = ""
    patch = create_admin_patch(base_uri)
    patch.start()

    with temporary_database(base_uri) as sTemp, S(db) as sFrom:
        # Compare
        m = Migration(sTemp, sFrom)
        m.add_all_changes()
        m.set_safety(False)

        # Get SQL
        sql = m.sql

    with open(schema, "wb") as f:
        f.write(pg_format(sql.encode(), unquote=False))

    print(cf.bold("All done! ✨"))
    print(f'Created file "{schema}" with schema from "{base_uri.database}"')
    sys.exit()
def sync(database):
    DB_URL = f'postgresql://*****:*****@postgres:5432/{database}'

    with temp_db(f'postgresql://*****:*****@postgres:5432/{database}temp') as TEMP_DB_URL:
        create_database(TEMP_DB_URL)
        create_database(DB_URL)

        with S(DB_URL) as s_current, S(TEMP_DB_URL) as s_target:
            run_all(f'{database}/schemas', s_target)
            run_all(f'{database}/tables', s_target)

            m = Migration(s_current, s_target)
            m.set_safety(False)
            m.add_all_changes()

            if m.statements:
                print('THE FOLLOWING CHANGES ARE PENDING:', end='\n\n')
                print(m.sql)
                print()
                if (len(sys.argv) > 1 and sys.argv[1] == 'noconfirm') or input('Apply these changes? (y/n) ') == 'y':
                    print('Applying...')
                    m.apply()
                else:
                    print('Not applying.')
            else:
                print('Already synced.')
def test_migration(schema_database, migrations_database, run_migrations):
    # apply the migrations on the migrations_database database
    run_migrations(migrations_database)

    # use migra to verify that the databases "schema_database" and "migrations_database"
    # have no differences
    with contextlib.ExitStack() as stack:
        # we use a NullPool to avoid issues when dropping the databases because
        # of opened database sessions
        schema_db_session = stack.enter_context(
            S(f"postgresql:///{schema_database}", poolclass=NullPool))
        migrations_db_session = stack.enter_context(
            S(f"postgresql:///{migrations_database}", poolclass=NullPool))

        m = Migration(schema_db_session, migrations_db_session)
        m.set_safety(False)
        m.add_all_changes()
        print(m.sql)
        assert not m.statements
def pending(_: List[str]) -> None:
    """
    Compare a production schema to application schema & save difference.

    Uses production schema stored at `./migrations/production.dump.sql` &
    application schema defined at `./src/models/**/*.sql`, then saves
    difference at `./migrations/pending.sql`.
    """
    # create temporary databases for prod & target schemas
    with _temp_db(host=DB_HOST, user=DB_USER, password=DB_PASS) as prod_schema_db_url, _temp_db(
            host=DB_HOST, user=DB_USER, password=DB_PASS) as target_db_url:
        print(f'prod temp url: {prod_schema_db_url}')
        print(f'target temp url: {target_db_url}')

        # create sessions for both databases
        with S(prod_schema_db_url) as from_schema_session, \
                S(target_db_url) as target_schema_session:
            # load both schemas into their databases
            _load_pre_migration(prod_schema_db_url)
            _load_from_app(target_schema_session)

            # get a diff
            migration = Migration(from_schema_session, target_schema_session)
            migration.set_safety(False)
            migration.add_all_changes()

            if migration.statements:
                print('\nTHE FOLLOWING CHANGES ARE PENDING:', end='\n\n')
                print(migration.sql)
            else:
                print('No changes needed, setting pending.sql to empty.')

            # write pending changes to file
            with io.open('migrations/pending.sql', 'w') as file:
                file.write(migration.sql)
            print('Changes written to ./migrations/pending.sql.')
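
# Hedged sketch (an assumption, not taken from the original sources): a plausible
# _load_pre_migration helper for pending() above, loading the committed production
# dump into the temporary database via sqlbag.
from sqlbag import S, load_sql_from_file


def _load_pre_migration(db_url: str) -> None:
    with S(db_url) as session:
        load_sql_from_file(session, './migrations/production.dump.sql')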
def do_fixture_test(
    fixture_name, schema=None, create_extensions_only=False, with_privileges=False
):
    flags = ["--unsafe"]
    if schema:
        flags += ["--schema", schema]
    if create_extensions_only:
        flags += ["--create-extensions-only"]
    if with_privileges:
        flags += ["--with-privileges"]

    fixture_path = "tests/FIXTURES/{}/".format(fixture_name)
    EXPECTED = io.open(fixture_path + "expected.sql").read().strip()

    with temporary_database(host="localhost") as d0, temporary_database(
        host="localhost"
    ) as d1:
        with S(d0) as s0:
            create_role(s0, schemainspect_test_role)

        with S(d0) as s0, S(d1) as s1:
            load_sql_from_file(s0, fixture_path + "a.sql")
            load_sql_from_file(s1, fixture_path + "b.sql")

        args = parse_args([d0, d1])
        assert not args.unsafe
        assert args.schema is None
        out, err = outs()
        assert run(args, out=out, err=err) == 3
        assert out.getvalue() == ""

        DESTRUCTIVE = "-- ERROR: destructive statements generated. Use the --unsafe flag to suppress this error.\n"
        assert err.getvalue() == DESTRUCTIVE

        args = parse_args(flags + [d0, d1])
        assert args.unsafe
        assert args.schema == schema
        out, err = outs()
        assert run(args, out=out, err=err) == 2
        assert err.getvalue() == ""
        assert out.getvalue().strip() == EXPECTED

        ADDITIONS = io.open(fixture_path + "additions.sql").read().strip()
        EXPECTED2 = io.open(fixture_path + "expected2.sql").read().strip()

        with S(d0) as s0, S(d1) as s1:
            m = Migration(s0, s1, schema=schema)
            m.inspect_from()
            m.inspect_target()

            with raises(AttributeError):
                m.changes.nonexist

            m.set_safety(False)
            if ADDITIONS:
                m.add_sql(ADDITIONS)
                m.apply()

            if create_extensions_only:
                m.add_extension_changes(drops=False)
            else:
                m.add_all_changes(privileges=with_privileges)

            expected = EXPECTED2 if ADDITIONS else EXPECTED
            assert m.sql.strip() == expected  # sql generated OK
            m.apply()

            # check for changes again and make sure none are pending
            if create_extensions_only:
                m.add_extension_changes(drops=False)
                assert (
                    m.changes.i_from.extensions.items()
                    >= m.changes.i_target.extensions.items()
                )
            else:
                m.add_all_changes(privileges=with_privileges)
                assert m.changes.i_from == m.changes.i_target

            assert not m.statements  # no further statements to apply
            assert m.sql == ""

        out, err = outs()
        assert run(args, out=out, err=err) == 0

        # test alternative parameters
        with S(d0) as s0, S(d1) as s1:
            m = Migration(get_inspector(s0), get_inspector(s1))

    # test empty
    m = Migration(None, None)
    m.add_all_changes(privileges=with_privileges)

    with raises(AttributeError):
        m.s_from
    with raises(AttributeError):
        m.s_target

    args = parse_args(flags + ["EMPTY", "EMPTY"])
    out, err = outs()
    assert run(args, out=out, err=err) == 0
def test_all():
    with temporary_database() as d0, temporary_database() as d1:
        with S(d0) as s0, S(d1) as s1:
            load_sql_from_file(s0, 'tests/FIXTURES/a.sql')
            load_sql_from_file(s1, 'tests/FIXTURES/b.sql')

        args = parse_args([d0, d1])
        assert not args.unsafe
        out, err = outs()
        assert run(args, out=out, err=err) == 3
        assert out.getvalue() == ''
        assert err.getvalue() == '-- ERROR: destructive statements generated. Use the --unsafe flag to suppress this error.\n'

        args = parse_args(['--unsafe', d0, d1])
        assert args.unsafe
        out, err = outs()
        assert run(args, out=out, err=err) == 2
        assert err.getvalue() == ''
        assert out.getvalue().strip() == EXPECTED

        with S(d0) as s0, S(d1) as s1:
            m = Migration(s0, s1)

            with raises(AttributeError):
                m.changes.nonexist

            m.set_safety(False)
            m.add_sql('alter table products rename column oldcolumn to newcolumn;')
            m.apply()
            m.add_all_changes()
            assert m.sql.strip() == EXPECTED2  # sql generated OK
            m.apply()

            # check for changes again and make sure none are pending
            m.add_all_changes()
            assert m.changes.i_from == m.changes.i_target
            assert not m.statements  # no further statements to apply

        out, err = outs()
        assert run(args, out=out, err=err) == 0
def do_fixture_test(fixture_name):
    fixture_path = 'tests/FIXTURES/{}/'.format(fixture_name)
    EXPECTED = io.open(fixture_path + 'expected.sql').read().strip()

    with temporary_database() as d0, temporary_database() as d1:
        with S(d0) as s0, S(d1) as s1:
            load_sql_from_file(s0, fixture_path + 'a.sql')
            load_sql_from_file(s1, fixture_path + 'b.sql')

        args = parse_args([d0, d1])
        assert not args.unsafe
        out, err = outs()
        assert run(args, out=out, err=err) == 3
        assert out.getvalue() == ''
        assert err.getvalue() == '-- ERROR: destructive statements generated. Use the --unsafe flag to suppress this error.\n'

        args = parse_args(['--unsafe', d0, d1])
        assert args.unsafe
        out, err = outs()
        assert run(args, out=out, err=err) == 2
        assert err.getvalue() == ''
        assert out.getvalue().strip() == EXPECTED

        ADDITIONS = io.open(fixture_path + 'additions.sql').read().strip()
        EXPECTED2 = io.open(fixture_path + 'expected2.sql').read().strip()

        if ADDITIONS:
            with S(d0) as s0, S(d1) as s1:
                m = Migration(s0, s1)
                m.inspect_from()
                m.inspect_target()

                with raises(AttributeError):
                    m.changes.nonexist

                m.set_safety(False)
                m.add_sql(ADDITIONS)
                m.apply()
                m.add_all_changes()
                assert m.sql.strip() == EXPECTED2  # sql generated OK
                m.apply()

                # check for changes again and make sure none are pending
                m.add_all_changes()
                assert m.changes.i_from == m.changes.i_target
                assert not m.statements  # no further statements to apply
                assert m.sql == ''

            out, err = outs()
            assert run(args, out=out, err=err) == 0

        # test alternative parameters
        with S(d0) as s0, S(d1) as s1:
            m = Migration(
                get_inspector(s0),
                get_inspector(s1)
            )

    # test empty
    m = Migration(None, None)
    m.add_all_changes()

    with raises(AttributeError):
        m.s_from
    with raises(AttributeError):
        m.s_target

    args = parse_args(['--unsafe', 'EMPTY', 'EMPTY'])
    out, err = outs()
    assert run(args, out=out, err=err) == 0
def creation_statements(db_url, **kwargs):
    with S(db_url, poolclass=NullPool) as s:
        m = Migration(None, s, **kwargs)
        m.set_safety(False)
        m.add_all_changes()
        return m.sql
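
# Hedged usage sketch (not from the original sources): because creation_statements()
# above diffs an empty "from" side against a live database, its output is the full
# CREATE script for that database. The output path is a placeholder.
def dump_schema_to_file(db_url, path='schema.sql'):
    with open(path, 'w') as f:
        f.write(creation_statements(db_url))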
def main(dbname, verbose, reverse):
    """
    Compare the structure of the database against what's created from the
    models during `indico db prepare`.

    By default the current database is assumed to be named `indico`, but it
    can be overridden.

    The database used for comparison will be named `indico_dbdiff` and may not
    exist. It will be created and dropped automatically.

    Since your user may or may not be a database superuser, the database is
    created from a template database named `indico_template`. You can create
    it using the following commands:

    \b
    createdb indico_template
    psql indico_template -c 'CREATE EXTENSION unaccent;'
    psql indico_template -c 'CREATE EXTENSION pg_trgm;'

    Since this script uses the command-line PostgreSQL tools, any other
    configuration should be done using the various environment variables
    (like PGHOST, PGPORT and PGUSER) and your `.pgpass` file.
    """
    temp_dbname = 'indico_dbdiff'
    base_conn = None
    target_conn = None

    # create database and dump current/new structures
    _checked_call(verbose, ['createdb', '-T', 'indico_template', temp_dbname])
    try:
        env_override = {
            'INDICO_CONF_OVERRIDE': repr({'SQLALCHEMY_DATABASE_URI': _build_conn_string(temp_dbname)})
        }
        _checked_call(verbose, ['indico', 'db', 'prepare'], env=env_override)

        # create SQLAlchemy engines/connections for base and target db
        base_eng = create_engine(_build_conn_string(temp_dbname))
        target_eng = create_engine(_build_conn_string(dbname))
        base_conn = base_eng.connect()
        target_conn = target_eng.connect()

        version = base_conn.execute("SELECT current_setting('server_version_num')::int").scalar()
        if version < 100000:
            click.echo(click.style('!! This utility requires at least Postgres 10', fg='red', bold=True), err=True)
            sys.exit(1)

        if verbose:
            click.echo(click.style('** Calculating differences', fg='cyan'), err=True)

        # use migra to figure out the SQL diff
        m = Migration(base_conn, target_conn) if reverse else Migration(target_conn, base_conn)
        m.set_safety(False)
        m.add_all_changes()
        diff = m.sql
    finally:
        # clean up connections and engines, so that no open connection remains
        # (otherwise the dropdb operation won't work)
        if base_conn:
            base_conn.close()
            base_eng.dispose()
        if target_conn:
            target_conn.close()
            target_eng.dispose()
        _checked_call(verbose, ['dropdb', temp_dbname])

    if not diff:
        click.echo(click.style('No changes found :)', fg='green', bold=True), err=True)
        return
    elif should_strip_ansi(sys.stdout):
        click.echo(diff)
    else:
        pretty_diff = _checked_call(verbose,
                                    ['pygmentize', '-l', 'sql', '-f', 'terminal256', '-O', 'style=native,bg=dark'],
                                    return_output=True, input=diff)
        click.echo(pretty_diff)
def diff(
    sql_statements: str,
    db: str,
    unsafe: bool = False,
    apply: bool = False,
    chatty: bool = False,
):
    base_uri = copy_url(db)

    patch = create_admin_patch(base_uri)
    patch.start()

    roles, statements = extract_roles(sql_statements)

    with temporary_database(base_uri) as sTemp:
        roles_m = Migration(sTemp, NullInspector())
        from_roles = roles_m.changes.i_from.roles

        # Exclude all unspecified roles
        for k in set(from_roles.keys()) - set(roles.keys()):
            del from_roles[k]

        # Compare roles
        roles_m.add(statements_for_changes(from_roles, roles))

        if roles_m.statements:
            roles_m.set_safety(True)
            roles_sql = roles_m.sql
            if chatty:
                print(pg_format(roles_sql.encode(), unquote=False).decode())
                print(cf.bold("Applying roles..."))
            roles_m.apply()
            sTemp.commit()
            if chatty:
                print(cf.bold("Done."))

        target_exists = database_exists(base_uri, test_can_select=True)

        with (S(db) if target_exists else temporary_database(base_uri)) as sFrom:
            # Run schema in temporary database
            try:
                raw_execute(sTemp, statements)
            except Exception as e:
                raise SQLSyntaxError(e, statements)

            # Compare
            m = Migration(sFrom, sTemp)
            m.set_safety(not unsafe)
            m.add_all_changes(privileges=True)

            if not m.statements:
                if chatty:
                    print(cf.bold("All done! ✨"))
                    print(f'Database "{base_uri.database}" is up to date.')
                return None, False

            sql = ""

            # Get SQL
            try:
                sql = m.sql
            except UnsafeMigrationException:
                m.set_safety(False)
                sql = m.sql
                formatted = pg_format(sql.encode(), unquote=False).decode()
                return formatted, True

            formatted = pg_format(sql.encode(), unquote=False).decode()

            if chatty:
                print(formatted)

            if apply:
                if chatty:
                    print(cf.bold("Applying..."))
                m.apply()
                if chatty:
                    print(cf.bold("All done! ✨"))
                    print(f'Database "{base_uri.database}" has been updated.')

            return formatted, False
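
# Hedged usage sketch (not from the original sources): calling diff() above with the
# contents of a schema file and a database URL; the file path and URL are placeholders.
# The second return value signals that destructive statements were generated.
def preview_changes():
    with open('schema.sql') as f:
        desired = f.read()
    sql, needs_unsafe = diff(desired, 'postgresql:///appdb', unsafe=False, apply=False, chatty=False)
    if needs_unsafe:
        print('-- destructive statements generated; re-run with unsafe=True after review')
    elif sql:
        print(sql)
    else:
        print('Database is already up to date.')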