import sys

from sqlalchemy import MetaData, Table, ForeignKeyConstraint
from sqlalchemy.engine import reflection
from sqlalchemy.schema import DropConstraint, DropTable

# create_parser, config_from_args and get_engine are project-local helpers
# (argument parsing and engine construction), assumed importable here.


def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('-f', dest='force', default=False,
                        action='store_true',
                        help="force deletion without asking for confirmation")
    args = parser.parse_args()
    if not args.force:
        # Don't shadow the builtin `input`.
        answer = raw_input('Delete all data? No backup will be done! '
                           'If so type "yes": ')
        if answer != 'yes':
            print 'Answer was not "yes", but: "%s"\nAborting.' % answer
            sys.exit(1)
    config = config_from_args(args)
    engine = get_engine(config, echo=True)
    conn = engine.connect()
    # The transaction only applies if the DB supports
    # transactional DDL, e.g. PostgreSQL, MS SQL Server.
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    # Gather all data first before dropping anything.
    # Some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()
    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            # Placeholder constraint object; only the name is needed
            # to emit DROP CONSTRAINT.
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    # Drop all foreign key constraints first, so the tables can then
    # be dropped in any order.
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))
    for table in tbs:
        conn.execute(DropTable(table))
    trans.commit()
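# The explicit constraint-dropping loop above side-steps circular
# foreign-key dependencies, which a plain MetaData.drop_all() may not
# handle depending on the SQLAlchemy version. When no such cycles exist,
# reflection alone is enough. A minimal sketch of that simpler path;
# drop_everything_simple is a hypothetical name, not part of these scripts.

from sqlalchemy import MetaData


def drop_everything_simple(engine):
    """Reflect the full schema and drop it in dependency order."""
    metadata = MetaData()
    metadata.reflect(bind=engine)   # load every table from the live DB
    metadata.drop_all(bind=engine)  # emits DROP TABLE in FK-safe order

# Usage (hypothetical): drop_everything_simple(get_engine(config))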
import os
import subprocess
import sys

# create_parser, config_from_args and get_engine are project-local helpers
# (argument parsing and engine construction), assumed importable here.


def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('--dump', default=None, required=True,
                        help="Path to the SQL dump file.")
    args = parser.parse_args()
    # Check that the dump file exists.
    dump_path = os.path.join(os.getcwd(), args.dump)
    if not os.path.exists(dump_path):
        parser.error('SQL dump file "%s" does not exist.' % args.dump)
    # Get an engine to obtain the driver type and connection details.
    engine = get_engine(config_from_args(args))
    drivername = engine.url.drivername
    error = False
    if drivername == 'postgresql':
        # Use the psql command line client for imports.
        # pg_dump by default emits COPY ... FROM STDIN statements,
        # which the psycopg2 driver can't handle. pg_dump can emit
        # INSERTs instead (--inserts), but those are dead slow to import.
        url_vars = engine.url.__dict__.copy()
        url_vars['dump_path'] = dump_path
        command = ('psql -U {username} -h {host} -p {port} '
                   '-d {database} -f {dump_path}').format(**url_vars)
        print 'Executing command: %s' % command
        if engine.url.password is not None:
            print 'Prefixing it with PGPASSWORD="******"'
            command = 'PGPASSWORD="%s" %s' % (engine.url.password, command)
        error = subprocess.call(command, shell=True)
    else:
        print ('Action for driver "%s" is not defined.\n'
               'Note: sqlite3 has a non-standard executescript() '
               'method.' % drivername)
        sys.exit(1)
    if error:
        print 'Process exited with error: %s' % error
        sys.exit(error)
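# Interpolating the password into the shell command makes it visible in
# `ps` output and shell history. A minimal alternative sketch, assuming
# the same SQLAlchemy URL object; run_psql is a hypothetical name, not
# part of these scripts.

import os
import subprocess


def run_psql(url, dump_path):
    """Invoke psql, passing the password via the child's environment."""
    env = os.environ.copy()
    if url.password is not None:
        env['PGPASSWORD'] = url.password  # never appears on the command line
    cmd = ['psql', '-U', url.username, '-h', url.host,
           '-p', str(url.port), '-d', url.database, '-f', dump_path]
    # No shell=True: an argument list also avoids quoting problems.
    return subprocess.call(cmd, env=env)

# Usage (hypothetical): run_psql(engine.url, dump_path)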