async def get_output_schema(
    schema_name: str = "public",
    include: t.Optional[t.List[str]] = None,
    exclude: t.Optional[t.List[str]] = None,
    engine: t.Optional[Engine] = None,
) -> OutputSchema:
    """
    Inspect the database and build an ``OutputSchema`` for its tables.

    :param schema_name:
        Name of the schema.
    :param include:
        Optional list of table names. Only creates the specified tables.
    :param exclude:
        Optional list of table names. Excludes the specified tables.
    :param engine:
        The ``Engine`` instance to use for making database queries. If not
        specified, then ``engine_finder`` is used to get the engine from
        ``piccolo_conf.py``.
    :returns:
        OutputSchema
    """
    if engine is None:
        engine = engine_finder()

    excluded: t.List[str] = [] if exclude is None else exclude

    if engine is None:
        raise ValueError(
            "Unable to find the engine - make sure piccolo_conf is on the "
            "path.")

    if not isinstance(engine, PostgresEngine):
        raise ValueError(
            "This feature is currently only supported in Postgres.")

    class Schema(Table, db=engine):
        """
        Just used for making raw queries on the db.
        """

        pass

    # When no explicit table list is given, discover every table in the
    # schema.
    tablenames = (
        include
        if include
        else await get_tablenames(Schema, schema_name=schema_name)
    )

    coroutines = [
        create_table_class_from_db(
            table_class=Schema,
            tablename=tablename,
            schema_name=schema_name,
        )
        for tablename in tablenames
        if tablename not in excluded
    ]

    per_table_schemas = await asyncio.gather(*coroutines)

    # Merge all the output schemas to a single OutputSchema object
    output_schema: OutputSchema = sum(per_table_schemas)  # type: ignore

    # Sort the tables based on their ForeignKeys.
    output_schema.tables = sort_table_classes(
        sorted(output_schema.tables, key=lambda table: table._meta.tablename)
    )
    output_schema.imports = sorted(set(output_schema.imports))

    return output_schema
def run():
    """
    Launch the SQL shell for the configured engine. For Postgres this will
    be psql, and for SQLite it will be sqlite3.
    """
    engine: t.Optional[Engine] = engine_finder()

    if engine is None:
        raise ValueError(
            "Unable to find the engine - make sure piccolo_conf is on the "
            "path."
        )

    # Heavily inspired by Django's dbshell command
    if isinstance(engine, PostgresEngine):
        config = engine.config
        command = ["psql"]

        user = config.get("user")
        if user:
            command += ["-U", user]

        host = config.get("host")
        if host:
            command += ["-h", host]

        port = config.get("port")
        if port:
            command += ["-p", str(port)]

        command += [config.get("database")]

        original_sigint = signal.getsignal(signal.SIGINT)

        # psql reads the password from the environment rather than the
        # command line.
        subprocess_env = os.environ.copy()
        password = config.get("password")
        if password:
            subprocess_env["PGPASSWORD"] = str(password)

        try:
            # Allow SIGINT to pass to psql to abort queries.
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            print("Enter \\q to exit")
            subprocess.run(command, check=True, env=subprocess_env)
        finally:
            # Restore the original SIGINT handler.
            signal.signal(signal.SIGINT, original_sigint)
    elif isinstance(engine, SQLiteEngine):
        print("Enter .quit to exit")
        subprocess.run(
            ["sqlite3", engine.connection_kwargs.get("database")],
            check=True,
        )
import asyncio from piccolo.engine.finder import engine_finder ENGINE = engine_finder() async def drop_tables(): for table in [ "ticket", "concert", "venue", "band", "manager", "poster", "migration", "musician", "my_table", "recording_studio", "shirt", "mega_table", "small_table", ]: await ENGINE._run_in_new_connection(f"DROP TABLE IF EXISTS {table}") def pytest_sessionstart(session): """ Make sure all the tables have been dropped. https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_configure