def does_db_exist():
    try:
        with get_new_connection() as connection:
            connection.execute("SELECT ast::Name;")
    except Exception:
        return False
    else:
        return True
def run_benchmarks(queries, repeat=5):
    results = defaultdict(list)
    with get_new_connection() as connection:
        for query in queries:
            for _ in range(repeat):
                start = time.perf_counter()
                run_query_on_connection(connection, query)
                results[query].append(time.perf_counter() - start)
    return {
        key: round(statistics.fmean(val), PRECISION)
        for key, val in results.items()
    }
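# Usage sketch: how the query-level run_benchmarks above might be called.
# The ReizQL query strings and the _example_query_benchmark name are
# hypothetical; only the call shape and the returned {query: mean} mapping
# come from the function above.
def _example_query_benchmark():
    sample_queries = [
        "Name()",               # hypothetical ReizQL query
        "Call(Name('print'))",  # hypothetical ReizQL query
    ]
    for query, mean in run_benchmarks(sample_queries, repeat=3).items():
        print(f"{query}: {mean}s (mean of 3 runs)")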
def run_benchmarks(iterations, results_file=None):
    if results_file:
        file = open(results_file, "w")
    else:
        file = nullcontext(sys.stdout)

    with get_new_connection() as connection:
        benchmarks = {
            test_case.name: test_case.run_benchmarks(connection, times=iterations)
            for test_case in collect_tests(log_skip=False)
        }

    with file as stream:
        json.dump(benchmarks, stream, indent=4)
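# Usage sketch: this run_benchmarks variant (apparently from a different
# module, since it reuses the name above) reports per-test-case timings.
# The "benchmarks.json" path and the _example_benchmark_report name are
# hypothetical; omitting results_file writes the JSON report to stdout.
def _example_benchmark_report():
    run_benchmarks(10, results_file="benchmarks.json")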
def drop_and_load_db(schema, reboot_server=True):
    if reboot_server:
        drop_all_connection(config.database.cluster)
        logger.info("Successfully rebooted...")

    with get_new_connection(database="edgedb") as connection:
        with suppress(InvalidReferenceError):
            connection.execute(f"DROP DATABASE {config.database.database}")
        logger.info("Creating the database %s...", config.database.database)
        connection.execute(f"CREATE DATABASE {config.database.database}")
        logger.info("Database created...")

    with get_new_connection() as connection:
        with open(schema) as stream:
            content = stream.read()
        logger.info("Executing schema on %s...", connection.dbname)
        connection.execute(content)
        logger.info("Starting migration...")
        connection.execute("POPULATE MIGRATION")
        logger.info("Committing the schema...")
        connection.execute("COMMIT MIGRATION")

    logger.info("Successfully reset!")
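# Usage sketch: resetting the configured database from a schema file without
# restarting the EdgeDB server. The "schemas/reiz.esdl" path and the
# _example_reset_database name are hypothetical.
def _example_reset_database():
    drop_and_load_db("schemas/reiz.esdl", reboot_server=False)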
def run_tests(allow_fail):
    fail = False
    with get_new_connection() as connection:
        for test_case in collect_tests():
            try:
                test_case.execute(connection)
            except ExpectationFailed:
                logger.info("%r failed", test_case.name)
            except Exception:
                logger.exception("%r terminated", test_case.name)
            else:
                logger.info("%r succeeded", test_case.name)
                continue

            if test_case.name not in allow_fail:
                fail = True
    return fail
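# Usage sketch: turning run_tests into a process exit code, as a CLI entry
# point might. The "flaky_test_case" name and the _example_test_run name are
# hypothetical; run_tests returns True only when a test outside allow_fail
# fails or terminates.
def _example_test_run():
    import sys

    failed = run_tests(allow_fail={"flaky_test_case"})
    sys.exit(1 if failed else 0)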
@classmethod  # assumed: the `cls` parameter implies this is a classmethod
def from_db(cls):
    cache = cls()
    with get_new_connection() as connection:
        cache.sync(connection)
    return cache
def run_raw_edgeql(query, times):
    with get_new_connection() as connection:
        return timeit.timeit(
            "connection.query(query)",
            number=times,
            globals=locals(),
        )
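# Usage sketch: timing a raw EdgeQL query for comparison against compiled
# ReizQL queries. The query string and the _example_raw_timing name are
# hypothetical; run_raw_edgeql returns the total wall-clock time that timeit
# measured across all runs.
def _example_raw_timing():
    total = run_raw_edgeql("SELECT ast::Name LIMIT 10;", times=5)
    print(f"5 runs took {total:.4f}s in total")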
def run_query(reiz_ql, limit=DEFAULT_LIMIT):
    with get_new_connection() as connection:
        return run_query_on_connection(connection, reiz_ql, limit=limit)
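# Usage sketch: running a single ReizQL search with an explicit result limit.
# The query string and the _example_search name are hypothetical; the shape
# of the returned results depends on run_query_on_connection, which is not
# shown here.
def _example_search():
    results = run_query("Call(Name('print'))", limit=10)
    print(results)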