def compile_ir_to_sql(
        ir_expr: irast.Base, *,
        output_format: Optional[OutputFormat]=None,
        ignore_shapes: bool=False,
        explicit_top_cast: Optional[irast.TypeRef]=None,
        use_named_params: bool=False,
        expected_cardinality_one: bool=False,
        pretty: bool=True) -> Tuple[str, Dict[str, int]]:
    """Compile an IR expression into SQL text and its argument-name map."""
    sql_tree = compile_ir_to_sql_tree(
        ir_expr,
        output_format=output_format,
        ignore_shapes=ignore_shapes,
        explicit_top_cast=explicit_top_cast,
        use_named_params=use_named_params,
        expected_cardinality_one=expected_cardinality_one)

    dump_enabled = debug.flags.edgeql_compile
    if dump_enabled:  # pragma: no cover
        debug.header('SQL Tree')
        debug.dump(sql_tree)

    # Only full queries can be rendered here; anything else is a
    # compiler invariant violation.
    assert isinstance(sql_tree, pgast.Query), "expected instance of ast.Query"
    argmap = sql_tree.argnames

    # Generate query text
    generator = _run_codegen(sql_tree, pretty=pretty)
    sql_text = ''.join(generator.result)

    if dump_enabled:  # pragma: no cover
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')

    return sql_text, argmap
def _compile_and_apply_ddl_command(self, ctx: CompileContext, cmd):
    """Canonicalize *cmd*, build its SQL delta plan, and update the
    transaction's schema; returns the resulting DDL query."""
    tx = ctx.state.current_tx()
    schema = tx.get_schema()

    if debug.flags.delta_plan_input:
        debug.header('Delta Plan Input')
        debug.dump(cmd)

    # Dry-run application against the current schema so the
    # delta commands get canonicalized first.
    delta_context = self._new_delta_context(ctx)
    cmd.apply(schema, context=delta_context)
    cmd.canonical = True

    # Build the native delta plan; this also yields the updated schema.
    schema, plan = self._process_delta(ctx, cmd, schema)

    # Database-level commands cannot run inside a PL block.
    is_db_cmd = isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase))
    sql_block = pg_dbops.SQLBlock() if is_db_cmd else pg_dbops.PLTopBlock()
    plan.generate(sql_block)
    sql = sql_block.to_string().encode('utf-8')

    tx.update_schema(schema)

    if debug.flags.delta_execute:
        debug.header('Delta Script')
        debug.dump_code(sql, lexer='sql')

    return dbstate.DDLQuery(sql=(sql, ))
async def _execute_block(conn, block: dbops.PLBlock) -> None:
    """Execute *block* on *conn*.

    Transactional blocks run as a single script; non-transactional
    blocks are split into individual statements.
    """
    if block.is_transactional():
        stmts = [block.to_string()]
    else:
        stmts = block.get_statements()

    if debug.flags.bootstrap:
        debug.header('Bootstrap')
        debug.dump_code(';\n'.join(stmts), lexer='sql')

    for stmt in stmts:
        await _execute(conn, stmt)
async def _execute_ddl(conn, sql_text):
    """Execute DDL *sql_text* on *conn*, attaching source context on failure.

    If the raised error carries a position (directly, via an internal
    position, or via a PL/pgSQL traceback line in its context), a
    parser context pointing at the failing location is attached to the
    exception before it is re-raised.
    """
    try:
        if debug.flags.bootstrap:
            debug.header('Delta Script')
            debug.dump_code(sql_text, lexer='sql')

        await conn.execute(sql_text)
    except Exception as e:
        position = getattr(e, 'position', None)
        internal_position = getattr(e, 'internal_position', None)
        context = getattr(e, 'context', '')

        # Bug fix: pl_func_line was previously assigned only inside
        # `if context:`, so the `elif pl_func_line:` test below raised
        # NameError (masking the original error) whenever context was
        # empty and both positions were None.
        pl_func_line = None
        if context:
            match = re.search(
                r'^PL/pgSQL function inline_code_block line (\d+).*',
                context, re.M)
            if match:
                pl_func_line = int(match.group(1))

        point = None
        if position is not None:
            position = int(position)
            point = parser_context.SourcePoint(
                None, None, position)
            text = e.query
            if text is None:
                # Parse errors
                text = sql_text
        elif internal_position is not None:
            internal_position = int(internal_position)
            point = parser_context.SourcePoint(
                None, None, internal_position)
            text = e.internal_query
        elif pl_func_line:
            point = parser_context.SourcePoint(
                pl_func_line, None, None)
            text = sql_text

        if point is not None:
            context = parser_context.ParserContext(
                'query', text, start=point, end=point)
            exceptions.replace_context(e, context)
        raise
async def _configure(schema, conn, cluster, *, insecure=False, testmode=False):
    """Apply initial system configuration and persist it as database metadata."""
    config_scripts = []

    if testmode:
        settings = {}
    else:
        # Size Postgres memory settings relative to total RAM.
        memory_kb = psutil.virtual_memory().total // 1024
        settings = {
            'shared_buffers': f'"{int(memory_kb * 0.2)}kB"',
            'effective_cache_size': f'"{int(memory_kb * 0.5)}kB"',
            'query_work_mem': f'"{6 * (2 ** 10)}kB"',
        }

        for name, val in settings.items():
            config_scripts.append(f'''
                CONFIGURE SYSTEM SET {name} := {val};
            ''')

    if insecure:
        config_scripts.append('''
            CONFIGURE SYSTEM INSERT Auth {
                priority := 0,
                method := (INSERT Trust),
            };
        ''')

    config_spec = config.get_settings()

    for eql in config_scripts:
        _, sql = compiler.compile_bootstrap_script(
            schema, schema, eql, single_statement=True)

        if debug.flags.bootstrap:
            debug.header('Bootstrap')
            debug.dump_code(sql, lexer='sql')

        op_json = await conn.fetchval(sql)
        if op_json is not None and isinstance(op_json, str):
            op = config.Operation.from_json(op_json)
            # Each applied operation replaces the settings map.
            settings = config_op_apply = op.apply(config_spec, immutables.Map())

    config_json = config.to_json(config_spec, settings)

    block = dbops.PLTopBlock()
    dbops.UpdateMetadata(
        dbops.Database(name=edbdef.EDGEDB_TEMPLATE_DB),
        {'sysconfig': json.loads(config_json)},
    ).generate(block)

    await _execute_block(conn, block)
def compile_ir_to_sql(
        ir_expr: irast.Base, *,
        output_format: typing.Optional[OutputFormat] = None,
        ignore_shapes: bool = False,
        explicit_top_cast: typing.Optional[irast.TypeRef] = None,
        timer=None,
        use_named_params: bool = False,
        expected_cardinality_one: bool = False,
        pretty: bool = True) -> typing.Tuple[str, typing.Dict[str, int]]:
    """Compile an IR expression into SQL text and its argument-name map.

    Args:
        ir_expr: the IR expression to compile.
        output_format: desired format of the top-level output.
        ignore_shapes: whether to skip shape compilation.
        explicit_top_cast: optional type to cast the top-level result to.
        timer: optional profiling timer; when given, both the tree
            compilation and the codegen phases are recorded under the
            'compile_ir_to_sql' key (same accounting as before).
        use_named_params: whether to use named query parameters.
        expected_cardinality_one: whether the query is expected to
            return at most one row.
        pretty: whether to pretty-print the generated SQL.

    Returns:
        A (sql_text, argmap) tuple.
    """
    import contextlib

    def _maybe_timed():
        # Collapses the previously duplicated timer/no-timer branches:
        # with no timer we use a no-op context manager.
        if timer is None:
            return contextlib.nullcontext()
        return timer.timeit('compile_ir_to_sql')

    with _maybe_timed():
        qtree = compile_ir_to_sql_tree(
            ir_expr,
            output_format=output_format,
            ignore_shapes=ignore_shapes,
            explicit_top_cast=explicit_top_cast,
            use_named_params=use_named_params,
            expected_cardinality_one=expected_cardinality_one)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL Tree')
        debug.dump(qtree)

    argmap = qtree.argnames

    # Generate query text
    with _maybe_timed():
        codegen = _run_codegen(qtree, pretty=pretty)
    sql_text = ''.join(codegen.result)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')

    return sql_text, argmap
async def _configure(
    schema: s_schema.Schema,
    compiler: edbcompiler.Compiler,
    conn: asyncpg_con.Connection,
    cluster: pgcluster.BaseCluster,
    *,
    insecure: bool = False,
) -> None:
    """Apply bootstrap configuration and persist it as template-db metadata."""
    config_spec = config.get_settings()

    bootstrap_scripts = []
    settings: Mapping[str, config.SettingValue] = {}

    if insecure:
        bootstrap_scripts.append('''
            CONFIGURE SYSTEM INSERT Auth {
                priority := 0,
                method := (INSERT Trust),
            };
        ''')

    for eql in bootstrap_scripts:
        _, sql = compile_bootstrap_script(
            compiler, schema, eql, single_statement=True,
        )

        if debug.flags.bootstrap:
            debug.header('Bootstrap')
            debug.dump_code(sql, lexer='sql')

        op_json = await conn.fetchval(sql)
        if op_json is not None and isinstance(op_json, str):
            op = config.Operation.from_json(op_json)
            settings = op.apply(config_spec, immutables.Map())

    config_json = config.to_json(config_spec, settings, include_source=False)

    block = dbops.PLTopBlock()
    metadata = {'sysconfig': json.loads(config_json)}
    dbops.UpdateMetadata(
        dbops.Database(name=edbdef.EDGEDB_TEMPLATE_DB),
        metadata,
    ).generate(block)

    await _execute_block(conn, block)
async def _configure(schema, conn, cluster, *, insecure=False, testmode=False):
    """Apply initial configuration and write the system-config overrides file."""
    config_scripts = []

    if testmode:
        settings = {}
    else:
        # Derive Postgres memory settings from total system RAM.
        memory_kb = psutil.virtual_memory().total // 1024
        settings = {
            'shared_buffers': f'"{int(memory_kb * 0.2)}kB"',
            'effective_cache_size': f'"{int(memory_kb * 0.5)}kB"',
            'query_work_mem': f'"{6 * (2 ** 10)}kB"',
        }

        for name, val in settings.items():
            config_scripts.append(f'''
                CONFIGURE SYSTEM SET {name} := {val};
            ''')

    if insecure:
        config_scripts.append('''
            CONFIGURE SYSTEM INSERT Auth {
                priority := 0,
                method := (INSERT Trust),
            };
        ''')

    config_spec = config.get_settings()

    for eql in config_scripts:
        _, sql = compiler.compile_bootstrap_script(
            schema, schema, eql, single_statement=True)

        if debug.flags.bootstrap:
            debug.header('Bootstrap')
            debug.dump_code(sql, lexer='sql')

        op_json = await conn.fetchval(sql)
        if op_json is not None and isinstance(op_json, str):
            op = config.Operation.from_json(op_json)
            settings = op.apply(config_spec, immutables.Map())

    # Persist the effective settings next to the cluster data.
    overrides_fn = os.path.join(cluster.get_data_dir(), 'config_sys.json')
    with open(overrides_fn, 'wt') as f:
        f.write(config.to_json(config_spec, settings))
def compile_ir_to_sql(
    ir_expr: irast.Base,
    *,
    output_format: Optional[OutputFormat] = None,
    ignore_shapes: bool = False,
    explicit_top_cast: Optional[irast.TypeRef] = None,
    singleton_mode: bool = False,
    use_named_params: bool = False,
    expected_cardinality_one: bool = False,
    pretty: bool = True,
    backend_runtime_params: Optional[pgparams.BackendRuntimeParams] = None,
) -> Tuple[str, Dict[str, pgast.Param]]:
    """Lower an IR expression to SQL text plus its parameter map."""
    sql_ast = compile_ir_to_sql_tree(
        ir_expr,
        output_format=output_format,
        ignore_shapes=ignore_shapes,
        explicit_top_cast=explicit_top_cast,
        singleton_mode=singleton_mode,
        use_named_params=use_named_params,
        expected_cardinality_one=expected_cardinality_one,
        backend_runtime_params=backend_runtime_params,
    )

    if (  # pragma: no cover
            debug.flags.edgeql_compile
            or debug.flags.edgeql_compile_sql_ast):
        debug.header('SQL Tree')
        debug.dump(sql_ast)

    # Fall back to an empty map when the tree is not a Query or
    # carries no argument names.
    if isinstance(sql_ast, pgast.Query) and sql_ast.argnames:
        argmap = sql_ast.argnames
    else:
        argmap = {}

    # Generate query text
    sql_text = run_codegen(sql_ast, pretty=pretty)

    if (  # pragma: no cover
            debug.flags.edgeql_compile
            or debug.flags.edgeql_compile_sql_text):
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')

    if debug.flags.edgeql_compile_sql_reordered_text:  # pragma: no cover
        debug.header('Reordered SQL')
        reordered_sql = run_codegen(sql_ast, pretty=True, reordered=True)
        debug.dump_code(reordered_sql, lexer='sql')

    return sql_text, argmap
async def _execute_block(conn, block: dbops.PLBlock) -> None:
    """Render *block* to a single SQL script and execute it on *conn*."""
    script = block.to_string()

    if debug.flags.bootstrap:
        debug.header('Bootstrap')
        debug.dump_code(script, lexer='sql')

    await _execute(conn, script)
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    options: Optional[CompilerOptions] = None,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.
    It assumes that *tree* represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        options:
            An optional :class:`edgeql.compiler.options.CompilerOptions`
            instance specifying compilation options.

        allow_writing_protected_ptrs:
            If ``True``, allows protected object properties or links to
            be overwritten in `INSERT` shapes.
            (NOTE(review): this is not a parameter of this function;
            presumably it is a field of *options* — verify against
            CompilerOptions.)

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if options is None:
        options = CompilerOptions()

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_text:
        debug.header('EdgeQL Text')
        debug.dump_code(qlcodegen.generate_source(tree, pretty=True))
    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_ast:
        debug.header('Compiler Options')
        debug.dump(options.__dict__)
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx_mod.init_context(schema=schema, options=options)

    # Main compilation passes: dispatch over the AST, then finalize
    # the resulting set into a complete IR expression.
    ir_set = dispatch_mod.compile(tree, ctx=ctx)
    ir_expr = stmtctx_mod.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            # Positional ($0, $1, ...) parameters must form a dense
            # 0..N-1 range; report any gaps.
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_scope:
        debug.header('Scope Tree')
        print(ctx.path_scope.pdebugformat())

        # Also build and dump a mapping from scope ids to
        # paths that appear directly at them.
        scopes: Dict[int, Set[irast.PathId]] = {
            k: set() for k in sorted(
                node.unique_id
                for node in ctx.path_scope.descendants
                if node.unique_id
            )
        }
        for ir_set in ctx.env.set_types:
            if ir_set.path_scope_id and ir_set.path_scope_id in scopes:
                scopes[ir_set.path_scope_id].add(ir_set.path_id)
        debug.dump(scopes)

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_ir:
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Parse and apply a block of DDL/migration statements to *schema*.

    Supports plain DDL statements as well as the
    START/POPULATE/COMMIT MIGRATION statement sequence; returns the
    schema resulting from applying all statements in order.

    Raises:
        errors.QueryError: for migration statements outside a
            migration block, with the statement's source context.
        ValueError: for statements that are not DDL at all.
    """
    statements = edgeql.parse_block(ddl)

    # Migration state: non-None migration_target means we are inside
    # a START MIGRATION ... COMMIT MIGRATION block.
    current_schema = schema
    target_schema = None
    migration_schema = None
    migration_target = None
    migration_script = []

    for stmt in statements:
        if isinstance(stmt, qlast.StartMigration):
            # START MIGRATION
            if target_schema is None:
                # Lazily load the std schema as the SDL base.
                target_schema = _load_std_schema()

            migration_target = s_ddl.apply_sdl(
                stmt.target,
                base_schema=target_schema,
                current_schema=current_schema,
                testmode=True,
            )

            migration_schema = current_schema

            ddl_plan = None

        elif isinstance(stmt, qlast.PopulateMigration):
            # POPULATE MIGRATION
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected POPULATE MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            # Diff the pre-migration schema against the SDL target and
            # convert the delta into DDL statements for the script.
            migration_diff = s_ddl.delta_schemas(
                migration_schema,
                migration_target,
            )

            if debug.flags.delta_plan:
                debug.header('Populate Migration Diff')
                debug.dump(migration_diff, schema=schema)

            new_ddl = s_ddl.ddlast_from_delta(
                migration_schema,
                migration_target,
                migration_diff,
            )

            migration_script.extend(new_ddl)

            if debug.flags.delta_plan:
                debug.header('Populate Migration DDL AST')
                text = []
                for cmd in new_ddl:
                    debug.dump(cmd)
                    text.append(edgeql.generate_source(cmd, pretty=True))
                debug.header('Populate Migration DDL Text')
                debug.dump_code(';\n'.join(text) + ';')

        elif isinstance(stmt, qlast.CommitMigration):
            # COMMIT MIGRATION: wrap the accumulated script into a
            # CreateMigration chained onto the last recorded migration.
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected COMMIT MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            last_migration = current_schema.get_last_migration()
            if last_migration:
                last_migration_ref = s_utils.name_to_ast_ref(
                    last_migration.get_name(current_schema),
                )
            else:
                last_migration_ref = None

            create_migration = qlast.CreateMigration(
                body=qlast.MigrationBody(commands=tuple(migration_script)),
                parent=last_migration_ref,
            )

            ddl_plan = s_ddl.delta_from_ddl(
                create_migration,
                schema=migration_schema,
                modaliases={None: default_module},
                testmode=True,
            )

            if debug.flags.delta_plan:
                debug.header('Delta Plan')
                debug.dump(ddl_plan, schema=schema)

            # Leave the migration block.
            migration_schema = None
            migration_target = None
            migration_script = []

        elif isinstance(stmt, qlast.DDL):
            if migration_target is not None:
                # Inside a migration block, DDL is deferred into the
                # migration script instead of being applied directly.
                migration_script.append(stmt)
                ddl_plan = None
            else:
                ddl_plan = s_ddl.delta_from_ddl(
                    stmt,
                    schema=current_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

                if debug.flags.delta_plan:
                    debug.header('Delta Plan')
                    debug.dump(ddl_plan, schema=schema)

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        # Apply whatever plan this statement produced (if any).
        if ddl_plan is not None:
            context = sd.CommandContext()
            context.testmode = True
            current_schema = ddl_plan.apply(current_schema, context)

    return current_schema