def compile_ir_to_sql(
        ir_expr: irast.Base, *,
        output_format: Optional[OutputFormat]=None,
        ignore_shapes: bool=False,
        explicit_top_cast: Optional[irast.TypeRef]=None,
        use_named_params: bool=False,
        expected_cardinality_one: bool=False,
        pretty: bool=True) -> Tuple[str, Dict[str, int]]:
    """Compile an IR expression into SQL text and its argument-name map."""
    # Phase 1: lower the IR into a SQL AST.
    sql_tree = compile_ir_to_sql_tree(
        ir_expr,
        output_format=output_format,
        ignore_shapes=ignore_shapes,
        explicit_top_cast=explicit_top_cast,
        use_named_params=use_named_params,
        expected_cardinality_one=expected_cardinality_one)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL Tree')
        debug.dump(sql_tree)

    assert isinstance(sql_tree, pgast.Query), "expected instance of ast.Query"
    arg_map = sql_tree.argnames

    # Phase 2: render the SQL AST to text.
    generator = _run_codegen(sql_tree, pretty=pretty)
    rendered_sql = ''.join(generator.result)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL')
        debug.dump_code(rendered_sql, lexer='sql')

    return rendered_sql, arg_map
def _compile_and_apply_ddl_command(self, ctx: CompileContext, cmd):
    """Compile a single DDL command to SQL and apply it to the
    transaction-local schema.

    Returns a ``dbstate.DDLQuery`` wrapping the generated SQL script.
    """
    current_tx = ctx.state.current_tx()
    schema = current_tx.get_schema()

    if debug.flags.delta_plan_input:
        debug.header('Delta Plan Input')
        debug.dump(cmd)

    # Do a dry-run on test_schema to canonicalize
    # the schema delta-commands.
    test_schema = schema
    context = self._new_delta_context(ctx)
    cmd.apply(test_schema, context=context)
    cmd.canonical = True

    # Apply and adapt delta, build native delta plan, which
    # will also update the schema.
    schema, plan = self._process_delta(ctx, cmd, schema)

    # CREATE/DROP DATABASE plans get a plain SQL block; all other DDL is
    # wrapped in a PL top block.
    if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
        block = pg_dbops.SQLBlock()
    else:
        block = pg_dbops.PLTopBlock()

    plan.generate(block)
    sql = block.to_string().encode('utf-8')

    # Publish the updated schema to the current transaction state.
    current_tx.update_schema(schema)

    if debug.flags.delta_execute:
        debug.header('Delta Script')
        debug.dump_code(sql, lexer='sql')

    return dbstate.DDLQuery(sql=(sql, ))
def _process_delta(ctx, delta, schema):
    """Adapt and process the delta command.

    Canonicalizes *delta* if needed, adapts it to the backend-specific
    command tree, applies it to *schema*, and returns the updated schema
    together with the adapted delta.
    """
    if debug.flags.delta_plan:
        debug.header('Delta Plan')
        debug.dump(delta, schema=schema)

    # NOTE: a throwaway CommandContext (with stdmode set) that was built
    # here in the original and never passed to any call has been removed.
    if not delta.canonical:
        # Canonicalize
        sd.apply(delta, schema=schema)

    # Translate the generic delta into the backend-specific form.
    delta = delta_cmds.CommandMeta.adapt(delta)
    context = sd.CommandContext(
        stdmode=True,
        backend_runtime_params=ctx.cluster.get_runtime_params(),
    )
    schema = sd.apply(delta, schema=schema, context=context)

    if debug.flags.delta_pgsql_plan:
        debug.header('PgSQL Delta Plan')
        debug.dump(delta, schema=schema)

    return schema, delta
async def _make_stdlib(testmode: bool):
    """Build the standard library schema and its SQL bootstrap script.

    Returns a ``(schema, sql_text)`` pair.  When *testmode* is set, the
    ``_testmode`` module is included in the compiled stdlib.
    """
    schema = s_schema.Schema()
    # The block type (SQL vs PL) is decided lazily based on the first plan.
    current_block = None

    # Concatenate all stdlib module sources into a single DDL script.
    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, schema=schema, modaliases={None: 'std'}, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Do a dry-run on test_schema to canonicalize
        # the schema delta-commands.
        test_schema = schema
        context = sd.CommandContext()
        context.stdmode = True
        delta_command.apply(test_schema, context=context)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            # DATABASE commands require a plain SQL block; reject a mix
            # with regular DDL that already started a PL block.
            if (current_block is not None and
                    not isinstance(current_block, dbops.SQLBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.SQLBlock()
        else:
            # Regular DDL requires a PL top block; reject the converse mix.
            if (current_block is not None and
                    not isinstance(current_block, dbops.PLTopBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.PLTopBlock()

        plan.generate(current_block)

    sql_text = current_block.to_string()

    return schema, sql_text
async def _amend_stdlib(
    ddl_text: str,
    stdlib: StdlibBits,
) -> Tuple[StdlibBits, str]:
    """Apply *ddl_text* on top of an already-compiled stdlib.

    Returns the updated ``StdlibBits`` and the SQL script implementing
    the amendment.
    """
    schema = stdlib.stdschema
    reflschema = stdlib.reflschema

    topblock = dbops.PLTopBlock()
    plans = []

    context = sd.CommandContext()
    context.stdmode = True

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        # Keep the reflection schema in sync with the main schema.
        reflschema = delta_command.apply(reflschema, context)
        plan.generate(topblock)
        plans.append(plan)

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=stdlib.classlayout,
    )

    # NOTE(review): the original built a compiler context with
    # bootstrap_mode=True here and immediately rebound the name with a
    # plain context, so the first call's result was never used; the dead
    # call has been removed.  TODO confirm bootstrap_mode was not intended.
    compilerctx = edbcompiler.new_compiler_context(schema)
    for plan in plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=plan,
            block=topblock,
        )

    sqltext = topblock.to_string()

    return stdlib._replace(stdschema=schema, reflschema=reflschema), sqltext
def run(db: DB, s: str, print_asts: bool, output_mode: str) -> None:
    """Parse *s*, evaluate it against *db*, and print the result.

    The result is rendered as JSON, pretty-printed, or debug-dumped,
    depending on *output_mode*.
    """
    query = parse(s)

    # Optionally show the parsed AST before evaluation.
    if print_asts:
        debug.dump(query)

    result = go(query, db)

    if output_mode == 'json':
        print(EdbJSONEncoder().encode(result))
    elif output_mode == 'pprint':
        pprint.pprint(result)
    else:
        # Fallback: raw debug dump of the result.
        debug.dump(result)
def compile_ir_to_sql(
        ir_expr: irast.Base, *,
        output_format: typing.Optional[OutputFormat] = None,
        ignore_shapes: bool = False,
        explicit_top_cast: typing.Optional[irast.TypeRef] = None,
        timer=None,
        use_named_params: bool = False,
        expected_cardinality_one: bool = False,
        pretty: bool = True) -> typing.Tuple[str, typing.Dict[str, int]]:
    """Compile an IR expression into SQL text and its argument-name map.

    If *timer* is given, both the IR-to-AST lowering and the code
    generation are accounted under its ``'compile_ir_to_sql'`` bucket
    (matching the original accounting, which used the same label for
    both phases).
    """
    from contextlib import nullcontext

    def _timed():
        # Time the enclosed phase when a timer is provided; otherwise a
        # no-op context manager.  This replaces two duplicated
        # timed/untimed copies of each call in the original.
        if timer is None:
            return nullcontext()
        return timer.timeit('compile_ir_to_sql')

    with _timed():
        qtree = compile_ir_to_sql_tree(
            ir_expr,
            output_format=output_format,
            ignore_shapes=ignore_shapes,
            explicit_top_cast=explicit_top_cast,
            use_named_params=use_named_params,
            expected_cardinality_one=expected_cardinality_one)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL Tree')
        debug.dump(qtree)

    argmap = qtree.argnames

    # Generate query text
    with _timed():
        codegen = _run_codegen(qtree, pretty=pretty)
    sql_text = ''.join(codegen.result)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')

    return sql_text, argmap
def compile_ir_to_sql(
    ir_expr: irast.Base,
    *,
    output_format: Optional[OutputFormat] = None,
    ignore_shapes: bool = False,
    explicit_top_cast: Optional[irast.TypeRef] = None,
    singleton_mode: bool = False,
    use_named_params: bool = False,
    expected_cardinality_one: bool = False,
    pretty: bool = True,
    backend_runtime_params: Optional[pgparams.BackendRuntimeParams] = None,
) -> Tuple[str, Dict[str, pgast.Param]]:
    """Lower an IR expression to SQL text plus its parameter map."""
    sql_ast = compile_ir_to_sql_tree(
        ir_expr,
        output_format=output_format,
        ignore_shapes=ignore_shapes,
        explicit_top_cast=explicit_top_cast,
        singleton_mode=singleton_mode,
        use_named_params=use_named_params,
        expected_cardinality_one=expected_cardinality_one,
        backend_runtime_params=backend_runtime_params,
    )

    if (  # pragma: no cover
            debug.flags.edgeql_compile or debug.flags.edgeql_compile_sql_ast):
        debug.header('SQL Tree')
        debug.dump(sql_ast)

    # Only Query nodes carry an argument map; everything else gets none.
    argmap = (
        sql_ast.argnames
        if isinstance(sql_ast, pgast.Query) and sql_ast.argnames
        else {}
    )

    # Render the SQL AST to text.
    sql_text = run_codegen(sql_ast, pretty=pretty)

    if (  # pragma: no cover
            debug.flags.edgeql_compile or debug.flags.edgeql_compile_sql_text):
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')

    if (  # pragma: no cover
            debug.flags.edgeql_compile_sql_reordered_text):
        debug.header('Reordered SQL')
        reordered_text = run_codegen(sql_ast, pretty=True, reordered=True)
        debug.dump_code(reordered_text, lexer='sql')

    return sql_text, argmap
def _process_delta(self, ctx: CompileContext, delta, schema):
    """Adapt and process the delta command."""
    def _trace(enabled, title):
        # Dump the delta's current state when the given debug flag is set.
        if enabled:
            debug.header(title)
            debug.dump(delta, schema=schema)

    _trace(debug.flags.delta_plan, 'Delta Plan')

    # Translate the generic delta into its backend-specific form.
    delta = pg_delta.CommandMeta.adapt(delta)
    context = self._new_delta_context(ctx)
    schema, _ = delta.apply(schema, context)

    _trace(debug.flags.delta_pgsql_plan, 'PgSQL Delta Plan')

    return schema, delta
def _process_delta(delta, schema):
    """Adapt and process the delta command."""
    if debug.flags.delta_plan:
        debug.header('Delta Plan')
        debug.dump(delta, schema=schema)

    # Translate the generic delta into the backend-specific command tree.
    adapted = delta_cmds.CommandMeta.adapt(delta)

    apply_ctx = sd.CommandContext()
    apply_ctx.stdmode = True
    schema, _ = adapted.apply(schema, apply_ctx)

    if debug.flags.delta_pgsql_plan:
        debug.header('PgSQL Delta Plan')
        debug.dump(adapted, schema=schema)

    return schema, adapted
def compile_ast_to_ir(tree, schema, *,
                      parent_object_type=None,
                      anchors=None,
                      path_prefix_anchor=None,
                      singletons=None,
                      func_params=None,
                      security_context=None,
                      derived_target_module=None,
                      result_view_name=None,
                      modaliases=None,
                      implicit_id_in_shapes=False,
                      implicit_tid_in_shapes=False,
                      schema_view_mode=False,
                      disable_constant_folding=False,
                      json_parameters=False,
                      session_mode=False,
                      allow_abstract_operators=False,
                      allow_generic_type_output=False):
    """Compile given EdgeQL AST into EdgeDB IR.

    Most keyword arguments are forwarded verbatim to
    ``stmtctx.init_context``; *path_prefix_anchor*, when given, must be a
    key into *anchors* and names the default prefix for abbreviated
    paths.
    """
    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx.init_context(
        schema=schema,
        anchors=anchors,
        singletons=singletons,
        modaliases=modaliases,
        security_context=security_context,
        func_params=func_params,
        derived_target_module=derived_target_module,
        result_view_name=result_view_name,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        schema_view_mode=schema_view_mode,
        disable_constant_folding=disable_constant_folding,
        json_parameters=json_parameters,
        session_mode=session_mode,
        allow_abstract_operators=allow_abstract_operators,
        allow_generic_type_output=allow_generic_type_output,
        parent_object_type=parent_object_type)

    if path_prefix_anchor is not None:
        # Resolve the anchor and install it as the partial-path prefix.
        path_prefix = anchors[path_prefix_anchor]
        ctx.partial_path_prefix = setgen.class_set(path_prefix, ctx=ctx)
        ctx.partial_path_prefix.anchor = path_prefix_anchor
        ctx.partial_path_prefix.show_as_anchor = path_prefix_anchor

    ir_set = dispatch.compile(tree, ctx=ctx)
    ir_expr = stmtctx.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            # Positional parameters must form a dense $0..$N-1 sequence;
            # report any gaps.
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    options: Optional[CompilerOptions] = None,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        options:
            An optional :class:`edgeql.compiler.options.CompilerOptions`
            instance specifying compilation options.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if options is None:
        options = CompilerOptions()

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_text:
        debug.header('EdgeQL Text')
        debug.dump_code(qlcodegen.generate_source(tree, pretty=True))
    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_ast:
        debug.header('Compiler Options')
        debug.dump(options.__dict__)
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx_mod.init_context(schema=schema, options=options)

    ir_set = dispatch_mod.compile(tree, ctx=ctx)
    ir_expr = stmtctx_mod.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            # Positional parameters must form a dense $0..$N-1 sequence;
            # report any gaps.
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_scope:
        debug.header('Scope Tree')
        print(ctx.path_scope.pdebugformat())
        # Also build and dump a mapping from scope ids to
        # paths that appear directly at them.
        scopes: Dict[int, Set[irast.PathId]] = {
            k: set()
            for k in sorted(
                node.unique_id
                for node in ctx.path_scope.descendants
                if node.unique_id
            )
        }
        for ir_set in ctx.env.set_types:
            if ir_set.path_scope_id and ir_set.path_scope_id in scopes:
                scopes[ir_set.path_scope_id].add(ir_set.path_id)
        debug.dump(scopes)

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_ir:
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
async def _make_stdlib(
        testmode: bool) -> Tuple[s_schema.Schema, str, Set[uuid.UUID]]:
    """Build the standard library schema and its SQL bootstrap script.

    Returns ``(schema, sql_text, new_types)`` where *new_types* is the
    set of type ids created by the non-DATABASE plans.
    """
    schema = s_schema.Schema()
    schema, _ = s_mod.Module.create_in_schema(schema, name='__derived__')
    schema = s_pseudo.populate_types(schema)

    # The block type (SQL vs PL) is decided lazily based on the first plan.
    current_block = None

    # Concatenate all stdlib module sources into a single DDL script.
    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)
    new_types: Set[uuid.UUID] = set()

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            # DATABASE commands require a plain SQL block.
            if (current_block is not None and
                    not isinstance(current_block, dbops.SQLBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.SQLBlock()
        else:
            # Regular DDL contributes new type ids and needs a PL block.
            new_types.update(plan.new_types)
            if (current_block is not None and
                    not isinstance(current_block, dbops.PLTopBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.PLTopBlock()

        plan.generate(current_block)

    assert current_block is not None
    sql_text = current_block.to_string()

    # Sanity check: every stdlib module must be flagged as builtin.
    mods = {
        mod.get_name(schema)
        for mod in schema.get_modules()
        if mod.get_builtin(schema)
    }
    if mods != s_schema.STD_MODULES:
        raise errors.SchemaError(
            f'modules {s_schema.STD_MODULES - mods} are not marked as builtin')

    return schema, sql_text, new_types
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    """Build the standard library and reflection schema bootstrap.

    *global_ids* maps well-known global names (superuser role, template
    database) to their fixed UUIDs.  Returns the assembled ``StdlibBits``.
    """
    schema = s_schema.Schema()
    schema, _ = s_mod.Module.create_in_schema(schema, name='__derived__')

    current_block = dbops.PLTopBlock()

    # Concatenate all stdlib module sources into a single DDL script.
    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)
        types.update(plan.new_types)
        plan.generate(current_block)

    # Bootstrap globals: superuser role and the template/superuser
    # databases with their fixed ids.
    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
        f'''CREATE DATABASE {edbdef.EDGEDB_TEMPLATE_DB} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_TEMPLATE_DB]}'
        }};''',
        f'CREATE DATABASE {edbdef.EDGEDB_SUPERUSER_DB};',
    ])

    context = sd.CommandContext(stdmode=True)

    for ddl_cmd in edgeql.parse_block(stdglobals):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)
        schema = delta_command.apply(schema, context)

    # Build the schema-reflection structures and their delta plan.
    refldelta, classlayout, introparts = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(refldelta, schema)

    std_plans.append(refldelta)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=classlayout,
        bootstrap_mode=True,
    )

    compilerctx = edbcompiler.new_compiler_context(reflschema)

    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
            is_internal_reflection=std_plan is refldelta,
            stdmode=True,
        )

    sqltext = current_block.to_string()

    compilerctx = edbcompiler.new_compiler_context(
        reflschema,
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Postgres to plan as opposed to a
    # straight flat UNION.
    sql_introparts = []

    for intropart in introparts:
        introtokens = tokenizer.tokenize(intropart.encode())
        units = compiler._compile(ctx=compilerctx, tokens=introtokens)
        assert len(units) == 1 and len(units[0].sql) == 1
        sql_intropart = units[0].sql[0].decode()
        sql_introparts.append(sql_intropart)

    introsql = ' UNION ALL '.join(sql_introparts)

    return StdlibBits(
        stdschema=schema,
        reflschema=reflschema,
        sqltext=sqltext,
        types=types,
        classlayout=classlayout,
        introquery=introsql,
    )
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    options: Optional[CompilerOptions] = None,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        options:
            An optional :class:`edgeql.compiler.options.CompilerOptions`
            instance specifying compilation options.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if options is None:
        options = CompilerOptions()

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)
        debug.header('Compiler Options')
        debug.dump(options.__dict__)

    ctx = stmtctx_mod.init_context(schema=schema, options=options)

    ir_set = dispatch_mod.compile(tree, ctx=ctx)
    ir_expr = stmtctx_mod.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            # Positional parameters must form a dense $0..$N-1 sequence;
            # report any gaps.
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    if isinstance(ir_expr, irast.Statement):
        # Propagate the collected DML expressions onto the statement.
        ir_expr.dml_exprs = ctx.env.dml_exprs

    return ir_expr
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Parse and apply a DDL script against *schema*.

    Supports migration blocks (START/POPULATE/COMMIT MIGRATION) as well
    as plain DDL statements; returns the resulting schema.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    target_schema = None
    # Migration-block state: the schema the migration started from, the
    # SDL-derived target, and the accumulated DDL script.
    migration_schema = None
    migration_target = None
    migration_script = []

    for stmt in statements:
        if isinstance(stmt, qlast.StartMigration):
            # START MIGRATION
            if target_schema is None:
                # Lazily load the std schema as the SDL base.
                target_schema = _load_std_schema()

            migration_target = s_ddl.apply_sdl(
                stmt.target,
                base_schema=target_schema,
                current_schema=current_schema,
                testmode=True,
            )

            migration_schema = current_schema

            ddl_plan = None

        elif isinstance(stmt, qlast.PopulateMigration):
            # POPULATE MIGRATION
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected POPULATE MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            migration_diff = s_ddl.delta_schemas(
                migration_schema,
                migration_target,
            )

            if debug.flags.delta_plan:
                debug.header('Populate Migration Diff')
                debug.dump(migration_diff, schema=schema)

            new_ddl = s_ddl.ddlast_from_delta(
                migration_schema,
                migration_target,
                migration_diff,
            )

            migration_script.extend(new_ddl)

            if debug.flags.delta_plan:
                debug.header('Populate Migration DDL AST')
                text = []
                for cmd in new_ddl:
                    debug.dump(cmd)
                    text.append(edgeql.generate_source(cmd, pretty=True))
                debug.header('Populate Migration DDL Text')
                debug.dump_code(';\n'.join(text) + ';')

            # NOTE(review): ddl_plan is not reset in this branch, so the
            # apply step below sees the previous iteration's value —
            # confirm this is intentional.

        elif isinstance(stmt, qlast.CommitMigration):
            # COMMIT MIGRATION: wrap the accumulated script into a
            # CreateMigration chained onto the last known migration.
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected COMMIT MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            last_migration = current_schema.get_last_migration()
            if last_migration:
                last_migration_ref = s_utils.name_to_ast_ref(
                    last_migration.get_name(current_schema),
                )
            else:
                last_migration_ref = None

            create_migration = qlast.CreateMigration(
                body=qlast.MigrationBody(commands=tuple(migration_script)),
                parent=last_migration_ref,
            )

            ddl_plan = s_ddl.delta_from_ddl(
                create_migration,
                schema=migration_schema,
                modaliases={None: default_module},
                testmode=True,
            )

            if debug.flags.delta_plan:
                debug.header('Delta Plan')
                debug.dump(ddl_plan, schema=schema)

            # Leave the migration block.
            migration_schema = None
            migration_target = None
            migration_script = []

        elif isinstance(stmt, qlast.DDL):
            if migration_target is not None:
                # Inside a migration block: defer the statement.
                migration_script.append(stmt)
                ddl_plan = None
            else:
                ddl_plan = s_ddl.delta_from_ddl(
                    stmt,
                    schema=current_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

                if debug.flags.delta_plan:
                    debug.header('Delta Plan')
                    debug.dump(ddl_plan, schema=schema)

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        if ddl_plan is not None:
            context = sd.CommandContext()
            context.testmode = True
            current_schema = ddl_plan.apply(current_schema, context)

    return current_schema
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    """Build the standard library, reflection schema and intro queries.

    *global_ids* maps well-known global names (e.g. the superuser role)
    to their fixed UUIDs.  Returns the assembled ``StdlibBits``.
    """
    # Chained schema: std / user / global layers.
    schema = s_schema.ChainedSchema(
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
    )
    schema, _ = s_mod.Module.create_in_schema(
        schema,
        name=sn.UnqualName('__derived__'),
    )

    current_block = dbops.PLTopBlock()

    # Concatenate all stdlib module sources into a single DDL script.
    std_texts = []
    for modname in s_schema.STD_SOURCES:
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text(sn.UnqualName('_testmode')))

    ddl_text = '\n'.join(std_texts)
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)
        types.update(plan.new_types)
        plan.generate(current_block)

    # Record the std schema version.
    _, schema_version = s_std.make_schema_version(schema)
    schema, plan = _process_delta(schema_version, schema)
    std_plans.append(schema_version)
    plan.generate(current_block)

    # Bootstrap the superuser role with its fixed id.
    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
    ])

    schema = await _execute_edgeql_ddl(schema, stdglobals)

    # Record the global schema version.
    _, global_schema_version = s_std.make_global_schema_version(schema)
    schema, plan = _process_delta(global_schema_version, schema)
    std_plans.append(global_schema_version)
    plan.generate(current_block)

    # Build the schema-reflection structures and their delta plan.
    reflection = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(
        reflection.intro_schema_delta, schema)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema.get_top_schema(),
        reflection_schema=reflschema.get_top_schema(),
        schema_class_layout=reflection.class_layout,  # type: ignore
    )

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
    )

    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
        )

    # The reflection delta itself is compiled in internal-schema mode.
    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
        internal_schema_mode=True,
    )
    compiler._compile_schema_storage_in_delta(
        ctx=compilerctx,
        delta=reflection.intro_schema_delta,
        block=subblock,
    )

    sqltext = current_block.to_string()

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Postgres to plan as opposed to a
    # straight flat UNION.
    sql_intro_local_parts = []
    sql_intro_global_parts = []
    for intropart in reflection.local_intro_parts:
        sql_intro_local_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ),
        )

    for intropart in reflection.global_intro_parts:
        sql_intro_global_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ),
        )

    local_intro_sql = ' UNION ALL '.join(sql_intro_local_parts)
    local_intro_sql = f'''
        WITH intro(c) AS ({local_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    global_intro_sql = ' UNION ALL '.join(sql_intro_global_parts)
    global_intro_sql = f'''
        WITH intro(c) AS ({global_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    return StdlibBits(
        stdschema=schema.get_top_schema(),
        reflschema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        sqltext=sqltext,
        types=types,
        classlayout=reflection.class_layout,
        local_intro_query=local_intro_sql,
        global_intro_query=global_intro_sql,
    )
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    modaliases: Optional[Mapping[Optional[str], str]] = None,
    anchors: Optional[Mapping[str, Any]] = None,
    path_prefix_anchor: Optional[str] = None,
    singletons: Sequence[s_types.Type] = (),
    func_params: Optional[s_func.ParameterLikeList] = None,
    result_view_name: Optional[s_name.SchemaName] = None,
    derived_target_module: Optional[str] = None,
    parent_object_type: Optional[s_obj.ObjectMeta] = None,
    implicit_limit: int = 0,
    implicit_id_in_shapes: bool = False,
    implicit_tid_in_shapes: bool = False,
    schema_view_mode: bool = False,
    session_mode: bool = False,
    disable_constant_folding: bool = False,
    json_parameters: bool = False,
    allow_generic_type_output: bool = False,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        modaliases:
            Module name resolution table.  Useful when this EdgeQL
            expression is part of some other construct, such as a
            DDL statement.

        anchors:
            Predefined symbol table.  Maps identifiers
            (or ``qlast.SpecialAnchor`` instances) to specified
            schema objects or IR fragments.

        path_prefix_anchor:
            Symbol name used to resolve the prefix of abbreviated
            path expressions by default.  The symbol must be present
            in *anchors*.

        singletons:
            An optional set of schema types that should be treated
            as singletons in the context of this compilation.

        func_params:
            When compiling a function body, specifies function
            parameter definitions.

        result_view_name:
            Optionally defines the name of the topmost generated
            view type.  Useful when compiling schema views.

        derived_target_module:
            The name of the module where derived types and pointers
            should be placed.  When compiling a schema view, this
            would be the name of the module where the view is
            defined.  By default, the special ``__derived__`` module
            is used.

        parent_object_type:
            Optionally specifies the class of the schema object, in
            the context of which this expression is compiled.  Used
            in schema definitions.

        implicit_limit:
            If set to a non-zero integer value, this will be injected
            as an implicit `LIMIT` clause into each read query.

        implicit_id_in_shapes:
            Whether to include object id property in shapes by
            default.

        implicit_tid_in_shapes:
            Whether to implicitly include object type id in shapes
            as the ``__tid__`` computable.

        schema_view_mode:
            When compiling a schema view, set this to ``True``.

        session_mode:
            When ``True``, assumes that the expression is compiled
            in the presence of a persistent database session.
            Otherwise, the use of functions and other constructs
            that require a persistent session will trigger an error.

        disable_constant_folding:
            When ``True``, the compile-time evaluation and
            substitution of constant expressions is disabled.

        json_parameters:
            When ``True``, the argument values are assumed to be
            in JSON format.

        allow_generic_type_output:
            If ``True``, allows the expression to return a generic
            type.  By default, expressions must resolve into concrete
            types.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx.init_context(
        schema=schema,
        anchors=anchors,
        singletons=singletons,
        modaliases=modaliases,
        func_params=func_params,
        derived_target_module=derived_target_module,
        result_view_name=result_view_name,
        implicit_limit=implicit_limit,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        schema_view_mode=schema_view_mode,
        disable_constant_folding=disable_constant_folding,
        json_parameters=json_parameters,
        session_mode=session_mode,
        allow_generic_type_output=allow_generic_type_output,
        parent_object_type=parent_object_type,
    )

    if path_prefix_anchor is not None:
        # Resolve the anchor and install it as the partial-path prefix.
        assert anchors is not None
        path_prefix = anchors[path_prefix_anchor]
        assert isinstance(path_prefix, s_types.Type)
        ctx.partial_path_prefix = setgen.class_set(path_prefix, ctx=ctx)
        ctx.partial_path_prefix.anchor = path_prefix_anchor
        ctx.partial_path_prefix.show_as_anchor = path_prefix_anchor

    ir_set = dispatch.compile(tree, ctx=ctx)
    ir_expr = stmtctx.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            # Positional parameters must form a dense $0..$N-1 sequence;
            # report any gaps.
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr