Example #1
def compile_ir_to_sql(
        ir_expr: irast.Base, *,
        output_format: Optional[OutputFormat]=None,
        ignore_shapes: bool=False,
        explicit_top_cast: Optional[irast.TypeRef]=None,
        use_named_params: bool=False,
        expected_cardinality_one: bool=False,
        pretty: bool=True) -> Tuple[str, Dict[str, int]]:

    qtree = compile_ir_to_sql_tree(
        ir_expr,
        output_format=output_format,
        ignore_shapes=ignore_shapes,
        explicit_top_cast=explicit_top_cast,
        use_named_params=use_named_params,
        expected_cardinality_one=expected_cardinality_one)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL Tree')
        debug.dump(qtree)

    assert isinstance(qtree, pgast.Query), "expected instance of pgast.Query"
    argmap = qtree.argnames

    # Generate query text
    codegen = _run_codegen(qtree, pretty=pretty)
    sql_text = ''.join(codegen.result)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')

    return sql_text, argmap
Example #2
def compile_to_ir(expr,
                  schema,
                  *,
                  anchors=None,
                  path_prefix_anchor=None,
                  security_context=None,
                  modaliases=None,
                  implicit_id_in_shapes=False,
                  implicit_tid_in_shapes=False):
    """Compile given EdgeQL statement into EdgeDB IR."""

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL TEXT')
        debug.print(expr)

    tree = ql_parser.parse(expr, modaliases)

    return compile_ast_to_ir(tree,
                             schema,
                             anchors=anchors,
                             path_prefix_anchor=path_prefix_anchor,
                             security_context=security_context,
                             modaliases=modaliases,
                             implicit_id_in_shapes=implicit_id_in_shapes,
                             implicit_tid_in_shapes=implicit_tid_in_shapes)
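The two entry points above chain into a full text-to-SQL pipeline: EdgeQL text is first compiled to IR (Example #2), and the IR is then compiled to SQL (Example #1). A minimal sketch, assuming a schema object is already available; the import paths are assumptions, not taken from the snippets themselves:

# Hypothetical wiring of Example #2 and Example #1; the import paths and
# the `schema` object are assumptions, not part of the original snippets.
from edb.edgeql.compiler import compile_to_ir       # Example #2 (assumed path)
from edb.pgsql.compiler import compile_ir_to_sql    # Example #1 (assumed path)

ir = compile_to_ir('SELECT 1 + 1', schema)               # EdgeQL text -> EdgeDB IR
sql_text, argmap = compile_ir_to_sql(ir, pretty=True)    # IR -> SQL text + argument map
print(sql_text)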
Example #3
    def _compile_and_apply_ddl_command(self, ctx: CompileContext, cmd):
        current_tx = ctx.state.current_tx()
        schema = current_tx.get_schema()

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(cmd)

        # Do a dry-run on test_schema to canonicalize
        # the schema delta-commands.
        test_schema = schema
        context = self._new_delta_context(ctx)
        cmd.apply(test_schema, context=context)
        cmd.canonical = True

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = self._process_delta(ctx, cmd, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            block = pg_dbops.SQLBlock()
        else:
            block = pg_dbops.PLTopBlock()

        plan.generate(block)
        sql = block.to_string().encode('utf-8')

        current_tx.update_schema(schema)

        if debug.flags.delta_execute:
            debug.header('Delta Script')
            debug.dump_code(sql, lexer='sql')

        return dbstate.DDLQuery(sql=(sql, ))
Example #4
def _process_delta(ctx, delta, schema):
    """Adapt and process the delta command."""

    if debug.flags.delta_plan:
        debug.header('Delta Plan')
        debug.dump(delta, schema=schema)

    context = sd.CommandContext()
    context.stdmode = True

    if not delta.canonical:
        # Canonicalize
        sd.apply(delta, schema=schema)

    delta = delta_cmds.CommandMeta.adapt(delta)
    context = sd.CommandContext(
        stdmode=True,
        backend_runtime_params=ctx.cluster.get_runtime_params(),
    )
    schema = sd.apply(delta, schema=schema, context=context)

    if debug.flags.delta_pgsql_plan:
        debug.header('PgSQL Delta Plan')
        debug.dump(delta, schema=schema)

    return schema, delta
Example #5
async def _make_stdlib(testmode: bool):
    schema = s_schema.Schema()

    current_block = None

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(ddl_cmd,
                                             schema=schema,
                                             modaliases={None: 'std'},
                                             stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Do a dry-run on test_schema to canonicalize
        # the schema delta-commands.
        test_schema = schema

        context = sd.CommandContext()
        context.stdmode = True

        delta_command.apply(test_schema, context=context)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            if (current_block is not None
                    and not isinstance(current_block, dbops.SQLBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.SQLBlock()

        else:
            if (current_block is not None
                    and not isinstance(current_block, dbops.PLTopBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.PLTopBlock()

        plan.generate(current_block)

    sql_text = current_block.to_string()

    return schema, sql_text
Example #6
async def _amend_stdlib(
    ddl_text: str,
    stdlib: StdlibBits,
) -> Tuple[StdlibBits, str]:
    schema = stdlib.stdschema
    reflschema = stdlib.reflschema

    topblock = dbops.PLTopBlock()
    plans = []

    context = sd.CommandContext()
    context.stdmode = True

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(ddl_cmd,
                                             modaliases={},
                                             schema=schema,
                                             stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        reflschema = delta_command.apply(reflschema, context)
        plan.generate(topblock)
        plans.append(plan)

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=stdlib.classlayout,
    )

    compilerctx = edbcompiler.new_compiler_context(schema)

    for plan in plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=plan,
            block=topblock,
        )

    sqltext = topblock.to_string()

    return stdlib._replace(stdschema=schema, reflschema=reflschema), sqltext
Example #7
async def _execute_block(conn, block: dbops.PLBlock) -> None:

    if not block.is_transactional():
        stmts = block.get_statements()
    else:
        stmts = [block.to_string()]
    if debug.flags.bootstrap:
        debug.header('Bootstrap')
        debug.dump_code(';\n'.join(stmts), lexer='sql')

    for stmt in stmts:
        await _execute(conn, stmt)
Example #8
async def _execute_ddl(conn, sql_text):
    try:
        if debug.flags.bootstrap:
            debug.header('Delta Script')
            debug.dump_code(sql_text, lexer='sql')

        await conn.execute(sql_text)

    except Exception as e:
        position = getattr(e, 'position', None)
        internal_position = getattr(e, 'internal_position', None)
        context = getattr(e, 'context', '')
        if context:
            pl_func_line = re.search(
                r'^PL/pgSQL function inline_code_block line (\d+).*',
                context, re.M)

            if pl_func_line:
                pl_func_line = int(pl_func_line.group(1))
        else:
            pl_func_line = None
        point = None

        if position is not None:
            position = int(position)
            point = parser_context.SourcePoint(
                None, None, position)
            text = e.query
            if text is None:
                # Parse errors
                text = sql_text

        elif internal_position is not None:
            internal_position = int(internal_position)
            point = parser_context.SourcePoint(
                None, None, internal_position)
            text = e.internal_query

        elif pl_func_line:
            point = parser_context.SourcePoint(
                pl_func_line, None, None
            )
            text = sql_text

        if point is not None:
            context = parser_context.ParserContext(
                'query', text, start=point, end=point)
            exceptions.replace_context(e, context)

        raise
Example #9
async def _configure(schema, conn, cluster, *, insecure=False, testmode=False):
    scripts = []

    if not testmode:
        memory_kb = psutil.virtual_memory().total // 1024
        settings = {
            'shared_buffers': f'"{int(memory_kb * 0.2)}kB"',
            'effective_cache_size': f'"{int(memory_kb * 0.5)}kB"',
            'query_work_mem': f'"{6 * (2 ** 10)}kB"',
        }

        for setting, value in settings.items():
            scripts.append(f'''
                CONFIGURE SYSTEM SET {setting} := {value};
            ''')
    else:
        settings = {}

    if insecure:
        scripts.append('''
            CONFIGURE SYSTEM INSERT Auth {
                priority := 0,
                method := (INSERT Trust),
            };
        ''')

    config_spec = config.get_settings()

    for script in scripts:
        _, sql = compiler.compile_bootstrap_script(
            schema, schema, script, single_statement=True)

        if debug.flags.bootstrap:
            debug.header('Bootstrap')
            debug.dump_code(sql, lexer='sql')

        config_op_data = await conn.fetchval(sql)
        if config_op_data is not None and isinstance(config_op_data, str):
            config_op = config.Operation.from_json(config_op_data)
            settings = config_op.apply(config_spec, immutables.Map())

    config_json = config.to_json(config_spec, settings)
    block = dbops.PLTopBlock()
    dbops.UpdateMetadata(
        dbops.Database(name=edbdef.EDGEDB_TEMPLATE_DB),
        {'sysconfig': json.loads(config_json)},
    ).generate(block)

    await _execute_block(conn, block)
Example #10
def compile_ir_to_sql(
        ir_expr: irast.Base,
        *,
        output_format: typing.Optional[OutputFormat] = None,
        ignore_shapes: bool = False,
        explicit_top_cast: typing.Optional[irast.TypeRef] = None,
        timer=None,
        use_named_params: bool = False,
        expected_cardinality_one: bool = False,
        pretty: bool = True) -> typing.Tuple[str, typing.Dict[str, int]]:

    if timer is None:
        qtree = compile_ir_to_sql_tree(
            ir_expr,
            output_format=output_format,
            ignore_shapes=ignore_shapes,
            explicit_top_cast=explicit_top_cast,
            use_named_params=use_named_params,
            expected_cardinality_one=expected_cardinality_one)
    else:
        with timer.timeit('compile_ir_to_sql'):
            qtree = compile_ir_to_sql_tree(
                ir_expr,
                output_format=output_format,
                ignore_shapes=ignore_shapes,
                explicit_top_cast=explicit_top_cast,
                use_named_params=use_named_params,
                expected_cardinality_one=expected_cardinality_one)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL Tree')
        debug.dump(qtree)

    argmap = qtree.argnames

    # Generate query text
    if timer is None:
        codegen = _run_codegen(qtree, pretty=pretty)
    else:
        with timer.timeit('compile_ir_to_sql'):
            codegen = _run_codegen(qtree, pretty=pretty)

    sql_text = ''.join(codegen.result)

    if debug.flags.edgeql_compile:  # pragma: no cover
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')

    return sql_text, argmap
Example #11
async def _configure(
    schema: s_schema.Schema,
    compiler: edbcompiler.Compiler,
    conn: asyncpg_con.Connection,
    cluster: pgcluster.BaseCluster,
    *,
    insecure: bool = False,
) -> None:
    config_spec = config.get_settings()

    scripts = []
    settings: Mapping[str, config.SettingValue] = {}

    if insecure:
        scripts.append('''
            CONFIGURE SYSTEM INSERT Auth {
                priority := 0,
                method := (INSERT Trust),
            };
        ''')

    for script in scripts:
        _, sql = compile_bootstrap_script(
            compiler,
            schema,
            script,
            single_statement=True,
        )

        if debug.flags.bootstrap:
            debug.header('Bootstrap')
            debug.dump_code(sql, lexer='sql')

        config_op_data = await conn.fetchval(sql)
        if config_op_data is not None and isinstance(config_op_data, str):
            config_op = config.Operation.from_json(config_op_data)
            settings = config_op.apply(config_spec, immutables.Map())

    config_json = config.to_json(config_spec, settings, include_source=False)
    block = dbops.PLTopBlock()
    dbops.UpdateMetadata(
        dbops.Database(name=edbdef.EDGEDB_TEMPLATE_DB),
        {
            'sysconfig': json.loads(config_json)
        },
    ).generate(block)

    await _execute_block(conn, block)
Example #12
async def _configure(schema, conn, cluster, *, insecure=False, testmode=False):
    scripts = []

    if not testmode:
        memory_kb = psutil.virtual_memory().total // 1024
        settings = {
            'shared_buffers': f'"{int(memory_kb * 0.2)}kB"',
            'effective_cache_size': f'"{int(memory_kb * 0.5)}kB"',
            'query_work_mem': f'"{6 * (2 ** 10)}kB"',
        }

        for setting, value in settings.items():
            scripts.append(f'''
                CONFIGURE SYSTEM SET {setting} := {value};
            ''')
    else:
        settings = {}

    if insecure:
        scripts.append('''
            CONFIGURE SYSTEM INSERT Auth {
                priority := 0,
                method := (INSERT Trust),
            };
        ''')

    config_spec = config.get_settings()

    for script in scripts:
        _, sql = compiler.compile_bootstrap_script(schema,
                                                   schema,
                                                   script,
                                                   single_statement=True)

        if debug.flags.bootstrap:
            debug.header('Bootstrap')
            debug.dump_code(sql, lexer='sql')

        config_op_data = await conn.fetchval(sql)
        if config_op_data is not None and isinstance(config_op_data, str):
            config_op = config.Operation.from_json(config_op_data)
            settings = config_op.apply(config_spec, immutables.Map())

    data_dir = cluster.get_data_dir()
    overrides_fn = os.path.join(data_dir, 'config_sys.json')

    with open(overrides_fn, 'wt') as f:
        f.write(config.to_json(config_spec, settings))
Example #13
    def _process_delta(self, ctx: CompileContext, delta, schema):
        """Adapt and process the delta command."""

        if debug.flags.delta_plan:
            debug.header('Delta Plan')
            debug.dump(delta, schema=schema)

        delta = pg_delta.CommandMeta.adapt(delta)
        context = self._new_delta_context(ctx)
        schema, _ = delta.apply(schema, context)

        if debug.flags.delta_pgsql_plan:
            debug.header('PgSQL Delta Plan')
            debug.dump(delta, schema=schema)

        return schema, delta
Example #14
def compile_func_to_ir(func,
                       schema,
                       *,
                       anchors=None,
                       security_context=None,
                       modaliases=None,
                       implicit_id_in_shapes=False,
                       implicit_tid_in_shapes=False):
    """Compile an EdgeQL function into EdgeDB IR."""

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL Function')
        debug.print(func.get_code(schema))

    trees = ql_parser.parse_block(func.get_code(schema) + ';')
    if len(trees) != 1:
        raise errors.InvalidFunctionDefinitionError(
            'functions can only contain one statement')

    tree = trees[0]
    if modaliases:
        ql_parser.append_module_aliases(tree, modaliases)

    param_anchors, param_aliases = get_param_anchors_for_callable(
        func.get_params(schema), schema)

    if anchors is None:
        anchors = {}

    anchors.update(param_anchors)
    tree.aliases.extend(param_aliases)

    ir = compile_ast_to_ir(
        tree,
        schema,
        anchors=anchors,
        func_params=func.get_params(schema),
        security_context=security_context,
        modaliases=modaliases,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        # the body of a session_only function can contain calls to
        # other session_only functions
        session_mode=func.get_session_only(schema))

    return ir
Example #15
def _process_delta(delta, schema):
    """Adapt and process the delta command."""

    if debug.flags.delta_plan:
        debug.header('Delta Plan')
        debug.dump(delta, schema=schema)

    delta = delta_cmds.CommandMeta.adapt(delta)

    context = sd.CommandContext()
    context.stdmode = True

    schema, _ = delta.apply(schema, context)

    if debug.flags.delta_pgsql_plan:
        debug.header('PgSQL Delta Plan')
        debug.dump(delta, schema=schema)

    return schema, delta
Example #16
def compile_ir_to_sql(
    ir_expr: irast.Base,
    *,
    output_format: Optional[OutputFormat] = None,
    ignore_shapes: bool = False,
    explicit_top_cast: Optional[irast.TypeRef] = None,
    singleton_mode: bool = False,
    use_named_params: bool = False,
    expected_cardinality_one: bool = False,
    pretty: bool = True,
    backend_runtime_params: Optional[pgparams.BackendRuntimeParams] = None,
) -> Tuple[str, Dict[str, pgast.Param]]:

    qtree = compile_ir_to_sql_tree(
        ir_expr,
        output_format=output_format,
        ignore_shapes=ignore_shapes,
        explicit_top_cast=explicit_top_cast,
        singleton_mode=singleton_mode,
        use_named_params=use_named_params,
        expected_cardinality_one=expected_cardinality_one,
        backend_runtime_params=backend_runtime_params,
    )

    if (  # pragma: no cover
            debug.flags.edgeql_compile or debug.flags.edgeql_compile_sql_ast):
        debug.header('SQL Tree')
        debug.dump(qtree)

    if isinstance(qtree, pgast.Query) and qtree.argnames:
        argmap = qtree.argnames
    else:
        argmap = {}

    # Generate query text
    sql_text = run_codegen(qtree, pretty=pretty)

    if (  # pragma: no cover
            debug.flags.edgeql_compile or debug.flags.edgeql_compile_sql_text):
        debug.header('SQL')
        debug.dump_code(sql_text, lexer='sql')
    if (  # pragma: no cover
            debug.flags.edgeql_compile_sql_reordered_text):
        debug.header('Reordered SQL')
        debug_sql_text = run_codegen(qtree, pretty=True, reordered=True)
        debug.dump_code(debug_sql_text, lexer='sql')

    return sql_text, argmap
Example #17
async def _make_stdlib(
        testmode: bool) -> Tuple[s_schema.Schema, str, Set[uuid.UUID]]:
    schema = s_schema.Schema()
    schema, _ = s_mod.Module.create_in_schema(schema, name='__derived__')
    schema = s_pseudo.populate_types(schema)

    current_block = None

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)
    new_types: Set[uuid.UUID] = set()

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(ddl_cmd,
                                             modaliases={},
                                             schema=schema,
                                             stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            if (current_block is not None
                    and not isinstance(current_block, dbops.SQLBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.SQLBlock()

        else:
            new_types.update(plan.new_types)
            if (current_block is not None
                    and not isinstance(current_block, dbops.PLTopBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.PLTopBlock()

        plan.generate(current_block)

    assert current_block is not None
    sql_text = current_block.to_string()

    mods = {
        mod.get_name(schema)
        for mod in schema.get_modules() if mod.get_builtin(schema)
    }
    if mods != s_schema.STD_MODULES:
        raise errors.SchemaError(
            f'modules {s_schema.STD_MODULES - mods} are not marked as builtin')

    return schema, sql_text, new_types
Example #18
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    options: Optional[CompilerOptions] = None,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        options:
            An optional :class:`edgeql.compiler.options.CompilerOptions`
            instance specifying compilation options.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if options is None:
        options = CompilerOptions()

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)
        debug.header('Compiler Options')
        debug.dump(options.__dict__)

    ctx = stmtctx_mod.init_context(schema=schema, options=options)

    ir_set = dispatch_mod.compile(tree, ctx=ctx)
    ir_expr = stmtctx_mod.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    if isinstance(ir_expr, irast.Statement):
        ir_expr.dml_exprs = ctx.env.dml_exprs

    return ir_expr
Example #19
def compile_func_to_ir(func,
                       schema,
                       *,
                       anchors=None,
                       security_context=None,
                       modaliases=None,
                       implicit_id_in_shapes=False,
                       implicit_tid_in_shapes=False):
    """Compile an EdgeQL function into EdgeDB IR."""

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL Function')
        debug.print(func.get_code(schema))

    trees = ql_parser.parse_block(func.get_code(schema) + ';')
    if len(trees) != 1:
        raise errors.InvalidFunctionDefinitionError(
            'functions can only contain one statement')

    tree = trees[0]
    if modaliases:
        ql_parser.append_module_aliases(tree, modaliases)

    param_anchors, param_aliases = get_param_anchors_for_callable(
        func.get_params(schema),
        schema,
        inlined_defaults=func.has_inlined_defaults(schema))

    if anchors is None:
        anchors = {}

    anchors.update(param_anchors)
    tree.aliases.extend(param_aliases)

    ir = compile_ast_to_ir(
        tree,
        schema,
        anchors=anchors,
        func_params=func.get_params(schema),
        security_context=security_context,
        modaliases=modaliases,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        # the body of a session_only function can contain calls to
        # other session_only functions
        session_mode=func.get_session_only(schema))

    return_type = func.get_return_type(schema)
    if (not ir.stype.issubclass(schema, return_type)
            and not ir.stype.implicitly_castable_to(return_type, schema)):
        raise errors.InvalidFunctionDefinitionError(
            f'return type mismatch in function declared to return '
            f'{return_type.get_verbosename(schema)}',
            details=f'Actual return type is '
            f'{ir.stype.get_verbosename(schema)}',
            context=tree.context,
        )

    return_typemod = func.get_return_typemod(schema)
    if (return_typemod is not qltypes.TypeModifier.SET_OF
            and ir.cardinality is qltypes.Cardinality.MANY):
        raise errors.InvalidFunctionDefinitionError(
            f'return cardinality mismatch in function declared to return '
            f'a singleton',
            details=f'Function may return a set with more than one element.',
            context=tree.context,
        )

    return ir
Example #20
def compile_ast_to_ir(tree,
                      schema,
                      *,
                      parent_object_type=None,
                      anchors=None,
                      path_prefix_anchor=None,
                      singletons=None,
                      func_params=None,
                      security_context=None,
                      derived_target_module=None,
                      result_view_name=None,
                      modaliases=None,
                      implicit_id_in_shapes=False,
                      implicit_tid_in_shapes=False,
                      schema_view_mode=False,
                      disable_constant_folding=False,
                      json_parameters=False,
                      session_mode=False,
                      allow_abstract_operators=False,
                      allow_generic_type_output=False):
    """Compile given EdgeQL AST into EdgeDB IR."""

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx.init_context(
        schema=schema,
        anchors=anchors,
        singletons=singletons,
        modaliases=modaliases,
        security_context=security_context,
        func_params=func_params,
        derived_target_module=derived_target_module,
        result_view_name=result_view_name,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        schema_view_mode=schema_view_mode,
        disable_constant_folding=disable_constant_folding,
        json_parameters=json_parameters,
        session_mode=session_mode,
        allow_abstract_operators=allow_abstract_operators,
        allow_generic_type_output=allow_generic_type_output,
        parent_object_type=parent_object_type)

    if path_prefix_anchor is not None:
        path_prefix = anchors[path_prefix_anchor]
        ctx.partial_path_prefix = setgen.class_set(path_prefix, ctx=ctx)
        ctx.partial_path_prefix.anchor = path_prefix_anchor
        ctx.partial_path_prefix.show_as_anchor = path_prefix_anchor

    ir_set = dispatch.compile(tree, ctx=ctx)
    ir_expr = stmtctx.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
Example #21
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    options: Optional[CompilerOptions] = None,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        options:
            An optional :class:`edgeql.compiler.options.CompilerOptions`
            instance specifying compilation options.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if options is None:
        options = CompilerOptions()

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_text:
        debug.header('EdgeQL Text')
        debug.dump_code(qlcodegen.generate_source(tree, pretty=True))

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_ast:
        debug.header('Compiler Options')
        debug.dump(options.__dict__)
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx_mod.init_context(schema=schema, options=options)

    ir_set = dispatch_mod.compile(tree, ctx=ctx)
    ir_expr = stmtctx_mod.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_scope:
        debug.header('Scope Tree')
        print(ctx.path_scope.pdebugformat())

        # Also build and dump a mapping from scope ids to
        # paths that appear directly at them.
        scopes: Dict[int, Set[irast.PathId]] = {
            k: set() for k in
            sorted(node.unique_id
                   for node in ctx.path_scope.descendants
                   if node.unique_id)
        }
        for ir_set in ctx.env.set_types:
            if ir_set.path_scope_id and ir_set.path_scope_id in scopes:
                scopes[ir_set.path_scope_id].add(ir_set.path_id)
        debug.dump(scopes)

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_ir:
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
Example #22
def compile_func_to_ir(func, schema, *,
                       anchors=None,
                       security_context=None,
                       modaliases=None,
                       implicit_id_in_shapes=False,
                       implicit_tid_in_shapes=False):
    """Compile an EdgeQL function into EdgeDB IR."""

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL Function')
        debug.print(func.get_code(schema))

    trees = ql_parser.parse_block(func.get_code(schema) + ';')
    if len(trees) != 1:
        raise errors.InvalidFunctionDefinitionError(
            'functions can only contain one statement')

    tree = trees[0]
    if modaliases:
        ql_parser.append_module_aliases(tree, modaliases)

    if anchors is None:
        anchors = {}

    anchors['__defaults_mask__'] = irast.Parameter(
        name='__defaults_mask__',
        typeref=irtyputils.type_to_typeref(schema, schema.get('std::bytes')))

    func_params = func.get_params(schema)
    pg_params = s_func.PgParams.from_params(schema, func_params)
    for pi, p in enumerate(pg_params.params):
        p_shortname = p.get_shortname(schema)
        anchors[p_shortname] = irast.Parameter(
            name=p_shortname,
            typeref=irtyputils.type_to_typeref(schema, p.get_type(schema)))

        if p.get_default(schema) is None:
            continue

        tree.aliases.append(
            qlast.AliasedExpr(
                alias=p_shortname,
                expr=qlast.IfElse(
                    condition=qlast.BinOp(
                        left=qlast.FunctionCall(
                            func=('std', 'bytes_get_bit'),
                            args=[
                                qlast.FuncArg(
                                    arg=qlast.Path(steps=[
                                        qlast.ObjectRef(
                                            name='__defaults_mask__')
                                    ])),
                                qlast.FuncArg(
                                    arg=qlast.IntegerConstant(value=str(pi)))
                            ]),
                        right=qlast.IntegerConstant(value='0'),
                        op='='),
                    if_expr=qlast.Path(
                        steps=[qlast.ObjectRef(name=p_shortname)]),
                    else_expr=qlast._Optional(expr=p.get_ql_default(schema)))))

    ir = compile_ast_to_ir(
        tree, schema, anchors=anchors, func=func,
        security_context=security_context, modaliases=modaliases,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes)

    return ir
Example #23
def compile_func_to_ir(
    func: s_func.Function,
    schema: s_schema.Schema,
) -> irast.Statement:
    """Compile an EdgeQL function into EdgeDB IR.

    Args:
        func:
            A function object.

        schema:
            A schema instance where the function is defined.

    Returns:
        An instance of :class:`ir.ast.Statement` representing the
        function body.
    """
    if debug.flags.edgeql_compile:
        debug.header('EdgeQL Function')
        debug.print(func.get_code(schema))

    code = func.get_code(schema)
    assert code is not None
    trees = ql_parser.parse_block(code + ';')
    if len(trees) != 1:
        raise errors.InvalidFunctionDefinitionError(
            'functions can only contain one statement')

    tree = trees[0]

    param_anchors, param_aliases = get_param_anchors_for_callable(
        func.get_params(schema),
        schema,
        inlined_defaults=func.has_inlined_defaults(schema))

    tree.aliases.extend(param_aliases)

    ir = compile_ast_to_ir(
        tree,
        schema,
        anchors=param_anchors,  # type: ignore
        # (typing#273)
        func_params=func.get_params(schema),
        # the body of a session_only function can contain calls to
        # other session_only functions
        session_mode=func.get_session_only(schema),
    )

    assert isinstance(ir, irast.Statement)

    return_type = func.get_return_type(schema)
    if (not ir.stype.issubclass(schema, return_type)
            and not ir.stype.implicitly_castable_to(return_type, schema)):
        raise errors.InvalidFunctionDefinitionError(
            f'return type mismatch in function declared to return '
            f'{return_type.get_verbosename(schema)}',
            details=f'Actual return type is '
            f'{ir.stype.get_verbosename(schema)}',
            context=tree.context,
        )

    return_typemod = func.get_return_typemod(schema)
    if (return_typemod is not qltypes.TypeModifier.SET_OF
            and ir.cardinality is qltypes.Cardinality.MANY):
        raise errors.InvalidFunctionDefinitionError(
            f'return cardinality mismatch in function declared to return '
            f'a singleton',
            details=f'Function may return a set with more than one element.',
            context=tree.context,
        )

    return ir
Example #24
def translate(gqlcore: gt.GQLCoreSchema,
              query,
              *,
              operation_name=None,
              variables=None):
    try:
        document_ast = graphql.parse(query)
    except graphql.GraphQLError as err:
        err_loc = (err.locations[0].line, err.locations[0].column)
        raise g_errors.GraphQLCoreError(err.message, loc=err_loc) from None

    if variables is None:
        variables = {}

    if debug.flags.graphql_compile:
        debug.header('GraphQL compiler')
        print(query)
        print(f'variables: {variables}')

    gql_vars = {}
    for n, v in variables.items():
        gql_vars[n] = value_node_from_pyvalue(v)

    validation_errors = graphql.validate(gqlcore.graphql_schema, document_ast)
    if validation_errors:
        err = validation_errors[0]
        if isinstance(err, graphql.GraphQLError):
            err_loc = (err.locations[0].line, err.locations[0].column)
            raise g_errors.GraphQLCoreError(err.message, loc=err_loc)
        else:
            raise err

    context = GraphQLTranslatorContext(gqlcore=gqlcore,
                                       query=query,
                                       variables=gql_vars,
                                       document_ast=document_ast,
                                       operation_name=operation_name)

    edge_forest_map = GraphQLTranslator(context=context).visit(document_ast)

    if debug.flags.graphql_compile:
        for opname, op in sorted(edge_forest_map.items()):
            print(f'== operationName: {opname!r} =============')
            print(ql_codegen.generate_source(op.stmt))

    op = next(iter(edge_forest_map.values()))

    # convert critvars and vars to JSON-like format
    critvars = {}
    for name, val in op.critvars.items():
        if val is not None:
            critvars[name] = json.loads(gqlcodegen.generate_source(val))

    defvars = {}
    for name, val in op.vars.items():
        if val is not None:
            defvars[name] = json.loads(gqlcodegen.generate_source(val))

    # generate the specific result
    return TranspiledOperation(
        edgeql_ast=op.stmt,
        cacheable=True,
        cache_deps_vars=dict(critvars) if critvars else None,
        variables_desc=defvars,
    )
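A hedged usage sketch of the GraphQL translator above; the gqlcore schema wrapper and the GraphQL document are assumptions, not taken from the original code:

# Hypothetical invocation of translate(); `gqlcore` is assumed to be a
# gt.GQLCoreSchema built from the current schema elsewhere.
op = translate(
    gqlcore,
    'query { User { name } }',   # illustrative GraphQL document
    variables={},
)
# TranspiledOperation fields are the ones constructed in the example above.
print(ql_codegen.generate_source(op.edgeql_ast))
print(op.variables_desc)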
Example #25
async def _execute_block(conn, block: dbops.PLBlock) -> None:
    sql_text = block.to_string()
    if debug.flags.bootstrap:
        debug.header('Bootstrap')
        debug.dump_code(sql_text, lexer='sql')
    await _execute(conn, sql_text)
Example #26
    def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
        statements = edgeql.parse_block(ddl)

        current_schema = schema
        target_schema = None
        migration_schema = None
        migration_target = None
        migration_script = []

        for stmt in statements:
            if isinstance(stmt, qlast.StartMigration):
                # START MIGRATION
                if target_schema is None:
                    target_schema = _load_std_schema()

                migration_target = s_ddl.apply_sdl(
                    stmt.target,
                    base_schema=target_schema,
                    current_schema=current_schema,
                    testmode=True,
                )

                migration_schema = current_schema

                ddl_plan = None

            elif isinstance(stmt, qlast.PopulateMigration):
                # POPULATE MIGRATION
                if migration_target is None:
                    raise errors.QueryError(
                        'unexpected POPULATE MIGRATION:'
                        ' not currently in a migration block',
                        context=stmt.context,
                    )

                migration_diff = s_ddl.delta_schemas(
                    migration_schema,
                    migration_target,
                )

                if debug.flags.delta_plan:
                    debug.header('Populate Migration Diff')
                    debug.dump(migration_diff, schema=schema)

                new_ddl = s_ddl.ddlast_from_delta(
                    migration_schema,
                    migration_target,
                    migration_diff,
                )

                migration_script.extend(new_ddl)

                if debug.flags.delta_plan:
                    debug.header('Populate Migration DDL AST')
                    text = []
                    for cmd in new_ddl:
                        debug.dump(cmd)
                        text.append(edgeql.generate_source(cmd, pretty=True))
                    debug.header('Populate Migration DDL Text')
                    debug.dump_code(';\n'.join(text) + ';')

            elif isinstance(stmt, qlast.CommitMigration):
                if migration_target is None:
                    raise errors.QueryError(
                        'unexpected COMMIT MIGRATION:'
                        ' not currently in a migration block',
                        context=stmt.context,
                    )

                last_migration = current_schema.get_last_migration()
                if last_migration:
                    last_migration_ref = s_utils.name_to_ast_ref(
                        last_migration.get_name(current_schema), )
                else:
                    last_migration_ref = None

                create_migration = qlast.CreateMigration(
                    body=qlast.MigrationBody(commands=tuple(migration_script)),
                    parent=last_migration_ref,
                )

                ddl_plan = s_ddl.delta_from_ddl(
                    create_migration,
                    schema=migration_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

                if debug.flags.delta_plan:
                    debug.header('Delta Plan')
                    debug.dump(ddl_plan, schema=schema)

                migration_schema = None
                migration_target = None
                migration_script = []

            elif isinstance(stmt, qlast.DDL):
                if migration_target is not None:
                    migration_script.append(stmt)
                    ddl_plan = None
                else:
                    ddl_plan = s_ddl.delta_from_ddl(
                        stmt,
                        schema=current_schema,
                        modaliases={None: default_module},
                        testmode=True,
                    )

                    if debug.flags.delta_plan:
                        debug.header('Delta Plan')
                        debug.dump(ddl_plan, schema=schema)
            else:
                raise ValueError(
                    f'unexpected {stmt!r} in compiler setup script')

            if ddl_plan is not None:
                context = sd.CommandContext()
                context.testmode = True
                current_schema = ddl_plan.apply(current_schema, context)

        return current_schema
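A minimal sketch of exercising run_ddl from a test class, assuming it is exposed as a classmethod and that _load_std_schema() referenced above is importable; the class name and DDL text are hypothetical:

# Hypothetical driver for run_ddl(); `BaseSchemaTest` and the DDL text are
# illustrations only, and run_ddl is assumed to be a classmethod.
base_schema = _load_std_schema()
schema = BaseSchemaTest.run_ddl(
    base_schema,
    '''
    CREATE TYPE Widget {
        CREATE PROPERTY name -> str;
    };
    ''',
)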
Example #27
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    schema = s_schema.Schema()
    schema, _ = s_mod.Module.create_in_schema(schema, name='__derived__')

    current_block = dbops.PLTopBlock()

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(ddl_cmd,
                                             modaliases={},
                                             schema=schema,
                                             stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)

        types.update(plan.new_types)
        plan.generate(current_block)

    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
        f'''CREATE DATABASE {edbdef.EDGEDB_TEMPLATE_DB} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_TEMPLATE_DB]}'
        }};''',
        f'CREATE DATABASE {edbdef.EDGEDB_SUPERUSER_DB};',
    ])

    context = sd.CommandContext(stdmode=True)

    for ddl_cmd in edgeql.parse_block(stdglobals):
        delta_command = s_ddl.delta_from_ddl(ddl_cmd,
                                             modaliases={},
                                             schema=schema,
                                             stdmode=True)

        schema = delta_command.apply(schema, context)

    refldelta, classlayout, introparts = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(refldelta, schema)

    std_plans.append(refldelta)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=classlayout,
        bootstrap_mode=True,
    )

    compilerctx = edbcompiler.new_compiler_context(reflschema)

    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
            is_internal_reflection=std_plan is refldelta,
            stdmode=True,
        )

    sqltext = current_block.to_string()

    compilerctx = edbcompiler.new_compiler_context(
        reflschema,
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Postgres to plan than a
    # straight flat UNION.
    sql_introparts = []

    for intropart in introparts:
        introtokens = tokenizer.tokenize(intropart.encode())
        units = compiler._compile(ctx=compilerctx, tokens=introtokens)
        assert len(units) == 1 and len(units[0].sql) == 1
        sql_intropart = units[0].sql[0].decode()
        sql_introparts.append(sql_intropart)

    introsql = ' UNION ALL '.join(sql_introparts)

    return StdlibBits(
        stdschema=schema,
        reflschema=reflschema,
        sqltext=sqltext,
        types=types,
        classlayout=classlayout,
        introquery=introsql,
    )
Example #28
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    schema = s_schema.ChainedSchema(
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
    )
    schema, _ = s_mod.Module.create_in_schema(
        schema,
        name=sn.UnqualName('__derived__'),
    )

    current_block = dbops.PLTopBlock()

    std_texts = []
    for modname in s_schema.STD_SOURCES:
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text(sn.UnqualName('_testmode')))

    ddl_text = '\n'.join(std_texts)
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(ddl_cmd,
                                             modaliases={},
                                             schema=schema,
                                             stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)

        types.update(plan.new_types)
        plan.generate(current_block)

    _, schema_version = s_std.make_schema_version(schema)
    schema, plan = _process_delta(schema_version, schema)
    std_plans.append(schema_version)
    plan.generate(current_block)

    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
    ])

    schema = await _execute_edgeql_ddl(schema, stdglobals)

    _, global_schema_version = s_std.make_global_schema_version(schema)
    schema, plan = _process_delta(global_schema_version, schema)
    std_plans.append(global_schema_version)
    plan.generate(current_block)

    reflection = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(reflection.intro_schema_delta,
                                          schema)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema.get_top_schema(),
        reflection_schema=reflschema.get_top_schema(),
        schema_class_layout=reflection.class_layout,  # type: ignore
    )

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
    )

    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
        )

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
        internal_schema_mode=True,
    )
    compiler._compile_schema_storage_in_delta(
        ctx=compilerctx,
        delta=reflection.intro_schema_delta,
        block=subblock,
    )

    sqltext = current_block.to_string()

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Postgres to plan than a
    # straight flat UNION.
    sql_intro_local_parts = []
    sql_intro_global_parts = []
    for intropart in reflection.local_intro_parts:
        sql_intro_local_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ), )

    for intropart in reflection.global_intro_parts:
        sql_intro_global_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ), )

    local_intro_sql = ' UNION ALL '.join(sql_intro_local_parts)
    local_intro_sql = f'''
        WITH intro(c) AS ({local_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    global_intro_sql = ' UNION ALL '.join(sql_intro_global_parts)
    global_intro_sql = f'''
        WITH intro(c) AS ({global_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    return StdlibBits(
        stdschema=schema.get_top_schema(),
        reflschema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        sqltext=sqltext,
        types=types,
        classlayout=reflection.class_layout,
        local_intro_query=local_intro_sql,
        global_intro_query=global_intro_sql,
    )
Example #29
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    modaliases: Optional[Mapping[Optional[str], str]] = None,
    anchors: Optional[Mapping[str, Any]] = None,
    path_prefix_anchor: Optional[str] = None,
    singletons: Sequence[s_types.Type] = (),
    func_params: Optional[s_func.ParameterLikeList] = None,
    result_view_name: Optional[s_name.SchemaName] = None,
    derived_target_module: Optional[str] = None,
    parent_object_type: Optional[s_obj.ObjectMeta] = None,
    implicit_limit: int = 0,
    implicit_id_in_shapes: bool = False,
    implicit_tid_in_shapes: bool = False,
    schema_view_mode: bool = False,
    session_mode: bool = False,
    disable_constant_folding: bool = False,
    json_parameters: bool = False,
    allow_generic_type_output: bool = False,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        modaliases:
            Module name resolution table.  Useful when this EdgeQL
            expression is part of some other construct, such as a
            DDL statement.

        anchors:
            Predefined symbol table.  Maps identifiers
            (or ``qlast.SpecialAnchor`` instances) to specified
            schema objects or IR fragments.

        path_prefix_anchor:
            Symbol name used to resolve the prefix of abbreviated
            path expressions by default.  The symbol must be present
            in *anchors*.

        singletons:
            An optional set of schema types that should be treated
            as singletons in the context of this compilation.

        func_params:
            When compiling a function body, specifies function parameter
            definitions.

        result_view_name:
            Optionally defines the name of the topmost generated view type.
            Useful when compiling schema views.

        derived_target_module:
            The name of the module where derived types and pointers should
            be placed.  When compiling a schema view, this would be the
            name of the module where the view is defined.  By default,
            the special ``__derived__`` module is used.

        parent_object_type:
            Optionally specifies the class of the schema object, in the
            context of which this expression is compiled.  Used in schema
            definitions.

        implicit_limit:
            If set to a non-zero integer value, this will be injected
            as an implicit `LIMIT` clause into each read query.

        implicit_id_in_shapes:
            Whether to include object id property in shapes by default.

        implicit_tid_in_shapes:
            Whether to implicitly include object type id in shapes as
            the ``__tid__`` computable.

        schema_view_mode:
            When compiling a schema view, set this to ``True``.

        session_mode:
            When ``True``, assumes that the expression is compiled in
            the presence of a persistent database session.  Otherwise,
            the use of functions and other constructs that require a
            persistent session will trigger an error.

        disable_constant_folding:
            When ``True``, the compile-time evaluation and substitution
            of constant expressions is disabled.

        json_parameters:
            When ``True``, the argument values are assumed to be in JSON
            format.

        allow_generic_type_output:
            If ``True``, allows the expression to return a generic type.
            By default, expressions must resolve into concrete types.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx.init_context(
        schema=schema,
        anchors=anchors,
        singletons=singletons,
        modaliases=modaliases,
        func_params=func_params,
        derived_target_module=derived_target_module,
        result_view_name=result_view_name,
        implicit_limit=implicit_limit,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        schema_view_mode=schema_view_mode,
        disable_constant_folding=disable_constant_folding,
        json_parameters=json_parameters,
        session_mode=session_mode,
        allow_generic_type_output=allow_generic_type_output,
        parent_object_type=parent_object_type,
    )

    if path_prefix_anchor is not None:
        assert anchors is not None
        path_prefix = anchors[path_prefix_anchor]
        assert isinstance(path_prefix, s_types.Type)
        ctx.partial_path_prefix = setgen.class_set(path_prefix, ctx=ctx)
        ctx.partial_path_prefix.anchor = path_prefix_anchor
        ctx.partial_path_prefix.show_as_anchor = path_prefix_anchor

    ir_set = dispatch.compile(tree, ctx=ctx)
    ir_expr = stmtctx.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
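A hedged usage sketch of the keyword-driven entry point above; the parser call, schema object, and module alias are assumptions based on the earlier examples, not part of this snippet:

# Hypothetical invocation; `schema` and `ql_parser` come from the
# surrounding examples and are not defined here.
tree = ql_parser.parse('SELECT User { name }')
ir = compile_ast_to_ir(
    tree,
    schema,
    modaliases={None: 'default'},   # resolve unqualified names to `default`
    implicit_limit=100,             # inject an implicit LIMIT into read queries
)
assert isinstance(ir, irast.Statement)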