def _compile_alias_expr(
    self,
    expr: qlast.Base,
    classname: sn.SchemaName,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> irast.Statement:
    """Compile an alias expression to IR, caching the result.

    Results are cached in *context* keyed by ``(expr, classname)``, so
    repeated compilation of the same alias within one command context
    is free.
    """
    cached: Optional[irast.Statement] = (context.get_cached(
        (expr, classname)))
    if cached is not None:
        return cached

    # Bare expressions are wrapped into a SELECT so the compiler
    # always sees a complete statement.
    if not isinstance(expr, qlast.Statement):
        expr = qlast.SelectQuery(result=expr)

    # If a type with this name already exists, drop it first so the
    # recompiled alias does not clash with the stale definition.
    existing = schema.get(classname, type=s_types.Type, default=None)
    if existing is not None:
        drop_cmd = existing.init_delta_command(schema, sd.DeleteObject)
        # Dependency verification is suspended: the drop is transient
        # and the alias type is re-created immediately below.
        with context.suspend_dep_verification():
            schema = drop_cmd.apply(schema, context)

    ir = qlcompiler.compile_ast_to_ir(
        expr,
        schema,
        options=qlcompiler.CompilerOptions(
            derived_target_module=classname.module,
            result_view_name=classname,
            modaliases=context.modaliases,
            schema_view_mode=True,
        ),
    )

    context.cache_value((expr, classname), ir)

    return ir  # type: ignore
def run_test(self, *, source, spec, expected):
    """Compile *source* and check the resulting IR cardinality.

    *expected* is the cardinality name, possibly indented/padded.
    """
    tree = qlparser.parse(source)
    ir = compiler.compile_ast_to_ir(tree, self.schema)
    want = qltypes.Cardinality(textwrap.dedent(expected).strip(' \n'))
    self.assertEqual(ir.cardinality, want,
                     'unexpected cardinality:\n' + source)
def run_test(self, *, source, spec, expected):
    """Compile *source* and compare its scope tree rendering to *expected*."""
    tree = qlparser.parse(source)
    ir = compiler.compile_ast_to_ir(
        tree,
        self.schema,
        options=compiler.CompilerOptions(
            apply_query_rewrites=False,
            modaliases={None: 'default'},
        ),
    )

    root = ir.scope_tree
    if len(root.children) != 1:
        self.fail(
            f'Scope tree root is expected to have only one child, got'
            f' {len(root.children)}'
            f' \n{root.pformat()}')

    subtree = next(iter(root.children))
    actual = textwrap.indent(subtree.pformat(), ' ')
    wanted = textwrap.indent(
        textwrap.dedent(expected).strip(' \n'), ' ')

    if actual != wanted:
        diff = '\n'.join(difflib.context_diff(
            wanted.split('\n'), actual.split('\n')))
        self.fail(f'Scope tree does not match the expected result.'
                  f'\nEXPECTED:\n{wanted}\nACTUAL:\n{actual}'
                  f'\nDIFF:\n{diff}')
async def compile_graphql(
    self,
    dbver: bytes,
    gql: str,
    tokens: Optional[
        List[Tuple[gql_lexer.TokenKind, int, int, int, int, str]]],
    substitutions: Optional[Dict[str, Tuple[str, int, int]]],
    operation_name: Optional[str] = None,
    variables: Optional[Mapping[str, object]] = None,
) -> CompiledOperation:
    """Compile a GraphQL query into an executable SQL operation.

    The query may be supplied either as source text (*tokens* is None)
    or pre-lexed tokens.  It is translated to EdgeQL, compiled to IR
    (must yield at most a single result), then compiled to JSON-output
    SQL.

    Fixes: the error message typo "GrqphQL" -> "GraphQL", and the
    `operation_name` annotation (the default is None, so it must be
    Optional[str]).
    """
    db = await self._get_database(dbver)

    if tokens is None:
        ast = graphql.parse_text(gql)
    else:
        ast = graphql.parse_tokens(gql, tokens)

    op = graphql.translate_ast(
        db.gqlcore,  # type: ignore[attr-defined]
        ast,
        variables=variables,
        substitutions=substitutions,
        operation_name=operation_name)

    ir = qlcompiler.compile_ast_to_ir(
        op.edgeql_ast,
        schema=db.schema,
        options=qlcompiler.CompilerOptions(
            json_parameters=True,
            allow_top_level_shape_dml=True,
        ),
    )

    if ir.cardinality.is_multi():
        raise errors.ResultCardinalityMismatchError(
            f'compiled GraphQL query has cardinality {ir.cardinality}, '
            f'expected ONE')

    sql_text, argmap = pg_compiler.compile_ir_to_sql(
        ir,
        pretty=bool(debug.flags.edgeql_compile),
        expected_cardinality_one=True,
        output_format=pg_compiler.OutputFormat.JSON)

    # argmap entries are 1-based parameter positions; build the
    # positional argument-name list.
    args: List[Optional[str]] = [None] * len(argmap)
    for argname, param in argmap.items():
        args[param.index - 1] = argname

    sql_bytes = sql_text.encode()
    sql_hash = self._hash_sql(sql_bytes)

    return CompiledOperation(
        sql=sql_bytes,
        sql_hash=sql_hash,
        sql_args=args,  # type: ignore[arg-type]  # XXX: optional bug?
        dbver=dbver,
        cacheable=op.cacheable,
        cache_deps_vars=op.cache_deps_vars,
        variables=op.variables_desc,
    )
def compiled(cls, expr, schema, *, modaliases=None, parent_object_type=None,
             anchors=None, path_prefix_anchor=None,
             allow_generic_type_output=False,
             func_params=None, singletons=None) -> Expression:
    """Return a new Expression with *expr* compiled to IR against *schema*.

    The compiler keyword arguments are forwarded verbatim to
    ``compile_ast_to_ir``; the returned Expression carries the source
    text, the IR, and the set of schema objects the IR references.
    """
    from edb.edgeql import compiler as qlcompiler

    ir = qlcompiler.compile_ast_to_ir(
        expr.qlast,
        schema=schema,
        modaliases=modaliases,
        anchors=anchors,
        path_prefix_anchor=path_prefix_anchor,
        func_params=func_params,
        parent_object_type=parent_object_type,
        allow_generic_type_output=allow_generic_type_output,
        singletons=singletons,
    )

    return cls(
        text=expr.text,
        origtext=expr.origtext,
        refs=so.ObjectSet.create(schema, ir.schema_refs),
        _qlast=expr.qlast,
        _irast=ir,
    )
def compiled(
    cls: Type[Expression],
    expr: Expression,
    schema: s_schema.Schema,
    *,
    options: Optional[qlcompiler.CompilerOptions] = None,
    as_fragment: bool = False,
) -> Expression:
    """Return a new Expression with *expr* compiled to IR against *schema*.

    When *as_fragment* is true the expression is compiled via the
    fragment entry point; otherwise as a complete statement.
    """
    from edb.ir import ast as irast_

    # Select the compiler entry point; both accept the same arguments.
    compile_fn = (
        qlcompiler.compile_ast_fragment_to_ir
        if as_fragment
        else qlcompiler.compile_ast_to_ir
    )
    ir: irast_.Command = compile_fn(
        expr.qlast,
        schema=schema,
        options=options,
    )

    assert isinstance(ir, irast_.Statement)

    return cls(
        text=expr.text,
        refs=so.ObjectSet.create(schema, ir.schema_refs),
        _qlast=expr.qlast,
        _irast=ir,
    )
def ptr_default_to_col_default(schema, ptr, expr):
    """Compile a pointer default expression into SQL text for a column default.

    Returns the SQL source string, or None when the default cannot be
    expressed as a column default (non-constant expression, or it
    references objects that do not exist yet).
    """
    try:
        # NOTE: This code currently will only be invoked for scalars.
        # Blindly cast the default expression into the ptr target
        # type, validation of the expression type is not the concern
        # of this function.
        eql = ql_parser.parse(expr.text)
        eql = ql_astutils.ensure_qlstmt(
            qlast.TypeCast(
                type=ql_astutils.type_to_ql_typeref(
                    ptr.get_target(schema), schema=schema),
                expr=eql,
            )
        )
        ir = ql_compiler.compile_ast_to_ir(eql, schema)
    except errors.SchemaError:
        # Reference errors mean that this is a non-constant default
        # referring to not-yet-existing objects.
        return None

    # Only constant expressions can become SQL column defaults.
    if not ir_utils.is_const(ir):
        return None

    # singleton_mode: compile as a standalone scalar expression.
    sql_expr = compiler.compile_ir_to_sql_tree(ir, singleton_mode=True)
    sql_text = codegen.SQLSourceGenerator.to_source(sql_expr)

    return sql_text
def run_test(self, *, source, spec, expected):
    """Compile *source* and compare its normalized scope tree to *expected*."""
    tree = qlparser.parse(source)
    ir = compiler.compile_ast_to_ir(tree, self.schema)

    root = ir.scope_tree
    if len(root.children) != 1:
        self.fail(
            f'Scope tree root is expected to have only one child, got'
            f' {len(root.children)}'
            f' \n{root.pformat()}')

    subtree = next(iter(root.children))
    # Union type names embed unstable identifiers; normalize them
    # before the textual comparison.
    actual = self.UNION_NAME_RE.sub(
        '@SID@',
        textwrap.indent(subtree.pformat(), ' '),
    )
    wanted = textwrap.indent(
        textwrap.dedent(expected).strip(' \n'), ' ')

    if actual != wanted:
        diff = '\n'.join(difflib.context_diff(
            wanted.split('\n'), actual.split('\n')))
        self.fail(f'Scope tree does not match the expected result.'
                  f'\nEXPECTED:\n{wanted}\nACTUAL:\n{actual}'
                  f'\nDIFF:\n{diff}')
def run_test(self, *, source, spec, expected):
    """Compile *source* and verify cardinality.

    The expected cardinality is either given for the whole query (by
    default) or, in the form ``<field>: <cardinality>``, for a specific
    element of the top-level shape.
    """
    ir = compiler.compile_ast_to_ir(qlparser.parse(source), self.schema)

    parts = textwrap.dedent(expected).strip(' \n').split(':')
    if len(parts) == 1:
        field = None
        want = qltypes.Cardinality(parts[0])
    elif len(parts) == 2:
        field = parts[0].strip()
        want = qltypes.Cardinality(parts[1].strip())
    else:
        raise ValueError(
            f'unrecognized expected specification: {expected!r}')

    if field is None:
        # Whole-query cardinality check.
        self.assertEqual(ir.cardinality, want,
                         'unexpected cardinality:\n' + source)
        return

    # Find the named element in the top-level shape and check its
    # outbound cardinality.
    for el, _ in ir.expr.expr.result.shape:
        if str(el.path_id.rptr_name()).endswith(field):
            self.assertEqual(el.rptr.ptrref.out_cardinality, want,
                             'unexpected cardinality:\n' + source)
            break
    else:
        raise AssertionError(f'shape field not found: {field!r}')
def _compile_to_tree(self, source):
    """Parse and compile *source* all the way to a SQL AST tree."""
    ir = compiler.compile_ast_to_ir(
        qlparser.parse(source),
        self.schema,
        options=compiler.CompilerOptions(
            modaliases={None: 'default'},
        ),
    )
    return pg_compiler.compile_ir_to_sql_tree(
        ir,
        output_format=pg_compiler.OutputFormat.NATIVE,
    )
def _compile_view(self, viewdecl):
    """Compile a view declaration and register the view in the schema.

    The stored view source is augmented with explicit module alias
    declarations so it stays resolvable outside the current context.
    """
    view_ql = None

    # Locate the required 'expr' field among the declaration fields.
    for field_decl in viewdecl.fields:
        fieldname = field_decl.name.name
        if fieldname == 'expr':
            view_ql = field_decl.value
            break

    if view_ql is None:
        raise errors.SchemaError(
            'missing required expression in view definition',
            context=viewdecl.context,
        )

    # Bare expressions are wrapped into a SELECT statement.
    if not isinstance(view_ql, qlast.Statement):
        view_ql = qlast.SelectQuery(result=view_ql)

    viewname = s_name.Name(module=self._module.get_name(self._schema),
                           name=viewdecl.name)

    ir = qlcompiler.compile_ast_to_ir(
        view_ql,
        self._schema,
        derived_target_module=self._module.get_name(self._schema),
        modaliases=self._mod_aliases,
        result_view_name=viewname,
        schema_view_mode=True)

    # Compilation mutates the schema (creates the view type); adopt it.
    self._schema = ir.schema

    scls = self._schema.get(viewname)
    self._parse_field_setters(scls, viewdecl.fields)

    # Record any module aliases that are active now but not already
    # declared on the statement itself.
    existing_aliases = {}
    for alias in view_ql.aliases:
        if isinstance(alias, qlast.ModuleAliasDecl):
            existing_aliases[alias.alias] = alias.module

    aliases_to_add = set(self._mod_aliases) - set(existing_aliases)
    for alias in aliases_to_add:
        view_ql.aliases.append(
            qlast.ModuleAliasDecl(
                alias=alias,
                module=self._mod_aliases[alias],
            ))

    view_expr = qlcodegen.generate_source(view_ql, pretty=False)

    self._schema = scls.set_field_value(
        self._schema, 'expr', s_expr.Expression(text=view_expr))
    self._schema = scls.set_field_value(
        self._schema, 'view_type', s_types.ViewType.Select)
def run_test(self, *, source, spec, expected):
    """Compile *source* with multiplicity validation and check the result."""
    tree = qlparser.parse(source)
    ir = compiler.compile_ast_to_ir(
        tree,
        self.schema,
        options=compiler.CompilerOptions(validate_multiplicity=True))

    # The expected multiplicity is given for the whole query.
    want = qltypes.Multiplicity(textwrap.dedent(expected).strip(' \n'))
    self.assertEqual(ir.multiplicity, want,
                     'unexpected multiplicity:\n' + source)
def _parse_computable(self, expr, schema, context) -> so.ObjectRef:
    """Compile a computable pointer expression and derive its target.

    Also records the 'default', 'computable' and 'cardinality'
    attribute alterations on this command.  Returns a reference to the
    computable's inferred target type.
    """
    from edb.edgeql import compiler as qlcompiler
    from . import sources as s_sources

    # "source" attribute is set automatically as a refdict back-attr
    parent_ctx = context.get(s_sources.SourceCommandContext)
    source_name = parent_ctx.op.classname

    source = schema.get(source_name, default=None)
    if source is None:
        # The owning type is not in the schema yet, so the computable
        # expression cannot be compiled against it.
        raise errors.SchemaDefinitionError(
            f'cannot define link/property computables in CREATE TYPE',
            hint='Perform a CREATE TYPE without the link '
                 'followed by ALTER TYPE defining the computable',
            context=expr.context
        )

    expr = s_expr.Expression.from_ast(expr, schema, context.modaliases)

    ir = qlcompiler.compile_ast_to_ir(
        expr.qlast,
        schema=schema,
        anchors={qlast.Source: source},
        path_prefix_anchor=qlast.Source,
        singletons=[source],
    )

    target = utils.reduce_to_typeref(schema, ir.stype)

    self.add(
        sd.AlterObjectProperty(
            property='default',
            new_value=expr,
        )
    )
    self.add(
        sd.AlterObjectProperty(
            property='computable',
            new_value=True
        )
    )
    self.add(
        sd.AlterObjectProperty(
            property='cardinality',
            new_value=ir.cardinality
        )
    )

    return target
def compiled(
    cls: Type[Expression],
    expr: Expression,
    schema: s_schema.Schema,
    *,
    as_fragment: bool = False,
    modaliases: Optional[Mapping[Optional[str], str]] = None,
    parent_object_type: Optional[so.ObjectMeta] = None,
    anchors: Optional[
        Mapping[Union[str, qlast_.SpecialAnchorT], so.Object]
    ] = None,
    path_prefix_anchor: Optional[qlast_.SpecialAnchorT] = None,
    allow_generic_type_output: bool = False,
    func_params: Optional[s_func.ParameterLikeList] = None,
    singletons: Sequence[s_types.Type] = (),
) -> Expression:
    """Return a new Expression with *expr* compiled to IR against *schema*.

    When *as_fragment* is true the fragment entry point is used (which
    accepts fewer compiler options); otherwise the expression is
    compiled as a complete statement with all options forwarded.
    """
    from edb.edgeql import compiler as qlcompiler
    from edb.ir import ast as irast_

    if as_fragment:
        ir: irast_.Command = qlcompiler.compile_ast_fragment_to_ir(
            expr.qlast,
            schema=schema,
            modaliases=modaliases,
            anchors=anchors,
            path_prefix_anchor=path_prefix_anchor,
        )
    else:
        ir = qlcompiler.compile_ast_to_ir(
            expr.qlast,
            schema=schema,
            modaliases=modaliases,
            anchors=anchors,
            path_prefix_anchor=path_prefix_anchor,
            func_params=func_params,
            parent_object_type=parent_object_type,
            allow_generic_type_output=allow_generic_type_output,
            singletons=singletons,
        )

    assert isinstance(ir, irast_.Statement)

    return cls(
        text=expr.text,
        origtext=expr.origtext,
        refs=so.ObjectSet.create(schema, ir.schema_refs),
        _qlast=expr.qlast,
        _irast=ir,
    )
def _compile_view_expr(cls, expr, classname, schema, context):
    """Compile *expr* as a schema view, caching by (expr, classname)."""
    from edb.edgeql import compiler as qlcompiler

    cached = context.get_cached((expr, classname))
    if cached is not None:
        return cached

    # Wrap bare expressions into a SELECT statement.
    if not isinstance(expr, qlast.Statement):
        expr = qlast.SelectQuery(result=expr)

    ir = qlcompiler.compile_ast_to_ir(
        expr, schema,
        derived_target_module=classname.module,
        result_view_name=classname,
        modaliases=context.modaliases,
        schema_view_mode=True)
    context.cache_value((expr, classname), ir)

    return ir
def run_test(self, *, source, spec, expected):
    """Compile *source* and check the inferred volatility of the IR."""
    tree = qlparser.parse(source)
    ir = compiler.compile_ast_to_ir(
        tree,
        self.schema,
        options=compiler.CompilerOptions(
            modaliases={None: 'default'},
        ),
    )

    want = qltypes.Volatility(textwrap.dedent(expected).strip(' \n'))
    self.assertEqual(ir.volatility, want,
                     'unexpected volatility:\n' + source)
def run_test(self, *, source, spec, expected):
    """Compile *source* and assert the scope tree root has a single child."""
    tree = qlparser.parse(source)
    ir = compiler.compile_ast_to_ir(
        tree,
        self.schema,
        options=compiler.CompilerOptions(
            apply_query_rewrites=False,
            modaliases={None: 'default'},
        ),
    )

    tree_root = ir.scope_tree
    if len(tree_root.children) != 1:
        self.fail(
            f'Scope tree root is expected to have only one child, got'
            f' {len(tree_root.children)}'
            f' \n{tree_root.pformat()}')
async def compile_graphql(
        self, dbver: int,
        gql: str,
        operation_name: Optional[str] = None,
        variables: Optional[Mapping[str, object]] = None):
    """Compile a GraphQL query into an executable SQL operation.

    Translates the query to EdgeQL, compiles it to IR (which must have
    cardinality ONE), then compiles JSON-output SQL.

    Fixes: the error message typo "GrqphQL" -> "GraphQL", and the
    `operation_name` annotation (the default is None, so it must be
    Optional[str]).  `pretty` is normalized to bool for consistency
    with the other compile_graphql variants.
    """
    db = await self._get_database(dbver)

    op = graphql.translate(
        db.gqlcore,
        gql,
        variables=variables,
        operation_name=operation_name)

    ir = ql_compiler.compile_ast_to_ir(
        op.edgeql_ast,
        schema=db.schema,
        json_parameters=True)

    if ir.cardinality is not qltypes.Cardinality.ONE:
        raise errors.ResultCardinalityMismatchError(
            f'compiled GraphQL query has cardinality {ir.cardinality}, '
            f'expected ONE')

    sql_text, argmap = pg_compiler.compile_ir_to_sql(
        ir,
        pretty=bool(debug.flags.edgeql_compile),
        expected_cardinality_one=True,
        output_format=pg_compiler.OutputFormat.JSON)

    # argmap maps argument names to 1-based SQL parameter positions.
    args = [None] * len(argmap)
    for argname, argpos in argmap.items():
        args[argpos - 1] = argname

    sql_bytes = sql_text.encode()
    sql_hash = self._hash_sql(sql_bytes)

    return CompiledOperation(
        sql=sql_bytes,
        sql_hash=sql_hash,
        sql_args=args,
        dbver=dbver,
        cacheable=op.cacheable,
        cache_deps_vars=op.cache_deps_vars,
        variables=op.variables_desc,
    )
def _normalize_constraint_expr(cls, schema, module_aliases, expr, subject):
    """Parse (if needed) and compile a constraint expression.

    Returns the ``(qltree, ir)`` pair, with *subject* bound as the
    ``__subject__`` anchor.
    """
    from edb.edgeql import compiler as qlcompiler
    from edb.edgeql import parser as qlparser

    # Accept either raw EdgeQL text or an already-parsed AST.
    qltree = (
        qlparser.parse(expr, module_aliases)
        if isinstance(expr, str)
        else expr
    )

    ir = qlcompiler.compile_ast_to_ir(
        qltree,
        schema,
        modaliases=module_aliases,
        anchors={qlast.Subject: subject},
    )
    return qltree, ir
def _compile_alias_expr(
    self,
    expr: qlast.Base,
    classname: sn.QualName,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> irast.Statement:
    """Compile an alias expression to IR, caching per (expr, classname).

    Rejects volatile expressions and drops any pre-existing type with
    the same name before compiling.
    """
    cached: Optional[irast.Statement] = (
        context.get_cached((expr, classname)))
    if cached is not None:
        return cached

    # Wrap bare expressions into a SELECT statement.
    if not isinstance(expr, qlast.Statement):
        expr = qlast.SelectQuery(result=expr)

    # If a type with this name already exists, drop it first so the
    # recompiled alias does not clash with the stale definition.
    existing = schema.get(classname, type=s_types.Type, default=None)
    if existing is not None:
        drop_cmd = existing.init_delta_command(schema, sd.DeleteObject)
        # Dependency verification is suspended: the drop is transient
        # and the alias type is re-created immediately below.
        with context.suspend_dep_verification():
            schema = drop_cmd.apply(schema, context)

    ir = qlcompiler.compile_ast_to_ir(
        expr,
        schema,
        options=qlcompiler.CompilerOptions(
            derived_target_module=classname.module,
            result_view_name=classname,
            modaliases=context.modaliases,
            schema_view_mode=True,
            in_ddl_context_name='alias definition',
        ),
    )

    # Schema-defined computables must be deterministic.
    if ir.volatility == qltypes.Volatility.Volatile:
        srcctx = self.get_attribute_source_context('expr')
        raise errors.SchemaDefinitionError(
            f'volatile functions are not permitted in schema-defined '
            f'computables',
            context=srcctx
        )

    context.cache_value((expr, classname), ir)

    return ir  # type: ignore
def run_test(self, *, source, spec, expected):
    """Compile *source* and compare its normalized scope tree to *expected*."""
    tree = qlparser.parse(source)
    ir = compiler.compile_ast_to_ir(tree, self.schema)

    # The rendering contains unstable UUIDs; normalize them before
    # the textual comparison.
    actual = self.UUID_RE.sub(
        '@SID@',
        textwrap.indent(ir.scope_tree.pformat(), ' '),
    )
    wanted = textwrap.indent(
        textwrap.dedent(expected).strip(' \n'), ' ')

    if actual != wanted:
        diff = '\n'.join(difflib.context_diff(
            wanted.split('\n'), actual.split('\n')))
        self.fail(
            f'Scope tree does not match the expected result.'
            f'\nEXPECTED:\n{wanted}\nACTUAL:\n{actual}'
            f'\nDIFF:\n{diff}')
def _compile_view(self, viewdecl):
    """Compile a view declaration and register the view in the schema."""
    view_ql = None

    # Locate the required 'expr' field among the declaration fields.
    for field_decl in viewdecl.fields:
        fieldname = field_decl.name.name
        if fieldname == 'expr':
            view_ql = field_decl.value
            break

    if view_ql is None:
        raise errors.SchemaError(
            'missing required expression in view definition',
            context=viewdecl.context,
        )

    expr = s_expr.Expression.from_ast(
        view_ql, self._schema, self._mod_aliases)

    viewname = s_name.Name(
        module=self._module.get_name(self._schema),
        name=viewdecl.name)

    ir = qlcompiler.compile_ast_to_ir(
        expr.qlast,
        self._schema,
        derived_target_module=self._module.get_name(self._schema),
        modaliases=self._mod_aliases,
        result_view_name=viewname,
        schema_view_mode=True)

    # Compilation mutates the schema (creates the view type); adopt it.
    self._schema = ir.schema

    scls = self._schema.get(viewname)
    self._parse_field_setters(scls, viewdecl.fields)

    self._schema = scls.set_field_value(
        self._schema, 'expr', expr)
    self._schema = scls.set_field_value(
        self._schema, 'view_type', s_types.ViewType.Select)
def _compile_ql_query(
        self,
        ctx: CompileContext,
        ql: qlast.Base) -> dbstate.BaseQuery:
    """Compile a single EdgeQL query AST into an executable SQL query.

    In single-statement mode the full input/output type descriptors are
    computed; in script mode a bare SimpleQuery is returned and
    parameters are rejected.
    """
    current_tx = ctx.state.current_tx()
    session_config = current_tx.get_session_config()

    native_out_format = (
        ctx.output_format is pg_compiler.OutputFormat.NATIVE)
    single_stmt_mode = ctx.stmt_mode is enums.CompileStatementMode.SINGLE

    # Implicit tid/id shape fields are only injected for native-format
    # single statements.
    implicit_fields = (native_out_format and single_stmt_mode)

    disable_constant_folding = config.lookup(
        config.get_settings(),
        '__internal_no_const_folding',
        session_config,
        allow_unrecognized=True)

    # the capability to execute transaction or session control
    # commands indicates that session mode is available
    session_mode = ctx.state.capability & (enums.Capability.TRANSACTION
                                           | enums.Capability.SESSION)

    ir = ql_compiler.compile_ast_to_ir(
        ql,
        schema=current_tx.get_schema(),
        modaliases=current_tx.get_modaliases(),
        implicit_tid_in_shapes=implicit_fields,
        implicit_id_in_shapes=implicit_fields,
        disable_constant_folding=disable_constant_folding,
        json_parameters=ctx.json_parameters,
        session_mode=session_mode)

    if ir.cardinality is qltypes.Cardinality.ONE:
        result_cardinality = enums.ResultCardinality.ONE
    else:
        result_cardinality = enums.ResultCardinality.MANY
        if ctx.expected_cardinality_one:
            raise errors.ResultCardinalityMismatchError(
                f'the query has cardinality {result_cardinality} '
                f'which does not match the expected cardinality ONE')

    sql_text, argmap = pg_compiler.compile_ir_to_sql(
        ir,
        pretty=debug.flags.edgeql_compile,
        expected_cardinality_one=ctx.expected_cardinality_one,
        output_format=ctx.output_format)

    sql_bytes = sql_text.encode(defines.EDGEDB_ENCODING)

    if single_stmt_mode:
        if native_out_format:
            out_type_data, out_type_id = sertypes.TypeSerializer.describe(
                ir.schema, ir.stype,
                ir.view_shapes, ir.view_shapes_metadata)
        else:
            out_type_data, out_type_id = \
                sertypes.TypeSerializer.describe_json()

        in_array_backend_tids: typing.Optional[
            typing.Mapping[int, int]
        ] = None

        if ir.params:
            array_params = []
            subtypes = [None] * len(ir.params)
            first_param_name = next(iter(ir.params))
            if first_param_name.isdecimal():
                # Positional parameters: names are decimal indexes.
                named = False
                for param_name, param_type in ir.params.items():
                    idx = int(param_name)
                    subtypes[idx] = (param_name, param_type)
                    if param_type.is_array():
                        el_type = param_type.get_element_type(ir.schema)
                        array_params.append(
                            (idx, el_type.get_backend_id(ir.schema)))
            else:
                # Named parameters: positions come from the SQL argmap.
                named = True
                for param_name, param_type in ir.params.items():
                    idx = argmap[param_name] - 1
                    subtypes[idx] = (param_name, param_type)
                    if param_type.is_array():
                        el_type = param_type.get_element_type(ir.schema)
                        array_params.append(
                            (idx, el_type.get_backend_id(ir.schema)))

            # Parameters are described to the client as a tuple type.
            params_type = s_types.Tuple.create(
                ir.schema,
                element_types=collections.OrderedDict(subtypes),
                named=named)

            if array_params:
                in_array_backend_tids = {p[0]: p[1] for p in array_params}
        else:
            params_type = s_types.Tuple.create(
                ir.schema, element_types={}, named=False)

        in_type_data, in_type_id = sertypes.TypeSerializer.describe(
            ir.schema, params_type, {}, {})

        in_type_args = None
        if ctx.json_parameters:
            in_type_args = [None] * len(argmap)
            for argname, argpos in argmap.items():
                in_type_args[argpos - 1] = argname

        sql_hash = self._hash_sql(
            sql_bytes,
            mode=str(ctx.output_format).encode(),
            intype=in_type_id.bytes,
            outtype=out_type_id.bytes)

        return dbstate.Query(
            sql=(sql_bytes,),
            sql_hash=sql_hash,
            cardinality=result_cardinality,
            in_type_id=in_type_id.bytes,
            in_type_data=in_type_data,
            in_type_args=in_type_args,
            in_array_backend_tids=in_array_backend_tids,
            out_type_id=out_type_id.bytes,
            out_type_data=out_type_data,
        )

    else:
        # Script mode: no type descriptors, and parameters are not
        # supported.
        if ir.params:
            raise errors.QueryError(
                'EdgeQL script queries cannot accept parameters')

        return dbstate.SimpleQuery(sql=(sql_bytes,))
def _normalize_ptr_default(self, qltree, source, ptr, ptrdecl):
    """Compile a pointer default/computable expression and fix up *ptr*.

    For pure computables the pointer target (and, for links, the
    'target' property) is inferred from the expression; declared
    cardinality is validated against the inferred one.  Finally the
    expression type is checked against the declared pointer target.
    """
    expr = s_expr.Expression.from_ast(
        qltree, self._schema, self._mod_aliases)

    ir = qlcompiler.compile_ast_to_ir(
        expr.qlast,
        schema=self._schema,
        modaliases=self._mod_aliases,
        anchors={qlast.Source: source},
        path_prefix_anchor=qlast.Source,
        singletons=[source],
    )

    expr_type = ir.stype

    self._schema = ptr.set_field_value(
        self._schema, 'default', expr)

    if ptr.is_pure_computable(self._schema):
        # Pure computable without explicit target.
        # Fixup pointer target and target property.
        self._schema = ptr.set_field_value(
            self._schema, 'target', expr_type)

        if isinstance(ptr, s_links.Link):
            if not isinstance(expr_type, s_objtypes.ObjectType):
                raise errors.InvalidLinkTargetError(
                    f'invalid link target, expected object type, got '
                    f'{expr_type.__class__.__name__}',
                    context=ptrdecl.expr.context
                )
        else:
            if not isinstance(expr_type, (s_scalars.ScalarType,
                                          s_types.Collection)):
                raise errors.InvalidPropertyTargetError(
                    f'invalid property type: expected primitive type, '
                    f'got {expr_type.__class__.__name__}',
                    context=ptrdecl.expr.context
                )

        if isinstance(ptr, s_links.Link):
            # Keep the link's 'target' property in sync with the
            # inferred target type.
            tgt_prop = ptr.getptr(self._schema, 'target')
            self._schema = tgt_prop.set_field_value(
                self._schema, 'target', expr_type)

        self._schema = ptr.set_field_value(
            self._schema, 'cardinality', ir.cardinality)

        if ptrdecl.cardinality is not ptr.get_cardinality(self._schema):
            if ptrdecl.cardinality is qltypes.Cardinality.ONE:
                raise errors.SchemaError(
                    f'computable expression possibly returns more than '
                    f'one value, but the '
                    f'{ptr.get_schema_class_displayname()} '
                    f'is declared as "single"',
                    context=qltree.context)

    # NOTE(review): the placement of this final check at function level
    # (outside the computable branch) is reconstructed — confirm against
    # upstream history.
    if (not isinstance(expr_type, s_abc.Type) or
            (ptr.get_target(self._schema) is not None and
             not expr_type.issubclass(
                 self._schema, ptr.get_target(self._schema)))):
        raise errors.SchemaError(
            'default value query must yield a single result of '
            'type {!r}'.format(
                ptr.get_target(self._schema).get_name(self._schema)),
            context=qltree.context)
def schema_constraint_to_backend_constraint(cls, subject, constraint,
                                            schema, context,
                                            source_context):
    """Translate a schema constraint into a backend (Postgres) constraint.

    If the constraint was inherited, its origin is compiled as well so
    that origin-level expressions can be recorded alongside the local
    ones.  Exclusive ('unique') constraints become relation-scope
    constraints; everything else becomes a row-scope 'check'.
    """
    assert constraint.get_subject(schema) is not None

    constraint_origin = cls._get_constraint_origin(schema, constraint)
    if constraint_origin != constraint:
        origin_subject = constraint_origin.get_subject(schema)
    else:
        origin_subject = subject

    # Only type subjects get the __subject__ path prefix; pointer
    # subjects are compiled without one.
    path_prefix_anchor = (qlast.Subject().name if isinstance(
        subject, s_types.Type) else None)

    ir = qlcompiler.compile_ast_to_ir(
        constraint.get_finalexpr(schema).qlast,
        schema,
        options=qlcompiler.CompilerOptions(
            anchors={qlast.Subject().name: subject},
            path_prefix_anchor=path_prefix_anchor,
            apply_query_rewrites=not context.stdmode,
        ),
    )

    terminal_refs = ir_utils.get_longest_paths(ir.expr.expr.result)
    ref_tables = get_ref_storage_info(ir.schema, terminal_refs)

    if len(ref_tables) > 1:
        raise errors.InvalidConstraintDefinitionError(
            f'Constraint {constraint.get_displayname(schema)} on '
            f'{subject.get_displayname(schema)} is not supported '
            f'because it would depend on multiple objects',
            context=source_context,
        )
    elif ref_tables:
        subject_db_name, _ = next(iter(ref_tables.items()))
    else:
        subject_db_name = common.get_backend_name(schema, subject,
                                                  catenate=False)

    exclusive_expr_refs = cls._get_exclusive_refs(ir)

    pg_constr_data = {
        'subject_db_name': subject_db_name,
        'expressions': [],
        'origin_expressions': [],
    }

    if constraint_origin != constraint:
        # Compile the origin constraint's expression against the origin
        # subject as well.
        origin_path_prefix_anchor = (qlast.Subject().name if isinstance(
            origin_subject, s_types.Type) else None)
        origin_ir = qlcompiler.compile_ast_to_ir(
            constraint_origin.get_finalexpr(schema).qlast,
            schema,
            options=qlcompiler.CompilerOptions(
                anchors={qlast.Subject().name: origin_subject},
                path_prefix_anchor=origin_path_prefix_anchor,
                apply_query_rewrites=not context.stdmode,
            ),
        )

        origin_terminal_refs = ir_utils.get_longest_paths(
            origin_ir.expr.expr.result)
        origin_ref_tables = get_ref_storage_info(origin_ir.schema,
                                                 origin_terminal_refs)

        if origin_ref_tables:
            origin_subject_db_name, _ = (next(
                iter(origin_ref_tables.items())))
        else:
            origin_subject_db_name = common.get_backend_name(
                schema, origin_subject, catenate=False,
            )

        origin_exclusive_expr_refs = cls._get_exclusive_refs(origin_ir)
        pg_constr_data['origin_subject_db_name'] = origin_subject_db_name
    else:
        origin_exclusive_expr_refs = None
        pg_constr_data['origin_subject_db_name'] = subject_db_name

    if exclusive_expr_refs:
        for ref in exclusive_expr_refs:
            exprdata = cls._edgeql_ref_to_pg_constr(
                subject, origin_subject, ref, schema)
            pg_constr_data['expressions'].append(exprdata)

        if origin_exclusive_expr_refs:
            for ref in origin_exclusive_expr_refs:
                exprdata = cls._edgeql_ref_to_pg_constr(
                    subject, origin_subject, ref, schema)
                pg_constr_data['origin_expressions'].append(exprdata)
        else:
            # Not inherited: the origin expressions are the local ones.
            pg_constr_data['origin_expressions'] = (
                pg_constr_data['expressions'])

        pg_constr_data['scope'] = 'relation'
        pg_constr_data['type'] = 'unique'
    else:
        exprdata = cls._edgeql_ref_to_pg_constr(subject, origin_subject,
                                                ir, schema)
        pg_constr_data['expressions'].append(exprdata)

        pg_constr_data['scope'] = 'row'
        pg_constr_data['type'] = 'check'

    # Scalar subjects become domain constraints; everything else
    # becomes a table constraint.
    if isinstance(constraint.get_subject(schema), s_scalars.ScalarType):
        constraint = SchemaDomainConstraint(subject=subject,
                                            constraint=constraint,
                                            pg_constr_data=pg_constr_data,
                                            schema=schema)
    else:
        constraint = SchemaTableConstraint(subject=subject,
                                           constraint=constraint,
                                           pg_constr_data=pg_constr_data,
                                           schema=schema)
    return constraint
def schema_constraint_to_backend_constraint(
        cls, subject, constraint, schema):
    """Translate a schema constraint into a backend (Postgres) constraint.

    Exclusive ('unique') constraints become relation-scope constraints;
    everything else becomes a row-scope 'check'.  Scalar subjects yield
    domain constraints; all other subjects yield table constraints.

    Fix: removed the redundant re-assignments of
    ``pg_constr_data['subject_db_name']`` in both branches — the key is
    already set when the dict is constructed and ``subject_db_name`` is
    never rebound in between.
    """
    assert constraint.get_subject(schema) is not None

    ir = ql_compiler.compile_ast_to_ir(
        constraint.get_finalexpr(schema).qlast,
        schema,
        anchors={qlast.Subject: subject},
    )

    terminal_refs = ir_utils.get_longest_paths(ir.expr.expr.result)
    ref_tables = cls._get_ref_storage_info(ir.schema, terminal_refs)

    if len(ref_tables) > 1:
        raise ValueError(
            'backend: multi-table constraints are not currently supported')
    elif ref_tables:
        subject_db_name, refs = next(iter(ref_tables.items()))
        # Whether the expression is stored in a link table.
        link_bias = refs[0][3].table_type == 'link'
    else:
        subject_db_name = common.get_backend_name(
            schema, subject, catenate=False)
        link_bias = False

    exclusive_expr_refs = cls._get_exclusive_refs(ir)

    pg_constr_data = {
        'subject_db_name': subject_db_name,
        'expressions': []
    }

    exprs = pg_constr_data['expressions']

    if exclusive_expr_refs:
        for ref in exclusive_expr_refs:
            exprdata = cls._edgeql_ref_to_pg_constr(
                subject, ref, schema, link_bias)
            exprs.append(exprdata)

        pg_constr_data['scope'] = 'relation'
        pg_constr_data['type'] = 'unique'
    else:
        exprdata = cls._edgeql_ref_to_pg_constr(
            subject, ir, schema, link_bias)
        exprs.append(exprdata)

        pg_constr_data['scope'] = 'row'
        pg_constr_data['type'] = 'check'

    if isinstance(constraint.get_subject(schema), s_scalars.ScalarType):
        constraint = SchemaDomainConstraint(
            subject=subject,
            constraint=constraint,
            pg_constr_data=pg_constr_data,
            schema=schema)
    else:
        constraint = SchemaTableConstraint(
            subject=subject,
            constraint=constraint,
            pg_constr_data=pg_constr_data,
            schema=schema)
    return constraint
def compile_graphql(
    std_schema: s_schema.FlatSchema,
    user_schema: s_schema.FlatSchema,
    global_schema: s_schema.FlatSchema,
    database_config: Mapping[str, Any],
    system_config: Mapping[str, Any],
    gql: str,
    tokens: Optional[
        List[Tuple[gql_lexer.TokenKind, int, int, int, int, str]]],
    substitutions: Optional[Dict[str, Tuple[str, int, int]]],
    operation_name: Optional[str] = None,
    variables: Optional[Mapping[str, object]] = None,
) -> CompiledOperation:
    """Compile a GraphQL query into an executable SQL operation.

    The query may be supplied as source text (*tokens* is None) or as
    pre-lexed tokens.  It is translated to EdgeQL against the combined
    std/user/global schema, compiled to IR (must yield at most a
    single result), then compiled to JSON-output SQL.

    Fixes: the error message typo "GrqphQL" -> "GraphQL", and the
    `operation_name` annotation (the default is None, so it must be
    Optional[str]).
    """
    if tokens is None:
        ast = graphql.parse_text(gql)
    else:
        ast = graphql.parse_tokens(gql, tokens)

    gqlcore = _get_gqlcore(std_schema, user_schema, global_schema)

    op = graphql.translate_ast(
        gqlcore,
        ast,
        variables=variables,
        substitutions=substitutions,
        operation_name=operation_name)

    ir = qlcompiler.compile_ast_to_ir(
        op.edgeql_ast,
        schema=s_schema.ChainedSchema(
            std_schema,
            user_schema,
            global_schema,
        ),
        options=qlcompiler.CompilerOptions(
            json_parameters=True,
            allow_top_level_shape_dml=True,
        ),
    )

    if ir.cardinality.is_multi():
        raise errors.ResultCardinalityMismatchError(
            f'compiled GraphQL query has cardinality {ir.cardinality}, '
            f'expected ONE')

    sql_text, argmap = pg_compiler.compile_ir_to_sql(
        ir,
        pretty=bool(debug.flags.edgeql_compile),
        expected_cardinality_one=True,
        output_format=pg_compiler.OutputFormat.JSON)

    # argmap entries are 1-based parameter positions; build the
    # positional argument-name list.
    args: List[Optional[str]] = [None] * len(argmap)
    for argname, param in argmap.items():
        args[param.index - 1] = argname

    sql_bytes = sql_text.encode()
    sql_hash = hashlib.sha1(sql_bytes).hexdigest().encode('latin1')

    return CompiledOperation(
        sql=sql_bytes,
        sql_hash=sql_hash,
        sql_args=args,  # type: ignore[arg-type]  # XXX: optional bug?
        cacheable=op.cacheable,
        cache_deps_vars=op.cache_deps_vars,
        variables=op.variables_desc,
    )
def _compile_ql_config_op(self, ctx: CompileContext, ql: qlast.Base):
    """Compile a CONFIGURE statement into a session-state query.

    Backend settings compile to ALTER SYSTEM / SET statements; other
    settings compile to regular SQL.  Non-system ops are also applied
    to the in-memory session config immediately.
    """
    current_tx = ctx.state.current_tx()
    schema = current_tx.get_schema()

    modaliases = ctx.state.current_tx().get_modaliases()
    session_config = ctx.state.current_tx().get_session_config()

    if ql.system and not current_tx.is_implicit():
        raise errors.QueryError(
            'CONFIGURE SYSTEM cannot be executed in a '
            'transaction block')

    ir = ql_compiler.compile_ast_to_ir(
        ql,
        schema=schema,
        modaliases=modaliases,
    )

    is_backend_setting = bool(getattr(ir, 'backend_setting', None))
    requires_restart = bool(getattr(ir, 'requires_restart', False))

    if is_backend_setting:
        if isinstance(ql, qlast.ConfigReset):
            val = None
        else:
            # Postgres is fine with all setting types to be passed
            # as strings.
            value = ireval.evaluate_to_python_val(ir.expr, schema=schema)
            val = pg_ast.StringConstant(val=str(value))

        if ir.system:
            sql_ast = pg_ast.AlterSystem(
                name=ir.backend_setting,
                value=val,
            )
        else:
            sql_ast = pg_ast.Set(
                name=ir.backend_setting,
                value=val,
            )

        sql_text = pg_codegen.generate_source(sql_ast) + ';'

        sql = (sql_text.encode(),)
    else:
        sql_text, _ = pg_compiler.compile_ir_to_sql(
            ir,
            pretty=debug.flags.edgeql_compile,
            output_format=pg_compiler.OutputFormat.JSONB)

        sql = (sql_text.encode(),)

    if not ql.system:
        # Apply the op to the session config eagerly so subsequent
        # statements in this transaction see the new value.
        config_op = ireval.evaluate_to_config_op(ir, schema=schema)

        session_config = config_op.apply(config.get_settings(),
                                         session_config)
        ctx.state.current_tx().update_session_config(session_config)
    else:
        config_op = None

    return dbstate.SessionStateQuery(
        sql=sql,
        is_backend_setting=is_backend_setting,
        is_system_setting=ql.system,
        requires_restart=requires_restart,
        config_op=config_op,
    )
def schema_constraint_to_backend_constraint(
        cls, subject, constraint, schema):
    """Translate a schema constraint into a backend (Postgres) constraint.

    If the constraint was inherited, its origin is compiled as well so
    that origin-level expressions can be recorded alongside the local
    ones.  Exclusive ('unique') constraints become relation-scope
    constraints; everything else becomes a row-scope 'check'.
    """
    assert constraint.get_subject(schema) is not None

    constraint_origin = cls._get_constraint_origin(schema, constraint)
    if constraint_origin != constraint:
        origin_subject = constraint_origin.get_subject(schema)
    else:
        origin_subject = subject

    # Only type subjects get the __subject__ path prefix; pointer
    # subjects are compiled without one.
    path_prefix_anchor = (
        qlast.Subject().name if isinstance(subject, s_types.Type)
        else None
    )

    ir = qlcompiler.compile_ast_to_ir(
        constraint.get_finalexpr(schema).qlast,
        schema,
        options=qlcompiler.CompilerOptions(
            anchors={qlast.Subject().name: subject},
            path_prefix_anchor=path_prefix_anchor,
        ),
    )

    terminal_refs = ir_utils.get_longest_paths(ir.expr.expr.result)
    ref_tables = cls._get_ref_storage_info(ir.schema, terminal_refs)

    if len(ref_tables) > 1:
        raise ValueError(
            'backend: multi-table constraints are not currently supported')
    elif ref_tables:
        subject_db_name, _ = next(iter(ref_tables.items()))
    else:
        subject_db_name = common.get_backend_name(
            schema, subject, catenate=False)

    exclusive_expr_refs = cls._get_exclusive_refs(ir)

    pg_constr_data = {
        'subject_db_name': subject_db_name,
        'expressions': [],
        'origin_expressions': [],
    }

    if constraint_origin != constraint:
        # Compile the origin constraint's expression against the origin
        # subject as well.
        origin_ir = qlcompiler.compile_ast_to_ir(
            constraint_origin.get_finalexpr(schema).qlast,
            schema,
            options=qlcompiler.CompilerOptions(
                anchors={qlast.Subject().name: origin_subject},
            ),
        )

        origin_terminal_refs = ir_utils.get_longest_paths(
            origin_ir.expr.expr.result)
        origin_ref_tables = cls._get_ref_storage_info(
            origin_ir.schema, origin_terminal_refs)

        if origin_ref_tables:
            origin_subject_db_name, _ = (
                next(iter(origin_ref_tables.items()))
            )
        else:
            origin_subject_db_name = common.get_backend_name(
                schema, origin_subject, catenate=False,
            )

        origin_exclusive_expr_refs = cls._get_exclusive_refs(origin_ir)
        pg_constr_data['origin_subject_db_name'] = origin_subject_db_name
    else:
        origin_exclusive_expr_refs = None
        pg_constr_data['origin_subject_db_name'] = subject_db_name

    if exclusive_expr_refs:
        for ref in exclusive_expr_refs:
            exprdata = cls._edgeql_ref_to_pg_constr(
                subject, origin_subject, ref, schema)
            pg_constr_data['expressions'].append(exprdata)

        if origin_exclusive_expr_refs:
            for ref in origin_exclusive_expr_refs:
                exprdata = cls._edgeql_ref_to_pg_constr(
                    subject, origin_subject, ref, schema)
                pg_constr_data['origin_expressions'].append(exprdata)
        else:
            # Not inherited: the origin expressions are the local ones.
            pg_constr_data['origin_expressions'] = (
                pg_constr_data['expressions'])

        pg_constr_data['scope'] = 'relation'
        pg_constr_data['type'] = 'unique'
    else:
        exprdata = cls._edgeql_ref_to_pg_constr(
            subject, origin_subject, ir, schema)
        pg_constr_data['expressions'].append(exprdata)

        pg_constr_data['scope'] = 'row'
        pg_constr_data['type'] = 'check'

    # Scalar subjects become domain constraints; everything else
    # becomes a table constraint.
    if isinstance(constraint.get_subject(schema), s_scalars.ScalarType):
        constraint = SchemaDomainConstraint(
            subject=subject,
            constraint=constraint,
            pg_constr_data=pg_constr_data,
            schema=schema)
    else:
        constraint = SchemaTableConstraint(
            subject=subject,
            constraint=constraint,
            pg_constr_data=pg_constr_data,
            schema=schema)
    return constraint