def get_ref_name(self, ref: qlast.ObjectRef) -> sn.Name:
    """Resolve *ref* to a fully-qualified schema name.

    An explicit module on the ref wins; otherwise prefer the current
    module when the object is registered there, falling back to ``std``.
    """
    if ref.module:
        return sn.Name(module=ref.module, name=ref.name)
    local_candidate = f'{self.module}::{ref.name}'
    if local_candidate in self.objects:
        return sn.Name(module=self.module, name=ref.name)
    return sn.Name(module="std", name=ref.name)
def get_local_name(
    self,
    ref: qlast.ObjectRef,
    *,
    type: Optional[Type[qltracer.NamedObject]] = None,
) -> s_name.Name:
    """Resolve *ref* to a name local to this context.

    If *type* is given, the name in the current default module is used
    only when the registered object is actually an instance of *type*;
    otherwise resolution falls back to ``std``.

    Raises:
        TypeError: if *ref* is not a ``qlast.ObjectRef``.
    """
    if not isinstance(ref, qlast.ObjectRef):
        # BUGFIX: the keyword parameter ``type`` shadows the builtin
        # ``type``, so ``type(ref)`` here either called ``None`` or the
        # caller-supplied class.  Use ``__class__`` instead.
        raise TypeError(
            "ObjectRef expected "
            "(got type {!r})".format(ref.__class__.__name__)
        )
    if ref.module:
        return s_name.Name(module=ref.module, name=ref.name)
    if type is None:
        return s_name.Name(module=self.module, name=ref.name)
    # Check if there's a name in the default module actually
    # registered to the right type.
    name = f'{self.module}::{ref.name}'
    if isinstance(self.objects.get(name), type):
        return s_name.Name(module=self.module, name=ref.name)
    return s_name.Name(module='std', name=ref.name)
def get_schema_name_for_pycls(py_cls: Type[s_obj.Object]) -> sn.Name:
    """Map a Python schema class to its reflected schema name.

    Fixes the return annotation: both branches return ``sn.Name``, not
    ``str``.

    Global objects, like Role and Database, live in the ``sys::``
    module; all other schema classes are reflected into ``schema::``.
    """
    py_cls_name = py_cls.__name__
    if issubclass(py_cls, s_obj.GlobalObject):
        return sn.Name(module='sys', name=py_cls_name)
    return sn.Name(module='schema', name=py_cls_name)
def get_local_name(self, ref: qlast.ObjectRef) -> s_name.Name:
    """Qualify *ref* with its module, defaulting to the current module.

    Raises:
        TypeError: if *ref* is not a ``qlast.ObjectRef``.
    """
    if not isinstance(ref, qlast.ObjectRef):
        raise TypeError("ObjectRef expected "
                        "(got type {!r})".format(type(ref).__name__))
    module = ref.module or self.module
    return s_name.Name(module=module, name=ref.name)
def get_ref_name(self, ref: qlast.ObjectRef) -> sn.Name:
    """Resolve *ref*, expanding module aliases; default to std.

    Preference order: explicit (de-aliased) module, then the current
    module if the object exists there, then ``std``.
    """
    if ref.module:
        # Replace the module alias with the real module name.
        actual_module = self.modaliases.get(ref.module, ref.module)
        return sn.Name(module=actual_module, name=ref.name)
    if f'{self.module}::{ref.name}' in self.objects:
        return sn.Name(module=self.module, name=ref.name)
    return sn.Name(module="std", name=ref.name)
def _get_derived_ptr_name(self, ptr_name, source):
    """Build the specialized (derived) name for a pointer of *source*.

    The short name lives in the source's module; the full name is the
    specialized form of that short name relative to the source's name.
    """
    source_name = source.get_name(self._schema)
    shortname = s_name.Name(
        module=source_name.module,
        name=ptr_name,
    )
    return s_name.Name(
        # Reuse source_name instead of performing a second, redundant
        # source.get_name(self._schema) lookup.
        module=source_name.module,
        name=s_name.get_specialized_name(shortname, source_name),
    )
def get_ref_name(self, ref: qlast.ObjectRef) -> s_name.Name:
    """Resolve *ref* to a qualified name, preferring the local module.

    Raises:
        TypeError: if *ref* is not a ``qlast.ObjectRef``.
    """
    if not isinstance(ref, qlast.ObjectRef):
        raise TypeError("ObjectRef expected "
                        "(got type {!r})".format(type(ref).__name__))
    if ref.module:
        return s_name.Name(module=ref.module, name=ref.name)
    if f'{self.module}::{ref.name}' in self.objects:
        return s_name.Name(module=self.module, name=ref.name)
    return s_name.Name(module="std", name=ref.name)
def get_ref_name(self, ref: qlast.BaseObjectRef) -> sn.Name:
    """Resolve *ref* against aliases, params, local objects, then std."""
    # We don't actually expect to handle anything other than
    # ObjectRef here.
    assert isinstance(ref, qlast.ObjectRef)
    if ref.module:
        # Replace the module alias with the real module name.
        return sn.Name(
            module=self.modaliases.get(ref.module, ref.module),
            name=ref.name,
        )
    if ref.name in self.params:
        return self.params[ref.name]
    if f'{self.module}::{ref.name}' in self.objects:
        return sn.Name(module=self.module, name=ref.name)
    return sn.Name(module="std", name=ref.name)
def _decode_func_params(self, schema, row, param_map):
    """Materialize Parameter objects for *row*, creating missing ones.

    Returns the (possibly updated) schema and the parameter list; an
    empty parameter row yields an empty list.
    """
    if not row['params']:
        return schema, []
    params = []
    for pname in row['params']:
        data = param_map.get(pname)
        param = schema.get(pname, None)
        if param is None:
            schema, param = s_funcs.Parameter.create_in_schema(
                schema,
                id=data['id'],
                num=data['num'],
                name=sn.Name(data['name']),
                default=data['default'],
                type=self.unpack_typeref(data['type'], schema),
                typemod=data['typemod'],
                kind=data['kind'])
        params.append(param)
        p_type = param.get_type(schema)
        if p_type.is_collection():
            # Make sure the collection type is materialized in the schema.
            schema, _ = p_type.as_schema_coll(schema)
    return schema, params
def get_schema_object(
        name: Union[str, qlast.BaseObjectRef],
        module: Optional[str] = None,
        *,
        item_type: Optional[Type[s_obj.Object]] = None,
        condition: Optional[Callable[[s_obj.Object], bool]] = None,
        label: Optional[str] = None,
        ctx: context.ContextLevel,
        srcctx: Optional[parsing.ParserContext] = None) -> s_obj.Object:
    """Resolve *name* to a schema object visible in the current context.

    Accepts either a plain string name or an AST reference; ``anytype``
    and ``anytuple`` pseudo-type refs short-circuit to their pseudo-type
    objects.  View (expression alias) variants take precedence over the
    underlying schema type.

    Raises:
        AssertionError: for an unhandled ``BaseObjectRef`` subclass.
        errors.QueryError: (enriched) when the lookup fails.
        errors.SchemaDefinitionError: on self-reference within the
            view currently being defined.
    """
    if isinstance(name, qlast.ObjectRef):
        # Unpack the AST ref into (module, name) strings.
        if srcctx is None:
            srcctx = name.context
        module = name.module
        name = name.name
    elif isinstance(name, qlast.AnyType):
        return s_pseudo.PseudoType.get(ctx.env.schema, 'anytype')
    elif isinstance(name, qlast.AnyTuple):
        return s_pseudo.PseudoType.get(ctx.env.schema, 'anytuple')
    elif isinstance(name, qlast.BaseObjectRef):
        raise AssertionError(f"Unhandled BaseObjectRef subclass: {name!r}")
    if module:
        name = sn.Name(name=name, module=module)
    elif isinstance(name, str):
        # Unqualified string: a locally-defined view may shadow it.
        view = _get_type_variant(name, ctx)
        if view is not None:
            return view
    try:
        stype = ctx.env.get_track_schema_object(
            name=name,
            modaliases=ctx.modaliases,
            type=item_type,
            condition=condition,
            label=label,
        )
    except errors.QueryError as e:
        # Add suggestions/context before re-raising the lookup error.
        s_utils.enrich_schema_lookup_error(e, name,
                                           modaliases=ctx.modaliases,
                                           schema=ctx.env.schema,
                                           item_type=item_type,
                                           condition=condition,
                                           context=srcctx)
        raise
    # Prefer a view variant of the resolved type, if one exists.
    view = _get_type_variant(stype.get_name(ctx.env.schema), ctx)
    if view is not None:
        return view
    elif stype == ctx.defining_view:
        # stype is the view in process of being defined and as such is
        # not yet a valid schema object
        raise errors.SchemaDefinitionError(
            f'illegal self-reference in definition of {name!r}',
            context=srcctx)
    else:
        return stype
async def read_annotation_values(self, schema, only_modules,
                                 exclude_modules):
    """Load annotation values from the backend and attach them.

    Creates an ``AnnotationValue`` per row, adds it to its subject, and
    finally wires up ``bases``/``ancestors`` reference lists.  Returns
    the updated schema.
    """
    annotations = await datasources.schema.annos.fetch_values(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    # Map created annotation value -> (bases, ancestors) names, set
    # after all values exist so forward references resolve.
    basemap = {}
    for r in annotations:
        name = sn.Name(r['name'])
        subject = schema.get(r['subject_name'])
        anno = schema.get(r['annotation_name'])
        value = r['value']
        # Note: ``anno`` is rebound from the Annotation object to the
        # newly created AnnotationValue.
        schema, anno = s_anno.AnnotationValue.create_in_schema(
            schema,
            id=r['id'],
            field_inh_map=self._unpack_field_inh_map(r['field_inh_map']),
            name=name,
            subject=subject,
            annotation=anno,
            value=value,
            inheritable=r['inheritable'],
        )
        basemap[anno] = (r['bases'], r['ancestors'])
        schema = subject.add_annotation(schema, anno)
    for scls, (basenames, ancestors) in basemap.items():
        schema = self._set_reflist(schema, scls, 'bases', basenames)
        schema = self._set_reflist(schema, scls, 'ancestors', ancestors)
    return schema
async def read_indexes(self, schema, only_modules, exclude_modules):
    """Load schema indexes and verify them against PostgreSQL indexes.

    Every locally-defined schema index must have a corresponding
    PostgreSQL index; any extraneous PostgreSQL index (when reading the
    full schema) is reported as a metadata inconsistency.  Returns the
    updated schema.
    """
    pg_index_data = await introspection.tables.fetch_indexes(
        self.connection,
        schema_pattern='edgedb%', index_pattern='%_index')
    # Set of (table_name, index schema-name) pairs actually present
    # in PostgreSQL; entries are removed as they are matched below.
    pg_indexes = set()
    for row in pg_index_data:
        table_name = tuple(row['table_name'])
        for pg_index in self.interpret_indexes(table_name,
                                               row['indexes']):
            pg_indexes.add(
                (table_name, pg_index.get_metadata('schemaname')))
    ds = datasources.schema.indexes
    indexes = await ds.fetch(self.connection,
                             modules=only_modules,
                             exclude_modules=exclude_modules)
    # Map created index -> (bases, ancestors), wired up at the end.
    basemap = {}
    for index_data in indexes:
        subj = schema.get(index_data['subject_name'])
        subj_table_name = common.get_backend_name(schema, subj,
                                                  catenate=False)
        index_name = sn.Name(index_data['name'])
        if index_data['is_local']:
            try:
                pg_indexes.remove((subj_table_name, index_name))
            except KeyError:
                raise errors.SchemaError(
                    'internal metadata inconsistency',
                    details=(
                        f'Index {index_name} is defined in schema, but '
                        f'the corresponding PostgreSQL index is missing.'
                    )) from None
        schema, index = s_indexes.Index.create_in_schema(
            schema,
            id=index_data['id'],
            name=index_name,
            subject=subj,
            is_local=index_data['is_local'],
            inherited_fields=self._unpack_inherited_fields(
                index_data['inherited_fields']),
            expr=self.unpack_expr(index_data['expr'], schema))
        schema = subj.add_index(schema, index)
        basemap[index] = (index_data['bases'], index_data['ancestors'])
    for scls, (basenames, ancestors) in basemap.items():
        schema = self._set_reflist(schema, scls, 'bases', basenames)
        schema = self._set_reflist(schema, scls, 'ancestors', ancestors)
    # Leftover PostgreSQL indexes only count as inconsistencies when
    # reading the whole schema (no module filtering).
    if pg_indexes and not only_modules and not exclude_modules:
        details = f'Extraneous PostgreSQL indexes found: {pg_indexes!r}'
        raise errors.SchemaError(
            'internal metadata inconsistency',
            details=details)
    return schema
async def read_casts(self, schema, only_modules, exclude_modules):
    """Load cast definitions from the backend into *schema*.

    Returns the updated schema.
    """
    self._operator_commutators.clear()
    ds = datasources.schema
    cast_list = await ds.casts.fetch(self.connection,
                                     modules=only_modules,
                                     exclude_modules=exclude_modules)
    for row in cast_list:
        name = sn.Name(row['name'])
        cast_data = {
            'id': row['id'],
            'name': name,
            'from_type': self.unpack_typeref(row['from_type'], schema),
            'to_type': self.unpack_typeref(row['to_type'], schema),
            'language': row['language'],
            'from_cast': row['from_cast'],
            'from_function': row['from_function'],
            'from_expr': row['from_expr'],
            'allow_implicit': row['allow_implicit'],
            'allow_assignment': row['allow_assignment'],
            'code': row['code'],
            'volatility': row['volatility'],
        }
        # The created Cast object itself is not used here (the previous
        # ``oper`` local was never read).
        schema, _ = s_casts.Cast.create_in_schema(schema, **cast_data)
    return schema
def resolve_schema_name(name: str, module: str, *,
                        ctx: context.ContextLevel
                        ) -> typing.Optional[sn.Name]:
    """Translate an aliased module + name into a real schema name.

    Returns ``None`` when *module* is not a known alias in *ctx*.
    """
    real_module = ctx.modaliases.get(module)
    if real_module is None:
        return None
    return sn.Name(name=name, module=real_module)
async def read_annotation_values(self, schema, only_modules,
                                 exclude_modules):
    """Load annotation values and attach them to their subjects.

    Returns the updated schema.
    """
    rows = await datasources.schema.annotations.fetch_values(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    for row in rows:
        subject = schema.get(row['subject_name'])
        annotation = schema.get(row['annotation_name'])
        schema, anno_value = s_anno.AnnotationValue.create_in_schema(
            schema,
            id=row['id'],
            name=sn.Name(row['name']),
            subject=subject,
            annotation=annotation,
            value=row['value'],
            inheritable=row['inheritable'],
        )
        schema = subject.add_annotation(schema, anno_value)
    return schema
def _get_bases(decl, *, ctx):
    """Resolve object bases from the "extends" declaration."""
    if not decl.bases:
        return []
    # Enum supertypes are special: an enumeration must stand alone.
    if any(br.maintype.name == "enum" and br.subtypes
           for br in decl.bases):
        if len(decl.bases) > 1:
            raise errors.SchemaError(
                f"invalid scalar type definition, enumeration must "
                f"be the only supertype specified",
                context=decl.bases[0].context,
            )
        return [s_name.Name("std::anyenum")]
    return [ctx.get_ref_name(base_ref.maintype)
            for base_ref in decl.bases]
def _decode_func_params(self, schema, row, param_map):
    """Materialize Parameter objects for *row*, creating missing ones.

    Returns the (possibly updated) schema and the parameter list; an
    empty parameter row yields an empty list.
    """
    if row['params']:
        params = []
        for r in row['params']:
            param_data = param_map.get(r)
            param = schema.get(r, None)
            if param is None:
                # Default expressions are stored packed; unpack only
                # when present.
                if param_data['default']:
                    default = self.unpack_expr(
                        param_data['default'], schema)
                else:
                    default = None
                # unpack_typeref here threads the schema through
                # (returns an updated schema along with the type).
                schema, ptype = self.unpack_typeref(
                    param_data['type'], schema)
                schema, param = s_funcs.Parameter.create_in_schema(
                    schema,
                    id=param_data['id'],
                    num=param_data['num'],
                    name=sn.Name(param_data['name']),
                    default=default,
                    type=ptype,
                    typemod=param_data['typemod'],
                    kind=param_data['kind'])
            params.append(param)
        return schema, params
    else:
        return schema, []
def get_function_backend_name(name, module_id, catenate=False):
    """Compute the PostgreSQL name for a function.

    Returns either a quoted, catenated name or a (schema, name) pair.
    """
    fullname = s_name.Name(module=str(module_id), name=name.name)
    schema, func_name = convert_name(fullname, catenate=False)
    return qname(schema, func_name) if catenate else (schema, func_name)
def _cb(node):
    """Rewrite an ObjectRef node in place via ``callback``."""
    if not isinstance(node, qlast.ObjectRef):
        return
    name = sn.Name(name=node.name, module=node.module)
    updated = callback(name)
    if updated != name:
        node.name = updated.name
        node.module = updated.module
def get_constraint_backend_name(id, module_id, catenate=True, *,
                                aspect=None):
    """Compute the PostgreSQL name for a constraint object.

    Only the ``trigproc`` aspect is supported.
    """
    if aspect != 'trigproc':
        raise ValueError(
            f'unexpected aspect for constraint backend name: {aspect!r}')
    name = s_name.Name(module=str(module_id), name=str(id))
    return convert_name(name, aspect, catenate)
def get_operator_backend_name(name, module_id, catenate=False, *,
                              aspect=None):
    """Compute the PostgreSQL name for an operator (or its function).

    The ``function`` aspect names the implementing function; the
    ``operator`` aspect (the default) names the operator itself.
    """
    if aspect is None:
        aspect = 'operator'
    if aspect == 'function':
        fullname = s_name.Name(module=str(module_id), name=name.name)
        return convert_name(fullname, 'f', catenate=catenate)
    if aspect != 'operator':
        raise ValueError(
            f'unexpected aspect for operator backend name: {aspect!r}')
    oper_name = _operator_map.get(name)
    if oper_name is None:
        oper_name = name.name
        if re.search(r'[a-zA-Z]', oper_name):
            # Alphanumeric operator, cannot be expressed in Postgres
            # as-is.  Since this is a rare occasion, we hard-code the
            # translation table.
            if oper_name != 'OR':
                raise ValueError(
                    f'cannot represent operator {oper_name} in Postgres')
            oper_name = '|||'
        oper_name = f'`{oper_name}`'
        schema = 'edgedb'
    else:
        schema = ''
    if catenate:
        return qname(schema, oper_name)
    return schema, oper_name
def get_cast_backend_name(name, module_id, catenate=False, *,
                          aspect=None):
    """Compute the PostgreSQL name of the function implementing a cast.

    Only the ``function`` aspect is supported.
    """
    if aspect != 'function':
        raise ValueError(
            f'unexpected aspect for cast backend name: {aspect!r}')
    fullname = s_name.Name(module=str(module_id), name=name.name)
    return convert_name(fullname, 'f', catenate=catenate)
def get_expression_path_id(
        stype: s_types.Type, alias: Optional[str] = None, *,
        ctx: context.ContextLevel) -> irast.PathId:
    """Create a path id for an expression, minting an alias if needed."""
    typename = s_name.Name(
        module='__derived__',
        # Without an explicit alias, generate a fresh 'expr' alias.
        name=alias if alias is not None else ctx.aliases.get('expr'),
    )
    return get_path_id(stype, typename=typename, ctx=ctx)
def _parse_subject_indexes(self, subject, subjdecl):
    """Create Index objects for every index declared on *subject*.

    Each index gets a specialized (derived) name in the subject's
    module, and its expression is normalized with the subject bound as
    the ``__subject__`` anchor.
    """
    # Unqualified names in index expressions resolve to the subject's
    # own module.
    module_aliases = {None: subject.get_name(self._schema).module}
    for indexdecl in subjdecl.indexes:
        index_name = self._get_ref_name(indexdecl.name)
        # Qualify the index name with the subject's full name
        # (Name + str concatenation yields the dotted form).
        index_name = subject.get_name(self._schema) + '.' + index_name
        local_name = s_name.get_specialized_name(
            index_name, subject.get_name(self._schema))
        der_name = s_name.Name(name=local_name,
                               module=subject.get_name(
                                   self._schema).module)
        # Normalize the expression source text; only the rendered text
        # is kept.
        _, _, index_expr = qlutils.normalize_tree(
            indexdecl.expression, self._schema,
            modaliases=module_aliases,
            anchors={qlast.Subject: subject},
            inline_anchors=True)
        self._schema, index = s_indexes.Index.create_in_schema(
            self._schema,
            name=der_name,
            expr=s_expr.Expression(text=index_expr),
            subject=subject)
        self._schema = subject.add_index(self._schema, index)
def compile_GroupQuery(
        expr: qlast.Base, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile a GROUP statement into IR.

    NOTE(review): the first statement unconditionally raises
    ``UnsupportedFeatureError``, so everything below it is currently
    unreachable — it is the intended implementation, kept for when the
    feature is enabled.
    """
    raise errors.UnsupportedFeatureError(
        "'GROUP' statement is not currently implemented",
        context=expr.context)

    with ctx.subquery() as ictx:
        stmt = irast.GroupStmt()
        init_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

        # The synthetic group binding gets a fresh derived type name.
        typename = s_name.Name(
            module='__group__', name=ctx.aliases.get('Group'))
        obj = ctx.env.get_track_schema_object('std::BaseObject')
        stmt.group_path_id = pathctx.get_path_id(
            obj, typename=typename, ctx=ictx)

        pathctx.register_set_in_scope(stmt.group_path_id, ctx=ictx)

        # Compile the grouping subject in its own fenced scope.
        with ictx.newscope(fenced=True) as subjctx:
            subject_set = setgen.scoped_set(
                dispatch.compile(expr.subject, ctx=subjctx), ctx=subjctx)

            alias = expr.subject_alias or subject_set.path_id.target_name_hint
            stmt.subject = stmtctx.declare_inline_view(
                subject_set, alias, ctx=ictx)

            with subjctx.new() as grpctx:
                stmt.groupby = compile_groupby_clause(
                    expr.groupby, ctx=grpctx)

        # The result shape is compiled as a nested SELECT.
        with ictx.subquery() as isctx, isctx.newscope(fenced=True) as sctx:
            o_stmt = sctx.stmt = irast.SelectStmt()

            o_stmt.result = compile_result_clause(
                expr.result,
                view_scls=ctx.view_scls,
                view_rptr=ctx.view_rptr,
                result_alias=expr.result_alias,
                view_name=ctx.toplevel_result_view_name,
                ctx=sctx)

            clauses.compile_where_clause(
                o_stmt, expr.where, ctx=sctx)

            o_stmt.orderby = clauses.compile_orderby_clause(
                expr.orderby, ctx=sctx)

            o_stmt.offset = clauses.compile_limit_offset_clause(
                expr.offset, ctx=sctx)

            o_stmt.limit = clauses.compile_limit_offset_clause(
                expr.limit, ctx=sctx)

            stmt.result = setgen.scoped_set(o_stmt, ctx=sctx)

        result = fini_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

    return result
async def read_constraints(self, schema, only_modules, exclude_modules):
    """Load constraint definitions from the backend into *schema*.

    Returns the updated schema and a mapping of constraint -> packed
    expression fields ('expr', 'subjectexpr', 'finalexpr') for deferred
    unpacking by the caller.
    """
    ds = datasources.schema
    constraints_list = await ds.constraints.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    constraints_list = {sn.Name(r['name']): r for r in constraints_list}

    param_list = await ds.functions.fetch_params(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    param_map = {p['name']: p for p in param_list}

    # Map created constraint -> (bases, ancestors) names, wired up
    # after all constraints exist.
    basemap = {}
    exprmap = collections.defaultdict(dict)

    for name, r in constraints_list.items():
        # Abstract constraints have no subject.
        subject = schema.get(r['subject']) if r['subject'] else None
        schema, params = self._decode_func_params(schema, r, param_map)

        schema, constraint = s_constr.Constraint.create_in_schema(
            schema,
            id=r['id'],
            inherited_fields=self._unpack_inherited_fields(
                r['inherited_fields']),
            name=name,
            subject=subject,
            params=params,
            is_abstract=r['is_abstract'],
            is_final=r['is_final'],
            is_local=r['is_local'],
            delegated=r['delegated'],
            errmessage=r['errmessage'],
            args=([self.unpack_expr(arg, schema) for arg in r['args']]
                  if r['args'] is not None else None),
            return_type=self.unpack_typeref(r['return_type'], schema),
            return_typemod=r['return_typemod'],
        )

        # Expression fields are returned packed for the caller to
        # process later.
        if r['expr']:
            exprmap[constraint]['expr'] = r['expr']
        if r['subjectexpr']:
            exprmap[constraint]['subjectexpr'] = r['subjectexpr']
        if r['finalexpr']:
            exprmap[constraint]['finalexpr'] = r['finalexpr']

        basemap[constraint] = (r['bases'], r['ancestors'])

        if subject:
            schema = subject.add_constraint(schema, constraint)

    for scls, (basenames, ancestors) in basemap.items():
        schema = self._set_reflist(schema, scls, 'bases', basenames)
        schema = self._set_reflist(schema, scls, 'ancestors', ancestors)

    return schema, exprmap
def get_scalar_backend_name(id, module_id, catenate=True, *, aspect=None):
    """Compute the PostgreSQL name for a scalar type.

    Supported aspects: ``domain`` (default), ``sequence``, ``enum``.
    """
    aspect = 'domain' if aspect is None else aspect
    if aspect not in ('domain', 'sequence', 'enum'):
        raise ValueError(
            f'unexpected aspect for scalar backend name: {aspect!r}')
    backend_name = s_name.Name(module=str(module_id), name=str(id))
    return convert_name(backend_name, aspect, catenate)
def __init__(self, source, target, *, optional, ancestral, cardinality):
    """Initialize a synthetic type-indirection pointer."""
    ptr_kind = 'optindirection' if optional else 'indirection'
    self._name = sn.Name(module='__type__', name=ptr_kind)
    self._source = source
    self._target = target
    self._cardinality = cardinality
    self._optional = optional
    self._ancestral = ancestral
async def read_views(self, schema, only_modules, exclude_modules):
    """Load tuple and array expression aliases (views) into *schema*.

    Returns the updated schema and a mapping of created alias ->
    packed 'expr' field for deferred unpacking by the caller.
    """
    tuple_views = await datasources.schema.types.fetch_tuple_views(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    # Map created alias -> packed expression fields for the caller.
    exprmap = collections.defaultdict(dict)

    for r in tuple_views:
        eltypes = self.unpack_typeref(r['element_types'], schema)

        schema, tview = s_types.TupleExprAlias.create_in_schema(
            schema,
            id=r['id'],
            name=sn.Name(r['name']),
            expr_type=s_types.ExprType(r['expr_type']),
            alias_is_persistent=r['alias_is_persistent'],
            named=r['named'],
            element_types=s_obj.ObjectDict.create(
                schema, dict(eltypes.iter_subtypes(schema))),
        )

        exprmap[tview]['expr'] = r['expr']

    array_views = await datasources.schema.types.fetch_array_views(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    for r in array_views:
        eltype = self.unpack_typeref(r['element_type'], schema)

        schema, tview = s_types.ArrayExprAlias.create_in_schema(
            schema,
            id=r['id'],
            name=sn.Name(r['name']),
            expr_type=s_types.ExprType(r['expr_type']),
            alias_is_persistent=r['alias_is_persistent'],
            element_type=eltype,
            dimensions=r['dimensions'],
        )

        exprmap[tview]['expr'] = r['expr']

    return schema, exprmap
def _get_ref_name(self, ref):
    """Return a qualified Name, or the bare string when no module given.

    Raises:
        TypeError: if *ref* is not an ``edgeql.ast.ObjectRef``.
    """
    if not isinstance(ref, edgeql.ast.ObjectRef):
        raise TypeError('ObjectRef expected '
                        '(got type {!r})'.format(type(ref).__name__))
    if ref.module:
        return s_name.Name(module=ref.module, name=ref.name)
    return ref.name