async def read_objtypes(self, schema, only_modules, exclude_modules):
    """Introspect object types from the database into *schema*.

    Regular object types are created first; base types are wired up in
    a deferred second pass (so forward references between types
    resolve), and derived (view) types are read last.

    Returns the updated schema.
    """
    objtype_list = await datasources.schema.objtypes.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    objtype_list = {sn.Name(row['name']): row for row in objtype_list}

    # name -> list of base type names; applied in the second pass below,
    # after every type exists in the schema.
    basemap = {}

    for name, row in objtype_list.items():
        objtype = {
            'id': row['id'],
            'name': name,
            'is_abstract': row['is_abstract'],
            'is_final': row['is_final'],
            'view_type': (s_types.ViewType(row['view_type'])
                          if row['view_type'] else None),
            'expr': (s_expr.Expression(**row['expr'])
                     if row['expr'] else None)
        }

        basemap[name] = row['bases'] or []

        # NOTE: `objtype` is rebound here from the attribute dict to the
        # created schema object.
        schema, objtype = s_objtypes.ObjectType.create_in_schema(
            schema,
            id=objtype['id'],
            name=name,
            is_abstract=objtype['is_abstract'],
            is_final=objtype['is_final'],
            view_type=objtype['view_type'],
            expr=objtype['expr'])

    # Second pass: resolve and set bases now that all types exist.
    for objtype in schema.get_objects(type=s_objtypes.BaseObjectType):
        try:
            bases = basemap[objtype.get_name(schema)]
        except KeyError:
            # Type was not read in this pass (e.g. outside the requested
            # modules) — leave its bases untouched.
            pass
        else:
            schema = objtype.set_field_value(
                schema, 'bases', [schema.get(b) for b in bases])

    # Derived object types are fetched separately; their bases can be
    # resolved immediately since the base types already exist.
    derived = await datasources.schema.objtypes.fetch_derived(
        self.connection)

    for row in derived:
        attrs = dict(row)
        attrs['name'] = sn.SchemaName(attrs['name'])
        attrs['bases'] = [schema.get(b) for b in attrs['bases']]
        attrs['view_type'] = (s_types.ViewType(attrs['view_type'])
                              if attrs['view_type'] else None)
        attrs['expr'] = (s_expr.Expression(**row['expr'])
                         if row['expr'] else None)
        attrs['is_derived'] = True

        schema, objtype = s_objtypes.ObjectType.create_in_schema(
            schema, **attrs)

    return schema
def _parse_subject_indexes(self, subject, subjdecl):
    """Create and attach Index objects declared on *subject*."""
    module_aliases = {None: subject.get_name(self._schema).module}

    for decl in subjdecl.indexes:
        subj_name = subject.get_name(self._schema)

        # Index names are specialized on the owning subject.
        fq_name = subj_name + '.' + self._get_ref_name(decl.name)
        local_name = s_name.get_specialized_name(fq_name, subj_name)
        der_name = s_name.Name(name=local_name, module=subj_name.module)

        _, _, expr_text = qlutils.normalize_tree(
            decl.expression, self._schema,
            modaliases=module_aliases,
            anchors={qlast.Subject: subject},
            inline_anchors=True)

        self._schema, index = s_indexes.Index.create_in_schema(
            self._schema,
            name=der_name,
            expr=s_expr.Expression(text=expr_text),
            subject=subject)
        self._schema = subject.add_index(self._schema, index)
def _parse_field_setters(self, scls,
                         field_decls: typing.List[qlast.Field]):
    """Apply declared field values onto *scls*.

    Expression-typed fields are stored as unevaluated EdgeQL source;
    all other fields are evaluated to a concrete Python value.  Only
    fields marked ``allow_ddl_set`` may be assigned.
    """
    schema_fields = type(scls).get_fields()
    pending = {}

    for decl in field_decls:
        fieldname = decl.name.name
        field = schema_fields.get(fieldname)

        # Reject unknown fields and fields not settable via DDL.
        if field is None or not field.allow_ddl_set:
            raise errors.SchemaError(f'unexpected field {fieldname}',
                                     context=decl.context)

        if issubclass(field.type, s_expr.Expression):
            value = s_expr.Expression(
                text=qlcodegen.generate_source(decl.value))
        else:
            value = qlcompiler.evaluate_ast_to_python_val(
                decl.value, self._schema,
                modaliases=self._mod_aliases)

        pending[fieldname] = value

    if pending:
        self._schema = scls.update(self._schema, pending)
def _parse_expression(val: Dict[str, Any]) -> s_expr.Expression:
    """Reconstruct an Expression from its serialized dict form.

    *val* carries the expression source under ``'text'`` and the ids of
    referenced schema objects under ``'refs'``.
    """
    ref_ids = frozenset(map(uuidgen.UUID, val['refs']))
    ref_set = s_obj.ObjectSet(ref_ids, _private_init=True)
    return s_expr.Expression(text=val['text'], refs=ref_set)
def unpack_expr(self, expr, schema):
    """Rebuild an Expression from a (text, origtext, refs) triple.

    ``refs`` is an iterable of object ids (or None); each id is
    resolved against *schema*.
    """
    text, origtext, ref_ids = expr
    resolved = [schema.get_by_id(i) for i in (ref_ids or [])]
    refs = s_obj.ObjectSet.create(schema, resolved)
    return s_expr.Expression(text=text, origtext=origtext, refs=refs)
def _compile_view(self, viewdecl):
    """Compile a view declaration into a schema view type."""
    # The defining EdgeQL expression lives in the 'expr' field.
    view_ql = next(
        (fd.value for fd in viewdecl.fields if fd.name.name == 'expr'),
        None)

    if view_ql is None:
        raise errors.SchemaError(
            'missing required expression in view definition',
            context=viewdecl.context,
        )

    # Bare expressions are implicitly wrapped in a SELECT.
    if not isinstance(view_ql, qlast.Statement):
        view_ql = qlast.SelectQuery(result=view_ql)

    module_name = self._module.get_name(self._schema)
    viewname = s_name.Name(module=module_name, name=viewdecl.name)

    ir = qlcompiler.compile_ast_to_ir(
        view_ql, self._schema,
        derived_target_module=module_name,
        modaliases=self._mod_aliases,
        result_view_name=viewname,
        schema_view_mode=True)
    self._schema = ir.schema

    scls = self._schema.get(viewname)
    self._parse_field_setters(scls, viewdecl.fields)

    # Record the module aliases in effect at declaration time so the
    # stored expression text is self-contained.
    declared = {
        a.alias: a.module
        for a in view_ql.aliases
        if isinstance(a, qlast.ModuleAliasDecl)
    }
    for alias in set(self._mod_aliases) - set(declared):
        view_ql.aliases.append(
            qlast.ModuleAliasDecl(
                alias=alias,
                module=self._mod_aliases[alias],
            ))

    view_expr = qlcodegen.generate_source(view_ql, pretty=False)
    self._schema = scls.set_field_value(
        self._schema, 'expr', s_expr.Expression(text=view_expr))
    self._schema = scls.set_field_value(
        self._schema, 'view_type', s_types.ViewType.Select)
def _init_constraints(self, constraints):
    """Populate expr, subjectexpr and params on abstract constraints.

    *constraints* maps constraint schema objects to their declaration
    AST nodes.
    """
    for constraint, decl in constraints.items():
        attrs = {f.name.name: f.value for f in decl.fields}
        assert 'subject' not in attrs  # TODO: Add proper validation
        assert 'subjectexpr' not in attrs  # TODO: Add proper validation

        expr = attrs.pop('expr', None)
        if expr is not None:
            self._schema = constraint.set_field_value(
                self._schema, 'expr',
                s_expr.Expression(text=qlcodegen.generate_source(expr)))

        subjexpr = decl.subject
        if subjexpr is not None:
            self._schema = constraint.set_field_value(
                self._schema, 'subjectexpr',
                s_expr.Expression(
                    text=qlcodegen.generate_source(subjexpr)))

        self._schema, params = s_func.FuncParameterList.from_ast(
            self._schema, decl, self._mod_aliases,
            func_fqname=constraint.get_name(self._schema))

        # Constraint parameters may be neither named-only nor defaulted.
        for param in params.objects(self._schema):
            if (param.get_kind(self._schema)
                    is qltypes.ParameterKind.NAMED_ONLY):
                raise errors.InvalidConstraintDefinitionError(
                    'named only parameters are not allowed '
                    'in this context',
                    context=decl.context)

            if param.get_default(self._schema) is not None:
                raise errors.InvalidConstraintDefinitionError(
                    'constraints do not support parameters '
                    'with defaults',
                    context=decl.context)

        self._schema = constraint.set_field_value(
            self._schema, 'params', params)
def _parse_subject_constraints(self, subject, subjdecl):
    # Perform initial collection of constraints defined in subject context.
    # At this point all referenced constraints should be fully initialized.
    for decl in subjdecl.constraints:
        fields = {f.name.name: f.value for f in decl.fields}
        assert 'subject' not in fields  # TODO: Add proper validation

        constr_name = self._get_ref_name(decl.name)

        args = [
            s_expr.Expression(
                text=qlcodegen.generate_source(arg, pretty=False))
            for arg in (decl.args or ())
        ]

        if decl.subject is None:
            subjectexpr = None
        else:
            subjectexpr = s_expr.Expression(
                text=qlcodegen.generate_source(decl.subject))

        modaliases = {None: subject.get_name(self._schema).module}

        self._schema, concrete, _ = \
            s_constr.Constraint.create_concrete_constraint(
                self._schema, subject,
                name=constr_name,
                is_abstract=decl.delegated,
                sourcectx=decl.context,
                subjectexpr=subjectexpr,
                args=args,
                modaliases=modaliases,
            )
        self._schema = subject.add_constraint(self._schema, concrete)
async def read_indexes(self, schema, only_modules, exclude_modules):
    """Introspect schema indexes and cross-check them against the
    actual PostgreSQL indexes.

    Raises:
        errors.SchemaError: if a schema-declared index lacks a backing
            PostgreSQL index, or (when reading the full schema) if
            extraneous PostgreSQL indexes exist.
    """
    pg_index_data = await introspection.tables.fetch_indexes(
        self.connection,
        schema_pattern='edgedb%', index_pattern='%_index')

    # (table_name, index_schema_name) pairs for every EdgeDB-managed
    # PostgreSQL index; matched entries are removed below.
    pg_indexes = set()
    for row in pg_index_data:
        table_name = tuple(row['table_name'])
        for pg_index in self.interpret_indexes(table_name, row['indexes']):
            pg_indexes.add(
                (table_name, pg_index.get_metadata('schemaname'))
            )

    ds = datasources.schema.indexes
    indexes = await ds.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    for index_data in indexes:
        subj = schema.get(index_data['subject_name'])
        subj_table_name = common.get_backend_name(
            schema, subj, catenate=False)
        index_name = sn.Name(index_data['name'])

        try:
            pg_indexes.remove((subj_table_name, index_name))
        except KeyError:
            raise errors.SchemaError(
                'internal metadata inconsistency',
                # BUG FIX: the two adjacent literals previously
                # concatenated to "...but" + "the..." with no space.
                details=f'Index {index_name} is defined in schema, but '
                        f'the corresponding PostgreSQL index is missing.'
            ) from None

        schema, index = s_indexes.Index.create_in_schema(
            schema,
            id=index_data['id'],
            name=index_name,
            subject=subj,
            expr=s_expr.Expression(**index_data['expr']))

        schema = subj.add_index(schema, index)

    # Any entries left in pg_indexes have no schema counterpart.  Only
    # an error when the whole schema was read (no module filters).
    if pg_indexes and not only_modules and not exclude_modules:
        details = f'Extraneous PostgreSQL indexes found: {pg_indexes!r}'
        raise errors.SchemaError(
            'internal metadata inconsistency', details=details)

    return schema
async def read_functions(self, schema, only_modules, exclude_modules):
    """Introspect function definitions from the database into *schema*.

    Returns the updated schema.
    """
    ds = datasources.schema.functions
    func_list = await ds.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    param_list = await ds.fetch_params(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    # Parameters are looked up by name while decoding each function.
    param_map = {p['name']: p for p in param_list}

    for row in func_list:
        name = sn.Name(row['name'])

        schema, params = self._decode_func_params(schema, row, param_map)

        r_type = self.unpack_typeref(row['return_type'], schema)
        if r_type.is_collection():
            # Collection return types must be materialized as
            # schema-level collection types before use.
            schema, _ = r_type.as_schema_coll(schema)

        if row['initial_value']:
            initial_value = s_expr.Expression(**row['initial_value'])
        else:
            initial_value = None

        func_data = {
            'id': row['id'],
            'name': name,
            'language': row['language'],
            'params': params,
            'return_typemod': row['return_typemod'],
            'from_function': row['from_function'],
            'from_expr': row['from_expr'],
            'force_return_cast': row['force_return_cast'],
            'sql_func_has_out_params': row['sql_func_has_out_params'],
            'error_on_null_result': row['error_on_null_result'],
            'code': row['code'],
            'initial_value': initial_value,
            'return_type': r_type,
        }

        schema, _ = s_funcs.Function.create_in_schema(schema, **func_data)

    return schema
def _normalize_objtype_expressions(self, objtype, typedecl):
    """Interpret and validate EdgeQL expressions in type declaration."""
    pointers = itertools.chain(typedecl.links, typedecl.properties)
    for decl in pointers:
        ptr = objtype.getptr(self._schema, decl.name)

        if decl.expr is not None:
            # Computable pointer: normalize its defining expression.
            self._normalize_ptr_default(decl.expr, objtype, ptr, decl)

        for field in decl.fields:
            if field.name.name != 'default':
                continue

            if isinstance(field.value, edgeql.ast.SelectQuery):
                # Query-valued default is handled like a computable.
                self._normalize_ptr_default(
                    field.value, objtype, ptr, decl)
            else:
                _, _, default = qlutils.normalize_tree(
                    field.value, self._schema)
                self._schema = ptr.set_field_value(
                    self._schema, 'default',
                    s_expr.Expression(text=default))
def _normalize_ptr_default(self, expr, source, ptr, ptrdecl):
    """Normalize a pointer's default/computable expression.

    Compiles *expr* in the context of *source*, stores the normalized
    text as the pointer's ``default``, and — for pure computables —
    infers the pointer's target type and cardinality from the compiled
    expression.  Finally validates that the expression's type is
    compatible with the pointer's declared target.
    """
    module_aliases = {None: source.get_name(self._schema).module}

    ir, _, expr_text = qlutils.normalize_tree(
        expr, self._schema,
        modaliases=module_aliases,
        anchors={qlast.Source: source},
        path_prefix_anchor=qlast.Source,
        singletons=[source])

    expr_type = ir.stype

    self._schema = ptr.set_field_value(
        self._schema, 'default',
        s_expr.Expression(text=expr_text))

    if ptr.is_pure_computable(self._schema):
        # Pure computable without explicit target.
        # Fixup pointer target and target property.
        self._schema = ptr.set_field_value(
            self._schema, 'target', expr_type)

        if isinstance(ptr, s_links.Link):
            # Links must point at object types.
            if not isinstance(expr_type, s_objtypes.ObjectType):
                raise errors.InvalidLinkTargetError(
                    f'invalid link target, expected object type, got '
                    f'{expr_type.__class__.__name__}',
                    context=ptrdecl.expr.context)
        else:
            # Properties must point at primitive (scalar/collection)
            # types.
            if not isinstance(expr_type, (s_scalars.ScalarType,
                                          s_types.Collection)):
                raise errors.InvalidPropertyTargetError(
                    f'invalid property type: expected primitive type, '
                    f'got {expr_type.__class__.__name__}',
                    context=ptrdecl.expr.context)

        if isinstance(ptr, s_links.Link):
            # Keep the link's implicit 'target' property in sync with
            # the inferred target type.
            tgt_prop = ptr.getptr(self._schema, 'target')
            self._schema = tgt_prop.set_field_value(
                self._schema, 'target', expr_type)

        # Cardinality is taken from the compiled expression.
        self._schema = ptr.set_field_value(
            self._schema, 'cardinality', ir.cardinality)

        if ptrdecl.cardinality is not ptr.get_cardinality(self._schema):
            if ptrdecl.cardinality is qltypes.Cardinality.ONE:
                raise errors.SchemaError(
                    f'computable expression possibly returns more than '
                    f'one value, but the '
                    f'{ptr.get_schema_class_displayname()} '
                    f'is declared as "single"',
                    context=expr.context)

    # The expression type must be a proper type and a subclass of the
    # declared target (when one is set).
    if (not isinstance(expr_type, s_abc.Type) or
            (ptr.get_target(self._schema) is not None and
             not expr_type.issubclass(
                 self._schema, ptr.get_target(self._schema)))):
        raise errors.SchemaError(
            'default value query must yield a single result of '
            'type {!r}'.format(
                ptr.get_target(self._schema).get_name(self._schema)),
            context=expr.context)
async def read_link_properties(
        self, schema, only_modules, exclude_modules):
    """Introspect link properties from the database into *schema*.

    Properties are created first; their bases are resolved in a second
    pass once all properties exist.  Returns the updated schema.
    """
    link_props = await datasources.schema.links.fetch_properties(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    link_props = {sn.Name(r['name']): r for r in link_props}

    # name -> tuple of base names, applied in the second pass below.
    basemap = {}

    for name, r in link_props.items():
        bases = ()

        if r['bases']:
            bases = tuple(sn.Name(b) for b in r['bases'])
        elif name != 'std::property':
            # Every property other than the root implicitly derives
            # from std::property.
            bases = (sn.Name('std::property'), )

        source = schema.get(r['source']) if r['source'] else None

        if r['derived_from']:
            derived_from = schema.get(r['derived_from'])
        else:
            derived_from = None

        if r['default']:
            default = s_expr.Expression(**r['default'])
        else:
            default = None

        required = r['required']
        target = self.unpack_typeref(r['target'], schema)

        basemap[name] = bases

        if target.is_collection():
            schema, _ = target.as_schema_coll(schema)

        if r['cardinality']:
            cardinality = qltypes.Cardinality(r['cardinality'])
        else:
            cardinality = None

        schema, prop = s_props.Property.create_in_schema(
            schema,
            id=r['id'],
            name=name,
            source=source,
            target=target,
            required=required,
            readonly=r['readonly'],
            computable=r['computable'],
            default=default,
            cardinality=cardinality,
            derived_from=derived_from,
            is_derived=r['is_derived'],
            is_abstract=r['is_abstract'],
            is_final=r['is_final'])

        # The special std::target/std::source properties take their
        # target from the owning link's endpoints.
        if bases and bases[0] in {'std::target', 'std::source'}:
            if bases[0] == 'std::target' and source is not None:
                target = source.get_target(schema)
            elif bases[0] == 'std::source' and source is not None:
                target = source.get_source(schema)

            schema = prop.set_field_value(schema, 'target', target)

        if source:
            schema = prop.acquire_ancestor_inheritance(schema)
            schema = source.add_pointer(schema, prop)

    # Second pass: resolve bases now that all properties exist.
    for prop in schema.get_objects(type=s_props.Property):
        try:
            bases = basemap[prop.get_name(schema)]
        except KeyError:
            pass
        else:
            schema = prop.set_field_value(
                schema, 'bases',
                [schema.get(b, type=s_props.Property) for b in bases])

    return schema
async def read_links(self, schema, only_modules, exclude_modules):
    """Introspect links from the database into *schema*.

    Links are created first; bases and derived_from references are
    resolved in a second pass, and ancestor inheritance is acquired
    last.  Returns the updated schema.
    """
    link_tables = await introspection.tables.fetch_tables(
        self.connection, schema_pattern='edgedb%',
        table_pattern='%_link')
    # NOTE(review): link_tables is built but not referenced below in
    # this block — possibly vestigial; confirm before removing.
    link_tables = {(t['schema'], t['name']): t for t in link_tables}

    links_list = await datasources.schema.links.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    links_list = {sn.Name(r['name']): r for r in links_list}

    # Deferred reference maps, applied after all links exist:
    # basemap: name -> tuple of base names
    # dermap:  name -> name of the link this one was derived from
    basemap = {}
    dermap = {}

    for name, r in links_list.items():
        bases = tuple()

        if r['bases']:
            bases = tuple(sn.Name(b) for b in r['bases'])
        elif name != 'std::link':
            # Every link other than the root implicitly derives from
            # std::link.
            bases = (sn.Name('std::link'), )

        if r['derived_from']:
            dermap[name] = r['derived_from']

        source = schema.get(r['source']) if r['source'] else None

        if r['spectargets']:
            # Multiple specified targets; the actual target is created
            # later as a common (virtual) target.
            spectargets = [schema.get(t) for t in r['spectargets']]
            target = None
        else:
            spectargets = None
            target = self.unpack_typeref(r['target'], schema)

        if r['default']:
            default = s_expr.Expression(**r['default'])
        else:
            default = None

        required = r['required']

        if r['cardinality']:
            cardinality = qltypes.Cardinality(r['cardinality'])
        else:
            cardinality = None

        basemap[name] = bases

        schema, link = s_links.Link.create_in_schema(
            schema,
            id=r['id'],
            name=name,
            source=source,
            target=target,
            spectargets=spectargets,
            cardinality=cardinality,
            required=required,
            is_derived=r['is_derived'],
            is_abstract=r['is_abstract'],
            is_final=r['is_final'],
            readonly=r['readonly'],
            computable=r['computable'],
            default=default)

        if spectargets:
            # Multiple specified targets,
            # target is a virtual derived object
            schema, target = link.create_common_target(
                schema, spectargets)
            schema = link.set_field_value(schema, 'target', target)

        if source:
            schema = source.add_pointer(schema, link)

    # Second pass: resolve bases and derived_from references.
    for link in schema.get_objects(type=s_links.Link):
        try:
            bases = basemap[link.get_name(schema)]
        except KeyError:
            pass
        else:
            schema = link.set_field_value(
                schema, 'bases', [schema.get(b) for b in bases])

        try:
            derived_from = dermap[link.get_name(schema)]
        except KeyError:
            pass
        else:
            schema = link.set_field_value(
                schema, 'derived_from', schema.get(derived_from))

    for link in schema.get_objects(type=s_links.Link):
        schema = link.acquire_ancestor_inheritance(schema)

    return schema
async def read_constraints(self, schema, only_modules, exclude_modules):
    """Introspect constraints from the database into *schema*.

    Constraints are created first; bases are resolved in a second pass
    and ancestor inheritance acquired last.  Returns the updated
    schema.
    """
    ds = datasources.schema
    constraints_list = await ds.constraints.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    constraints_list = {sn.Name(r['name']): r for r in constraints_list}

    param_list = await ds.functions.fetch_params(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    param_map = {p['name']: p for p in param_list}

    # name -> list of base names; applied in the second pass below.
    basemap = {}

    for name, r in constraints_list.items():
        bases = tuple()

        if r['subject']:
            # Concrete constraint: its base is the abstract constraint
            # it specializes.
            bases = (sn.shortname_from_fullname(name), )
        elif r['bases']:
            bases = tuple(sn.Name(b) for b in r['bases'])
        elif name != 'std::constraint':
            bases = (sn.Name('std::constraint'), )

        subject = schema.get(r['subject']) if r['subject'] else None

        # NOTE(review): the `bases` tuple computed above is not what is
        # recorded — basemap stores the raw r['bases'] — so the
        # implicit std::constraint / shortname base is never applied in
        # the second pass.  Confirm whether this is intentional.
        basemap[name] = r['bases'] or []

        if not r['subject']:
            # Only abstract constraints carry a parameter list.
            schema, params = self._decode_func_params(schema, r, param_map)
        else:
            params = None

        schema, constraint = s_constr.Constraint.create_in_schema(
            schema,
            id=r['id'],
            name=name,
            subject=subject,
            params=params,
            is_abstract=r['is_abstract'],
            is_final=r['is_final'],
            expr=s_expr.Expression(**r['expr']) if r['expr'] else None,
            subjectexpr=(s_expr.Expression(**r['subjectexpr'])
                         if r['subjectexpr'] else None),
            finalexpr=(s_expr.Expression(**r['finalexpr'])
                       if r['finalexpr'] else None),
            errmessage=r['errmessage'],
            args=([s_expr.Expression(**arg) for arg in r['args']]
                  if r['args'] is not None else None),
            return_type=self.unpack_typeref(r['return_type'], schema),
            return_typemod=r['return_typemod'],
        )

        if subject:
            schema = subject.add_constraint(schema, constraint)

    # Second pass: resolve bases now that all constraints exist.
    for constraint in schema.get_objects(type=s_constr.Constraint):
        try:
            bases = basemap[constraint.get_name(schema)]
        except KeyError:
            pass
        else:
            schema = constraint.set_field_value(
                schema, 'bases', [schema.get(b) for b in bases])

    for constraint in schema.get_objects(type=s_constr.Constraint):
        schema = constraint.acquire_ancestor_inheritance(schema)

    return schema
async def read_scalars(self, schema, only_modules, exclude_modules):
    """Introspect scalar types from the database into *schema*.

    Also verifies that every non-abstract subclass of ``std::sequence``
    has a backing PostgreSQL sequence, and (when reading the full
    schema) that no orphan sequences exist.

    Raises:
        errors.SchemaError: on a missing or extraneous sequence.
    """
    seqs = await introspection.sequences.fetch(
        self.connection,
        schema_pattern='edgedb%', sequence_pattern='%_sequence')
    seqs = {(s['schema'], s['name']): s for s in seqs}
    seen_seqs = set()

    scalar_list = await datasources.schema.scalars.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    # name -> serialized base list; applied in the second pass below.
    basemap = {}

    for row in scalar_list:
        name = sn.Name(row['name'])

        scalar_data = {
            'id': row['id'],
            'name': name,
            'is_abstract': row['is_abstract'],
            'is_final': row['is_final'],
            'view_type': (s_types.ViewType(row['view_type'])
                          if row['view_type'] else None),
            'bases': row['bases'],
            'default': (s_expr.Expression(**row['default'])
                        if row['default'] else None),
            'expr': (s_expr.Expression(**row['expr'])
                     if row['expr'] else None),
            'enum_values': row['enum_values'],
        }

        # Defer base resolution; 'bases' is popped so create_in_schema
        # does not receive unresolved names.
        if scalar_data['bases']:
            basemap[name] = scalar_data.pop('bases')

        schema, scalar = s_scalars.ScalarType.create_in_schema(
            schema, **scalar_data
        )

    # Second pass: resolve bases now that all scalars exist.
    for scalar in schema.get_objects(type=s_scalars.ScalarType):
        try:
            basename = basemap[scalar.get_name(schema)]
        except KeyError:
            pass
        else:
            schema = scalar.set_field_value(
                schema, 'bases', [schema.get(sn.Name(basename[0]))])

    # Cross-check sequence scalars against actual PostgreSQL sequences.
    sequence = schema.get('std::sequence', None)
    for scalar in schema.get_objects(type=s_scalars.ScalarType):
        if (sequence is not None and
                scalar.issubclass(schema, sequence) and
                not scalar.get_is_abstract(schema)):
            seq_name = common.get_backend_name(
                schema, scalar, catenate=False, aspect='sequence')
            if seq_name not in seqs:
                # BUG FIX: 'incosistency' -> 'inconsistency', matching
                # the spelling used by the other read_* methods.
                msg = 'internal metadata inconsistency'
                details = (f'Missing sequence for sequence '
                           f'scalar {scalar.get_name(schema)}')
                raise errors.SchemaError(msg, details=details)
            seen_seqs.add(seq_name)

    extra_seqs = set(seqs) - seen_seqs
    if extra_seqs and not only_modules and not exclude_modules:
        msg = 'internal metadata inconsistency'
        details = 'Extraneous sequences exist: {}'.format(
            ', '.join(common.qname(*t) for t in extra_seqs))
        raise errors.SchemaError(msg,
                                 details=details)

    return schema