async def read_objtypes(self, schema, only_modules, exclude_modules):
    """Reflect object types from the database into *schema*.

    Regular object types are created first while their base-class
    names are recorded; a second pass resolves those names into
    schema objects (all types must exist before bases can be
    resolved).  Derived object types are loaded last.  Returns the
    updated schema.
    """
    rows = await datasources.schema.objtypes.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    rows_by_name = {sn.Name(r['name']): r for r in rows}

    # Base references are deferred to a second pass, keyed by name.
    pending_bases = {}

    for type_name, r in rows_by_name.items():
        pending_bases[type_name] = r['bases'] or []

        schema, _ = s_objtypes.ObjectType.create_in_schema(
            schema,
            id=r['id'],
            name=type_name,
            is_abstract=r['is_abstract'],
            is_final=r['is_final'],
            view_type=(s_types.ViewType(r['view_type'])
                       if r['view_type'] else None),
            expr=(s_expr.Expression(**r['expr'])
                  if r['expr'] else None))

    # Second pass: link each reflected type to its base objects.
    for scls in schema.get_objects(type=s_objtypes.BaseObjectType):
        base_names = pending_bases.get(scls.get_name(schema))
        if base_names is not None:
            schema = scls.set_field_value(
                schema, 'bases',
                [schema.get(b) for b in base_names])

    derived_rows = await datasources.schema.objtypes.fetch_derived(
        self.connection)

    for r in derived_rows:
        attrs = dict(r)
        attrs['name'] = sn.SchemaName(attrs['name'])
        attrs['bases'] = [schema.get(b) for b in attrs['bases']]
        attrs['view_type'] = (s_types.ViewType(attrs['view_type'])
                              if attrs['view_type'] else None)
        attrs['expr'] = (s_expr.Expression(**r['expr'])
                         if r['expr'] else None)
        attrs['is_derived'] = True

        schema, _ = s_objtypes.ObjectType.create_in_schema(
            schema, **attrs)

    return schema
async def read_views(self, schema, only_modules, exclude_modules):
    """Reflect tuple-view and array-view types into *schema*.

    Fetches both view flavors from the backend, creates a schema
    object for each row, and returns the updated schema.
    """
    tuple_rows = await datasources.schema.types.fetch_tuple_views(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    for row in tuple_rows:
        # Element types arrive as a packed typeref; unpack before
        # building the ObjectDict of named subtypes.
        subtypes = self.unpack_typeref(row['element_types'], schema)
        schema, _ = s_types.TupleView.create_in_schema(
            schema,
            id=row['id'],
            name=sn.Name(row['name']),
            view_type=s_types.ViewType(row['view_type']),
            view_is_persistent=row['view_is_persistent'],
            named=row['named'],
            expr=self.unpack_expr(row['expr'], schema),
            element_types=s_obj.ObjectDict.create(
                schema, dict(subtypes.iter_subtypes(schema))),
        )

    array_rows = await datasources.schema.types.fetch_array_views(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    for row in array_rows:
        schema, _ = s_types.ArrayView.create_in_schema(
            schema,
            id=row['id'],
            name=sn.Name(row['name']),
            view_type=s_types.ViewType(row['view_type']),
            view_is_persistent=row['view_is_persistent'],
            expr=self.unpack_expr(row['expr'], schema),
            element_type=self.unpack_typeref(
                row['element_type'], schema),
            dimensions=row['dimensions'],
        )

    return schema
async def read_objtypes(self, schema, only_modules, exclude_modules):
    """Reflect object types into *schema*.

    Returns a tuple of ``(schema, exprmap)`` where *exprmap* maps
    each object type name to its raw (not yet unpacked) expression;
    expressions may refer to types that do not exist yet, so the
    caller attaches them after all types are present.
    """
    objtype_list = await datasources.schema.objtypes.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)
    objtype_list = {sn.Name(row['name']): row for row in objtype_list}

    basemap = {}
    exprmap = {}

    for name, row in objtype_list.items():
        # Defer the expression to the caller (see docstring).
        exprmap[name] = row['expr']

        if row['union_of']:
            union_of = [schema.get(t) for t in row['union_of']]
        else:
            union_of = None

        schema, objtype = s_objtypes.ObjectType.create_in_schema(
            schema,
            id=row['id'],
            name=name,
            is_abstract=row['is_abstract'],
            union_of=union_of,
            is_final=row['is_final'],
            view_type=(s_types.ViewType(row['view_type'])
                       if row['view_type'] else None),
            view_is_persistent=row['view_is_persistent'],
            # Fix: inherited_fields was unpacked but never passed to
            # create_in_schema (unlike read_scalars, which forwards
            # it); forward it so inheritance state is reflected.
            inherited_fields=self._unpack_inherited_fields(
                row['inherited_fields']),
        )

        basemap[objtype] = (row['bases'], row['ancestors'])

    # Bases/ancestors can only be resolved once every type exists.
    for scls, (basenames, ancestors) in basemap.items():
        schema = self._set_reflist(schema, scls, 'bases', basenames)
        schema = self._set_reflist(schema, scls, 'ancestors', ancestors)

    return schema, exprmap
async def read_scalars(self, schema, only_modules, exclude_modules):
    """Reflect scalar types into *schema* and validate sequences.

    Scalars descending from ``std::sequence`` are backed by a
    Postgres sequence named ``%_sequence`` in an ``edgedb%`` schema.
    Raises :class:`errors.SchemaError` when a sequence-backed scalar
    has no backing sequence, or (when reading the full schema) when
    orphaned sequences exist.  Returns the updated schema.
    """
    seqs = await introspection.sequences.fetch(
        self.connection,
        schema_pattern='edgedb%', sequence_pattern='%_sequence')
    seqs = {(s['schema'], s['name']): s for s in seqs}
    seen_seqs = set()

    scalar_list = await datasources.schema.scalars.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    basemap = {}

    for row in scalar_list:
        name = sn.Name(row['name'])

        scalar_data = {
            'id': row['id'],
            'inherited_fields':
                self._unpack_inherited_fields(row['inherited_fields']),
            'name': name,
            'is_abstract': row['is_abstract'],
            'is_final': row['is_final'],
            'view_type': (s_types.ViewType(row['view_type'])
                          if row['view_type'] else None),
            'view_is_persistent': row['view_is_persistent'],
            'default': (self.unpack_expr(row['default'], schema)
                        if row['default'] else None),
            'expr': (self.unpack_expr(row['expr'], schema)
                     if row['expr'] else None),
            'enum_values': row['enum_values'],
        }

        schema, scalar = s_scalars.ScalarType.create_in_schema(
            schema, **scalar_data)
        basemap[scalar] = (row['bases'], row['ancestors'])

    # Bases/ancestors can only be resolved once all scalars exist.
    for scls, (basenames, ancestors) in basemap.items():
        schema = self._set_reflist(schema, scls, 'bases', basenames)
        schema = self._set_reflist(schema, scls, 'ancestors', ancestors)

    sequence = schema.get('std::sequence', None)
    for scalar in schema.get_objects(type=s_scalars.ScalarType):
        if (sequence is not None
                and scalar.issubclass(schema, sequence)
                and not scalar.get_is_abstract(schema)):
            seq_name = common.get_backend_name(
                schema, scalar, catenate=False, aspect='sequence')
            if seq_name not in seqs:
                # Fix: message typo "incosistency" -> "inconsistency".
                msg = 'internal metadata inconsistency'
                details = (f'Missing sequence for sequence '
                           f'scalar {scalar.get_name(schema)}')
                raise errors.SchemaError(msg, details=details)
            seen_seqs.add(seq_name)

    extra_seqs = set(seqs) - seen_seqs
    # Extraneous sequences are only an error when the whole schema is
    # being read; a filtered read legitimately skips some scalars.
    if extra_seqs and not only_modules and not exclude_modules:
        msg = 'internal metadata inconsistency'
        details = 'Extraneous sequences exist: {}'.format(
            ', '.join(common.qname(*t) for t in extra_seqs))
        raise errors.SchemaError(msg, details=details)

    return schema