Example no. 1
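    # Loads policy event rows from the backend, creates s_policy.Event
    # objects, then resolves base classes and ancestor inheritance.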
    async def read_events(self, schema):
        events = await datasources.schema.policy.fetch_events(self.connection)

        basemap = {}

        for r in events:
            name = sn.Name(r['name'])
            title = self.json_to_word_combination(r['title'])
            description = r['description']

            if r['bases']:
                bases = tuple(sn.Name(b) for b in r['bases'])
            elif name != 'std::event':
                bases = (sn.Name('std::event'), )
            else:
                bases = tuple()

            basemap[name] = bases

            event = s_policy.Event(name=name,
                                   title=title,
                                   description=description)
            schema.add(event)

        for event in schema.get_objects(type='event'):
            try:
                bases = basemap[event.name]
            except KeyError:
                pass
            else:
                event.bases = [schema.get(b) for b in bases]

        for event in schema.get_objects(type='event'):
            event.acquire_ancestor_inheritance(schema)
Example no. 2
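    # Loads constraint rows keyed by name, creates s_constr.Constraint
    # objects attached to their subjects, then resolves bases and inheritance.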
    async def read_constraints(self, schema):
        constraints_list = await datasources.schema.constraints.fetch(
            self.connection)
        constraints_list = collections.OrderedDict((sn.Name(r['name']), r)
                                                   for r in constraints_list)

        basemap = {}

        for name, r in constraints_list.items():
            bases = tuple()

            if r['subject']:
                bases = (s_constr.Constraint.get_shortname(name), )
            elif r['bases']:
                bases = tuple(sn.Name(b) for b in r['bases'])
            elif name != 'std::constraint':
                bases = (sn.Name('std::constraint'), )

            title = self.json_to_word_combination(r['title'])
            description = r['description']
            subject = schema.get(r['subject']) if r['subject'] else None

            basemap[name] = bases

            paramtypes = None
            if r['paramtypes']:
                paramtypes = [
                    s[1] for s in self.unpack_typedesc_nodes(
                        r['paramtypes']['types'], schema)]

            constraint = s_constr.Constraint(
                name=name, subject=subject, title=title,
                description=description, is_abstract=r['is_abstract'],
                is_final=r['is_final'], expr=r['expr'],
                subjectexpr=r['subjectexpr'],
                localfinalexpr=r['localfinalexpr'], finalexpr=r['finalexpr'],
                errmessage=r['errmessage'], paramtypes=paramtypes,
                varparam=r['varparam'], args=r['args'])

            if subject:
                subject.add_constraint(constraint)

            schema.add(constraint)

        for constraint in schema.get_objects(type='constraint'):
            try:
                bases = basemap[constraint.name]
            except KeyError:
                pass
            else:
                constraint.bases = [schema.get(b) for b in bases]

        for constraint in schema.get_objects(type='constraint'):
            constraint.acquire_ancestor_inheritance(schema)
Example no. 3
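    # Lazily builds a cache mapping type names to backend ids and ids back
    # to names, covering both object types and scalars.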
    async def get_type_map(self, force_reload=False):
        if not self.type_cache or force_reload:
            cl_ds = datasources.schema.objtypes

            for row in await cl_ds.fetch(self.connection):
                self.type_cache[row['name']] = row['id']
                self.type_cache[row['id']] = sn.Name(row['name'])

            cl_ds = datasources.schema.scalars

            for row in await cl_ds.fetch(self.connection):
                self.type_cache[row['name']] = row['id']
                self.type_cache[row['id']] = sn.Name(row['name'])

        return self.type_cache
Example no. 4
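    # Synthetic type-indirection pointer: records its '__type__'-module name
    # together with source, target, cardinality and optionality.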
    def __init__(self, source, target, *, optional, cardinality):
        name = 'optindirection' if optional else 'indirection'
        self._name = s_name.Name(module='__type__', name=name)
        self._source = source
        self._target = target
        self._cardinality = cardinality
        self._optional = optional
Example no. 5
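    # Loads function rows, unpacks parameter type descriptors and adds
    # s_funcs.Function objects to the schema.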
    async def read_functions(self, schema):
        func_list = await datasources.schema.functions.fetch(self.connection)

        for row in func_list:
            name = sn.Name(row['name'])

            paramtypes = None
            if row['paramtypes']:
                paramtypes = [
                    s[1] for s in self.unpack_typedesc_nodes(
                        row['paramtypes']['types'], schema)]

            func_data = {
                'name': name,
                'title': self.json_to_word_combination(row['title']),
                'description': row['description'],
                'aggregate': row['aggregate'],
                'set_returning': row['set_returning'],
                'varparam': row['varparam'],
                'from_function': row['from_function'],
                'code': row['code'],
                'initial_value': row['initial_value'],
                'paramtypes': paramtypes,
                'paramnames': row['paramnames'] if row['paramnames'] else [],
                'paramdefaults':
                    row['paramdefaults'] if row['paramdefaults'] else [],
                'paramkinds':
                    row['paramkinds'] if row['paramkinds'] else [],
                'returntype': self.unpack_typeref(row['returntype'], schema)
            }

            func = s_funcs.Function(**func_data)
            schema.add(func)
Example no. 6
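    # Loads object type rows, creates ObjectType entries, resolves their
    # bases, then adds derived (view) types fetched separately.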
    async def read_objtypes(self, schema, only_modules, exclude_modules):
        objtype_list = await datasources.schema.objtypes.fetch(
            self.connection,
            modules=only_modules,
            exclude_modules=exclude_modules)
        objtype_list = collections.OrderedDict(
            (sn.Name(row['name']), row) for row in objtype_list)

        basemap = {}

        for name, row in objtype_list.items():
            objtype = {
                'id': row['id'],
                'name': name,
                'is_abstract': row['is_abstract'],
                'is_final': row['is_final'],
                'view_type': (s_types.ViewType(row['view_type'])
                              if row['view_type'] else None),
                'expr': (s_expr.ExpressionText(row['expr'])
                         if row['expr'] else None),
            }

            basemap[name] = row['bases'] or []

            schema, objtype = s_objtypes.ObjectType.create_in_schema(
                schema,
                id=objtype['id'],
                name=name,
                is_abstract=objtype['is_abstract'],
                is_final=objtype['is_final'],
                view_type=objtype['view_type'],
                expr=objtype['expr'])

        for objtype in schema.get_objects(type=s_objtypes.BaseObjectType):
            try:
                bases = basemap[objtype.get_name(schema)]
            except KeyError:
                pass
            else:
                schema = objtype.set_field_value(
                    schema, 'bases', [schema.get(b) for b in bases])

        derived = await datasources.schema.objtypes.fetch_derived(
            self.connection)

        for row in derived:
            attrs = dict(row)
            attrs['name'] = sn.SchemaName(attrs['name'])
            attrs['bases'] = [schema.get(b) for b in attrs['bases']]
            attrs['view_type'] = (s_types.ViewType(attrs['view_type'])
                                  if attrs['view_type'] else None)
            attrs['is_derived'] = True
            schema, objtype = s_objtypes.ObjectType.create_in_schema(
                schema, **attrs)

        return schema
Example no. 7
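    # Loads attribute value rows and creates AttributeValue objects linked
    # to their subjects.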
    async def read_attribute_values(self, schema, only_modules,
                                    exclude_modules):
        attributes = await datasources.schema.attributes.fetch_values(
            self.connection,
            modules=only_modules,
            exclude_modules=exclude_modules)

        for r in attributes:
            name = sn.Name(r['name'])
            subject = schema.get(r['subject_name'])
            attribute = schema.get(r['attribute_name'])
            value = r['value']

            schema, attribute = s_attrs.AttributeValue.create_in_schema(
                schema,
                id=r['id'],
                name=name,
                subject=subject,
                attribute=attribute,
                value=value,
                inheritable=r['inheritable'],
            )

            schema = subject.add_attribute(schema, attribute)

        return schema
Example no. 8
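# Re-types an EmptySet node: derives an anonymous '__expr__' subclass named
# after the set's path alias for the path id and records the target type.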
def amend_empty_set_type(es: irast.EmptySet, t: s_obj.Object, schema) -> None:
    alias = es.path_id[-1].name.name
    scls_name = s_name.Name(module='__expr__', name=alias)
    scls = t.__class__(name=scls_name, bases=[t])
    scls.acquire_ancestor_inheritance(schema)
    es.path_id = irast.PathId(scls)
    es.scls = t
Example no. 9
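    # Loads function and parameter rows, decodes the parameters and creates
    # Function objects in the schema.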
    async def read_functions(self, schema, only_modules, exclude_modules):
        ds = datasources.schema.functions
        func_list = await ds.fetch(self.connection,
                                   modules=only_modules,
                                   exclude_modules=exclude_modules)
        param_list = await ds.fetch_params(self.connection,
                                           modules=only_modules,
                                           exclude_modules=exclude_modules)
        param_map = {p['name']: p for p in param_list}

        for row in func_list:
            name = sn.Name(row['name'])

            schema, params = self._decode_func_params(schema, row, param_map)

            func_data = {
                'id': row['id'],
                'name': name,
                'language': row['language'],
                'params': params,
                'return_typemod': row['return_typemod'],
                'from_function': row['from_function'],
                'force_return_cast': row['force_return_cast'],
                'code': row['code'],
                'initial_value': row['initial_value'],
                'return_type': self.unpack_typeref(row['return_type'], schema)
            }

            schema, _ = s_funcs.Function.create_in_schema(schema, **func_data)

        return schema
Example no. 10
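    # Loads cast rows and creates s_casts.Cast objects describing the
    # available type conversions.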
    async def read_casts(self, schema, only_modules, exclude_modules):
        self._operator_commutators.clear()

        ds = datasources.schema
        cast_list = await ds.casts.fetch(self.connection,
                                         modules=only_modules,
                                         exclude_modules=exclude_modules)

        for row in cast_list:
            name = sn.Name(row['name'])

            cast_data = {
                'id': row['id'],
                'name': name,
                'from_type': self.unpack_typeref(row['from_type'], schema),
                'to_type': self.unpack_typeref(row['to_type'], schema),
                'language': row['language'],
                'from_cast': row['from_cast'],
                'from_function': row['from_function'],
                'from_expr': row['from_expr'],
                'allow_implicit': row['allow_implicit'],
                'allow_assignment': row['allow_assignment'],
                'code': row['code'],
            }

            schema, oper = s_casts.Cast.create_in_schema(schema, **cast_data)

        return schema
Example no. 11
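    # AST visitor callback: rewrites ObjectRef nodes using the enclosing
    # name-mapping callback.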
    def _cb(node):
        if isinstance(node, qlast.ObjectRef):
            name = sn.Name(name=node.name, module=node.module)
            upd = callback(name)
            if name != upd:
                node.name = upd.name
                node.module = upd.module
Example no. 12
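# Translates a module alias into its schema module and returns the fully
# qualified name, or None if the alias is unknown.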
def resolve_schema_name(name: str, module: str, *,
                        ctx: context.ContextLevel) -> sn.Name:
    schema_module = ctx.modaliases.get(module)
    if schema_module is None:
        return None
    else:
        return sn.Name(name=name, module=schema_module)
Example no. 13
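    # Variant of the synthetic indirection pointer that initializes through
    # the parent pointer class with an outbound direction.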
    def __init__(self, source, target, *, optional, cardinality):
        name = 'optindirection' if optional else 'indirection'
        super().__init__(name=s_name.Name(module='__type__', name=name),
                         source=source,
                         target=target,
                         direction=s_pointers.PointerDirection.Outbound)
        self.optional = optional
        self.cardinality = cardinality
Example no. 14
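# Compiles a GROUP query: creates a synthetic '__group__' object type for the
# group path, compiles the subject and GROUP BY clause in a fenced scope, then
# compiles the result as a nested SELECT statement.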
def compile_GroupQuery(
        expr: qlast.Base, *, ctx: context.ContextLevel) -> irast.Base:
    with ctx.subquery() as ictx:
        stmt = irast.GroupStmt()
        init_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

        c = s_objtypes.ObjectType(
            name=s_name.Name(
                module='__group__', name=ctx.aliases.get('Group')),
            bases=[ctx.schema.get('std::Object')]
        )
        c.acquire_ancestor_inheritance(ctx.schema)

        stmt.group_path_id = pathctx.get_path_id(c, ctx=ictx)
        pathctx.register_set_in_scope(stmt.group_path_id, ctx=ictx)

        with ictx.newscope(fenced=True) as subjctx:
            subjctx.clause = 'input'

            subject_set = setgen.scoped_set(
                dispatch.compile(expr.subject, ctx=subjctx), ctx=subjctx)

            alias = expr.subject_alias or subject_set.path_id.target.name
            stmt.subject = stmtctx.declare_inline_view(
                subject_set, alias, ctx=ictx)

            with subjctx.new() as grpctx:
                stmt.groupby = compile_groupby_clause(
                    expr.groupby, ctx=grpctx)

        with ictx.subquery() as isctx, isctx.newscope(fenced=True) as sctx:
            o_stmt = sctx.stmt = irast.SelectStmt()

            o_stmt.result = compile_result_clause(
                expr.result,
                view_scls=ctx.view_scls,
                view_rptr=ctx.view_rptr,
                result_alias=expr.result_alias,
                view_name=ctx.toplevel_result_view_name,
                ctx=sctx)

            o_stmt.where = clauses.compile_where_clause(
                expr.where, ctx=sctx)

            o_stmt.orderby = clauses.compile_orderby_clause(
                expr.orderby, ctx=sctx)

            o_stmt.offset = clauses.compile_limit_offset_clause(
                expr.offset, ctx=sctx)

            o_stmt.limit = clauses.compile_limit_offset_clause(
                expr.limit, ctx=sctx)

            stmt.result = setgen.scoped_set(o_stmt, ctx=sctx)

        result = fini_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

    return result
Example no. 15
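# Creates an EmptySet IR node typed by an anonymous '__expr__' subclass of the
# given class (std::str when no class is supplied).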
def new_empty_set(schema, *, scls=None, alias):
    if scls is None:
        base_scls = schema.get('std::str')
    else:
        base_scls = scls
    cls_name = s_name.Name(module='__expr__', name=alias)
    cls = base_scls.__class__(name=cls_name, bases=[base_scls])
    cls.acquire_ancestor_inheritance(schema)
    return irast.EmptySet(path_id=irast.PathId(cls), scls=scls)
Example no. 16
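# Creates an EmptySet IR node; without a static type the path id falls back to
# the anytype pseudo-type.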
def new_empty_set(schema, *, stype=None, alias):
    if stype is None:
        path_id_scls = s_pseudo.Any.create()
    else:
        path_id_scls = stype

    typename = s_name.Name(module='__expr__', name=alias)
    path_id = irast.PathId.from_type(schema, path_id_scls, typename=typename)
    return irast.EmptySet(path_id=path_id, stype=stype)
Example no. 17
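# Builds a PathId for a named expression: collections get a renamed copy of
# the type, other types an anonymous '__expr__' subclass.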
def get_expression_path_id(t: s_types.Type, alias: str, *,
                           ctx: context.ContextLevel) -> irast.PathId:
    cls_name = s_name.Name(module='__expr__', name=alias)
    if isinstance(t, (s_types.Collection, s_types.Tuple)):
        et = t.copy()
        et.name = cls_name
    else:
        et = t.__class__(name=cls_name, bases=[t])
        et.acquire_ancestor_inheritance(ctx.schema)
    return pathctx.get_path_id(et, ctx=ctx)
Example no. 18
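    # Loads attribute definition rows and adds s_attrs.Attribute objects to
    # the schema.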
    async def read_attributes(self, schema):
        attributes = await datasources.schema.attributes.fetch(self.connection)

        for r in attributes:
            name = sn.Name(r['name'])
            title = self.json_to_word_combination(r['title'])
            description = r['description']
            attribute = s_attrs.Attribute(
                name=name, title=title, description=description,
                type=self.unpack_typeref(r['type'], schema))
            schema.add(attribute)
Example no. 19
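    # Loads policy action rows and adds s_policy.Action objects to the schema.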
    async def read_actions(self, schema):
        actions = await datasources.schema.policy.fetch_actions(
            self.connection)

        for r in actions:
            name = sn.Name(r['name'])
            title = self.json_to_word_combination(r['title'])
            description = r['description']

            action = s_policy.Action(
                name=name, title=title, description=description)
            schema.add(action)
Example no. 20
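    # Builds a map from backend domain names to the scalar type names they
    # implement.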
    async def _init_scalar_map_cache(self):
        scalar_list = await datasources.schema.scalars.fetch(self.connection)

        domain_to_scalar_map = {}

        for row in scalar_list:
            name = sn.Name(row['name'])

            domain_name = common.scalar_name_to_domain_name(name,
                                                            catenate=False)
            domain_to_scalar_map[domain_name] = name

        return domain_to_scalar_map
Example no. 21
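    # Recovers the schema-level constraint name for a PostgreSQL constraint
    # from the cached constraint description.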
    async def constraint_name_from_pg_name(self, connection, pg_name):
        if self._constraints_cache is None:
            self._constraints_cache = \
                await self._populate_constraint_cache(connection)

        try:
            cdata = self._constraints_cache[pg_name]
        except KeyError:
            return None
        else:
            name = cdata['constraint_description']
            name, _, _ = name.rpartition(';')
            return sn.Name(name)
Example no. 22
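# Computes the backend name of a constraint's trigger procedure from its
# module and id.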
def _get_backend_constraint_name(schema,
                                 constraint,
                                 catenate=True,
                                 prefix='edgedb_',
                                 *,
                                 aspect=None):
    if aspect not in ('trigproc', ):
        raise ValueError(
            f'unexpected aspect for constraint backend name: {aspect!r}')

    name = s_name.Name(module=constraint.get_name(schema).module,
                       name=str(constraint.id))

    return convert_name(name, aspect, catenate, prefix=prefix)
Example no. 23
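    # Loads attribute value rows (with pickled values) and attaches
    # AttributeValue objects to their subjects.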
    async def read_attribute_values(self, schema):
        attributes = await datasources.schema.attributes.fetch_values(
            self.connection)

        for r in attributes:
            name = sn.Name(r['name'])
            subject = schema.get(r['subject_name'])
            attribute = schema.get(r['attribute_name'])
            value = pickle.loads(r['value'])

            attribute = s_attrs.AttributeValue(
                name=name, subject=subject, attribute=attribute, value=value)
            subject.add_attribute(attribute)
            schema.add(attribute)
Example no. 24
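    # Loads policy rows and creates s_policy.Policy objects bound to their
    # subjects, events and actions.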
    async def read_policies(self, schema):
        policies = await datasources.schema.policy.fetch_policies(
            self.connection)

        for r in policies:
            name = sn.Name(r['name'])
            title = self.json_to_word_combination(r['title'])
            description = r['description']
            policy = s_policy.Policy(
                name=name, title=title, description=description,
                subject=schema.get(r['subject']), event=schema.get(r['event']),
                actions=[schema.get(a) for a in r['actions']])
            schema.add(policy)
            policy.subject.add_policy(policy)
Example no. 25
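    # Reconciles schema-defined indexes with the PostgreSQL indexes actually
    # present, creating SourceIndex objects and reporting both missing and
    # extraneous backend indexes.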
    async def read_indexes(self, schema, only_modules, exclude_modules):
        pg_index_data = await introspection.tables.fetch_indexes(
            self.connection,
            schema_pattern='edgedb%',
            index_pattern='%_reg_idx')

        pg_indexes = set()
        for row in pg_index_data:
            table_name = tuple(row['table_name'])
            for pg_index in self.interpret_indexes(table_name, row['indexes']):
                pg_indexes.add(
                    (table_name, pg_index.get_metadata('schemaname')))

        ds = datasources.schema.indexes
        indexes = await ds.fetch(self.connection,
                                 modules=only_modules,
                                 exclude_modules=exclude_modules)

        for index_data in indexes:
            subj = schema.get(index_data['subject_name'])
            subj_table_name = common.get_backend_name(schema,
                                                      subj,
                                                      catenate=False)
            index_name = sn.Name(index_data['name'])

            try:
                pg_indexes.remove((subj_table_name, index_name))
            except KeyError:
                raise errors.SchemaError(
                    'internal metadata inconsistency',
                    details=f'Index {index_name} is defined in schema, but '
                    f'the corresponding PostgreSQL index is missing.'
                ) from None

            schema, index = s_indexes.SourceIndex.create_in_schema(
                schema,
                id=index_data['id'],
                name=index_name,
                subject=subj,
                expr=index_data['expr'])

            schema = subj.add_index(schema, index)

        if pg_indexes and not only_modules and not exclude_modules:
            details = f'Extraneous PostgreSQL indexes found: {pg_indexes!r}'
            raise errors.SchemaError('internal metadata inconsistency',
                                     details=details)

        return schema
Example no. 26
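    # Loads attribute definition rows and creates Attribute objects in the
    # schema.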
    async def read_attributes(self, schema, only_modules, exclude_modules):
        attributes = await datasources.schema.attributes.fetch(
            self.connection,
            modules=only_modules,
            exclude_modules=exclude_modules)

        for r in attributes:
            name = sn.Name(r['name'])
            schema, attribute = s_attrs.Attribute.create_in_schema(
                schema,
                id=r['id'],
                name=name,
                inheritable=r['inheritable'],
            )

        return schema
Example no. 27
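# Resolves a (possibly module-qualified) name to a schema object, preferring
# aliased views and enriching lookup errors with suggestions.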
def get_schema_object(
        name: typing.Union[str, qlast.ObjectRef],
        module: typing.Optional[str] = None,
        *,
        item_types: typing.Optional[typing.List[s_obj.ObjectMeta]],
        ctx: context.ContextLevel,
        srcctx: typing.Optional[parsing.ParserContext] = None) -> s_obj.Object:

    if isinstance(name, qlast.ObjectRef):
        if srcctx is None:
            srcctx = name.context
        module = name.module
        name = name.name

    if module:
        name = sn.Name(name=name, module=module)

    if not module:
        result = ctx.aliased_views.get(name)
        if result is not None:
            return result

    try:
        scls = ctx.schema.get(name=name,
                              module_aliases=ctx.modaliases,
                              type=item_types)

    except s_err.ItemNotFoundError as e:
        qlerror = qlerrors.EdgeQLError(e.args[0], context=srcctx)
        s_utils.enrich_schema_lookup_error(qlerror,
                                           name,
                                           modaliases=ctx.modaliases,
                                           schema=ctx.schema,
                                           item_types=item_types)

        raise qlerror

    except s_err.SchemaError as e:
        raise qlerrors.EdgeQLError(e.args[0], context=srcctx)

    result = ctx.aliased_views.get(scls.name)
    if result is None:
        result = scls

    return result
Example no. 28
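# Computes the backend table name (or deletion-trigger variant) for an object
# type from its module and id.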
def _get_backend_objtype_name(schema, objtype, catenate=True, aspect=None):
    if aspect is None:
        aspect = 'table'
    if aspect not in ('table', 'target-del-def-t', 'target-del-imm-t',
                      'source-del-def-t', 'source-del-imm-t',
                      'target-del-def-f', 'target-del-imm-f',
                      'source-del-def-f', 'source-del-imm-f'):
        raise ValueError(
            f'unexpected aspect for object type backend name: {aspect!r}')

    name = s_name.Name(module=objtype.get_name(schema).module,
                       name=str(objtype.id))

    if aspect != 'table':
        suffix = aspect
    else:
        suffix = ''

    return convert_name(name, suffix=suffix, catenate=catenate)
Example no. 29
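# Resolves the called function by name, picks a matching overload, and builds
# the FunctionCall IR node, compiling the initial value cast if one is set.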
def compile_FunctionCall(
        expr: qlast.Base, *, ctx: context.ContextLevel) -> irast.Base:
    with ctx.new() as fctx:
        if isinstance(expr.func, str):
            funcname = expr.func
        else:
            funcname = sn.Name(expr.func[1], expr.func[0])

        funcs = fctx.schema.get_functions(
            funcname, module_aliases=fctx.modaliases)

        if funcs is None:
            raise errors.EdgeQLError(
                f'could not resolve function name {funcname}',
                context=expr.context)

        fctx.in_func_call = True
        args, kwargs, arg_types = process_func_args(expr, funcname, ctx=fctx)

        fatal_array_check = len(funcs) == 1
        for funcobj in funcs:
            if check_function(expr, funcname, funcobj, arg_types,
                              fatal_array_check=fatal_array_check):
                break
        else:
            raise errors.EdgeQLError(
                f'could not find a function variant {funcname}',
                context=expr.context)

        fixup_param_scope(funcobj, args, kwargs, ctx=fctx)

        node = irast.FunctionCall(func=funcobj, args=args, kwargs=kwargs)

        if funcobj.initial_value is not None:
            rtype = irutils.infer_type(node, fctx.schema)
            iv_ql = qlast.TypeCast(
                expr=qlparser.parse_fragment(funcobj.initial_value),
                type=typegen.type_to_ql_typeref(rtype)
            )
            node.initial_value = dispatch.compile(iv_ql, ctx=fctx)

    ir_set = setgen.ensure_set(node, ctx=ctx)
    return ir_set
Example no. 30
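# Resolves a name (or anytype reference) to a schema object via the
# compilation environment's schema, preferring aliased views.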
def get_schema_object(
        name: typing.Union[str, qlast.ObjectRef],
        module: typing.Optional[str] = None,
        *,
        item_types: typing.Optional[typing.List[s_obj.ObjectMeta]],
        ctx: context.ContextLevel,
        srcctx: typing.Optional[parsing.ParserContext] = None) -> s_obj.Object:

    if isinstance(name, qlast.ObjectRef):
        if srcctx is None:
            srcctx = name.context
        module = name.module
        name = name.name
    elif isinstance(name, qlast.AnyType):
        return s_pseudo.Any.create()

    if module:
        name = sn.Name(name=name, module=module)

    if not module:
        result = ctx.aliased_views.get(name)
        if result is not None:
            return result

    try:
        stype = ctx.env.schema.get(name=name,
                                   module_aliases=ctx.modaliases,
                                   type=item_types)

    except errors.QueryError as e:
        s_utils.enrich_schema_lookup_error(e,
                                           name,
                                           modaliases=ctx.modaliases,
                                           schema=ctx.env.schema,
                                           item_types=item_types)
        raise

    result = ctx.aliased_views.get(stype.get_name(ctx.env.schema))
    if result is None:
        result = stype

    return result