# Example 1
def doc_func(f, parent_type=None):
    """Parse the docstring of function *f* into a FuncDoc.

    If the docstring lacks a 'Parameters' section, one is synthesized from
    ``f.params``.  Otherwise the documented parameters are matched one-to-one
    against the function's actual parameters (including the param collector)
    and enriched with their type and default value.

    Raises:
        AutoDocError: if the docstring fails to parse, or the documented
            parameters don't match the function's signature.
    """
    if isinstance(f, MethodInstance):
        f = f.func
    try:
        doc_tree = parse(f.docstring or '')
    except LarkError as e:
        # Chain the parse error explicitly so the root cause is preserved
        raise AutoDocError(f"Error in docstring of function {f.name}: {e}") from e

    assert {s.name for s in doc_tree.sections} <= {'Parameters', 'Example', 'Examples', 'Note', 'Returns', 'See Also'}, [s.name for s in doc_tree.sections]
    try:
        params_doc = doc_tree.get_section('Parameters')
    except KeyError:
        if f.params:
            # No 'Parameters' section: synthesize one from the signature.
            # NOTE(review): unlike the matching branch below, this does not
            # include f.param_collector — confirm whether that is intentional.
            params_doc = Section('Parameters', [Defin(p.name, None, str(p.type) if p.type else '') for p in f.params])
            doc_tree.sections.insert(0, params_doc)
    else:
        params = list(f.params)
        if f.param_collector:
            params.append(f.param_collector)
        if len(params) != len(params_doc.items):
            raise AutoDocError(f"Parameters don't match docstring in function {f}")

        # Enrich each documented parameter with the actual type and default
        for d, p in safezip(params_doc.items, params):
            assert d.name == p.name, (d.name, p.name)
            d.type = str(p.type) if p.type else ''
            d.default = p.default.repr() if p.default else ''

    return FuncDoc(f, doc_tree, parent_type=parent_type)
# Example 2
    def _compile(self, qb):
        """Compile the literal value list into `SELECT * FROM UNNEST([...])` SQL."""
        col_names = list(self.type.elems)

        def compile_row(row):
            # Tuple rows become a STRUCT(<expr> as <name>, ...) literal;
            # anything else compiles to its own SQL code as-is.
            if not isinstance(row, Tuple):
                return row.compile(qb).code
            elems = (v.compile(qb).code + [" as ", name]
                     for name, v in safezip(col_names, row.values))
            return ['STRUCT('] + join_comma(elems) + [")"]

        compiled_rows = [compile_row(row) for row in self.values]
        return ["SELECT * FROM UNNEST(["] + join_comma(compiled_rows) + ["]) as item"]
# Example 3
def from_sql(arr: T.list):
    """Convert raw SQL result rows into Python values.

    Each row in ``arr.value`` must have exactly one element per flattened
    field of the list's element type.  Struct elements become dicts keyed
    by field name; primitive elements become bare values.

    Raises:
        Signal (T.TypeError): if any row has the wrong number of columns.
    """
    fields = flatten_type(arr.type)
    if not all(len(e) == len(fields) for e in arr.value):
        # The branch is only reachable when arr.value is non-empty, so
        # arr.value[0] is safe.  Report the actual expected column count
        # (the previous message always claimed "1 column").
        raise Signal.make(T.TypeError, None,
                          f"Expected {len(fields)} columns. Got {len(arr.value[0])}")

    if arr.type.elem <= T.struct:
        return [{
            n: _from_sql_primitive(e)
            for (n, _t), e in safezip(fields, tpl)
        } for tpl in arr.value]
    else:
        return [_from_sql_primitive(e[0]) for e in arr.value]
# Example 4
def _destructure_param_match(state, ast_node, param_match):
    """Yield (name, localized value) pairs for each matched parameter.

    Struct-typed parameters are destructured: their value must be a list
    matching the flattened field names, and one pair is yielded per field.
    """
    # TODO use cast rather than an ad-hoc hardwired destructure
    for param, value in param_match:
        if isinstance(value, objects.RowInstance):
            value = value.primary_key()
        value = localize(state, value)

        if not (param.type <= T.struct):
            # Plain parameter: pass through unchanged
            yield param.name, value
            continue

        # Struct parameter: expect a list with one value per flattened field
        names = [field_name for field_name, _t in flatten_type(param.orig, [param.name])]
        if not isinstance(value, list):
            msg = f"Parameter {param.name} received a bad value: {value} (expecting a struct or a list)"
            raise Signal.make(T.TypeError, ast_node, msg)
        if len(value) != len(names):
            msg = f"Parameter {param.name} received a bad value (size of {len(names)})"
            raise Signal.make(T.TypeError, ast_node, msg)
        yield from safezip(names, value)
# Example 5
def compile_to_inst(state: State, proj: ast.Projection):
    """Compile a projection (``table {fields}``) into an instance.

    Projecting a struct yields a new StructInstance; projecting a table
    yields a new TableInstance wrapping a SELECT (with optional GROUP BY
    when aggregate fields are present).

    Raises:
        Signal (T.TypeError): for non-projectable inputs, duplicate field
            names, unprojectable field types, or an empty projection.
    """
    table = cast_to_instance(state, proj.table)

    if table is objects.EmptyList:
        return table   # Empty list projection is always an empty list.

    t = T.union[T.table, T.struct]
    if not table.type <= t:
        raise Signal.make(T.TypeError, proj, f"Cannot project objects of type {table.type}")

    # Expand any ellipsis (`...`) in the field list into concrete columns
    fields = _expand_ellipsis(state, table, proj.fields)

    # Test duplicates in field names. If an automatic name is used, collision should be impossible
    dup = find_duplicate([f for f in list(proj.fields) + list(proj.agg_fields) if f.name], key=lambda f: f.name)
    if dup:
        raise Signal.make(T.TypeError, dup, f"Field '{dup.name}' was already used in this projection")

    attrs = table.all_attrs()

    # Regular fields are evaluated with every column marked as projected
    with state.use_scope({n: projected(c) for n, c in attrs.items()}):
        fields = _process_fields(state, fields)

    for name, f in fields:
        if not f.type <= T.union[T.primitive, T.struct, T.json, T.nulltype, T.unknown]:
            raise Signal.make(T.TypeError, proj, f"Cannot project values of type: {f.type}")

    if isinstance(table, objects.StructInstance):
        # Struct projection produces a new struct directly — no SQL needed.
        # Field names here are (user_defined, name) tuples; keep the string.
        d = {n[1]:c for n, c in fields}     # Remove user_defined bool
        t = T.struct({n:f.type for n, f in d.items()})
        return objects.StructInstance(t, d)

    agg_fields = []
    if proj.agg_fields:
        # Aggregate fields see the source columns wrapped as aggregates
        with state.use_scope({n:objects.aggregate(c) for n, c in attrs.items()}):
            agg_fields = _process_fields(state, proj.agg_fields)

    all_fields = fields + agg_fields
    assert all(isinstance(inst, AbsInstance) for name_, inst in all_fields)

    #
    # Make new type (and resolve names)
    #
    field_types = [(name, inst.type) for name, inst in all_fields]
    # User-defined names are reserved; auto-generated names must avoid them
    reserved_names = {name[1] for name, _ in all_fields if name[0]}
    elems = {}
    for (user_defined, name), type_ in field_types:
        # Unvectorize for placing in the table type
        type_ = kernel_type(type_)

        # Find name without collision
        if not user_defined:
            name_ = name
            i = 1
            # Append an increasing numeric suffix until the name is free
            while name in elems or name in reserved_names:
                name = name_ + str(i)
                i += 1

        assert name not in elems
        elems[name] = type_

    # TODO inherit primary key? indexes?
    # codename = state.unique_name('proj')
    new_table_type = T.table(elems, temporary=False)    # XXX abstract=True

    # Make code
    # Flatten every field instance's SQL code; pairs up with the flattened
    # element names of the new table type below.
    flat_codes = [code
                  for _, inst in all_fields
                  for code in inst.flatten_code()]

    sql_fields = [
        sql.ColumnAlias.make(code, nn)
        for code, (nn, _nt) in safezip(flat_codes, flatten_type(new_table_type))
    ]

    if not sql_fields:
        raise Signal.make(T.TypeError, proj, "No column provided for projection (empty projection)")

    # Make Instance
    new_table = objects.TableInstance.make(sql.null, new_table_type, [table] + [inst for _, inst in all_fields])

    groupby = []
    limit = None
    if proj.groupby:
        if fields:
            # Group by ordinal position (1-based) of the non-aggregate fields
            # groupby = [new_table.get_column(n).primary_key().code for n, rc in fields]
            groupby = [sql.Primitive(T.int, str(i+1)) for i in range(len(fields))]
        else:
            limit = 1
            # Alternatively we could
            #   groupby = [sql.null]
            # But postgres doesn't support it

    code = sql.Select(new_table_type, table.code, sql_fields, group_by=groupby, limit=limit)

    # Make Instance
    return new_table.replace(code=code)
# Example 6
def _join(state: State, join: str, exprs_dict: dict, joinall=False, nullable=None):
    """Join the tables in `exprs_dict` (name -> expression) into one table.

    join: the SQL join kind, passed through to sql.Join.
    joinall: join every table unconditionally; otherwise equality
             conditions are discovered pairwise via _join2 (auto-join).
    nullable: optional per-table flags; tables flagged truthy get a
              nullable struct column (for left/right/outer joins).

    Raises:
        Signal: on invalid/untyped inputs, or (T.JoinError) when no
            plausible auto-join relation connects all the tables.
    """

    names = list(exprs_dict)
    exprs = [evaluate(state, value) for value in exprs_dict.values()]

    # Validation and edge cases
    for x in exprs:
        if not isinstance(x, objects.AbsInstance):
            raise Signal.make(T.TypeError, None, f"Unexpected object type: {x}")

    for e in exprs:
        if e is objects.EmptyList:
            raise Signal.make(T.TypeError, None, "Cannot join on an untyped empty list")

        if isinstance(e, objects.UnknownInstance):
            # Any unknown participant makes the whole result unknown-typed
            table_type = T.table({n: T.unknown for n in names})
            return objects.TableInstance.make(sql.unknown, table_type, [])

    # Initialization
    tables = [_get_table(x) for x in exprs]
    assert all((t.type <= T.table) for t in tables)

    # Each joined table appears as a struct-typed column in the result
    structs = {name: T.struct(table.type.elems) for name, table in safezip(names, tables)}

    if nullable:
        # Update nullable for left/right/outer joins
        structs = {name: t.as_nullable() if n else t
                   for (name, t), n in safezip(structs.items(), nullable)}

    tables = [objects.alias_table_columns(t, n) for n, t in safezip(names, tables)]

    # Primary keys are qualified by their table's alias in the joined result
    primary_keys = [[name] + pk
                    for name, t in safezip(names, tables)
                    for pk in t.type.options.get('pk', [])
                   ]
    table_type = T.table(structs, name=Id(state.unique_name("joinall" if joinall else "join")), pk=primary_keys)

    conds = []
    if joinall:
        for e in exprs:
            if not isinstance(e, objects.CollectionInstance):
                raise Signal.make(T.TypeError, None, f"joinall() expected tables. Got {e}")
    else:
        if len(exprs) < 2:
            raise Signal.make(T.TypeError, None, "join expected at least 2 arguments")

        # Try to auto-join every pair; each success adds an equality
        # condition and marks both participants as connected.
        joined_exprs = set()
        for (na, ta), (nb, tb) in itertools.combinations(safezip(names, exprs), 2):
            try:
                cols = _join2(ta, tb)
                cond = sql.Compare('=', [sql.Name(c.type, join_names((n, c.name))) for n, c in safezip([na, nb], cols)])
                conds.append(cond)
                joined_exprs |= {id(ta), id(tb)}
            except NoAutoJoinFound:
                # This pair has no relation; another pair may still connect
                # each of these tables, so keep going.
                pass

        if {id(e) for e in exprs} != joined_exprs:
            # TODO better error!!! table name?? specific failed auto-join?
            s = ', '.join(repr(t.type) for t in exprs)
            raise Signal.make(T.JoinError, None, f"Cannot auto-join: No plausible relations found between {s}")

    code = sql.Join(table_type, join, [t.code for t in tables], conds)
    return objects.TableInstance.make(code, table_type, exprs)