def compile_sequence(node, state):
    """Build the length-verification filter for a sequence pattern.

    NOTE(review): as visible here the function constructs ``length_verifier``
    but never returns or uses it — confirm the remainder of the function is
    not truncated in this view.
    """
    # Number of pattern items the matched sequence must account for.
    total_length = len(node.items)
    # EdgeQL count() over the current path; compared against the pattern size.
    verify_call = IR.call("count", [state.compute_path()])
    # Default: the matched sequence must contain exactly `total_length` items.
    length_verifier = IR.filter(verify_call, total_length, "=")
    if total := node.items.count(grammar.Expand):
        # Only a single expansion marker is allowed per sequence pattern.
        state.ensure(node, total == 1)
        # With an expansion present, the expansion absorbs any surplus, so the
        # sequence only needs at least `total_length - 1` concrete items.
        length_verifier = IR.filter(verify_call, total_length - 1, ">=")
def aggregate_array(state):
    """Aggregate the ordered results of the current path into an array."""
    if not state.parents:
        # Top-level search: `ORDER BY @index` can be applied directly.
        ordered = IR.select(state.compute_path(), order=IR.property("index"))
        return IR.call("array_agg", [ordered])

    # Nested list search (e.g: Call(args=[Call(args=[Name()])])): EdgeDB
    # can't quite infer which @index a bare `ORDER BY @index` refers to,
    # so iterate the parent explicitly through a named loop target.
    typed_pointer = IR.attribute(
        IR.typed(IR.name(_COMPILER_WORKAROUND_FOR_TARGET), state.match),
        state.pointer,
    )
    body = IR.loop(
        IR.name(_COMPILER_WORKAROUND_FOR_TARGET),
        state.parents[-1].compute_path(allow_missing=True),
        IR.select(typed_pointer, order=IR.property("index")),
    )
    return IR.call("array_agg", [body])
def convert_length(node, state, arguments):
    """Lower a length constraint into count()-based filters.

    Accepts a `min` and/or `max` bound (both constant integers) and
    combines them into a single filter expression.
    """
    # At least one of the two bounds has to be supplied.
    state.ensure(node, any((arguments.min, arguments.max)))

    node_count = IR.call("count", [state.compute_path()])
    bounds = (
        (arguments.min, IR.as_operator(">=")),
        (arguments.max, IR.as_operator("<=")),
    )

    combined = None
    for bound, comparator in bounds:
        if bound is None:
            continue
        # Each supplied bound must be a constant integer literal.
        state.ensure(bound, isinstance(bound, grammar.Constant))
        state.ensure(bound, isinstance(bound.value, int))
        combined = IR.combine_filters(
            combined, IR.filter(node_count, IR.literal(bound.value), comparator)
        )

    assert combined is not None
    return combined
def serialize_sequence(sequence, context):
    """Serialize *sequence* into an IR set, indexing AST items when needed."""
    members = IR.set([serialize(value, context) for value in sequence])

    # {1, 2, 3} / {<ast::op>'Add', <ast::op>'Sub', ...}
    # Basic values can be emitted as a plain set with no extra bookkeeping.
    if all(isinstance(member, _BASIC_SET_TYPES) for member in sequence):
        return members

    # Inserting a sequence of AST objects would require special
    # attention to calculate the index property, so loop over the
    # enumerated items and attach each item's position explicitly.
    item = IR.name("item")
    namespace = IR.namespace({"items": members})
    enumerated = IR.loop(
        item,
        IR.call("enumerate", [IR.name("items")]),
        IR.select(
            IR.attribute(item, 1),
            selections=[
                IR.assign(IR.property("index"), IR.attribute(item, 0))
            ],
        ),
    )
    return IR.add_namespace(namespace, enumerated)
def insert_file(context):
    """Insert the file's AST into the database and link nodes to their module.

    Returns an ``Insertion`` status: CACHED when the file was already
    inserted, SKIPPED when it cannot be parsed, INSERTED on success.
    """
    # Already-inserted files are skipped via the cache.
    if context.is_cached():
        return Insertion.CACHED

    # Files that can't be turned into an AST are skipped entirely.
    if not (tree := context.as_ast()):
        return Insertion.SKIPPED

    # Everything below happens atomically: either the whole tree plus its
    # module back-links land, or nothing does.
    with context.connection.transaction():
        module = apply_ast(tree, context)
        # Select the single module object that was just inserted.
        module_select = IR.select(
            tree.kind_name, filters=IR.object_ref(module), limit=1
        )
        # Match every object whose id is contained in the `ids` variable
        # (the reference pool accumulated while inserting the tree).
        update_filter = IR.filter(
            IR.attribute(None, "id"),
            IR.call(
                "array_unpack", [IR.cast("array<uuid>", IR.variable("ids"))]
            ),
            "IN",
        )
        # Back-fill the `_module` link on every module-annotated node type.
        for base_type in Schema.module_annotated_types:
            update = IR.update(
                base_type.kind_name,
                filters=update_filter,
                assignments={"_module": module_select},
            )
            context.connection.query(
                IR.construct(update), ids=context.reference_pool
            )

    logger.info("%r has been inserted successfully", context.filename)
    context.cache()
    return Insertion.INSERTED
def convert_all_any(node, state, arguments):
    """Map an `all`/`any` matcher onto the EdgeQL function of the same name."""
    function_name = node.name.lower()
    compiled_argument = state.codegen(arguments.value)
    return IR.call(function_name, [compiled_argument])
"_module", [ IR.selection("filename"), IR.selection( "project", [IR.selection("git_source"), IR.selection("git_revision")], ), ], ), ] STATS_QUERY = IR.construct( IR.select( IR.merge( IR.call("count", [IR.wrap(name)]) for name in STATISTICS_NODES))) class LocationNode(ast.AST): _attributes = ("lineno", "col_offset", "end_lineno", "end_col_offset") def get_username(link): if link.endswith("/"): index = 3 else: index = 2 return link.split("/")[-index]