def run_query_on_connection(
    connection,
    reiz_ql,
    *,
    limit=DEFAULT_LIMIT,
    offset=0,
):
    query = IR.construct(compile_query(reiz_ql, limit, offset))
    query_set = connection.query(query)
    return process_queryset(query_set)
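# Usage sketch (illustrative only; ``connect`` and the DSN attribute are
# assumptions, not part of this module). Compiles a ReizQL matcher to
# EdgeQL, runs it over a blocking connection, and post-processes the
# returned set:
#
#   connection = connect(config.database.dsn)
#   matches = run_query_on_connection(
#       connection,
#       "Call(func=Name('print'))",
#       limit=10,
#   )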
def apply_ast(node, context):
    with context.enter_node(node):
        insertions = {
            field: serialize(value, context)
            for field, value in iter_properties(node)
            if value is not None
        }
        query = IR.insert(node.kind_name, insertions)
        return context.connection.query_one(IR.construct(query))
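# Conceptual shape of the generated insertion (hedged sketch; the exact
# EdgeQL depends on the schema and on how ``serialize`` renders each
# field). For a node such as ast.Constant(value=1) the query is roughly:
#
#   INSERT Constant { value := ..., lineno := ..., col_offset := ... }
#
# Child nodes are presumably inserted by ``serialize`` first, so the
# insertions mapping holds only scalars or references to existing objects.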
def compile_query(self):
    query = compile_query(self.reiz_ql, limit=None, offset=0)
    query.filters = IR.combine_filters(
        query.filters,
        IR.filter(
            IR.attribute(IR.attribute(None, "_module"), "filename"),
            IR.literal(self.expected_filename),
            "=",
        ),
    )
    return IR.construct(query)
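# The combined filter pins the match to a single file; conceptually the
# constructed EdgeQL gains a clause like (illustrative, not verbatim
# compiler output):
#
#   FILTER <original filters> AND ._module.filename = '<expected_filename>'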
async def run_query_on_async_connection(
    connection,
    reiz_ql,
    *,
    limit=DEFAULT_LIMIT,
    offset=0,
    timeout=config.web.timeout,
):
    query = IR.construct(compile_query(reiz_ql, limit, offset))
    # Bound the query by ``timeout``; wait_for runs on the current event
    # loop, so no explicit loop argument is needed (it was removed from
    # asyncio.wait_for in Python 3.10).
    query_set = await asyncio.wait_for(connection.query(query), timeout=timeout)
    return process_queryset(query_set)
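# Usage sketch (illustrative; the pool/acquire API is an assumption):
#
#   async def search(pool, source):
#       async with pool.acquire() as connection:
#           return await run_query_on_async_connection(connection, source)
#
# If the database does not answer within ``timeout`` seconds,
# asyncio.wait_for cancels the pending query and raises
# asyncio.TimeoutError, which the caller is expected to handle.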
async def analyze_query(request):
    if "query" not in request.json:
        return error("Missing 'query' data")

    results = dict.fromkeys(("exception", "reiz_ql", "edge_ql"))
    try:
        reiz_ql = parse_query(request.json["query"])
        results["reiz_ql"] = normalize(asdict(reiz_ql))
        results["edge_ql"] = IR.construct(compile_to_ir(reiz_ql))
    except ReizQLSyntaxError as syntax_err:
        results["status"] = "error"
        results["exception"] = syntax_err.message
        results.update(syntax_err.position)
    else:
        results["status"] = "success"

    return json_response(results)
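# Example round trip (illustrative; the exact payloads depend on the
# query and the compiler output):
#
#   request:  {"query": "Name('foo')"}
#   success:  {"status": "success", "exception": null,
#              "reiz_ql": {... normalized matcher tree ...},
#              "edge_ql": "SELECT ast::Name FILTER ..."}
#   failure:  {"status": "error", "exception": "<parser message>",
#              ... error position fields merged in ...}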
def main():
    parser = ArgumentParser()
    parser.add_argument(
        "source",
        type=FileType(mode="rb"),
        nargs="?",
        default="-",
        help="the file to parse; defaults to stdin",
    )
    parser.add_argument(
        "--do-not-optimize",
        action="store_false",
        help="do not generate optimized IR",
    )
    options = parser.parse_args()
    with options.source:
        query = parse_query(options.source.read())

    pprint(query)
    ir = compile_to_ir(query)
    print(IR.construct(ir, optimize=options.do_not_optimize, top_level=True))
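# Example invocations (illustrative; the script name is an assumption):
#
#   $ python compile_cli.py query.reizql
#   $ echo "Call(func=Name('print'))" | python compile_cli.py
#
# Passing --do-not-optimize stores False into options.do_not_optimize,
# which is forwarded as optimize=False to IR.construct.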
    module_select = IR.select(
        "Module", filters=IR.object_ref(module), limit=1
    )
    update_filter = IR.filter(
        IR.attribute(None, "id"),
        IR.call("array_unpack", [IR.cast("array<uuid>", IR.variable("ids"))]),
        "IN",
    )
    for base_type in Schema.module_annotated_types:
        update = IR.update(
            base_type.kind_name,
            filters=update_filter,
            assignments={"_module": module_select},
        )
        context.connection.query(IR.construct(update), ids=context.reference_pool)

    logger.info("%r has been inserted successfully", context.filename)
    context.cache()
    return Insertion.INSERTED


def insert_project(project, *, global_ctx):
    with global_ctx.pool.new_connection() as connection:
        project_ctx = global_ctx.new_child(project, connection)
        if not project_ctx.is_cached():
            apply_ast(project_ctx.as_ast(), project_ctx)
            project_ctx.cache()

        stats = Statistics()
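# Conceptual shape of each statement in the update loop above
# (illustrative EdgeQL; the concrete type name comes from
# base_type.kind_name):
#
#   UPDATE ast::expr
#   FILTER .id IN array_unpack(<array<uuid>>$ids)
#   SET { _module := (SELECT Module FILTER <object ref> LIMIT 1) }
#
# i.e. every node recorded in context.reference_pool for this file is
# back-linked to its module in one query per annotated base type.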
IR.selection("end_col_offset"), IR.selection( "_module", [ IR.selection("filename"), IR.selection( "project", [IR.selection("git_source"), IR.selection("git_revision")], ), ], ), ] STATS_QUERY = IR.construct( IR.select( IR.merge( IR.call("count", [IR.wrap(name)]) for name in STATISTICS_NODES))) class LocationNode(ast.AST): _attributes = ("lineno", "col_offset", "end_lineno", "end_col_offset") def get_username(link): if link.endswith("/"): index = 3 else: index = 2 return link.split("/")[-index]