def find_matching_pulls(gh_repo: Repository, commits: Iter[Commit]) -> Generator:
    """Find pull requests that contain commits matching the given ``commits``.

    It yields tuples of a :class:`PullRequest` and a list of the matched
    :class:`Commit` objects (a subset of the given ``commits``).

    The matching algorithm compares commits by *author* (the triplet of name,
    email and date) and by the set of affected files (file names only). A
    match is found when a pull request contains at least one commit from the
    given ``commits`` (i.e. with the same author triplet), and the union of
    file names affected by all the matching commits equals the union for all
    the pull request's commits.
    """
    LOG.debug('Fetching commits referenced in payload')

    commits_by_author = {commit_git_author(c): c for c in commits}
    find_matching_commit = commits_by_author.get
    cache = shared_cache()

    for pullreq in gh_repo.get_pulls(state='open'):
        LOG.debug("Checking pull request #%s", pullreq.number)

        merged_commits = list(
            keep(find_matching_commit, pullreq_commits_authors(pullreq, cache)))
        merged_files = (f.filename for c in merged_commits for f in c.files)
        pullreq_files = (f.filename for f in pullreq.get_files())

        if merged_commits and set(merged_files) == set(pullreq_files):
            del cache[pullreq.id]
            yield pullreq, merged_commits

    LOG.debug("Cached items: %d, max size: %d", cache.currsize, cache.maxsize)
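# A minimal, self-contained sketch of the author-triplet matching idea above,
# using plain dicts in place of PyGithub objects. The payload layout and the
# _git_author stand-in below are hypothetical; only funcy.keep and the
# dict-.get trick come from the function itself.
from funcy import keep

def _git_author(commit):
    a = commit['author']
    return (a['name'], a['email'], a['date'])

payload = [{'author': {'name': 'Ann', 'email': 'ann@example.org',
                       'date': '2021-01-01T12:00:00Z'}}]
pullreq_authors = [('Ann', 'ann@example.org', '2021-01-01T12:00:00Z'),
                   ('Bob', 'bob@example.org', '2021-01-02T08:30:00Z')]

by_author = {_git_author(c): c for c in payload}
# keep() drops the None that .get() returns for unmatched authors.
matched = list(keep(by_author.get, pullreq_authors))
assert matched == payload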
def add_to_path(env: Dict[str, str], name: str, item: str) -> None:
    """
    Add ``item`` to the ``name`` path environment variable in ``env``.
    """
    # GDB helpers import this unit, but they do not necessarily have access to
    # funcy, so use a local import.
    from funcy import keep

    env[name] = os.path.pathsep.join(keep([item, env.get(name, '')]))
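# Usage sketch for add_to_path above: because keep() drops falsy items, an
# unset variable does not leave a dangling path separator behind.
import os

env = {}
add_to_path(env, 'PATH', '/opt/tools/bin')
assert env['PATH'] == '/opt/tools/bin'  # no stray os.pathsep
add_to_path(env, 'PATH', '/usr/local/bin')
assert env['PATH'] == os.pathsep.join(['/usr/local/bin', '/opt/tools/bin'])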
def search(request):
    q = request.GET.get('q')
    if not q:
        return {'series': None}

    exclude_tags = keep(silent(int), request.GET.getlist('exclude_tags'))
    serie_tags, tag_series, tag_ids = series_tags_data()

    q_string, q_tags = _parse_query(q)
    q_tags, wrong_tags = split(lambda t: t.lower() in tag_ids, q_tags)
    if wrong_tags:
        message = 'Unknown tag%s %s.' % ('s' if len(wrong_tags) > 1 else '',
                                         ', '.join(wrong_tags))
        messages.warning(request, message)

    if not q_string and not q_tags:
        return {'series': None}

    qs = search_series_qs(q_string)
    if q_tags:
        q_tag_ids = keep(tag_ids.get(t.lower()) for t in q_tags)
        include_series = reduce(set.intersection,
                                (tag_series[t] for t in q_tag_ids))
        if include_series:
            qs = qs.filter(id__in=include_series)
        else:
            message = 'No series annotated with %s.' \
                % (q_tags[0] if len(q_tags) == 1
                   else 'all these tags simultaneously')
            messages.warning(request, message)
            return {'series': []}

    if exclude_tags:
        exclude_series = join(tag_series[t] for t in exclude_tags)
        qs = qs.exclude(id__in=exclude_series)

    series_ids = qs.values_list('id', flat=True)
    tags = distinct(imapcat(serie_tags, series_ids), key=itemgetter('id'))
    # TODO: do not hide excluded tags

    return {
        'series': qs,
        'tags': tags,
        'serie_tags': serie_tags,
    }
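# The tag triage from the view above, in isolation: funcy's split partitions a
# sequence by predicate in a single pass (lsplit is the eager list variant).
# Tag names here are made up.
from funcy import lsplit

known = {'cancer', 'liver'}
q_tags, wrong_tags = lsplit(lambda t: t.lower() in known, ['Cancer', 'Brain'])
assert (q_tags, wrong_tags) == (['Cancer'], ['Brain'])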
def emit(self, file_root='.', generate_lexer=True, main_programs=set(),
         annotate_fields_types=False, compile_only=False,
         no_property_checks=False):
    """
    Generate sources for the analysis library. Also emit a tiny program
    useful for testing purposes.

    :param str file_root: (optional) Path of the directory in which the
        library should be generated. The default is the current directory.

    :param bool generate_lexer: (optional) Whether to invoke Quex to
        generate the lexer source code. Will do by default. As this can
        take time, it is useful to disable it during testing.

    :param set[str] main_programs: List of names for programs to build in
        addition to the generated library. To each X program, there must be
        a X.adb source file in the $BUILD/src directory.

    :param bool annotate_fields_types: Whether to try and annotate the type
        of fields in the grammar. If this is True, this will actually
        modify the file in which ASTNode subclasses are defined, and
        annotate empty field definitions.
    """
    dir_path = path.join(path.dirname(path.realpath(__file__)), "templates")
    template_utils.template_lookup = TemplateLookup(
        directories=keep([dir_path, self.extensions_dir]
                         + self.template_lookup_extra_dirs),
        strict_undefined=True)

    self.no_property_checks = no_property_checks

    # Automatically add all source files in the "extensions/src" directory
    # to the generated library project.
    if self.extensions_dir:
        src_dir = path.join(self.extensions_dir, 'src')
        if path.isdir(src_dir):
            for filename in os.listdir(src_dir):
                filepath = path.join(src_dir, filename)
                if path.isfile(filepath) and not filename.startswith("."):
                    self.additional_source_files.append(filepath)

    self.annotate_fields_types = annotate_fields_types
    self.compile(compile_only=compile_only)
    if compile_only:
        return
    with global_context(self):
        self._emit(file_root, generate_lexer, main_programs)
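# Why keep() wraps the directory list above: extensions_dir may be None, and
# Mako's TemplateLookup should only see real paths. A standalone sketch with a
# made-up directory name:
from funcy import keep

extensions_dir = None  # no extensions configured
dirs = list(keep(['/tmp/templates', extensions_dir]))
assert dirs == ['/tmp/templates']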
def save(self, **kwargs):
    # Only set specie when it's non-controversial
    taxid = ldistinct(
        keep(self.attrs.get, ['platform_taxid', 'sample_taxid']))
    if len(taxid) == 1:
        self.specie = SPECIES.get(taxid[0])
    else:
        self.specie = ''

    self.platforms = re_all(r'GPL\d+', self.attrs['platform_id'])
    self.samples_count = len(self.attrs['sample_id'].split())

    super(Series, self).save(**kwargs)
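# The "non-controversial" check above, in isolation: keep() drops attributes
# that are missing, and ldistinct() collapses agreement into a single
# candidate taxid.
from funcy import keep, ldistinct

attrs = {'platform_taxid': '9606', 'sample_taxid': '9606'}
assert ldistinct(keep(attrs.get, ['platform_taxid', 'sample_taxid'])) == ['9606']

attrs = {'platform_taxid': '9606', 'sample_taxid': '10090'}
assert len(ldistinct(keep(attrs.get, ['platform_taxid', 'sample_taxid']))) == 2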
def run_cropdetect(filename, cmd=None, **input_args):
    """Runs the cropdetect filter for a file. Returns output as a dict.

    Runs crop detection only on a single frame. To select the frame to use,
    pass `ss` with the start position in seconds.
    """
    stream = (ffmpeg.input(filename, t=1, **input_args)
              .video.filter('cropdetect', round=2))
    # Cropdetect outputs a line per frame
    _, stderr = stream.output('-', f='null').run(cmd=cmd, capture_stderr=True)
    return F.first(
        F.keep(read_cropdetect_line, stderr.decode('utf-8').splitlines()))
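# read_cropdetect_line is not shown above; the sketch below is an assumption,
# not the project's actual implementation. ffmpeg's cropdetect filter logs
# lines ending in "crop=W:H:X:Y" to stderr, so the parser returns a dict for
# matching lines and None otherwise, which F.keep() then discards.
import re

def read_cropdetect_line(line):
    m = re.search(r'crop=(\d+):(\d+):(\d+):(\d+)', line)
    if not m:
        return None
    w, h, x, y = map(int, m.groups())
    return {'w': w, 'h': h, 'x': x, 'y': y}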
def generic_visit(self, node):
    """
    Modified .generic_visit() from NodeTransformer that allows callables in
    the tree.
    """
    for field, old_value in ast.iter_fields(node):
        old_value = getattr(node, field, None)
        if isinstance(old_value, list):
            old_value[:] = keep(self.visit, old_value)
        elif isinstance(old_value, ast.AST):
            new_node = self.visit(old_value)
            if new_node is None:
                delattr(node, field)
            else:
                setattr(node, field, new_node)
    return node
def mygene_fetch(platform, probes, scopes):
    """Queries mygene.info for current entrezid and sym, given an identifier."""
    if scopes == "dna":
        probes = get_dna_probes(platform, probes)
        scopes = "accession"

    def extract_queries(lines):
        lines = remove(r'^(IMAGE:\d+|--[\w>-]+)$', lines)
        queries = cat(re_iter(r'[\w+.-]+', l) for l in lines)
        queries = remove(r'_at$|^\d+-\d+$', queries)  # No such thing
        return queries
        # Unreachable: unicode cleaning for mygene, disabled by the early
        # return above.
        # http://stackoverflow.com/questions/15321138/removing-unicode-u2026-like-characters
        return [q.decode('unicode_escape').encode('ascii', 'ignore')
                for q in queries]

    _by_probe = group_values(probes.items())
    queries_by_probe = walk_values(extract_queries, _by_probe)

    # Collect all possible queries to make a single request to mygene
    queries = set(cat(queries_by_probe.values()))
    if not queries:
        return []
    mygenes = _mygene_fetch(queries, scopes, platform.specie)

    # Form results into rows
    results = []
    dups = 0
    for probe, queries in queries_by_probe.items():
        matches = ldistinct(keep(mygenes.get, queries))
        # Skip dups
        if len(matches) > 1:
            dups += 1
        elif matches:
            entrez, sym = matches[0]
            results.append({
                'probe': probe,
                'mygene_sym': sym,
                'mygene_entrez': entrez,
            })
    if dups:
        cprint('-> Produced %d dups' % dups, 'red')
    return results
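# The dedup step above, in isolation: keep() drops queries with no mygene hit
# and ldistinct() collapses repeated hits, so len(matches) > 1 flags a probe
# with conflicting gene mappings. Gene names here are illustrative.
from funcy import keep, ldistinct

mygenes = {'TP53': (7157, 'TP53'), 'P53': (7157, 'TP53')}
matches = ldistinct(keep(mygenes.get, ['TP53', 'P53', 'NO-HIT']))
assert matches == [(7157, 'TP53')]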
def emit(self, file_root='.', generate_lexer=True, main_programs=set(),
         annotate_fields_types=False):
    """
    Generate sources for the analysis library. Also emit a tiny program
    useful for testing purposes.

    :param str file_root: (optional) Path of the directory in which the
        library should be generated. The default is the current directory.

    :param bool generate_lexer: (optional) Whether to invoke Quex to
        generate the lexer source code. Will do by default. As this can
        take time, it is useful to disable it during testing.

    :param set[str] main_programs: List of names for programs to build in
        addition to the generated library. To each X program, there must be
        a X.adb source file in the $BUILD/src directory.

    :param bool annotate_fields_types: Whether to try and annotate the type
        of fields in the grammar. If this is True, this will actually
        modify the file in which ASTNode subclasses are defined, and
        annotate empty field definitions.
    """
    dir_path = path.join(
        path.dirname(path.realpath(__file__)), "templates"
    )
    template_utils.template_lookup = TemplateLookup(
        directories=keep([dir_path, self.extensions_dir]
                         + self.template_lookup_extra_dirs),
        strict_undefined=True
    )

    # Automatically add all source files in the "extensions/src" directory
    # to the generated library project.
    if self.extensions_dir:
        src_dir = path.join(self.extensions_dir, 'src')
        if path.isdir(src_dir):
            for filename in os.listdir(src_dir):
                filepath = path.join(src_dir, filename)
                if path.isfile(filepath) and not filename.startswith("."):
                    self.additional_source_files.append(filepath)

    self.annotate_fields_types = annotate_fields_types
    self.compile()
    with global_context(self):
        self._emit(file_root, generate_lexer, main_programs)
def object_name(submission, extension):
    name_parts = F.flatten((
        # Parts
        submission['parts'],
        # Names
        ['.'.join(s.get('name', '').split()) for s in submission['singers']],
        # Location
        F.keep(submission.get('location', {}).get,
               ('city', 'state', 'country'))))
    return '/'.join(
        F.map(
            sanitize_filename,
            (
                submission['singing'],
                submission['song'],
                # attach a uuid to the file name so that we never clobber
                # uploads
                '_'.join(filter(None, name_parts))
                + '.' + str(uuid.uuid4()) + extension)))
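# The location lookup above, in isolation: passing dict.get to F.keep() pulls
# out only the keys that are present, in the order requested.
import funcy as F

location = {'city': 'Austin', 'country': 'USA'}  # 'state' missing
assert list(F.keep(location.get, ('city', 'state', 'country'))) == \
    ['Austin', 'USA']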
def run(self, prev):
    fs.mkdir(self.working_dir('test'))
    fs.cp(self.test_dir('test.py'), self.working_dir('test', 'test.py'))

    # Try to build, but don't log errors in the build, and recover from
    # TestError: we want to be able to test compilation errors too.
    try:
        self.run_and_check([sys.executable, 'build_lib.py'],
                           log_errors=False)
    except TestError:
        return

    environ['PYTHONPATH'] = P.pathsep.join(
        keep([environ.get('PYTHONPATH'), self.working_dir('test')])
    )
    self.run_and_check(['python', self.working_dir('test', 'test.py')],
                       append_output=True)
def unparse_nodes(ctx, f):
    """
    Unparse the nodes for the current language to the given file.
    """
    from langkit.diagnostics import check_source_language, Severity
    check_source_language(
        predicate=lpl is not None,
        message="libpythonlang not found, comments cannot be unparsed",
        severity=Severity.warning,
        do_raise=False
    )
    types = keep(emit_node_type(t)
                 for t in ctx.astnode_types + ctx._struct_types)
    template = """
    % for t in types:
    $hl
    ${t}
    % endfor
    """
    f.write(pp(sf(template)))
def get_included_syntax_name(syntaxes):
    '''
    Sometimes the user would like to include a syntax file in a syntax
    command (:h syn-include).
    '''
    def get_it(syntax_cmd):
        return fn.first(re.findall(r'include (@\w+) ', syntax_cmd))

    return fn.first(fn.keep(imap(get_it, syntaxes)))
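# Usage sketch for get_included_syntax_name (Python 2, where itertools.imap
# exists): it extracts the first "@Cluster" name from ":syn include" commands,
# skipping commands that don't match. Example commands are made up.
syntaxes = ['syntax include @Python syntax/python.vim',
            'syntax region pyBlock start=... end=...']
assert get_included_syntax_name(syntaxes) == '@Python'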
def custom_merge(*args):
    return list(set(keep(flatten(args))))
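# Demo for custom_merge, assuming funcy's keep and flatten are in scope as in
# the function above: flatten() linearizes the nested arguments, keep() drops
# falsy items (None, 0, ''), and set() deduplicates, at the cost of losing the
# original ordering.
assert sorted(custom_merge([1, 2, None], (2, [3, 0]))) == [1, 2, 3]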
def __init__(self, context, lib_root, extensions_dir,
             main_source_dirs=set(), main_programs=set(),
             no_property_checks=False, generate_ada_api=True,
             generate_gdb_hook=True, pretty_print=False,
             post_process_ada=None, post_process_cpp=None,
             post_process_python=None, coverage=False,
             relative_project=False, unparse_script=None):
    """
    Generate sources for the analysis library. Also emit a tiny program
    useful for testing purposes.

    :param str lib_root: Path of the directory in which the library should
        be generated.

    :param str|None extensions_dir: Directory to contain extensions for
        code generation. If None is provided, assume there is no extension.

    :param set[str] main_source_dirs: List of source directories to use in
        the project file for mains. Source directories must be relative to
        the mains project file directory (i.e. $BUILD/src-mains).

    :param set[str] main_programs: List of names for programs to build in
        addition to the generated library. To each X program, there must be
        a X.adb source file in the $BUILD/src directory.

    :param bool no_property_checks: If True, do not emit safety checks in
        the generated code for properties. Namely, this disables null
        checks on field access.

    :param bool generate_ada_api: If True, generate the public Ada API. If
        False and there is no main to generate, do not generate this Ada
        API.

    :param bool generate_gdb_hook: Whether to generate the
        ".debug_gdb_scripts" section. Good for debugging, but better to
        disable for releases.

    :param bool pretty_print: If true, pretty-print the generated sources.

    :param post_process_ada: Optional post-processing for generated Ada
        source code.
    :type post_process_ada: None|(str) -> str

    :param post_process_cpp: Optional post-processing for generated C++
        source code.
    :type post_process_cpp: None|(str) -> str

    :param post_process_python: Optional post-processing for generated
        Python source code.
    :type post_process_python: None|(str) -> str

    :param bool coverage: Instrument the generated library to compute its
        code coverage. This requires GNATcoverage.

    :param bool relative_project: See libmanage's --relative-project
        option.
    """
    self.context = context
    self.verbosity = context.verbosity

    self.lib_root = lib_root
    self.cache = Cache(
        os.path.join(self.lib_root, 'obj', 'langkit_cache'))

    self.extensions_dir = extensions_dir

    # TODO: contain the add_template_dir calls to this context (i.e. avoid
    # global mutation).
    if self.extensions_dir:
        add_template_dir(self.extensions_dir)
    for dirpath in keep(self.context.template_lookup_extra_dirs):
        add_template_dir(dirpath)

    self.no_property_checks = no_property_checks
    self.generate_ada_api = generate_ada_api or bool(main_programs)
    self.generate_gdb_hook = generate_gdb_hook
    self.generate_unparser = context.generate_unparser
    self.pretty_print = pretty_print
    self.post_process_ada = post_process_ada
    self.post_process_cpp = post_process_cpp
    self.post_process_python = post_process_python
    self.coverage = coverage
    self.gnatcov = context.gnatcov
    self.relative_project = relative_project

    # Automatically add all source files in the "extensions/src" directory
    # to the generated library project.
    self.extensions_src_dir = None
    if self.extensions_dir:
        src_dir = path.join(self.extensions_dir, 'src')
        if path.isdir(src_dir):
            self.extensions_src_dir = src_dir
            for filename in os.listdir(src_dir):
                filepath = path.join(src_dir, filename)
                if path.isfile(filepath) and not filename.startswith('.'):
                    self.context.additional_source_files.append(filepath)

    self.main_source_dirs = main_source_dirs
    self.main_programs = main_programs

    self.lib_name_low = context.ada_api_settings.lib_name.lower()
    """
    Lower-case name for the generated library.
    """

    self.lib_name_up = context.ada_api_settings.lib_name.upper()
    """
    Upper-case name for the generated library.
    """

    # Paths for the various directories in which code is generated
    self.src_dir = path.join(self.lib_root, "src")
    self.src_mains_dir = path.join(self.lib_root, "src-mains")
    self.scripts_dir = path.join(self.lib_root, "scripts")
    self.python_dir = path.join(self.lib_root, "python")
    self.python_pkg_dir = path.join(
        self.lib_root, "python", context.python_api_settings.module_name)
    self.ocaml_dir = path.join(self.lib_root, "ocaml")

    self.lib_project = path.join(self.lib_root, f"{self.lib_name_low}.gpr")
    self.mains_project = path.join(self.lib_root, "mains.gpr")

    self.dfa_code = None
    """
    Holder for the data structures used to generate code for the lexer
    state machine (DFA). As an optimization, it is left to None if we
    decide not to generate it (i.e. when the already generated sources are
    up-to-date).

    :type: langkit.lexer.regexp.DFACodeGenHolder
    """

    self._project_file_emitted = False
    """
    Whether we emitted a project file for the generated library.

    :type: bool
    """

    self.project_languages = {'Ada'}
    """
    List of GPR names for languages used in the generated library.

    :type: set[str]
    """

    self.library_interfaces = set()
    """
    Set of source file base names for all sources that must appear in the
    "Interfaces" attribute of the generated library project file.

    :type: set[str]
    """

    self.instr_md = InstrumentationMetadata()

    # Add all additional source files to the list of library interfaces and
    # declare them as such in instrumentation metadata.
    for f in context.additional_source_files:
        self.add_library_interface(f, generated=False)

    if self.coverage:
        # Add the buffer-list unit from GNATcoverage's instrumentation to
        # the list of library interfaces. TODO: hopefully, we should not
        # have to do this anymore after S916-064 is addressed.
        self.library_interfaces.add(self.gnatcov.buffer_list_file(self))

    self.main_project_file = os.path.join(self.lib_root,
                                          f'{self.lib_name_low}.gpr')

    self.unparse_script = unparse_script
    """
    RA22-015: If set to something else than None, then the "dsl unparse"
    pass will be run on the given script.

    :type: langkit.compile_context.UnparseScript|None
    """

    # Determine whether we have user external properties. If so,
    # automatically WITH $.Implementation.Extensions from the body of
    # $.Analysis and $.Implementation.
    if any(prop.user_external
           for prop in context.all_properties(include_inherited=True)):
        for unit in ('Analysis', 'Implementation', 'Implementation.C'):
            context.add_with_clause(
                unit, ADA_BODY,
                '{}.Implementation.Extensions'.format(
                    context.ada_api_settings.lib_name),
                use_clause=True)
def add_path(name, p):
    env[name] = path.pathsep.join(keep([p, env.get(name, '')]))
def emit_expr(expr, **ctx):
    from langkit.expressions import (
        Literal, Let, FieldAccess, AbstractVariable, SelfVariable,
        EntityVariable, LogicTrue, LogicFalse, unsugar, Map, All, Any,
        GetSymbol, Match, Eq, BinaryBooleanOperator, Then, OrderingTest,
        Quantifier, If, IsNull, Cast, DynamicVariable, IsA, Not,
        SymbolLiteral, No, Cond, New, CollectionSingleton, Concat,
        EnumLiteral, EnvGet, ArrayLiteral, Arithmetic, PropertyError,
        CharacterLiteral, Predicate, StructUpdate, BigIntLiteral,
        RefCategories, Bind, Try, Block, Contains, PropertyDef
    )

    def is_a(*names):
        return any(expr.__class__.__name__ == n for n in names)

    then_underscore_var = ctx.get('then_underscore_var')
    overload_coll_name = ctx.get('overload_coll_name')
    walker = ctx.get('walker')

    def emit_lambda(expr, vars):
        vars_str = ", ".join(var_name(var) for var in vars)
        return "({}) => {}".format(vars_str, ee(expr))

    def emit_method_call(receiver, name, args=[], force_parens=True,
                         as_multiline=False):
        return "{}.{}{}".format(
            receiver, name,
            emit_paren(", ".join(args), as_multiline)
            if force_parens or args else ""
        )

    def emit_let(expr):
        if len(expr.vars) == 0:
            with walker.returned_expr():
                return walker.emit_comments() + ee(expr.expr)

        vars_defs = ""
        for i, (var, abs_expr) in enumerate(zip(expr.vars, expr.var_exprs)):
            with walker.var_assignment(i):
                vars_defs += "{}val {} = {};$hl".format(
                    walker.emit_comments(), var_name(var), ee(abs_expr)
                )
        vars_defs += walker.emit_comments()

        with walker.returned_expr():
            return "{{$i$hl{}$hl{}{}$hl$d}}".format(
                vars_defs, walker.emit_comments(), ee(expr.expr)
            )

    def ee(expr, **extra_ctx):
        full_ctx = dict(ctx, **extra_ctx)
        return emit_expr(expr, **full_ctx)

    def ee_pexpr(expr, **extra_ctx):
        # We don't want to carry arg_expr data left in the context from a
        # previous call; it needs to be specified in each call to ee_pexpr.
        ctx.pop('arg_expr', None)
        full_ctx = dict(ctx, **extra_ctx)
        return emit_paren_expr(expr, **full_ctx)

    expr = unsugar(expr)

    if isinstance(expr, Literal):
        return str(expr.literal).lower()
    elif isinstance(expr, SymbolLiteral):
        return json.dumps(expr.name)
    elif isinstance(expr, PropertyError):
        return "raise PropertyError({})".format(
            repr(expr.message) if expr.message else ""
        )
    elif isinstance(expr, IsA):
        return "{} is {}".format(
            ee_pexpr(expr.expr),
            "{}".format(" | ".join(type_name(t) for t in expr.astnodes))
        )
    elif isinstance(expr, LogicTrue):
        return "%true"
    elif isinstance(expr, LogicFalse):
        return "%false"
    elif is_a("bind"):
        bind = "bind {} = {};$hl".format(
            ee(expr.expr_0), ee(expr.expr_1)
        )
        return "{{$i$hl{}$hl{}$hl$d}}".format(
            bind, ee(expr.expr_2)
        )
    elif isinstance(expr, Let):
        if isinstance(expr, Block):
            return emit_let(expr)
        else:
            with walker.call('Let'):
                with walker.arg(0):
                    return walker.emit_comments() + emit_let(expr)
    elif isinstance(expr, Map):
        op_name = expr.kind
        args = []
        vars = [expr.element_var]
        if expr.requires_index:
            vars.append(expr.index_var)

        if op_name in ["map", "mapcat"]:
            args.append(emit_lambda(expr.expr, vars))
        elif op_name == "filter":
            args.append(emit_lambda(expr.filter_expr, vars))
        elif op_name == "filter_map":
            args.append(emit_lambda(expr.expr, vars))
            args.append(emit_lambda(expr.filter_expr, vars))
            op_name = "filtermap"
        elif op_name == "take_while":
            args.append(emit_lambda(expr.take_while_expr, vars))

        if overload_coll_name:
            op_name = overload_coll_name
            del ctx['overload_coll_name']

        coll = ee(expr.collection)
        return emit_method_call(coll, op_name, args)
    elif isinstance(expr, Quantifier):
        return emit_method_call(
            ee(expr.collection),
            expr.kind,
            [emit_lambda(expr.expr, [expr.element_var])]
        )
    elif isinstance(expr, Contains):
        return emit_method_call(ee(expr.collection), "contains",
                                [ee(expr.item)])
    elif isinstance(expr, If):
        with walker.call('If'):
            with walker.arg(0):
                coms = walker.emit_comments()
                cond_strn = ee_pexpr(expr.cond)
            res = "{}if {} then {} else {}".format(
                coms,
                cond_strn,
                ee_pexpr(expr._then, arg_expr=1),
                ee_pexpr(expr.else_then, arg_expr=2)
            )
            return res
    elif isinstance(expr, Cond):
        with walker.call('Cond'):
            branches = expr.branches
            res = ""
            for i, b in enumerate(branches):
                # condition
                with walker.arg(i * 2):
                    coms = walker.emit_comments()
                    cond_strn = ee_pexpr(b[0])
                expr_strn = ee_pexpr(b[1], arg_expr=i * 2 + 1)
                res += "{}{} {} then {}$hl".format(
                    coms,
                    "if" if i == 0 else "elif",
                    cond_strn,
                    expr_strn
                )
            with walker.arg(len(branches) * 2):
                coms = walker.emit_comments()
                else_strn = ee_pexpr(expr.else_expr)
            res += "{}else {}".format(coms, else_strn)
            return res
    elif isinstance(expr, IsNull):
        return "{}.is_null".format(ee(expr.expr))
    elif isinstance(expr, Cast):
        return "{}.as[{}]{}".format(
            ee(expr.expr),
            type_name(expr.dest_type),
            "!" if expr.do_raise else "",
        )
    elif isinstance(expr, All):
        return ee(expr.equation_array, overload_coll_name="logic_all")
    elif isinstance(expr, Any):
        return ee(expr.equation_array, overload_coll_name="logic_any")
    elif isinstance(expr, Match):
        with walker.method_call("match"):
            res = ""
            with walker.self_arg():
                coms = walker.emit_comments()
                matched_expr_strn = ee(expr.matched_expr)
            res += "{}match {} {{$i".format(coms, matched_expr_strn)
            for i, (typ, var, e) in enumerate(expr.matchers):
                with walker.arg(i):
                    coms = walker.emit_comments()
                    if coms and i > 0:
                        coms = "$hl" + coms
                    res += "$hl{}case {}{} => {}".format(
                        coms,
                        var_name(var),
                        (" " + sf(": ${type_name(typ)}")) if typ else "",
                        ee(e)
                    )
            res += "$d$hl}"
            return res
    elif isinstance(expr, Eq):
        return "{} = {}".format(ee(expr.lhs), ee(expr.rhs))
    elif isinstance(expr, BinaryBooleanOperator):
        with walker.boolean_binop(expr.kind):
            def emit_bool_op_rec(expr, depth):
                if depth == 2:
                    lhs = emit_paren_expr(expr.lhs, arg_expr=0, **ctx)
                else:
                    lhs = emit_bool_op_rec(expr.lhs, depth - 1)
                return "{} {} {}".format(
                    lhs,
                    expr.kind,
                    emit_paren_expr(expr.rhs, arg_expr=depth - 1, **ctx)
                )
            return emit_bool_op_rec(expr, walker.arg_count())
    elif isinstance(expr, Not):
        return "not {}".format(emit_paren_expr(expr.expr, **ctx))
    elif isinstance(expr, Then):
        if expr.var_expr.source_name is None:
            assert expr.underscore_then
            # Match is like a function call in the Python DSL, but is a
            # regular expression in the new syntax, so we don't want to use
            # the ? syntax on it.
            if not isinstance(expr.then_expr, Match):
                return "{}?{}".format(
                    ee(expr.expr),
                    ee(expr.then_expr, then_underscore_var=expr.var_expr)
                )
        return emit_method_call(
            ee_pexpr(expr.expr),
            "do",
            keep([
                "({}) => {}".format(var_name(expr.var_expr),
                                    ee(expr.then_expr)),
                "default_val={}".format(ee_pexpr(expr.default_val))
                if expr.default_val else None
            ])
        )
    elif isinstance(expr, OrderingTest):
        return "{} {} {}".format(
            ee_pexpr(expr.lhs),
            expr.OPERATOR_IMAGE[expr.operator],
            ee_pexpr(expr.rhs)
        )
    elif isinstance(expr, GetSymbol):
        return "{}.symbol".format(ee(expr.node_expr))
    elif is_a("as_entity", "as_bare_entity", "children", "env_parent",
              "rebindings_parent", "parents", "parent", "root", "env_node",
              "rebindings_new_env", "rebindings_old_env"):
        # Field like expressions
        exprs = expr.sub_expressions
        return emit_method_call(ee(exprs[0]), type(expr).__name__,
                                map(ee, exprs[1:]), False)
    elif is_a("append_rebinding", "concat_rebindings", "env_node",
              "get_value", "solve", "is_referenced_from", "env_group",
              "length", "can_reach", "as_int", "unique", "env_orphan",
              "is_visible_from", "as_array", "rebind_env"):
        # Method like expressions
        exprs = expr.sub_expressions
        return emit_method_call(ee(exprs[0]), type(expr).__name__,
                                map(ee, exprs[1:]))
    elif isinstance(expr, EnumLiteral):
        return expr.value.dsl_name
    elif isinstance(expr, Try):
        return "try $sl$i{}$sl$d {}".format(
            ee_pexpr(expr.try_expr),
            "or {}".format(ee_pexpr(expr.else_expr))
            if expr.else_expr is not None else ""
        )
    elif isinstance(expr, Arithmetic):
        return "{} {} {}".format(ee_pexpr(expr.l), expr.op, ee_pexpr(expr.r))
    elif isinstance(expr, EnvGet):
        args = [ee(expr.symbol)]
        if expr.sequential_from:
            args.append("from={}".format(ee(expr.sequential_from)))
        if expr.categories:
            args.append('categories={}'.format(ee(expr.categories)))
        return emit_method_call(
            ee(expr.env),
            "get_first" if expr.only_first else "get",
            args
        )
    elif is_a("at"):
        # Recognize find
        if (isinstance(expr.expr_0, Map)
                and expr.expr_0.kind == 'filter'
                and ee(expr.expr_1) == "0"):
            return ee(expr.expr_0, overload_coll_name="find")

        return "{}?({})".format(ee(expr.expr_0), ee(expr.expr_1))
    elif is_a("at_or_raise"):
        return "{}({})".format(ee(expr.expr_0), ee(expr.expr_1))
    elif isinstance(expr, FieldAccess):
        args = []
        has_any_commented_arg = False
        is_property = isinstance(expr.constructed_expr.node_data,
                                 PropertyDef)
        if expr.arguments:
            with walker.method_call(expr.field):
                field_coms = walker.emit_comments()
                receiver_str = ee(expr.receiver)
                for i in range(walker.arg_count()):
                    kw = walker.arg_keyword(i)
                    with walker.arg(i):
                        arg_coms = walker.emit_comments()
                        if kw is None:
                            args.append(arg_coms
                                        + ee(expr.arguments.args[i]))
                        else:
                            args.append(arg_coms + "{}={}".format(
                                kw, ee(expr.arguments.kwargs[kw])
                            ))
                        has_any_commented_arg |= arg_coms != ""
        else:
            field_coms = ""
            receiver_str = ee(expr.receiver)

        return field_coms + emit_method_call(
            receiver_str, expr.field, args,
            as_multiline=has_any_commented_arg,
            force_parens=is_property
        )
    elif isinstance(expr, Concat):
        return "{} & {}".format(ee_pexpr(expr.array_1),
                                ee_pexpr(expr.array_2))
    elif isinstance(expr, EntityVariable):
        return "self"
    elif isinstance(expr, SelfVariable):
        return "node"
    elif isinstance(expr, DynamicVariable):
        return expr.argument_name.lower
    elif isinstance(expr, AbstractVariable):
        if then_underscore_var:
            if id(then_underscore_var) == id(expr):
                return ""
        return var_name(expr)
    elif isinstance(expr, No):
        # TODO: Emit valid null values for other types, eg. [] for arrays.
        return "null".format(type_name(expr.expr_type))
    elif isinstance(expr, CollectionSingleton):
        if then_underscore_var:
            return emit_method_call(ee(expr.expr), "singleton")
        else:
            return "[{}]".format(ee(expr.expr))
    elif isinstance(expr, New):
        return "{}{}".format(
            type_name(expr.struct_type),
            emit_paren(", ".join(
                "{}={}".format(unparsed_name(k), ee(v))
                for k, v in expr.field_values.items()
            ))
        )
    elif isinstance(expr, StructUpdate):
        return '{}.update({})'.format(
            ee(expr.expr),
            ', '.join('{}={}'.format(name, ee(field_expr))
                      for name, field_expr in sorted(expr.assocs.items()))
        )
    elif isinstance(expr, ArrayLiteral):
        if not len(expr.elements):
            return '[]'
        elif isinstance(expr.elements[0], CharacterLiteral):
            return repr(u"".join(e.literal for e in expr.elements))[1:]
        return "[{}]".format(", ".join(ee(el) for el in expr.elements))
    elif isinstance(expr, CharacterLiteral):
        # Get rid of the 'u' unicode prefix
        return repr(expr.literal)[1:]
    elif isinstance(expr, BigIntLiteral):
        return 'BigInt({})'.format(
            str(expr.expr) if isinstance(expr.expr, (int, long))
            else ee(expr.expr)
        )
    elif isinstance(expr, RefCategories):
        return 'RefCats({})'.format(', '.join(
            '{}={}'.format(name, ee(value))
            for name, value in list(sorted(expr.cat_map.items()))
            + [("others", expr.default)]
        ))
    elif isinstance(expr, Predicate):
        return "%predicate({})".format(", ".join(keep(
            [fqn(expr.pred_property), ee(expr.exprs[0])]
            + [ee(e) for e in expr.exprs[1:]]
        )))
    elif is_a("domain"):
        return "%domain({}, {})".format(ee(expr.expr_0), ee(expr.expr_1))
    elif isinstance(expr, Bind):
        return "%eq({})".format(", ".join(keep([
            ee(expr.from_expr),
            ee(expr.to_expr),
            "eq_prop={}".format(fqn(expr.eq_prop)) if expr.eq_prop else "",
            "conv_prop={}".format(fqn(expr.conv_prop))
            if expr.conv_prop else ""
        ])))
    else:
        # raise NotImplementedError(type(expr))
        return repr(expr)
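# The optional-argument pattern used throughout emit_expr above: build the
# argument list with None/"" placeholders for absent pieces and let keep()
# drop them before joining.
from funcy import keep

args = ["self.prop", None, ""]
assert ", ".join(keep(args)) == "self.prop"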
def __init__(self, context, lib_root, extensions_dir,
             main_source_dirs=set(), main_programs=set(),
             no_property_checks=False, generate_astdoc=True,
             generate_gdb_hook=True, pretty_print=False,
             post_process_ada=None, post_process_cpp=None,
             post_process_python=None):
    """
    Generate sources for the analysis library. Also emit a tiny program
    useful for testing purposes.

    :param str lib_root: Path of the directory in which the library should
        be generated.

    :param str|None extensions_dir: Directory to contain extensions for
        code generation. If None is provided, assume there is no extension.

    :param set[str] main_source_dirs: List of source directories to use in
        the project file for mains. Source directories must be relative to
        the mains project file directory (i.e. $BUILD/src).

    :param set[str] main_programs: List of names for programs to build in
        addition to the generated library. To each X program, there must be
        a X.adb source file in the $BUILD/src directory.

    :param bool no_property_checks: If True, do not emit safety checks in
        the generated code for properties. Namely, this disables null
        checks on field access.

    :param bool generate_astdoc: Whether to generate the HTML documentation
        for AST nodes, their fields and their properties.

    :param bool generate_gdb_hook: Whether to generate the
        ".debug_gdb_scripts" section. Good for debugging, but better to
        disable for releases.

    :param bool pretty_print: If true, pretty-print the generated sources.

    :param post_process_ada: Optional post-processing for generated Ada
        source code.
    :type post_process_ada: None|(str) -> str

    :param post_process_cpp: Optional post-processing for generated C++
        source code.
    :type post_process_cpp: None|(str) -> str

    :param post_process_python: Optional post-processing for generated
        Python source code.
    :type post_process_python: None|(str) -> str
    """
    self.context = context
    self.verbosity = context.verbosity

    self.lib_root = lib_root
    self.cache = Cache(
        os.path.join(self.lib_root, 'obj', 'langkit_cache'))

    self.extensions_dir = extensions_dir

    # TODO: contain the add_template_dir calls to this context (i.e. avoid
    # global mutation).
    if self.extensions_dir:
        add_template_dir(self.extensions_dir)
    for dirpath in keep(self.context.template_lookup_extra_dirs):
        add_template_dir(dirpath)

    self.no_property_checks = no_property_checks
    self.generate_astdoc = generate_astdoc
    self.generate_gdb_hook = generate_gdb_hook
    self.pretty_print = pretty_print
    self.post_process_ada = post_process_ada
    self.post_process_cpp = post_process_cpp
    self.post_process_python = post_process_python

    # Automatically add all source files in the "extensions/src" directory
    # to the generated library project.
    self.extensions_src_dir = None
    if self.extensions_dir:
        src_dir = path.join(self.extensions_dir, 'src')
        if path.isdir(src_dir):
            self.extensions_src_dir = src_dir
            for filename in os.listdir(src_dir):
                filepath = path.join(src_dir, filename)
                if path.isfile(filepath) and not filename.startswith('.'):
                    self.context.additional_source_files.append(filepath)

    self.main_source_dirs = main_source_dirs
    self.main_programs = main_programs

    self.lib_name_low = context.ada_api_settings.lib_name.lower()
    """
    Lower-case name for the generated library.
    """

    # Paths for the various directories in which code is generated
    self.include_path = path.join(self.lib_root, 'include')
    self.src_path = path.join(self.lib_root, 'include', self.lib_name_low)
    self.lib_path = path.join(self.lib_root, 'lib')
    self.share_path = path.join(self.lib_root, 'share', self.lib_name_low)
    self.python_path = path.join(self.lib_root, 'python')

    self.dfa_code = None
    """
    Holder for the data structures used to generate code for the lexer
    state machine (DFA). As an optimization, it is left to None if we
    decide not to generate it (i.e. when the already generated sources are
    up-to-date).
    """
def add_path(name, p):
    if isinstance(name, unicode):
        name = name.encode('ascii')
    if isinstance(p, unicode):
        p = p.encode('ascii')
    env[name] = path.pathsep.join(keep([p, env.get(name, b'')]))