def py_dep_import(src_path, module_name, dir_names):
  'Calculate dependencies for a Python ast.Import or ast.ImportFrom node.'
  src_dir = path_dir(src_path)
  leading_dots_count = re.match(r'\.*', module_name).end()
  module_parts = ['..'] * leading_dots_count + module_name[leading_dots_count:].split('.')
  module_path = path_join(src_dir, *module_parts) + '.py'
  if is_file(module_path):
    yield module_path
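
# Hedged usage sketch for py_dep_import, assuming the same helpers as above (path_dir, path_join,
# is_file) are in scope; the source tree and module name below are hypothetical. For a source file
# 'proj/app/main.py' importing '.util', the single leading dot becomes one '..' path component, so
# the candidate dependency path is 'proj/app/../util.py', yielded only if that file exists on disk.
for dep_path in py_dep_import('proj/app/main.py', '.util', dir_names=[]):
  print('python dependency:', dep_path)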
def fetch(url, expected_status_code=200, headers={}, timeout=4, delay=0, delay_range=0):
  "Fetch the data at `url` and save it to a path in the '_fetch' directory derived from the URL."
  path = path_join('_fetch', path_for_url(url))
  if not path_exists(path):
    errFL('fetch: {}', url)
    r = _fetch(url, timeout, headers, expected_status_code)
    make_dirs(path_dir(path))
    with open(path, 'wb') as f:
      f.write(r.content)
    sleep_min = delay - delay_range * 0.5
    sleep_max = delay + delay_range * 0.5
    sleep_time = random.uniform(sleep_min, sleep_max)
    if sleep_time > 0:
      time.sleep(sleep_time)
  return path
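
# Hedged usage sketch for fetch(); the URL and politeness parameters below are illustrative, not
# project defaults. On a cache hit (the derived path under '_fetch' already exists) the function
# skips the network request and the sleep, and just returns the cached path.
cached_path = fetch('https://example.com/data.json', delay=1, delay_range=0.5)
with open(cached_path, 'rb') as f:
  data = f.read()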
def output_swift(path:str, dfas:List[DFA], mode_transitions:ModeTransitions, pattern_descs:Dict[str,str], license:str, args:Namespace) -> None:

  # Create safe mode names.
  modes = { dfa.name : swift_safe_sym(dfa.name) for dfa in dfas }
  mode_case_defs = [f'case {modes[dfa.name]} = {dfa.start_node}' for dfa in dfas]

  # Create safe token kind names.
  kind_syms = { kind : swift_safe_sym(kind) for kind in pattern_descs }
  kind_syms['incomplete'] = 'incomplete'
  assert len(kind_syms) == len(set(kind_syms.values()))
  token_kind_case_defs = [f'case {sym}' for sym in sorted(kind_syms.values())]

  # Token kind descriptions.
  def pattern_desc(kind:str) -> str: return swift_repr(pattern_descs[kind])
  token_kind_case_descs = [f'case .{sym}: return {pattern_desc(kind)}' for kind, sym in sorted(kind_syms.items())]

  # Mode transitions dictionary.
  def mode_trans_dict(d:Dict[str,Tuple[str,str]]) -> dict:
    return { SwiftEnum(parent_kind) : (SwiftEnum(child_mode), SwiftEnum(child_kind))
      for parent_kind, (child_mode, child_kind) in d.items() }
  mode_transitions_dict = { SwiftEnum(modes[name]) : mode_trans_dict(d) for name, d in mode_transitions.items() }

  # State cases.
  def byte_case_patterns(chars:List[int]) -> List[str]:
    def fmt(l:int, h:int) -> str:
      if l == h: return hex(l)
      return hex(l) + (', ' if l + 1 == h else '...') + hex(h)
    return [fmt(*r) for r in closed_int_intervals(chars)]

  def byte_case(dfa:DFA, chars:List[int], dst:int) -> str:
    pattern_kind = dfa.match_kind(dst)
    sym = None if pattern_kind is None else kind_syms.get(pattern_kind)
    return 'case {chars}: state = {dst}{suffix}'.format(
      chars=', '.join(byte_case_patterns(chars)),
      dst=dst,
      suffix=f'; last = pos; kind = .{sym}' if sym else '')

  def byte_cases(dfa:DFA, node:int) -> List[str]:
    dst_chars:DefaultDict[int, List[int]] = DefaultDict(list)
    for char, dst in sorted(dfa.transitions[node].items()):
      dst_chars[dst].append(char)
    dst_chars_sorted = sorted(dst_chars.items(), key=lambda p: p[1])
    return [byte_case(dfa, chars, dst) for dst, chars in dst_chars_sorted]

  def transition_code(dfa:DFA, node:int) -> str:
    d = dfa.transitions[node]
    if not d: return 'break loop' # no transitions.
    return render_template('''switch byte {
      ${byte_cases}
      default: break loop
    }''',
      byte_cases='\n '.join(byte_cases(dfa, node)))

  def state_case(dfa:DFA, node:int) -> str:
    mode = dfa.name
    kind = dfa.match_kind(node)
    if kind: desc = kind
    elif node in dfa.pre_match_nodes: desc = f'{mode} pre-match'
    else: desc = f'{mode} post-match'
    return 'case {node}: // {desc}.\n {transition_code}'.format(
      desc=desc, node=node, transition_code=transition_code(dfa, node))

  state_cases = [state_case(dfa, node) for dfa in dfas for node in sorted(dfa.transitions.keys())]

  with open(path, 'w', encoding='utf8') as f:
    src = render_template(template,
      Name=args.type_prefix,
      license=license,
      mode_case_defs='\n '.join(mode_case_defs),
      mode_transitions_dict=swift_repr(mode_transitions_dict, indent=2),
      patterns_path=args.path,
      state_cases='\n '.join(state_cases),
      token_kind_case_defs='\n '.join(token_kind_case_defs),
      token_kind_case_descs='\n '.join(token_kind_case_descs),
    )
    f.write(src)
    if args.test:
      # Append the base source because `swift` will only interpret a single file.
      spec = find_module_spec('legs')
      assert spec is not None
      pkg_dir_path = path_dir(cast(str, spec.origin))
      legs_base_path = path_join(pkg_dir_path, 'legs_base.swift')
      legs_base_contents = open(legs_base_path).read()
      f.write(legs_base_contents)
      # Write the test main function.
      test_src = render_template(test_template, Name=args.type_prefix)
      f.write(test_src)
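
# Hedged illustration of the byte-interval formatting performed by byte_case_patterns above.
# This is a standalone re-implementation for demonstration only: the real code uses the project's
# closed_int_intervals helper, which is re-sketched here as _demo_intervals. Contiguous byte values
# collapse into a Swift closed range ('0x30...0x39'), while an interval of exactly two values is
# emitted as two comma-separated case patterns ('0x41, 0x42').

def _demo_intervals(chars):
  # Group a sorted list of ints into closed [low, high] intervals.
  intervals = []
  for c in chars:
    if intervals and c == intervals[-1][1] + 1:
      intervals[-1][1] = c
    else:
      intervals.append([c, c])
  return intervals

def _demo_byte_case_patterns(chars):
  def fmt(l, h):
    if l == h: return hex(l)
    return hex(l) + (', ' if l + 1 == h else '...') + hex(h)
  return [fmt(l, h) for l, h in _demo_intervals(chars)]

print(_demo_byte_case_patterns(list(range(0x30, 0x3a)) + [0x41, 0x42]))
# -> ['0x30...0x39', '0x41, 0x42']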
# gloss uses a single system installation directory for all files, to ease removal and upgrade.
# a custom installation directory can be specified as an argument to the installation scripts.
# please note that custom directories are not well tested.

install_prefix = '/usr/local'

# parse arguments.
if len(argv) > 2:
  exit('usage: optionally specify a custom installation prefix.')
if len(argv) == 2:
  install_prefix = argv[1]
check(' ' not in install_prefix, "installation prefix contains space.")

# determine the gloss source directory.
src_dir = abs_path(path_join(path_dir(argv[0]), '..'))
check(is_dir(src_dir), 'bad source directory:', src_dir)

dst_dir = path_join(install_prefix, 'gloss')

uname = os_uname()[0].lower()
if uname == 'darwin':
  platform = 'mac'
elif uname == 'linux':
  with open('/etc/issue') as f:
    # get the first word from the issue string (e.g. 'Fedora')
    platform = f.readline().split()[0].lower()
else:
  platform = uname.lower()

errSL('src_dir:', src_dir)
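
# Hedged sketch: the platform-detection logic above, factored into a standalone function for
# illustration only. `uname_sysname` and `issue_first_line` stand in for os_uname()[0] and the
# first line of /etc/issue; the example values in the asserts are illustrative.

def detect_platform(uname_sysname, issue_first_line=''):
  uname = uname_sysname.lower()
  if uname == 'darwin':
    return 'mac'
  if uname == 'linux':
    # first word of the issue string, e.g. 'Fedora release 38 ...' -> 'fedora'.
    return issue_first_line.split()[0].lower()
  return uname

assert detect_platform('Darwin') == 'mac'
assert detect_platform('Linux', 'Fedora release 38 (Thirty Eight)') == 'fedora'
assert detect_platform('FreeBSD') == 'freebsd'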
def build_product(ctx, target_path: str, src_path: str, prod_path: str) -> bool:
  '''
  Run a source file, producing zero or more products.
  Return a list of produced product paths.
  '''
  src_ext = path_ext(src_path)
  try:
    build_tool = build_tools[src_ext]
  except KeyError:
    # TODO: fall back to generic .deps file.
    failF(target_path, 'unsupported source file extension: `{}`', src_ext)
  prod_path_out = prod_path + out_ext
  prod_path_tmp = prod_path + tmp_ext
  remove_file_if_exists(prod_path_out)
  remove_file_if_exists(prod_path_tmp)
  if not build_tool:
    noteF(target_path, 'no op.')
    return False # no product.
  prod_dir = path_dir(prod_path)
  make_dirs(prod_dir)
  # Extract args from the combination of wilds in the source and the matching target.
  m = match_wilds(target_path_for_source(src_path), target_path)
  if m is None:
    failF(target_path, 'internal error: match failed; src_path: {!r}', src_path)
  argv = [src_path] + list(m.groups())
  cmd = build_tool + argv
  try:
    env_fn = build_tool_env_fns[src_ext]
  except KeyError:
    env = None
  else:
    env = os.environ.copy()
    custom_env = env_fn()
    env.update(custom_env)
  noteF(target_path, 'building: `{}`', ' '.join(shlex.quote(w) for w in cmd))
  out_file = open(prod_path_out, 'wb')
  time_start = time.time()
  code = runC(cmd, env=env, out=out_file)
  time_elapsed = time.time() - time_start
  out_file.close()
  if code != 0:
    failF(target_path, 'build failed with code: {}', code)

  def cleanup_out():
    if file_size(prod_path_out) == 0:
      remove_file(prod_path_out)
    else:
      warnF(target_path, 'wrote data directly to `{}`;\n ignoring output captured in `{}`', prod_path_tmp, prod_path_out)

  manif_path = manifest_path(argv)
  try:
    f = open(manif_path)
  except FileNotFoundError: # no list.
    if not path_exists(prod_path_tmp):
      via = 'stdout'
      tmp_paths = [prod_path_out]
    else:
      via = 'tmp'
      tmp_paths = [prod_path_tmp]
    cleanup_out()
  else:
    via = 'manifest'
    tmp_paths = list(line[:-1] for line in f) # strip newlines.
    cleanup_out()
    if ('%' not in prod_path_tmp) and prod_path_tmp not in tmp_paths:
      failF(target_path, 'product does not appear in manifest ({} records): {}', len(tmp_paths), manif_path)
    remove_file(manif_path)
  time_msg = '{:0.2f} seconds '.format(time_elapsed) if ctx.report_times else ''
  noteF(target_path, 'finished: {}(via {}).', time_msg, via)
  return tmp_paths
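
# Hedged usage sketch for build_product: `ctx` is assumed to be whatever build-context object the
# caller already holds (only ctx.report_times is read here), and the paths below are hypothetical.
# The return value is False when the source extension maps to an empty build tool (no product);
# otherwise it is the list of temporary product paths the caller is expected to move into place.
produced = build_product(ctx, target_path='data.txt', src_path='data.txt.py', prod_path='_build/data.txt')
if produced:
  for tmp_path in produced:
    print('produced:', tmp_path)
else:
  print('no product for data.txt')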