def main():
    '''Entry point: parse arguments, then run a named command or update each target.'''
    parser = ArgumentParser(description='muck around with dependencies.')
    parser.add_argument('targets', nargs='*', default=['index.html'], help='target file names.')
    parser.add_argument('-no-times', action='store_true', help='do not report process times.')
    parser.add_argument('-dbg', action='store_true')
    parser.add_argument('-force', action='store_true')
    args = parser.parse_args()

    # Choose a debug printer once, up front: a real one under -dbg, a no-op otherwise.
    if args.dbg:
        def dbgF(path, fmt, *items):
            errFL('muck dbg: {}: ' + fmt, path, *items)
    else:
        def dbgF(path, fmt, *items):
            pass

    # The first target may name a command; commands map to (needs_ctx, fn) pairs.
    command_needs_ctx, command_fn = commands.get(args.targets[0], (None, None))
    if command_fn and not command_needs_ctx:
        # Context-free commands run immediately, before any build state is touched.
        return command_fn(args.targets[1:])

    make_dirs(build_dir)  # required to create new DB.
    ctx = Ctx(
        db=DB(path=db_path),
        statuses={},
        dir_names={},
        dependents=defaultdict(set),
        report_times=(not args.no_times),
        dbgF=dbgF)

    if command_fn:
        assert command_needs_ctx
        command_fn(ctx, args.targets[1:])
    else:
        # No command; default behavior is to update each specified target.
        for target in args.targets:
            update_dependency(ctx, target, dependent=None, force=args.force)
def fetch(url, expected_status_code=200, headers=None, timeout=4, delay=0, delay_range=0):
    '''
    Fetch the data at `url` and save it to a path in the '_fetch' directory derived from the URL.

    Returns the local cache path. If the path already exists, the fetch is skipped
    entirely (including the rate-limit sleep).

    headers: optional dict of HTTP headers to pass through to the fetch.
      Defaults to an empty dict; the previous `headers={}` mutable default was a bug
      (one shared dict across all calls).
    delay/delay_range: after a real fetch, sleep a random duration uniformly drawn
      from [delay - delay_range/2, delay + delay_range/2] — crude rate limiting.
    '''
    if headers is None:
        headers = {}
    path = path_join('_fetch', path_for_url(url))
    if not path_exists(path):
        errFL('fetch: {}', url)
        r = _fetch(url, timeout, headers, expected_status_code)
        make_dirs(path_dir(path))
        with open(path, 'wb') as f:
            f.write(r.content)
        # Rate-limit only when we actually hit the network, not on cache hits.
        sleep_min = delay - delay_range * 0.5
        sleep_max = delay + delay_range * 0.5
        sleep_time = random.uniform(sleep_min, sleep_max)
        if sleep_time > 0:
            time.sleep(sleep_time)
    return path
def build_product(ctx, target_path: str, src_path: str, prod_path: str) -> bool:
    '''
    Run a source file, producing zero or more products.
    Return a list of produced product paths, or False when the source extension
    maps to an empty build tool (no-op).
    NOTE(review): the declared `-> bool` return annotation does not match the
    list return on the success path; left unchanged to avoid touching the
    interface, but it should probably be `Union[bool, List[str]]`.
    '''
    src_ext = path_ext(src_path)
    try:
        build_tool = build_tools[src_ext]
    except KeyError:
        # TODO: fall back to generic .deps file.
        failF(target_path, 'unsupported source file extension: `{}`', src_ext)
    prod_path_out = prod_path + out_ext
    prod_path_tmp = prod_path + tmp_ext
    # Clear stale outputs from any previous build before running.
    remove_file_if_exists(prod_path_out)
    remove_file_if_exists(prod_path_tmp)
    if not build_tool:
        noteF(target_path, 'no op.')
        return False  # no product.
    prod_dir = path_dir(prod_path)
    make_dirs(prod_dir)
    # Extract args from the combination of wilds in the source and the matching target.
    m = match_wilds(target_path_for_source(src_path), target_path)
    if m is None:
        failF(target_path, 'internal error: match failed; src_path: {!r}', src_path)
    argv = [src_path] + list(m.groups())
    cmd = build_tool + argv
    try:
        env_fn = build_tool_env_fns[src_ext]
    except KeyError:
        env = None  # inherit the parent environment unchanged.
    else:
        env = os.environ.copy()
        custom_env = env_fn()
        env.update(custom_env)
    noteF(target_path, 'building: `{}`', ' '.join(shlex.quote(w) for w in cmd))
    # `with` ensures the captured-stdout file is closed even if runC raises
    # (the original opened/closed it manually and leaked on exception).
    with open(prod_path_out, 'wb') as out_file:
        time_start = time.time()
        code = runC(cmd, env=env, out=out_file)
        time_elapsed = time.time() - time_start
    if code != 0:
        failF(target_path, 'build failed with code: {}', code)

    def cleanup_out():
        # Discard an empty stdout capture; warn if the build wrote both a tmp
        # product and stdout output (the stdout capture is then ignored).
        if file_size(prod_path_out) == 0:
            remove_file(prod_path_out)
        else:
            warnF(target_path, 'wrote data directly to `{}`;\n ignoring output captured in `{}`', prod_path_tmp, prod_path_out)

    manif_path = manifest_path(argv)
    try:
        f = open(manif_path)
    except FileNotFoundError:  # no manifest list.
        if not path_exists(prod_path_tmp):
            via = 'stdout'
            tmp_paths = [prod_path_out]
            # No cleanup_out here: the stdout capture IS the product.
        else:
            via = 'tmp'
            tmp_paths = [prod_path_tmp]
            cleanup_out()
    else:
        # Close the manifest handle deterministically (it was leaked before).
        with f:
            via = 'manifest'
            tmp_paths = list(line[:-1] for line in f)  # strip newlines.
        cleanup_out()
        if ('%' not in prod_path_tmp) and prod_path_tmp not in tmp_paths:
            failF(target_path, 'product does not appear in manifest ({} records): {}', len(tmp_paths), manif_path)
        remove_file(manif_path)
    time_msg = '{:0.2f} seconds '.format(time_elapsed) if ctx.report_times else ''
    noteF(target_path, 'finished: {}(via {}).', time_msg, via)
    return tmp_paths