def run(args, project):
    """Entry point for the `download` command.

    With --output, downloads exactly one input file to the given path.
    Otherwise, downloads each input file to its original registered
    location. Returns True on success; False if any file failed while
    --keep_going was in effect.
    """
    if args.symlink and args.executable:
        raise RuntimeError("Cannot use --symlink and --executable!")
    good = True
    if args.output_file:
        # Explicit output destination: a single input file is required.
        if len(args.input_files) != 1:
            raise RuntimeError("Can only specify one input file with --output")
        input_file = os.path.abspath(args.input_files[0])
        info = project.get_file_info(input_file)
        output_file = os.path.abspath(args.output_file)
        do_download(args, project, info, output_file)
        return good
    # No explicit output: each file goes back to its original path.
    for raw_input_file in args.input_files:
        input_file = os.path.abspath(raw_input_file)
        info = project.get_file_info(input_file)
        output_file = info.orig_filepath
        if not args.keep_going:
            do_download(args, project, info, output_file)
            continue
        # Best-effort mode: report the failure and move on.
        try:
            do_download(args, project, info, output_file)
        except RuntimeError as e:
            good = False
            eprint(e)
            eprint("Continuing (--keep_going).")
    return good
def download_file_direct(output_file):
    """Download directly into `output_file` via this remote.

    Closure: captures `self`, `hash`, and `project_relpath` from the
    enclosing scope.

    Downloads into a uniquely named temp file in the destination
    directory, then renames it into place. Assuming we're on Unix
    (where `os.rename` is atomic when source and destination share a
    filesystem), this avoids race conditions with concurrent readers.
    """
    tmp_file = os.path.join(
        os.path.dirname(output_file), str(uuid.uuid4()))
    try:
        self._download_file_direct(hash, project_relpath, tmp_file)
    except util.DownloadError:
        # Clean up the partial temp file rather than leaking it.
        if os.path.exists(tmp_file):
            os.remove(tmp_file)
        util.eprint("ERROR: For remote '{}'".format(self.name))
        # Bare `raise` preserves the original traceback (`raise e`
        # would rewrite the raise location).
        raise
    os.rename(tmp_file, output_file)
def run(args, project):
    """Entry point for the `check` command.

    Checks each input file; returns True on success, or False when a
    check failed and --keep_going allowed execution to continue.
    """
    good = True
    for input_file in args.input_files:
        if args.keep_going:
            # Best-effort: log the failure and keep checking.
            try:
                do_check(args, project, input_file)
            except RuntimeError as e:
                good = False
                eprint(e)
                eprint("Continuing (--keep_going).")
        else:
            do_check(args, project, input_file)
    return good
def run(args, project):
    """Entry point for the `upload` command.

    Uploads each given file; returns True on success, or False when an
    upload failed and --keep_going allowed execution to continue.
    """
    good = True
    for filepath in args.filepaths:
        if args.keep_going:
            # Best-effort: log the failure and keep uploading.
            try:
                do_upload(args, project, filepath)
            except RuntimeError as e:
                good = False
                eprint(e)
                eprint("Continuing (--keep_going).")
        else:
            do_upload(args, project, filepath)
    return good
def get_cached(check_sha):
    """Satisfy the request from the local cache.

    Closure: captures `symlink`, `cache_path`, `output_file`, `hash`,
    and `download_and_cache` from the enclosing scope.

    Places the cached file at `output_file` (symlink or copy per the
    `symlink` flag). When `check_sha` is true and the cached content no
    longer matches the expected hash, evicts the corrupt cache entry
    and re-downloads.
    """
    # Can use cache. Copy to output path.
    if symlink:
        os.symlink(cache_path, output_file)
    else:
        shutil.copy(cache_path, output_file)
        # Ensure file is writeable (a copy inherits the cache's
        # read-only permissions; a symlink must not be chmod'd, as
        # that would affect the cached file itself).
        mode_original = os.stat(output_file)[stat.ST_MODE]
        os.chmod(output_file, mode_original | stat.S_IWUSR)
    # On error, remove cached file, and re-download.
    if check_sha:
        if not hash.compare_file(output_file, do_throw=False):
            util.eprint("Hashsum mismatch. " +
                        "Removing old cached file, re-downloading.")
            os.remove(cache_path)
            if os.path.islink(output_file):
                # In this situation, the cache was corrupted (presumably
                # by a Bazel recompilation), but the symlink still points
                # into Bazel-space. Remove the symlink, so that we do not
                # download into a symlink (which complicates the logic
                # in `download_and_cache`).
                os.remove(output_file)
            download_and_cache()
'(e.g. keys) as well!') subparsers = parser.add_subparsers(dest="command") download.add_arguments(subparsers.add_parser("download", help=download.__doc__)) upload.add_arguments(subparsers.add_parser("upload", help=upload.__doc__)) check.add_arguments(subparsers.add_parser("check", help=check.__doc__)) squash.add_arguments(subparsers.add_parser("squash", help=squash.__doc__)) args = parser.parse_args() # Do not allow running under Bazel unless we have a guess for the project root # from an input file. if in_bazel_runfiles() and not args.project_root_guess: eprint("ERROR: Do not run this command via `bazel run`. " + "Use a wrapper to call the binary.") eprint(" (If you are writing a test in Bazel, ensure that " + "you pass `--project_root_guess=$(location <target>)`.)") exit(1) if args.verbose: eprint("cmdline:") eprint(" pwd: {}".format(os.getcwd())) eprint(" argv[0]: {}".format(sys.argv[0])) eprint(" argv[1:]: {}".format(sys.argv[1:])) project = load_project(os.path.abspath(args.project_root_guess), user_config_file=args.user_config, project_name=args.project_name) if args.verbose:
def run(args, project):
    """Entry point for the `squash` command.

    For each registered (or explicitly listed) file, uploads to the
    `merge` remote any content that exists in `head` but not in `base`,
    staging downloads in a temporary directory. Returns True on
    success; False if any file failed while --keep_going was in effect.

    Raises:
        RuntimeError: if the three remote names are not all distinct.
    """
    # Ensure that all remotes are disjoint.
    # Fix: the original check compared only (base, head) and
    # (head, merge), so `base == merge` slipped through. Also use a
    # real exception rather than `assert`, which vanishes under `-O`.
    if len({args.base, args.head, args.merge}) != 3:
        raise RuntimeError("Must supply unique remotes")
    if args.verbose:
        print("base: {}".format(args.base))
        print("head: {}".format(args.head))
        print("merge: {}".format(args.merge))
    # Remotes.
    base = project.get_remote(args.base)
    head = project.get_remote(args.head)
    merge = project.get_remote(args.merge)
    stage_dir = mkdtemp(prefix="bazel_external_data-merge-")
    if args.verbose:
        print("stage_dir: {}".format(stage_dir))
    # List files: default to everything the project has registered.
    if args.files is None:
        files = project.get_registered_files()
    else:
        files = [os.path.abspath(f) for f in args.files]

    def do_squash(info):
        # Squash a single file: skip if `base` already has it,
        # otherwise stage from `head` and upload to `merge`.
        if args.verbose:
            yaml.dump(info.debug_config(), sys.stdout,
                      default_flow_style=False)
        # If the file already exists in `base`, no need to do anything.
        if base.check_file(info.hash, info.project_relpath):
            print("- Skip: {}".format(info.project_relpath))
            return
        # File not already uploaded: download from `head` to
        # `stage_dir`, then upload to `merge`.
        file_stage_abspath = os.path.join(stage_dir, info.project_relpath)
        file_stage_dir = os.path.dirname(file_stage_abspath)
        if not os.path.exists(file_stage_dir):
            os.makedirs(file_stage_dir)
        head.download_file(info.hash, info.project_relpath,
                           file_stage_abspath, symlink=True)
        # Upload file to `merge`.
        hash_merge = merge.upload_file(
            info.hash.hash_type, info.project_relpath, file_stage_abspath)
        assert info.hash == hash_merge  # Sanity check
        print("Uploaded: {}".format(info.project_relpath))

    good = True
    for file_abspath in files:
        info = project.get_file_info(file_abspath, needs_hash=True)
        if args.keep_going:
            # Best-effort: log the failure and continue squashing.
            try:
                do_squash(info)
            except RuntimeError as e:
                good = False
                eprint(e)
                eprint("Continuing (--keep_going)")
        else:
            do_squash(info)
    return good