Exemplo n.º 1
0
def pack(
    crates: List[Crate],
    crates_root: Path,
    bundle_path: Optional[Path],
    archive_path: Path,
    keep_going: bool,
) -> None:
    """Pack an optional index bundle plus crate files into a tar archive.

    Args:
        crates: crates whose files (located under ``crates_root``) are added.
        crates_root: directory containing the on-disk crate files.
        bundle_path: optional index bundle stored under
            ``INDEX_BUNDLE_PACKED_NAME``; skipped when ``None``.
        archive_path: destination archive opened via ``common.tar_context``.
        keep_going: when True, missing crate files are reported but do not
            abort packing.

    Raises:
        error.AbortError: a crate file is missing and ``keep_going`` is False.
    """
    num_good_paths = 0
    num_bad_paths = 0

    with common.tar_context(archive_path, "w") as tar_f:
        if bundle_path is not None:
            packed_name = INDEX_BUNDLE_PACKED_NAME
            common.vprint("[pack] {}".format(packed_name))
            tar_f.add(str(bundle_path), packed_name)
        # Sort so the archive member order is deterministic across runs.
        for rel_path in sorted(crate.rel_path() for crate in crates):
            path = crates_root / rel_path
            packed_name = "crates/" + rel_path.as_posix()
            try:
                common.vprint("[pack] {}".format(rel_path.name))
                tar_f.add(str(path), packed_name)
                num_good_paths += 1
            except FileNotFoundError:
                num_bad_paths += 1
                common.eprint("Error: Missing {}".format(rel_path))
                if not keep_going:
                    raise error.AbortError()

    common.iprint("{} bad paths, {} good paths".format(num_bad_paths,
                                                       num_good_paths))
Exemplo n.º 2
0
    def cmd_unpack(self) -> None:
        """Extract all "dist/" members from the archive and record results.

        Each member under "dist/" is extracted to the location computed by
        ``dest_path_from_rel_path()``; specs and targets detected from the
        extracted set are stored on ``self``.

        Raises:
            error.UnexpectedArchiveMemberError: member not under "dist/".
        """
        archive_path = self.get_archive_path()
        common.iprint("Unpacking archive: {}".format(archive_path))
        dist_prefix = "dist/"
        extracted = set()
        with common.tar_context(archive_path, "r") as tar_f:
            for tar_info in tar_f:
                if tar_info.isdir():
                    continue
                if not tar_info.name.startswith(dist_prefix):
                    raise error.UnexpectedArchiveMemberError(tar_info.name)

                rel_path = tar_info.name[len(dist_prefix):]
                dest_path = self.dest_path_from_rel_path(rel_path)
                # Rewrite the member name so extract() writes to dest_path
                # instead of the archive-relative path.
                tar_info.name = str(dest_path)
                common.vprint("[unpack] {}".format(rel_path))
                tar_f.extract(tar_info)
                extracted.add(rel_path)

        specs = self._detect_specs(extracted)
        targets = self._detect_targets(specs, extracted)

        common.iprint("Unpacked specs: {}".format(len(specs)))
        for spec in specs:
            common.iprint("  {}".format(spec))

        common.iprint("Unpacked targets: {}".format(len(targets)))
        for target in targets:
            common.iprint("  {}".format(target))

        self.specs = specs
        self.targets = targets
Exemplo n.º 3
0
    def cmd_list(self) -> None:
        """List each selected manifest; show download detail at INFO verbosity."""
        detailed = common.get_max_verbosity() >= common.VERBOSITY_INFO
        for spec in self.adjust_wild_specs(self.specs):
            common.vprint("List: {}".format(spec))
            manifest = self.select_manifest(spec, download=False)
            if not detailed:
                common.eprint(manifest.ident)
                continue
            downloaded_targets = self.downloaded_targets(manifest)
            downloaded_packages = self.downloaded_packages(manifest)
            target_out = "targets[{}/{}]".format(
                len(downloaded_targets),
                len(manifest.available_targets()),
            )
            package_out = "packages[{}/{}]".format(
                len(downloaded_packages),
                len(manifest.available_packages()),
            )
            # Example output:
            #   stable-2020-01-30(1.41.0)    \
            #     targets[84/84], packages[272/326]
            common.iprint("{:28} {:16} {:18}".format(
                manifest.ident, target_out, package_out))
            for target in downloaded_targets:
                common.iprint("  {}".format(target))
Exemplo n.º 4
0
 def cmd_fixup(self) -> None:
     """Regenerate manifest variations for every selected (wild-adjusted) spec."""
     for adjusted_spec in self.adjust_wild_specs(self.specs):
         common.iprint("Fixup: {}".format(adjusted_spec))
         # Canonical form is requested so variations derive from one ident.
         manifest = self.select_manifest(
             adjusted_spec, download=False, canonical=True)
         common.vprint("  ident: {}".format(manifest.ident))
         self._write_manifest_variations(manifest)
Exemplo n.º 5
0
 def pack_path(rel_path: str) -> None:
     # Add one dist file (resolved via the enclosing self) to the enclosing
     # tar_f under the "dist/" prefix; a missing source file is a hard error.
     common.vprint("[pack] {}".format(rel_path))
     dest_path = self.dest_path_from_rel_path(rel_path)
     member_name = "dist/" + rel_path
     try:
         tar_f.add(str(dest_path), member_name)
     except FileNotFoundError:
         raise error.MissingFileError(str(dest_path))
Exemplo n.º 6
0
 def get_crates(self) -> List[Crate]:
     """Return the crates in the configured range, computing the list lazily."""
     if self._crates is None:
         common.vprint("[calculating crate list]")
         crate_range = crates_in_range(
             self.get_repo(), self.get_start(), self.args.end)
         self._crates = list(crate_range)
         common.vprint("[{} crates in range]".format(len(self._crates)))
     return self._crates
Exemplo n.º 7
0
def update_config_json(repo: git.Repo, config: bytes) -> None:
    """Write ``config`` to the repo's config.json and commit, if it changed.

    Args:
        repo: Git repository containing config.json.
        config: desired raw bytes of config.json.

    No-op when the on-disk config.json already matches ``config``.
    """
    old_config = read_config_json(repo)
    if old_config is None or config != old_config:
        config_path = _config_json_path(repo)
        common.vprint("update-config: {}".format(config_path))
        config_path.write_bytes(config)
        # GitPython's IndexFile.add() expects an iterable of items; a bare
        # str would be iterated character-by-character, staging nothing useful.
        repo.index.add([str(config_path)])
        repo.index.commit("Apply config.json adjustments")
Exemplo n.º 8
0
 def verify_hash(self, path: Path, hash: str) -> None:
     """
     Verify that the file at ``path`` matches the expected ``hash``
     (delegates to ``integrity.verify_hash``).

     Raises:
         MissingFileError - path doesn't exist
         IntegrityError - path exists with bad hash
     """
     common.vprint("[verify] {}".format(path))
     integrity.verify_hash(path, hash)
Exemplo n.º 9
0
 def download_cached(self,
                     dest_url: str,
                     dest_path: Path,
                     *,
                     cached: bool = True) -> None:
     """Download dest_url to dest_path, reusing an existing file when allowed."""
     reuse_existing = cached and dest_path.is_file()
     if reuse_existing:
         common.vprint("[cached file] {}".format(dest_path))
         return
     common.vprint("[downloading] {}".format(dest_path))
     self.download(dest_url, dest_path)
Exemplo n.º 10
0
 def get_release_stable_version(self, *, download: bool) -> str:
     """Return the current stable version string from the release TOML file."""
     url, path = self.release_stable_url_path
     if download:
         # This file changes unexpectedly.  Avoid caching to ensure the
         # correct version is used.
         self.downloader.download_cached(url, path, cached=False)
     else:
         if not path.is_file():
             raise error.MissingFileError(str(path))
         common.vprint("[read] {}".format(path))
     return toml.load(path)["version"]
Exemplo n.º 11
0
 def verify_hash(self, path: Path, hash: str) -> None:
     """
     Verify the file at ``path`` against ``hash``, logging any failure.

     NOTE(review): the broad ``except Exception`` below swallows all
     verification errors (including MissingFileError / IntegrityError from
     ``integrity.verify_hash``) and only reports them via eprint — confirm
     whether this best-effort behavior is intended, since a sibling
     implementation of this method re-raises.
     """
     common.vprint("[verify] {}".format(path))
     try:
         integrity.verify_hash(path, hash)
     except Exception as err:
         common.eprint("[{}] verification failed: {}".format(path, err))
Exemplo n.º 12
0
 def verify(self, path: Path, *, with_sig: bool = False) -> None:
     """
     Check ``path`` against its companion hash file, and optionally its
     detached signature.

     Raises:
         MissingFileError - path or its associated hash file doesn't exist
         IntegrityError - path exists with bad hash
     """
     common.vprint("[verify] {}".format(path))
     integrity.verify(path, integrity.path_append_hash_suffix(path))
     if with_sig:
         self.sig_verify(path, signature.path_append_sig_suffix(path))
Exemplo n.º 13
0
    def download_verify(self,
                        dest_url: str,
                        dest_path: Path,
                        *,
                        cached: bool = True,
                        assume_ok: bool = False,
                        with_sig: bool = False) -> None:
        """Ensure dest_path exists and verifies against its sidecar files.

        The (small) hash and optional signature sidecars are downloaded
        first; the main file is downloaded only when no usable cached copy
        exists.

        Args:
            dest_url: source URL for the main file.
            dest_path: local destination for the main file.
            cached: when True, reuse an existing verified local copy.
            assume_ok: with cached, skip verification entirely when the
                main file and required sidecar files merely exist.
            with_sig: also download and verify a detached signature.
        """
        hash_path = integrity.path_append_hash_suffix(dest_path)
        sig_path = signature.path_append_sig_suffix(dest_path)
        if cached:
            if (assume_ok and dest_path.is_file() and hash_path.is_file()
                    and (not with_sig or sig_path.is_file())):
                common.vvprint("[assuming OK] {}".format(dest_path))
                return
            try:
                integrity.verify(dest_path, hash_path)
                if with_sig:
                    self.sig_verify(dest_path, sig_path)
                common.vprint("[cached file] {}".format(dest_path))
                return
            except (error.MissingFileError, error.IntegrityError):
                # Cached copy unusable; fall through to (re)download.
                pass
        common.vprint("[downloading] {}".format(dest_path))
        # Download the (small) hash and signature files first.
        hash_url = integrity.append_hash_suffix(dest_url)
        self.download(hash_url, hash_path)
        if with_sig:
            sig_url = signature.append_sig_suffix(dest_url)
            self.download(sig_url, sig_path)

        # If dest_path exists and has the correct hash, bypass the downloading
        # step to save download time.
        download_required = True
        if dest_path.is_file():
            try:
                integrity.verify(dest_path, hash_path)
                download_required = False
            except (error.MissingFileError, error.IntegrityError):
                pass
        if download_required:
            self.download(dest_url, dest_path)
            integrity.verify(dest_path, hash_path)

        if with_sig:
            self.sig_verify(dest_path, sig_path)
Exemplo n.º 14
0
File: crate.py Project: k3d3/romt
def unpack(
    repo: git.Repo,
    crates_root: Path,
    bundle_path: Path,
    archive_path: Path,
    keep_going: bool,
) -> None:
    """Extract the index bundle and crate files from a crate archive.

    The member named ``INDEX_BUNDLE_PACKED_NAME`` is extracted to
    ``bundle_path``; members under "crates/" are extracted beneath
    ``crates_root``.  Any other member is reported as an error.

    Args:
        repo: index repository (unused in this body — presumably kept for
            interface parity with callers; TODO confirm).
        crates_root: destination directory for extracted crate files.
        bundle_path: destination path for the index bundle.
        archive_path: source archive opened via ``common.tar_context``.
        keep_going: when True, unexpected/missing members are reported
            without aborting.

    Raises:
        error.AbortError: unexpected member or missing bundle, unless
            ``keep_going`` is True.
    """
    num_crates = 0
    crates_prefix = "crates/"
    found_bundle = False

    try:
        with common.tar_context(archive_path, "r") as tar_f:
            for tar_info in tar_f:
                if tar_info.isdir():
                    continue
                elif tar_info.name == INDEX_BUNDLE_PACKED_NAME:
                    found_bundle = True
                    # Rewrite the member name to redirect extraction to
                    # bundle_path.
                    tar_info.name = str(bundle_path)
                    common.vprint("[unpack] {}".format(tar_info.name))
                    tar_f.extract(tar_info)

                elif tar_info.name.startswith(crates_prefix):
                    num_crates += 1
                    # Strip "crates/" so extraction lands under crates_root.
                    tar_info.name = tar_info.name[len(crates_prefix):]
                    common.vprint("[unpack] {}".format(
                        os.path.basename(tar_info.name)))
                    tar_f.extract(tar_info, str(crates_root))

                else:
                    common.eprint("Unexpected archive member {}".format(
                        tar_info.name))
                    if not keep_going:
                        raise error.AbortError()
    except Exception as err:
        # Log for context, then propagate to the caller unchanged.
        common.eprint("Exception unpacking: {}".format(err))
        raise

    if not found_bundle:
        common.eprint("Missing {} in archive".format(INDEX_BUNDLE_PACKED_NAME))
        if not keep_going:
            raise error.AbortError()

    common.iprint("{} extracted crates".format(num_crates))
Exemplo n.º 15
0
 def download_verify_hash(self,
                          dest_url: str,
                          dest_path: Path,
                          hash: str,
                          *,
                          cached: bool = True,
                          assume_ok: bool = False) -> None:
     """Ensure dest_path exists with the expected hash, downloading if needed.

     With ``cached``, an existing file that matches ``hash`` (or, with
     ``assume_ok``, merely exists) is reused without downloading.
     """
     if cached:
         if assume_ok and dest_path.is_file():
             common.vvprint("[assuming OK] {}".format(dest_path))
             return
         try:
             integrity.verify_hash(dest_path, hash)
         except (error.MissingFileError, error.IntegrityError):
             # No usable cached copy; fall through to download.
             pass
         else:
             common.vprint("[cached file] {}".format(dest_path))
             return
     common.vprint("[downloading] {}".format(dest_path))
     self.download(dest_url, dest_path)
     integrity.verify_hash(dest_path, hash)
Exemplo n.º 16
0
def merge_origin_master(repo: git.Repo) -> None:
    """Merge remotes/origin/master into the working branch, with recovery.

    If the merge fails, the failed merge state is discarded and the branch
    is instead hard-reset to remotes/origin/master; any config.json content
    present before the merge is then re-applied.
    """
    _upgrade_to_working(repo)
    initial_config = read_config_json(repo)

    try:
        common.vprint("merge-index: merge origin/master")
        repo.git.merge("remotes/origin/master", "-m", "Merge origin/master")
    except git.GitError:
        common.iprint("merge-index: merge failed; reconstructing")
        common.vprint("merge-index: reset to recover failed merge state")
        # First reset clears the in-progress merge; second moves the branch
        # to origin/master outright.
        repo.head.reset(working_tree=True, index=True)
        common.vprint("merge-index: reset to remotes/origin/master")
        repo.head.reset("remotes/origin/master", working_tree=True, index=True)

    # Restore initial_config if necessary.
    if initial_config is not None:
        update_config_json(repo, initial_config)