def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[RecordPath, RecordPath]
    changed,  # type: Set[RecordPath]
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """
    :param installed: A map from archive RECORD path to installation RECORD
        path.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        old_record_path = _parse_record_path(row[0])
        new_record_path = installed.pop(old_record_path, old_record_path)
        if new_record_path in changed:
            digest, length = rehash(_record_to_fs_path(new_record_path))
        else:
            digest = row[1] if len(row) > 1 else ''
            length = row[2] if len(row) > 2 else ''
        installed_rows.append((new_record_path, digest, length))
    for f in generated:
        path = _fs_to_record_path(f, lib_dir)
        digest, length = rehash(f)
        installed_rows.append((path, digest, length))
    for installed_record_path in itervalues(installed):
        installed_rows.append((installed_record_path, '', ''))
    return installed_rows
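# A minimal usage sketch for get_csv_rows_for_installed(). Everything in it is
# hypothetical: the paths, digests, and sizes are invented, and it assumes only
# the behaviour visible above (old rows are remapped through `installed`,
# unchanged rows keep their old hash/size, generated files are rehashed, and
# leftover `installed` entries get empty digest/size fields).
def _example_csv_rows_for_installed(lib_dir):
    # type: (str) -> List[InstalledCSVRow]
    old_rows = [
        ['pkg/__init__.py', 'sha256=abc', '42'],
        ['pkg-1.0.data/scripts/tool', 'sha256=def', '123'],
    ]
    return get_csv_rows_for_installed(
        old_rows,
        # Map the archive RECORD path of the script to its installed path.
        installed={'pkg-1.0.data/scripts/tool': '../../bin/tool'},
        changed=set(),  # nothing was rewritten, so old hashes are kept
        generated=[],   # no extra files were created during install
        lib_dir=lib_dir,
    )
    # -> [('pkg/__init__.py', 'sha256=abc', '42'),
    #     ('../../bin/tool', 'sha256=def', '123')]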
def check_against_chunks(self, chunks):
    # type: (Iterator[bytes]) -> None
    """Check good hashes against ones built from iterable of chunks of
    data.

    Raise HashMismatch if none match.

    """
    gots = {}
    for hash_name in iterkeys(self._allowed):
        try:
            gots[hash_name] = hashlib.new(hash_name)
        except (ValueError, TypeError):
            raise InstallationError(
                'Unknown hash name: {}'.format(hash_name)
            )

    for chunk in chunks:
        for hash in itervalues(gots):
            hash.update(chunk)

    for hash_name, got in iteritems(gots):
        if got.hexdigest() in self._allowed[hash_name]:
            return
    self._raise(gots)
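# A hedged usage sketch: it assumes this method lives on pip's Hashes class,
# whose constructor takes a mapping of hash names to allowed hex digests, and
# that HashMismatch is raised when nothing matches. The path and the 64-KiB
# chunk size are illustrative only.
def _example_check_download(allowed_sha256, path):
    # type: (List[str], str) -> None
    hashes = Hashes({'sha256': allowed_sha256})
    with open(path, 'rb') as f:
        chunks = iter(lambda: f.read(64 * 1024), b'')
        # Returns None as soon as one digest matches; raises HashMismatch
        # otherwise (or InstallationError for an unknown hash name).
        hashes.check_against_chunks(chunks)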
def iter_found_candidates(self, ireq, extras):
    # type: (InstallRequirement, Set[str]) -> Iterator[Candidate]
    name = canonicalize_name(ireq.req.name)

    # We use this to ensure that we only yield a single candidate for
    # each version (the finder's preferred one for that version). The
    # requirement needs to return only one candidate per version, so we
    # implement that logic here so that requirements using this helper
    # don't all have to do the same thing later.
    candidates = collections.OrderedDict()  # type: VersionCandidates

    # Yield the installed version, if it matches, unless the user
    # specified `--force-reinstall`, when we want the version from
    # the index instead.
    installed_version = None
    if not self._force_reinstall and name in self._installed_dists:
        installed_dist = self._installed_dists[name]
        installed_version = installed_dist.parsed_version
        if ireq.req.specifier.contains(
            installed_version, prereleases=True
        ):
            candidate = self._make_candidate_from_dist(
                dist=installed_dist,
                extras=extras,
                parent=ireq,
            )
            candidates[installed_version] = candidate

    found = self.finder.find_best_candidate(
        project_name=ireq.req.name,
        specifier=ireq.req.specifier,
        hashes=ireq.hashes(trust_internet=False),
    )
    for ican in found.iter_applicable():
        if ican.version == installed_version:
            continue
        candidate = self._make_candidate_from_link(
            link=ican.link,
            extras=extras,
            parent=ireq,
            name=name,
            version=ican.version,
        )
        candidates[ican.version] = candidate

    return six.itervalues(candidates)
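# A toy sketch of the one-candidate-per-version bookkeeping above, reusing the
# module's `collections` import and plain strings in place of real Candidate
# objects; the version numbers are invented. Inserting the installed candidate
# first and skipping the matching index version with `continue` leaves exactly
# one entry per version, with the installed one preferred.
def _example_version_dedup(installed_version, index_versions):
    # type: (str, List[str]) -> List[str]
    candidates = collections.OrderedDict()
    candidates[installed_version] = 'installed-' + installed_version
    for version in index_versions:
        if version == installed_version:
            continue  # keep the installed candidate for this version
        candidates[version] = 'index-' + version
    return list(candidates.values())

# _example_version_dedup('1.4', ['1.5', '1.4', '1.3'])
# -> ['installed-1.4', 'index-1.5', 'index-1.3']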
def check_against_chunks(self, chunks):
    """Check good hashes against ones built from iterable of chunks of
    data.

    Raise HashMismatch if none match.

    """
    gots = {}
    for hash_name in iterkeys(self._allowed):
        try:
            gots[hash_name] = hashlib.new(hash_name)
        except (ValueError, TypeError):
            raise InstallationError('Unknown hash name: %s' % hash_name)

    for chunk in chunks:
        for hash in itervalues(gots):
            hash.update(chunk)

    for hash_name, got in iteritems(gots):
        if got.hexdigest() in self._allowed[hash_name]:
            return
    self._raise(gots)
def _iter_found_candidates(
    self,
    ireqs,  # type: Sequence[InstallRequirement]
    specifier,  # type: SpecifierSet
    hashes,  # type: Hashes
):
    # type: (...) -> Iterable[Candidate]
    if not ireqs:
        return ()

    # The InstallRequirement implementation requires us to give it a
    # "template". Here we just choose the first requirement to represent
    # all of them.
    # Hopefully the Project model can correct this mismatch in the future.
    template = ireqs[0]
    name = canonicalize_name(template.req.name)

    extras = frozenset()  # type: FrozenSet[str]
    for ireq in ireqs:
        specifier &= ireq.req.specifier
        hashes &= ireq.hashes(trust_internet=False)
        extras |= frozenset(ireq.extras)

    # We use this to ensure that we only yield a single candidate for
    # each version (the finder's preferred one for that version). The
    # requirement needs to return only one candidate per version, so we
    # implement that logic here so that requirements using this helper
    # don't all have to do the same thing later.
    candidates = collections.OrderedDict()  # type: VersionCandidates

    # Get the installed version, if it matches, unless the user
    # specified `--force-reinstall`, when we want the version from
    # the index instead.
    installed_version = None
    installed_candidate = None
    if not self._force_reinstall and name in self._installed_dists:
        installed_dist = self._installed_dists[name]
        installed_version = installed_dist.parsed_version
        if specifier.contains(installed_version, prereleases=True):
            installed_candidate = self._make_candidate_from_dist(
                dist=installed_dist,
                extras=extras,
                template=template,
            )

    found = self._finder.find_best_candidate(
        project_name=name,
        specifier=specifier,
        hashes=hashes,
    )
    for ican in found.iter_applicable():
        if ican.version == installed_version and installed_candidate:
            candidate = installed_candidate
        else:
            candidate = self._make_candidate_from_link(
                link=ican.link,
                extras=extras,
                template=template,
                name=name,
                version=ican.version,
            )
        candidates[ican.version] = candidate

    # Yield the installed version even if it is not found on the index.
    if installed_version and installed_candidate:
        candidates[installed_version] = installed_candidate

    return six.itervalues(candidates)
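# A hedged sketch of the constraint-merging loop above, using
# packaging.specifiers directly; the requirement strings are invented, and
# the hashes/extras merging is left out to keep the example self-contained.
def _example_merge_specifiers(requirement_strings):
    # type: (List[str]) -> SpecifierSet
    from packaging.requirements import Requirement
    from packaging.specifiers import SpecifierSet

    merged = SpecifierSet()
    for req_string in requirement_strings:
        # `&` on SpecifierSet combines the clauses, so the merged set only
        # admits versions acceptable to every requirement.
        merged &= Requirement(req_string).specifier
    return merged

# _example_merge_specifiers(['pkg>=1.0', 'pkg<2.0', 'pkg!=1.3'])
# -> SpecifierSet('!=1.3,<2.0,>=1.0')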
""" gots = {} for hash_name in iterkeys(self._allowed): try: gots[hash_name] = hashlib.new(hash_name) except (ValueError, TypeError): <<<<<<< HEAD raise InstallationError('Unknown hash name: %s' % hash_name) ======= raise InstallationError( 'Unknown hash name: {}'.format(hash_name) ) >>>>>>> e585743114c1741ec20dc76010f96171f3516589 for chunk in chunks: for hash in itervalues(gots): hash.update(chunk) for hash_name, got in iteritems(gots): if got.hexdigest() in self._allowed[hash_name]: return self._raise(gots) def _raise(self, gots): # type: (Dict[str, _Hash]) -> NoReturn raise HashMismatch(self._allowed, gots) def check_against_file(self, file): # type: (BinaryIO) -> None """Check good hashes against a file-like object