Example #1
0
    def _refresh_signing_keymgr(self):
        """Refresh the key manager (``key_mgr``) trust metadata.

        Fetches ``key_mgr.json`` from the configured signing-metadata URL
        base, verifies it against the trusted root delegation, and on
        success caches it both in memory (``self._key_mgr``) and on disk.
        If the fetch or verification fails and a previously verified copy
        exists on disk, that copy is loaded as a fallback.
        """
        # TODO (AV): make this a constant or config value
        self._key_mgr_filename = "key_mgr.json"
        self._key_mgr = None

        key_mgr_path = join(context.av_data_dir, self._key_mgr_filename)
        try:
            untrusted_key_mgr = fetch_channel_signing_data(
                context.signing_metadata_url_base, self._key_mgr_filename)
            # Raises if the delegation chain from the trusted root does not
            # validate the fetched metadata; only then do we trust it.
            verify_trust_delegation("key_mgr", untrusted_key_mgr,
                                    self._trusted_root)
            self._key_mgr = untrusted_key_mgr
            # Persist the verified metadata so it can serve as an offline
            # fallback on later runs.
            write_trust_metadata_to_file(self._key_mgr, key_mgr_path)
        except (
                ConnectionError,
                HTTPError,
        ) as err:
            # Network problems are expected and non-fatal; note that the URL
            # logged here is the channel base URL, while the fetch above used
            # context.signing_metadata_url_base — TODO confirm which is intended.
            # log.warn() is a deprecated alias of log.warning().
            log.warning(
                f"Could not retrieve {self.channel.base_url}/{self._key_mgr_filename}: {err}"
            )
        # TODO (AV): much more sensible error handling here
        except Exception as err:
            log.error(err)

        # If key_mgr is unavailable from server, fall back to copy on disk
        if self._key_mgr is None and exists(key_mgr_path):
            self._key_mgr = load_trust_metadata_from_file(key_mgr_path)
Example #2
0
    def _process_raw_repodata_str(self, raw_repodata_str):
        """Parse raw repodata JSON text into this cache's internal state.

        Builds a ``PackageRecord`` for every package entry — both legacy
        ``.tar.bz2`` packages and newer ``.conda`` packages — optionally
        verifying per-package metadata signatures, and populates the
        name and track-features indexes used for resolution.

        :param raw_repodata_str: repodata JSON text; falsy values are
            treated as an empty repodata document.
        :return: the ``_internal_state`` dict (also stored on ``self``).
        :raises CondaUpgradeError: if the document's ``repodata_version``
            exceeds ``MAX_REPODATA_VERSION``.
        """
        json_obj = json.loads(raw_repodata_str or '{}')

        subdir = json_obj.get('info', {}).get('subdir') or self.channel.subdir
        # Sanity check only (stripped under -O): repodata must match channel.
        assert subdir == self.channel.subdir
        add_pip = context.add_pip_as_python_dependency
        schannel = self.channel.canonical_name

        self._package_records = _package_records = []
        self._names_index = _names_index = defaultdict(list)
        self._track_features_index = _track_features_index = defaultdict(list)

        # Per-filename signature envelopes, keyed by package filename.
        signatures = json_obj.get("signatures", {})

        _internal_state = {
            'channel': self.channel,
            'url_w_subdir': self.url_w_subdir,
            'url_w_credentials': self.url_w_credentials,
            'cache_path_base': self.cache_path_base,
            'fn': self.repodata_fn,

            '_package_records': _package_records,
            '_names_index': _names_index,
            '_track_features_index': _track_features_index,

            '_etag': json_obj.get('_etag'),
            '_mod': json_obj.get('_mod'),
            '_cache_control': json_obj.get('_cache_control'),
            '_url': json_obj.get('_url'),
            '_add_pip': add_pip,
            '_pickle_version': REPODATA_PICKLE_VERSION,
            '_schannel': schannel,
            'repodata_version': json_obj.get('repodata_version', 0),
        }
        if _internal_state["repodata_version"] > MAX_REPODATA_VERSION:
            raise CondaUpgradeError(dals("""
                The current version of conda is too old to read repodata from

                    %s

                (This version only supports repodata_version 1.)
                Please update conda to use this channel.
                """) % self.url_w_subdir)

        meta_in_common = {  # just need to make this once, then apply with .update()
            'arch': json_obj.get('info', {}).get('arch'),
            'channel': self.channel,
            'platform': json_obj.get('info', {}).get('platform'),
            'schannel': schannel,
            'subdir': subdir,
        }

        channel_url = self.url_w_credentials
        legacy_packages = json_obj.get("packages", {})
        conda_packages = {} if context.use_only_tar_bz2 else json_obj.get("packages.conda", {})

        # Prefer the .conda format: drop any legacy .tar.bz2 entry whose
        # .conda counterpart is present (".conda" is 6 chars, hence k[:-6]).
        _tar_bz2 = CONDA_PACKAGE_EXTENSION_V1
        use_these_legacy_keys = set(iterkeys(legacy_packages)) - set(
            k[:-6] + _tar_bz2 for k in iterkeys(conda_packages)
        )

        # Decide once, outside the loop, whether signature verification is
        # both requested and actually possible.
        if context.extra_safety_checks:
            if cct is None:
                # NOTE: log.warn() is deprecated; use log.warning().
                log.warning("metadata signature verification requested, "
                            "but `conda-content-trust` is not installed.")
                verify_metadata_signatures = False
            elif not context.signing_metadata_url_base:
                # Fixed double-negative message: this branch fires when NO
                # URL base has been specified.
                log.info("no metadata URL base has been specified")
                verify_metadata_signatures = False
            elif self._key_mgr is None:
                log.warning("could not find key_mgr data for metadata signature verification")
                verify_metadata_signatures = False
            else:
                verify_metadata_signatures = True
        else:
            verify_metadata_signatures = False

        for group, copy_legacy_md5 in (
                (iteritems(conda_packages), True),
                (((k, legacy_packages[k]) for k in use_these_legacy_keys), False)):
            for fn, info in group:

                # Verify metadata signature before anything else so run-time
                # updates to the info dictionary performed below do not
                # invalidate the signatures provided in metadata.json.
                if verify_metadata_signatures:
                    if fn in signatures:
                        signable = wrap_as_signable(info)
                        signable['signatures'].update(signatures[fn])
                        try:
                            verify_trust_delegation('pkg_mgr', signable, self._key_mgr)
                            info['metadata_signature_status'] = MetadataSignatureStatus.verified
                        # TODO (AV): more granular signature errors (?)
                        except SignatureError:
                            log.warning(f"invalid signature for {fn}")
                            info['metadata_signature_status'] = MetadataSignatureStatus.error
                    else:
                        info['metadata_signature_status'] = MetadataSignatureStatus.unsigned

                info['fn'] = fn
                info['url'] = join_url(channel_url, fn)
                if copy_legacy_md5:
                    # A .conda package carries the md5/size of its .tar.bz2
                    # twin so either artifact can be validated.
                    counterpart = fn.replace('.conda', '.tar.bz2')
                    if counterpart in legacy_packages:
                        info['legacy_bz2_md5'] = legacy_packages[counterpart].get('md5')
                        info['legacy_bz2_size'] = legacy_packages[counterpart].get('size')
                if (add_pip and info['name'] == 'python' and
                        info['version'].startswith(('2.', '3.'))):
                    info['depends'].append('pip')
                info.update(meta_in_common)
                if info.get('record_version', 0) > 1:
                    # Records newer than we understand are skipped, not fatal.
                    log.debug("Ignoring record_version %d from %s",
                              info["record_version"], info['url'])
                    continue

                package_record = PackageRecord(**info)

                _package_records.append(package_record)
                _names_index[package_record.name].append(package_record)
                for ftr_name in package_record.track_features:
                    _track_features_index[ftr_name].append(package_record)

        self._internal_state = _internal_state
        return _internal_state