Example #1
    def channels(self):
        local_add = ('local',) if self.use_local else ()
        if (self._argparse_args and 'override_channels' in self._argparse_args
                and self._argparse_args['override_channels']):
            if not self.override_channels_enabled:
                from ..exceptions import OperationNotAllowed
                raise OperationNotAllowed(dals("""
                Overriding channels has been disabled.
                """))
            elif not (self._argparse_args and 'channel' in self._argparse_args
                      and self._argparse_args['channel']):
                from ..exceptions import CommandArgumentError
                raise CommandArgumentError(dals("""
                At least one -c / --channel flag must be supplied when using --override-channels.
                """))
            else:
                return tuple(IndexedSet(concatv(local_add, self._argparse_args['channel'])))

        # add 'defaults' channel when necessary if --channel is given via the command line
        if self._argparse_args and 'channel' in self._argparse_args:
            # TODO: it's args.channel right now, not channels
            argparse_channels = tuple(self._argparse_args['channel'] or ())
            if argparse_channels and argparse_channels == self._channels:
                return tuple(IndexedSet(concatv(local_add, argparse_channels,
                                                (DEFAULTS_CHANNEL_NAME,))))
        return tuple(IndexedSet(concatv(local_add, self._channels)))
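
All of these examples lean on two small helpers: concatv (from toolz, or its C re-implementation cytoolz) lazily chains any number of iterables, and IndexedSet (from boltons, which conda vendors) de-duplicates while preserving first-seen order. A minimal, self-contained sketch of the channel computation above, with plain strings standing in for channel objects:

from toolz import concatv
from boltons.setutils import IndexedSet

local_add = ('local',)
argparse_channels = ('conda-forge', 'local', 'bioconda')

# duplicates collapse to their first occurrence; order is otherwise preserved
channels = tuple(IndexedSet(concatv(local_add, argparse_channels, ('defaults',))))
print(channels)  # ('local', 'conda-forge', 'bioconda', 'defaults')
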
Example #2
File: plan.py Project: groutr/conda
def _handle_menuinst(unlink_dists, link_dists):  # pragma: no cover
    from .common.compat import on_win
    if not on_win:
        return unlink_dists, link_dists

    # Always link/unlink menuinst first/last on windows in case a subsequent
    # package tries to import it to create/remove a shortcut

    # unlink
    menuinst_idx = next((q for q, d in enumerate(unlink_dists) if d.name == 'menuinst'), None)
    if menuinst_idx is not None:
        unlink_dists = tuple(concatv(
            unlink_dists[:menuinst_idx],
            unlink_dists[menuinst_idx+1:],
            unlink_dists[menuinst_idx:menuinst_idx+1],
        ))

    # link
    menuinst_idx = next((q for q, d in enumerate(link_dists) if d.name == 'menuinst'), None)
    if menuinst_idx is not None:
        link_dists = tuple(concatv(
            link_dists[menuinst_idx:menuinst_idx+1],
            link_dists[:menuinst_idx],
            link_dists[menuinst_idx+1:],
        ))

    return unlink_dists, link_dists
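
The two branches above are plain tuple rotations: move the matching element to the end (unlink) or to the front (link), leaving all other relative order intact. A toy check of the same slicing-plus-concatv idiom, with strings standing in for dists:

from toolz import concatv

dists = ('a', 'menuinst', 'b')
idx = dists.index('menuinst')
print(tuple(concatv(dists[:idx], dists[idx + 1:], dists[idx:idx + 1])))  # ('a', 'b', 'menuinst')
print(tuple(concatv(dists[idx:idx + 1], dists[:idx], dists[idx + 1:])))  # ('menuinst', 'a', 'b')
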
Example #3
    def _merge(self, matches):
        # get matches up to and including first important_match
        #   but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker):
            return tuple(line
                         for line, flag in zip(match.value(self.__class__),
                                               match.valueflags(self.__class__))
                         if flag is marker)
        top_lines = concat(get_marked_lines(m, ParameterFlag.top) for m in relevant_matches)

        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom) for m in
                              reversed(relevant_matches))

        # now, concat all lines, while reversing the matches
        #   reverse because elements closer to the end of search path take precedence
        all_lines = concat(m.value(self.__class__) for m in reversed(relevant_matches))

        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))

        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        #       for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))

        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))
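
The reverse/de-dupe/reverse dance works because toolz.unique keeps the first occurrence of each element; running it over a reversed sequence therefore keeps the last occurrence, which is how later matches and bottom-marked lines win. A quick illustration:

from toolz import unique

lines = ('x', 'y', 'x', 'z')
print(tuple(unique(lines)))                             # ('x', 'y', 'z') -- first 'x' wins
print(tuple(reversed(tuple(unique(reversed(lines))))))  # ('y', 'x', 'z') -- last 'x' wins
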
Example #4
    def canonical_name(self):
        try:
            return self.__canonical_name
        except AttributeError:
            pass

        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    cn = self.__canonical_name = multiname
                    return cn

        for that_name in context.custom_channels:
            if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')):
                cn = self.__canonical_name = self.name
                return cn

        if any(c.location == self.location for c in concatv(
                (context.channel_alias,),
                context.migrated_channel_aliases,
        )):
            cn = self.__canonical_name = self.name
            return cn

        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        if self.scheme:
            cn = self.__canonical_name = "%s://%s" % (self.scheme,
                                                      join_url(self.location, self.name))
            return cn
        else:
            cn = self.__canonical_name = join_url(self.location, self.name).lstrip('/')
            return cn
Example #5
        def make_link_operation(source_short_path):
            # no side effects in this method!

            # first part, same as parent class
            if source_short_path in package_info.has_prefix_files:
                link_type = LinkType.copy
                prefix_placeholder, file_mode = package_info.has_prefix_files[source_short_path]
            elif source_short_path in concatv(package_info.no_link, package_info.soft_links):
                link_type = LinkType.copy
                prefix_placeholder, file_mode = '', None
            else:
                link_type = requested_link_type
                prefix_placeholder, file_mode = '', None
            is_menu_file = bool(MENU_RE.match(source_short_path))

            # second part, noarch python-specific
            if source_short_path.startswith('site-packages/'):
                dest_short_path = site_packages_dir + source_short_path.replace(
                    'site-packages', '', 1)
            elif source_short_path.startswith('python-scripts/'):
                dest_short_path = bin_dir + source_short_path.replace('python-scripts', '', 1)
            else:
                dest_short_path = source_short_path
            return LinkOperation(source_short_path, dest_short_path, link_type,
                                 prefix_placeholder, file_mode, is_menu_file)
Example #6
    def execute(self):
        if not self._verified:
            self.verify()

        pkg_idx = 0
        try:
            for pkg_idx, (pkg_data, actions) in enumerate(self.all_actions):
                self._execute_actions(self.target_prefix, self.num_unlink_pkgs, pkg_idx,
                                      pkg_data, actions)
        except Exception as execute_multi_exc:
            # reverse all executed packages except the one that failed
            rollback_excs = []
            if context.rollback_enabled:
                failed_pkg_idx = pkg_idx
                reverse_actions = self.all_actions[:failed_pkg_idx]
                for pkg_idx, (pkg_data, actions) in reversed(tuple(enumerate(reverse_actions))):
                    excs = self._reverse_actions(self.target_prefix, self.num_unlink_pkgs,
                                                 pkg_idx, pkg_data, actions)
                    rollback_excs.extend(excs)

            raise CondaMultiError(tuple(concatv(
                (execute_multi_exc.errors
                 if isinstance(execute_multi_exc, CondaMultiError)
                 else (execute_multi_exc,)),
                rollback_excs,
            )))

        else:
            for pkg_idx, (pkg_data, actions) in enumerate(self.all_actions):
                for axn_idx, action in enumerate(actions):
                    action.cleanup()
Example #7
    def query_all(cls, package_ref_or_match_spec, pkgs_dirs=None):
        if pkgs_dirs is None:
            pkgs_dirs = context.pkgs_dirs

        return concat(pcache.query(package_ref_or_match_spec) for pcache in concatv(
            cls.writable_caches(pkgs_dirs),
            cls.read_only_caches(pkgs_dirs),
        ))
Example #8
 def all_caches_writable_first(cls, pkgs_dirs=None):
     if pkgs_dirs is None:
         pkgs_dirs = context.pkgs_dirs
     pc_groups = groupby(
         lambda pc: pc.is_writable,
         (cls(pd) for pd in pkgs_dirs)
     )
     return tuple(concatv(pc_groups.get(True, ()), pc_groups.get(False, ())))
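
toolz.groupby(key, seq) eagerly builds a dict mapping each key result to a list of items, so partitioning on a boolean and re-concatenating is a compact stable partition: writable caches first, read-only caches after, each group keeping its original order. A sketch with integers standing in for cache objects:

from toolz import concatv, groupby

groups = groupby(lambda n: n % 2 == 0, [3, 4, 7, 8, 1])
print(groups)                                                        # {False: [3, 7, 1], True: [4, 8]}
print(tuple(concatv(groups.get(True, ()), groups.get(False, ()))))  # (4, 8, 3, 7, 1)
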
Example #9
 def _add_prefix_to_path(self, prefix, starting_path_dirs=None):
     prefix = self.path_conversion(prefix)
     if starting_path_dirs is None:
         starting_path_dirs = self._get_starting_path_list()
     return self.path_conversion(concatv(
         self._get_path_dirs(prefix),
         starting_path_dirs,
     ))
Example #10
 def _add_prefix_to_path(self, prefix, starting_path_dirs=None):
     if starting_path_dirs is None:
         starting_path_dirs = self._get_starting_path_list()
     return self.path_conversion(
         concatv(
             self._get_path_dirs(prefix),
             starting_path_dirs,
         ))
Example #11
 def merge(cls, match_specs):
     match_specs = tuple(cls(s) for s in match_specs)
     grouped = groupby(lambda spec: spec.get_exact_value('name'), match_specs)
     dont_merge_these = grouped.pop('*', []) + grouped.pop(None, [])
     specs_map = {
         name: reduce(lambda x, y: x._merge(y), specs) if len(specs) > 1 else specs[0]
         for name, specs in iteritems(grouped)
     }
     return tuple(concatv(itervalues(specs_map), dont_merge_these))
Example #12
    def query_all(cls, package_ref_or_match_spec, pkgs_dirs=None):
        if pkgs_dirs is None:
            pkgs_dirs = context.pkgs_dirs

        return concat(
            pcache.query(package_ref_or_match_spec) for pcache in concatv(
                cls.writable_caches(pkgs_dirs),
                cls.read_only_caches(pkgs_dirs),
            ))
Example #13
    def custom_channels(self):
        from ..models.channel import Channel

        custom_channels = (
            Channel.make_simple_channel(self.channel_alias, url, name) for name, url in iteritems(self._custom_channels)
        )
        channels_from_multichannels = concat(channel for channel in itervalues(self.custom_multichannels))
        all_channels = odict((x.name, x) for x in (ch for ch in concatv(channels_from_multichannels, custom_channels)))
        return all_channels
Example #14
 def merge(cls, match_specs):
     match_specs = tuple(cls(s) for s in match_specs)
     grouped = groupby(lambda spec: spec.get_exact_value('name'), match_specs)
     dont_merge_these = grouped.pop('*', []) + grouped.pop(None, [])
     specs_map = {
         name: reduce(lambda x, y: x._merge(y), specs) if len(specs) > 1 else specs[0]
         for name, specs in iteritems(grouped)
     }
     return tuple(concatv(itervalues(specs_map), dont_merge_these))
Example #15
 def _verify(cls, prefix_setups, prefix_action_groups):
     exceptions = tuple(exc for exc in concatv(
         concat(cls._verify_individual_level(prefix_group)
                for prefix_group in itervalues(prefix_action_groups)),
         concat(cls._verify_prefix_level(target_prefix, prefix_group)
                for target_prefix, prefix_group in iteritems(prefix_action_groups)),
         cls._verify_transaction_level(prefix_setups),
     ) if exc)
     return exceptions
Example #16
    def make_link_actions(transaction_context, package_info, target_prefix,
                          requested_link_type):
        required_quad = transaction_context, package_info, target_prefix, requested_link_type

        file_link_actions = LinkPathAction.create_file_link_actions(
            *required_quad)
        create_directory_actions = LinkPathAction.create_directory_actions(
            *required_quad, file_link_actions=file_link_actions)
        create_nonadmin_actions = CreateNonadminAction.create_actions(
            *required_quad)
        create_menu_actions = MakeMenuAction.create_actions(*required_quad)

        python_entry_point_actions = CreatePythonEntryPointAction.create_actions(
            *required_quad)
        compile_pyc_actions = CompilePycAction.create_actions(
            *required_quad, file_link_actions=file_link_actions)

        application_entry_point_actions = CreateApplicationEntryPointAction.create_actions(
            *required_quad)
        private_envs_meta_actions = CreatePrivateEnvMetaAction.create_actions(
            *required_quad)

        all_target_short_paths = tuple(axn.target_short_path
                                       for axn in concatv(
                                           file_link_actions,
                                           python_entry_point_actions,
                                           compile_pyc_actions,
                                           application_entry_point_actions,
                                       ))
        meta_create_actions = CreateLinkedPackageRecordAction.create_actions(
            *required_quad, all_target_short_paths=all_target_short_paths)
        # the ordering here is significant
        return tuple(
            concatv(
                meta_create_actions,
                create_directory_actions,
                file_link_actions,
                create_nonadmin_actions,
                python_entry_point_actions,
                compile_pyc_actions,
                create_menu_actions,
                application_entry_point_actions,
                private_envs_meta_actions,
            ))
Example #17
    def _merge(self, matches):
        # get matches up to and including first important_match
        #   but if no important_match, then all matches are important_matches
        relevant_matches_and_values = tuple(
            (match, match.value(self))
            for match in self._first_important_matches(matches))
        for match, value in relevant_matches_and_values:
            if not isinstance(value, tuple):
                raise InvalidTypeError(self.name, value, match.source,
                                       value.__class__.__name__,
                                       self._type.__name__)

        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker, parameter_obj):
            return tuple(line for line, flag in zip(
                match.value(parameter_obj), match.valueflags(parameter_obj))
                         if flag is marker) if match else ()

        top_lines = concat(
            get_marked_lines(m, ParameterFlag.top, self)
            for m, _ in relevant_matches_and_values)

        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(
            get_marked_lines(m, ParameterFlag.bottom, self)
            for m, _ in reversed(relevant_matches_and_values))

        # now, concat all lines, while reversing the matches
        #   reverse because elements closer to the end of search path take precedence
        all_lines = concat(v for _, v in reversed(relevant_matches_and_values))

        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))

        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        #       for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(
            concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))
        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))
Example #18
 def envs_dirs(self):
     return tuple(
         abspath(expanduser(p))
         for p in concatv(
             self._envs_dirs,
             (join(self.root_dir, "envs"),)
             if self.root_writable
             else ("~/.conda/envs", join(self.root_dir, "envs")),
         )
     )
Example #19
 def _finalize(self, commands, ext):
     commands = concatv(commands, ('',))  # add terminating newline
     if ext is None:
         return self.command_join.join(commands)
     elif ext:
         with NamedTemporaryFile(suffix=ext, delete=False) as tf:
             tf.write(ensure_binary(self.command_join.join(commands)))
         return tf.name
     else:
         raise NotImplementedError()
Example #20
File: env.py Project: zdog234/conda
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
    """
        Get environment object from prefix
    Args:
        name: The name of environment
        prefix: The path of prefix
        no_builds: Whether has build requirement
        ignore_channels: whether ignore_channels

    Returns:     Environment object
    """
    # requested_specs_map = History(prefix).get_requested_specs_map()
    pd = PrefixData(prefix, pip_interop_enabled=True)

    precs = tuple(PrefixGraph(pd.iter_records()).graph)
    grouped_precs = groupby(lambda x: x.package_type, precs)
    conda_precs = sorted(concatv(
        grouped_precs.get(None, ()),
        grouped_precs.get(PackageType.NOARCH_GENERIC, ()),
        grouped_precs.get(PackageType.NOARCH_PYTHON, ()),
    ), key=lambda x: x.name)

    pip_precs = sorted(concatv(
        grouped_precs.get(PackageType.VIRTUAL_PYTHON_WHEEL, ()),
        grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE, ()),
        grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE, ()),
        # grouped_precs.get(PackageType.SHADOW_PYTHON_EGG_LINK, ()),
    ), key=lambda x: x.name)

    if no_builds:
        dependencies = ['='.join((a.name, a.version)) for a in conda_precs]
    else:
        dependencies = ['='.join((a.name, a.version, a.build)) for a in conda_precs]
    if pip_precs:
        dependencies.append({'pip': ["%s==%s" % (a.name, a.version) for a in pip_precs]})

    channels = list(context.channels)
    if not ignore_channels:
        for prec in conda_precs:
            canonical_name = prec.channel.canonical_name
            if canonical_name not in channels:
                channels.insert(0, canonical_name)
    return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
Example #21
    def display_actions(self, pfe):
        from ..models.dist import Dist
        from ..plan import display_actions
        legacy_action_groups = self.make_legacy_action_groups(pfe)

        for actions, (prefix, stp) in zip(legacy_action_groups, iteritems(self.prefix_setups)):
            pseudo_index = {Dist(prec): prec for prec in concatv(stp.unlink_precs, stp.link_precs)}
            display_actions(actions, pseudo_index, show_channel_urls=context.show_channel_urls)

        return legacy_action_groups
Example #22
File: link.py Project: groutr/conda
    def print_transaction_summary(self):
        from ..plan import display_actions
        legacy_action_groups = self._make_legacy_action_groups()

        for actions, (prefix, stp) in zip(legacy_action_groups, iteritems(self.prefix_setups)):
            pseudo_index = {prec: prec for prec in concatv(stp.unlink_precs, stp.link_precs)}
            display_actions(actions, pseudo_index, show_channel_urls=context.show_channel_urls,
                            specs_to_remove=stp.remove_specs, specs_to_add=stp.update_specs)

        return legacy_action_groups
Example #23
File: env.py Project: conda/conda
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
    """
        Get environment object from prefix
    Args:
        name: The name of environment
        prefix: The path of prefix
        no_builds: Whether has build requirement
        ignore_channels: whether ignore_channels

    Returns:     Environment object
    """
    # requested_specs_map = History(prefix).get_requested_specs_map()
    pd = PrefixData(prefix, pip_interop_enabled=True)

    precs = tuple(PrefixGraph(pd.iter_records()).graph)
    grouped_precs = groupby(lambda x: x.package_type, precs)
    conda_precs = sorted(concatv(
        grouped_precs.get(None, ()),
        grouped_precs.get(PackageType.NOARCH_GENERIC, ()),
        grouped_precs.get(PackageType.NOARCH_PYTHON, ()),
    ), key=lambda x: x.name)

    pip_precs = sorted(concatv(
        grouped_precs.get(PackageType.VIRTUAL_PYTHON_WHEEL, ()),
        grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE, ()),
        grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE, ()),
        # grouped_precs.get(PackageType.SHADOW_PYTHON_EGG_LINK, ()),
    ), key=lambda x: x.name)

    if no_builds:
        dependencies = ['='.join((a.name, a.version)) for a in conda_precs]
    else:
        dependencies = ['='.join((a.name, a.version, a.build)) for a in conda_precs]
    if pip_precs:
        dependencies.append({'pip': ["%s==%s" % (a.name, a.version) for a in pip_precs]})

    channels = list(context.channels)
    if not ignore_channels:
        for prec in conda_precs:
            canonical_name = prec.channel.canonical_name
            if canonical_name not in channels:
                channels.insert(0, canonical_name)
    return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
Example #24
    def locate_prefix_by_name(cls, name, envs_dirs=None):
        """Find the location of a prefix given a conda env name."""
        if name in (ROOT_ENV_NAME, 'root'):
            return context.root_prefix

        for envs_dir in concatv(envs_dirs or context.envs_dirs, (getcwd(),)):
            prefix = join(envs_dir, name)
            if isdir(prefix):
                return prefix

        raise EnvironmentNameNotFound(name)
Example #25
    def print_transaction_summary(self):
        from ..models.dist import Dist
        from ..plan import display_actions
        legacy_action_groups = self._make_legacy_action_groups()

        for actions, (prefix, stp) in zip(legacy_action_groups, iteritems(self.prefix_setups)):
            pseudo_index = {Dist(prec): prec for prec in concatv(stp.unlink_precs, stp.link_precs)}
            display_actions(actions, pseudo_index, show_channel_urls=context.show_channel_urls,
                            specs_to_remove=stp.remove_specs, specs_to_add=stp.update_specs)

        return legacy_action_groups
Example #26
 def custom_channels(self):
     from ..models.channel import Channel
     custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
                        for name, url in iteritems(self._custom_channels))
     channels_from_multichannels = concat(channel for channel
                                          in itervalues(self.custom_multichannels))
     all_channels = odict((x.name, x) for x in (ch for ch in concatv(
         channels_from_multichannels,
         custom_channels,
     )))
     return all_channels
Example #27
def get_pinned_specs(prefix):
    """Find pinned specs from file and return a tuple of MatchSpec."""
    pinfile = join(prefix, 'conda-meta', 'pinned')
    if exists(pinfile):
        with open(pinfile) as f:
            from_file = (i for i in f.read().strip().splitlines()
                         if i and not i.strip().startswith('#'))
    else:
        from_file = ()

    return tuple(MatchSpec(s, optional=True) for s in
                 concatv(context.pinned_packages, from_file))
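
For illustration, a hypothetical conda-meta/pinned file and the line filtering it goes through (the generator drops blanks and comment lines before each survivor is wrapped in an optional MatchSpec):

text = """
# keep numpy on the 1.11 series
numpy 1.11.*
python 2.7.*
"""
from_file = (i for i in text.strip().splitlines()
             if i and not i.strip().startswith('#'))
print(list(from_file))  # ['numpy 1.11.*', 'python 2.7.*']
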
Example #28
def word_movers(doc1, doc2, metric="cosine"):
    """
    Measure the semantic similarity between two documents using Word Movers
    Distance.

    Args:
        doc1 (:class:`textacy.Doc` or :class:`spacy.Doc`)
        doc2 (:class:`textacy.Doc` or :class:`spacy.Doc`)
        metric ({'cosine', 'euclidean', 'l1', 'l2', 'manhattan'})

    Returns:
        float: Similarity between ``doc1`` and ``doc2`` in the interval [0.0, 1.0],
        where larger values correspond to more similar documents.

    References:
        - Ofir Pele and Michael Werman, "A linear time histogram metric for improved
          SIFT matching," in Computer Vision - ECCV 2008, Marseille, France, 2008.
        - Ofir Pele and Michael Werman, "Fast and robust earth mover's distances,"
          in Proc. 2009 IEEE 12th Int. Conf. on Computer Vision, Kyoto, Japan, 2009.
        - Kusner, Matt J., et al. "From word embeddings to document distances."
          Proceedings of the 32nd International Conference on Machine Learning
          (ICML 2015). 2015. http://jmlr.org/proceedings/papers/v37/kusnerb15.pdf
    """
    word_idxs = dict()

    n = 0
    word_vecs = []
    for word in itertoolz.concatv(extract.words(doc1), extract.words(doc2)):
        if word.has_vector and word_idxs.setdefault(word.orth, n) == n:
            word_vecs.append(word.vector)
            n += 1
    distance_mat = pairwise_distances(np.array(word_vecs), metric=metric).astype(
        np.double
    )
    distance_mat /= distance_mat.max()

    vec1 = collections.Counter(
        word_idxs[word.orth] for word in extract.words(doc1) if word.has_vector
    )
    vec1 = np.array(
        [vec1[word_idx] for word_idx in compat.range_(len(word_idxs))]
    ).astype(np.double)
    vec1 /= vec1.sum()  # normalize word counts

    vec2 = collections.Counter(
        word_idxs[word.orth] for word in extract.words(doc2) if word.has_vector
    )
    vec2 = np.array(
        [vec2[word_idx] for word_idx in compat.range_(len(word_idxs))]
    ).astype(np.double)
    vec2 /= vec2.sum()  # normalize word counts

    return 1.0 - emd(vec1, vec2, distance_mat)
Example #29
 def _finalize(self, commands, ext):
     commands = concatv(commands, ('',))  # add terminating newline
     if ext is None:
         return self.command_join.join(commands)
     elif ext:
         with NamedTemporaryFile('w+b', suffix=ext, delete=False) as tf:
             # the default mode is 'w+b', and universal new lines don't work in that mode
             # command_join should account for that
             tf.write(ensure_binary(self.command_join.join(commands)))
         return tf.name
     else:
         raise NotImplementedError()
Example #30
def get_pinned_specs(prefix):
    """Find pinned specs from file and return a tuple of MatchSpec."""
    pinfile = join(prefix, 'conda-meta', 'pinned')
    if exists(pinfile):
        with open(pinfile) as f:
            from_file = (i for i in f.read().strip().splitlines()
                         if i and not i.strip().startswith('#'))
    else:
        from_file = ()

    return tuple(MatchSpec(s, optional=True) for s in
                 concatv(context.pinned_packages, from_file))
Example #31
 def _finalize(self, commands, ext):
     commands = concatv(commands, ('', ))  # add terminating newline
     if ext is None:
         return self.command_join.join(commands)
     elif ext:
         with NamedTemporaryFile('w+b', suffix=ext, delete=False) as tf:
             # the default mode is 'w+b', and universal new lines don't work in that mode
             # command_join should account for that
             tf.write(ensure_binary(self.command_join.join(commands)))
         return tf.name
     else:
         raise NotImplementedError()
Example #32
    def make_link_actions(transaction_context, package_info, target_prefix, requested_link_type):
        required_quad = transaction_context, package_info, target_prefix, requested_link_type

        file_link_actions = LinkPathAction.create_file_link_actions(*required_quad)
        create_directory_actions = LinkPathAction.create_directory_actions(
            *required_quad, file_link_actions=file_link_actions
        )
        create_nonadmin_actions = CreateNonadminAction.create_actions(*required_quad)
        create_menu_actions = MakeMenuAction.create_actions(*required_quad)

        python_entry_point_actions = CreatePythonEntryPointAction.create_actions(*required_quad)
        compile_pyc_actions = CompilePycAction.create_actions(*required_quad,
                                                              file_link_actions=file_link_actions)

        application_entry_point_actions = CreateApplicationEntryPointAction.create_actions(
            *required_quad
        )
        private_envs_meta_actions = CreatePrivateEnvMetaAction.create_actions(*required_quad)

        all_target_short_paths = tuple(axn.target_short_path for axn in concatv(
            file_link_actions,
            python_entry_point_actions,
            compile_pyc_actions,
            application_entry_point_actions,
        ))
        meta_create_actions = CreateLinkedPackageRecordAction.create_actions(
            *required_quad, all_target_short_paths=all_target_short_paths
        )
        # the ordering here is significant
        return tuple(concatv(
            meta_create_actions,
            create_directory_actions,
            file_link_actions,
            create_nonadmin_actions,
            python_entry_point_actions,
            compile_pyc_actions,
            create_menu_actions,
            application_entry_point_actions,
            private_envs_meta_actions,
        ))
Example #33
    def _execute_actions(target_prefix, num_unlink_pkgs, pkg_idx, pkg_data,
                         actions):
        axn_idx, action, is_unlink = 0, None, True
        try:
            dist = Dist(pkg_data)
            is_unlink = pkg_idx <= num_unlink_pkgs - 1
            if is_unlink:
                log.info("===> UNLINKING PACKAGE: %s <===\n"
                         "  prefix=%s\n", dist, target_prefix)

            else:
                log.info(
                    "===> LINKING PACKAGE: %s <===\n"
                    "  prefix=%s\n"
                    "  source=%s\n", dist, target_prefix,
                    pkg_data.extracted_package_dir)

            run_script(
                target_prefix if is_unlink else pkg_data.extracted_package_dir,
                Dist(pkg_data), 'pre-unlink' if is_unlink else 'pre-link',
                target_prefix)
            for axn_idx, action in enumerate(actions):
                action.execute()
            run_script(target_prefix, Dist(pkg_data),
                       'post-unlink' if is_unlink else 'post-link')
        except Exception as e:  # this won't be a multi error
            # reverse this package
            log.debug("Error in action #%d for pkg_idx #%d %r",
                      axn_idx,
                      pkg_idx,
                      action,
                      exc_info=True)
            reverse_excs = ()
            if context.rollback_enabled:
                log.error(
                    "An error occurred while %s package '%s'.\n"
                    "%r\n"
                    "Attempting to roll back.\n",
                    'uninstalling' if is_unlink else 'installing',
                    Dist(pkg_data), e)
                reverse_excs = UnlinkLinkTransaction._reverse_actions(
                    target_prefix,
                    num_unlink_pkgs,
                    pkg_idx,
                    pkg_data,
                    actions,
                    reverse_from_idx=axn_idx)
            raise CondaMultiError(tuple(concatv(
                (e, ),
                reverse_excs,
            )))
Example #34
def word_movers(doc1, doc2, metric='cosine'):
    """
    Measure the semantic distance between two documents using Word Movers Distance.

    Args:
        doc1 (`TextDoc` or `spacy.Doc`)
        doc2 (`TextDoc` or `spacy.Doc`)
        metric ({'cosine', 'euclidean', 'l1', 'l2', 'manhattan'})

    Returns:
        float: distance between `doc1` and `doc2` in [0.0, 1.0], where smaller
            values correspond to more similar documents

    References:
        Ofir Pele and Michael Werman, "A linear time histogram metric for improved
            SIFT matching," in Computer Vision - ECCV 2008, Marseille, France, 2008.
        Ofir Pele and Michael Werman, "Fast and robust earth mover's distances,"
            in Proc. 2009 IEEE 12th Int. Conf. on Computer Vision, Kyoto, Japan, 2009.
        Kusner, Matt J., et al. "From word embeddings to document distances."
            Proceedings of the 32nd International Conference on Machine Learning
            (ICML 2015). 2015. http://jmlr.org/proceedings/papers/v37/kusnerb15.pdf
    """
    stringstore = StringStore()

    n = 0
    word_vecs = []
    for word in itertoolz.concatv(extract.words(doc1), extract.words(doc2)):
        if word.has_vector:
            if stringstore[word.text] - 1 == n:  # stringstore[0] always empty space
                word_vecs.append(word.vector)
                n += 1
    distance_mat = pairwise_distances(np.array(word_vecs),
                                      metric=metric).astype(np.double)
    distance_mat /= distance_mat.max()

    vec1 = collections.Counter(stringstore[word.text] - 1
                               for word in extract.words(doc1)
                               if word.has_vector)
    vec1 = np.array([vec1[word_idx] for word_idx in range(len(stringstore))
                     ]).astype(np.double)
    vec1 /= vec1.sum()  # normalize word counts

    vec2 = collections.Counter(stringstore[word.text] - 1
                               for word in extract.words(doc2)
                               if word.has_vector)
    vec2 = np.array([vec2[word_idx] for word_idx in range(len(stringstore))
                     ]).astype(np.double)
    vec2 /= vec2.sum()  # normalize word counts

    return emd(vec1, vec2, distance_mat)
Example #35
 def envs_dirs(self):
     if self.root_writable:
         fixed_dirs = (
             join(self.root_prefix, 'envs'),
             join(self._user_data_dir, 'envs'),
             join('~', '.conda', 'envs'),
         )
     else:
         fixed_dirs = (
             join(self._user_data_dir, 'envs'),
             join(self.root_prefix, 'envs'),
             join('~', '.conda', 'envs'),
         )
     return tuple(IndexedSet(expand(p) for p in concatv(self._envs_dirs, fixed_dirs)))
Example #36
 def envs_dirs(self):
     if self.root_writable:
         fixed_dirs = (
             join(self.root_prefix, 'envs'),
             join(self._user_data_dir, 'envs'),
             join('~', '.conda', 'envs'),
         )
     else:
         fixed_dirs = (
             join(self._user_data_dir, 'envs'),
             join(self.root_prefix, 'envs'),
             join('~', '.conda', 'envs'),
         )
     return tuple(IndexedSet(expand(p) for p in concatv(self._envs_dirs, fixed_dirs)))
Example #37
 def make_link_operation(source_short_path):
     if source_short_path in package_info.has_prefix_files:
         link_type = LinkType.copy
         prefix_placeholder, file_mode = package_info.has_prefix_files[source_short_path]
     elif source_short_path in concatv(package_info.no_link, package_info.soft_links):
         link_type = LinkType.copy
         prefix_placeholder, file_mode = '', None
     else:
         link_type = requested_link_type
         prefix_placeholder, file_mode = '', None
     is_menu_file = bool(MENU_RE.match(source_short_path))
     dest_short_path = source_short_path
     return LinkOperation(source_short_path, dest_short_path, link_type, prefix_placeholder,
                          file_mode, is_menu_file)
Example #38
    def execute(self):
        if not self._verified:
            self.verify()

        assert not context.dry_run
        # make sure prefix directory exists
        if not isdir(self.target_prefix):
            try:
                mkdir_p(self.target_prefix)
            except (IOError, OSError) as e:
                log.debug(repr(e))
                raise CondaError("Unable to create prefix directory '%s'.\n"
                                 "Check that you have sufficient permissions."
                                 "" % self.target_prefix)

        with signal_handler(conda_signal_handler):
            pkg_idx = 0
            try:
                for pkg_idx, (pkg_data,
                              actions) in enumerate(self.all_actions):
                    self._execute_actions(self.target_prefix,
                                          self.num_unlink_pkgs, pkg_idx,
                                          pkg_data, actions)
            except Exception as execute_multi_exc:
                # reverse all executed packages except the one that failed
                rollback_excs = []
                if context.rollback_enabled:
                    failed_pkg_idx = pkg_idx
                    reverse_actions = reversed(
                        tuple(enumerate(self.all_actions[:failed_pkg_idx])))
                    for pkg_idx, (pkg_data, actions) in reverse_actions:
                        excs = self._reverse_actions(self.target_prefix,
                                                     self.num_unlink_pkgs,
                                                     pkg_idx, pkg_data,
                                                     actions)
                        rollback_excs.extend(excs)

                raise CondaMultiError(
                    tuple(
                        concatv(
                            (execute_multi_exc.errors if isinstance(
                                execute_multi_exc, CondaMultiError) else
                             (execute_multi_exc, )),
                            rollback_excs,
                        )))
            else:
                for pkg_idx, (pkg_data,
                              actions) in enumerate(self.all_actions):
                    for axn_idx, action in enumerate(actions):
                        action.cleanup()
Example #39
    def _execute_actions(pkg_idx, axngroup):
        target_prefix = axngroup.target_prefix
        axn_idx, action, is_unlink = 0, None, axngroup.type == 'unlink'
        prec = axngroup.pkg_data

        conda_meta_dir = join(target_prefix, 'conda-meta')
        if not isdir(conda_meta_dir):
            mkdir_p(conda_meta_dir)

        try:
            if axngroup.type == 'unlink':
                log.info("===> UNLINKING PACKAGE: %s <===\n"
                         "  prefix=%s\n", prec.dist_str(), target_prefix)

            elif axngroup.type == 'link':
                log.info(
                    "===> LINKING PACKAGE: %s <===\n"
                    "  prefix=%s\n"
                    "  source=%s\n", prec.dist_str(), target_prefix,
                    prec.extracted_package_dir)

            if axngroup.type in ('unlink', 'link'):
                run_script(
                    target_prefix if is_unlink else prec.extracted_package_dir,
                    prec, 'pre-unlink' if is_unlink else 'pre-link',
                    target_prefix)
            for axn_idx, action in enumerate(axngroup.actions):
                action.execute()
            if axngroup.type in ('unlink', 'link'):
                run_script(target_prefix, prec,
                           'post-unlink' if is_unlink else 'post-link')
        except Exception as e:  # this won't be a multi error
            # reverse this package
            log.debug("Error in action #%d for pkg_idx #%d %r",
                      axn_idx,
                      pkg_idx,
                      action,
                      exc_info=True)
            reverse_excs = ()
            if context.rollback_enabled:
                # log.error("An error occurred while %s package '%s'.\n"
                #           "%r\n"
                #           "Attempting to roll back.\n",
                #           'uninstalling' if is_unlink else 'installing', prec.dist_str(), e)
                reverse_excs = UnlinkLinkTransaction._reverse_actions(
                    pkg_idx, axngroup, reverse_from_idx=axn_idx)
            raise CondaMultiError(tuple(concatv(
                (e, ),
                reverse_excs,
            )))
Example #40
def word_movers(doc1, doc2, metric='cosine'):
    """
    Measure the semantic similarity between two documents using Word Movers
    Distance.

    Args:
        doc1 (``textacy.Doc`` or ``spacy.Doc``)
        doc2 (``textacy.Doc`` or ``spacy.Doc``)
        metric ({'cosine', 'euclidean', 'l1', 'l2', 'manhattan'})

    Returns:
        float: similarity between `doc1` and `doc2` in the interval [0.0, 1.0],
            where larger values correspond to more similar documents

    References:
        Ofir Pele and Michael Werman, "A linear time histogram metric for improved
            SIFT matching," in Computer Vision - ECCV 2008, Marseille, France, 2008.
        Ofir Pele and Michael Werman, "Fast and robust earth mover's distances,"
            in Proc. 2009 IEEE 12th Int. Conf. on Computer Vision, Kyoto, Japan, 2009.
        Kusner, Matt J., et al. "From word embeddings to document distances."
            Proceedings of the 32nd International Conference on Machine Learning
            (ICML 2015). 2015. http://jmlr.org/proceedings/papers/v37/kusnerb15.pdf
    """
    stringstore = StringStore()

    n = 0
    word_vecs = []
    for word in itertoolz.concatv(extract.words(doc1), extract.words(doc2)):
        if word.has_vector:
            if stringstore[word.text] - 1 == n:  # stringstore[0] always empty space
                word_vecs.append(word.vector)
                n += 1
    distance_mat = pairwise_distances(np.array(word_vecs), metric=metric).astype(np.double)
    distance_mat /= distance_mat.max()

    vec1 = collections.Counter(
        stringstore[word.text] - 1
        for word in extract.words(doc1)
        if word.has_vector)
    vec1 = np.array([vec1[word_idx] for word_idx in range(len(stringstore))]).astype(np.double)
    vec1 /= vec1.sum()  # normalize word counts

    vec2 = collections.Counter(
        stringstore[word.text] - 1
        for word in extract.words(doc2)
        if word.has_vector)
    vec2 = np.array([vec2[word_idx] for word_idx in range(len(stringstore))]).astype(np.double)
    vec2 /= vec2.sum()  # normalize word counts

    return 1.0 - emd(vec1, vec2, distance_mat)
Example #41
    def _execute_link_operations(self, leaf_directories, link_operations):
        dest_short_paths = super(NoarchPythonPackageInstaller, self)._execute_link_operations(
            leaf_directories, link_operations)

        # create pyc files
        python_version = get_python_version_for_prefix(self.prefix)
        extra_pyc_paths = compile_missing_pyc(self.prefix, python_version,
                                              tuple(op.dest_short_path for op in link_operations))

        # create entry points
        entry_points = self.package_info.noarch.entry_points
        entry_point_paths = []
        for entry_point in entry_points:
            entry_point_paths.extend(create_entry_point(entry_point, self.prefix))

        return sorted(concatv(dest_short_paths, extra_pyc_paths, entry_point_paths))
Example #42
    def _execute_link_operations(self, leaf_directories, link_operations):
        dest_short_paths = super(NoarchPythonPackageInstaller, self)._execute_link_operations(
            leaf_directories, link_operations)

        # create pyc files
        python_version = get_python_version_for_prefix(self.prefix)
        extra_pyc_paths = compile_missing_pyc(self.prefix, python_version,
                                              tuple(op.dest_short_path for op in link_operations))

        # create entry points
        entry_points = self.package_info.noarch.get('entry_points', ())
        entry_point_paths = []
        for entry_point in entry_points:
            entry_point_paths.extend(create_entry_point(entry_point, self.prefix))

        return sorted(concatv(dest_short_paths, extra_pyc_paths, entry_point_paths))
Example #43
    def verify(self):
        if not self._prepared:
            self.prepare()

        exceptions = tuple(exc for exc in concatv(
            self._verify_individual_level(self.all_actions),
            self._verify_transaction_level(self.target_prefix, self.all_actions,
                                           self.num_unlink_pkgs),
        ) if exc)

        if exceptions:
            maybe_raise(CondaMultiError(exceptions), context)
        else:
            log.info(exceptions)

        self._verified = True
Example #44
    def verify(self):
        if not self._prepared:
            self.prepare()

        exceptions = tuple(exc for exc in concatv(
            self._verify_individual_level(self.all_actions),
            self._verify_transaction_level(
                self.target_prefix, self.all_actions, self.num_unlink_pkgs),
        ) if exc)

        if exceptions:
            maybe_raise(CondaMultiError(exceptions), context)
        else:
            log.info(exceptions)

        self._verified = True
Example #45
def get_pinned_specs(prefix):
    pinfile = join(prefix, 'conda-meta', 'pinned')
    if exists(pinfile):
        with open(pinfile) as f:
            from_file = (i for i in f.read().strip().splitlines()
                         if i and not i.strip().startswith('#'))
    else:
        from_file = ()

    from .cli.common import spec_from_line

    def munge_spec(s):
        return s if ' ' in s else spec_from_line(s)

    return tuple(
        munge_spec(s) for s in concatv(context.pinned_packages, from_file))
Example #46
    def _merge(self, matches):
        # get matches up to and including first important_match
        #   but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self.__class__).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self.__class__))
                                    if key_is_important(match, k))
                               for match in relevant_matches)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self.__class__) for m in relevant_matches),
                             reversed(important_maps)))
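
merge here is toolz.merge, in which later mappings win on key collisions (it also accepts a single iterable of dicts, as above); because reversed(important_maps) comes last, a key flagged final in an earlier, higher-precedence match overrides whatever the plain match values produced. The collision rule in isolation:

from toolz import merge

print(merge({'a': 1, 'b': 1}, {'a': 2}, {'b': 3}))  # {'a': 2, 'b': 3}
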
Example #47
 def make_link_operation(source_short_path):
     if source_short_path in package_info.has_prefix_files:
         link_type = LinkType.copy
         prefix_placeholder, file_mode = package_info.has_prefix_files[
             source_short_path]
     elif source_short_path in concatv(package_info.no_link,
                                       package_info.soft_links):
         link_type = LinkType.copy
         prefix_placeholder, file_mode = '', None
     else:
         link_type = requested_link_type
         prefix_placeholder, file_mode = '', None
     is_menu_file = bool(MENU_RE.match(source_short_path))
     dest_short_path = source_short_path
     return LinkOperation(source_short_path, dest_short_path, link_type,
                          prefix_placeholder, file_mode, is_menu_file)
Example #48
    def _execute_actions(pkg_idx, axngroup):
        target_prefix = axngroup.target_prefix
        axn_idx, action, is_unlink = 0, None, axngroup.type == 'unlink'
        prec = axngroup.pkg_data

        conda_meta_dir = join(target_prefix, 'conda-meta')
        if not isdir(conda_meta_dir):
            mkdir_p(conda_meta_dir)

        try:
            if axngroup.type == 'unlink':
                log.info("===> UNLINKING PACKAGE: %s <===\n"
                         "  prefix=%s\n",
                         prec.dist_str(), target_prefix)

            elif axngroup.type == 'link':
                log.info("===> LINKING PACKAGE: %s <===\n"
                         "  prefix=%s\n"
                         "  source=%s\n",
                         prec.dist_str(), target_prefix, prec.extracted_package_dir)

            if axngroup.type in ('unlink', 'link'):
                run_script(target_prefix if is_unlink else prec.extracted_package_dir,
                           prec,
                           'pre-unlink' if is_unlink else 'pre-link',
                           target_prefix)
            for axn_idx, action in enumerate(axngroup.actions):
                action.execute()
            if axngroup.type in ('unlink', 'link'):
                run_script(target_prefix, prec, 'post-unlink' if is_unlink else 'post-link')
        except Exception as e:  # this won't be a multi error
            # reverse this package
            log.debug("Error in action #%d for pkg_idx #%d %r", axn_idx, pkg_idx, action,
                      exc_info=True)
            reverse_excs = ()
            if context.rollback_enabled:
                # log.error("An error occurred while %s package '%s'.\n"
                #           "%r\n"
                #           "Attempting to roll back.\n",
                #           'uninstalling' if is_unlink else 'installing', prec.dist_str(), e)
                reverse_excs = UnlinkLinkTransaction._reverse_actions(
                    pkg_idx, axngroup, reverse_from_idx=axn_idx
                )
            raise CondaMultiError(tuple(concatv(
                (e,),
                reverse_excs,
            )))
Example #49
def _main(*args):
    import importlib

    try:
        from cytoolz.itertoolz import concatv
    except ImportError:  # pragma: no cover
        from .._vendor.toolz.itertoolz import concatv

    from ..base.constants import SEARCH_PATH
    from ..base.context import context

    if len(args) == 1:
        args = args + ('-h', )

    p, sub_parsers = generate_parser()

    main_modules = [
        "info", "help", "list", "search", "create", "install", "update",
        "remove", "config", "clean", "package"
    ]
    modules = ["conda.cli.main_" + suffix for suffix in main_modules]
    for module in modules:
        imported = importlib.import_module(module)
        imported.configure_parser(sub_parsers)
        if "update" in module:
            imported.configure_parser(sub_parsers, name='upgrade')
        if "remove" in module:
            imported.configure_parser(sub_parsers, name='uninstall')

    from .find_commands import find_commands

    # when using sys.argv, first argument is generally conda or __main__.py.  Ignore it.
    if (any(sname in args[0] for sname in ('conda', 'conda.exe', '__main__.py',
                                           'conda-script.py'))
            and (args[1] in concatv(sub_parsers.choices, find_commands())
                 or args[1].startswith('-'))):
        # Ignoring first argument (%s), as it is not a subcommand
        args = args[1:]

    args = p.parse_args(args)

    context.__init__(SEARCH_PATH, 'conda', args)
    init_loggers(context)

    exit_code = args.func(args, p)
    if isinstance(exit_code, int):
        return exit_code
Example #50
    def _execute_actions(pkg_idx, axngroup):
        target_prefix = axngroup.target_prefix
        axn_idx, action, is_unlink = 0, None, axngroup.type == 'unlink'
        pkg_data = axngroup.pkg_data
        dist = pkg_data and Dist(pkg_data)
        try:

            if axngroup.type == 'unlink':
                log.info("===> UNLINKING PACKAGE: %s <===\n"
                         "  prefix=%s\n", dist, target_prefix)

            elif axngroup.type == 'link':
                log.info(
                    "===> LINKING PACKAGE: %s <===\n"
                    "  prefix=%s\n"
                    "  source=%s\n", dist, target_prefix,
                    pkg_data.extracted_package_dir)

            if axngroup.type in ('unlink', 'link'):
                run_script(
                    target_prefix
                    if is_unlink else pkg_data.extracted_package_dir, dist,
                    'pre-unlink' if is_unlink else 'pre-link', target_prefix)
            for axn_idx, action in enumerate(axngroup.actions):
                action.execute()
            if axngroup.type in ('unlink', 'link'):
                run_script(target_prefix, Dist(pkg_data),
                           'post-unlink' if is_unlink else 'post-link')
        except Exception as e:  # this won't be a multi error
            # reverse this package
            log.debug("Error in action #%d for pkg_idx #%d %r", axn_idx,
                      pkg_idx, action)
            log.debug(format_exc())
            reverse_excs = ()
            if context.rollback_enabled:
                log.error(
                    "An error occurred while %s package '%s'.\n"
                    "%r\n"
                    "Attempting to roll back.\n",
                    'uninstalling' if is_unlink else 'installing',
                    Dist(pkg_data), e)
                reverse_excs = UnlinkLinkTransaction._reverse_actions(
                    pkg_idx, axngroup, reverse_from_idx=axn_idx)
            raise CondaMultiError(tuple(concatv(
                (e, ),
                reverse_excs,
            )))
Example #51
def make_unlink_actions(transaction_context, target_prefix,
                        linked_package_data):
    # no side effects in this function!
    unlink_path_actions = tuple(
        UnlinkPathAction(transaction_context, linked_package_data,
                         target_prefix, trgt)
        for trgt in linked_package_data.files)

    remove_menu_actions = RemoveMenuAction.create_actions(
        transaction_context, linked_package_data, target_prefix)

    meta_short_path = '%s/%s' % (
        'conda-meta', Dist(linked_package_data).to_filename('.json'))
    remove_conda_meta_actions = (RemoveLinkedPackageRecordAction(
        transaction_context, linked_package_data, target_prefix,
        meta_short_path), )

    _all_d = get_all_directories(axn.target_short_path
                                 for axn in unlink_path_actions)
    all_directories = sorted(explode_directories(_all_d, already_split=True),
                             reverse=True)
    directory_remove_actions = tuple(
        UnlinkPathAction(transaction_context, linked_package_data,
                         target_prefix, d, LinkType.directory)
        for d in all_directories)

    if linked_package_data.preferred_env is not None:
        app_entry_point_short_path = os.path.join(
            get_bin_directory_short_path(), linked_package_data.name)
        unlink_app_entry_point = UnlinkPathAction(transaction_context,
                                                  linked_package_data,
                                                  context.root_prefix,
                                                  app_entry_point_short_path),
        unlink_path_actions = unlink_path_actions + unlink_app_entry_point
        private_envs_meta_action = RemovePrivateEnvMetaAction(
            transaction_context, linked_package_data, target_prefix),
    else:
        private_envs_meta_action = ()

    return tuple(
        concatv(
            remove_menu_actions,
            unlink_path_actions,
            directory_remove_actions,
            private_envs_meta_action,
            remove_conda_meta_actions,
        ))
Example #52
    def _execute(cls, all_action_groups):
        with signal_handler(conda_signal_handler), time_recorder(
                "unlink_link_execute"):
            pkg_idx = 0
            try:
                with spinner("Executing transaction", not context.verbosity
                             and not context.quiet, context.json):
                    for pkg_idx, axngroup in enumerate(all_action_groups):
                        cls._execute_actions(pkg_idx, axngroup)
            except CondaMultiError as e:
                action, is_unlink = (None, axngroup.type == 'unlink')
                prec = axngroup.pkg_data

                log.error(
                    "An error occurred while %s package '%s'.\n"
                    "%r\n"
                    "Attempting to roll back.\n",
                    'uninstalling' if is_unlink else 'installing', prec
                    and prec.dist_str(), e.errors[0])

                # reverse all executed packages except the one that failed
                rollback_excs = []
                if context.rollback_enabled:
                    with spinner("Rolling back transaction",
                                 not context.verbosity and not context.quiet,
                                 context.json):
                        failed_pkg_idx = pkg_idx
                        reverse_actions = reversed(
                            tuple(
                                enumerate(
                                    take(failed_pkg_idx, all_action_groups))))
                        for pkg_idx, axngroup in reverse_actions:
                            excs = cls._reverse_actions(pkg_idx, axngroup)
                            rollback_excs.extend(excs)

                raise CondaMultiError(
                    tuple(
                        concatv(
                            (e.errors if isinstance(e, CondaMultiError) else
                             (e, )),
                            rollback_excs,
                        )))
            else:
                for axngroup in all_action_groups:
                    for action in axngroup.actions:
                        action.cleanup()
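
The rollback loop above revisits only the groups that finished before the failure: toolz.take bounds the iteration at the failed index, and reversed(tuple(enumerate(...))) walks them back while preserving each group's original index. A small demonstration with plain values:

from toolz import take

groups = ('a', 'b', 'c', 'd')
failed_pkg_idx = 2  # 'c' failed, so undo 'b' and then 'a'
print(list(reversed(tuple(enumerate(take(failed_pkg_idx, groups))))))
# [(1, 'b'), (0, 'a')]
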
Example #53
0
    def canonical_name(self):
        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    return multiname

        for that_name in context.custom_channels:
            if tokenized_startswith(self.name.split('/'), that_name.split('/')):
                return self.name

        if any(c.location == self.location
               for c in concatv((context.channel_alias,), context.migrated_channel_aliases)):
            return self.name

        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        return "%s://%s/%s" % (self.scheme, self.location, self.name)
Example #54
0
def solve_prefix(prefix, r, specs_to_remove=(), specs_to_add=(), prune=False):
    # this function gives a "final state" for an existing prefix given just these simple inputs
    prune = context.prune or prune
    log.debug(
        "solving prefix %s\n"
        "  specs_to_remove: %s\n"
        "  specs_to_add: %s\n"
        "  prune: %s", prefix, specs_to_remove, specs_to_add, prune)

    # declare starting point
    solved_linked_dists = () if prune else tuple(iterkeys(linked_data(prefix)))
    # TODO: to change this whole function from working with dists to working with records, just
    #       change iterkeys to itervalues

    if solved_linked_dists and specs_to_remove:
        solved_linked_dists = r.remove(
            tuple(text_type(s) for s in specs_to_remove), solved_linked_dists)

    specs_from_history = _get_relevant_specs_from_history(
        prefix, specs_to_remove, specs_to_add)
    augmented_specs_to_add = augment_specs(
        prefix, concatv(specs_from_history, specs_to_add))

    log.debug("final specs to add:\n    %s\n",
              "\n    ".join(text_type(s) for s in augmented_specs_to_add))
    solved_linked_dists = r.install(augmented_specs_to_add,
                                    solved_linked_dists,
                                    update_deps=context.update_dependencies)

    if context.respect_pinned:
        # TODO: assert all pinned specs are compatible with what's in solved_linked_dists
        pass

    # TODO: don't uninstall conda or its dependencies, probably need to check elsewhere

    solved_linked_dists = IndexedSet(
        r.dependency_sort({d.name: d
                           for d in solved_linked_dists}))

    log.debug("solved prefix %s\n"
              "  solved_linked_dists:\n"
              "    %s\n", prefix,
              "\n    ".join(text_type(d) for d in solved_linked_dists))

    return solved_linked_dists, specs_to_add
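
Note the spec ordering: concatv places the history-derived specs ahead of the newly requested ones before augment_specs runs, so the final list reads history first, additions last. Demonstrated with hypothetical spec strings:

from toolz import concatv

specs_from_history = ('python=3.6', 'numpy')  # hypothetical values
specs_to_add = ('scipy',)
print(tuple(concatv(specs_from_history, specs_to_add)))
# ('python=3.6', 'numpy', 'scipy')
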
Example #55
0
    def get_entry_to_link(cls, package_ref):
        pc_entry = next((pcrec for pcrec in cls.query_all(package_ref)
                         if pcrec.is_extracted),
                        None)
        if pc_entry is not None:
            return pc_entry

        # this can happen with `conda install path/to/package.tar.bz2`
        #   because dist has channel '<unknown>'
        # if ProgressiveFetchExtract did its job correctly, what we're looking for
        #   should be the matching dist_name in the first writable package cache
        # we'll search all caches for a match, but search writable caches first
        caches = concatv(cls.writable_caches(), cls.read_only_caches())
        dist_str = package_ref.dist_str().rsplit(':', 1)[-1]
        # take the first cache whose scan actually finds a record; calling
        # next() on the raw scan results could return None from an early cache
        # even when a later cache holds the package
        pc_entry = next((pcrec for pcrec in
                         (cache._scan_for_dist_no_channel(dist_str)
                          for cache in caches if cache)
                         if pcrec is not None), None)
        if pc_entry is not None:
            return pc_entry
        raise CondaError("No package '%s' found in cache directories." % Dist(package_ref))
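
The writable-caches-first ordering matters because next(..., None) returns the first hit and falls back to None only when every cache misses. A tiny demonstration of the idiom with hypothetical scan results:

scan_results = (None, 'hit-a', 'hit-b')  # one hypothetical result per cache
first_hit = next((r for r in scan_results if r is not None), None)
assert first_hit == 'hit-a'
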
Example #56
0
def make_unlink_actions(transaction_context, target_prefix, prefix_record):
    # no side effects in this function!
    unlink_path_actions = tuple(UnlinkPathAction(transaction_context, prefix_record,
                                                 target_prefix, trgt)
                                for trgt in prefix_record.files)

    remove_menu_actions = RemoveMenuAction.create_actions(transaction_context,
                                                          prefix_record,
                                                          target_prefix)

    try:
        extracted_package_dir = basename(prefix_record.extracted_package_dir)
    except AttributeError:
        try:
            extracted_package_dir = basename(prefix_record.link.source)
        except AttributeError:
            # for backward compatibility only
            extracted_package_dir = '%s-%s-%s' % (prefix_record.name, prefix_record.version,
                                                  prefix_record.build)

    meta_short_path = '%s/%s' % ('conda-meta', extracted_package_dir + '.json')
    remove_conda_meta_actions = (RemoveLinkedPackageRecordAction(transaction_context,
                                                                 prefix_record,
                                                                 target_prefix, meta_short_path),)

    _all_d = get_all_directories(axn.target_short_path for axn in unlink_path_actions)
    all_directories = sorted(explode_directories(_all_d, already_split=True), reverse=True)
    directory_remove_actions = tuple(UnlinkPathAction(transaction_context, prefix_record,
                                                      target_prefix, d, LinkType.directory)
                                     for d in all_directories)

    # unregister_private_package_actions = UnregisterPrivateEnvAction.create_actions(
    #     transaction_context, package_cache_record, target_prefix
    # )

    return tuple(concatv(
        remove_menu_actions,
        unlink_path_actions,
        directory_remove_actions,
        # unregister_private_package_actions,
        remove_conda_meta_actions,
    ))
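
The nested try/except above walks through three fallbacks for naming the extracted package directory. For comparison, a roughly equivalent sketch using getattr with defaults (assuming, as the original does, that the attributes are non-empty strings when present; it reuses the function's prefix_record, and basename of a bare name is the name itself):

from os.path import basename

extracted = (getattr(prefix_record, 'extracted_package_dir', None)
             or getattr(getattr(prefix_record, 'link', None), 'source', None)
             or '%s-%s-%s' % (prefix_record.name, prefix_record.version,
                              prefix_record.build))
extracted_package_dir = basename(extracted)
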
Example #57
0
    def get_entry_to_link(cls, dist):
        pc_entry = next((pc_entry
                         for pc_entry in cls.get_matching_entries(dist)
                         if pc_entry.is_extracted),
                        None)
        if pc_entry is not None:
            return pc_entry

        # this can happen with `conda install path/to/package.tar.bz2`
        #   because dist has channel '<unknown>'
        # if ProgressiveFetchExtract did its job correctly, what we're looking for
        #   should be the matching dist_name in the first writable package cache
        # we'll search all caches for a match, but search writable caches first
        grouped_caches = groupby(lambda x: x.is_writable,
                                 (PackageCache(pd) for pd in context.pkgs_dirs))
        caches = concatv(grouped_caches.get(True, ()), grouped_caches.get(False, ()))
        # skip caches whose scan returns None rather than stopping at the first one
        pc_entry = next((pce for pce in
                         (cache.scan_for_dist_no_channel(dist) for cache in caches if cache)
                         if pce is not None), None)
        if pc_entry is not None:
            return pc_entry
        raise CondaError("No package '%s' found in cache directories." % dist)
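
Here toolz.groupby does the writable/read-only split that Example #55 gets from dedicated classmethods: it partitions a sequence into a dict keyed by the predicate, and concatv then reassembles it writable-first. A self-contained demonstration with a hypothetical Cache stand-in:

from collections import namedtuple
from toolz import concatv, groupby

Cache = namedtuple('Cache', 'name is_writable')  # hypothetical stand-in
caches = [Cache('a', False), Cache('b', True), Cache('c', True)]
grouped = groupby(lambda c: c.is_writable, caches)
ordered = tuple(concatv(grouped.get(True, ()), grouped.get(False, ())))
print([c.name for c in ordered])  # ['b', 'c', 'a']
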
Example #58
0
    def _execute(cls, all_action_groups):
        with signal_handler(conda_signal_handler), time_recorder("unlink_link_execute"):
            pkg_idx = 0
            try:
                with Spinner("Executing transaction", not context.verbosity and not context.quiet,
                             context.json):
                    for pkg_idx, axngroup in enumerate(all_action_groups):
                        cls._execute_actions(pkg_idx, axngroup)
            except CondaMultiError as e:
                action, is_unlink = (None, axngroup.type == 'unlink')
                prec = axngroup.pkg_data

                log.error("An error occurred while %s package '%s'.\n"
                          "%r\n"
                          "Attempting to roll back.\n",
                          'uninstalling' if is_unlink else 'installing',
                          prec and prec.dist_str(), e.errors[0])

                # reverse all executed packages except the one that failed
                rollback_excs = []
                if context.rollback_enabled:
                    with Spinner("Rolling back transaction",
                                 not context.verbosity and not context.quiet, context.json):
                        failed_pkg_idx = pkg_idx
                        reverse_actions = reversed(tuple(enumerate(
                            take(failed_pkg_idx, all_action_groups)
                        )))
                        for pkg_idx, axngroup in reverse_actions:
                            excs = cls._reverse_actions(pkg_idx, axngroup)
                            rollback_excs.extend(excs)

                raise CondaMultiError(tuple(concatv(
                    (e.errors
                     if isinstance(e, CondaMultiError)
                     else (e,)),
                    rollback_excs,
                )))
            else:
                for axngroup in all_action_groups:
                    for action in axngroup.actions:
                        action.cleanup()
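
One last idiom worth isolating: the isinstance check when re-raising flattens a nested multi-error, so the final error carries one flat tuple of causes plus any rollback errors. Sketched with a hypothetical stand-in class:

from toolz import concatv

class MultiError(Exception):  # hypothetical stand-in for CondaMultiError
    def __init__(self, errors):
        self.errors = tuple(errors)
        super(MultiError, self).__init__(self.errors)

e = MultiError((ValueError('v'), KeyError('k')))
rollback_excs = [OSError('cleanup failed')]
flat = tuple(concatv(
    e.errors if isinstance(e, MultiError) else (e,),
    rollback_excs,
))
print([type(exc).__name__ for exc in flat])
# ['ValueError', 'KeyError', 'OSError']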