Example #1
 def _wrap_context_manager_or_class(cls, thing):
     from .abstract_context_manager import AbstractContextManager
     if isinstance(thing, AbstractContextManager):
         return cls(thing)
     else:
         assert issubclass(thing, AbstractContextManager)
         # It's a context manager class.
         property_name = '__%s_context_manager_%s' % (
             thing.__name__, ''.join(
                 random.choice(string.ascii_letters) for _ in range(30)))
         # We're exposing the wrapped context manager under two names,
         # `__wrapped__` and a randomly created one. The first one is used
         # for convenience but we still define the second one to ensure our
         # mechanism can rely on it even when the `__wrapped__` attribute is
         # being overridden.
         return type(
             thing.__name__, (thing, ), {
                 property_name:
                 caching.CachedProperty(lambda self: cls(
                     (lambda: thing.__enter__(self), lambda exc_type,
                      exc_value, exc_traceback: thing.__exit__(
                          self, exc_type, exc_value, exc_traceback)))),
                 '__enter__':
                 lambda self: getattr(self, property_name).__enter__(),
                 '__exit__':
                 lambda self, exc_type, exc_value, exc_traceback: getattr(
                     self, property_name).__exit__(exc_type, exc_value,
                                                   exc_traceback),
                 '__wrapped__':
                 caching.CachedProperty(
                     lambda self: getattr(self, property_name)),
             })
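The trick above is dynamic subclassing: the returned class keeps the wrapped class's behavior but routes `__enter__` and `__exit__` through a per-instance cached wrapper. A minimal standalone sketch of the same pattern, using only the standard library (all names here are illustrative, not from the original module):

import functools

class LoudCM:
    '''A plain context-manager class, used only for demonstration.'''
    def __enter__(self):
        print('entering')
        return self
    def __exit__(self, exc_type, exc_value, exc_traceback):
        print('exiting')
        return None

def wrap_cm_class(cm_class):
    '''Subclass `cm_class`, delegating enter/exit through a cached pair.'''
    class Wrapped(cm_class):
        @functools.cached_property
        def _wrapped_pair(self):
            # Cache the bound enter/exit callables once per instance.
            return (lambda: cm_class.__enter__(self),
                    lambda *exc_info: cm_class.__exit__(self, *exc_info))
        def __enter__(self):
            return self._wrapped_pair[0]()
        def __exit__(self, exc_type, exc_value, exc_traceback):
            return self._wrapped_pair[1](exc_type, exc_value, exc_traceback)
    Wrapped.__name__ = cm_class.__name__
    return Wrapped

with wrap_cm_class(LoudCM)():
    pass    # Prints 'entering', then 'exiting'.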
Example #2
    class Card():
        def __init__(self, number_and_suit):
            number, suit = number_and_suit
            assert number in range(1, 14)
            assert isinstance(suit, Suit)
            self.number = number
            self.suit = suit

        _sequence = \
                  caching.CachedProperty(lambda self: (self.number, self.suit))
        _reduced = \
              caching.CachedProperty(lambda self: (type(self), self._sequence))

        def __lt__(self, other):
            if not isinstance(other, Card): return NotImplemented
            return self._sequence < other._sequence

        def __eq__(self, other):
            return type(self) == type(other) and \
                                              self._sequence == other._sequence

        __hash__ = lambda self: hash(self._reduced)
        __repr__ = lambda self: '%s%s' % (self.number if self.number <= 10 else
                                          'jqk'[self.number - 11],
                                          str(self.suit.name)[0].capitalize())
Example #3
 class E:
     different_type_freeze_counter = caching.CachedProperty(0)
     different_type_thaw_counter = caching.CachedProperty(0)
     different_type_freezer = FreezerProperty(
         freezer_type=CustomFreezer,
         doc='A freezer using a custom freezer class.'
     )
Example #4
 class D:
     mix_freeze_counter = caching.CachedProperty(0)
     mix_thaw_counter = caching.CachedProperty(0)
     def increment_mix_freeze_counter(self):
         self.mix_freeze_counter += 1
     mix_freezer = FreezerProperty(on_freeze=increment_mix_freeze_counter)
     @mix_freezer.on_thaw
     def increment_mix_thaw_counter(self):
         self.mix_thaw_counter += 1
Example #5
 class C:
     decorate_happy_freeze_counter = caching.CachedProperty(0)
     decorate_happy_thaw_counter = caching.CachedProperty(0)
     decorate_happy_freezer = FreezerProperty()
     @decorate_happy_freezer.on_freeze
     def increment_decorate_happy_freeze_counter(self):
         self.decorate_happy_freeze_counter += 1
     @decorate_happy_freezer.on_thaw
     def increment_decorate_happy_thaw_counter(self):
         self.decorate_happy_thaw_counter += 1
Example #6
 class C:
     argument_happy_freeze_counter = caching.CachedProperty(0)
     argument_happy_thaw_counter = caching.CachedProperty(0)
     def increment_argument_happy_freeze_counter(self):
         self.argument_happy_freeze_counter += 1
     def increment_argument_happy_thaw_counter(self):
         self.argument_happy_thaw_counter += 1
     argument_happy_freezer = FreezerProperty(
         on_freeze=increment_argument_happy_freeze_counter,
         on_thaw=increment_argument_happy_thaw_counter,
         name='argument_happy_freezer'
     )
class EventHandlerGrokker(object):
    '''Wraps an event handling function and figures out what to bind it to.'''
    def __init__(self, name, event_handler_self_taking_function,
                 evt_handler_type):
        '''
        Construct the `EventHandlerGrokker`.
        
        `name` is the name of the event handling function.
        `event_handler_self_taking_function` is the function itself, as a
        plain function (i.e. one taking two arguments, `self` and `event`).
        `evt_handler_type` is the class in which that event handler is defined.
        '''
        assert evt_handler_type._BindSavvyEvtHandlerType__name_parser.match(
            name, evt_handler_type.__name__)

        self.name = name

        self.event_handler_self_taking_function = \
            event_handler_self_taking_function

        self.evt_handler_type = evt_handler_type


    parsed_words = caching.CachedProperty(
        lambda self: self.evt_handler_type. \
                                   _BindSavvyEvtHandlerType__name_parser.parse(
            self.name,
            self.evt_handler_type.__name__
        ),
        doc=''' '''
    )

    def bind(self, evt_handler):
        assert isinstance(evt_handler, wx.EvtHandler)
        event_handler_bound_method = types.MethodType(
            self.event_handler_self_taking_function, evt_handler,
            self.evt_handler_type)
        if len(self.parsed_words) >= 2:
            closer_evt_handler = address_tools.resolve(
                '.'.join(('window', ) + self.parsed_words[:-1]),
                namespace={'window': evt_handler})
        else:
            closer_evt_handler = None
        last_word = self.parsed_words[-1]
        component_candidate = getattr(closer_evt_handler or evt_handler,
                                      last_word, None)
        if component_candidate is not None and \
           hasattr(component_candidate, 'GetId'):
            component = component_candidate
            event_codes = get_event_codes_of_component(component)
            for event_code in event_codes:
                evt_handler.Bind(event_code,
                                 event_handler_bound_method,
                                 source=component)

        else:
            evt_handler.Bind(
                get_event_code_from_name(last_word, self.evt_handler_type),
                event_handler_bound_method,
            )
Example #8
class EnumType(enum.EnumMeta):
    '''Metaclass for our kickass enum type.'''
    __getitem__ = lambda self, i: self._values_tuple[i]
    # This `__getitem__` is important so that we can feed enum types straight
    # into `ProductSpace`.

    _values_tuple = caching.CachedProperty(tuple)
Example #9
class _ReentrantContextManager(_ContextManagerWrapper):

    depth = caching.CachedProperty(0,
                                   doc='''
            The number of nested suites that entered this context manager.
            
            When the context manager is completely unused, it's `0`. When
            it's first used, it becomes `1`. When its entered again, it
            becomes `2`. If it is then exited, it returns to `1`, etc.
            ''')

    def __enter__(self):
        if self.depth == 0:
            self._enter_value = self._wrapped_enter()
        self.depth += 1
        return self._enter_value

    def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
        assert self.depth >= 1
        if self.depth == 1:
            exit_value = self._wrapped_exit(exc_type, exc_value, exc_traceback)
            self._enter_value = None
        else:
            exit_value = None
        self.depth -= 1
        return exit_value
Example #10
class ReentrantContextManager(ContextManager):
    '''
    A context manager which can be entered several times before it's exited.
    
    Subclasses should override `reentrant_enter` and `reentrant_exit`, which
    are analogues to `__enter__` and `__exit__`, except they are called only on
    the outermost suite. In other words: When you enter the reentrant context
    manager for the first time, `reentrant_enter` is called. If you enter it
    for a second time, nothing is called. Now `.depth == 2`. Exit it now,
    nothing is called. Exit it again, and `reentrant_exit` is called.
    
    Note: The value returned by `reentrant_enter` will be returned by all the
    no-op `__enter__` actions contained in the outermost suite.
    '''
    
    depth = caching.CachedProperty(
        0,
        doc='''
            The number of nested suites that entered this context manager.
            
            When the context manager is completely unused, it's `0`. When it's
            first used, it becomes `1`. When it's entered again, it becomes `2`.
            If it is then exited, it returns to `1`, etc.
            '''
    )

    
    def __enter__(self):
        '''Enter the context manager.'''
        if self.depth == 0:
            self.__last_reentrant_enter_return_value = self.reentrant_enter()
        self.depth += 1
        return self.__last_reentrant_enter_return_value
    
    
    def __exit__(self, exc_type, exc_value, exc_traceback):
        '''Exit the context manager.'''
        assert self.depth >= 1
        if self.depth == 1:
            # Saving `reentrant_exit`'s return value, since it might be
            # signalling an exception swallowing:
            return_value = self.reentrant_exit(exc_type,
                                               exc_value,
                                               exc_traceback)
        else:
            return_value = None
        self.depth -= 1
        return return_value

        
    def reentrant_enter(self):
        '''Function that gets called when entering the outermost suite.'''
        return self
        
    
    def reentrant_exit(self, exc_type, exc_value, exc_traceback):
        '''Function that gets called when exiting the outermost suite.'''
        pass
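A short usage sketch, assuming the `ReentrantContextManager` above is importable; the subclass and the prints are purely illustrative:

class Connection(ReentrantContextManager):
    '''Opens on the outermost `with`, closes when the last suite exits.'''
    def reentrant_enter(self):
        print('opening')
        return self
    def reentrant_exit(self, exc_type, exc_value, exc_traceback):
        print('closing')

connection = Connection()
with connection:                      # Prints 'opening'; depth becomes 1.
    with connection:                  # Nothing printed; depth becomes 2.
        assert connection.depth == 2
    assert connection.depth == 1      # Inner exit is a no-op.
assert connection.depth == 0          # Outermost exit printed 'closing'.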
Example #11
class EnumType(enum.EnumMeta):
    '''Metaclass for our kickass enum type.'''
    def __dir__(cls):
        # working around Python bug 22506 that would be fixed in Python 3.5.
        return type.__dir__(cls) + cls._member_names_

    __getitem__ = lambda self, i: self._values_tuple[i]
    # This `__getitem__` is important so that we can feed enum types straight
    # into `ProductSpace`.

    _values_tuple = caching.CachedProperty(tuple)
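The cached `_values_tuple` is what makes positional indexing cheap here. A standalone sketch of the same idea using only the standard library's `enum` (names below are illustrative):

import enum

class IndexableEnumMeta(enum.EnumMeta):
    '''Metaclass that lets enum types be indexed by position, like a tuple.'''
    @property
    def _values_tuple(cls):
        # The original caches this tuple; a plain property is enough for the
        # sketch.
        return tuple(cls)
    def __getitem__(cls, i):
        return cls._values_tuple[i]

class Weekday(enum.Enum, metaclass=IndexableEnumMeta):
    sunday = 0
    monday = 1
    tuesday = 2

assert Weekday[1] is Weekday.monday
assert Weekday[-1] is Weekday.tuesday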
Example #12
class MapSpace(sequence_tools.CuteSequenceMixin, collections.abc.Sequence):
    '''
    A space of a function applied to a sequence.
    
    This is similar to Python's builtin `map`, except that it behaves like a
    sequence rather than an iterable. (Though it's also iterable.) You can
    access any item by its index number.
    
    Example:
    
        >>> map_space = MapSpace(lambda x: x ** 2, range(7))
        >>> map_space
        MapSpace(<function <lambda> at 0x00000000030C1510>, range(0, 7))
        >>> len(map_space)
        7
        >>> map_space[3]
        9
        >>> tuple(map_space)
        (0, 1, 4, 9, 16, 25, 36)
    
    '''
    def __init__(self, function, sequence):

        self.function = function
        self.sequence = sequence_tools.ensure_iterable_is_immutable_sequence(
            sequence, default_type=nifty_collections.LazyTuple)

    length = caching.CachedProperty(
        lambda self: sequence_tools.get_length(self.sequence))

    def __repr__(self):
        return '%s(%s, %s)' % (type(self).__name__, self.function,
                               self.sequence)

    def __getitem__(self, i):
        if isinstance(i, slice):
            return type(self)(self.function, self.sequence[i])
        assert isinstance(i, int)
        return self.function(self.sequence[i])  # Propagating `IndexError`.

    def __iter__(self):
        for item in self.sequence:
            yield self.function(item)

    _reduced = property(lambda self:
                        (type(self), self.function, self.sequence))

    __eq__ = lambda self, other: (isinstance(other, MapSpace) and self._reduced
                                  == other._reduced)
    __hash__ = lambda self: hash(self._reduced)

    __bool__ = lambda self: bool(self.sequence)
Example #13
class _OrderableEnumMixin(object):
    '''
    Mixin for an enum that has an order between items.
    
    We're defining a mixin rather than defining these things on `CuteEnum`
    because we can't use `functools.total_ordering` on `Enum`, because `Enum`
    has exception-raising comparison methods, so `functools.total_ordering`
    doesn't override them.
    '''
    number = caching.CachedProperty(
        lambda self: type(self)._values_tuple.index(self))
    __lt__ = lambda self, other: isinstance(other, CuteEnum) and \
                                                   (self.number < other.number)
Example #14
class Freezer(context_management.DelegatingContextManager):
    '''
    A freezer is used as a context manager to "freeze" and "thaw" an object.

    Different kinds of objects have different concepts of "freezing" and
    "thawing": A GUI widget could be graphically frozen, preventing the OS from
    drawing any changes to it, and then when it's thawed have all the changes
    drawn at once. As another example, an ORM could be frozen to have it not
    write to the database while a suite is being executed, and then have it
    write all the data at once when thawed.

    This class only implements the abstract behavior of a freezer: It is a
    reentrant context manager which has handlers for freezing and thawing, and
    its level of frozenness can be checked by accessing the attribute
    `.frozen`. It's up to subclasses to override `freeze_handler` and
    `thaw_handler` to do whatever they should do on freeze and thaw. Note that
    you can override either of these methods to be a no-op, sometimes even both
    methods, and still have a useful freezer by checking the property `.frozen`
    in the logic of the parent object.
    '''

    delegatee_context_manager = caching.CachedProperty(DelegateeContextManager)
    '''The context manager which implements our `__enter__` and `__exit__`.'''


    frozen = misc_tools.ProxyProperty(
        '.delegatee_context_manager.depth'
    )
    '''
    An integer specifying the freezer's level of frozenness.

    If the freezer is not frozen, it's `0`. When it's frozen, it becomes `1`,
    and then every time the freezer is used as a context manager the `frozen`
    level increases. When reduced to `0` again the freezer is said to have
    thawed.

    This can be conveniently used as a boolean, i.e. `if my_freezer.frozen:`.
    '''

    def freeze_handler(self):
        '''Do something when the object gets frozen.'''

    def thaw_handler(self):
        '''Do something when the object gets thawed.'''
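A sketch of how a concrete freezer might look, assuming the `Freezer` above is importable; the widget and its methods are imaginary:

class WidgetFreezer(Freezer):
    '''Freezer that suppresses redraws of a (hypothetical) widget.'''
    def __init__(self, widget):
        self.widget = widget
    def freeze_handler(self):
        self.widget.suspend_drawing()    # Hypothetical widget API.
    def thaw_handler(self):
        self.widget.resume_drawing()     # Hypothetical widget API.

# freezer = WidgetFreezer(widget)
# with freezer:
#     with freezer:               # Reentrant: the handlers fire only once.
#         mutate_widget_a_lot()
# assert not freezer.frozen       # Thawed; `frozen` is back to 0.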
Example #15
class ChainSpace(sequence_tools.CuteSequenceMixin, collections.abc.Sequence):
    '''
    A space of sequences chained together.
    
    This is similar to `itertools.chain`, except that items can be fetched by
    index number rather than just iteration.
    
    Example:
    
        >>> chain_space = ChainSpace(('abc', (1, 2, 3)))
        >>> chain_space
        <ChainSpace: 3+3>
        >>> chain_space[4]
        2
        >>> tuple(chain_space)
        ('a', 'b', 'c', 1, 2, 3)
        >>> chain_space.index(2)
        4
    
    '''
    def __init__(self, sequences):
        self.sequences = nifty_collections.LazyTuple(
            (sequence_tools.ensure_iterable_is_immutable_sequence(
                sequence, default_type=nifty_collections.LazyTuple)
                                                     for sequence in sequences)
        )
        
    @caching.CachedProperty
    @nifty_collections.LazyTuple.factory()
    def accumulated_lengths(self):
        '''
        A sequence of the accumulated length as every sequence is added.
        
        For example, if this chain space has sequences with lengths of 10, 100
        and 1000, this would be `[0, 10, 110, 1110]`.
        '''
        total = 0
        yield 0
        for sequence in self.sequences:
            total += sequence_tools.get_length(sequence)
            yield total
        
        
    length = caching.CachedProperty(lambda self: self.accumulated_lengths[-1])
        
    def __repr__(self):
        return '<%s: %s>' % (
            type(self).__name__,
            '+'.join(str(len(sequence)) for sequence in self.sequences),
        )
        
    def __getitem__(self, i):
        if isinstance(i, slice):
            raise NotImplementedError
        assert isinstance(i, int)
        if i <= -1:
            i += self.length
        if i < 0:
            raise IndexError
        if self.accumulated_lengths.is_exhausted and i >= self.length:
            raise IndexError
        # Todo: Can't have a binary search here, it exhausts all the sequences.
        sequence_index = binary_search.binary_search_by_index(
            self.accumulated_lengths, i, rounding=binary_search.LOW_IF_BOTH
        )
        if sequence_index is None:
            raise IndexError
        sequence_start = self.accumulated_lengths[sequence_index]
        return self.sequences[sequence_index][i - sequence_start]
        
    
    def __iter__(self):
        for sequence in self.sequences:
            for thing in sequence:
                yield thing
        
    _reduced = property(lambda self: (type(self), self.sequences))
             
    __eq__ = lambda self, other: (isinstance(other, ChainSpace) and
                                  self._reduced == other._reduced)
    
    def __contains__(self, item):
        return any(item in sequence for sequence in self.sequences
                   if (not isinstance(sequence, str) or isinstance(item, str)))
        
    def index(self, item):
        '''Get the index number of `item` in this space.'''
        for sequence, accumulated_length in zip(self.sequences,
                                                self.accumulated_lengths):
            try:
                index_in_sequence = sequence.index(item)
            except ValueError:
                pass
            except TypeError:
                assert isinstance(sequence, (str, bytes)) and \
                                           (not isinstance(item, (str, bytes)))
            else:
                return index_in_sequence + accumulated_length
        else:
            raise ValueError
    
    def __bool__(self):
        try: next(iter(self))
        except StopIteration: return False
        else: return True
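The `__getitem__` above leans on the `accumulated_lengths` table: to find which underlying sequence holds index `i`, it searches the accumulated lengths and then offsets into that sequence. A standalone sketch of the same indexing technique with the standard library's `bisect` (finite sequences only):

import bisect
import itertools

def chain_getitem(sequences, i):
    '''Index into several sequences as if they were chained together.'''
    # accumulated[k] is the total number of items in sequences[:k].
    accumulated = [0] + list(itertools.accumulate(map(len, sequences)))
    if not 0 <= i < accumulated[-1]:
        raise IndexError(i)
    # The rightmost sequence whose starting offset is <= i holds the item.
    sequence_index = bisect.bisect_right(accumulated, i) - 1
    return sequences[sequence_index][i - accumulated[sequence_index]]

assert chain_getitem(('abc', (1, 2, 3)), 4) == 2
assert [chain_getitem(('abc', (1, 2, 3)), i)
        for i in range(6)] == ['a', 'b', 'c', 1, 2, 3]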
class _VariationRemovingMixin:
    '''Mixin for `PermSpace` to add variations to a perm space.'''
    purified = caching.CachedProperty(
        lambda self: PermSpace(len(self.sequence)),
        doc='''A purified version of this `PermSpace`.''')

    ###########################################################################

    @caching.CachedProperty
    def unrapplied(self):
        '''A version of this `PermSpace` without a custom range.'''
        if self.is_recurrent and self.is_sliced:
            raise TypeError(
                "You can't get an unrapplied version of a recurrent, sliced "
                "`PermSpace` because after unrapplying it, it'll no longer be "
                "recurrent, and thus have a different number of elements, and "
                "thus the slice wouldn't be usable. Please use `.unsliced` "
                "first.")
        return PermSpace(self.sequence_length,
                         n_elements=self.n_elements,
                         domain=self.domain,
                         fixed_map={
                             key: self.sequence.index(value)
                             for key, value in self.fixed_map.items()
                         },
                         degrees=self.degrees,
                         slice_=self.canonical_slice,
                         is_combination=self.is_combination,
                         perm_type=self.perm_type)

    @caching.CachedProperty
    def unrecurrented(self):
        '''A version of this `PermSpace` with no recurrences.'''
        from .perm import UnrecurrentedPerm
        from .comb import UnrecurrentedComb
        assert self.is_recurrent  # Otherwise was overridden in `__init__`
        if self.is_sliced:
            raise TypeError(
                "You can't get an unrecurrented version of a sliced "
                "`PermSpace` because after unrecurrenting it, it'll have a "
                "different number of elements, and thus the slice wouldn't be "
                "usable. Please use `.unsliced` first.")
        if self.is_typed:
            raise TypeError(
                "You can't get an unrecurrented version of a typed "
                "`PermSpace`, because we need to use the "
                "`UnrecurrentedPerm` type to unrecurrent it.")

        sequence_copy = list(self.sequence)
        processed_fixed_map = {}
        for key, value in self.fixed_map.items():
            index = sequence_copy.index(value)
            sequence_copy[index] = misc.MISSING_ELEMENT
            processed_fixed_map[key] = (index, value)

        return PermSpace(enumerate(self.sequence),
                         n_elements=self.n_elements,
                         domain=self.domain,
                         fixed_map=processed_fixed_map,
                         degrees=self.degrees,
                         is_combination=self.is_combination,
                         perm_type=UnrecurrentedComb
                         if self.is_combination else UnrecurrentedPerm)

    @caching.CachedProperty
    def unpartialled(self):
        '''A non-partial version of this `PermSpace`.'''
        assert self.is_partial  # Otherwise this property would be overridden.
        if self.is_sliced:
            raise TypeError(
                "Can't convert sliced `PermSpace` directly to unpartialled, "
                "because the number of items would be different. Use "
                "`.unsliced` first.")
        if self.is_dapplied:
            raise TypeError(
                "Can't convert a partial, dapplied `PermSpace` to "
                "non-partialled, because we'll need to extend the domain with "
                "more items and we don't know which to use.")

        return PermSpace(self.sequence,
                         n_elements=self.sequence_length,
                         fixed_map=self.fixed_map,
                         degrees=self.degrees,
                         slice_=self.canonical_slice,
                         is_combination=self.is_combination,
                         perm_type=self.perm_type)

    @caching.CachedProperty
    def uncombinationed(self):
        '''A version of this `PermSpace` where permutations have order.'''
        from .perm import Perm
        if self.is_sliced:
            raise TypeError(
                "Can't convert sliced `CombSpace` directly to "
                "uncombinationed, because the number of items would be "
                "different. Use `.unsliced` first.")
        if self.is_typed:
            raise TypeError(
                "Can't convert typed `CombSpace` directly to "
                "uncombinationed, because the perm class would still be a "
                "subclass of `Comb`.")
        return PermSpace(self.sequence,
                         n_elements=self.n_elements,
                         domain=self.domain,
                         fixed_map=self.fixed_map,
                         degrees=self.degrees,
                         slice_=None,
                         is_combination=False,
                         perm_type=Perm)

    undapplied = caching.CachedProperty(
        lambda self: PermSpace(self.sequence,
                               n_elements=self.n_elements,
                               fixed_map=self._undapplied_fixed_map,
                               degrees=self.degrees,
                               slice_=self.canonical_slice,
                               is_combination=self.is_combination,
                               perm_type=self.perm_type),
        doc='''A version of this `PermSpace` without a custom domain.''')

    @caching.CachedProperty
    def unfixed(self):
        '''An unfixed version of this `PermSpace`.'''
        if self.is_sliced:
            raise TypeError("Can't be used on sliced perm spaces. Try "
                            "`perm_space.unsliced.unfixed`.")
        return PermSpace(self.sequence,
                         n_elements=self.n_elements,
                         domain=self.domain,
                         fixed_map=None,
                         degrees=self.degrees,
                         is_combination=self.is_combination,
                         perm_type=self.perm_type)

    @caching.CachedProperty
    def undegreed(self):
        '''An undegreed version of this `PermSpace`.'''
        if self.is_sliced:
            raise TypeError("Can't be used on sliced perm spaces. Try "
                            "`perm_space.unsliced.undegreed`.")
        return PermSpace(self.sequence,
                         n_elements=self.n_elements,
                         domain=self.domain,
                         fixed_map=self.fixed_map,
                         degrees=None,
                         is_combination=self.is_combination,
                         perm_type=self.perm_type)

    unsliced = caching.CachedProperty(
        lambda self: PermSpace(self.sequence,
                               n_elements=self.n_elements,
                               domain=self.domain,
                               fixed_map=self.fixed_map,
                               is_combination=self.is_combination,
                               degrees=self.degrees,
                               slice_=None,
                               perm_type=self.perm_type),
        doc='''An unsliced version of this `PermSpace`.''')

    untyped = caching.CachedProperty(
        lambda self: PermSpace(self.sequence,
                               n_elements=self.n_elements,
                               domain=self.domain,
                               fixed_map=self.fixed_map,
                               is_combination=self.is_combination,
                               degrees=self.degrees,
                               slice_=self.slice_,
                               perm_type=self.default_perm_type),
        doc='''An untyped version of this `PermSpace`.''')

    ###########################################################################
    ###########################################################################

    # More exotic variation removals below:

    _just_fixed = caching.CachedProperty(
        lambda self: self._get_just_fixed(),
        """A version of this perm space without any variations except fixed."""
    )

    def _get_just_fixed(self):
        # This gets overridden in `__init__`.
        raise RuntimeError

    _nominal_perm_space_of_perms = caching.CachedProperty(
        lambda self: self.unsliced.undegreed.unfixed, )
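Each of the properties above strips exactly one variation, and they compose. A hedged usage sketch, assuming the `combi` package's public `PermSpace` (exact flags may differ):

from combi import PermSpace

perm_space = PermSpace('abcd', fixed_map={0: 'b'})
assert perm_space.is_fixed and perm_space.is_rapplied
assert not perm_space.unfixed.is_fixed          # Fixed map stripped.
assert not perm_space.unrapplied.is_rapplied    # Custom range stripped.
assert len(perm_space.purified) == 24           # Back to a plain PermSpace(4).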
Example #17
class VariationSelection(object):
    '''
    A selection of variations of a `PermSpace`.
    
    The `combi` package allows many different variations on `PermSpace`. It may
    be range-applied, recurrent, partial, a combination, and more. Any
    selection of variations from this list is represented by a
    `VariationSelection` object. Some are allowed, while others aren't allowed.
    (For example a `PermSpace` that is both dapplied and a combination is not
    allowed.)
    
    This type is cached, meaning that after you create one from an iterable of
    variations and then try to create an identical one by using an iterable
    with the same variations, you'll get the original `VariationSelection`
    object you created.
    '''

    __metaclass__ = VariationSelectionType

    @classmethod
    @caching.cache()
    def _create_from_sorted_set(cls, variations):
        '''Create a `VariationSelection` from a `SortedSet` of variations.'''
        # This method exists so we can cache canonically. The `__new__`
        # method canonicalizes the `variations` argument to a `SortedSet` and
        # we cache according to it.
        variation_selection = super(VariationSelection, cls).__new__(cls)
        variation_selection.__init__(variations)
        return variation_selection

    def __init__(self, variations):
        self.variations = variations
        assert cute_iter_tools.is_sorted(self.variations)
        self.is_rapplied = Variation.RAPPLIED in self.variations
        self.is_recurrent = Variation.RECURRENT in self.variations
        self.is_partial = Variation.PARTIAL in self.variations
        self.is_combination = Variation.COMBINATION in self.variations
        self.is_dapplied = Variation.DAPPLIED in self.variations
        self.is_fixed = Variation.FIXED in self.variations
        self.is_degreed = Variation.DEGREED in self.variations
        self.is_sliced = Variation.SLICED in self.variations
        self.is_typed = Variation.TYPED in self.variations
        self.is_pure = not self.variations

    @caching.cache()
    def __repr__(self):
        return '<%s #%s: %s>' % (type(self).__name__, self.number, ', '.join(
            variation.value for variation in self.variations) or 'pure')

    @caching.CachedProperty
    def is_allowed(self):
        '''Is this `VariationSelection` allowed to be used in a `PermSpace`?'''
        _variations_set = set(self.variations)
        for variation_clash in variation_clashes:
            for variation, included in variation_clash.items():
                if (variation in _variations_set) != included:
                    break
            else:
                return False
        else:
            return True

    number = caching.CachedProperty(
        variation_selection_space.index,
        '''Serial number in the space of all variation selections.''')

    _reduced = caching.CachedProperty(lambda self: (type(self), self.number))
    _hash = caching.CachedProperty(lambda self: hash(self._reduced))
    __eq__ = lambda self, other: isinstance(other, VariationSelection) and \
                                                self._reduced == other._reduced
    __hash__ = lambda self: self._hash
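The comment in `_create_from_sorted_set` describes a general pattern: canonicalize the constructor argument, then cache on the canonical form so that equal inputs give the very same instance. A standalone sketch of that pattern with `functools.lru_cache` (illustrative names, not the original metaclass machinery):

import functools

class TagSelection:
    '''A selection of tags; equal selections are the very same object.'''
    def __new__(cls, tags):
        # Canonicalize to a sorted tuple, then delegate to the cached creator.
        return cls._create_from_canonical(tuple(sorted(set(tags))))

    @classmethod
    @functools.lru_cache(maxsize=None)
    def _create_from_canonical(cls, canonical_tags):
        selection = super().__new__(cls)
        selection.tags = canonical_tags
        return selection

assert TagSelection(['b', 'a']) is TagSelection(('a', 'b', 'a'))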
class _FixedMapManagingMixin:
    '''
    Mixin for `PermSpace` to manage the `fixed_map`. (For fixed perm spaces.)
    '''

    @caching.CachedProperty
    def fixed_indices(self):
        '''
        The indices of any fixed items in this `PermSpace`.

        This'll be different from `self.fixed_map.keys()` for dapplied perm
        spaces.
        '''
        if not self.fixed_map:
            return ()
        return tuple(map(self.domain.index, self.fixed_map))

    free_indices = caching.CachedProperty(
        lambda self: tuple(item for item in range(self.sequence_length)
                           if item not in self._undapplied_fixed_map.keys()),
        doc='''Integer indices of free items.'''
    )
    free_keys = caching.CachedProperty(
        lambda self: tuple(item for item in self.domain
                           if item not in self.fixed_map.keys()),
        doc='''Indices (possibly from domain) of free items.'''
    )

    @caching.CachedProperty
    def free_values(self):
        '''Items that can change between permutations.'''
        # This algorithm is required instead of just a one-liner because in the
        # case of recurrent sequences, we don't want to remove all the sequence
        # items that are in `self.fixed_map.values()` but only as many as there
        # are in `self.fixed_map.values()`.
        free_values = []
        fixed_counter = collections.Counter(self.fixed_map.values())
        for item in self.sequence:
            if fixed_counter[item]:
                fixed_counter[item] -= 1
            else:
                free_values.append(item)
        return tuple(free_values)

    @caching.CachedProperty
    def _n_cycles_in_fixed_items_of_just_fixed(self):
        '''
        The number of cycles in the fixed items of this `PermSpace`.

        This is used for degree calculations.
        '''
        unvisited_items = set(self._undapplied_unrapplied_fixed_map)
        n_cycles = 0
        while unvisited_items:
            starting_item = current_item = next(iter(unvisited_items))

            while current_item in unvisited_items:
                unvisited_items.remove(current_item)
                current_item = \
                            self._undapplied_unrapplied_fixed_map[current_item]

            if current_item == starting_item:
                n_cycles += 1

        return n_cycles

    @caching.CachedProperty
    def _undapplied_fixed_map(self):
        if self.is_dapplied:
            return {self.domain.index(key): value for key, value
                    in self.fixed_map.items()}
        else:
            return self.fixed_map

    @caching.CachedProperty
    def _undapplied_unrapplied_fixed_map(self):
        if self.is_dapplied or self.is_rapplied:
            return {self.domain.index(key): self.sequence.index(value)
                    for key, value in self.fixed_map.items()}
        else:
            return self.fixed_map


    @caching.CachedProperty
    def _free_values_purified_perm_space(self):
        '''
        A purified `PermSpace` of the free values in the `PermSpace`.

        Non-fixed permutation spaces have this set to `self` in the
        constructor.
        '''
        if self.is_fixed:
            return PermSpace(
                len(self.free_indices),
                n_elements=self.n_elements-len(self.fixed_map)
            )
        else:
            return self.purified


    _free_values_unsliced_perm_space = caching.CachedProperty(
        lambda self: self._free_values_purified_perm_space.get_degreed(
            (degree - self._n_cycles_in_fixed_items_of_just_fixed
                                                    for degree in self.degrees)
            if self.is_degreed else None).get_rapplied(self.free_values).
            get_dapplied(self.free_keys).
                          get_partialled(self.n_elements - len(self.fixed_map)),
    )
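The `free_values` comment above describes a small multiset subtraction: remove each fixed value from the sequence only as many times as it is actually fixed. A standalone sketch of that technique:

import collections

def remove_each_once(sequence, values_to_remove):
    '''Remove each item of `values_to_remove` from `sequence` at most once.'''
    counter = collections.Counter(values_to_remove)
    result = []
    for item in sequence:
        if counter[item]:
            counter[item] -= 1       # Consume one removal for this item.
        else:
            result.append(item)
    return tuple(result)

# With a recurrent sequence, only as many occurrences are dropped as were
# requested, not all of them:
assert remove_each_once('aabbc', ['a', 'b']) == ('a', 'b', 'c')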
Example #19
class CuteRange(CuteSequence):
    '''
    Improved version of Python's `range` that has extra features.

    `CuteRange` is like Python's built-in `range`, except (1) it's cute and (2)
    it's completely different. LOL, just kidding.

    `CuteRange` takes `start`, `stop` and `step` arguments just like `range`,
    but it allows you to use floating-point numbers (or decimals), and it
    allows you to use infinite numbers to produce infinite ranges.

    Obviously, `CuteRange` allows iteration, index access, searching for a
    number's index number, checking whether a number is in the range or not,
    and slicing.

    Examples:

        `CuteRange(float('inf'))` is an infinite range starting at zero and
        never ending.

        `CuteRange(7, float('inf'))` is an infinite range starting at 7 and
        never ending. (Like `itertools.count(7)` except it has all the
        amenities of a sequence, you can get items using list notation, you can
        slice it, you can get index numbers of items, etc.)

        `CuteRange(-1.6, 7.3)` is the finite range of numbers `(-1.6, -0.6,
        0.4, 1.4, 2.4, 3.4, 4.4, 5.4, 6.4)`.

        `CuteRange(10.4, -float('inf'), -7.1)` is the infinite range of numbers
        `(10.4, 3.3, -3.8, -10.9, -18.0, -25.1, ... )`.

    '''
    def __init__(self, *args):
        self.start, self.stop, self.step = parse_range_args(*args)

    _reduced = property(lambda self: (type(self),
                                      (self.start, self.stop, self.step)))

    __hash__ = lambda self: hash(self._reduced)

    __eq__ = lambda self, other: (type(self) == type(other) and
                                  (self._reduced == other._reduced))

    distance_to_cover = caching.CachedProperty(
        lambda self: self.stop - self.start)

    @caching.CachedProperty
    def length(self):
        '''
        The length of the `CuteRange`.

        We're using a property `.length` rather than the built-in `__len__`
        because `__len__` can't handle infinite values or floats.
        '''
        from python_toolbox import math_tools

        if math_tools.get_sign(self.distance_to_cover) != \
                                                math_tools.get_sign(self.step):
            return 0
        else:
            raw_length, remainder = math_tools.cute_divmod(
                self.distance_to_cover, self.step)
            raw_length += (remainder != 0)
            return raw_length

    __repr__ = lambda self: self._repr

    @caching.CachedProperty
    def _repr(self):
        return '%s(%s%s%s)' % (
            type(self).__name__,
            f'{self.start}, ',
            str(self.stop),
            f', {self.step}' if self.step != 1 else '',
        )

    @caching.CachedProperty
    def short_repr(self):
        '''
        A shorter representation of the `CuteRange`.

        This is different than `repr(cute_range)` only in cases where `step=1`.
        In these cases, while `repr(cute_range)` would be something like
        `CuteRange(7, 20)`, `cute_range.short_repr` would be `7..20`.
        '''
        if self.step != 1:
            return self._repr
        else:
            return f'{self.start}..{self.stop - 1}'

    def __getitem__(self, i, allow_out_of_range=False):
        from python_toolbox import sequence_tools
        if isinstance(i, numbers.Integral):
            if i < 0:
                if i < (-self.length) and not allow_out_of_range:
                    raise IndexError
                i += self.length
            if 0 <= i < self.length or allow_out_of_range:
                return self.start + (self.step * i)
            else:
                raise IndexError
        elif i == infinity:
            if self.length == infinity:
                return self.stop
            else:
                raise IndexError
        elif i == -infinity:
            raise IndexError
        elif isinstance(i, (slice, sequence_tools.CanonicalSlice)):
            canonical_slice = sequence_tools.CanonicalSlice(
                i, iterable_or_length=self)
            if not ((0 <= canonical_slice.start <= self.length) and
                    ((0 <= canonical_slice.stop <= self.length) or
                     (canonical_slice.stop == self.length == infinity))):
                raise TypeError
            return CuteRange(
                self.__getitem__(canonical_slice.start,
                                 allow_out_of_range=True),
                self.__getitem__(canonical_slice.stop,
                                 allow_out_of_range=True),
                self.step * canonical_slice.step)
        else:
            raise TypeError

    def __len__(self):
        # Sadly Python doesn't allow infinity or floats here.
        return self.length if isinstance(self.length, numbers.Integral) else 0

    def index(self, i, start=-infinity, stop=infinity):
        '''Get the index number of `i` in this `CuteRange`.'''
        from python_toolbox import math_tools
        if not isinstance(i, numbers.Number):
            raise ValueError
        else:
            distance = i - self.start
            if distance == 0 and self:
                if start <= 0 < stop: return 0
                else: raise ValueError("Found but not within range.")
            if math_tools.get_sign(distance) != math_tools.get_sign(self.step):
                raise ValueError
            index, remainder = math_tools.cute_divmod(distance, self.step)
            if remainder == 0 and (0 <= index < self.length
                                   or index == self.length == infinity):
                if start <= index < stop: return index
                else: raise ValueError("Found but not within range.")

            else:
                raise ValueError

    is_infinite = caching.CachedProperty(lambda self: self.length == infinity)
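The arithmetic in `length` above is ordinary ceiling division of the distance by the step, generalized to floats (and, in the original, to infinities). A standalone sketch for the finite case:

import math

def float_range_length(start, stop, step):
    '''Number of items in an arithmetic range that allows a float step.'''
    distance_to_cover = stop - start
    if math.copysign(1, distance_to_cover) != math.copysign(1, step):
        return 0
    raw_length, remainder = divmod(distance_to_cover, step)
    return int(raw_length) + (remainder != 0)

assert float_range_length(-1.6, 7.3, 1) == 9    # Matches the docstring above.
assert float_range_length(7, 20, 1) == 13       # CuteRange(7, 20): 13 items.
assert float_range_length(5, 0, 1) == 0         # Wrong direction: empty.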
Example #20
class Perm(sequence_tools.CuteSequenceMixin,
           collections.abc.Sequence,
           metaclass=PermType):
    '''
    A permutation of items from a `PermSpace`.

    In combinatorics, a permutation is a sequence of items taken from the
    original sequence.

    Example:

        >>> perm_space = PermSpace('abcd')
        >>> perm = Perm('dcba', perm_space)
        >>> perm
        <Perm: ('d', 'c', 'b', 'a')>
        >>> perm_space.index(perm)
        23

    '''
    @classmethod
    def coerce(cls, item, perm_space=None):
        '''Coerce item into a perm, optionally of a specified `PermSpace`.'''
        if isinstance(item, Perm) and (perm_space is not None) and \
          (item.nominal_perm_space == perm_space._nominal_perm_space_of_perms):
            return item
        else:
            return cls(item, perm_space)

    def __init__(self, perm_sequence, perm_space=None):
        '''
        Create the `Perm`.

        If `perm_space` is not supplied, we assume that this is a pure
        permutation, i.e. a permutation on `range(len(perm_sequence))`.
        '''
        perm_space = None if perm_space is None \
                                              else PermSpace.coerce(perm_space)
        assert isinstance(perm_sequence, collections.abc.Iterable)
        perm_sequence = sequence_tools. \
                           ensure_iterable_is_immutable_sequence(perm_sequence)

        ### Analyzing `perm_space`: ###########################################
        #                                                                     #
        if perm_space is None:
            if isinstance(perm_sequence, Perm):
                self.nominal_perm_space = perm_sequence.nominal_perm_space
            else:
                # We're assuming that `perm_sequence` is a pure
                # permutation sequence. Not asserting this because that would
                # be O(n).
                self.nominal_perm_space = PermSpace(len(perm_sequence))
        else:  # perm_space is not None
            self.nominal_perm_space = perm_space.unsliced.undegreed.unfixed

        # `self.nominal_perm_space` is a perm space that preserves only the
        # rapplied, recurrent, partial, dapplied and combination properties of
        # the original `PermSpace`.

        #                                                                     #
        ### Finished analyzing `perm_space`. ##################################

        self.is_rapplied = self.nominal_perm_space.is_rapplied
        self.is_recurrent = self.nominal_perm_space.is_recurrent
        self.is_partial = self.nominal_perm_space.is_partial
        self.is_combination = self.nominal_perm_space.is_combination
        self.is_dapplied = self.nominal_perm_space.is_dapplied
        self.is_pure = not (self.is_rapplied or self.is_dapplied
                            or self.is_partial or self.is_combination)

        if not self.is_rapplied: self.unrapplied = self
        if not self.is_dapplied: self.undapplied = self
        if not self.is_combination: self.uncombinationed = self

        self._perm_sequence = sequence_tools. \
             ensure_iterable_is_immutable_sequence(perm_sequence)

        assert self.is_combination == isinstance(self, Comb)

    _reduced = property(lambda self: (type(self), self._perm_sequence, self.
                                      nominal_perm_space))

    __iter__ = lambda self: iter(self._perm_sequence)

    def __eq__(self, other):
        return type(self) == type(other) and \
                      self.nominal_perm_space == other.nominal_perm_space and \
           cute_iter_tools.are_equal(self._perm_sequence, other._perm_sequence)

    __ne__ = lambda self, other: not (self == other)
    __hash__ = lambda self: hash(self._reduced)
    __bool__ = lambda self: bool(self._perm_sequence)

    def __contains__(self, item):
        try:
            return (item in self._perm_sequence)
        except TypeError:
            # Gotta have this `except` because Python complains if you try `1
            # in 'meow'`.
            return False

    def __repr__(self):
        return '<%s%s: %s(%s%s)>' % (
            type(self).__name__,
            (', n_elements=%s' % len(self)) if self.is_partial else '',
            ('(%s) => ' %
             ', '.join(map(repr, self.domain))) if self.is_dapplied else '',
            ', '.join(repr(item)
                      for item in self), ',' if self.length == 1 else '')

    def index(self, member):
        '''
        Get the index number of `member` in the permutation.

        Example:

            >>> perm = PermSpace(5)[10]
            >>> perm
            <Perm: (0, 2, 4, 1, 3)>
            >>> perm.index(3)
            4

        '''
        numerical_index = self._perm_sequence.index(member)
        return self.nominal_perm_space. \
               domain[numerical_index] if self.is_dapplied else numerical_index

    @caching.CachedProperty
    def inverse(self):
        '''
        The inverse of this permutation.

        i.e. the permutation that we need to multiply this permutation by to
        get the identity permutation.

        This is also accessible as `~perm`.

        Example:

            >>> perm = PermSpace(5)[10]
            >>> perm
            <Perm: (0, 2, 4, 1, 3)>
            >>> ~perm
            <Perm: (0, 3, 1, 4, 2)>
            >>> perm * ~perm
            <Perm: (0, 1, 2, 3, 4)>

        '''
        if self.is_partial:
            raise TypeError("Partial perms don't have an inverse.")
        if self.is_rapplied:
            raise TypeError("Rapplied perms don't have an inverse.")
        if self.is_dapplied:
            raise TypeError("Dapplied perms don't have an inverse.")
        _perm = [None] * self.nominal_perm_space.sequence_length
        for i, item in enumerate(self):
            _perm[item] = i
        return type(self)(_perm, self.nominal_perm_space)

    __invert__ = lambda self: self.inverse

    domain = caching.CachedProperty(
        lambda self: self.nominal_perm_space.domain,
        '''The permutation's domain.''')

    @caching.CachedProperty
    def unrapplied(self):
        '''An unrapplied version of this permutation.'''
        ### Calculating the new perm sequence: ################################
        #                                                                     #
        # This is more complex than a one-line generator because of recurrent
        # perms; every time there's a recurrent item, we need to take not
        # necessarily the index of its first occurrence in the rapplied sequence
        # but the first index we haven't taken already.
        rapplied_sequence = list(self.nominal_perm_space.sequence)
        new_perm_sequence = []
        for i in self._perm_sequence:
            i_index = rapplied_sequence.index(i)
            rapplied_sequence[i_index] = misc.MISSING_ELEMENT
            new_perm_sequence.append(i_index)
        #                                                                     #
        ### Finished calculating the new perm sequence. #######################

        unrapplied = type(self)(new_perm_sequence,
                                self.nominal_perm_space.unrapplied)
        assert not unrapplied.is_rapplied
        return unrapplied

    undapplied = caching.CachedProperty(
        lambda self: type(self)
        (self._perm_sequence, self.nominal_perm_space.undapplied),
        '''An undapplied version of this permutation.''')
    uncombinationed = caching.CachedProperty(
        lambda self: Perm(self._perm_sequence, self.nominal_perm_space.
                          uncombinationed),
        '''A non-combination version of this permutation.''')

    def __getitem__(self, i):
        if self.is_dapplied:
            try:
                i_to_use = self.domain.index(i)
            except TypeError as type_error:
                # Some types, like `str`, annoyingly raise `TypeError` instead
                # of `IndexError`.
                raise IndexError from type_error
        else:
            i_to_use = i
        return self._perm_sequence[i_to_use]

    length = property(lambda self: self.nominal_perm_space.n_elements)

    def apply(self, sequence, result_type=None):
        '''
        Apply the perm to a sequence, choosing items from it.

        This can also be used as `sequence * perm`. Example:

            >>> perm = PermSpace(5)[10]
            >>> perm
            <Perm: (0, 2, 4, 1, 3)>
            >>> perm.apply('growl')
            'golrw'
            >>> 'growl' * perm
            'golrw'

        Specify `result_type` to determine the type of the result returned. If
        `result_type=None`, will use `tuple`, except when `sequence` is a `str`
        or `Perm`, in which case that same type would be used.
        '''
        sequence = \
             sequence_tools.ensure_iterable_is_immutable_sequence(sequence)
        if sequence_tools.get_length(sequence) < \
                                               sequence_tools.get_length(self):
            raise Exception("Can't apply permutation on sequence of "
                            "shorter length.")

        permed_generator = (sequence[i] for i in self)
        if result_type is not None:
            if result_type is str:
                return ''.join(permed_generator)
            else:
                return result_type(permed_generator)
        elif isinstance(sequence, Perm):
            return type(self)(permed_generator, sequence.nominal_perm_space)
        elif isinstance(sequence, str):
            return ''.join(permed_generator)
        else:
            return tuple(permed_generator)

    __rmul__ = apply

    __mul__ = lambda self, other: other.__rmul__(self)

    # (Must define this explicitly because of Python special-casing
    # multiplication of objects of the same type.)

    def __pow__(self, exponent):
        '''Raise the perm by the power of `exponent`.'''
        assert isinstance(exponent, numbers.Integral)
        if exponent <= -1:
            return self.inverse**(-exponent)
        elif exponent == 0:
            return self.nominal_perm_space[0]
        else:
            assert exponent >= 1
            return misc_tools.general_product((self, ) * exponent)

    @caching.CachedProperty
    def degree(self):
        '''
        The permutation's degree.

        You can think of a permutation's degree like this: Imagine that you're
        starting with the identity permutation, and you want to make this
        permutation, by switching two items with each other over and over again
        until you get this permutation. The degree is the number of such
        switches you'll have to make.
        '''
        if self.is_partial:
            return NotImplemented
        else:
            return len(self) - self.n_cycles

    @caching.CachedProperty
    def n_cycles(self):
        '''
        The number of cycles in this permutation.

        If item 1 points at item 7, and item 7 points at item 3, and item 3
        points at item 1 again, then that's one cycle. `n_cycles` is the total
        number of cycles in this permutation.
        '''
        if self.is_partial:
            return NotImplemented
        if self.is_rapplied:
            return self.unrapplied.n_cycles
        if self.is_dapplied:
            return self.undapplied.n_cycles

        unvisited_items = set(self)
        n_cycles = 0
        while unvisited_items:
            starting_item = current_item = next(iter(unvisited_items))

            while current_item in unvisited_items:
                unvisited_items.remove(current_item)
                current_item = self[current_item]

            if current_item == starting_item:
                n_cycles += 1

        return n_cycles

    def get_neighbors(self, *, degrees=(1, ), perm_space=None):
        '''
        Get the neighbor permutations of this permutation.

        This means, get the permutations that are close to this permutation. By
        default, this means permutations that are one transformation (switching
        a pair of items) away from this permutation. You can specify a custom
        sequence of integers to the `degrees` argument to get different degrees
        of relation. (e.g. specify `degrees=(1, 2)` to get both the closest
        neighbors and the second-closest neighbors.)
        '''
        from ..map_space import MapSpace
        if self.is_combination or self.is_recurrent or self.is_partial:
            raise NotImplementedError
        if perm_space is None:
            perm_space = self.nominal_perm_space
        return MapSpace(
            perm_space.coerce_perm,
            nifty_collections.LazyTuple(
                tuple(perm)
                for perm in PermSpace(self._perm_sequence, degrees=degrees)
                if tuple(perm) in perm_space))

    def __lt__(self, other):
        if isinstance(other, Perm) and \
                           self.nominal_perm_space == other.nominal_perm_space:
            return self._perm_sequence < other._perm_sequence
        else:
            return NotImplemented

    __reversed__ = lambda self: type(self)(reversed(self._perm_sequence), self.
                                           nominal_perm_space)

    items = caching.CachedProperty(PermItems)
    as_dictoid = caching.CachedProperty(PermAsDictoid)
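The `n_cycles` loop above (and hence `degree`) can be checked by hand on a small permutation; a standalone sketch of the same cycle-counting idea on a plain tuple:

def count_cycles(perm):
    '''Count the cycles of a permutation given as a tuple of indices.'''
    unvisited = set(range(len(perm)))
    n_cycles = 0
    while unvisited:
        start = current = next(iter(unvisited))
        while current in unvisited:
            unvisited.remove(current)
            current = perm[current]
        if current == start:
            n_cycles += 1
    return n_cycles

# (0, 2, 4, 1, 3) has two cycles, (0) and (1 2 4 3), so its degree is
# len(perm) - n_cycles == 5 - 2 == 3.
assert count_cycles((0, 2, 4, 1, 3)) == 2
assert count_cycles((0, 1, 2, 3, 4)) == 5    # Identity: every item is a cycle.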
Example #21
class ObjectWithId(object):
    Id = caching.CachedProperty(lambda object: wx.NewId())
class CrossProcessPersistent(Persistent):
    '''
    Object that sometimes shouldn't really be duplicated.

    Say some plain object references a `CrossProcessPersistent` object. Then
    that plain object gets deepcopied with the `DontCopyPersistent` copy mode.
    The plain object will get deepcopied, but the `CrossProcessPersistent`
    object under it will not! The new copy of the plain object will refer to
    the same old copy of the `CrossProcessPersistent` object.
    
    This is useful for objects which are read-only and possibly heavy. You may
    use `CrossProcessPersistent` as a base class for these kinds of objects.
    
    Keep in mind that a `CrossProcessPersistent` is read-only. This means that
    starting from the first time that it is copied or put in a queue, it should
    not be changed.

    There is no mechanism that enforces that the user doesn't change the
    object, so the user must remember not to change it.
    
    What this class adds over `Persistent`, is that when a
    `CrossProcessPersistent` is passed around between processes in queues, each
    process retains only one copy of it.
    
    Note: This class is still experimental.
    '''
    
    _is_atomically_pickleable = True
    
    
    def __new__(cls, *args, **kwargs):
        
        # Here we need to check in what context `__new__` was called.
        # There are two options:
        #   1. The object is being created.
        #   2. The object is being unpickled.
        # We check whether we are getting a uuid token. If we are, it's
        # unpickling. If we aren't, it's creation.
        
        if len(args) == 1 and (not kwargs) and isinstance(args[0], UuidToken):
            received_uuid = args[0].uuid
        else:
            received_uuid = None
            
        if received_uuid: # The object is being unpickled
            thing = library.get(received_uuid, None)
            if thing:
                thing._CrossProcessPersistent__skip_setstate = True
                return thing
            else: # This object does not exist in our library yet; let's add it
                thing = super().__new__(cls)
                thing._CrossProcessPersistent__uuid = received_uuid
                library[received_uuid] = thing
                return thing
                
        else: # The object is being created
            thing = super().__new__(cls)
            new_uuid = uuid.uuid4()
            thing._CrossProcessPersistent__uuid = new_uuid
            library[new_uuid] = thing
            return thing

        
    def has_same_uuid_as(self, other):
        '''Does `other` have the same uuid as us?'''
        if not isinstance(other, CrossProcessPersistent):
            return NotImplemented
        return self.__uuid == other.__uuid

    
    def __getstate__(self):
        my_dict = dict(self.__dict__)
        del my_dict['_CrossProcessPersistent__uuid']
        return my_dict

    
    def __getnewargs__(self):
        return (UuidToken(self._CrossProcessPersistent__uuid),)

    
    def __setstate__(self, state):
        if self.__dict__.pop('_CrossProcessPersistent__skip_setstate', None):
            return
        else:
            self.__dict__.update(state)

            
    def __reduce_ex__(self, protocol):
        if protocol < 2:
            raise Exception(
                "You're trying to pickle a `CrossProcessPersistent` object "
                "using protocol %s. You must use protocol 2 or "
                "upwards." % protocol
            )
        else:
            return object.__reduce_ex__(self, protocol)
            
        
    def __deepcopy__(self, memo):
        '''
        Deepcopy the object. If `DontCopyPersistent` is given, only mock-copy.
        
        When this method receives an instance of `DontCopyPersistent` as a
        memo dictionary, it will not actually `deepcopy` the object but only
        return a reference to the original object.
        '''
        if isinstance(memo, DontCopyPersistent):
            memo[id(self)] = self
            return self
        else:
            new_copy = copy_tools.deepcopy_as_simple_object(self, memo)
            new_copy._Persistent__uuid = uuid.uuid4()
            try:
                # Drop the cached personality on the new copy so that it gets
                # regenerated from the new uuid.
                del new_copy.personality
            except AttributeError:
                pass
            return new_copy

        
    personality = caching.CachedProperty(
        Personality,
        doc='''Personality containing a human name and two colors.'''
    )
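# A self-contained sketch of the `__deepcopy__` trick used above: when the
# memo passed to `copy.deepcopy` is an instance of a special dict subclass,
# the object declines to be copied and returns itself. `MiniDontCopy` and
# `MiniPersistent` are hypothetical stand-ins, not part of the original
# library.
import copy


class MiniDontCopy(dict):
    '''Memo dict signalling "don't actually copy persistent objects".'''


class MiniPersistent:
    def __deepcopy__(self, memo):
        if isinstance(memo, MiniDontCopy):
            memo[id(self)] = self  # Register ourselves so nested refs reuse us.
            return self
        new_copy = type(self)()
        memo[id(self)] = new_copy
        return new_copy


persistent = MiniPersistent()
container = {'payload': persistent}
assert copy.deepcopy(container, MiniDontCopy())['payload'] is persistent
assert copy.deepcopy(container)['payload'] is not persistent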
Example #23
0
class PermSpace(_VariationRemovingMixin,
                _VariationAddingMixin,
                _FixedMapManagingMixin,
                sequence_tools.CuteSequenceMixin,
                collections.abc.Sequence,
                metaclass=PermSpaceType):
    '''
    A space of permutations on a sequence.

    Each item in a `PermSpace` is a `Perm`, i.e. a permutation. This is similar
    to `itertools.permutations`, except it offers far, far more functionality.
    The permutations may be accessed by index number, the permutation space can
    have its range and domain specified, some items can be fixed, and more.

    Here is the simplest possible `PermSpace`:

        >>> perm_space = PermSpace(3)
        >>> perm_space
        <PermSpace: 0..2>
        >>> perm_space[2]
        <Perm: (1, 0, 2)>
        >>> tuple(perm_space)
        (<Perm: (0, 1, 2)>, <Perm: (0, 2, 1)>, <Perm: (1, 0, 2)>,
         <Perm: (1, 2, 0)>, <Perm: (2, 0, 1)>, <Perm: (2, 1, 0)>)

    The members are `Perm` objects, which are sequence-like objects that have
    extra functionality. (See documentation of `Perm` for more info.)

    The permutations are generated on-demand, not in advance. This means you
    can easily create something like `PermSpace(1000)`, which has about
    10**2500 permutations in it (a number that far exceeds the number of
    particles in the universe), in a fraction of a second. You can then fetch
    by index number any permutation of the 10**2500 permutations in a fraction
    of a second as well.

    `PermSpace` allows the creation of various special kinds of permutation
    spaces. For example, you can pass an integer as `n_elements` to set a
    permutation length that's smaller than the sequence length (a.k.a.
    k-permutations). This variation of a `PermSpace` is called "partial" and
    it's one of 9 different variations, which are listed below.

     - Rapplied (Range-applied): having an arbitrary sequence as a range.
       To make one, pass your sequence as the first argument instead of the
       length.

     - Dapplied (Domain-applied): having an arbitrary sequence as a domain.
       To make one, pass a sequence into the `domain` argument.

     - Recurrent: If you provide a sequence (making the space rapplied) and
       that sequence has repeating items, you've made a recurrent `PermSpace`.
       It'll be shorter because all of the copies of the same item will be
       considered the same item. (Though they will appear more than once,
       according to their count in the sequence.)

     - Fixed: Having a specified number of indices always pointing at certain
       values, making the space smaller. To make one, pass a dict from each
       key to the value it should be fixed to as the argument `fixed_map`.

     - Sliced: A perm space can be sliced like any Python sequence (except you
       can't change the step.) To make one, use slice notation on an existing
       perm space, e.g. `perm_space[56:100]`.

     - Degreed: A perm space can be limited to perms of a certain degree. (A
       perm's degree is the number of transpositions, i.e. pairwise swaps,
       needed to produce it.) To make one, pass into the `degrees` argument
       either a single degree (like `5`) or a tuple of different degrees
       (like `(1, 3, 7)`).

     - Partial: A perm space can be partial, in which case not all elements
       are used in perms. E.g. you can have a perm space of a sequence of
       length 5 but with `n_elements=3`, so every perm will have only 3 items.
       (These are usually called "k-permutations" in math-land.) To make one,
       pass a number as the argument `n_elements`.

     - Combination: If you pass in `is_combination=True` or use the subclass
       `CombSpace`, then you'll have a space of combinations (`Comb`s) instead
       of perms. `Comb`s are like `Perm`s, except there's no order to the
       elements. (They are always forced into canonical order.)

     - Typed: If you pass in a perm subclass as `perm_type`, you'll get a typed
       `PermSpace`, meaning that the perms will use the class you provide
       rather than the default `Perm`. This is useful when you want to provide
       extra functionality on top of `Perm` that's specific to your use case.

    Most of these variations can be used in conjunction with each other, but
    some cannot. (See `variation_clashes` in `variations.py` for a list of
    clashes.)

    For each of these variations, there's a function to make a perm space have
    that variation and get rid of it. For example, if you want to make a normal
    perm space be degreed, call `.get_degreed()` on it with the desired
    degrees. If you want to make a degreed perm space non-degreed, access its
    `.undegreed` property. The same is true for all other variations.

    A perm space that has none of these variations is called pure.
    '''
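    # A hedged sketch of a few of the variations described above, written as
    # doctest-style comments; the numbers follow from the length formulas
    # further down in this class, and exact behavior may vary between versions.
    #
    #     >>> PermSpace('ab').is_rapplied          # Arbitrary sequence as range.
    #     True
    #     >>> PermSpace('aab').is_recurrent        # Repeating items.
    #     True
    #     >>> PermSpace(4, n_elements=2).length    # Partial: 4 * 3 ordered pairs.
    #     12
    #     >>> PermSpace(4, n_elements=2, is_combination=True).length   # 4 choose 2.
    #     6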
    @classmethod
    def coerce(cls, argument):
        '''Make `argument` into something of class `cls` if it isn't.'''
        if isinstance(argument, cls):
            return argument
        else:
            return cls(argument)

    def __init__(self,
                 iterable_or_length,
                 n_elements=None,
                 *,
                 domain=None,
                 fixed_map=None,
                 degrees=None,
                 is_combination=False,
                 slice_=None,
                 perm_type=None):

        ### Making basic argument checks: #####################################
        #                                                                     #
        assert isinstance(iterable_or_length,
                          (collections.abc.Iterable, numbers.Integral))
        if isinstance(iterable_or_length, numbers.Integral):
            assert iterable_or_length >= 0
        if slice_ is not None:
            assert isinstance(slice_, (slice, sequence_tools.CanonicalSlice))
            if slice_.step not in (1, None):
                raise NotImplementedError
        assert isinstance(n_elements, numbers.Integral) or n_elements is None
        assert isinstance(is_combination, bool)
        #                                                                     #
        ### Finished making basic argument checks. ############################

        ### Figuring out sequence and whether space is rapplied: ##############
        #                                                                     #
        if isinstance(iterable_or_length, numbers.Integral):
            self.is_rapplied = False
            self.sequence = sequence_tools.CuteRange(iterable_or_length)
            self.sequence_length = iterable_or_length
        else:
            assert isinstance(iterable_or_length, collections.abc.Iterable)
            self.sequence = sequence_tools. \
                      ensure_iterable_is_immutable_sequence(iterable_or_length)
            range_candidate = sequence_tools.CuteRange(len(self.sequence))

            self.is_rapplied = not (cute_iter_tools.are_equal(
                self.sequence, range_candidate))
            self.sequence_length = len(self.sequence)
            if not self.is_rapplied:
                self.sequence = sequence_tools.CuteRange(self.sequence_length)

        #                                                                     #
        ### Finished figuring out sequence and whether space is rapplied. #####

        ### Figuring out whether sequence is recurrent: #######################
        #                                                                     #
        if self.is_rapplied:
            self.is_recurrent = any(
                count >= 2 for count in self._frozen_ordered_bag.values())
        else:
            self.is_recurrent = False
        #                                                                     #
        ### Finished figuring out whether sequence is recurrent. ##############

        ### Figuring out number of elements: ##################################
        #                                                                     #

        self.n_elements = self.sequence_length if (n_elements is None) \
                                                                else n_elements
        if not isinstance(self.n_elements, int):
            raise TypeError('`n_elements` must be an `int`.')
        if not self.n_elements >= 0:
            raise TypeError('`n_elements` must be positive or zero.')

        self.is_partial = (self.n_elements != self.sequence_length)

        self.indices = sequence_tools.CuteRange(self.n_elements)

        #                                                                     #
        ### Finished figuring out number of elements. #########################

        ### Figuring out whether it's a combination: ##########################
        #                                                                     #
        self.is_combination = is_combination
        # Well that was quick.
        #                                                                     #
        ### Finished figuring out whether it's a combination. #################

        ### Figuring out whether space is dapplied: ###########################
        #                                                                     #
        if domain is None:
            domain = self.indices
        domain = \
               sequence_tools.ensure_iterable_is_immutable_sequence(domain)
        if self.is_partial:
            domain = domain[:self.n_elements]
        self.is_dapplied = not cute_iter_tools.are_equal(domain, self.indices)
        if self.is_dapplied:
            if self.is_combination:
                raise UnallowedVariationSelectionException({
                    variations.Variation.DAPPLIED:
                    True,
                    variations.Variation.COMBINATION:
                    True,
                })

            self.domain = domain
            if len(set(self.domain)) < len(self.domain):
                raise Exception('The domain must not have repeating elements.')
        else:
            self.domain = self.indices
            self.undapplied = self
        #                                                                     #
        ### Finished figuring out whether space is dapplied. ##################

        ### Figuring out fixed map: ###########################################
        #                                                                     #
        if fixed_map is None:
            fixed_map = {}
        if not isinstance(fixed_map, dict):
            if isinstance(fixed_map, collections.abc.Callable):
                fixed_map = {item: fixed_map(item) for item in self.sequence}
            else:
                fixed_map = dict(fixed_map)
        if fixed_map:
            self.fixed_map = {
                key: value
                for (key, value) in fixed_map.items()
                if (key in self.domain) and (value in self.sequence)
            }

        else:
            (self.fixed_map, self.free_indices, self.free_keys,
             self.free_values) = ({}, self.indices, self.domain, self.sequence)

        self.is_fixed = bool(self.fixed_map)
        if self.is_fixed:
            if not (self.is_dapplied or self.is_rapplied or degrees or slice_
                    or (n_elements is not None) or self.is_combination):
                self._just_fixed = self
            else:
                self._get_just_fixed = lambda: PermSpace(
                    len(self.sequence),
                    fixed_map=self._undapplied_unrapplied_fixed_map,
                )
        else:

            if not (self.is_dapplied or self.is_rapplied or degrees or slice_
                    or (n_elements is not None) or self.is_combination):
                self._just_fixed = self
            else:
                self._get_just_fixed = lambda: PermSpace(len(self.sequence))

        #                                                                     #
        ### Finished figuring out fixed map. ##################################

        ### Figuring out degrees: #############################################
        #                                                                     #
        all_degrees = sequence_tools.CuteRange(self.sequence_length)
        if degrees is None:
            degrees = ()
        degrees = sequence_tools.to_tuple(degrees, item_type=int)

        if (not degrees) or cute_iter_tools.are_equal(degrees, all_degrees):
            self.is_degreed = False
            self.degrees = all_degrees
        else:
            self.is_degreed = True
            if self.is_combination:
                raise UnallowedVariationSelectionException({
                    variations.Variation.DEGREED:
                    True,
                    variations.Variation.COMBINATION:
                    True,
                })
            if self.is_partial:
                raise UnallowedVariationSelectionException({
                    variations.Variation.DEGREED:
                    True,
                    variations.Variation.PARTIAL:
                    True,
                })
            if self.is_recurrent:
                raise UnallowedVariationSelectionException({
                    variations.Variation.DEGREED:
                    True,
                    variations.Variation.RECURRENT:
                    True,
                })
            # The space is degreed; we canonicalize `degrees` into a sorted
            # tuple.
            self.degrees = tuple(
                sorted(degree for degree in degrees if degree in all_degrees))

        #                                                                     #
        ### Finished figuring out degrees. ####################################

        ### Figuring out slice and length: ####################################
        #                                                                     #
        self.slice_ = slice_
        self.canonical_slice = sequence_tools.CanonicalSlice(
            slice_ or slice(float('inf')), self._unsliced_length)
        self.length = max(
            self.canonical_slice.stop - self.canonical_slice.start, 0)
        self.is_sliced = (self.length != self._unsliced_length)
        #                                                                     #
        ### Finished figuring out slice and length. ###########################

        ### Figuring out perm type: ###########################################
        #                                                                     #
        self.is_typed = perm_type not in (None, self.default_perm_type)

        self.perm_type = perm_type if self.is_typed else self.default_perm_type
        assert issubclass(self.perm_type, Perm)
        #                                                                     #
        ### Finished figuring out perm type. ##################################

        self.is_pure = not (self.is_rapplied or self.is_fixed or self.is_sliced
                            or self.is_degreed or self.is_partial
                            or self.is_combination or self.is_typed)

        if self.is_pure:
            self.purified = self
        if not self.is_rapplied:
            self.unrapplied = self
        if not self.is_recurrent:
            self.unrecurrented = self
        if not self.is_partial:
            self.unpartialled = self
        if not self.is_combination:
            self.uncombinationed = self
        # No need to do this for `undapplied`; it's already done above.
        if not self.is_fixed:
            self.unfixed = self
        if not self.is_degreed:
            self.undegreed = self
        if not self.is_sliced:
            self.unsliced = self
        if not self.is_typed:
            self.untyped = self

    __init__.signature = inspect.signature(__init__)
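    # A hedged illustration of how `__init__` classifies its arguments into
    # the boolean flags used throughout this class (doctest-style comments
    # only, not taken from the library's own docs):
    #
    #     >>> perm_space = PermSpace('aab', n_elements=2)
    #     >>> perm_space.is_rapplied, perm_space.is_recurrent, perm_space.is_partial
    #     (True, True, True)
    #     >>> perm_space.is_combination, perm_space.is_fixed, perm_space.is_pure
    #     (False, False, False)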

    @caching.CachedProperty
    def _unsliced_length(self):
        '''
        The number of perms in the space, ignoring any slicing.

        This is used as an interim step in calculating the actual length of the
        space with the slice taken into account.
        '''
        if self.n_elements > self.sequence_length:
            return 0
        if self.is_degreed:
            assert not self.is_recurrent and not self.is_partial and \
                                                        not self.is_combination
            return sum(
                math_tools.abs_stirling(
                    self.sequence_length -
                    len(self.fixed_map), self.sequence_length - degree -
                    self._n_cycles_in_fixed_items_of_just_fixed)
                for degree in self.degrees)
        elif self.is_fixed:
            assert not self.is_degreed and not self.is_combination
            if self.is_recurrent:
                return calculate_length_of_recurrent_perm_space(
                    self.n_elements - len(self.fixed_map),
                    nifty_collections.FrozenBagBag(
                        nifty_collections.Bag(self.free_values).values()))
            else:
                return math_tools.factorial(
                    len(self.free_indices),
                    start=(len(self.free_indices) -
                           (self.n_elements - len(self.fixed_map)) + 1))

        else:
            assert not self.is_degreed and not self.is_fixed
            if self.is_recurrent:
                if self.is_combination:
                    return calculate_length_of_recurrent_comb_space(
                        self.n_elements, self._frozen_bag_bag)
                else:
                    return calculate_length_of_recurrent_perm_space(
                        self.n_elements, self._frozen_bag_bag)

            else:
                return math_tools.factorial(
                    self.sequence_length,
                    start=(self.sequence_length - self.n_elements +
                           1)) // (math_tools.factorial(self.n_elements)
                                   if self.is_combination else 1)
                # This division is always without a remainder, because math.
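
    # Worked numbers for the non-degreed, non-fixed branch above (hedged; the
    # values follow from the factorial formulas, not from running any
    # particular version of this code):
    #
    #   * sequence_length=5, n_elements=3, plain perms:
    #     factorial(5, start=3) = 3 * 4 * 5 = 60 ordered 3-permutations.
    #   * The same space with is_combination=True:
    #     60 // factorial(3) = 60 // 6 = 10, i.e. "5 choose 3".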

    @caching.CachedProperty
    def variation_selection(self):
        '''
        The selection of variations that describes this space.

        For example, a rapplied, recurrent, fixed `PermSpace` will get
        `<VariationSelection #392: rapplied, recurrent, fixed>`.
        '''
        variation_selection = variations.VariationSelection(
            filter(None, (
                variations.Variation.RAPPLIED if self.is_rapplied else None,
                variations.Variation.RECURRENT if self.is_recurrent else None,
                variations.Variation.PARTIAL if self.is_partial else None,
                variations.Variation.COMBINATION
                if self.is_combination else None,
                variations.Variation.DAPPLIED if self.is_dapplied else None,
                variations.Variation.FIXED if self.is_fixed else None,
                variations.Variation.DEGREED if self.is_degreed else None,
                variations.Variation.SLICED if self.is_sliced else None,
                variations.Variation.TYPED if self.is_typed else None,
            )))
        assert variation_selection.is_allowed
        return variation_selection

    @caching.CachedProperty
    def _frozen_ordered_bag(self):
        '''
        A `FrozenOrderedBag` of the items in this space's sequence.

        This is useful for recurrent perm-spaces, where some counts would be 2
        or higher.
        '''
        return nifty_collections.FrozenOrderedBag(self.sequence)

    _frozen_bag_bag = caching.CachedProperty(
        lambda self: self._frozen_ordered_bag.frozen_bag_bag,
        '''A `FrozenBagBag` of items in this space's sequence.''')

    def __repr__(self):
        if self.is_dapplied:
            domain_repr = repr(self.domain)
            if len(domain_repr) > 40:
                domain_repr = \
                          ''.join((domain_repr[:35], ' ... ', domain_repr[-1]))
            domain_snippet = '%s => ' % domain_repr
        else:
            domain_snippet = ''

        sequence_repr = self.sequence.short_repr if \
                  hasattr(self.sequence, 'short_repr') else repr(self.sequence)
        if len(sequence_repr) > 40:
            sequence_repr = \
                      ''.join((sequence_repr[:35], ' ... ', sequence_repr[-1]))

        fixed_map_repr = repr(self.fixed_map)
        if len(fixed_map_repr) > 40:
            fixed_map_repr = ''.join(
                (fixed_map_repr[:35], ' ... ', fixed_map_repr[-1]))

        return '<%s: %s%s%s%s%s%s%s>%s' % (
            type(self).__name__, domain_snippet, sequence_repr,
            (f', n_elements={self.n_elements}') if self.is_partial else '',
            ', is_combination=True' if self.is_combination else '',
            f', fixed_map={fixed_map_repr}' if self.is_fixed else '',
            f', degrees={self.degrees}' if self.is_degreed else '',
            (f', perm_type={self.perm_type.__name__}')
            if self.is_typed else '',
            ('[%s:%s]' %
             (self.slice_.start, self.slice_.stop)) if self.is_sliced else '')

    def __getitem__(self, i):
        if isinstance(i, (slice, sequence_tools.CanonicalSlice)):
            canonical_slice = sequence_tools.CanonicalSlice(
                i, self.length, offset=self.canonical_slice.start)
            return PermSpace(self.sequence,
                             domain=self.domain,
                             n_elements=self.n_elements,
                             fixed_map=self.fixed_map,
                             degrees=self.degrees,
                             is_combination=self.is_combination,
                             slice_=canonical_slice,
                             perm_type=self.perm_type)

        assert isinstance(i, numbers.Integral)
        if i <= -1:
            i += self.length

        if not (0 <= i < self.length):
            raise IndexError
        elif self.is_sliced:
            return self.unsliced[i + self.canonical_slice.start]
        elif self.is_dapplied:
            return self.perm_type(self.undapplied[i], perm_space=self)

        #######################################################################
        elif self.is_degreed:
            if self.is_rapplied:
                assert not self.is_recurrent and \
                       not self.is_partial and not self.is_combination and \
                       not self.is_dapplied and not self.is_sliced
                return self.perm_type(map(self.sequence.__getitem__,
                                          self.unrapplied[i]),
                                      perm_space=self)


            assert not self.is_rapplied and not self.is_recurrent and \
                   not self.is_partial and not self.is_combination and \
                   not self.is_dapplied and not self.is_sliced
            # If that wasn't an example of asserting one's dominance, I don't
            # know what is.

            available_values = list(self.free_values)
            wip_perm_sequence_dict = dict(self.fixed_map)
            wip_n_cycles_in_fixed_items = \
                                    self._n_cycles_in_fixed_items_of_just_fixed
            wip_i = i
            for j in self.sequence:
                if j in wip_perm_sequence_dict:
                    continue
                for unused_value in available_values:
                    candidate_perm_sequence_dict = dict(wip_perm_sequence_dict)
                    candidate_perm_sequence_dict[j] = unused_value

                    ### Checking whether we closed a cycle: ###################
                    #                                                         #
                    if j == unused_value:
                        closed_cycle = True
                    else:
                        current = j
                        while True:
                            current = candidate_perm_sequence_dict[current]
                            if current == j:
                                closed_cycle = True
                                break
                            elif current not in candidate_perm_sequence_dict:
                                closed_cycle = False
                                break
                    #                                                         #
                    ### Finished checking whether we closed a cycle. ##########

                    candidate_n_cycles_in_fixed_items = \
                                     wip_n_cycles_in_fixed_items + closed_cycle

                    candidate_fixed_perm_space_length = sum(
                        math_tools.abs_stirling(
                            self.sequence_length -
                            len(candidate_perm_sequence_dict),
                            self.sequence_length - degree -
                            candidate_n_cycles_in_fixed_items)
                        for degree in self.degrees)

                    if wip_i < candidate_fixed_perm_space_length:
                        available_values.remove(unused_value)
                        wip_perm_sequence_dict[j] = unused_value
                        wip_n_cycles_in_fixed_items = \
                                              candidate_n_cycles_in_fixed_items

                        break
                    wip_i -= candidate_fixed_perm_space_length
                else:
                    raise RuntimeError
            assert wip_i == 0
            return self.perm_type(
                (wip_perm_sequence_dict[k] for k in self.domain), self)

        #######################################################################
        elif self.is_recurrent:
            assert not self.is_dapplied and not self.is_degreed and \
                                                             not self.is_sliced
            available_values = list(self.sequence)
            reserved_values = nifty_collections.Bag(self.fixed_map.values())
            wip_perm_sequence_dict = dict(self.fixed_map)
            wip_i = i
            shit_set = set()
            for j in range(self.n_elements):
                if j in self.fixed_map:
                    available_values.remove(self.fixed_map[j])
                    reserved_values[self.fixed_map[j]] -= 1
                    continue
                unused_values = [
                    item for item in
                    nifty_collections.OrderedBag(available_values) -
                    reserved_values if item not in shit_set
                ]
                for unused_value in unused_values:
                    wip_perm_sequence_dict[j] = unused_value

                    candidate_sub_perm_space = \
                                             PermSpace._create_with_cut_prefix(
                        self.sequence,
                        n_elements=self.n_elements,
                        fixed_map=wip_perm_sequence_dict,
                        is_combination=self.is_combination,
                        shit_set=shit_set, perm_type=self.perm_type
                    )

                    if wip_i < candidate_sub_perm_space.length:
                        available_values.remove(unused_value)
                        break
                    else:
                        wip_i -= candidate_sub_perm_space.length
                        if self.is_combination:
                            shit_set.add(wip_perm_sequence_dict[j])
                        del wip_perm_sequence_dict[j]
                else:
                    raise RuntimeError
            assert wip_i == 0
            return self.perm_type(
                dict_tools.get_tuple(wip_perm_sequence_dict, self.domain),
                self)

        #######################################################################
        elif self.is_fixed:
            free_values_perm = self._free_values_unsliced_perm_space[i]
            free_values_perm_iterator = iter(free_values_perm)
            return self.perm_type(
                tuple((self._undapplied_fixed_map[m] if (
                    m in self.fixed_indices
                ) else next(free_values_perm_iterator))
                      for m in self.indices), self)

        #######################################################################
        elif self.is_combination:
            wip_number = self.length - 1 - i
            wip_perm_sequence = []
            for i in range(self.n_elements, 0, -1):
                for j in range(self.sequence_length, i - 2, -1):
                    candidate = math_tools.binomial(j, i)
                    if candidate <= wip_number:
                        wip_perm_sequence.append(self.sequence[-(j + 1)])
                        wip_number -= candidate
                        break
                else:
                    raise RuntimeError
            result = tuple(wip_perm_sequence)
            assert len(result) == self.n_elements
            return self.perm_type(result, self)

        #######################################################################
        else:
            factoradic_number = math_tools.to_factoradic(
                i * math.factorial(self.n_unused_elements),
                n_digits_pad=self.sequence_length)
            if self.is_partial:
                factoradic_number = factoradic_number[:-self.n_unused_elements]
            unused_numbers = list(self.sequence)
            result = tuple(
                unused_numbers.pop(factoradic_digit)
                for factoradic_digit in factoradic_number)
            assert sequence_tools.get_length(result) == self.n_elements
            return self.perm_type(result, self)
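
    # A hedged walk-through of the factoradic branch directly above, using the
    # pure space `PermSpace(3)` and index `i = 2` (the class docstring's
    # `perm_space[2]` example). It assumes `to_factoradic` emits its most
    # significant digit first, which is what the popping loop relies on:
    #
    #   * `to_factoradic(2 * 0!, n_digits_pad=3)` -> digits [1, 0, 0]
    #     (place values 2!, 1!, 0!).
    #   * Popping those positions out of [0, 1, 2]: pop(1) -> 1, pop(0) -> 0,
    #     pop(0) -> 2, giving the perm (1, 0, 2).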

    enumerated_sequence = caching.CachedProperty(
        lambda self: tuple(enumerate(self.sequence)))

    n_unused_elements = caching.CachedProperty(
        lambda self: self.sequence_length - self.n_elements,
        '''In partial perm spaces, number of elements that aren't used.''')

    __iter__ = lambda self: (self[i]
                             for i in sequence_tools.CuteRange(self.length))
    _reduced = property(lambda self: (type(
        self), self.sequence, self.domain, tuple(sorted(self.fixed_map.items(
        ))), self.degrees, self.canonical_slice, self.perm_type))
    # (No need to include `n_elements` because it's implied by `domain`. No
    # need to include `is_combination` because it's implied by `type(self)`.)

    __eq__ = lambda self, other: (isinstance(other, PermSpace) and self.
                                  _reduced == other._reduced)
    __ne__ = lambda self, other: not (self == other)
    __hash__ = lambda self: hash(self._reduced)

    def index(self, perm):
        '''Get the index number of permutation `perm` in this space.'''
        if not isinstance(perm, collections.abc.Iterable):
            raise ValueError

        perm = sequence_tools.ensure_iterable_is_immutable_sequence(perm)

        perm_set = set(perm) if not isinstance(perm, UnrecurrentedPerm) \
                                                  else set(perm._perm_sequence)
        if not (perm_set <= set(self.sequence)):
            raise ValueError

        if sequence_tools.get_length(perm) != self.n_elements:
            raise ValueError

        if not isinstance(perm, self.perm_type):
            perm = self.perm_type(perm, self)

        if self.sequence != perm.nominal_perm_space.sequence:
            # (This also covers `self.rapplied != perm.rapplied`)
            raise ValueError
        if self.domain != perm.domain:
            # (This also covers `self.dapplied != perm.dapplied`)
            raise ValueError
        if self.is_degreed and (perm.degree not in self.degrees):
            raise ValueError

        # At this point we know the permutation contains the correct items, and
        # has the correct degree.
        if perm.is_dapplied:
            return self.undapplied.index(perm.undapplied)

        #######################################################################
        elif self.is_degreed:
            if perm.is_rapplied: return self.unrapplied.index(perm.unrapplied)
            wip_perm_number = 0
            wip_perm_sequence_dict = dict(self.fixed_map)
            unused_values = list(self.free_values)
            for i, value in enumerate(perm._perm_sequence):
                if i in self.fixed_indices:
                    continue
                unused_values.remove(value)
                lower_values = [j for j in unused_values if j < value]
                for lower_value in lower_values:
                    temp_fixed_map = dict(wip_perm_sequence_dict)
                    temp_fixed_map[i] = lower_value
                    wip_perm_number += PermSpace(
                        self.sequence_length,
                        degrees=self.degrees,
                        fixed_map=temp_fixed_map).length

                wip_perm_sequence_dict[self.domain[i]] = value

            perm_number = wip_perm_number

        #######################################################################
        elif self.is_recurrent:
            assert not self.is_degreed and not self.is_dapplied

            wip_perm_number = 0
            unused_values = list(self.sequence)
            reserved_values = list(self.fixed_map.values())
            perm_sequence_list = list(perm._perm_sequence)
            shit_set = set()
            for i, value in enumerate(perm._perm_sequence):
                if i in self.fixed_map:
                    if self.fixed_map[i] == value:
                        unused_values.remove(value)
                        reserved_values.remove(value)
                        continue
                    else:
                        raise ValueError
                lower_values = [
                    thing
                    for thing in nifty_collections.OrderedSet(unused_values)
                    if (thing not in reserved_values or unused_values.count(
                        thing) > reserved_values.count(thing))
                    and unused_values.index(thing) < unused_values.index(value)
                    and thing not in shit_set
                ]
                unused_values.remove(value)
                for lower_value in lower_values:
                    temp_fixed_map = dict(
                        enumerate(perm_sequence_list[:i] + [lower_value]))
                    temp_fixed_map.update(self.fixed_map)

                    candidate_sub_perm_space = \
                                             PermSpace._create_with_cut_prefix(
                        self.sequence,
                        n_elements=self.n_elements,
                        fixed_map=temp_fixed_map,
                        is_combination=self.is_combination,
                        shit_set=shit_set, perm_type=self.perm_type
                    )

                    wip_perm_number += candidate_sub_perm_space.length
                    if self.is_combination:
                        shit_set.add(lower_value)

            perm_number = wip_perm_number

        #######################################################################
        elif self.is_fixed:
            assert not self.is_degreed and not self.is_recurrent
            free_values_perm_sequence = []
            for i, perm_item in zip(self.domain, perm._perm_sequence):
                if i in self.fixed_map:
                    if self.fixed_map[i] != perm_item:
                        raise ValueError
                else:
                    free_values_perm_sequence.append(perm_item)

            # At this point we know all the items that should be fixed are
            # fixed.

            perm_number = self._free_values_unsliced_perm_space.index(
                free_values_perm_sequence)

        #######################################################################
        elif self.is_combination:
            if perm.is_rapplied:
                return self.unrapplied.index(perm.unrapplied)

            assert not self.is_rapplied and not self.is_recurrent and \
                   not self.is_dapplied and not self.is_fixed and \
                                                            not self.is_degreed

            if not cute_iter_tools.is_sorted(perm._perm_sequence):
                raise ValueError

            processed_perm_sequence = tuple(
                self.sequence_length - 1 - item
                for item in perm._perm_sequence[::-1])
            perm_number = self.unsliced.length - 1 - sum(
                (math_tools.binomial(item, i)
                 for i, item in enumerate(processed_perm_sequence, start=1)),
                0)

        #######################################################################
        else:
            factoradic_number = []
            unused_values = list(self.sequence)
            for i, value in enumerate(perm._perm_sequence):
                index_of_current_number = unused_values.index(value)
                factoradic_number.append(index_of_current_number)
                unused_values.remove(value)
            perm_number = math_tools.from_factoradic(
                factoradic_number +
                [0] * self.n_unused_elements) // math.factorial(
                    self.n_unused_elements)

        #######################################################################

        if perm_number not in self.canonical_slice:
            raise ValueError

        return perm_number - self.canonical_slice.start
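
    # Because `index` is the inverse of `__getitem__`, a quick hedged sanity
    # check (doctest-style comments only) is that the two round-trip:
    #
    #     >>> perm_space = PermSpace(4, n_elements=2)
    #     >>> all(perm_space.index(perm_space[k]) == k
    #     ...     for k in range(perm_space.length))
    #     True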

    @caching.CachedProperty
    def short_length_string(self):
        '''Short string describing size of space, e.g. "12!"'''
        if not self.is_recurrent and not self.is_partial and \
           not self.is_combination and not self.is_fixed and \
                                                            not self.is_sliced:
            assert self.length == math_tools.factorial(self.sequence_length)
            return misc.get_short_factorial_string(self.sequence_length)
        else:
            return str(self.length)

    __bool__ = lambda self: bool(self.length)

    _domain_set = caching.CachedProperty(
        lambda self: set(self.domain),
        '''The set of items in this space's domain.''')

    def __reduce__(self, *args, **kwargs):
        #######################################################################
        #                                                                     #
        self._just_fixed
        # (Forcing this cached property to be computed now, because the
        # `_get_just_fixed` lambda can't be pickled.)
        try:
            del self._get_just_fixed
        except AttributeError:
            pass
        #                                                                     #
        #######################################################################
        return super().__reduce__(*args, **kwargs)

    def coerce_perm(self, perm):
        '''Coerce `perm` to be a permutation of this space.'''
        return self.perm_type(perm, self)

    prefix = None

    @classmethod
    def _create_with_cut_prefix(cls,
                                sequence,
                                domain=None,
                                *,
                                n_elements=None,
                                fixed_map=None,
                                degrees=None,
                                is_combination=False,
                                slice_=None,
                                perm_type=None,
                                shit_set=frozenset()):
        '''
        Create a `PermSpace`, cutting a prefix off the start if possible.

        This is used internally in `PermSpace.__getitem__` and
        `PermSpace.index`. It's important to cut off the prefix, especially for
        `CombSpace` because in such cases it obviates the need for a
        `fixed_map`, and `CombSpace` doesn't work with `fixed_map`.
        '''
        if degrees is not None:
            raise NotImplementedError

        prefix = []
        fixed_map = dict(fixed_map)
        for i in sequence_tools.CuteRange(infinity):
            try:
                prefix.append(fixed_map[i])
            except KeyError:
                break
            else:
                del fixed_map[i]
                n_elements -= 1

        sequence = list(sequence)
        for item in prefix:
            if is_combination:
                sequence = sequence[sequence.index(item) + 1:]
            else:
                sequence[sequence.index(item)] = misc.MISSING_ELEMENT
                # This is more efficient than removing the element; we filter
                # these out later.

        shit_set = {misc.MISSING_ELEMENT} | shit_set
        sequence = [item for item in sequence if item not in shit_set]

        fixed_map = {
            key - len(prefix): value
            for key, value in fixed_map.items()
        }

        perm_space = cls(sequence,
                         n_elements=n_elements,
                         fixed_map=fixed_map,
                         is_combination=is_combination,
                         slice_=slice_,
                         perm_type=perm_type)
        perm_space.prefix = tuple(prefix)
        return perm_space
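
    # A hedged worked example for `_create_with_cut_prefix`: with
    # `sequence=range(5)`, `n_elements=4` and `fixed_map={0: 3, 1: 0, 3: 2}`,
    # the loop peels off the contiguous prefix [3, 0] (keys 0 and 1), leaving
    # `n_elements=2`, a sequence with those two items filtered out ([1, 2, 4])
    # and the remaining fixed entry reindexed to `{1: 2}` (key 3 minus the
    # prefix length). The resulting space gets `prefix == (3, 0)`.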