Example 1
class ClusterAlgebra(Parent):
    r"""
    INPUT:

    - ``data`` -- some data defining a cluster algebra; any input accepted by
      ``ClusterQuiver``.

    - ``scalars`` -- ring (default: ``ZZ``); the ring of scalars over which the
      cluster algebra is defined.

    - ``cluster_variables_prefix`` -- string (default: ``'x'``).

    - ``cluster_variables_names`` -- a list of strings. Supersedes
      ``cluster_variables_prefix``.

    - ``coefficients_prefix`` -- string (default: ``'y'``).

    - ``coefficients_names`` -- a list of strings. Supersedes
      ``coefficients_prefix``.

    - ``principal_coefficients`` -- bool (default: ``False``). Supersedes any
      coefficients defined by ``data``.
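
    EXAMPLES:

    A minimal construction sketch (illustrative only; ``['A', 2]`` is assumed to
    be one of the quiver inputs accepted by ``ClusterQuiver``)::

        sage: A = ClusterAlgebra(['A', 2])
        sage: A
        Cluster Algebra of rank 2
        sage: A.rk
        2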
    """

    Element = ClusterAlgebraElement

    def __init__(self, data, **kwargs):
        r"""
        See :class:`ClusterAlgebra` for full documentation.
        """
        # TODO: right now we use ClusterQuiver to parse input data. It looks like a good idea but we should make sure it is.
        # TODO: in base replace LaurentPolynomialRing with the group algebra of a tropical semifield once it is implemented

        # Temporary variables
        Q = ClusterQuiver(data)
        n = Q.n()
        B0 = Q.b_matrix()[:n,:]
        I = identity_matrix(n)
        if kwargs.get('principal_coefficients', False):
            M0 = I
        else:
            M0 = Q.b_matrix()[n:,:]
        m = M0.nrows()

        # Ambient space for F-polynomials
        # NOTE: for speed purposes we need to have QQ here instead of the more natural ZZ. The reason is that _mutated_F is faster if we do not cast the result to polynomials but then we get "rational" coefficients
        self._U = PolynomialRing(QQ, ['u%s'%i for i in xrange(n)])

        # Storage for computed data
        self._path_dict = dict([ (v, []) for v in map(tuple,I.columns()) ])
        self._F_poly_dict = dict([ (v, self._U(1)) for v in self._path_dict ])

        # Determine the names of the initial cluster variables
        if 'cluster_variables_names' in kwargs:
            if len(kwargs['cluster_variables_names']) == n:
                variables = kwargs['cluster_variables_names']
                cluster_variables_prefix = 'dummy'  # this is just to avoid checking again if cluster_variables_prefix is defined. Make this better before going public
            else:
                raise ValueError("cluster_variables_names should be a list of %d valid variable names" % n)
        else:
            cluster_variables_prefix = kwargs.get('cluster_variables_prefix', 'x')
            variables = [cluster_variables_prefix+'%s'%i for i in xrange(n)]
            # why not just put str(i) instead of '%s'%i?

        # Determine scalars
        scalars = kwargs.get('scalars', ZZ)

        # Determine coefficients and setup self._base
        if m>0:
            if 'coefficients_names' in kwargs:
                if len(kwargs['coefficients_names']) == m:
                    coefficients = kwargs['coefficients_names']
                else:
                    raise ValueError("coefficients_names should be a list of %d valid variable names"%m)
            else:
                coefficients_prefix = kwargs.get('coefficients_prefix', 'y')
                if coefficients_prefix == cluster_variables_prefix:
                    offset = n
                else:
                    offset = 0
                coefficients = [coefficients_prefix+'%s'%i for i in xrange(offset,m+offset)]
            # TODO: (***) base should eventually become the group algebra of a tropical semifield
            base = LaurentPolynomialRing(scalars, coefficients)
        else:
            base = scalars
            # TODO: next line should be removed when (***) is implemented
            coefficients = []

        # setup Parent and ambient
        # TODO: (***) _ambient should eventually be replaced with LaurentPolynomialRing(base, variables)
        self._ambient = LaurentPolynomialRing(scalars, variables+coefficients)
        self._ambient_field = self._ambient.fraction_field()
        # TODO: understand why using Algebras() instead of Rings() makes A(1) complain of missing _lmul_
        Parent.__init__(self, base=base, category=Rings(scalars).Commutative().Subobjects(), names=variables+coefficients)

        # Data to compute cluster variables using separation of additions
        # BUG WORKAROUND: if your sage installation does not have trac:`19538` merged uncomment the following line and comment the next
        self._y = dict([ (self._U.gen(j), prod([self._ambient.gen(n+i)**M0[i,j] for i in xrange(m)])) for j in xrange(n)])
        #self._y = dict([ (self._U.gen(j), prod([self._base.gen(i)**M0[i,j] for i in xrange(m)])) for j in xrange(n)])
        self._yhat = dict([ (self._U.gen(j), prod([self._ambient.gen(i)**B0[i,j] for i in xrange(n)])*self._y[self._U.gen(j)]) for j in xrange(n)])
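        # In formulas (descriptive note): y_j    = prod_i c_i^{M0[i, j]},  where c_i is the i-th coefficient generator
        #                                 yhat_j = y_j * prod_i x_i^{B0[i, j]}
        # i.e. the "hatted" coefficients used in the separation of additions.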

        # Do we have principal coefficients?
        self._is_principal = (M0 == I)

        # Store initial data
        self._B0 = copy(B0)
        self._n = n
        self.reset_current_seed()

        # Internal data for exploring the exchange graph
        self.reset_exploring_iterator()

        # Internal data to store exchange relations
        # This is a dictionary indexed by a frozen set of two g-vectors (the g-vectors of the exchanged variables)
        # Exchange relations are, for the moment, a frozen set of precisely two entries (one for each term in the exchange relation's RHS).
        # Each of them contains two things
        # 1) a list of pairs (g-vector, exponent) one for each cluster variable appearing in the term
        # 2) the coefficient part of the term
        # TODO: possibly refactor this producing a class ExchangeRelation with some pretty printing feature
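        # Hypothetical sketch of one entry, following the description above (the exact
        # layout may differ; nothing in this class populates the dictionary directly):
        #   self._exchange_relations[frozenset([g1, g2])] = frozenset([
        #       (((g3, 1), (g4, 2)), coeff_plus),   # one term: (g-vector, exponent) pairs and its coefficient
        #       (((g5, 1),), coeff_minus),          # the other term
        #   ])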
        self._exchange_relations = dict()
        self._store_exchange_relations = bool(kwargs.get('store_exchange_relations', False))

        # Add methods that are defined only for special cases
        if n == 2:
            self.greedy_element = MethodType(greedy_element, self, self.__class__)
            self.greedy_coefficient = MethodType(greedy_coefficient, self, self.__class__)
            self.theta_basis_element = MethodType(theta_basis_element, self, self.__class__)

        # TODO: understand if we need this
        #self._populate_coercion_lists_()

    def __copy__(self):
        other = type(self).__new__(type(self))
        other._U = self._U
        other._path_dict = copy(self._path_dict)
        other._F_poly_dict = copy(self._F_poly_dict)
        other._ambient = self._ambient
        other._ambient_field = self._ambient_field
        other._y = copy(self._y)
        other._yhat = copy(self._yhat)
        other._is_principal = self._is_principal
        other._B0 = copy(self._B0)
        other._n = self._n
        # We probably need to put n=2 initializations here also
        # TODO: we may want to use __init__ to make the initialization somewhat easier (say to enable special cases) This might require a better written __init__
        return other

    def __eq__(self, other):
        return type(self) == type(other) and self._B0 == other._B0 and self._yhat == other._yhat

    # enable standard coercions: everything that is in the base can be coerced
    def _coerce_map_from_(self, other):
        return self.base().has_coerce_map_from(other)

    def _repr_(self):
        return "Cluster Algebra of rank %s"%self.rk

    def _an_element_(self):
        return self.current_seed().cluster_variable(0)

    @property
    def rk(self):
        r"""
        The rank of ``self``, i.e. the number of cluster variables in any seed of
        ``self``.
        """
        return self._n

    def current_seed(self):
        r"""
        The current seed of ``self``.
        """
        return self._seed

    def set_current_seed(self, seed):
        r"""
        Set the current seed of ``self`` to ``seed``, provided ``seed`` is a seed of this cluster algebra.
        """
        if self.contains_seed(seed):
            self._seed = seed
        else:
            raise ValueError("This is not a seed in this cluster algebra.")

    def contains_seed(self, seed):
        computed_sd = self.initial_seed
        computed_sd.mutate(seed._path, mutating_F=False)
        return computed_sd == seed

    def reset_current_seed(self):
        r"""
        Reset the current seed to the initial one.
        """
        self._seed = self.initial_seed

    @property
    def initial_seed(self):
        r"""
        Return the initial seed of ``self``.
        """
        n = self.rk
        I = identity_matrix(n)
        return ClusterAlgebraSeed(self._B0, I, I, self)

    @property
    def initial_b_matrix(self):
        r"""
        Return a copy of the initial exchange matrix ``self._B0``.
        """
        return copy(self._B0)

    def g_vectors_so_far(self):
        r"""
        Return the g-vectors of cluster variables encountered so far.
        """
        return self._path_dict.keys()

    def F_polynomial(self, g_vector):
        g_vector = tuple(g_vector)
        try:
            return self._F_poly_dict[g_vector]
        except KeyError:
            # If the path is known, should this method perform that sequence of mutations to compute the desired F-polynomial?
            # Yes, perhaps with a prompt first, something like:
            #comp = raw_input("This F-polynomial has not been computed yet.  It can be found using %s mutations.  Continue? (y or n):"%str(directions.__len__()))
            #if comp == 'y':
            #    ...compute the F-polynomial...
            if g_vector in self._path_dict:
                raise ValueError("The F-polynomial with g-vector %s has not been computed yet.  You probably explored the exchange tree with compute_F=False.  You can compute this F-polynomial by mutating from the initial seed along the sequence %s."%(str(g_vector),str(self._path_dict[g_vector])))
            else:
                raise ValueError("The F-polynomial with g-vector %s has not been computed yet."%str(g_vector))

    @cached_method(key=lambda a, b: tuple(b))
    def cluster_variable(self, g_vector):
        g_vector = tuple(g_vector)
        if g_vector not in self.g_vectors_so_far():
            # Should we let the self.F_polynomial below handle raising the exception?
            raise ValueError("This Cluster Variable has not been computed yet.")
        F_std = self.F_polynomial(g_vector).subs(self._yhat)
        g_mon = prod([self.ambient().gen(i)**g_vector[i] for i in xrange(self.rk)])
        # LaurentPolynomial_mpair does not know how to compute denominators, we need to lift to its fraction field
        F_trop = self.ambient_field()(self.F_polynomial(g_vector).subs(self._y)).denominator()
        return self.retract(g_mon*F_std*F_trop)

    def find_cluster_variable(self, g_vector, depth=infinity):
        r"""
        Return the shortest mutation path to obtain the cluster variable with
        g-vector ``g_vector`` from the initial seed.

        ``depth``: maximum mutation depth to explore before giving up.

        WARNING: if this method is interrupted then ``self._sd_iter`` is left in
        an unusable state. To use this method again it is then necessary to
        reset ``self._sd_iter`` via ``self.reset_exploring_iterator()``.
        """
        g_vector = tuple(g_vector)
        mutation_counter = 0
        while g_vector not in self.g_vectors_so_far() and self._explored_depth <= depth:
            try:
                seed = next(self._sd_iter)
                self._explored_depth = seed.depth()
            except StopIteration:
                raise ValueError("Could not find a cluster variable with g-vector %s up to mutation depth %s after performing %s mutations."%(str(g_vector),str(depth),str(mutation_counter)))

            # If there was a way to have the seeds iterator continue after the depth_counter reaches depth,
            # the following code would allow the user to continue searching the exchange graph
            #cont = raw_input("Could not find a cluster variable with g-vector %s up to mutation depth %s."%(str(g_vector),str(depth))+"  Continue searching? (y or n):")
            #if cont == 'y':
            #    new_depth = 0
            #    while new_depth <= depth:
            #        new_depth = raw_input("Please enter a new mutation search depth greater than %s:"%str(depth))
            #    seeds.send(new_depth)
            #else:
            #    raise ValueError("Could not find a cluster variable with g-vector %s after %s mutations."%(str(g_vector),str(mutation_counter)))

            mutation_counter += 1
        return copy(self._path_dict[g_vector])

    def ambient(self):
        return self._ambient

    def ambient_field(self):
        return self._ambient_field

    def lift_to_field(self, x):
        return self.ambient_field()(1)*x.value

    def lift(self, x):
        r"""
        Return ``x`` as an element of ``self._ambient``.
        """
        return x.value

    def retract(self, x):
        return self(x)

    def gens(self):
        r"""
        Return the generators of the ambient ring (see :meth:`ambient`) retracted into ``self``.
        """
        return map(self.retract, self.ambient().gens())

    def seeds(self, depth=infinity, mutating_F=True, from_current_seed=False):
        r"""
        Return an iterator producing all seeds of ``self`` up to distance
        ``depth`` from ``self.initial_seed`` or ``self.current_seed``.

        If ``mutating_F`` is set to ``False`` then F-polynomials are not computed along the way.
        """
        if from_current_seed:
            seed = self.current_seed()
        else:
            seed = self.initial_seed

        yield seed
        depth_counter = 0
        n = self.rk
        cl = frozenset(seed.g_vectors())
        clusters = {}
        clusters[cl] = [ seed, range(n) ]
        gets_bigger = True
        while gets_bigger and depth_counter < depth:
            gets_bigger = False
            keys = clusters.keys()
            for key in keys:
                sd, directions = clusters[key]
                while directions:
                    i = directions.pop()
                    new_sd = sd.mutate(i, inplace=False, mutating_F=mutating_F)
                    new_cl = frozenset(new_sd.g_vectors())
                    if new_cl in clusters:
                        j = map(tuple,clusters[new_cl][0].g_vectors()).index(new_sd.g_vector(i))
                        try:
                            clusters[new_cl][1].remove(j)
                        except ValueError:
                            pass
                    else:
                        gets_bigger = True
                        # double-check this way of producing directions for the new seed: it is taken almost verbatim from ClusterSeed
                        new_directions = [ j for j in xrange(n) if j > i or new_sd.b_matrix()[j,i] != 0 ]
                        clusters[new_cl] = [ new_sd, new_directions ]
                        # Use this if we want to have the user pass info to the
                        # iterator
                        #new_depth = yield new_sd
                        #if new_depth > depth:
                        #    depth = new_depth
                        yield new_sd
            depth_counter += 1

    def reset_exploring_iterator(self, mutating_F=True):
        self._sd_iter = self.seeds(mutating_F=mutating_F)
        self._explored_depth = 0

    @mutation_parse
    def mutate_initial(self, k):
        r"""
        Mutate ``self`` in direction `k` at the initial cluster.

        INPUT:

        - ``k`` -- an integer between 0 and ``self.rk - 1``
        """
        n = self.rk

        if k not in xrange(n):
            raise ValueError('Cannot mutate in direction %s, please try a value between 0 and %s.'%(str(k),str(n-1)))

        #modify self._path_dict using Nakanishi-Zelevinsky (4.1) and self._F_poly_dict using CA-IV (6.21)
        new_path_dict = dict()
        new_F_dict = dict()
        new_path_dict[tuple(identity_matrix(n).column(k))] = []
        new_F_dict[tuple(identity_matrix(n).column(k))] = self._U(1)

        poly_ring = PolynomialRing(ZZ,'u')
        h_subs_tuple = tuple([poly_ring.gen(0)**(-1) if j==k else poly_ring.gen(0)**max(-self._B0[k][j],0) for j in xrange(n)])
        F_subs_tuple = tuple([self._U.gen(k)**(-1) if j==k else self._U.gen(j)*self._U.gen(k)**max(-self._B0[k][j],0)*(1+self._U.gen(k))**(self._B0[k][j]) for j in xrange(n)])

        for g_vect in self._path_dict:
            #compute new path
            path = self._path_dict[g_vect]
            if g_vect == tuple(identity_matrix(n).column(k)):
                new_path = [k]
            elif path != []:
                if path[0] != k:
                    new_path = [k] + path
                else:
                    new_path = path[1:]
            else:
                new_path = []

            #compute new g-vector
            new_g_vect = vector(g_vect) - 2*g_vect[k]*identity_matrix(n).column(k)
            for i in xrange(n):
                new_g_vect += max(sign(g_vect[k])*self._B0[i,k],0)*g_vect[k]*identity_matrix(n).column(i)
            new_path_dict[tuple(new_g_vect)] = new_path

            #compute new F-polynomial
            h = 0
            trop = tropical_evaluation(self._F_poly_dict[g_vect](h_subs_tuple))
            if trop != 1:
                h = trop.denominator().exponents()[0]-trop.numerator().exponents()[0]
            new_F_dict[tuple(new_g_vect)] = self._F_poly_dict[g_vect](F_subs_tuple)*self._U.gen(k)**h*(self._U.gen(k)+1)**g_vect[k]

        self._path_dict = new_path_dict
        self._F_poly_dict = new_F_dict

        self._B0.mutate(k)

    def explore_to_depth(self, depth):
        while self._explored_depth <= depth:
            try:
                seed = next(self._sd_iter)
                self._explored_depth = seed.depth()
            except StopIteration:
                break

    def cluster_fan(self, depth=infinity):
        from sage.geometry.cone import Cone
        from sage.geometry.fan import Fan
        seeds = self.seeds(depth=depth, mutating_F=False)
        cones = map(lambda s: Cone(s.g_vectors()), seeds)
        return Fan(cones)

    # DESIDERATA. Some of these are probably unrealistic
    def upper_cluster_algebra(self):
        pass

    def upper_bound(self):
        pass

    def lower_bound(self):
        pass
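
A minimal usage sketch (hypothetical session; it assumes the supporting objects referenced above, such as ClusterQuiver, ClusterAlgebraSeed and ClusterAlgebraElement, are importable from the surrounding module):

A = ClusterAlgebra(['A', 2])        # build from any input accepted by ClusterQuiver
A.explore_to_depth(4)               # walk the exchange graph up to mutation depth 4
for g in A.g_vectors_so_far():      # g-vectors computed so far ...
    print g, A.F_polynomial(g)      # ... together with their F-polynomials
A.cluster_fan()                     # fan spanned by the g-vectors of the computed seeds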