def g22g1(g2, capsize=None):
    '''
    Compute all graphs g1 with bfu.increment(g1) == g2 (the equivalence
    class of g2). Members are returned as g2num codes; set([-1]) signals
    a superclique and 0 is added if more than capsize members are found.
    '''
    if bfu.isSclique(g2):
        print 'Superclique - any SCC with GCD = 1 fits'
        return set([-1])

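    # (node, edge) pairs whose addition already conflicts with g2 on its own;
    # filled in below and skipped during the search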
    single_cache = {}

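    # depth-first search: realize each edge e of g2 in g1 through some node n
    # (add2edges adds the needed edges), recurse on the remaining edges of g2,
    # then undo the addition before trying the next n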
    @memo  # memoize the search
    def nodesearch(g, g2, edges, s):
        if edges:
            if bfu.increment(g) == g2:
                s.add(g2num(g))
                if capsize and len(s) > capsize:
                    raise ValueError('Too many elements')
                return g
            e = edges[0]
            for n in g2:

                if (n, e) in single_cache:
                    continue
                if not edge_increment_ok(e[0], n, e[1], g, g2):
                    continue

                mask = add2edges(g, e, n)
                r = nodesearch(g, g2, edges[1:], s)
                del2edges(g, e, n, mask)

        elif bfu.increment(g) == g2:
            s.add(g2num(g))
            if capsize and len(s) > capsize:
                raise ValueError('Too many elements in eqclass')
            return g

    # find all directed g1's not conflicting with g2
    n = len(g2)
    edges = gk.edgelist(g2)
    random.shuffle(edges)
    g = cloneempty(g2)

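    # prefill single_cache with (node, edge) choices whose addition alone
    # already violates g2, so nodesearch can skip them outright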
    for e in edges:
        for n in g2:

            mask = add2edges(g, e, n)
            if not gk.isedgesubset(bfu.increment(g), g2):
                single_cache[(n, e)] = False
            del2edges(g, e, n, mask)

    s = set()
    try:
        nodesearch(g, g2, edges, s)
    except ValueError:
        s.add(0)
    return s


def vg22g1(g2, capsize=None):
    '''
    Compute all graphs g1 with bfu.increment(g1) == g2 (the equivalence
    class of g2), searching over the edge structures from checkable(g2)
    rather than over raw edges. Same return conventions as g22g1.
    '''
    if bfu.isSclique(g2):
        print 'Superclique - any SCC with GCD = 1 fits'
        return set([-1])

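    # (adder, remover) pairs for each edge-structure type produced by
    # checkable(g2), and the corresponding ok2... feasibility checks; both
    # lists are indexed by edge_function_idx(key)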
    f = [(add2edges, del2edges),
         (addavedge, delavedge),
         (addacedge, delacedge),
         (addaAedge, delaAedge),
         (addapath, delapath)]
    c = [ok2add2edges,
         ok2addavedge,
         ok2addacedge,
         ok2addaAedge,
         ok2addapath]

    @memo2  # memoize the search
    def nodesearch(g, g2, edges, s):
        if edges:
            # key, checklist = edges.popitem()
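            # pick a random remaining edge structure of g2 and try every
            # candidate node assignment for it; the entry is restored to
            # edges once the loop below finishes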
            key = random.choice(edges.keys())
            checklist = edges.pop(key)
            adder, remover = f[edge_function_idx(key)]
            checks_ok = c[edge_function_idx(key)]
            for n in checklist:
                mask = adder(g, key, n)
                if gk.isedgesubset(bfu.increment(g), g2):
                    r = nodesearch(g, g2, edges, s)
                    if r and bfu.increment(r) == g2:
                        s.add(g2num(r))
                        if capsize and len(s) > capsize:
                            raise ValueError('Too many elements')
                remover(g, key, n, mask)
            edges[key] = checklist
        else:
            return g

    # find all directed g1's not conflicting with g2
    n = len(g2)
    chlist = checkable(g2)
    g = cloneempty(g2)

    s = set()
    try:
        nodesearch(g, g2, chlist, s)
    except ValueError:
        s.add(0)
    return s


def edge_backtrack2g1_directed(g2, capsize=None):
    '''
    Compute all graphs g1 whose one-step undersampling matches g2 on its
    directed edges (compared via gk.edgelist). Same return conventions
    as g22g1.
    '''
    if bfu.isSclique(g2):
        print 'Superclique - any SCC with GCD = 1 fits'
        return set([-1])

    single_cache = {}

    def edgeset(g):
        return set(gk.edgelist(g))

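    # depth-first search as in g22g1, except a candidate r is accepted when
    # the directed edges of bfu.increment(r) equal those of g2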
    @memo  # memoize the search
    def nodesearch(g, g2, edges, s):
        if edges:
            e = edges.pop()
            ln = [n for n in g2]
            for n in ln:
                if (n, e) in single_cache:
                    continue
                mask = add2edges(g, e, n)
                if gk.isedgesubset(bfu.increment(g), g2):
                    r = nodesearch(g, g2, edges, s)
                    if r and edgeset(bfu.increment(r)) == edgeset(g2):
                        s.add(g2num(r))
                        if capsize and len(s) > capsize:
                            raise ValueError('Too many elements in eqclass')
                del2edges(g, e, n, mask)
            edges.append(e)
        else:
            return g
    # find all directed g1's not conflicting with g2
    n = len(g2)
    edges = gk.edgelist(g2)
    random.shuffle(edges)
    g = cloneempty(g2)

    for e in edges:
        for n in g2:
            mask = add2edges(g, e, n)
            if not gk.isedgesubset(bfu.increment(g), g2):
                single_cache[(n, e)] = False
            del2edges(g, e, n, mask)

    s = set()
    try:
        nodesearch(g, g2, edges, s)
    except ValueError:
        s.add(0)
    return s


def backtrack_more2(g2, rate=2, capsize=None):
    '''
    Compute all graphs g1 with bfu.undersample(g1, rate) == g2, i.e. the
    equivalence class of g2 at the given undersampling rate, pruning the
    search with conformance tables and predictive checks. Same return
    conventions as g22g1.
    '''
    if bfu.isSclique(g2):
        print 'Superclique - any SCC with GCD = 1 fits'
        return set([-1])

    f = [(addaVpath, delaVpath, maskaVpath)]
    c = [ok2addaVpath]

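    # keep only the candidates in pool that still pass the feasibility check
    # for the upcoming key, given the current partial graph g; used to prune
    # the next level of the search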
    def predictive_check(g, g2, pool, checks_ok, key):
        s = set()
        for u in pool:
            if not checks_ok(key, u, g, g2, rate=rate):
                continue
            s.add(u)
        return s

    @memo2  # memoize the search
    def nodesearch(g, g2, order, inlist, s, cds, pool, pc):
        if order:
            if bfu.undersample(g, rate) == g2:
                s.add(g2num(g))
                if capsize and len(s) > capsize:
                    raise ValueError('Too many elements')
                s.update(supergraphs_in_eq(g, g2, rate=rate))
                return g

            key = order[0]
            if pc:
                tocheck = [x for x in pc if x in cds[len(inlist) - 1][inlist[0]]]
            else:
                tocheck = cds[len(inlist) - 1][inlist[0]]

            if len(order) > 1:
                kk = order[1]
                pc = predictive_check(g, g2, pool[len(inlist)],
                                      c[edge_function_idx(kk)], kk)
            else:
                pc = set()

            adder, remover, masker = f[edge_function_idx(key)]
            checks_ok = c[edge_function_idx(key)]

            for n in tocheck:
                if not checks_ok(key, n, g, g2, rate=rate):
                    continue
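                # masker reports which edges of this assignment are already
                # present in g: if all are (nonzero product), recurse without
                # modifying g, otherwise add them, recurse, and undo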
                masked = np.prod(masker(g, key, n))
                if masked:
                    nodesearch(g, g2, order[1:], [n] + inlist, s, cds, pool, pc)
                else:
                    mask = adder(g, key, n)
                    nodesearch(g, g2, order[1:], [n] + inlist, s, cds, pool, pc)
                    remover(g, key, n, mask)

        elif bfu.undersample(g, rate) == g2:
            s.add(g2num(g))
            if capsize and len(s) > capsize:
                raise ValueError('Too many elements')
            return g

    # find all directed g1's not conflicting with g2

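    # every edge of g2 gets the same candidate list: all rate-length node
    # permutations plus the (n, n) pairs; conformanceDS then narrows these
    # per edge and fixes a search order over them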
    startTime = int(round(time.time() * 1000))
    ln = [x for x in itertools.permutations(g2.keys(), rate)] + \
         [(n, n) for n in g2]
    gg = {x: ln for x in gk.edgelist(g2)}
    keys = gg.keys()
    cds, order, idx = conformanceDS(g2, gg, gg.keys(), f=f, c=c)
    endTime = int(round(time.time() * 1000))
    print "precomputed in {:10} seconds".format(round((endTime - startTime) / 1000., 3))
    if 0 in [len(x) for x in order]:
        return set()
    g = cloneempty(g2)

    s = set()
    try:
        nodesearch(g, g2, [keys[i] for i in idx], ['0'], s, cds, order, set())
    except ValueError, e:
        print e
        s.add(0)
    return s


def v2g22g1(g2, capsize=None, verbose=True):
    '''
    Compute all graphs g1 with bfu.increment(g1) == g2 (the equivalence
    class of g2), pruning the search with conformance tables and
    predictive checks. Same return conventions as g22g1.
    '''
    if bfu.isSclique(g2):
        print 'Superclique - any SCC with GCD = 1 fits'
        return set([-1])

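    # (adder, remover, masker) triples for each edge-structure type produced
    # by checkable(g2), and the corresponding ok2... feasibility checks; both
    # lists are indexed by edge_function_idx(key)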
    f = [(add2edges, del2edges, mask2edges),
         (addavedge, delavedge, maskavedge),
         (addacedge, delacedge, maskaCedge),
         (addaAedge, delaAedge, maskaAedge),
         (addapath, delapath, maskapath)]
    c = [ok2add2edges,
         ok2addavedge,
         ok2addacedge,
         ok2addaAedge,
         ok2addapath]

    def predictive_check(g, g2, pool, checks_ok, key):
        s = set()
        for u in pool:
            if not checks_ok(key, u, g, g2):
                continue
            s.add(u)
        return s

    @memo2  # memoize the search
    def nodesearch(g, g2, order, inlist, s, cds, pool, pc):
        if order:
            if bfu.increment(g) == g2:
                s.add(g2num(g))
                if capsize and len(s) > capsize:
                    raise ValueError('Too many elements')
                s.update(supergraphs_in_eq(g, g2))
                return g

            key = order[0]
            if pc:
                tocheck = [x for x in pc if x in cds[len(inlist) - 1][inlist[0]]]
            else:
                tocheck = cds[len(inlist) - 1][inlist[0]]

            if len(order) > 1:
                kk = order[1]
                pc = predictive_check(g, g2, pool[len(inlist)],
                                      c[edge_function_idx(kk)], kk)
            else:
                pc = set()

            adder, remover, masker = f[edge_function_idx(key)]
            checks_ok = c[edge_function_idx(key)]

            for n in tocheck:
                if not checks_ok(key, n, g, g2):
                    continue
                masked = np.prod(masker(g, key, n))
                if masked:
                    nodesearch(g, g2, order[1:], [n] + inlist, s, cds, pool, pc)
                else:
                    mask = adder(g, key, n)
                    nodesearch(g, g2, order[1:], [n] + inlist, s, cds, pool, pc)
                    remover(g, key, n, mask)

        elif bfu.increment(g) == g2:
            s.add(g2num(g))
            if capsize and len(s) > capsize:
                raise ValueError('Too many elements')
            return g

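    # alternative depth-first search without predictive checking; kept for
    # reference (its call below is commented out)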
    @memo2  # memoize the search
    def nodesearch0(g, g2, order, inlist, s, cds):

        if order:
            key = order.pop(0)
            tocheck = cds[len(inlist) - 1][inlist[0]]

            adder, remover, masker = f[edge_function_idx(key)]
            checks_ok = c[edge_function_idx(key)]

            if len(tocheck) > 1:
                for n in tocheck:
                    if not checks_ok(key, n, g, g2):
                        continue
                    mask = masker(g, key, n)
                    if not np.prod(mask):
                        mask = adder(g, key, n)
                        r = nodesearch0(g, g2, order, [n] + inlist, s, cds)
                        if r and bfu.increment(r) == g2:
                            s.add(g2num(r))
                            if capsize and len(s) > capsize:
                                raise ValueError('Too many elements')
                        remover(g, key, n, mask)
                    else:
                        r = nodesearch0(g, g2, order, [n] + inlist, s, cds)
                        if r and bfu.increment(r) == g2:
                            s.add(g2num(r))
                            if capsize and len(s) > capsize:
                                raise ValueError('Too many elements')
            elif tocheck:
                (n,) = tocheck
                mask = adder(g, key, n)
                r = nodesearch0(g, g2, order, [n] + inlist, s, cds)
                if r and bfu.increment(r) == g2:
                    s.add(g2num(r))
                    if capsize and len(s) > capsize:
                        raise ValueError('Too many elements')
                remover(g, key, n, mask)

            order.insert(0, key)

        else:
            return g

    # find all directed g1's not conflicting with g2

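    # precompute the search structures: checkable(g2) gives the candidate
    # node assignments for each edge structure of g2, sorted here by how many
    # candidates each has; conformanceDS builds the conformance tables (cds)
    # that restrict the choices at each level given the previously chosen node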
    startTime = int(round(time.time() * 1000))
    gg = checkable(g2)

    idx = np.argsort([len(gg[x]) for x in gg.keys()])
    keys = [gg.keys()[i] for i in idx]

    cds, order, idx = conformanceDS(g2, gg, keys)
    endTime = int(round(time.time() * 1000))
    if verbose:
        print "precomputed in {:10} seconds".format(round((endTime - startTime) / 1000., 3))
    if 0 in [len(x) for x in order]:
        return set()
    g = cloneempty(g2)

    s = set()
    try:
        nodesearch(g, g2, [keys[i] for i in idx], ['0'], s, cds, order, set())
        # nodesearch0(g, g2, [gg.keys()[i] for i in idx], ['0'], s, cds)
    except ValueError, e:
        print e
        s.add(0)
    return s


def backtrack_more(g2, rate=1, capsize=None):
    '''
    Compute all graphs g1 with bfu.undersample(g1, rate) == g2, i.e. the
    equivalence class of g2 at the given undersampling rate, by direct
    depth-first search over the edges of g2. Same return conventions
    as g22g1.
    '''
    if bfu.isSclique(g2):
        print 'Superclique - any SCC with GCD = 1 fits'
        return set([-1])

    single_cache = {}
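    # candidate assignments for each edge of g2: single nodes when rate == 1,
    # otherwise every rate-length node sequence (with repetition), generated
    # as permutations of each combination_with_replacement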
    if rate == 1:
        ln = [n for n in g2]
    else:
        ln = []
        for x in itertools.combinations_with_replacement(g2.keys(), rate):
            ln.extend(itertools.permutations(x, rate))
        ln = set(ln)

    @memo  # memoize the search
    def nodesearch(g, g2, edges, s):
        if edges:
            if bfu.undersample(g, rate) == g2:
                s.add(g2num(g))
                if capsize and len(s) > capsize:
                    raise ValueError('Too many elements')
                return g
            e = edges[0]
            for n in ln:

                if (n, e) in single_cache:
                    continue
                if not ok2addaVpath(e, n, g, g2, rate=rate):
                    continue

                mask = addaVpath(g, e, n)
                r = nodesearch(g, g2, edges[1:], s)
                delaVpath(g, e, n, mask)

        elif bfu.undersample(g, rate) == g2:
            s.add(g2num(g))
            if capsize and len(s) > capsize:
                raise ValueError('Too many elements in eqclass')
            return g

    # find all directed g1's not conflicting with g2
    n = len(g2)
    edges = gk.edgelist(g2)
    random.shuffle(edges)
    g = cloneempty(g2)

    for e in edges:
        for n in ln:

            mask = addaVpath(g, e, n)
            if not gk.isedgesubset(bfu.undersample(g, rate), g2):
                single_cache[(n, e)] = False
            delaVpath(g, e, n, mask)

    s = set()
    try:
        nodesearch(g, g2, edges, s)
    except ValueError:
        s.add(0)
    return s