Example No. 1
    def cut(self):
        context = self._get_context()
        num_nodes = self.num_of_nodes()
        node_weights = [1] * num_nodes
        k = 2
        # Debug messages
        # print("Creating hypergraph:")
        # print(f"\t num_nodes: {num_nodes}")
        # print(f"\t num_of_hyperedges: {self.num_of_hyperedges()}")
        # print(f"\t net_indices: {self.net_indices}")
        # print(f"\t nets: {self.nets}")
        # print(f"\t net_weights: {self.net_weights}")
        # print(f"\t node_weights: {node_weights}")
        hypergraph = kahypar.Hypergraph(num_nodes, self.num_of_hyperedges(),
                                        self.net_indices, self.nets, k,
                                        self.net_weights, node_weights)
        kahypar.partition(hypergraph, context)
        left_partition = [
            node for node in range(num_nodes) if hypergraph.blockID(node) == 0
        ]
        right_partition = [
            node for node in range(num_nodes) if hypergraph.blockID(node) == 1
        ]

        # print(left_partition)
        # print(right_partition)

        # It's not clear what triggers this, but a partition can come back
        # empty. In that case, tighten the imbalance restriction and retry.
        if len(left_partition) == 0 or len(right_partition) == 0:
            self.set_epsilon(self.epsilon / 4)
            return self.cut()

        self.epsilon = self.startEpsilon
        return set(self._to_lnum(left_partition)), set(
            self._to_lnum(right_partition))
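
A self-contained variant of the same retry idea, as a minimal sketch: it assumes unit node weights, uses only the kahypar calls that appear throughout these examples, and takes a placeholder config_path that you would point at a real .ini profile.

import kahypar

def bisect_with_retry(num_nodes, hyperedge_indices, hyperedges, net_weights,
                      config_path, epsilon=0.03, min_epsilon=1e-4):
    # Repeatedly bisect, tightening the imbalance bound whenever one side
    # of the partition comes back empty (the same strategy as above).
    while epsilon > min_epsilon:
        hypergraph = kahypar.Hypergraph(num_nodes, len(hyperedge_indices) - 1,
                                        hyperedge_indices, hyperedges, 2,
                                        net_weights, [1] * num_nodes)
        context = kahypar.Context()
        context.loadINIconfiguration(config_path)
        context.setK(2)
        context.setEpsilon(epsilon)
        context.suppressOutput(True)
        kahypar.partition(hypergraph, context)

        blocks = [hypergraph.blockID(v) for v in range(num_nodes)]
        left = [v for v, b in enumerate(blocks) if b == 0]
        right = [v for v, b in enumerate(blocks) if b == 1]
        if left and right:
            return left, right
        epsilon /= 4  # tighten the imbalance restriction and retry
    raise RuntimeError("no non-trivial bisection found")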
Example No. 2
def hgpa(labels, nclass, random_state):
    """HyperGraph Partitioning Algorithm (HGPA).

    Parameters
    ----------
    labels: Labels generated by multiple clustering algorithms such as K-Means.
    nclass: Number of classes in a consensus clustering label.
    random_state: Used for reproducible results.

    Returns
    -------
    label_ce: Consensus clustering label obtained from HGPA.
    """
    # Create hypergraph for kahypar
    H = create_hypergraph(labels)
    n_nodes, n_nets = H.shape

    node_weights = [1] * n_nodes
    edge_weights = [1] * n_nets

    # Convert the transposed incidence matrix row by row into the CSR-style
    # index/pin arrays that kahypar.Hypergraph expects.
    hyperedge_indices = [0]
    hyperedges = []
    HT = H.T
    for i in range(n_nets):
        h = HT.getrow(i)
        _, idx_col = h.nonzero()
        hyperedges += list(idx_col)
        hyperedge_indices.append(len(hyperedges))

    hypergraph = kahypar.Hypergraph(n_nodes, n_nets, hyperedge_indices,
                                    hyperedges, nclass, edge_weights,
                                    node_weights)

    # Settings for kahypar
    context = kahypar.Context()
    config_path = os.path.join(os.path.dirname(__file__),
                               "kahypar_config/km1_kKaHyPar_sea20.ini")
    context.loadINIconfiguration(config_path)
    if random_state is not None:
        context.setSeed(random_state)
    context.setK(nclass)
    context.setEpsilon(0.03)
    context.suppressOutput(True)

    # Hypergraph partitioning
    kahypar.partition(hypergraph, context)

    label_ce = np.empty(n_nodes, dtype=int)
    for i in range(n_nodes):
        label_ce[i] = hypergraph.blockID(i)

    return label_ce
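
A minimal usage sketch, assuming labels stacks several base clusterings of the same samples (one clustering per row), which is the layout create_hypergraph is expected to consume here; the data is a toy example.

import numpy as np

# Three toy base clusterings of six samples, one clustering per row.
labels = np.array([
    [0, 0, 0, 1, 1, 1],
    [0, 0, 1, 1, 2, 2],
    [1, 1, 1, 0, 0, 0],
])

# Consensus clustering into two classes; random_state seeds kahypar.
label_ce = hgpa(labels, nclass=2, random_state=0)
print(label_ce)  # one block ID per sample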
Example No. 3
    def _order_by_params(self, graph, nodes, **kwargs):
        graph = self._refresh(graph)
        counter = kwargs.get('counter')

        K = int(kwargs.get('K'))
        eps = kwargs.get('eps', None)
        top = kwargs.get('top', True)
        kwargs_c = copy.copy(kwargs)
        kwargs_c['eps'] = eps[0]
        if not top:
            kwargs_c['K'] = 2
            kwargs_c['eps'] = eps[1]
        context = self._set_context(**kwargs_c)
        cutoff = kwargs.get('cutoff')
        hypergraph, _, edges = self._init_hypergraph_tanner(graph, kwargs_c['K'])
        if not edges:
            return [[nodes, '#']]
        kahypar.partition(hypergraph, context)
        partitions_names = [[] for _ in range(K)]
        for i, n in enumerate(nodes):
            partitions_names[hypergraph.blockID(i)].append(n)
        if any(len(part) == len(nodes) for part in partitions_names):
            return [[nodes, '#']]
        order = []
        all_names = []
        for (i, partite_nodes) in enumerate(
                sorted(partitions_names, key=lambda x: -len(x))):
            if len(partite_nodes) == 0:
                continue
            elif len(partite_nodes) > 1:
                new_stn = counter.cnt
                all_names.append(new_stn)
                new_g, graph, nodes = self._sub(graph, nodes, partite_nodes,
                                                new_stn)
                assert graph.shape[0] == len(nodes) + 1, (graph.shape[0],
                                                          len(nodes))
                kwargs['top'] = False
                if len(partite_nodes) > cutoff:
                    new_order = self._order_by_params(new_g, partite_nodes,
                                                      **kwargs)
                    new_order[-1][1] = new_stn
                else:
                    new_order = [[partite_nodes, new_stn]]
                order += new_order
            else:
                all_names.append(partite_nodes[0])
        order.append([all_names, '#'])
        return order
Example No. 4
    def test_partition_hypergraph(self):
        context = kahypar.Context()
        context.loadINIconfiguration(
            mydir + "/../..//config/km1_kKaHyPar_dissertation.ini")

        ibm01 = kahypar.createHypergraphFromFile(mydir + "/ISPD98_ibm01.hgr",
                                                 2)

        context.setK(2)
        context.setEpsilon(0.03)
        kahypar.partition(ibm01, context)

        self.assertEqual(kahypar.cut(ibm01), 202)
        self.assertEqual(kahypar.soed(ibm01), 404)
        self.assertEqual(kahypar.connectivityMinusOne(ibm01), 202)
        self.assertEqual(kahypar.imbalance(ibm01, context),
                         0.027603513174403904)
Example No. 5
def kahypar_subgraph_find_membership(
    inputs,
    output,
    size_dict,
    weight_nodes='const',
    weight_edges='log',
    fix_output_nodes=False,
    parts=2,
    imbalance=0.01,
    compress=0,
    seed=None,
    profile=None,
    mode='direct',
    objective='cut',
    quiet=True,
):
    import kahypar as kahypar

    if seed is None:
        seed = random.randint(0, 2**31 - 1)

    nv = len(inputs)
    if parts >= nv:
        return list(range(nv))

    hg = get_hypergraph(inputs, output, size_dict, accel=False)

    if compress:
        hg.compress(compress)

    winfo = to_sparse(hg, weight_nodes=weight_nodes, weight_edges=weight_edges)

    hypergraph_kwargs = {
        'num_nodes': hg.get_num_nodes(),
        'num_edges': hg.get_num_edges(),
        'index_vector': winfo['hyperedge_indices'],
        'edge_vector': winfo['hyperedges'],
        'k': parts,
    }

    edge_weights, node_weights = {
        (False, False): (None, None),
        (False, True): ([], winfo['node_weights']),
        (True, False): (winfo['edge_weights'], []),
        (True, True): (winfo['edge_weights'], winfo['node_weights']),
    }[winfo['has_edge_weights'], winfo['has_node_weights']]

    if edge_weights or node_weights:
        hypergraph_kwargs['edge_weights'] = edge_weights
        hypergraph_kwargs['node_weights'] = node_weights

    hypergraph = kahypar.Hypergraph(**hypergraph_kwargs)

    if fix_output_nodes:
        # make sure all the output nodes (those with output indices) are in
        # the same partition
        onodes = tuple(hg.output_nodes())

        if parts >= nv - len(onodes) + 1:
            # too many partitions, simply group all outputs and return
            groups = itertools.count(1)
            return [0 if i in onodes else next(groups) for i in range(nv)]

        for i in onodes:
            hypergraph.fixNodeToBlock(i, 0)

        # silences various warnings
        mode = 'recursive'

    if profile is None:
        profile_mode = {'direct': 'k', 'recursive': 'r'}[mode]
        profile = f"{objective}_{profile_mode}KaHyPar_sea20.ini"

    context = kahypar.Context()
    context.loadINIconfiguration(join(KAHYPAR_PROFILE_DIR, profile))
    context.setK(parts)
    context.setSeed(seed)
    context.suppressOutput(quiet)
    context.setEpsilon(imbalance * parts)

    kahypar.partition(hypergraph, context)

    return [hypergraph.blockID(i) for i in hypergraph.nodes()]
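
The fix_output_nodes branch relies on KaHyPar's fixed-vertex support via fixNodeToBlock. In isolation that mechanism looks roughly like the sketch below, with a toy hypergraph and a placeholder path to a recursive-bisection profile (e.g. one of the sea20 configs used elsewhere in these examples).

import kahypar

# Toy hypergraph: 4 nodes, 2 nets {0, 1, 2} and {1, 2, 3}.
hypergraph = kahypar.Hypergraph(4, 2, [0, 3, 6], [0, 1, 2, 1, 2, 3], 2,
                                [1, 1], [1, 1, 1, 1])

# Pin node 0 into block 0 before partitioning (a fixed vertex).
hypergraph.fixNodeToBlock(0, 0)

context = kahypar.Context()
context.loadINIconfiguration("cut_rKaHyPar_sea20.ini")  # placeholder path
context.setK(2)
context.setEpsilon(0.03)
context.suppressOutput(True)

kahypar.partition(hypergraph, context)
assert hypergraph.blockID(0) == 0  # the fixed node stays in block 0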
Example No. 6
import os
import kahypar as kahypar

num_nodes = 7
num_nets = 4

hyperedge_indices = [0, 2, 6, 9, 12]
hyperedges = [0, 2, 0, 1, 3, 4, 3, 4, 6, 2, 5, 6]

node_weights = [1, 2, 3, 4, 5, 6, 7]
edge_weights = [11, 22, 33, 44]

k = 2

hypergraph = kahypar.Hypergraph(num_nodes, num_nets, hyperedge_indices,
                                hyperedges, k, edge_weights, node_weights)

context = kahypar.Context()
context.loadINIconfiguration("/home/ags/code/kahypar/config/km1_kKaHyPar_sea20.ini")

context.setK(k)
context.setEpsilon(0.03)

kahypar.partition(hypergraph, context)
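
The partition can then be inspected with the metric helpers used in the other examples, for instance:

print([hypergraph.blockID(v) for v in range(num_nodes)])
print("cut:", kahypar.cut(hypergraph))
print("km1:", kahypar.connectivityMinusOne(hypergraph))
print("imbalance:", kahypar.imbalance(hypergraph, context))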
Example No. 7
    def partition_hygr(self, h):

        if (h._nverts <= self._tilesize):
            return range(h._nverts)

        tilesize = self._tilesize
        nparts = int((h._nverts + tilesize - 1) / tilesize)
        curnhygr = 1
        kway_bound = nparts
        totalcut = 0
        gpvec = [0] * h._nverts

        context = self._context
        context.setK(2)  # will always partition into 2

        # nparts should be > 1 at this point
        assert (nparts > 1)
        if (nparts & (nparts - 1) != 0):
            kway_bound = int(math.pow(2, int(math.log2(nparts)) + 1))

        # each entry has five elems: [hygr, curk, gcids, ne, pe]
        # ne and pe include supernets
        curlvl = [None]
        nextlvl = [[None] * 5, [None] * 5]

        curlvl[0] = [
            h, nparts, [x for x in range(h._nverts)], h._nnets, h._npins
        ]

        while (curnhygr != kway_bound):

            lastid = 0

            for i in range(curnhygr):

                if (curlvl[i][1] == 1):
                    lastid += 1
                    continue

                if (curlvl[i][0] is None):
                    continue

                if (curnhygr > 1 and self._addMsgNets):
                    self._add_send_msg_nets(h, lastid, 2 * curnhygr, gpvec,
                                            curlvl, i)

                # compute target pw
                curk = curlvl[i][1]
                tpw = [None] * 2
                if (curlvl[i][0]._nverts % tilesize != 0):
                    tmp = int((curk + 1) / 2)
                    tpw[0] = tmp * tilesize
                    tpw[1] = curlvl[i][0]._nverts - tpw[0]
                else:
                    if (curk % 2 == 0):
                        tpw[0] = tpw[1] = (curk / 2) * tilesize
                    else:
                        tmp = int(curk / 2)
                        tpw[0] = (tmp + 1) * tilesize
                        tpw[1] = tmp * tilesize

                tpw[0] = int(tpw[0])
                tpw[1] = int(tpw[1])

                # form kahypar hypergraph
                hk = kahypar.Hypergraph(
                    curlvl[i][0]._nverts,
                    curlvl[i][3],  # with msg nets
                    curlvl[i][0]._xpins,
                    curlvl[i][0]._pins,
                    2,
                    curlvl[i][0]._nwghts,
                    curlvl[i][0]._cwghts)
                context.setCustomTargetBlockWeights(tpw)
                context.suppressOutput(True)

                # partition and get part vector
                kahypar.partition(hk, context)
                lpvec = [None] * curlvl[i][0]._nverts
                for v in range(curlvl[i][0]._nverts):
                    lpvec[v] = hk.blockID(v)

                # @TODO if tpw is not achieved, return id perm
                curcut = kahypar.cut(hk)
                totalcut += curcut
                assert (hk.blockWeight(0) == tpw[0]
                        and hk.blockWeight(1) == tpw[1])

                # update global partvec
                for v in range(h._nverts):  # orig hygr
                    if (gpvec[v] > lastid):
                        gpvec[v] += 1
                for v in range(curlvl[i][0]._nverts):
                    if (lpvec[v] == 1):
                        gpvec[curlvl[i][2][v]] += 1

                lastid += 2
                left = 2 * i
                right = 2 * i + 1
                nextlvl[right][1] = int(curk / 2)
                nextlvl[left][1] = nextlvl[right][1] + (curk % 2)

                # bisect
                self._bisect(curlvl, lpvec, nextlvl, i)

            curnhygr = curnhygr * 2

            # update curlvl and nextlvl
            if (curnhygr != nparts):
                curlvl = nextlvl
                nextlvl = [[None] * 5 for i in range(2 * curnhygr)]

        return gpvec
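
The tile-sized target weights above are passed through setCustomTargetBlockWeights. In isolation the call looks like the sketch below, using a unit-weight version of the hypergraph from Example No. 6 and a placeholder config path.

import kahypar

hypergraph = kahypar.Hypergraph(7, 4, [0, 2, 6, 9, 12],
                                [0, 2, 0, 1, 3, 4, 3, 4, 6, 2, 5, 6],
                                2, [1, 1, 1, 1], [1] * 7)

context = kahypar.Context()
context.loadINIconfiguration("km1_kKaHyPar_sea20.ini")  # placeholder path
context.setK(2)
context.setEpsilon(0.03)
# Ask for blocks of roughly 4 and 3 unit-weight nodes respectively.
context.setCustomTargetBlockWeights([4, 3])
context.suppressOutput(True)

kahypar.partition(hypergraph, context)
print(hypergraph.blockWeight(0), hypergraph.blockWeight(1))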
Example No. 8
def kahypar_subgraph_find_membership(
    inputs,
    output,
    size_dict,
    weight_nodes='constant',
    weight_edges='log',
    fuse_output_inds=False,
    parts=2,
    imbalance=0.01,
    seed=None,
    profile=None,
    mode='direct',
    objective='cut',
    quiet=True,
):
    import kahypar as kahypar

    if seed is None:
        seed = random.randint(0, 2**31 - 1)

    nv = len(inputs)
    if parts >= nv:
        return list(range(nv))

    HG = HyperGraph(inputs, output, size_dict,
                    weight_edges=weight_edges,
                    weight_nodes=weight_nodes,
                    fuse_output_inds=fuse_output_inds)
    hyperedge_indices, hyperedges = HG.to_sparse()

    hypergraph_kwargs = {
        'num_nodes': HG.num_vertices,
        'num_edges': HG.num_edges,
        'index_vector': hyperedge_indices,
        'edge_vector': hyperedges,
        'k': parts,
    }

    edge_weights, node_weights = {
        (False, False): (None, None),
        (False, True): ([], HG.node_weights),
        (True, False): (HG.edge_weights, []),
        (True, True): (HG.edge_weights, HG.node_weights),
    }[HG.has_edge_weights, HG.has_node_weights]

    if edge_weights or node_weights:
        hypergraph_kwargs['edge_weights'] = edge_weights
        hypergraph_kwargs['node_weights'] = node_weights

    hypergraph = kahypar.Hypergraph(**hypergraph_kwargs)

    if profile is None:
        profile_mode = {'direct': 'k', 'recursive': 'r'}[mode]
        profile = f"{objective}_{profile_mode}KaHyPar_dissertation.ini"

    context = kahypar.Context()
    context.loadINIconfiguration(join(KAHYPAR_PROFILE_DIR, profile))
    context.setK(parts)
    context.setSeed(seed)
    context.suppressOutput(quiet)
    context.setEpsilon(imbalance * parts)

    kahypar.partition(hypergraph, context)

    return [hypergraph.blockID(i) for i in hypergraph.nodes()]