Code example #1
    def find_pos_augment_edges(infr, pcc, k=None):
        """
        # [[1, 0], [0, 2], [1, 2], [3, 1]]
        pos_sub = nx.Graph([[0, 1], [1, 2], [0, 2], [1, 3]])
        """
        if k is None:
            pos_k = infr.params['redun.pos']
        else:
            pos_k = k
        pos_sub = infr.pos_graph.subgraph(pcc)

        # TODO:
        # weight by pairs most likely to be comparable

        # First try to augment only with unreviewed existing edges
        unrev_avail = list(nxu.edges_inside(infr.unreviewed_graph, pcc))
        try:
            check_edges = list(
                nxu.k_edge_augmentation(
                    pos_sub, k=pos_k, avail=unrev_avail, partial=False
                )
            )
        except nx.NetworkXUnfeasible:
            check_edges = None
        if not check_edges:
            # Allow new edges to be introduced
            full_sub = infr.graph.subgraph(pcc).copy()
            new_avail = ut.estarmap(infr.e_, nx.complement(full_sub).edges())
            full_avail = unrev_avail + new_avail
            n_max = (len(pos_sub) * (len(pos_sub) - 1)) // 2
            n_complement = n_max - pos_sub.number_of_edges()
            if len(full_avail) == n_complement:
                # every possible edge is available, so the faster
                # unconstrained algorithm can be used
                check_edges = list(
                    nxu.k_edge_augmentation(pos_sub, k=pos_k, partial=True)
                )
            else:
                # otherwise fall back to the slower approximate algorithm
                check_edges = list(
                    nxu.k_edge_augmentation(
                        pos_sub, k=pos_k, avail=full_avail, partial=True
                    )
                )
        # normalize to canonical undirected edge keys
        check_edges = set(it.starmap(nxu.e_, check_edges))
        return check_edges
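
For reference, here is a minimal standalone sketch of the augmentation step above, using only plain networkx (no wbia/ibeis helpers) and the graph from the docstring; the exact edges returned can vary across networkx versions:

import networkx as nx
from networkx.algorithms.connectivity import k_edge_augmentation

# Triangle {0, 1, 2} with node 3 hanging off node 1; the graph is only
# 1-edge-connected because removing (1, 3) disconnects node 3.
pos_sub = nx.Graph([[0, 1], [1, 2], [0, 2], [1, 3]])

# Edges whose addition would make the graph 2-edge-connected
aug = list(k_edge_augmentation(pos_sub, k=2, partial=True))
print(aug)  # e.g. [(0, 3)]: one extra edge incident to node 3 suffices
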
Code example #2
File: annotmatch_funcs.py  Project: warunanc/ibeis
def add_annotmatch_undirected(ibs, aids1, aids2, **kwargs):
    if len(aids1) == 0 and len(aids2) == 0:
        return []
    edges = list(zip(aids1, aids2))
    from ibeis.algo.graph import nx_utils as nxu
    # Enforce new undirected constraint
    edges = ut.estarmap(nxu.e_, edges)
    aids1, aids2 = list(zip(*edges))

    am_rowids = ibs.get_annotmatch_rowid_from_undirected_superkey(aids1, aids2)
    # Find the edges that do not have a rowid yet
    idxs = ut.where([r is None for r in am_rowids])
    aids1_ = ut.take(aids1, idxs)
    aids2_ = ut.take(aids2, idxs)
    # Create rows for the edges that did not exist yet, keeping the
    # canonical (aids1_, aids2_) order established above
    am_rowids_ = ibs.add_annotmatch(aids1_, aids2_)
    # Fill the missing slots with the newly created rowids
    for idx, rowid in zip(idxs, am_rowids_):
        am_rowids[idx] = rowid
    return am_rowids
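
The nxu.e_ helper used above canonicalizes an undirected edge. A tiny sketch of the assumed behavior (endpoints ordered so (v, u) and (u, v) collapse to the same key):

def e_(u, v):
    # order the endpoints so each undirected edge has one canonical form
    return (u, v) if u < v else (v, u)

edges = [(5, 3), (3, 5), (1, 2)]
print(sorted({e_(u, v) for u, v in edges}))  # [(1, 2), (3, 5)]
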
Code example #3
File: demo.py  Project: yeniherdiyeni/wildbook-ia
def demodata_infr(**kwargs):
    """
    kwargs = {}

    CommandLine:
        python -m wbia.algo.graph.demo demodata_infr --show
        python -m wbia.algo.graph.demo demodata_infr --num_pccs=25
        python -m wbia.algo.graph.demo demodata_infr --profile --num_pccs=100

    Ignore:
        >>> from wbia.algo.graph.demo import *  # NOQA
        >>> from wbia.algo.graph import demo
        >>> import networkx as nx
        >>> kwargs = dict(num_pccs=6, p_incon=.5, size_std=2)
        >>> kwargs = ut.argparse_dict(kwargs)
        >>> infr = demo.demodata_infr(**kwargs)
        >>> pccs = list(infr.positive_components())
        >>> assert len(pccs) == kwargs['num_pccs']
        >>> nonfull_pccs = [cc for cc in pccs if len(cc) > 1 and not nx.is_empty(nx.complement(infr.pos_graph.subgraph(cc)))]
        >>> expected_n_incon = len(nonfull_pccs) * kwargs['p_incon']
        >>> n_incon = len(list(infr.inconsistent_components()))
        >>> # TODO: test that our sample num incon agrees with the population mean
        >>> #sample_mean = n_incon / len(nonfull_pccs)
        >>> #pop_mean = kwargs['p_incon']
        >>> print('status = ' + ut.repr4(infr.status(extended=True)))
        >>> ut.quit_if_noshow()
        >>> infr.show(pickable=True, groupby='name_label')
        >>> ut.show_if_requested()

    Ignore:
        kwargs = {
            'ccs': [[1, 2, 3], [4, 5]]
        }
    """
    import networkx as nx
    import vtool as vt
    from wbia.algo.graph import nx_utils

    def kwalias(*args):
        """Return the first of several aliased kwargs, else the default."""
        params = args[0:-1]
        default = args[-1]
        for key in params:
            if key in kwargs:
                return kwargs[key]
        return default

    num_pccs = kwalias('num_pccs', 16)
    size_mean = kwalias('pcc_size_mean', 'pcc_size', 'size', 5)
    size_std = kwalias('pcc_size_std', 'size_std', 0)
    # p_pcc_incon = kwargs.get('p_incon', .1)
    p_pcc_incon = kwargs.get('p_incon', 0)
    p_pcc_incomp = kwargs.get('p_incomp', 0)
    pcc_sizes = kwalias('pcc_sizes', None)

    pos_redun = kwalias('pos_redun', [1, 2, 3])
    pos_redun = ut.ensure_iterable(pos_redun)

    # maximum number of inconsistent edges allowed per PCC
    max_n_incon = kwargs.get('n_incon', 3)

    rng = np.random.RandomState(0)
    counter = 1

    if pcc_sizes is None:
        pcc_sizes = [
            int(randn(size_mean, size_std, rng=rng, a_min=1))
            for _ in range(num_pccs)
        ]
    else:
        num_pccs = len(pcc_sizes)

    if 'ccs' in kwargs:
        # Overwrites other options
        pcc_sizes = list(map(len, kwargs['ccs']))
        num_pccs = len(pcc_sizes)
        size_mean = None
        size_std = 0

    new_ccs = []
    pcc_iter = list(enumerate(pcc_sizes))
    pcc_iter = ut.ProgIter(pcc_iter,
                           enabled=num_pccs > 20,
                           label='make pos-demo')
    for i, size in pcc_iter:
        p = 0.1
        want_connectivity = rng.choice(pos_redun)
        want_connectivity = min(size - 1, want_connectivity)

        # Create basic graph of positive edges with desired connectivity
        g = nx_utils.random_k_edge_connected_graph(size,
                                                   k=want_connectivity,
                                                   p=p,
                                                   rng=rng)
        nx.set_edge_attributes(g, name='evidence_decision', values=POSTV)
        nx.set_edge_attributes(g, name='truth', values=POSTV)
        # nx.set_node_attributes(g, name='orig_name_label', values=i)
        assert nx.is_connected(g)

        # Relabel graph with non-conflicting names
        if 'ccs' in kwargs:
            g = nx.relabel_nodes(g, dict(enumerate(kwargs['ccs'][i])))
        else:
            # Make sure nodes do not conflict with other PCCs
            g = nx.relabel_nodes(
                g, dict(enumerate(range(counter, counter + len(g)))))
            counter += len(g)

        # We want P(PCC is inconsistent) = p_incon. Since
        # P(all N complement edges consistent) = (1 - p_edge) ** N,
        # solving 1 - p_incon = (1 - p_edge) ** N gives
        # p_edge = 1 - (1 - p_incon) ** (1 / N)
        complement_edges = ut.estarmap(nx_utils.e_,
                                       nx_utils.complement_edges(g))
        if len(complement_edges) > 0:
            # compute probability that any particular edge is inconsistent
            # to achieve probability the PCC is inconsistent
            p_edge_inconn = 1 - (1 - p_pcc_incon)**(1 / len(complement_edges))
            p_edge_unrev = 0.1
            p_edge_notcomp = 1 - (1 - p_pcc_incomp)**(1 /
                                                      len(complement_edges))
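            # e.g. p_pcc_incon = 0.5 with 4 complement edges gives
            # p_edge_inconn = 1 - 0.5 ** (1 / 4) ~= 0.159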
            probs = np.array([p_edge_inconn, p_edge_unrev, p_edge_notcomp])
            # NOTE: if the total probability is greater than 1 the
            # parameters are invalid; renormalization is currently disabled:
            # if probs.sum() > 1:
            #     warnings.warn('probabilities sum to more than 1')
            #     probs = probs / probs.sum()
            pcumsum = probs.cumsum()
            # Determine which mutually exclusive state each complement edge is in
            # logger.info('pcumsum = %r' % (pcumsum,))
            states = np.searchsorted(pcumsum, rng.rand(len(complement_edges)))
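            # state 0 -> inconsistent (NEGTV), 1 -> unreviewed, 2 -> not
            # comparable; indices past the last cumsum bin mean "leave as-is"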

            incon_idxs = np.where(states == 0)[0]
            if len(incon_idxs) > max_n_incon:
                logger.info('max_n_incon = %r' % (max_n_incon, ))
                chosen = rng.choice(incon_idxs, max_n_incon, replace=False)
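                # demote the unchosen inconsistent edges to the no-op state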
                states[np.setdiff1d(incon_idxs, chosen)] = len(probs)

            grouped_edges = ut.group_items(complement_edges, states)
            for state, edges in grouped_edges.items():
                truth = POSTV
                if state == 0:
                    # Add in inconsistent edges
                    evidence_decision = NEGTV
                    # TODO: truth could be INCMP or POSTV
                    # new_edges.append((u, v, {'evidence_decision': NEGTV}))
                elif state == 1:
                    evidence_decision = UNREV
                    # TODO: truth could be INCMP or POSTV
                    # new_edges.append((u, v, {'evidence_decision': UNREV}))
                elif state == 2:
                    evidence_decision = INCMP
                    truth = INCMP
                else:
                    continue
                # Add in candidate edges
                attrs = {
                    'evidence_decision': evidence_decision,
                    'truth': truth
                }
                for (u, v) in edges:
                    g.add_edge(u, v, **attrs)
        new_ccs.append(g)
        # (list(g.nodes()), new_edges))

    pos_g = nx.union_all(new_ccs)
    assert len(new_ccs) == len(list(nx.connected_components(pos_g)))
    assert num_pccs == len(new_ccs)

    # Add edges between the PCCS
    neg_edges = []

    if not kwalias('ignore_pair', False):
        logger.info('making pairs')

        pair_attrs_lookup = {
            0: {
                'evidence_decision': NEGTV,
                'truth': NEGTV
            },
            1: {
                'evidence_decision': INCMP,
                'truth': INCMP
            },
            2: {
                'evidence_decision': UNREV,
                'truth': NEGTV
            },  # could be incomp or neg
        }
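        # the integer keys line up with the searchsorted states computed below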

        # These are the probabilities that at least one edge between a pair
        # of PCCs has this state
        p_pair_neg = kwalias('p_pair_neg', 0.4)
        p_pair_incmp = kwalias('p_pair_incmp', 0.2)
        p_pair_unrev = kwalias('p_pair_unrev', 0)

        # p_pair_neg = 1
        cc_combos = ((list(g1.nodes()), list(g2.nodes()))
                     for (g1, g2) in it.combinations(new_ccs, 2))
        valid_cc_combos = [(cc1, cc2) for cc1, cc2 in cc_combos
                           if len(cc1) and len(cc2)]
        for cc1, cc2 in ut.ProgIter(valid_cc_combos, label='make neg-demo'):
            possible_edges = ut.estarmap(nx_utils.e_, it.product(cc1, cc2))
            # per-edge probabilities chosen so that the chance of at least
            # one edge in each state between these PCCs matches p_pair_*
            n_edges = len(possible_edges)
            p_edge_neg = 1 - (1 - p_pair_neg)**(1 / n_edges)
            p_edge_incmp = 1 - (1 - p_pair_incmp)**(1 / n_edges)
            p_edge_unrev = 1 - (1 - p_pair_unrev)**(1 / n_edges)

            # Create event space with sizes proportional to probabilities
            pcumsum = np.cumsum([p_edge_neg, p_edge_incmp, p_edge_unrev])
            # Roll dice for each edge to see which state it lands on
            possible_pstate = rng.rand(len(possible_edges))
            states = np.searchsorted(pcumsum, possible_pstate)
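            # a state of len(pcumsum) means the roll exceeded every
            # threshold, so no edge is added for that pair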

            flags = states < len(pcumsum)
            stateful_states = states.compress(flags)
            stateful_edges = ut.compress(possible_edges, flags)

            unique_states, groupxs_list = vt.group_indices(stateful_states)
            for state, groupxs in zip(unique_states, groupxs_list):
                # logger.info('state = %r' % (state,))
                # Add in candidate edges
                edges = ut.take(stateful_edges, groupxs)
                attrs = pair_attrs_lookup[state]
                for (u, v) in edges:
                    neg_edges.append((u, v, attrs))
        logger.info('Made {} neg_edges between PCCS'.format(len(neg_edges)))
    else:
        logger.info('ignoring pairs')

    import wbia

    G = wbia.AnnotInference._graph_cls()
    G.add_nodes_from(pos_g.nodes(data=True))
    G.add_edges_from(pos_g.edges(data=True))
    G.add_edges_from(neg_edges)
    infr = wbia.AnnotInference.from_netx(G, infer=kwargs.get('infer', True))
    infr.verbose = 3

    infr.relabel_using_reviews(rectify=False)

    # fontname = 'Ubuntu'
    fontsize = 12
    fontname = 'sans'
    splines = 'spline'
    # splines = 'ortho'
    # splines = 'line'
    infr.set_node_attrs('shape', 'circle')
    infr.graph.graph['ignore_labels'] = True
    infr.graph.graph['dark_background'] = False
    infr.graph.graph['fontname'] = fontname
    infr.graph.graph['fontsize'] = fontsize
    infr.graph.graph['splines'] = splines
    infr.set_node_attrs('width', 29)
    infr.set_node_attrs('height', 29)
    infr.set_node_attrs('fontsize', fontsize)
    infr.set_node_attrs('fontname', fontname)
    infr.set_node_attrs('fixed_size', True)

    # Set synthetic ground-truth attributes for testing
    # infr.apply_edge_truth()
    infr.edge_truth = infr.get_edge_attrs('truth')
    # Attach a synthetic verifier
    infr.dummy_verif = DummyVerif(infr)
    infr.verifiers = {}
    infr.verifiers['match_state'] = infr.dummy_verif
    infr.demokw = kwargs
    return infr
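
A hedged usage sketch based on the doctest above (assumes wbia and its demo module are importable):

from wbia.algo.graph import demo

infr = demo.demodata_infr(num_pccs=6, p_incon=0.5, size_std=2)
pccs = list(infr.positive_components())
assert len(pccs) == 6  # one positive component per requested PCC
print(infr.status(extended=True))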