Example #1
    def parse_items(cfg):
        r"""
        Returns:
            list: param_list

        CommandLine:
            python -m dtool.base --exec-parse_items

        Example:
            >>> # ENABLE_DOCTEST
            >>> from dtool.base import *  # NOQA
            >>> from dtool.example_depcache import DummyVsManyConfig
            >>> cfg = DummyVsManyConfig()
            >>> param_list = cfg.parse_items()
            >>> result = ('param_list = %s' % (ut.repr2(param_list, nl=1),))
            >>> print(result)
        """
        namespace_param_list = cfg.parse_namespace_config_items()
        param_names = ut.get_list_column(namespace_param_list, 1)
        needs_namespace_keys = ut.find_duplicate_items(param_names)
        param_list = ut.get_list_column(namespace_param_list, [1, 2])
        # prepend namespaces to variables that need it
        for idx in ut.flatten(needs_namespace_keys.values()):
            name = namespace_param_list[idx][0]
            param_list[idx][0] = name + '_' + param_list[idx][0]
        duplicate_keys = ut.find_duplicate_items(ut.get_list_column(param_list, 0))
        # hack to let version through
        #import utool
        #with utool.embed_on_exception_context:
        assert len(duplicate_keys) == 0, (
            'Configs have duplicate names: %r' % duplicate_keys)
        return param_list
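Every example on this page revolves around utool's ut.find_duplicate_items. Judging from how the examples consume its result (the keys are the repeated values, and flattening dups.values() yields indices), it maps each duplicated value to the list of indices where it occurs. A minimal, dependency-free sketch of that assumed behavior:

from collections import defaultdict

def find_duplicate_items(items):
    # value -> every index it occupies
    index_map = defaultdict(list)
    for idx, item in enumerate(items):
        index_map[item].append(idx)
    # keep only the values that occur more than once
    return {item: idxs for item, idxs in index_map.items() if len(idxs) > 1}

assert find_duplicate_items(['a', 'b', 'a', 'c', 'b']) == {'a': [0, 2], 'b': [1, 4]}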
Example #2
    def parse_items(cfg):
        r"""
        Returns:
            list: param_list

        CommandLine:
            python -m dtool.base --exec-parse_items

        Example:
            >>> # ENABLE_DOCTEST
            >>> from dtool.base import *  # NOQA
            >>> from dtool.example_depcache import DummyVsManyConfig
            >>> cfg = DummyVsManyConfig()
            >>> param_list = cfg.parse_items()
            >>> result = ('param_list = %s' % (ut.repr2(param_list, nl=1),))
            >>> print(result)
        """
        namespace_param_list = cfg.parse_namespace_config_items()
        param_names = ut.get_list_column(namespace_param_list, 1)
        needs_namespace_keys = ut.find_duplicate_items(param_names)
        param_list = ut.get_list_column(namespace_param_list, [1, 2])
        # prepend namespaces to variables that need it
        for idx in ut.flatten(needs_namespace_keys.values()):
            name = namespace_param_list[idx][0]
            param_list[idx][0] = name + '_' + param_list[idx][0]
        duplicate_keys = ut.find_duplicate_items(ut.get_list_column(param_list, 0))
        # hack to let version through
        import utool
        with utool.embed_on_exception_context:
            assert len(duplicate_keys) == 0, (
                'Configs have duplicate names: %r' % duplicate_keys)
        return param_list
Example #3
def web_check_uuids(ibs,
                    image_uuid_list=[],
                    qannot_uuid_list=[],
                    dannot_uuid_list=[]):
    r"""
    Args:
        ibs (ibeis.IBEISController):  image analysis api
        image_uuid_list (list): (default = [])
        qannot_uuid_list (list): (default = [])
        dannot_uuid_list (list): (default = [])

    CommandLine:
        python -m ibeis.web.apis_engine --exec-web_check_uuids --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.web.apis_engine import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> image_uuid_list = []
        >>> qannot_uuid_list = ibs.get_annot_uuids([1, 1, 2, 3, 2, 4])
        >>> dannot_uuid_list = ibs.get_annot_uuids([1, 2, 3])
        >>> try:
        >>>     web_check_uuids(ibs, image_uuid_list, qannot_uuid_list,
        >>>                     dannot_uuid_list)
        >>> except controller_inject.DuplicateUUIDException:
        >>>     pass
        >>> else:
        >>>     raise AssertionError('Should have gotten DuplicateUUIDException')
        >>> try:
        >>>     web_check_uuids(ibs, [1, 2, 3], qannot_uuid_list,
        >>>                     dannot_uuid_list)
        >>> except controller_inject.WebMissingUUIDException as ex:
        >>>     pass
        >>> else:
        >>>     raise AssertionError('Should have gotten WebMissingUUIDException')
        >>> print('Successfully reported errors')
    """
    # Unique list
    image_uuid_list = list(set(image_uuid_list))
    if qannot_uuid_list is None:
        qannot_uuid_list = []
    if dannot_uuid_list is None:
        dannot_uuid_list = []
    annot_uuid_list = list(set(qannot_uuid_list + dannot_uuid_list))
    # Check that all annot UUIDs exist
    missing_image_uuid_list = ibs.get_image_missing_uuid(image_uuid_list)
    missing_annot_uuid_list = ibs.get_annot_missing_uuid(annot_uuid_list)
    if len(missing_image_uuid_list) > 0 or len(missing_annot_uuid_list) > 0:
        kwargs = {
            'missing_image_uuid_list': missing_image_uuid_list,
            'missing_annot_uuid_list': missing_annot_uuid_list,
        }
        raise controller_inject.WebMissingUUIDException(**kwargs)
    qdup_pos_map = ut.find_duplicate_items(qannot_uuid_list)
    ddup_pos_map = ut.find_duplicate_items(dannot_uuid_list)
    if len(qdup_pos_map) + len(ddup_pos_map) > 0:
        raise controller_inject.DuplicateUUIDException(qdup_pos_map,
                                                       ddup_pos_map)
Example #4
def web_check_uuids(ibs, image_uuid_list=[], qannot_uuid_list=[], dannot_uuid_list=[]):
    r"""
    Args:
        ibs (ibeis.IBEISController):  image analysis api
        image_uuid_list (list): (default = [])
        qannot_uuid_list (list): (default = [])
        dannot_uuid_list (list): (default = [])

    CommandLine:
        python -m ibeis.web.apis_engine --exec-web_check_uuids --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.web.apis_engine import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> image_uuid_list = []
        >>> qannot_uuid_list = ibs.get_annot_uuids([1, 1, 2, 3, 2, 4])
        >>> dannot_uuid_list = ibs.get_annot_uuids([1, 2, 3])
        >>> try:
        >>>     web_check_uuids(ibs, image_uuid_list, qannot_uuid_list,
        >>>                     dannot_uuid_list)
        >>> except controller_inject.DuplicateUUIDException:
        >>>     pass
        >>> else:
        >>>     raise AssertionError('Should have gotten DuplicateUUIDException')
        >>> try:
        >>>     web_check_uuids(ibs, [1, 2, 3], qannot_uuid_list,
        >>>                     dannot_uuid_list)
        >>> except controller_inject.WebMissingUUIDException as ex:
        >>>     pass
        >>> else:
        >>>     raise AssertionError('Should have gotten WebMissingUUIDException')
        >>> print('Successfully reported errors')
    """
    # Unique list
    image_uuid_list = list(set(image_uuid_list))
    if qannot_uuid_list is None:
        qannot_uuid_list = []
    if dannot_uuid_list is None:
        dannot_uuid_list = []
    annot_uuid_list = list(set(qannot_uuid_list + dannot_uuid_list))
    # Check that all annot UUIDs exist
    missing_image_uuid_list = ibs.get_image_missing_uuid(image_uuid_list)
    missing_annot_uuid_list = ibs.get_annot_missing_uuid(annot_uuid_list)
    if len(missing_image_uuid_list) > 0 or len(missing_annot_uuid_list) > 0:
        kwargs = {
            'missing_image_uuid_list': missing_image_uuid_list,
            'missing_annot_uuid_list': missing_annot_uuid_list,
        }
        raise controller_inject.WebMissingUUIDException(**kwargs)
    qdup_pos_map = ut.find_duplicate_items(qannot_uuid_list)
    ddup_pos_map = ut.find_duplicate_items(dannot_uuid_list)
    if len(qdup_pos_map) + len(ddup_pos_map) > 0:
        raise controller_inject.DuplicateUUIDException(qdup_pos_map, ddup_pos_map)
Example #5
    def find_internal_duplicates(self):
        # First find which files take up the same amount of space
        nbytes = self.get_prop('nbytes')
        dups = ut.find_duplicate_items(nbytes)
        # Now evaluate the hashes of these candidates
        cand_idxs = ut.flatten(dups.values())

        data = ut.ColumnLists({
            'idx': cand_idxs,
            'fname': self.get_prop('fname', cand_idxs),
            'dname': self.get_prop('dname', cand_idxs),
            'full_path': self.get_prop('full_path', cand_idxs),
            'nbytes': self.get_prop('nbytes', cand_idxs),
        })
        # print(ut.repr4(ut.group_items(fpaths, nbytes)))
        data.ignore = ['full_path', 'dname']
        data.print(ignore=['full_path', 'dname'])
        data['hash'] = self.get_prop('md5', data['idx'])
        data.print(ignore=['full_path', 'hash'])
        data.print(ignore=['full_path', 'dname'])

        multis = data.get_multis('hash')
        multis.print(ignore=data.ignore)
        return multis
Example #6
 def duplicates(self):
     uuid_to_dupxs = ut.find_duplicate_items(self.uuids)
     dup_fpaths = [
         ut.take(self.rel_fpath_list, idxs)
         for idxs in uuid_to_dupxs.values()
     ]
     return dup_fpaths
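A hedged usage sketch with made-up data, reusing the find_duplicate_items sketch from Example #1: files that share a uuid come back grouped by relative path.

uuids = ['u1', 'u2', 'u1']
rel_fpath_list = ['a.jpg', 'b.jpg', 'c.jpg']
uuid_to_dupxs = find_duplicate_items(uuids)  # {'u1': [0, 2]}
dup_fpaths = [[rel_fpath_list[i] for i in idxs] for idxs in uuid_to_dupxs.values()]
print(dup_fpaths)  # [['a.jpg', 'c.jpg']]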
Example #7
def convert_multigraph_to_graph(G):
    """
    For each duplicate edge make a dummy node.
    TODO: preserve data, keys, and directedness
    """
    import utool as ut
    edge_list = list(G.edges())
    node_list = list(G.nodes())
    dupitem_to_idx = ut.find_duplicate_items(edge_list)
    node_to_freq = ut.ddict(lambda: 0)
    remove_idxs = ut.flatten(dupitem_to_idx.values())
    ut.delete_items_by_index(edge_list, remove_idxs)

    for dup_edge in dupitem_to_idx.keys():
        freq = len(dupitem_to_idx[dup_edge])
        u, v = dup_edge[0:2]
        pair_node = dup_edge
        pair_nodes = [pair_node + tuple([count]) for count in range(freq)]
        for pair_node in pair_nodes:
            node_list.append(pair_node)
            for node in dup_edge:
                node_to_freq[node] += freq
            edge_list.append((u, pair_node))
            edge_list.append((pair_node, v))

    import networkx as nx
    G2 = nx.DiGraph()
    G2.add_edges_from(edge_list)
    G2.add_nodes_from(node_list)
    return G2
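A hedged usage sketch (assumes networkx and utool are importable and convert_multigraph_to_graph is in scope): each parallel edge is rerouted through its own dummy tuple node, so the result contains no parallel edges.

import networkx as nx

G = nx.MultiGraph()
G.add_edge(1, 2)
G.add_edge(1, 2)  # parallel edge: becomes a path through a dummy node
G.add_edge(2, 3)

G2 = convert_multigraph_to_graph(G)
# dummy nodes (1, 2, 0) and (1, 2, 1) are wired as 1 -> dummy -> 2,
# while the unique edge (2, 3) is kept as-is
print(G2.number_of_nodes(), G2.number_of_edges())  # 5 5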
Example #8
def assert_unique(item_list, ignore=[], name='list', verbose=None):
    import utool as ut
    dups = ut.find_duplicate_items(item_list)
    ut.delete_dict_keys(dups, ignore)
    if len(dups) > 0:
        raise AssertionError('Found duplicate items in %s: %s' %
                             (name, ut.repr4(dups)))
    if verbose:
        print('No duplicates found in %s' % (name,))
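A hedged usage sketch (assumes utool is installed): entries listed in ignore may repeat freely, anything else must be unique.

assert_unique([1, 2, 3], name='aid_list')            # passes silently
assert_unique([1, 2, None, None], ignore=[None],
              name='aid_list')                       # duplicate Nones ignored
try:
    assert_unique([1, 2, 2], name='aid_list')
except AssertionError as ex:
    print(ex)  # e.g. Found duplicate items in aid_list: {2: [1, 2]}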
Example #9
def assert_testdb_annot_consistency(ibs_gt, ibs2, aid_list1, aid_list2):
    """
    just tests uuids

    if anything goes wrong this should fix it:
        from ibeis.other import ibsfuncs
        aid_list1 = ibs_gt.get_valid_aids()
        ibs_gt.update_annot_visual_uuids(aid_list1)
        ibs2.update_annot_visual_uuids(aid_list2)
        ibsfuncs.fix_remove_visual_dupliate_annotations(ibs_gt)
    """
    assert len(aid_list2) == len(aid_list1)
    visualtup1 = ibs_gt.get_annot_visual_uuid_info(aid_list1)
    visualtup2 = ibs2.get_annot_visual_uuid_info(aid_list2)

    _visual_uuid_list1 = [ut.augment_uuid(*tup) for tup in zip(*visualtup1)]
    _visual_uuid_list2 = [ut.augment_uuid(*tup) for tup in zip(*visualtup2)]

    assert ut.hashstr(visualtup1) == ut.hashstr(visualtup2)
    ut.assert_lists_eq(visualtup1[0], visualtup2[0])
    ut.assert_lists_eq(visualtup1[1], visualtup2[1])
    ut.assert_lists_eq(visualtup1[2], visualtup2[2])
    #semantic_uuid_list1 = ibs_gt.get_annot_semantic_uuids(aid_list1)
    #semantic_uuid_list2 = ibs2.get_annot_semantic_uuids(aid_list2)

    visual_uuid_list1 = ibs_gt.get_annot_visual_uuids(aid_list1)
    visual_uuid_list2 = ibs2.get_annot_visual_uuids(aid_list2)

    # make sure visual uuids are still deterministic
    ut.assert_lists_eq(visual_uuid_list1, visual_uuid_list2)
    ut.assert_lists_eq(_visual_uuid_list1, visual_uuid_list1)
    ut.assert_lists_eq(_visual_uuid_list2, visual_uuid_list2)

    if ut.VERBOSE:
        ibs1_dup_annots = ut.debug_duplicate_items(visual_uuid_list1)
        ibs2_dup_annots = ut.debug_duplicate_items(visual_uuid_list2)
    else:
        ibs1_dup_annots = ut.find_duplicate_items(visual_uuid_list1)
        ibs2_dup_annots = ut.find_duplicate_items(visual_uuid_list2)

    # if these fail try ibsfuncs.fix_remove_visual_dupliate_annotations
    assert len(ibs1_dup_annots) == 0
    assert len(ibs2_dup_annots) == 0
Example #10
def assert_unique(item_list, ignore=[], name='list', verbose=None):
    import utool as ut
    dups = ut.find_duplicate_items(item_list)
    ut.delete_dict_keys(dups, ignore)
    if len(dups) > 0:
        raise AssertionError(
            'Found duplicate items in %s: %s' % (
                name, ut.repr4(dups)))
    if verbose:
        print('No duplicates found in %s' % (name,))
Example #11
def assert_testdb_annot_consistency(ibs_gt, ibs2, aid_list1, aid_list2):
    """
    just tests uuids

    if anything goes wrong this should fix it:
        from ibeis.other import ibsfuncs
        aid_list1 = ibs_gt.get_valid_aids()
        ibs_gt.update_annot_visual_uuids(aid_list1)
        ibs2.update_annot_visual_uuids(aid_list2)
        ibsfuncs.fix_remove_visual_dupliate_annotations(ibs_gt)
    """
    assert len(aid_list2) == len(aid_list1)
    visualtup1 = ibs_gt.get_annot_visual_uuid_info(aid_list1)
    visualtup2 = ibs2.get_annot_visual_uuid_info(aid_list2)

    _visual_uuid_list1 = [ut.augment_uuid(*tup) for tup in zip(*visualtup1)]
    _visual_uuid_list2 = [ut.augment_uuid(*tup) for tup in zip(*visualtup2)]

    assert ut.hashstr(visualtup1) == ut.hashstr(visualtup2)
    ut.assert_lists_eq(visualtup1[0], visualtup2[0])
    ut.assert_lists_eq(visualtup1[1], visualtup2[1])
    ut.assert_lists_eq(visualtup1[2], visualtup2[2])
    #semantic_uuid_list1 = ibs_gt.get_annot_semantic_uuids(aid_list1)
    #semantic_uuid_list2 = ibs2.get_annot_semantic_uuids(aid_list2)

    visual_uuid_list1 = ibs_gt.get_annot_visual_uuids(aid_list1)
    visual_uuid_list2 = ibs2.get_annot_visual_uuids(aid_list2)

    # make sure visual uuids are still deterministic
    ut.assert_lists_eq(visual_uuid_list1, visual_uuid_list2)
    ut.assert_lists_eq(_visual_uuid_list1, visual_uuid_list1)
    ut.assert_lists_eq(_visual_uuid_list2, visual_uuid_list2)

    if ut.VERBOSE:
        ibs1_dup_annots = ut.debug_duplicate_items(visual_uuid_list1)
        ibs2_dup_annots = ut.debug_duplicate_items(visual_uuid_list2)
    else:
        ibs1_dup_annots = ut.find_duplicate_items(visual_uuid_list1)
        ibs2_dup_annots = ut.find_duplicate_items(visual_uuid_list2)

    # if these fail try ibsfuncs.fix_remove_visual_dupliate_annotations
    assert len(ibs1_dup_annots) == 0
    assert len(ibs2_dup_annots) == 0
Example #12
    def consolodate_duplicates(self):
        fnames = map(basename, self.rel_fpath_list)
        duplicate_map = ut.find_duplicate_items(fnames)
        groups = []
        for dupname, idxs in duplicate_map.items():
            uuids = self.get_prop('uuids', idxs)
            unique_uuids, groupxs = ut.group_indices(uuids)
            groups.extend(ut.apply_grouping(idxs, groupxs))
        multitons = [g for g in groups if len(g) > 1]
        # singletons = [g for g in groups if len(g) <= 1]

        ut.unflat_take(list(self.fpaths()), multitons)
Example #13
    def consolodate_duplicates(self):
        fnames = map(basename, self.rel_fpath_list)
        duplicate_map = ut.find_duplicate_items(fnames)
        groups = []
        for dupname, idxs in duplicate_map.items():
            uuids = self.get_prop('uuids', idxs)
            unique_uuids, groupxs = ut.group_indices(uuids)
            groups.extend(ut.apply_grouping(idxs, groupxs))
        multitons = [g for g in groups if len(g) > 1]
        # singletons = [g for g in groups if len(g) <= 1]

        ut.unflat_take(list(self.fpaths()), multitons)
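The loop above leans on two more utool helpers. A plain-Python sketch of their assumed semantics, inferred from this usage (the real implementations may return arrays rather than lists):

from collections import defaultdict

def group_indices(values):
    # value -> the indices where it occurs, in first-seen order
    groups = defaultdict(list)
    for idx, value in enumerate(values):
        groups[value].append(idx)
    return list(groups.keys()), list(groups.values())

def apply_grouping(items, groupxs):
    # regroup a parallel list using the index groups
    return [[items[i] for i in idxs] for idxs in groupxs]

uuids = ['u1', 'u2', 'u1']   # uuids of files sharing one name
idxs = [10, 11, 12]          # their positions in the full file list
unique_uuids, groupxs = group_indices(uuids)
print(apply_grouping(idxs, groupxs))  # [[10, 12], [11]]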
Example #14
 def find_nonunique_names(self):
     fnames = map(basename, self.rel_fpath_list)
     duplicate_map = ut.find_duplicate_items(fnames)
     groups = []
     for dupname, idxs in duplicate_map.items():
         uuids = self.get_prop('uuids', idxs)
         fpaths = self.get_prop('abs', idxs)
         groups = ut.group_items(fpaths, uuids)
         if len(groups) > 1:
             if all(x == 1 for x in map(len, groups.values())):
                 # All groups are different, so this is the simpler case
                 print(ut.repr2(groups, nl=3))
             else:
                 # Need to handle the multi-item groups first
                 pass
Example #15
 def find_nonunique_names(self):
     fnames = map(basename, self.rel_fpath_list)
     duplicate_map = ut.find_duplicate_items(fnames)
     groups = []
     for dupname, idxs in duplicate_map.items():
         uuids = self.get_prop('uuids', idxs)
         fpaths = self.get_prop('abs', idxs)
         groups = ut.group_items(fpaths, uuids)
         if len(groups) > 1:
             if all(x == 1 for x in map(len, groups.values())):
                 # All groups are different, so this is the simpler case
                 print(ut.repr2(groups, nl=3))
             else:
                 # Need to handle the multi-item groups first
                 pass
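ut.group_items pairs each item with a key and collects a key -> items mapping; a plain-Python sketch of the assumed behavior:

from collections import defaultdict

def group_items(items, keys):
    # key -> list of the items that carry that key
    groups = defaultdict(list)
    for key, item in zip(keys, items):
        groups[key].append(item)
    return dict(groups)

fpaths = ['/a/x.jpg', '/b/x.jpg', '/c/x.jpg']  # same basename, three dirs
uuids = ['u1', 'u2', 'u1']
print(group_items(fpaths, uuids))
# {'u1': ['/a/x.jpg', '/c/x.jpg'], 'u2': ['/b/x.jpg']}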
Example #16
    def find_internal_duplicates(self):
        # First find which files take up the same amount of space
        nbytes = self.get_prop('nbytes')
        dups = ut.find_duplicate_items(nbytes)
        # Now evaluate the hashes of these candidates
        cand_idxs = ut.flatten(dups.values())

        data = ut.ColumnLists({
            'idx': cand_idxs,
            'fname': self.get_prop('fname', cand_idxs),
            'dname': self.get_prop('dname', cand_idxs),
            'full_path': self.get_prop('full_path', cand_idxs),
            'nbytes': self.get_prop('nbytes', cand_idxs),
        })
        # print(ut.repr4(ut.group_items(fpaths, nbytes)))
        data.ignore = ['full_path', 'dname']
        data.print(ignore=['full_path', 'dname'])
        data['hash'] = self.get_prop('md5', data['idx'])
        data.print(ignore=['full_path', 'hash'])
        data.print(ignore=['full_path', 'dname'])

        multis = data.get_multis('hash')
        multis.print(ignore=data.ignore)
        return multis
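The method above is a classic two-stage duplicate finder: group by file size first (cheap), then hash only the size collisions (expensive). A self-contained standard-library sketch of the same idea; the names here are illustrative, not from the source:

import hashlib
import os
from collections import defaultdict

def find_duplicate_files(fpaths):
    # Stage 1: only files of equal size can possibly be identical
    by_size = defaultdict(list)
    for fpath in fpaths:
        by_size[os.path.getsize(fpath)].append(fpath)
    # Stage 2: hash just the candidates that collided on size
    by_hash = defaultdict(list)
    for group in by_size.values():
        if len(group) > 1:
            for fpath in group:
                with open(fpath, 'rb') as file_:
                    by_hash[hashlib.md5(file_.read()).hexdigest()].append(fpath)
    return [group for group in by_hash.values() if len(group) > 1]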
Example #17
    def __init__(qparams, query_cfg=None, cfgdict=None):
        """
        Rename to pipeline params

        Structure to store static query pipeline parameters
        parses nested config structure into this flat one

        Args:
            query_cfg (QueryConfig): query_config
            cfgdict (dict or None): dictionary to update query_cfg with

        CommandLine:
            python -m ibeis.algo.hots.query_params --test-__init__

        Example:
            >>> # ENABLE_DOCTEST
            >>> from ibeis.algo.hots.query_params import *  # NOQA
            >>> import ibeis
            >>> ibs = ibeis.opendb('testdb1')
            >>> query_cfg = ibs.cfg.query_cfg
            >>> #query_cfg.pipeline_root = 'asmk'
            >>> cfgdict = {'pipeline_root': 'asmk', 'sv_on': False, 'fg_on': True}
            >>> qparams = QueryParams(query_cfg, cfgdict)
            >>> assert qparams.pipeline_root == 'smk'
            >>> assert qparams.fg_on is True
            >>> result = qparams.query_cfgstr
            >>> print(')_\n'.join(result.split(')_')))

        Example:
            >>> # ENABLE_DOCTEST
            >>> from ibeis.algo.hots.query_params import *  # NOQA
            >>> import ibeis
            >>> ibs = ibeis.opendb('testdb1')
            >>> query_cfg = ibs.cfg.query_cfg
            >>> #query_cfg.pipeline_root = 'asmk'
            >>> cfgdict = dict(rotation_invariance=True)
            >>> qparams = QueryParams(query_cfg, cfgdict)
            >>> ut.assert_eq(qparams.hesaff_params['rotation_invariance'], True)

            _smk_SMK(agg=True,t=0.0,a=3.0,idf)_
            VocabAssign(nAssign=10,a=1.2,s=None,eqw=T)_
            VocabTrain(nWords=8000,init=akmeans++,nIters=128,taids=all)_
            SV(OFF)_
            FEATWEIGHT(ON,uselabel,rf)_
            FEAT(hesaff+sift_)_
            CHIP(sz450)
        """
        # if given custom settings, update the config and ensure feasibility
        if query_cfg is None:
            query_cfg = Config.QueryConfig()
        if cfgdict is not None:
            query_cfg = query_cfg.deepcopy()
            query_cfg.update_query_cfg(**cfgdict)
        # Get flat item list
        param_list = Config.parse_config_items(query_cfg)
        # Assert that there are no config conflicts
        duplicate_keys = ut.find_duplicate_items(
            ut.get_list_column(param_list, 0))
        assert len(duplicate_keys) == 0, (
            'Configs have duplicate names: %r' % duplicate_keys)
        # Set nested config attributes as flat qparam properties
        for key, val in param_list:
            setattr(qparams, key, val)
        # Add params not implicitly represented in Config object
        pipeline_root = query_cfg.pipeline_root
        qparams.chip_cfg_dict = query_cfg._featweight_cfg._feat_cfg._chip_cfg.to_dict()
        qparams.flann_params = query_cfg.flann_cfg.get_flann_params()
        qparams.hesaff_params = query_cfg._featweight_cfg._feat_cfg.get_hesaff_params()
        qparams.pipeline_root = pipeline_root
        qparams.vsmany = pipeline_root == 'vsmany'
        qparams.vsone = pipeline_root == 'vsone'
        # Add custom strings to the mix as well
        # TODO; Find better way to specify config strings
        # FIXME: probchip is not in here
        qparams.probchip_cfgstr = query_cfg._featweight_cfg.get_cfgstr(
            use_feat=False, use_chip=False)
        qparams.featweight_cfgstr = query_cfg._featweight_cfg.get_cfgstr()
        qparams.chip_cfgstr = query_cfg._featweight_cfg._feat_cfg._chip_cfg.get_cfgstr()
        qparams.feat_cfgstr = query_cfg._featweight_cfg._feat_cfg.get_cfgstr()
        qparams.nn_cfgstr = query_cfg.nn_cfg.get_cfgstr()
        qparams.nnweight_cfgstr = query_cfg.nnweight_cfg.get_cfgstr()
        qparams.sv_cfgstr = query_cfg.sv_cfg.get_cfgstr()
        qparams.flann_cfgstr = query_cfg.flann_cfg.get_cfgstr()
        qparams.query_cfgstr = query_cfg.get_cfgstr()
        qparams.vocabtrain_cfgstr = query_cfg.smk_cfg.vocabtrain_cfg.get_cfgstr()
        qparams.rrvsone_cfgstr = query_cfg.rrvsone_cfg.get_cfgstr()
Example #18
def show_arch_nx_graph(layers, fnum=None, fullinfo=True):
    r"""

    CommandLine:
        python -m ibeis_cnn.draw_net show_arch_nx_graph:0 --show
        python -m ibeis_cnn.draw_net show_arch_nx_graph:1 --show

    Example0:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_cnn.draw_net import *  # NOQA
        >>> from ibeis_cnn import models
        >>> model = models.mnist.MNISTModel(batch_size=128, output_dims=10,
        >>>                                 data_shape=(24, 24, 3))
        >>> model.init_arch()
        >>> layers = model.get_all_layers()
        >>> show_arch_nx_graph(layers)
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> ut.show_if_requested()

    Example1:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_cnn.draw_net import *  # NOQA
        >>> from ibeis_cnn import models
        >>> model = models.SiameseCenterSurroundModel(autoinit=True)
        >>> layers = model.get_all_layers()
        >>> show_arch_nx_graph(layers)
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> ut.show_if_requested()

    """
    import networkx as nx
    import plottool as pt
    import ibeis_cnn.__LASAGNE__ as lasange
    #from matplotlib import offsetbox
    #import matplotlib as mpl

    REMOVE_BATCH_SIZE = True
    from ibeis_cnn import net_strs

    def get_hex_color(layer_type):
        if 'Input' in layer_type:
            return '#A2CECE'
        if 'Conv2D' in layer_type:
            return '#7C9ABB'
        if 'Dense' in layer_type:
            return '#6CCF8D'
        if 'Pool' in layer_type:
            return '#9D9DD2'
        if 'SoftMax' in layer_type:
            return '#7E9FD9'
        else:
            return '#{0:x}'.format(hash(layer_type + 'salt') % 2 ** 24)

    node_dict = {}
    edge_list = []
    edge_attrs = ut.ddict(dict)

    # Make layer ids (ensure no duplicates)
    layer_to_id = {
        l: repr(l) if l.name is None else l.name
        for l in set(layers)
    }
    keys_ = list(layer_to_id.keys())
    dups = ut.find_duplicate_items(list(layer_to_id.values()))
    for dupval, dupidxs in dups.items():
        newval_fmt = dupval + '_%d'
        for layer in ut.take(keys_, dupidxs):
            newid = ut.get_nonconflicting_string(newval_fmt, layer_to_id.values())
            layer_to_id[layer] = newid

    def layerid(layer):
        return layer_to_id[layer]

    main_nodes = []

    for i, layer in enumerate(layers):
        layer_info = net_strs.get_layer_info(layer)
        layer_type = layer_info['classalias']

        key = layerid(layer)

        color = get_hex_color(layer_info['classalias'])
        # Make label
        lines = []
        if layer_info['name'] is not None:
            lines.append(layer_info['name'])
        if fullinfo:
            lines.append(layer_info['classalias'])
            for attr, val in layer_info['layer_attrs'].items():
                if attr == 'shape' and REMOVE_BATCH_SIZE:
                    val = val[1:]
                if attr == 'output_shape' and REMOVE_BATCH_SIZE:
                    val = val[1:]
                lines.append('{0}: {1}'.format(attr, val))

            nonlinearity = layer_info.get('nonlinearity')
            if nonlinearity is not None:
                alias_map = {
                    'LeakyRectify': 'LReLU',
                }
                val = layer_info['nonlinearity']['type']
                val = alias_map.get(val, val)
                lines.append('nonlinearity:\n{0}'.format(val))

        label = '\n'.join(lines)

        # append node
        is_main_layer = len(layer.params) > 0
        #is_main_layer = len(lasange.layers.get_all_params(layer, trainable=True)) > 0
        if layer_info['classname'] in lasange.layers.normalization.__all__:
            is_main_layer = False
        if layer_info['classname'] in lasange.layers.special.__all__:
            is_main_layer = False
        if layer_info['classname'].startswith('BatchNorm'):
            is_main_layer = False
        if layer_info['classname'].startswith('ElemwiseSum'):
            is_main_layer = True

        if layer_type == 'Input':
            is_main_layer = True

        if hasattr(layer, '_is_main_layer'):
            is_main_layer = layer._is_main_layer

        #if getattr(layer, 'name', '') is not None and getattr(layer, 'name', '') .endswith('/sum'):
        #    is_main_layer = True

        node_attr = dict(name=key, label=label, color=color,
                         fillcolor=color, style='filled',
                         is_main_layer=is_main_layer)

        node_attr['is_main_layer'] = is_main_layer
        if is_main_layer:
            main_nodes.append(key)
        node_attr['classalias'] = layer_info['classalias']

        if is_main_layer or node_attr['classalias'].startswith('Conv'):
            if hasattr(layer, 'shape'):
                if len(layer.shape) == 3:
                    node_attr['out_size'] = (layer.shape[2],
                                             layer.shape[1])
                    node_attr['depth'] = layer.output_shape[0]
            if hasattr(layer, 'output_shape'):
                if len(layer.output_shape) == 4:
                    depth = layer.output_shape[1]
                    width, height = (layer.output_shape[3],
                                     layer.output_shape[2])
                    xshift = -width * (.1 / (depth ** (1 / 3))) / 3
                    yshift = height * (.1 / (depth ** (1 / 3))) / 2
                    node_attr['depth'] = depth
                    node_attr['xshift'] = xshift
                    node_attr['yshift'] = yshift
                    node_attr['out_size'] = (width, height)

                if len(layer.output_shape) == 2:
                    node_attr['out_size'] = (1,
                                             layer.output_shape[1])

        node_dict[key] = node_attr

        _input_layers = []
        if hasattr(layer, 'input_layers'):
            _input_layers += layer.input_layers
        if hasattr(layer, 'input_layer'):
            _input_layers += [layer.input_layer]

        for input_layer in _input_layers:
            parent_key = layerid(input_layer)
            edge = (parent_key, key)
            edge_list.append(edge)

    main_size_ = np.array((100, 100)) * 4
    sub_size = np.array((75, 50)) * 4

    # Setup scaled width and heights
    out_size_list = [v['out_size'] for v in node_dict.values() if 'out_size' in v]
    out_size_list = np.array(out_size_list)
    #out_size_list = out_size_list[out_size_list.T[0] > 1]
    area_arr = np.prod(out_size_list, axis=1)
    main_outsize = np.array(out_size_list[area_arr.argmax()])
    #main_outsize = np.array(out_size_list[area_arr.argmin()])
    scale = main_size_ / main_outsize

    scale_dense_max = .25
    scale_dense_min = 8

    for k, v in node_dict.items():
        if v['is_main_layer'] or v['classalias'].startswith('Conv'):
            if 'out_size' in v:
                # Make dense layers more visible
                if v['classalias'] == 'Dense':
                    v['shape'] = 'rect'
                    v['width'] = scale_dense_min
                    if v['out_size'][1] > main_outsize[1]:
                        v['height'] = v['out_size'][1] * scale[1] * scale_dense_max
                    elif v['out_size'][1] < scale_dense_min:
                        v['height'] = scale_dense_min * v['out_size'][1]
                    else:
                        v['height'] = v['out_size'][1]
                elif v['classalias'].startswith('Conv'):
                    v['shape'] = 'stack'
                    #v['shape'] = 'rect'
                    v['width'] = v['out_size'][0] * scale[0]
                    v['height'] = v['out_size'][1] * scale[1]
                else:
                    v['shape'] = 'rect'
                    v['width'] = v['out_size'][0] * scale[0]
                    v['height'] = v['out_size'][1] * scale[1]
            else:
                v['shape'] = 'rect'
                v['width'] = main_size_[0]
                v['height'] = main_size_[1]
        else:
            #v['shape'] = 'ellipse'
            v['shape'] = 'rect'
            v['style'] = 'rounded'
            v['width'] = sub_size[0]
            v['height'] = sub_size[1]

    key_order = ut.take(layer_to_id, layers)
    node_dict = ut.dict_subset(node_dict, key_order)

    #print('node_dict = ' + ut.repr3(node_dict))

    # Create the networkx graph structure
    G = nx.DiGraph()
    G.add_nodes_from(node_dict.items())
    G.add_edges_from(edge_list)
    for key, val in edge_attrs.items():
        nx.set_edge_attributes(G, key, val)

    # Add invisible structure
    #main_nodes = [key for key, val in
    #              nx.get_node_attributes(G, 'is_main_layer').items() if val]

    main_children = ut.odict()

    #for n1, n2 in ut.itertwo(main_nodes):
    #    print('n1, n2 = %r %r' % (n1, n2))
    #    import utool
    #    utool.embed()
    #    children = ut.nx_all_nodes_between(G, n1, n2)
    #    if n1 in children:
    #        children.remove(n1)
    #    if n2 in children:
    #        children.remove(n2)
    #    main_children[n1] = children

    #    #pass
    #main_children[main_nodes[-1]] = []

    for n1 in main_nodes:
        main_children[n1] = []
        # Main nodes only place constraints on nodes in the next main
        # group, not on their own group.
        next_main = None
        G.node[n1]['group'] = n1
        for (_, n2) in nx.bfs_edges(G, n1):
            if next_main is None:
                if n2 in main_nodes:
                    next_main = n2
                else:
                    G.node[n2]['group'] = n1
                    main_children[n1].append(n2)
            else:
                if n2 not in list(nx.descendants(G, next_main)):
                    G.node[n2]['group'] = n1
                    main_children[n1].append(n2)

    # Custom positioning
    x = 0
    y = 1000
    #print('main_children = %s' % (ut.repr3(main_children),))

    #main_nodes = ut.isect(list(nx.topological_sort(G)), main_nodes)
    xpad = main_size_[0] * .3
    ypad = main_size_[1] * .3

    # Draw each main node, and then put its children under it
    # Then move to the left and draw the next main node.
    cumwidth = 0
    for n1 in main_nodes:
        cumheight = 0

        maxwidth = G.node[n1]['width']
        for n2 in main_children[n1]:
            maxwidth = max(maxwidth, G.node[n2]['width'])

        cumwidth += xpad
        cumwidth += maxwidth / 2

        pos = np.array([x + cumwidth, y - cumheight])
        G.node[n1]['pos'] = pos
        G.node[n1]['pin'] = 'true'

        height = G.node[n1]['height']
        cumheight += height / 2

        for n2 in main_children[n1]:
            height = G.node[n2]['height']
            cumheight += ypad
            cumheight += height / 2
            pos = np.array([x + cumwidth, y - cumheight])
            G.node[n2]['pos'] = pos
            G.node[n2]['pin'] = 'true'
            cumheight += height / 2

        cumwidth += maxwidth / 2

    # Pin everybody
    nx.set_node_attributes(G, 'pin', 'true')
    layoutkw = dict(prog='neato', splines='line')
    #layoutkw = dict(prog='neato', splines='spline')
    layoutkw = dict(prog='neato', splines='ortho')
    G_ = G.copy()
    # delete labels for positioning
    _labels = nx.get_node_attributes(G_, 'label')
    ut.nx_delete_node_attr(G_, 'label')
    nx.set_node_attributes(G_, 'label', '')
    nolayout = False
    if nolayout:
        G_.remove_edges_from(list(G_.edges()))
    else:
        layout_info = pt.nx_agraph_layout(G_, inplace=True, **layoutkw)  # NOQA
    # reset labels
    if not nolayout:
        nx.set_node_attributes(G_, 'label', _labels)
    _ = pt.show_nx(G_, fontsize=8, arrow_width=.3, layout='custom', fnum=fnum)  # NOQA
    #pt.adjust_subplots(top=1, bot=0, left=0, right=1)
    pt.plt.tight_layout()
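One detail worth isolating from the function above is the layer-id deduplication step. A plain-Python sketch of the assumed behavior of ut.get_nonconflicting_string (try fmt % 0, fmt % 1, ... until the name is free) and how the renaming loop applies it:

def get_nonconflicting_string(fmt, taken):
    # assumed semantics: lowest count whose formatted name is not taken
    count = 0
    while fmt % count in taken:
        count += 1
    return fmt % count

names = ['conv', 'conv', 'pool']
taken = set(names)
unique = []
for name in names:
    if names.count(name) > 1:  # duplicated id: rename this occurrence
        name = get_nonconflicting_string(name + '_%d', taken)
        taken.add(name)
    unique.append(name)
print(unique)  # ['conv_0', 'conv_1', 'pool']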
Example #19
    def __init__(qparams, query_cfg=None, cfgdict=None):
        """
        Rename to pipeline params

        Structure to store static query pipeline parameters
        parses nested config structure into this flat one

        Args:
            query_cfg (QueryConfig): query_config
            cfgdict (dict or None): dictionary to update query_cfg with

        CommandLine:
            python -m ibeis.algo.hots.query_params --test-__init__

        Example:
            >>> # ENABLE_DOCTEST
            >>> from ibeis.algo.hots.query_params import *  # NOQA
            >>> import ibeis
            >>> ibs = ibeis.opendb('testdb1')
            >>> query_cfg = ibs.cfg.query_cfg
            >>> #query_cfg.pipeline_root = 'asmk'
            >>> cfgdict = {'pipeline_root': 'asmk', 'sv_on': False, 'fg_on': True}
            >>> qparams = QueryParams(query_cfg, cfgdict)
            >>> assert qparams.pipeline_root == 'smk'
            >>> assert qparams.fg_on is True
            >>> result = qparams.query_cfgstr
            >>> print(')_\n'.join(result.split(')_')))

        Example:
            >>> # ENABLE_DOCTEST
            >>> from ibeis.algo.hots.query_params import *  # NOQA
            >>> import ibeis
            >>> ibs = ibeis.opendb('testdb1')
            >>> query_cfg = ibs.cfg.query_cfg
            >>> #query_cfg.pipeline_root = 'asmk'
            >>> cfgdict = dict(rotation_invariance=True)
            >>> qparams = QueryParams(query_cfg, cfgdict)
            >>> ut.assert_eq(qparams.hesaff_params['rotation_invariance'], True)

            _smk_SMK(agg=True,t=0.0,a=3.0,idf)_
            VocabAssign(nAssign=10,a=1.2,s=None,eqw=T)_
            VocabTrain(nWords=8000,init=akmeans++,nIters=128,taids=all)_
            SV(OFF)_
            FEATWEIGHT(ON,uselabel,rf)_
            FEAT(hesaff+sift_)_
            CHIP(sz450)
        """
        # if given custom settings, update the config and ensure feasibility
        if query_cfg is None:
            query_cfg = Config.QueryConfig()
        if cfgdict is not None:
            query_cfg = query_cfg.deepcopy()
            query_cfg.update_query_cfg(**cfgdict)
        # Get flat item list
        param_list = Config.parse_config_items(query_cfg)
        # Assert that there are no config conflicts
        duplicate_keys = ut.find_duplicate_items(ut.get_list_column(param_list, 0))
        assert len(duplicate_keys) == 0, 'Configs have duplicate names: %r' % duplicate_keys
        # Set nested config attributes as flat qparam properties
        for key, val in param_list:
            setattr(qparams, key, val)
        # Add params not implicitly represented in Config object
        pipeline_root              = query_cfg.pipeline_root
        qparams.chip_cfg_dict      = query_cfg._featweight_cfg._feat_cfg._chip_cfg.to_dict()
        qparams.flann_params       = query_cfg.flann_cfg.get_flann_params()
        qparams.hesaff_params      = query_cfg._featweight_cfg._feat_cfg.get_hesaff_params()
        qparams.pipeline_root      = pipeline_root
        qparams.vsmany             = pipeline_root == 'vsmany'
        qparams.vsone              = pipeline_root == 'vsone'
        # Add custom strings to the mix as well
        # TODO; Find better way to specify config strings
        # FIXME: probchip is not in here
        qparams.probchip_cfgstr   = query_cfg._featweight_cfg.get_cfgstr(
            use_feat=False, use_chip=False)
        qparams.featweight_cfgstr = query_cfg._featweight_cfg.get_cfgstr()
        qparams.chip_cfgstr       = query_cfg._featweight_cfg._feat_cfg._chip_cfg.get_cfgstr()
        qparams.feat_cfgstr       = query_cfg._featweight_cfg._feat_cfg.get_cfgstr()
        qparams.nn_cfgstr         = query_cfg.nn_cfg.get_cfgstr()
        qparams.nnweight_cfgstr   = query_cfg.nnweight_cfg.get_cfgstr()
        qparams.sv_cfgstr         = query_cfg.sv_cfg.get_cfgstr()
        qparams.flann_cfgstr      = query_cfg.flann_cfg.get_cfgstr()
        qparams.query_cfgstr      = query_cfg.get_cfgstr()
        qparams.vocabtrain_cfgstr = query_cfg.smk_cfg.vocabtrain_cfg.get_cfgstr()
        qparams.rrvsone_cfgstr    = query_cfg.rrvsone_cfg.get_cfgstr()
Example #20
 def duplicates(self):
     uuid_to_dupxs = ut.find_duplicate_items(self.uuids)
     dup_fpaths = [ut.take(self.rel_fpath_list, idxs) for idxs in uuid_to_dupxs.values()]
     return dup_fpaths
Example #21
def web_check_uuids(ibs,
                    image_uuid_list=[],
                    qannot_uuid_list=[],
                    dannot_uuid_list=[]):
    r"""
    Args:
        ibs (wbia.IBEISController):  image analysis api
        image_uuid_list (list): (default = [])
        qannot_uuid_list (list): (default = [])
        dannot_uuid_list (list): (default = [])

    CommandLine:
        python -m wbia.web.apis_engine --exec-web_check_uuids --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.web.apis_engine import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='testdb1')
        >>> image_uuid_list = []
        >>> qannot_uuid_list = ibs.get_annot_uuids([1, 1, 2, 3, 2, 4])
        >>> dannot_uuid_list = ibs.get_annot_uuids([1, 2, 3])
        >>> try:
        >>>     web_check_uuids(ibs, image_uuid_list, qannot_uuid_list,
        >>>                     dannot_uuid_list)
        >>> except controller_inject.WebDuplicateUUIDException:
        >>>     pass
        >>> else:
        >>>     raise AssertionError('Should have gotten WebDuplicateUUIDException')
        >>> try:
        >>>     web_check_uuids(ibs, [1, 2, 3], qannot_uuid_list,
        >>>                     dannot_uuid_list)
        >>> except controller_inject.WebMissingUUIDException as ex:
        >>>     pass
        >>> else:
        >>>     raise AssertionError('Should have gotten WebMissingUUIDException')
        >>> print('Successfully reported errors')
    """
    import uuid

    # Unique list
    if qannot_uuid_list is None:
        qannot_uuid_list = []
    if dannot_uuid_list is None:
        dannot_uuid_list = []

    annot_uuid_list = qannot_uuid_list + dannot_uuid_list

    # UUID parse check (an assert would be skipped under python -O, so use
    # explicit isinstance checks)
    invalid_image_uuid_list = [
        (index, image_uuid)
        for index, image_uuid in enumerate(image_uuid_list)
        if not isinstance(image_uuid, uuid.UUID)
    ]
    invalid_annot_uuid_list = [
        (index, annot_uuid)
        for index, annot_uuid in enumerate(annot_uuid_list)
        if not isinstance(annot_uuid, uuid.UUID)
    ]

    if len(invalid_image_uuid_list) > 0 or len(invalid_annot_uuid_list) > 0:
        kwargs = {
            'invalid_image_uuid_list': invalid_image_uuid_list,
            'invalid_annot_uuid_list': invalid_annot_uuid_list,
        }
        raise controller_inject.WebInvalidUUIDException(**kwargs)

    image_uuid_list = list(set(image_uuid_list))
    annot_uuid_list = list(set(annot_uuid_list))
    # Check that all annot UUIDs exist
    missing_image_uuid_list = ibs.get_image_missing_uuid(image_uuid_list)
    missing_annot_uuid_list = ibs.get_annot_missing_uuid(annot_uuid_list)
    if len(missing_image_uuid_list) > 0 or len(missing_annot_uuid_list) > 0:
        kwargs = {
            'missing_image_uuid_list': missing_image_uuid_list,
            'missing_annot_uuid_list': missing_annot_uuid_list,
        }
        raise controller_inject.WebMissingUUIDException(**kwargs)
    qdup_pos_map = ut.find_duplicate_items(qannot_uuid_list)
    ddup_pos_map = ut.find_duplicate_items(dannot_uuid_list)
    if len(qdup_pos_map) + len(ddup_pos_map) > 0:
        raise controller_inject.WebDuplicateUUIDException(
            qdup_pos_map, ddup_pos_map)
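A hedged sketch of the parse check in isolation; unlike the original, this variant also accepts well-formed UUID strings, which is an extension, not the source's behavior:

import uuid

candidates = [uuid.uuid4(), 'not-a-uuid',
              '12345678-1234-5678-1234-567812345678']
invalid = []
for index, value in enumerate(candidates):
    if not isinstance(value, uuid.UUID):
        try:
            uuid.UUID(value)  # well-formed string: accept it
        except (ValueError, TypeError, AttributeError):
            invalid.append((index, value))
print(invalid)  # [(1, 'not-a-uuid')]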