Example #1
def tryout_web_graphs(self, infr):
    """
    https://plot.ly/python/

    http://bokeh.pydata.org/en/latest/

    pip install bokeh

    Notes:
        http://www.coppelia.io/2014/07/an-a-to-z-of-extra-features-for-the-d3-force-layout/
        http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/
        pip install plotly  # eww need to sign up and get a key
        http://igraph.org/
    import mpld3
    fig = pt.gcf()
    mpld3.save_html(fig, open('fig.html', 'w'))
    mpld3.save_json(fig, open('fig.json', 'w'))
    """
    #import plottool as pt
    # http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/
    from networkx.readwrite import json_graph

    G = infr.graph
    data = json_graph.node_link_data(G)
    json_text = ut.to_json(data, pretty=True)
    ut.writeto('graph.json', json_text)
    ut.editfile('graph.json')

    ut.startfile('d3_example.html')
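
A minimal sketch of the same export using only networkx and the standard library (the ut.to_json, ut.writeto, and ut.startfile calls above are utool helpers; 'd3_example.html' is assumed to be a local viewer page that loads graph.json):

import json
import webbrowser
import networkx as nx
from networkx.readwrite import json_graph

G = nx.karate_club_graph()            # any graph; stands in for infr.graph
data = json_graph.node_link_data(G)   # dict in the d3 "node-link" format
with open('graph.json', 'w') as file_:
    json.dump(data, file_, indent=4)  # pretty-printed, like ut.to_json(pretty=True)
webbrowser.open('d3_example.html')    # open the viewer page in a browser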
Example #2
def autogen_ipynb(ibs, launch=None, run=None):
    r"""
    Autogenerates standard IBEIS Image Analysis IPython notebooks.

    CommandLine:
        python -m ibeis --tf autogen_ipynb --run --db lynx

        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST --asreport
        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST --noexample --withtags

        python -m ibeis --tf autogen_ipynb --db PZ_MTEST
        # TODO: Add support for dbdir to be specified
        python -m ibeis --tf autogen_ipynb --db ~/work/PZ_MTEST

        python -m ibeis --tf autogen_ipynb --ipynb --db Oxford -a default:qhas_any=\(query,\),dpername=1,exclude_reference=True,dminqual=good
        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST -a default -t best:lnbnn_normalizer=[None,normlnbnn-test]

        python -m ibeis.templates.generate_notebook --exec-autogen_ipynb --db wd_peter_blinston --ipynb

        python -m ibeis --tf autogen_ipynb --db PZ_Master1 --ipynb
        python -m ibeis --tf autogen_ipynb --db PZ_Master1 -a timectrl:qindex=0:100 -t best best:normsum=True --ipynb --noexample
        python -m ibeis --tf autogen_ipynb --db PZ_Master1 -a timectrl --run
        jupyter-notebook Experiments-lynx.ipynb
        killall python

        python -m ibeis --tf autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW -a default:has_any=hasnotch
        python -m ibeis --tf autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW default:proot=vsmany -a default:has_any=hasnotch,mingt=2,qindex=0:50 --noexample

    Example:
        >>> # SCRIPT
        >>> from ibeis.templates.generate_notebook import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> result = autogen_ipynb(ibs)
        >>> print(result)
    """
    dbname = ibs.get_dbname()
    fname = 'Experiments-' + dbname
    nb_fpath = fname + '.ipynb'
    if ut.get_argflag('--cells'):
        notebook_cells = make_ibeis_cell_list(ibs)
        print('\n# ---- \n'.join(notebook_cells))
        return
    # TODO: Add support for dbdir to be specified
    notebook_str = make_ibeis_notebook(ibs)
    ut.writeto(nb_fpath, notebook_str)
    run = ut.get_argflag('--run') if run is None else run
    launch = launch if launch is not None else ut.get_argflag('--ipynb')
    if run:
        run_nb = ut.run_ipython_notebook(notebook_str)
        output_fpath = ut.export_notebook(run_nb, fname)
        ut.startfile(output_fpath)
    elif launch:
        ut.cmd('jupyter-notebook', nb_fpath, detatch=True)
        #ut.cmd('ipython-notebook', nb_fpath)
        #ut.startfile(nb_fpath)
    else:
        print('notebook_str =\n%s' % (notebook_str,))
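
For context, one way to execute a notebook string like notebook_str programmatically is nbformat plus nbconvert's ExecutePreprocessor; this is only a sketch of the general mechanism, not necessarily what ut.run_ipython_notebook / ut.export_notebook do internally:

import nbformat
from nbconvert import HTMLExporter
from nbconvert.preprocessors import ExecutePreprocessor

def run_and_export(notebook_str, fname):
    # Parse the JSON notebook text and execute every cell in-place
    nb = nbformat.reads(notebook_str, as_version=4)
    ep = ExecutePreprocessor(timeout=600, kernel_name='python3')
    ep.preprocess(nb, {'metadata': {'path': '.'}})
    # Export the executed notebook to HTML next to the .ipynb
    body, _ = HTMLExporter().from_notebook_node(nb)
    output_fpath = fname + '.html'
    with open(output_fpath, 'w') as file_:
        file_.write(body)
    return output_fpath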
Example #3
def imwrite_theano_symbolic_graph(thean_expr):
    import theano
    graph_dpath = '.'
    graph_fname = 'symbolic_graph.png'
    graph_fpath = ut.unixjoin(graph_dpath, graph_fname)
    ut.ensuredir(graph_dpath)
    theano.printing.pydotprint(
        thean_expr, outfile=graph_fpath, var_with_name_simple=True)
    ut.startfile(graph_fpath)
    return graph_fpath
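
A self-contained sketch of the underlying Theano call, assuming Theano and pydot/Graphviz are installed (thean_expr above can be any symbolic expression or compiled function):

import theano
import theano.tensor as T

x = T.dmatrix('x')                       # symbolic input
y = T.nnet.sigmoid(T.dot(x, x.T)).sum()  # some symbolic expression
theano.printing.pydotprint(
    y, outfile='symbolic_graph.png', var_with_name_simple=True)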
Example #4
def render_latex_text(input_text, nest_in_doc=False, appname='utool', verbose=None):
    """ testing function """
    import utool as ut
    if verbose is None:
        verbose = ut.VERBOSE
    dpath = ut.get_app_resource_dir(appname)
    # put a latex fragment in a full document
    print(input_text)
    pdf_fpath = ut.compile_latex_text(input_text, dpath=dpath, verbose=verbose)
    ut.startfile(pdf_fpath)
    return pdf_fpath
Example #5
def make_bayes_notebook():
    r"""
    CommandLine:
        python -m wbia.unstable.demobayes --exec-make_bayes_notebook

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.unstable.demobayes import *  # NOQA
        >>> result = make_bayes_notebook()
        >>> print(result)
    """
    from wbia.templates import generate_notebook

    initialize = ut.codeblock(r"""
        # STARTBLOCK
        import os
        os.environ['UTOOL_NO_CNN'] = 'True'
        from wbia.unstable.demobayes import *  # NOQA
        # Matplotlib stuff
        import matplotlib as mpl
        %matplotlib inline
        %load_ext autoreload
        %autoreload
        from IPython.core.display import HTML
        HTML("<style>body .container { width:99% !important; }</style>")
        # ENDBLOCK
        """)
    cell_list_def = [
        initialize,
        show_model_templates,
        demo_modes,
        demo_name_annot_complexity,
        # demo_model_idependencies,
        demo_single_add,
        demo_ambiguity,
        demo_conflicting_evidence,
        demo_annot_idependence_overlap,
    ]

    def format_cell(cell):
        if ut.is_funclike(cell):
            header = '# ' + ut.to_title_caps(ut.get_funcname(cell))
            code = (header,
                    ut.get_func_sourcecode(cell, stripdef=True, stripret=True))
        else:
            code = (None, cell)
        return generate_notebook.format_cells(code)

    cell_list = ut.flatten([format_cell(cell) for cell in cell_list_def])
    nbstr = generate_notebook.make_notebook(cell_list)
    logger.info('nbstr = %s' % (nbstr, ))
    fpath = 'demobayes.ipynb'
    ut.writeto(fpath, nbstr)
    ut.startfile(fpath)
Example #6
def make_bayes_notebook():
    r"""
    CommandLine:
        python -m ibeis.algo.hots.demobayes --exec-make_bayes_notebook

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.demobayes import *  # NOQA
        >>> result = make_bayes_notebook()
        >>> print(result)
    """
    from ibeis.templates import generate_notebook
    initialize = ut.codeblock(
        r'''
        # STARTBLOCK
        import os
        os.environ['UTOOL_NO_CNN'] = 'True'
        from ibeis.algo.hots.demobayes import *  # NOQA
        # Matplotlib stuff
        import matplotlib as mpl
        %matplotlib inline
        %load_ext autoreload
        %autoreload
        from IPython.core.display import HTML
        HTML("<style>body .container { width:99% !important; }</style>")
        # ENDBLOCK
        '''
    )
    cell_list_def = [
        initialize,
        show_model_templates,
        demo_modes,
        demo_name_annot_complexity,
        ###demo_model_idependencies,
        demo_single_add,
        demo_ambiguity,
        demo_conflicting_evidence,
        demo_annot_idependence_overlap,
    ]
    def format_cell(cell):
        if ut.is_funclike(cell):
            header = '# ' + ut.to_title_caps(ut.get_funcname(cell))
            code = (header, ut.get_func_sourcecode(cell, stripdef=True, stripret=True))
        else:
            code = (None, cell)
        return generate_notebook.format_cells(code)

    cell_list = ut.flatten([format_cell(cell) for cell in cell_list_def])
    nbstr = generate_notebook.make_notebook(cell_list)
    print('nbstr = %s' % (nbstr,))
    fpath = 'demobayes.ipynb'
    ut.writeto(fpath, nbstr)
    ut.startfile(fpath)
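
The key trick above is turning plain functions into notebook cells via ut.get_func_sourcecode(..., stripdef=True, stripret=True). A rough standard-library approximation of that step (a sketch only; it drops the def line by slicing and dedenting, and is not utool's implementation):

import inspect
import textwrap

def func_body_as_cell(func):
    """Return a '# Title' header plus the dedented body of ``func``."""
    header = '# ' + func.__name__.replace('_', ' ').title()
    src_lines = inspect.getsource(func).splitlines()
    body = textwrap.dedent('\n'.join(src_lines[1:]))  # drop the `def ...:` line
    return header, body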
Example #7
def plot_gps_html(gps_list):
    """ Plots gps coordinates on a map projection

    InstallBasemap:
        sudo apt-get install libgeos-dev
        pip install git+https://github.com/matplotlib/basemap
        http://matplotlib.org/basemap/users/examples.html

        pip install gmplot

        sudo apt-get install netcdf-bin
        sudo apt-get install libnetcdf-dev
        pip install netCDF4

    Ignore:
        pip install git+git://github.com/myuser/foo.git@v123

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.algo.preproc.preproc_occurrence import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='testdb1')
        >>> images = ibs.images()
        >>> # Setup GPS points to draw
        >>> print('Setup GPS points')
        >>> gps_list_ = np.array(images.gps2)
        >>> unixtime_list_ = np.array(images.unixtime2)
        >>> has_gps = np.all(np.logical_not(np.isnan(gps_list_)), axis=1)
        >>> has_unixtime = np.logical_not(np.isnan(unixtime_list_))
        >>> isvalid = np.logical_and(has_gps, has_unixtime)
        >>> gps_list = gps_list_.compress(isvalid, axis=0)
        >>> unixtime_list = unixtime_list_.compress(isvalid)  # NOQA
        >>> plot_gps_html(gps_list)
    """
    import wbia.plottool as pt
    import gmplot
    import matplotlib as mpl
    import vtool as vt

    pt.qt4ensure()

    lat = gps_list.T[0]
    lon = gps_list.T[1]

    # Get extent of the GPS points
    bbox = vt.bbox_from_verts(gps_list)
    centerx, centery = vt.bbox_center(bbox)

    gmap = gmplot.GoogleMapPlotter(centerx, centery, 13)
    color = mpl.colors.rgb2hex(pt.ORANGE)
    gmap.scatter(lat, lon, color=color, size=100, marker=False)
    gmap.draw('mymap.html')
    ut.startfile('mymap.html')
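
A stripped-down sketch of the same gmplot usage without the wbia/plottool/vtool dependencies, assuming only numpy and gmplot are installed (the color is any hex string; the original uses pt.ORANGE):

import webbrowser
import numpy as np
import gmplot

gps_list = np.array([[-1.35, 36.79], [-1.37, 36.80], [-1.33, 36.77]])  # (lat, lon) rows
lat, lon = gps_list.T[0], gps_list.T[1]
center_lat, center_lon = gps_list.mean(axis=0)  # simple center instead of vt.bbox_center
gmap = gmplot.GoogleMapPlotter(center_lat, center_lon, 13)
gmap.scatter(lat, lon, color='#F88017', size=100, marker=False)
gmap.draw('mymap.html')
webbrowser.open('mymap.html')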
Example #8
 def _on_doubleclick(wgt, qtindex):
     print('[wgt] _on_doubleclick: ')
     col = qtindex.column()
     if wgt.api.col_edit_list[col]:
         print('do nothing special for editable columns')
         return
     model = qtindex.model()
     colname = model.get_header_name(col)
     if colname.endswith('fpath'):
         print('showing fpath')
         fpath = model.get_header_data(colname, qtindex)
         ut.startfile(fpath)
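
ut.startfile appears throughout these examples; it opens a path with the desktop's default application. A minimal cross-platform sketch of that behavior using only the standard library (not utool's actual implementation):

import os
import sys
import subprocess

def open_with_default_app(fpath):
    """Open ``fpath`` with the OS default viewer/editor."""
    if sys.platform.startswith('win'):
        os.startfile(fpath)                         # Windows shell "open" verb
    elif sys.platform == 'darwin':
        subprocess.check_call(['open', fpath])      # macOS
    else:
        subprocess.check_call(['xdg-open', fpath])  # Linux/BSD desktops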
Example #9
def render_latex_text(input_text,
                      nest_in_doc=False,
                      appname='utool',
                      verbose=None):
    """ testing function """
    import utool as ut
    if verbose is None:
        verbose = ut.VERBOSE
    dpath = ut.get_app_resource_dir(appname)
    # put a latex fragment in a full document
    print(input_text)
    pdf_fpath = ut.compile_latex_text(input_text, dpath=dpath, verbose=verbose)
    ut.startfile(pdf_fpath)
    return pdf_fpath
Example #10
def render_latex_text(input_text, nest_in_doc=False, preamb_extra=None,
                      appname='utool', verbose=None):
    """ compiles latex and shows the result """
    import utool as ut
    if verbose is None:
        verbose = ut.VERBOSE
    dpath = ut.ensure_app_resource_dir(appname, 'latex_tmp')
    # put a latex fragment in a full document
    # print(input_text)
    fname = 'temp_render_latex'
    pdf_fpath = ut.compile_latex_text(
        input_text, dpath=dpath, fname=fname, preamb_extra=preamb_extra,
        verbose=verbose)
    ut.startfile(pdf_fpath)
    return pdf_fpath
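
Usage is then a one-liner; a hedged example assuming a working LaTeX toolchain (pdflatex) is on the PATH:

# Compile a small fragment and open the resulting PDF
pdf_fpath = render_latex_text(r'Euler: $e^{i\pi} + 1 = 0$', verbose=True)
print(pdf_fpath)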
Example #11
def customize_figure(fig, docla):
    #if 'user_stat_list' not in fig.__dict__.keys() or docla:
    #    fig.user_stat_list = []
    #    fig.user_notes = []
    fig.df2_closed = False
    fig.pt_save = functools.partial(save_figure, fig=fig)
    fig.pt_save_and_view = lambda *args, **kwargs: ut.startfile(fig.pt_save(*args, **kwargs))
Example #12
 def delayed_screenshot_func():
     if done[0] == 500:
         #back.mainwin.menubar.triggered.emit(back.mainwin.menuFile)
         print('Mouseclick')
         QTest.mouseClick(back.mainwin.menuFile, Qt.LeftButton)
         # This works
         #QTest.mouseClick(back.front.import_button, Qt.LeftButton)
     if done[0] == 1:
         timer2.stop()
         print('screengrab to %r' % (fpath,))
         screenimg = QPixmap.grabWindow(back.mainwin.winId())
         screenimg.save(fpath, 'jpg')
         ut.startfile(fpath)
         print('lub dub2')
     done[0] -= 1
     return None
Example #13
def customize_figure(fig, docla):
    #if 'user_stat_list' not in fig.__dict__.keys() or docla:
    #    fig.user_stat_list = []
    #    fig.user_notes = []
    fig.df2_closed = False
    fig.pt_save = functools.partial(save_figure, fig=fig)
    fig.pt_save_and_view = lambda *args, **kwargs: ut.startfile(
        fig.pt_save(*args, **kwargs))
Example #14
    def draw_serial(ChapX, expt_name, dbnames, *args):
        dbnames = ut.smart_cast(dbnames, list)
        mpl.rcParams.update(TMP_RC)

        for dbname in dbnames:
            self = ChapX(dbname)
            if expt_name == 'all':
                self.draw_all()
            else:
                draw_func = getattr(self, 'draw_' + expt_name, None)
                if draw_func is None:
                    draw_func = getattr(self, 'write_' + expt_name, None)
                if draw_func is None:
                    raise ValueError('Cannot find a way to draw ' + expt_name)
                fpath = draw_func(*args)
                if ut.get_argflag('--diskshow'):
                    if isinstance(fpath, six.text_type):
                        ut.startfile(fpath)
                    elif fpath is not None:
                        fpath_list = fpath
                        for fpath in fpath_list:
                            ut.startfile(fpath)
Example #15
def visualize():
    import networkx as nx
    import torch
    import torchvision  # used below for torchvision.models
    from torch.autograd import Variable

    def make_nx(var, params):
        param_map = {id(v): k for k, v in params.items()}
        print(param_map)
        node_attr = dict(style='filled',
                         shape='box',
                         align='left',
                         fontsize='12',
                         ranksep='0.1',
                         height='0.2')
        seen = set()
        G = nx.DiGraph()

        def size_to_str(size):
            return '(' + (', ').join(['%d' % v for v in size]) + ')'

        def build_graph(var):
            if var not in seen:
                if torch.is_tensor(var):
                    G.add_node(id(var),
                               label=size_to_str(var.size()),
                               fillcolor='orange',
                               **node_attr)
                elif hasattr(var, 'variable'):
                    u = var.variable
                    node_name = '%s\n %s' % (param_map.get(
                        id(u)), size_to_str(u.size()))
                    G.add_node(id(var),
                               label=node_name,
                               fillcolor='lightblue',
                               **node_attr)
                else:
                    G.add_node(id(var),
                               label=str(type(var).__name__),
                               **node_attr)
                seen.add(var)
                if hasattr(var, 'next_functions'):
                    for u in var.next_functions:
                        if u[0] is not None:
                            G.add_edge(id(u[0]), id(var))
                            build_graph(u[0])
                if hasattr(var, 'saved_tensors'):
                    for t in var.saved_tensors:
                        G.add_edge(id(t), id(var))
                        build_graph(t)

        build_graph(var.grad_fn)
        return G

    # inputs = torch.randn(1, 3, 224, 224)
    # resnet18 = models.resnet18()
    # y = resnet18(Variable(inputs))

    inputs = torch.randn(1, 3, 224, 224)
    # model = torchvision.models.resnet18()
    model = torchvision.models.resnet50()

    model = Siamese()

    # y = model(Variable(inputs))
    y = model(Variable(inputs), Variable(inputs))

    params = model.state_dict()
    G = make_nx(y, params)

    import plottool as pt
    pt.dump_nx_ondisk(G, './pytorch_network.png')
    ut.startfile('./pytorch_network.png')
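
If plottool is not available, the same NetworkX graph can be rendered through pydot/Graphviz instead of pt.dump_nx_ondisk; a sketch assuming pydot and Graphviz are installed:

from networkx.drawing.nx_pydot import to_pydot

def dump_graph_png(G, fpath):
    # Convert the labelled DiGraph to pydot and let Graphviz rasterize it
    to_pydot(G).write_png(fpath)

# dump_graph_png(G, './pytorch_network.png')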
Example #16
def draw_twoday_count(ibs, visit_info_list_):
    import copy
    visit_info_list = copy.deepcopy(visit_info_list_)

    aids_day1, aids_day2 = ut.take_column(visit_info_list_, 'aids')
    nids_day1, nids_day2 = ut.take_column(visit_info_list_, 'unique_nids')
    resight_nids = ut.isect(nids_day1, nids_day2)

    if False:
        # HACK REMOVE DATA TO MAKE THIS FASTER
        num = 20
        for info in visit_info_list:
            non_resight_nids = list(set(info['unique_nids']) - set(resight_nids))
            sample_nids2 = non_resight_nids[0:num] + resight_nids[:num]
            info['grouped_aids'] = ut.dict_subset(info['grouped_aids'], sample_nids2)
            info['unique_nids'] = sample_nids2

    # Build a graph of matches
    if False:

        debug = False

        for info in visit_info_list:
            edges = []
            grouped_aids = info['grouped_aids']

            aids_list = list(grouped_aids.values())
            ams_list = ibs.get_annotmatch_rowids_in_cliques(aids_list)
            aids1_list = ibs.unflat_map(ibs.get_annotmatch_aid1, ams_list)
            aids2_list = ibs.unflat_map(ibs.get_annotmatch_aid2, ams_list)
            for ams, aids, aids1, aids2 in zip(ams_list, aids_list, aids1_list, aids2_list):
                edge_nodes = set(aids1 + aids2)
                # if len(edge_nodes) != len(set(aids)):
                #     print('--')
                #     print('aids = %r' % (aids,))
                #     print('edge_nodes = %r' % (edge_nodes,))
                bad_aids = edge_nodes - set(aids)
                if len(bad_aids) > 0:
                    print('bad_aids = %r' % (bad_aids,))
                unlinked_aids = set(aids) - edge_nodes
                mst_links = list(ut.itertwo(list(unlinked_aids) + list(edge_nodes)[:1]))
                bad_aids.add(None)
                user_links = [(u, v) for (u, v) in zip(aids1, aids2) if u not in bad_aids and v not in bad_aids]
                new_edges = mst_links + user_links
                new_edges = [(int(u), int(v)) for u, v in new_edges if u not in bad_aids and v not in bad_aids]
                edges += new_edges
            info['edges'] = edges

        # Add edges between days
        grouped_aids1, grouped_aids2 = ut.take_column(visit_info_list, 'grouped_aids')
        nids_day1, nids_day2 = ut.take_column(visit_info_list, 'unique_nids')
        resight_nids = ut.isect(nids_day1, nids_day2)

        resight_aids1 = ut.take(grouped_aids1, resight_nids)
        resight_aids2 = ut.take(grouped_aids2, resight_nids)
        #resight_aids3 = [list(aids1) + list(aids2) for aids1, aids2 in zip(resight_aids1, resight_aids2)]

        ams_list = ibs.get_annotmatch_rowids_between_groups(resight_aids1, resight_aids2)
        aids1_list = ibs.unflat_map(ibs.get_annotmatch_aid1, ams_list)
        aids2_list = ibs.unflat_map(ibs.get_annotmatch_aid2, ams_list)

        between_edges = []
        for ams, aids1, aids2, rawaids1, rawaids2 in zip(ams_list, aids1_list, aids2_list, resight_aids1, resight_aids2):
            link_aids = aids1 + aids2
            rawaids3 = rawaids1 + rawaids2
            badaids = ut.setdiff(link_aids, rawaids3)
            assert not badaids
            user_links = [(int(u), int(v)) for (u, v) in zip(aids1, aids2)
                          if u is not None and v is not None]
            # HACK THIS OFF
            user_links = []
            if len(user_links) == 0:
                # Hack in an edge
                between_edges += [(rawaids1[0], rawaids2[0])]
            else:
                between_edges += user_links

        assert np.all(0 == np.diff(np.array(ibs.unflat_map(ibs.get_annot_nids, between_edges)), axis=1))

        import plottool_ibeis as pt
        import networkx as nx
        #pt.qt4ensure()
        #len(list(nx.connected_components(graph1)))
        #print(ut.graph_info(graph1))

        # Layout graph
        layoutkw = dict(
            prog='neato',
            draw_implicit=False, splines='line',
            #splines='curved',
            #splines='spline',
            #sep=10 / 72,
            #prog='dot', rankdir='TB',
        )

        def translate_graph_to_origin(graph):
            x, y, w, h = ut.get_graph_bounding_box(graph)
            ut.translate_graph(graph, (-x, -y))

        def stack_graphs(graph_list, vert=False, pad=None):
            graph_list_ = [g.copy() for g in graph_list]
            for g in graph_list_:
                translate_graph_to_origin(g)
            bbox_list = [ut.get_graph_bounding_box(g) for g in graph_list_]
            if vert:
                dim1 = 3
                dim2 = 2
            else:
                dim1 = 2
                dim2 = 3
            dim1_list = np.array([bbox[dim1] for bbox in bbox_list])
            dim2_list = np.array([bbox[dim2] for bbox in bbox_list])
            if pad is None:
                pad = np.mean(dim1_list) / 2
            offset1_list = ut.cumsum([0] + [d + pad for d in dim1_list[:-1]])
            max_dim2 = max(dim2_list)
            offset2_list = [(max_dim2 - d2) / 2 for d2 in dim2_list]
            if vert:
                t_xy_list = [(d2, d1) for d1, d2 in zip(offset1_list, offset2_list)]
            else:
                t_xy_list = [(d1, d2) for d1, d2 in zip(offset1_list, offset2_list)]

            for g, t_xy in zip(graph_list_, t_xy_list):
                ut.translate_graph(g, t_xy)
                nx.set_node_attributes(g, name='pin', values='true')

            new_graph = nx.compose_all(graph_list_)
            #pt.show_nx(new_graph, layout='custom', node_labels=False, as_directed=False)  # NOQA
            return new_graph

        # Construct graph
        for count, info in enumerate(visit_info_list):
            graph = nx.Graph()
            edges = [(int(u), int(v)) for u, v in info['edges']
                     if u is not None and v is not None]
            graph.add_edges_from(edges, attr_dict={'zorder': 10})
            nx.set_node_attributes(graph, name='zorder', values=20)

            # Layout in neato
            _ = pt.nx_agraph_layout(graph, inplace=True, **layoutkw)  # NOQA

            # Extract components and then flatten in nid ordering
            ccs = list(nx.connected_components(graph))
            root_aids = []
            cc_graphs = []
            for cc_nodes in ccs:
                cc = graph.subgraph(cc_nodes)
                try:
                    root_aids.append(list(ut.nx_source_nodes(cc.to_directed()))[0])
                except nx.NetworkXUnfeasible:
                    root_aids.append(list(cc.nodes())[0])
                cc_graphs.append(cc)

            root_nids = ibs.get_annot_nids(root_aids)
            nid2_graph = dict(zip(root_nids, cc_graphs))

            resight_nids_ = set(resight_nids).intersection(set(root_nids))
            noresight_nids_ = set(root_nids) - resight_nids_

            n_graph_list = ut.take(nid2_graph, sorted(noresight_nids_))
            r_graph_list = ut.take(nid2_graph, sorted(resight_nids_))

            if len(n_graph_list) > 0:
                n_graph = nx.compose_all(n_graph_list)
                _ = pt.nx_agraph_layout(n_graph, inplace=True, **layoutkw)  # NOQA
                n_graphs = [n_graph]
            else:
                n_graphs = []

            r_graphs = [stack_graphs(chunk) for chunk in ut.ichunks(r_graph_list, 100)]
            if count == 0:
                new_graph = stack_graphs(n_graphs + r_graphs, vert=True)
            else:
                new_graph = stack_graphs(r_graphs[::-1] + n_graphs, vert=True)

            #pt.show_nx(new_graph, layout='custom', node_labels=False, as_directed=False)  # NOQA
            info['graph'] = new_graph

        graph1_, graph2_ = ut.take_column(visit_info_list, 'graph')
        if False:
            _ = pt.show_nx(graph1_, layout='custom', node_labels=False, as_directed=False)  # NOQA
            _ = pt.show_nx(graph2_, layout='custom', node_labels=False, as_directed=False)  # NOQA

        graph_list = [graph1_, graph2_]
        twoday_graph = stack_graphs(graph_list, vert=True, pad=None)
        nx.set_node_attributes(twoday_graph, name='pin', values='true')

        if debug:
            ut.nx_delete_None_edge_attr(twoday_graph)
            ut.nx_delete_None_node_attr(twoday_graph)
            print('twoday_graph(pre) info' + ut.repr3(ut.graph_info(twoday_graph), nl=2))

        # Hack: no idea why there are nodes that don't exist here
        between_edges_ = [edge for edge in between_edges
                          if twoday_graph.has_node(edge[0]) and twoday_graph.has_node(edge[1])]

        twoday_graph.add_edges_from(between_edges_, attr_dict={'alpha': .2, 'zorder': 0})
        ut.nx_ensure_agraph_color(twoday_graph)

        layoutkw['splines'] = 'line'
        layoutkw['prog'] = 'neato'
        agraph = pt.nx_agraph_layout(twoday_graph, inplace=True, return_agraph=True, **layoutkw)[-1]  # NOQA
        if False:
            fpath = ut.truepath('~/ggr_graph.png')
            agraph.draw(fpath)
            ut.startfile(fpath)

        if debug:
            print('twoday_graph(post) info' + ut.repr3(ut.graph_info(twoday_graph)))

        _ = pt.show_nx(twoday_graph, layout='custom', node_labels=False, as_directed=False)  # NOQA
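
The translate_graph_to_origin / stack_graphs helpers above lean on utool's graph bounding-box utilities, which act on graphviz 'pos' attributes. A self-contained sketch of the same stacking idea on ordinary NetworkX layout dictionaries (hypothetical helper, for illustration only):

import networkx as nx

def stack_positions(pos_list, pad=10.0):
    """Offset a list of {node: (x, y)} layouts so they sit side by side."""
    stacked = {}
    xoff = 0.0
    for pos in pos_list:
        xs = [x for x, y in pos.values()]
        ys = [y for x, y in pos.values()]
        minx, width = min(xs), max(xs) - min(xs)
        miny = min(ys)
        for node, (x, y) in pos.items():
            stacked[node] = (x - minx + xoff, y - miny)  # translate to origin, then shift
        xoff += width + pad
    return stacked

# g1 = nx.relabel_nodes(nx.path_graph(4), lambda n: ('a', n))
# g2 = nx.relabel_nodes(nx.cycle_graph(5), lambda n: ('b', n))
# pos = stack_positions([nx.spring_layout(g1), nx.spring_layout(g2)])
# nx.draw(nx.compose(g1, g2), pos)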
Example #17
def autogen_ipynb(ibs, launch=None, run=None):
    r"""
    Autogenerates standard IBEIS Image Analysis IPython notebooks.

    CommandLine:
        python -m wbia autogen_ipynb --run --db lynx
        python -m wbia autogen_ipynb --run --db lynx

        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST -p :proot=smk,num_words=64000 default
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST --asreport
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST --noexample --withtags
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST

        python -m wbia autogen_ipynb --ipynb --db STS_SandTigers

        python -m wbia autogen_ipynb --db PZ_MTEST
        # TODO: Add support for dbdir to be specified
        python -m wbia autogen_ipynb --db ~/work/PZ_MTEST

        python -m wbia autogen_ipynb --ipynb --db Oxford -a default:qhas_any=\(query,\),dpername=1,exclude_reference=True,dminqual=good
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST -a default -t best:lnbnn_normalizer=[None,normlnbnn-test]

        python -m wbia.templates.generate_notebook --exec-autogen_ipynb --db wd_peter_blinston --ipynb

        python -m wbia autogen_ipynb --db PZ_Master1 --ipynb
        python -m wbia autogen_ipynb --db PZ_Master1 -a timectrl:qindex=0:100 -t best best:normsum=True --ipynb --noexample
        python -m wbia autogen_ipynb --db PZ_Master1 -a timectrl --run
        jupyter-notebook Experiments-lynx.ipynb
        killall python

        python -m wbia autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW -a default:has_any=hasnotch
        python -m wbia autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW default:proot=vsmany -a default:has_any=hasnotch,mingt=2,qindex=0:50 --noexample

        python -m wbia autogen_ipynb --db testdb_curvrank --ipynb -t default:proot=CurvRankDorsal
        python -m wbia autogen_ipynb --db testdb_curvrank --ipynb -t default:proot=CurvRankFluke
        python -m wbia autogen_ipynb --db PW_Master --ipynb -t default:proot=CurvRankDorsal

        python -m wbia autogen_ipynb --db testdb_identification --ipynb -t default:proot=Deepsense

    Ignore:
        python -m wbia autogen_ipynb --db WS_ALL

    Example:
        >>> # SCRIPT
        >>> from wbia.templates.generate_notebook import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='testdb1')
        >>> result = autogen_ipynb(ibs)
        >>> print(result)
    """
    dbname = ibs.get_dbname()
    fname = 'Experiments-' + dbname
    nb_fpath = fname + '.ipynb'
    if ut.get_argflag('--cells'):
        notebook_cells = make_wbia_cell_list(ibs)
        print('\n# ---- \n'.join(notebook_cells))
        return
    # TODO: Add support for dbdir to be specified
    notebook_str = make_wbia_notebook(ibs)
    ut.writeto(nb_fpath, notebook_str)
    run = ut.get_argflag('--run') if run is None else run
    launch = launch if launch is not None else ut.get_argflag('--ipynb')
    if run:
        run_nb = ut.run_ipython_notebook(notebook_str)
        output_fpath = ut.export_notebook(run_nb, fname)
        ut.startfile(output_fpath)
    elif launch:
        command = ' '.join(
            [
                'jupyter-notebook',
                '--NotebookApp.iopub_data_rate_limit=10000000',
                '--NotebookApp.token=',
                nb_fpath,
            ]
        )
        ut.cmd2(command, detatch=True, verbose=True)
    else:
        print('notebook_str =\n%s' % (notebook_str,))
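
The launch branch shells out to jupyter-notebook through ut.cmd2(..., detatch=True). A hedged standard-library sketch of launching the same command without blocking the caller:

import subprocess

def launch_notebook(nb_fpath):
    # Start the notebook server in the background and return immediately
    return subprocess.Popen([
        'jupyter-notebook',
        '--NotebookApp.iopub_data_rate_limit=10000000',
        '--NotebookApp.token=',
        nb_fpath,
    ])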