Example No. 1
def write_script_lines(line_list, fname):
    exename = 'python'
    regen_cmd = (exename + ' ' + ' '.join(sys.argv)).replace(
        expanduser('~'), '~')
    script_lines = []
    script_lines.append('#!/bin/sh')
    script_lines.append("echo << 'EOF' > /dev/null")
    script_lines.append('RegenCommand:')
    script_lines.append('   ' + regen_cmd)
    script_lines.append('CommandLine:')
    script_lines.append('   sh ' + fname)
    script_lines.append('dont forget to tmuxnew')
    script_lines.append('EOF')
    script_lines.extend(line_list)
    script = '\n'.join(script_lines)
    logger.info(script)
    import wbia
    from os.path import dirname, join

    dpath = dirname(ut.get_module_dir(wbia))
    fpath = join(dpath, fname)
    if not ut.get_argflag('--dryrun'):
        ut.writeto(fpath, script)
        ut.chmod_add_executable(fpath)
    return fname, script, line_list
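A minimal usage sketch, assuming the write_script_lines definition above is importable; the job commands are placeholders, not taken from the original source:

# Hypothetical usage: bundle two shell commands into an executable run_jobs.sh.
# write_script_lines prepends the regeneration header and, unless --dryrun is
# passed, writes the script next to the wbia module and marks it executable.
line_list = [
    'echo "job one"',
    'echo "job two"',
]
fname, script, _ = write_script_lines(line_list, 'run_jobs.sh')
print(script)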
Example No. 2
def add_to_win32_PATH(script_fpath, *add_path_list):
    r"""
    Writes a registry script to update the PATH variable into the sync registry

    CommandLine:
        python -m utool.util_win32 --test-add_to_win32_PATH --newpath "C:\Program Files (x86)\Graphviz2.38\bin"

    Example:
        >>> # SCRIPT
        >>> from utool.util_win32 import *  # NOQA
        >>> script_fpath = join(ut.truepath('~'), 'Sync/win7/registry', 'UPDATE_PATH.reg')
        >>> new_path = ut.get_argval('--newpath', str, default=None)
        >>> result = add_to_win32_PATH(script_fpath, new_path)
        >>> print(result)
    """
    import utool as ut
    write_dir = dirname(script_fpath)
    key = '[HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Session Manager\Environment]'
    rtype = 'REG_EXPAND_SZ'
    # Read current PATH values
    win_pathlist = list(os.environ['PATH'].split(os.path.pathsep))
    new_path_list = ut.unique_ordered(win_pathlist + list(add_path_list))
    #new_path_list = unique_ordered(win_pathlist, rob_pathlist)
    print('\n'.join(new_path_list))
    pathtxt = pathsep.join(new_path_list)
    varval_list = [('Path', pathtxt)]
    regfile_str = make_regfile_str(key, varval_list, rtype)
    ut.view_directory(write_dir)
    print(regfile_str)
    ut.writeto(script_fpath, regfile_str, mode='wb')
    print('Please have an admin run the script. You may need to restart')
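A hedged invocation sketch; the script path is a placeholder, and the generated .reg file still has to be run by an administrator:

# Hypothetical call: write a .reg script that appends one directory to the machine PATH.
script_fpath = r'C:\Users\me\Sync\win7\registry\UPDATE_PATH.reg'  # placeholder location
add_to_win32_PATH(script_fpath, r'C:\Program Files (x86)\Graphviz2.38\bin')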
Example No. 3
def dev_autogen_explicit_injects():
    r"""
    CommandLine:
        python -m ibeis --tf dev_autogen_explicit_injects

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.controller_inject import *  # NOQA
        >>> dev_autogen_explicit_injects()
    """
    import ibeis  # NOQA
    classname = CONTROLLER_CLASSNAME
    regen_command = (
        'python -m ibeis.control.controller_inject '
        '--exec-dev_autogen_explicit_injects')
    import ibeis.control.IBEISControl
    conditional_imports = [
        modname for modname in ibeis.control.IBEISControl.AUTOLOAD_PLUGIN_MODNAMES
        if isinstance(modname, tuple)
    ]
    source_block = ut.autogen_explicit_injectable_metaclass(
        classname, regen_command, conditional_imports)
    dpath = ut.get_module_dir(ibeis.control.IBEISControl)
    fpath = ut.unixjoin(dpath, '_autogen_explicit_controller.py')
    ut.writeto(fpath, source_block)
Example No. 4
 def exec_(script):
     import utool as ut
     print('+**** exec %s script *******' % (script.type_))
     print('repo = %r' % (repo,))
     with ut.ChdirContext(repo.dpath):
         if script.is_fpath_valid():
             normbuild_flag = '--no-rmbuild'
             if ut.get_argflag(normbuild_flag):
                 ut.cmd(script.fpath + ' ' + normbuild_flag)
             else:
                 ut.cmd(script.fpath)
         else:
             if script.text is not None:
                 print('ABOUT TO EXECUTE')
                 ut.print_code(script.text, 'bash')
                 if ut.are_you_sure('execute above script?'):
                     from os.path import join
                     scriptdir = ut.ensure_app_resource_dir('utool',
                                                            'build_scripts')
                     script_path = join(scriptdir,
                                        'script_' + script.type_ + '_' +
                                        ut.hashstr27(script.text) + '.sh')
                     ut.writeto(script_path, script.text)
                     _ = ut.cmd('bash ', script_path)  # NOQA
             else:
                 print("CANT QUITE EXECUTE THIS YET")
                 ut.print_code(script.text, 'bash')
     #os.system(scriptname)
     print('L**** exec %s script *******' % (script.type_))
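exec_ only touches its argument through type_, fpath, text, and is_fpath_valid() (plus a repo object from the enclosing scope); a hypothetical stand-in that satisfies that interface, inferred from usage rather than the real class:

from os.path import exists

class DummyScript(object):
    # Hypothetical stand-in showing the attributes exec_ expects.
    def __init__(self, type_, fpath=None, text=None):
        self.type_ = type_   # label printed in the banner, e.g. 'build'
        self.fpath = fpath   # path to an on-disk script, if one exists
        self.text = text     # inline script text used when fpath is not valid

    def is_fpath_valid(self):
        return self.fpath is not None and exists(self.fpath)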
Example No. 5
    def resolve_conflicts(fpath, strat, force=False, verbose=True):
        """
        Parses merge conflicts and keeps either the 'ours' or 'theirs' version
        """
        import utool as ut
        import re
        top_pat = re.escape('<' * 7)
        mid_pat = re.escape('=' * 7)
        bot_pat = re.escape('>' * 7)
        flags = re.MULTILINE | re.DOTALL
        # Pattern to remove the top part
        theirs_pat1 = re.compile('^%s.*?%s.*?$\n' % (top_pat, mid_pat), flags=flags)
        theirs_pat2 = re.compile('^%s.*?$\n' % (bot_pat), flags=flags)
        # Pattern to remove the bottom part
        ours_pat1   = re.compile('^%s.*?%s.*?$\n' % (mid_pat, bot_pat), flags=flags)
        ours_pat2   = re.compile('^%s.*?$\n' % (top_pat), flags=flags)
        strat_pats = {
            'theirs': [theirs_pat1, theirs_pat2],
            'ours': [ours_pat1, ours_pat2],
        }

        text_in = ut.readfrom(fpath)
        text_out = text_in
        for pat in strat_pats[strat]:
            text_out = pat.sub('', text_out)
        if verbose:
            ut.print_difftext(ut.difftext(text_in, text_out, num_context_lines=3))

        if force:
            ut.writeto(fpath, text_out)
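A self-contained sketch of what the 'theirs' strategy keeps, run on an in-memory string instead of a file; the conflict text is made up for illustration:

import re

text_in = (
    'keep this line\n'
    '<<<<<<< HEAD\n'
    'our version of the line\n'
    '=======\n'
    'their version of the line\n'
    '>>>>>>> feature-branch\n'
    'keep this line too\n'
)
flags = re.MULTILINE | re.DOTALL
top, mid, bot = re.escape('<' * 7), re.escape('=' * 7), re.escape('>' * 7)
# 'theirs': drop the <<<<<<< ... ======= block, then the >>>>>>> marker line.
text_out = re.compile('^%s.*?%s.*?$\n' % (top, mid), flags).sub('', text_in)
text_out = re.compile('^%s.*?$\n' % (bot,), flags).sub('', text_out)
print(text_out)  # only 'their version of the line' survives between the kept lines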
Example No. 6
            def exec_(script):
                import utool as ut

                print("+**** exec %s script *******" % (script.type_))
                print("repo = %r" % (repo,))
                with ut.ChdirContext(repo.dpath):
                    if script.is_fpath_valid():
                        normbuild_flag = "--no-rmbuild"
                        if ut.get_argflag(normbuild_flag):
                            ut.cmd(script.fpath + " " + normbuild_flag)
                        else:
                            ut.cmd(script.fpath)
                    else:
                        if script.text is not None:
                            print("ABOUT TO EXECUTE")
                            ut.print_code(script.text, "bash")
                            if ut.are_you_sure("execute above script?"):
                                from os.path import join

                                scriptdir = ut.ensure_app_resource_dir("utool", "build_scripts")
                                script_path = join(
                                    scriptdir, "script_" + script.type_ + "_" + ut.hashstr27(script.text) + ".sh"
                                )
                                ut.writeto(script_path, script.text)
                                _ = ut.cmd("bash ", script_path)  # NOQA
                        else:
                            print("CANT QUITE EXECUTE THIS YET")
                            ut.print_code(script.text, "bash")
                # os.system(scriptname)
                print("L**** exec %s script *******" % (script.type_))
Example No. 7
    def _setup_links(self, cfg_prefix, config=None):
        """
        Called only when setting up an experiment to make a measurement.

        Creates symlinks such that all data is written to a directory that
        depends on a computer name, cfg_prefix and an arbitrary configuration
        dict.

        The link in the base directory is then forced to point to abs_dpath.
        """
        # Setup directory
        from os.path import expanduser
        assert self.dname is not None

        computer_id = ut.get_argval('--comp', default=ut.get_computer_name())

        conf_dpath = ut.ensuredir((expanduser(self.base_dpath), 'configured'))
        comp_dpath = ut.ensuredir((join(conf_dpath, computer_id)))

        link_dpath = ut.ensuredir((self.base_dpath, 'link'))

        # if True:
        #     # move to new system
        #     old_dpath = join(conf_dpath, self.dbname + '_' + computer_id)
        #     if exists(old_dpath):
        #         ut.move(old_dpath, join(comp_dpath, self.dbname))

        try:
            cfgstr = ut.repr3(config.getstate_todict_recursive())
        except AttributeError:
            cfgstr = ut.repr3(config)

        hashid = ut.hash_data(cfgstr)[0:6]
        suffix = '_'.join([cfg_prefix, hashid])
        dbcode = self.dbname + '_' + suffix

        abs_dpath = ut.ensuredir(join(comp_dpath, dbcode))

        self.dname = dbcode
        self.dpath = abs_dpath
        self.abs_dpath = abs_dpath

        # Place a basic link in the base link directory
        links = []
        links.append(expanduser(join(link_dpath, self.dbname)))
        # # Make a configured but computer agnostic link
        # links.append(expanduser(join(conf_dpath, self.dbname)))

        for link in links:
            try:
                # Overwrite any existing link so the most recently used is
                # the default
                self.link = ut.symlink(abs_dpath, link, overwrite=True)
            except Exception:
                if exists(abs_dpath):
                    newpath = ut.non_existing_path(abs_dpath, suffix='_old')
                    ut.move(link, newpath)
                    self.link = ut.symlink(abs_dpath, link)

        ut.writeto(join(abs_dpath, 'info.txt'), cfgstr)
Example No. 8
def add_to_win32_PATH(script_fpath, *add_path_list):
    r"""
    Writes a registry script to update the PATH variable into the sync registry

    CommandLine:
        python -m utool.util_win32 --test-add_to_win32_PATH --newpath "C:\Program Files (x86)\Graphviz2.38\bin"

    Example:
        >>> # SCRIPT
        >>> from utool.util_win32 import *  # NOQA
        >>> script_fpath = join(ut.truepath('~'), 'Sync/win7/registry', 'UPDATE_PATH.reg')
        >>> new_path = ut.get_argval('--newpath', str, default=None)
        >>> result = add_to_win32_PATH(script_fpath, new_path)
        >>> print(result)
    """
    import utool as ut
    write_dir = dirname(script_fpath)
    key = '[HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Session Manager\Environment]'
    rtype = 'REG_EXPAND_SZ'
    # Read current PATH values
    win_pathlist = list(os.environ['PATH'].split(os.path.pathsep))
    new_path_list = ut.unique_ordered(win_pathlist + list(add_path_list))
    #new_path_list = unique_ordered(win_pathlist, rob_pathlist)
    print('\n'.join(new_path_list))
    pathtxt = pathsep.join(new_path_list)
    varval_list = [('Path', pathtxt)]
    regfile_str = make_regfile_str(key, varval_list, rtype)
    ut.view_directory(write_dir)
    print(regfile_str)
    ut.writeto(script_fpath, regfile_str, mode='wb')
    print('Please have an admin run the script. You may need to restart')
Example No. 9
def tryout_web_graphs(self, infr):
    """
    https://plot.ly/python/

    http://bokeh.pydata.org/en/latest/

    pip install bokeh

    Notes:
        http://www.coppelia.io/2014/07/an-a-to-z-of-extra-features-for-the-d3-force-layout/
        http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/
        pip install plotly  # eww need to sign up and get a key
        http://igraph.org/
    import mpld3
    mpld3.save_html(fig, open('fig.html', 'w'))
    mpld3.save_json(fig, open('fig.json', 'w'))
    fig = pt.gcf()
    """
    #import plottool as pt
    # http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/
    from networkx.readwrite import json_graph

    G = infr.graph
    data = json_graph.node_link_data(G)
    json_text = ut.to_json(data, pretty=True)
    ut.writeto('graph.json', json_text)
    ut.editfile('graph.json')

    ut.startfile('d3_example.html')
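The JSON dump itself only needs networkx; a minimal self-contained sketch using the standard json module in place of ut.to_json (the graph here is a stand-in for infr.graph):

import json
import networkx as nx
from networkx.readwrite import json_graph

G = nx.Graph()
G.add_edge('annot1', 'annot2', weight=0.9)
data = json_graph.node_link_data(G)  # dict with 'nodes', 'links', and graph attributes
with open('graph.json', 'w') as file_:
    json.dump(data, file_, indent=2)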
Example No. 10
def ensure_text(fname, text, repo_dpath='.', force=None, locals_={}, chmod=None):
    """
    Args:
        fname (str):  file name
        text (str):
        repo_dpath (str):  directory path string(default = '.')
        force (bool): (default = False)
        locals_ (dict): (default = {})

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_git import *  # NOQA
        >>> import utool as ut
        >>> result = setup_repo()
        >>> print(result)
    """
    import utool as ut
    ut.colorprint('Ensuring fname=%r' % (fname), 'yellow')

    if force is None and ut.get_argflag('--force-%s' % (fname,)):
        force = True

    fpath = join(repo_dpath, fname)
    if force or not ut.checkpath(fpath, verbose=2, n=5):
        text_ = ut.remove_codeblock_syntax_sentinals(text)
        fmtkw = locals_.copy()
        fmtkw['fname'] = fname
        text_ = text_.format(**fmtkw) + '\n'
        ut.writeto(fpath, text_)
        try:
            if chmod:
                ut.chmod(fpath, chmod)
        except Exception as ex:
            ut.printex(ex, iswarning=True)
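A hedged usage sketch; the template is hypothetical. The STARTBLOCK/ENDBLOCK sentinels are stripped by remove_codeblock_syntax_sentinals, and {fname} is filled in from the file name plus locals_:

import utool as ut

gitignore_template = ut.codeblock(
    '''
    # STARTBLOCK
    # Autogenerated ignore rules for {fname}
    *.pyc
    build/
    # ENDBLOCK
    ''')
ensure_text('.gitignore', gitignore_template, repo_dpath='.')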
Example No. 11
 def exec_(script):
     import utool as ut
     print('+**** exec %s script *******' % (script.type_))
     print('repo = %r' % (repo,))
     with ut.ChdirContext(repo.dpath):
         if script.is_fpath_valid():
             normbuild_flag = '--no-rmbuild'
             if ut.get_argflag(normbuild_flag):
                 ut.cmd(script.fpath + ' ' + normbuild_flag)
             else:
                 ut.cmd(script.fpath)
         else:
             if script.text is not None:
                 print('ABOUT TO EXECUTE')
                 ut.print_code(script.text, 'bash')
                 if ut.are_you_sure('execute above script?'):
                     from os.path import join
                     scriptdir = ut.ensure_app_resource_dir('utool',
                                                            'build_scripts')
                     script_path = join(scriptdir,
                                        'script_' + script.type_ + '_' +
                                        ut.hashstr27(script.text) + '.sh')
                     ut.writeto(script_path, script.text)
                     _ = ut.cmd('bash ', script_path)  # NOQA
             else:
                 print("CANT QUITE EXECUTE THIS YET")
                 ut.print_code(script.text, 'bash')
     #os.system(scriptname)
     print('L**** exec %s script *******' % (script.type_))
Example No. 12
def inject_python_code2(fpath, patch_code, tag):
    """ Does autogeneration stuff """
    import utool as ut
    text = ut.readfrom(fpath)
    start_tag = '# <%s>' % tag
    end_tag = '# </%s>' % tag
    new_text = ut.replace_between_tags(text, patch_code, start_tag, end_tag)
    ut.writeto(fpath, new_text)
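The tag-based patching boils down to swapping whatever sits between '# <tag>' and '# </tag>' markers while keeping the markers themselves; a standalone sketch of that idea with plain re (ut.replace_between_tags is the real helper used above):

import re

def replace_between_tags_sketch(text, patch_code, start_tag, end_tag):
    # Keep the two marker lines, replace the body between them.
    pattern = re.escape(start_tag) + '.*?' + re.escape(end_tag)
    repl = start_tag + '\n' + patch_code + '\n' + end_tag
    return re.sub(pattern, lambda _: repl, text, flags=re.DOTALL)

text = 'header\n# <autogen>\nold code\n# </autogen>\nfooter\n'
print(replace_between_tags_sketch(text, 'new code', '# <autogen>', '# </autogen>'))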
Example No. 13
def inject_python_code2(fpath, patch_code, tag):
    """ Does autogeneration stuff """
    import utool as ut
    text = ut.readfrom(fpath)
    start_tag = '# <%s>' % tag
    end_tag = '# </%s>' % tag
    new_text = ut.replace_between_tags(text, patch_code, start_tag, end_tag)
    ut.writeto(fpath, new_text)
Example No. 14
def autogen_ipynb(ibs, launch=None, run=None):
    r"""
    Autogenerates standard IBEIS Image Analysis IPython notebooks.

    CommandLine:
        python -m ibeis --tf autogen_ipynb --run --db lynx

        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST --asreport
        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST --noexample --withtags

        python -m ibeis --tf autogen_ipynb --db PZ_MTEST
        # TODO: Add support for dbdir to be specified
        python -m ibeis --tf autogen_ipynb --db ~/work/PZ_MTEST

        python -m ibeis --tf autogen_ipynb --ipynb --db Oxford -a default:qhas_any=\(query,\),dpername=1,exclude_reference=True,dminqual=good
        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST -a default -t best:lnbnn_normalizer=[None,normlnbnn-test]

        python -m ibeis.templates.generate_notebook --exec-autogen_ipynb --db wd_peter_blinston --ipynb

        python -m ibeis --tf autogen_ipynb --db PZ_Master1 --ipynb
        python -m ibeis --tf autogen_ipynb --db PZ_Master1 -a timectrl:qindex=0:100 -t best best:normsum=True --ipynb --noexample
        python -m ibeis --tf autogen_ipynb --db PZ_Master1 -a timectrl --run
        jupyter-notebook Experiments-lynx.ipynb
        killall python

        python -m ibeis --tf autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW -a default:has_any=hasnotch
        python -m ibeis --tf autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW default:proot=vsmany -a default:has_any=hasnotch,mingt=2,qindex=0:50 --noexample

    Example:
        >>> # SCRIPT
        >>> from ibeis.templates.generate_notebook import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> result = autogen_ipynb(ibs)
        >>> print(result)
    """
    dbname = ibs.get_dbname()
    fname = 'Experiments-' + dbname
    nb_fpath = fname + '.ipynb'
    if ut.get_argflag('--cells'):
        notebook_cells = make_ibeis_cell_list(ibs)
        print('\n# ---- \n'.join(notebook_cells))
        return
    # TODO: Add support for dbdir to be specified
    notebook_str = make_ibeis_notebook(ibs)
    ut.writeto(nb_fpath, notebook_str)
    run = ut.get_argflag('--run') if run is None else run
    launch = launch if launch is not None else ut.get_argflag('--ipynb')
    if run:
        run_nb = ut.run_ipython_notebook(notebook_str)
        output_fpath = ut.export_notebook(run_nb, fname)
        ut.startfile(output_fpath)
    elif launch:
        ut.cmd('jupyter-notebook', nb_fpath, detatch=True)
        #ut.cmd('ipython-notebook', nb_fpath)
        #ut.startfile(nb_fpath)
    else:
        print('notebook_str =\n%s' % (notebook_str, ))
Example No. 15
def autogen_ipynb(ibs, launch=None, run=None):
    r"""
    Autogenerates standard IBEIS Image Analysis IPython notebooks.

    CommandLine:
        python -m ibeis --tf autogen_ipynb --run --db lynx

        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST --asreport
        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST --noexample --withtags

        python -m ibeis --tf autogen_ipynb --db PZ_MTEST
        # TODO: Add support for dbdir to be specified
        python -m ibeis --tf autogen_ipynb --db ~/work/PZ_MTEST

        python -m ibeis --tf autogen_ipynb --ipynb --db Oxford -a default:qhas_any=\(query,\),dpername=1,exclude_reference=True,dminqual=good
        python -m ibeis --tf autogen_ipynb --ipynb --db PZ_MTEST -a default -t best:lnbnn_normalizer=[None,normlnbnn-test]

        python -m ibeis.templates.generate_notebook --exec-autogen_ipynb --db wd_peter_blinston --ipynb

        python -m ibeis --tf autogen_ipynb --db PZ_Master1 --ipynb
        python -m ibeis --tf autogen_ipynb --db PZ_Master1 -a timectrl:qindex=0:100 -t best best:normsum=True --ipynb --noexample
        python -m ibeis --tf autogen_ipynb --db PZ_Master1 -a timectrl --run
        jupyter-notebook Experiments-lynx.ipynb
        killall python

        python -m ibeis --tf autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW -a default:has_any=hasnotch
        python -m ibeis --tf autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW default:proot=vsmany -a default:has_any=hasnotch,mingt=2,qindex=0:50 --noexample

    Example:
        >>> # SCRIPT
        >>> from ibeis.templates.generate_notebook import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> result = autogen_ipynb(ibs)
        >>> print(result)
    """
    dbname = ibs.get_dbname()
    fname = 'Experiments-' + dbname
    nb_fpath = fname + '.ipynb'
    if ut.get_argflag('--cells'):
        notebook_cells = make_ibeis_cell_list(ibs)
        print('\n# ---- \n'.join(notebook_cells))
        return
    # TODO: Add support for dbdir to be specified
    notebook_str = make_ibeis_notebook(ibs)
    ut.writeto(nb_fpath, notebook_str)
    run = ut.get_argflag('--run') if run is None else run
    launch = launch if launch is not None else ut.get_argflag('--ipynb')
    if run:
        run_nb = ut.run_ipython_notebook(notebook_str)
        output_fpath = ut.export_notebook(run_nb, fname)
        ut.startfile(output_fpath)
    elif launch:
        ut.cmd('jupyter-notebook', nb_fpath, detatch=True)
        #ut.cmd('ipython-notebook', nb_fpath)
        #ut.startfile(nb_fpath)
    else:
        print('notebook_str =\n%s' % (notebook_str,))
Example No. 16
def convert_tests_from_ibeis_to_nose(module_list):
    # PARSE OUT TESTABLE DOCTESTTUPS
    #import utool as ut
    testtup_list = []
    seen_ = set()

    topimport_list = []

    for module in module_list:
        mod_doctest_tup = ut.get_module_doctest_tup(module=module,
                                                    verbose=False,
                                                    allexamples=True)
        enabled_testtup_list, frame_fpath, all_testflags, module = mod_doctest_tup
        flags = [tup.src not in seen_ for tup in enabled_testtup_list]
        enabled_testtup_list = ut.compress(enabled_testtup_list, flags)
        testtup_list.extend(enabled_testtup_list)
        if len(enabled_testtup_list) > 0:
            topimport_list.append('from %s import *  # NOQA' % (module.__name__,))

    print('Found %d test tups' % (len(testtup_list)))

    autogen_test_src_funcs = []
    #import redbaron
    for testtup in testtup_list:
        name = testtup.name
        num  = testtup.num
        src  = testtup.src
        want = testtup.want
        import re
        src = re.sub('# ENABLE_DOCTEST\n', '', src)
        src = re.sub('from [^*]* import \* *# NOQA\n', '', src)
        src = re.sub(r'from [^*]* import \*\n', '', src)

        src = ut.str_between(src, None, 'ut.quit_if_noshow').rstrip('\n')
        src = ut.str_between(src, None, 'ut.show_if_requested').rstrip('\n')
        # import utool
        # utool.embed()
        """
        """
        #flag = testtup.flag
        if want.endswith('\n'):
            want = want[:-1]
        if want:
            #src_node = redbaron.RedBaron(src)
            #if len(src_node.find_all('name', 'result')) > 0:
            #    src_node.append('assert result == %r' % (want,))
            if '\nresult = ' in src:
                src += '\nassert str(result) == %r' % (want,)
        func_src = 'def test_%s_%d():\n' % (name.replace('.', '_'), num,) + ut.indent(src)
        autogen_test_src_funcs.append(func_src)

    autogen_test_src = '\n'.join(topimport_list) + '\n\n\n' + '\n\n\n'.join(autogen_test_src_funcs) + '\n'
    from ibeis import tests
    from os.path import join
    moddir = ut.get_module_dir(tests)
    ut.writeto(join(moddir, 'test_autogen_nose_tests.py'), autogen_test_src)
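For a single test tuple, the rewrite amounts to appending an assert on result and wrapping the doctest body in a function; a sketch with hypothetical field values:

import utool as ut

name, num = 'some_module.some_func', 0   # hypothetical doctest identifiers
src = 'x = 1 + 1\nresult = str(x)'
want = '2'
if want and '\nresult = ' in src:
    src += '\nassert str(result) == %r' % (want,)
func_src = 'def test_%s_%d():\n' % (name.replace('.', '_'), num) + ut.indent(src)
print(func_src)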
Example No. 17
def dump_profile_text():
    import utool as ut
    print("Dumping Profile Information")
    profile = ut.PROFILE_FUNC
    output_text, summary_text = get_profile_text(profile)
    #profile.dump_stats('out.lprof')
    print(summary_text)
    ut.writeto('profile_output.txt', output_text + '\n' + summary_text)
    ut.writeto('profile_output.%s.txt' % (ut.get_timestamp()),
               output_text + '\n' + summary_text)
Example No. 18
def make_bayes_notebook():
    r"""
    CommandLine:
        python -m wbia.unstable.demobayes --exec-make_bayes_notebook

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.unstable.demobayes import *  # NOQA
        >>> result = make_bayes_notebook()
        >>> print(result)
    """
    from wbia.templates import generate_notebook

    initialize = ut.codeblock(r"""
        # STARTBLOCK
        import os
        os.environ['UTOOL_NO_CNN'] = 'True'
        from wbia.unstable.demobayes import *  # NOQA
        # Matplotlib stuff
        import matplotlib as mpl
        %matplotlib inline
        %load_ext autoreload
        %autoreload
        from IPython.core.display import HTML
        HTML("<style>body .container { width:99% !important; }</style>")
        # ENDBLOCK
        """)
    cell_list_def = [
        initialize,
        show_model_templates,
        demo_modes,
        demo_name_annot_complexity,
        # demo_model_idependencies,
        demo_single_add,
        demo_ambiguity,
        demo_conflicting_evidence,
        demo_annot_idependence_overlap,
    ]

    def format_cell(cell):
        if ut.is_funclike(cell):
            header = '# ' + ut.to_title_caps(ut.get_funcname(cell))
            code = (header,
                    ut.get_func_sourcecode(cell, stripdef=True, stripret=True))
        else:
            code = (None, cell)
        return generate_notebook.format_cells(code)

    cell_list = ut.flatten([format_cell(cell) for cell in cell_list_def])
    nbstr = generate_notebook.make_notebook(cell_list)
    logger.info('nbstr = %s' % (nbstr, ))
    fpath = 'demobayes.ipynb'
    ut.writeto(fpath, nbstr)
    ut.startfile(fpath)
Example No. 19
 def dummy_preproc_spam(depc, *args, **kwargs):
     config = kwargs.get('config', None)
     if config is None:
         config = {}
     print('Computing notch')
     ut.writeto('tmp.txt', ut.lorium_ipsum())
     for x in zip(*args):
         size = (42, 21)
         uuid = ut.get_zero_uuid()
         vector = np.ones(3)
         yield ('spam', 3665, size, uuid, vector, 'tmp.txt')
Example No. 20
 def dummy_preproc_spam(depc, *args, **kwargs):
     config = kwargs.get('config', None)
     if config is None:
         config = {}
     print('[preproc] Computing spam')
     ut.writeto('tmp.txt', ut.lorium_ipsum())
     for x in zip(*args):
         size = (42, 21)
         uuid = ut.get_zero_uuid()
         vector = np.ones(3)
         yield ('spam', 3665, size, uuid, vector, 'tmp.txt')
Example No. 21
def make_bayes_notebook():
    r"""
    CommandLine:
        python -m ibeis.algo.hots.demobayes --exec-make_bayes_notebook

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.demobayes import *  # NOQA
        >>> result = make_bayes_notebook()
        >>> print(result)
    """
    from ibeis.templates import generate_notebook
    initialize = ut.codeblock(
        r'''
        # STARTBLOCK
        import os
        os.environ['UTOOL_NO_CNN'] = 'True'
        from ibeis.algo.hots.demobayes import *  # NOQA
        # Matplotlib stuff
        import matplotlib as mpl
        %matplotlib inline
        %load_ext autoreload
        %autoreload
        from IPython.core.display import HTML
        HTML("<style>body .container { width:99% !important; }</style>")
        # ENDBLOCK
        '''
    )
    cell_list_def = [
        initialize,
        show_model_templates,
        demo_modes,
        demo_name_annot_complexity,
        ###demo_model_idependencies,
        demo_single_add,
        demo_ambiguity,
        demo_conflicting_evidence,
        demo_annot_idependence_overlap,
    ]
    def format_cell(cell):
        if ut.is_funclike(cell):
            header = '# ' + ut.to_title_caps(ut.get_funcname(cell))
            code = (header, ut.get_func_sourcecode(cell, stripdef=True, stripret=True))
        else:
            code = (None, cell)
        return generate_notebook.format_cells(code)

    cell_list = ut.flatten([format_cell(cell) for cell in cell_list_def])
    nbstr = generate_notebook.make_notebook(cell_list)
    print('nbstr = %s' % (nbstr,))
    fpath = 'demobayes.ipynb'
    ut.writeto(fpath, nbstr)
    ut.startfile(fpath)
Example No. 22
def render_html(html_str):
    """
    Writes html_str to a temporary HTML file and opens it in a web browser
    """
    import utool as ut
    from os.path import abspath
    import webbrowser

    html_dpath = ut.ensure_app_resource_dir('utool', 'temp_html')
    fpath = abspath(ut.unixjoin(html_dpath, 'temp.html'))
    url = 'file://' + fpath
    ut.writeto(fpath, html_str)
    webbrowser.open(url)
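A minimal usage sketch; the markup is arbitrary:

render_html('<html><body><h1>Results</h1><p>42 matches</p></body></html>')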
Example No. 23
def dump_profile_text():
    import utool as ut
    print("Dumping Profile Information")
    profile = ut.PROFILE_FUNC
    try:
        output_text, summary_text = get_profile_text(profile)
    except AttributeError:
        print('profile is not on')
    else:
        #profile.dump_stats('out.lprof')
        print(summary_text)
        ut.writeto('profile_output.txt', output_text + '\n' + summary_text)
        ut.writeto('profile_output.%s.txt' % (ut.get_timestamp()),
                   output_text + '\n' + summary_text)
Example No. 24
def convert_tests_from_utool_to_nose(module_list):
    # PARSE OUT TESTABLE DOCTESTTUPS
    #import utool as ut
    testtup_list = []
    seen_ = set()

    topimport_list = []

    for module in module_list:
        mod_doctest_tup = ut.get_module_doctest_tup(module=module,
                                                    verbose=False,
                                                    allexamples=True)
        enabled_testtup_list, frame_fpath, all_testflags, module = mod_doctest_tup
        flags = [tup.src not in seen_ for tup in enabled_testtup_list]
        enabled_testtup_list = ut.compress(enabled_testtup_list, flags)
        testtup_list.extend(enabled_testtup_list)
        if len(enabled_testtup_list) > 0:
            topimport_list.append('from %s import *  # NOQA' % (module.__name__,))

    print('Found %d test tups' % (len(testtup_list)))

    autogen_test_src_funcs = []
    #import redbaron
    for testtup in testtup_list:
        name = testtup.name
        num  = testtup.num
        src  = testtup.src
        want = testtup.want
        import re
        src = re.sub('# ENABLE_DOCTEST\n', '', src)
        src = re.sub('from [^*]* import \* *# NOQA\n', '', src)
        src = re.sub('from [^*]* import \*\n', '', src)
        #flag = testtup.flag
        if want.endswith('\n'):
            want = want[:-1]
        if want:
            #src_node = redbaron.RedBaron(src)
            #if len(src_node.find_all('name', 'result')) > 0:
            #    src_node.append('assert result == %r' % (want,))
            if '\nresult = ' in src:
                src += '\nassert str(result) == %r' % (want,)
        func_src = 'def test_%s_%d():\n' % (name.replace('.', '_'), num,) + ut.indent(src)
        autogen_test_src_funcs.append(func_src)

    autogen_test_src = '\n'.join(topimport_list) + '\n\n\n' + '\n\n\n'.join(autogen_test_src_funcs) + '\n'
    from utool import tests
    from os.path import join
    moddir = ut.get_module_dir(tests)
    ut.writeto(join(moddir, 'test_autogen_nose_tests.py'), autogen_test_src)
Example No. 25
def ensure_text(fname,
                text,
                repo_dpath='.',
                force=None,
                locals_={},
                chmod=None):
    """
    Args:
        fname (str):  file name
        text (str):
        repo_dpath (str):  directory path string(default = '.')
        force (bool): (default = False)
        locals_ (dict): (default = {})

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_git import *  # NOQA
        >>> import utool as ut
        >>> result = setup_repo()
        >>> print(result)
    """
    import utool as ut
    ut.colorprint('Ensuring fname=%r' % (fname), 'yellow')

    if force is None and ut.get_argflag('--force-%s' % (fname, )):
        force = True

    fpath = join(repo_dpath, fname)
    if force or not ut.checkpath(fpath, verbose=2, n=5):
        text_ = ut.remove_codeblock_syntax_sentinals(text)
        fmtkw = locals_.copy()
        fmtkw['fname'] = fname
        text_ = text_.format(**fmtkw) + '\n'
        ut.writeto(fpath, text_)
        try:
            if chmod:
                ut.chmod(fpath, chmod)
        except Exception as ex:
            ut.printex(ex, iswarning=True)
Example No. 26
def write_script_lines(line_list, fname):
    exename = 'python'
    regen_cmd = (exename + ' ' + ' '.join(sys.argv)).replace(expanduser('~'), '~')
    script_lines = []
    script_lines.append('#!/bin/sh')
    script_lines.append('echo << \'EOF\' > /dev/null')
    script_lines.append('RegenCommand:')
    script_lines.append('   ' + regen_cmd)
    script_lines.append('CommandLine:')
    script_lines.append('   sh ' + fname)
    script_lines.append('dont forget to tmuxnew')
    script_lines.append('EOF')
    script_lines.extend(line_list)
    script = '\n'.join(script_lines)
    print(script)
    import ibeis
    from os.path import dirname, join
    dpath = dirname(ut.get_module_dir(ibeis))
    fpath = join(dpath, fname)
    if not ut.get_argflag('--dryrun'):
        ut.writeto(fpath, script)
        ut.chmod_add_executable(fpath)
    return fname, script, line_list
Example No. 27
def update_wildbook_install_config(webapps_dpath, unpacked_war_dpath):
    """
    CommandLine:
        python -m ibeis ensure_local_war
        python -m ibeis update_wildbook_install_config
        python -m ibeis update_wildbook_install_config --show

    Example:
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> import ibeis
        >>> tomcat_dpath = find_installed_tomcat()
        >>> webapps_dpath = join(tomcat_dpath, 'webapps')
        >>> wb_target = ibeis.const.WILDBOOK_TARGET
        >>> unpacked_war_dpath = join(webapps_dpath, wb_target)
        >>> locals_ = ut.exec_func_src(update_wildbook_install_config, globals())
        >>> #update_wildbook_install_config(webapps_dpath, unpacked_war_dpath)
        >>> ut.quit_if_noshow()
        >>> ut.vd(unpacked_war_dpath)
        >>> ut.editfile(locals_['permission_fpath'])
        >>> ut.editfile(locals_['jdoconfig_fpath'])
        >>> ut.editfile(locals_['asset_store_fpath'])
    """
    mysql_mode = not ut.get_argflag('--nomysql')

    #if ut.get_argflag('--vd'):
    #    ut.vd(unpacked_war_dpath)
    #find_installed_tomcat
    # Make sure permissions are correctly set in wildbook
    # Comment out the line that requires authentication
    permission_fpath = join(unpacked_war_dpath, 'WEB-INF/web.xml')
    ut.assertpath(permission_fpath)
    permission_text = ut.readfrom(permission_fpath)
    lines_to_remove = [
        # '/ImageSetSetMarkedIndividual = authc, roles[admin]'
        '/EncounterSetMarkedIndividual = authc, roles[admin]'
    ]
    new_permission_text = permission_text[:]
    for line in lines_to_remove:
        re.search(re.escape(line), permission_text)
        prefix = ut.named_field('prefix', '\\s*')
        suffix = ut.named_field('suffix', '\\s*\n')
        pattern = ('^' + prefix + re.escape(line) + suffix)
        match = re.search(pattern,
                          permission_text,
                          flags=re.MULTILINE | re.DOTALL)
        if match is None:
            continue
        newline = '<!--%s -->' % (line, )
        repl = ut.bref_field('prefix') + newline + ut.bref_field('suffix')
        new_permission_text = re.sub(pattern,
                                     repl,
                                     permission_text,
                                     flags=re.MULTILINE | re.DOTALL)
        assert new_permission_text != permission_text, (
            'text should have changed')
    if new_permission_text != permission_text:
        print('Need to write new permission texts')
        ut.writeto(permission_fpath, new_permission_text)
    else:
        print('Permission file seems to be ok')

    # Make sure we are using a non-process based database
    jdoconfig_fpath = join(unpacked_war_dpath,
                           'WEB-INF/classes/bundles/jdoconfig.properties')
    print('Fixing backend database config')
    print('jdoconfig_fpath = %r' % (jdoconfig_fpath, ))
    ut.assertpath(jdoconfig_fpath)
    jdoconfig_text = ut.readfrom(jdoconfig_fpath)
    #ut.vd(dirname(jdoconfig_fpath))
    #ut.editfile(jdoconfig_fpath)

    if mysql_mode:
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'mysql',
                                                 False)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'derby', 1)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'sqlite', 1)
        mysql_user = '******'
        mysql_passwd = 'somepassword'
        mysql_dbname = 'ibeiswbtestdb'
        # Use mysql
        jdoconfig_text = re.sub('datanucleus.ConnectionUserName = .*$',
                                'datanucleus.ConnectionUserName = ' + mysql_user,
                                jdoconfig_text,
                                flags=re.MULTILINE)
        jdoconfig_text = re.sub('datanucleus.ConnectionPassword = .*$',
                                'datanucleus.ConnectionPassword = ' + mysql_passwd,
                                jdoconfig_text,
                                flags=re.MULTILINE)
        jdoconfig_text = re.sub(
            'datanucleus.ConnectionURL *= *jdbc:mysql:.*$',
            'datanucleus.ConnectionURL = jdbc:mysql://localhost:3306/' +
            mysql_dbname,
            jdoconfig_text,
            flags=re.MULTILINE)
        jdoconfig_text = re.sub('^.*jdbc:mysql://localhost:3306/shepherd.*$',
                                '',
                                jdoconfig_text,
                                flags=re.MULTILINE)
    else:
        # Use SQLite
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'derby', 1)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'mysql', 1)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'sqlite',
                                                 False)
    ut.writeto(jdoconfig_fpath, jdoconfig_text)

    # Need to make sure wildbook can store information in a reasonable place
    #tomcat_data_dir = join(tomcat_startup_dir, 'webapps', 'wildbook_data_dir')
    tomcat_data_dir = join(webapps_dpath, 'wildbook_data_dir')
    ut.ensuredir(tomcat_data_dir)
    ut.writeto(join(tomcat_data_dir, 'test.txt'), 'A hosted test file')
    asset_store_fpath = join(unpacked_war_dpath, 'createAssetStore.jsp')
    asset_store_text = ut.read_from(asset_store_fpath)
    #data_path_pat = ut.named_field('data_path', 'new File(".*?").toPath')
    new_line = 'LocalAssetStore as = new LocalAssetStore("example Local AssetStore", new File("%s").toPath(), "%s", true);' % (
        tomcat_data_dir, 'http://localhost:8080/' + basename(tomcat_data_dir))
    # HACKY
    asset_store_text2 = re.sub('^LocalAssetStore as = .*$',
                               new_line,
                               asset_store_text,
                               flags=re.MULTILINE)
    ut.writeto(asset_store_fpath, asset_store_text2)
Example No. 28
    need_encoding_fpaths = []

    for pat in pattern_items:
        print('Checking for pattern: %r' % (pat,))
        for fpath in fpath_list:
            pattern = re.escape(pat)
            found_lines, found_lxs = ut.grepfile(fpath, pattern)
            # DID NOT FIND ENCODING LINE
            if len(found_lines) == 0:
                need_encoding_fpaths.append(fpath)

    print('The following fpaths need encoding lines: ' + ut.list_str(need_encoding_fpaths, strvals=True))

    if do_write or show_diff:
        for fpath in need_encoding_fpaths:
            print('\n-----------------\nFound file without encoding line: ' + fpath)
            line_list = ut.read_lines_from(fpath)
            linenum = find_encoding_insert_position(line_list)
            if linenum is not None:
                #print(' * linenum = %r' % (linenum,))
                new_lines = line_list[:linenum] + [encoding_line + '\n'] + line_list[linenum:]
                new_text = ''.join(new_lines)
                if show_diff:
                    old_text = ''.join(line_list)
                    textdiff = ut.get_textdiff(old_text, new_text, num_context_lines=1)
                    print('Diff:')
                    ut.print_difftext(textdiff)
                if do_write:
                    ut.writeto(fpath, new_text)
                    pass
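For context, encoding_line is presumably the PEP 263 coding cookie, and find_encoding_insert_position has to place it on line 1 or 2 (after a shebang, if present); a hypothetical sketch of that helper, which may differ from the real implementation:

encoding_line = '# -*- coding: utf-8 -*-'

def find_encoding_insert_position(line_list):
    # PEP 263: the coding cookie must be on the first or second line, so the
    # only thing to skip is a leading shebang. (The real helper can also
    # return None to leave a file untouched.)
    if line_list and line_list[0].startswith('#!'):
        return 1
    return 0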
Example No. 29
def update_bindings():
    r"""
    Returns:
        dict: matchtups

    CommandLine:
        python ~/local/build_scripts/flannscripts/autogen_bindings.py --exec-update_bindings
        utprof.py ~/local/build_scripts/flannscripts/autogen_bindings.py --exec-update_bindings

    Example:
        >>> # DISABLE_DOCTEST
        >>> from autogen_bindings import *  # NOQA
        >>> import sys
        >>> import utool as ut
        >>> sys.path.append(ut.truepath('~/local/build_scripts/flannscripts'))
        >>> matchtups = update_bindings()
        >>> result = ('matchtups = %s' % (ut.repr2(matchtups),))
        >>> print(result)
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> ut.show_if_requested()
    """
    from os.path import basename
    import difflib
    import numpy as np
    import re
    binding_names = [
        'build_index',
        'used_memory',
        'add_points',
        'remove_point',

        'compute_cluster_centers',
        'load_index',
        'save_index',
        'find_nearest_neighbors',

        'radius_search',
        'remove_points',
        'free_index',
        'find_nearest_neighbors_index',

        # 'size',
        # 'veclen',
        # 'get_point',
        # 'flann_get_distance_order',
        # 'flann_get_distance_type',
        # 'flann_log_verbosity',

        # 'clean_removed_points',
    ]

    _places = [
        '~/code/flann/src/cpp/flann/flann.cpp',
        '~/code/flann/src/cpp/flann/flann.h',
        '~/code/flann/src/python/pyflann/flann_ctypes.py',
        '~/code/flann/src/python/pyflann/index.py',
    ]

    eof_sentinals = {
        # 'flann_ctypes.py': '# END DEFINE BINDINGS',
        'flann_ctypes.py': 'def ensure_2d_array(arr',
        # 'flann.h': '// END DEFINE BINDINGS',
        'flann.h': '#ifdef __cplusplus',
        'flann.cpp': None,
        'index.py': None,
    }
    block_sentinals = {
        'flann.h': re.escape('/**'),
        'flann.cpp': 'template *<typename Distance>',
        # 'flann_ctypes.py': '\n',
        'flann_ctypes.py': 'flann\.[a-z_.]* =',
        # 'index.py': '    def .*',
        'index.py': '    [^ ].*',
    }
    places = {basename(fpath): fpath for fpath in ut.lmap(ut.truepath, _places)}
    text_dict = ut.map_dict_vals(ut.readfrom, places)
    lines_dict = {key: val.split('\n') for key, val in text_dict.items()}
    orig_texts = text_dict.copy()  # NOQA
    binding_defs = {}
    named_blocks  = {}

    print('binding_names = %r' % (binding_names,))
    for binding_name in binding_names:
        blocks, defs = autogen_parts(binding_name)
        binding_defs[binding_name] = defs
        named_blocks[binding_name] = blocks

    for binding_name in ut.ProgIter(binding_names):
        ut.colorprint('+--- GENERATE BINDING %s -----' % (binding_name,), 'yellow')
        blocks_dict = named_blocks[binding_name]
        for key in places.keys():
            ut.colorprint('---- generating %s for %s -----' % (binding_name, key,), 'yellow')
            # key = 'flann_ctypes.py'
            # print(text_dict[key])
            old_text = text_dict[key]
            line_list = lines_dict[key]
            #text = old_text
            block = blocks_dict[key]

            debug = ut.get_argflag('--debug')
            # debug = True
            # if debug:
            #     print(ut.highlight_code(block, splitext(key)[1]))

            # Find a place in the code that already exists

            searchblock = block
            if key.endswith('.cpp') or key.endswith('.h'):
                searchblock = re.sub(ut.REGEX_C_COMMENT, '', searchblock,
                                     flags=re.MULTILINE | re.DOTALL)
            searchblock = '\n'.join(searchblock.splitlines()[0:3])

            # @ut.cached_func(verbose=False)
            def cached_match(old_text, searchblock):
                def isjunk(x):
                    return False
                    return x in ' \t,*()'
                def isjunk2(x):
                    return x in ' \t,*()'
                # Not sure why the first one just doesnt find it
                # isjunk = None
                sm = difflib.SequenceMatcher(isjunk, old_text, searchblock,
                                             autojunk=False)
                sm0 = difflib.SequenceMatcher(isjunk, old_text, searchblock,
                                              autojunk=True)
                sm1 = difflib.SequenceMatcher(isjunk2, old_text, searchblock,
                                              autojunk=False)
                sm2 = difflib.SequenceMatcher(isjunk2, old_text, searchblock,
                                              autojunk=True)
                matchtups = (sm.get_matching_blocks() +
                             sm0.get_matching_blocks() +
                             sm1.get_matching_blocks() +
                             sm2.get_matching_blocks())
                return matchtups
            matchtups = cached_match(old_text, searchblock)
            # Find a reasonable match in matchtups

            found = False
            if debug:
                # print('searchblock =\n%s' % (searchblock,))
                print('searchblock = %r' % (searchblock,))
            for (a, b, size) in matchtups:
                matchtext = old_text[a: a + size]
                pybind = binding_defs[binding_name]['py_binding_name']
                if re.search(binding_name + '\\b', matchtext) or re.search(pybind + '\\b', matchtext):
                    found = True
                    pos = a + size
                    if debug:
                        print('MATCHING TEXT')
                        print(matchtext)
                    break
                else:
                    if debug and 0:
                        print('Not matching')
                        print('matchtext = %r' % (matchtext,))
                        matchtext2 = old_text[a - 10: a + size + 20]
                        print('matchtext2 = %r' % (matchtext2,))

            if found:
                linelens = np.array(ut.lmap(len, line_list)) + 1
                sumlen = np.cumsum(linelens)
                row = np.where(sumlen < pos)[0][-1] + 1
                #print(line_list[row])
                # Search for extents of the block to overwrite
                block_sentinal = block_sentinals[key]
                row1 = ut.find_block_end(row, line_list, block_sentinal, -1) - 1
                row2 = ut.find_block_end(row + 1, line_list, block_sentinal, +1)
                eof_sentinal = eof_sentinals[key]
                if eof_sentinal is not None:
                    print('eof_sentinal = %r' % (eof_sentinal,))
                    row2 = min([count for count, line in enumerate(line_list) if line.startswith(eof_sentinal)][-1], row2)
                nr = len((block + '\n\n').splitlines())
                new_line_list = ut.insert_block_between_lines(
                    block + '\n', row1, row2, line_list)
                rtext1 = '\n'.join(line_list[row1:row2])
                rtext2 = '\n'.join(new_line_list[row1:row1 + nr])
                if debug:
                    print('-----')
                    ut.colorprint('FOUND AND REPLACING %s' % (binding_name,), 'yellow')
                    print(ut.highlight_code(rtext1))
                if debug:
                    print('-----')
                    ut.colorprint('FOUND AND REPLACED WITH %s' % (binding_name,), 'yellow')
                    print(ut.highlight_code(rtext2))
                if not ut.get_argflag('--diff') and not debug:
                    print(ut.color_diff_text(ut.difftext(rtext1, rtext2, num_context_lines=7, ignore_whitespace=True)))
            else:
                # Append to end of the file
                eof_sentinal = eof_sentinals[key]
                if eof_sentinal is None:
                    row2 = len(line_list) - 1
                else:
                    row2_choice = [count for count, line in enumerate(line_list)
                                   if line.startswith(eof_sentinal)]
                    if len(row2_choice) == 0:
                        row2 = len(line_list) - 1
                        assert False
                    else:
                        row2 = row2_choice[-1] - 1

                # row1 = row2 - 1
                # row2 = row2 - 1
                row1 = row2

                new_line_list = ut.insert_block_between_lines(
                    block + '\n', row1, row2, line_list)
                # block + '\n\n\n', row1, row2, line_list)

                rtext1 = '\n'.join(line_list[row1:row2])
                nr = len((block + '\n\n').splitlines())
                rtext2 = '\n'.join(new_line_list[row1:row1 + nr])

                if debug:
                    print('-----')
                    ut.colorprint('NOT FOUND AND REPLACING %s' % (binding_name,), 'yellow')
                    print(ut.highlight_code(rtext1))
                if debug:
                    print('-----')
                    ut.colorprint('NOT FOUND AND REPLACED WITH %s' % (binding_name,), 'yellow')
                    print(ut.highlight_code(rtext2))

                if not ut.get_argflag('--diff') and not debug:
                    print(ut.color_diff_text(ut.difftext(rtext1, rtext2, num_context_lines=7, ignore_whitespace=True)))
            text_dict[key] = '\n'.join(new_line_list)
            lines_dict[key] = new_line_list
        ut.colorprint('L___  GENERATED BINDING %s ___' % (binding_name,), 'yellow')

    for key in places:
        new_text = '\n'.join(lines_dict[key])
        #ut.writeto(ut.augpath(places[key], '.new'), new_text)
        ut.writeto(ut.augpath(places[key]), new_text)

    for key in places:
        if ut.get_argflag('--diff'):
            difftext = ut.get_textdiff(orig_texts[key], new_text,
                                       num_context_lines=7, ignore_whitespace=True)
            difftext = ut.color_diff_text(difftext)
            print(difftext)
Example No. 30
    def fix_chktex():
        """
        ./texfix.py --fixcite --fix-chktex
        """
        import parse
        fpaths = testdata_fpaths()
        print('Running chktex')
        output_list = [
            ut.cmd('chktex', fpath, verbose=False)[0] for fpath in fpaths
        ]

        fixcite = ut.get_argflag('--fixcite')
        fixlbl = ut.get_argflag('--fixlbl')
        fixcmdterm = ut.get_argflag('--fixcmdterm')

        for fpath, output in zip(fpaths, output_list):
            text = ut.readfrom(fpath)
            buffer = text.split('\n')
            pat = '\n' + ut.positive_lookahead('Warning')
            warn_list = list(
                filter(lambda x: x.startswith('Warning'),
                       re.split(pat, output)))
            delete_linenos = []

            if not (fixcmdterm or fixlbl or fixcite):
                print(' CHOOSE A FIX ')

            modified_lines = []

            for warn in warn_list:
                warnlines = warn.split('\n')
                pres = parse.parse(
                    'Warning {num} in {fpath} line {lineno}: {warnmsg}',
                    warnlines[0])
                if pres is not None:
                    fpath_ = pres['fpath']
                    lineno = int(pres['lineno']) - 1
                    warnmsg = pres['warnmsg']
                    try:
                        assert fpath == fpath_, ('%r != %r' % (fpath, fpath_))
                    except AssertionError:
                        continue
                    if 'No errors printed' in warn:
                        #print('Cannot fix')
                        continue
                    if lineno in modified_lines:
                        print('Skipping modified line')
                        continue
                    if fixcmdterm and warnmsg == 'Command terminated with space.':
                        print('Fix command termination')
                        errorline = warnlines[1]  # NOQA
                        carrotline = warnlines[2]
                        pos = carrotline.find('^')
                        if 0:
                            print('pos = %r' % (pos, ))
                            print('lineno = %r' % (lineno, ))
                            print('errorline = %r' % (errorline, ))
                        modified_lines.append(lineno)
                        line = buffer[lineno]
                        pre_, post_ = line[:pos], line[pos + 1:]
                        newline = (pre_ + '{} ' + post_).rstrip(' ')
                        #print('newline   = %r' % (newline,))
                        buffer[lineno] = newline
                    elif fixlbl and warnmsg == 'Delete this space to maintain correct pagereferences.':
                        print('Fix label newline')
                        fpath_ = pres['fpath']
                        errorline = warnlines[1]  # NOQA
                        new_prevline = buffer[
                            lineno - 1].rstrip() + errorline.lstrip(' ')
                        buffer[lineno - 1] = new_prevline
                        modified_lines.append(lineno)
                        delete_linenos.append(lineno)
                    elif fixcite and re.match(
                            'Non-breaking space \\(.~.\\) should have been used',
                            warnmsg):
                        #print(warnmsg)
                        #print('\n'.join(warnlines))
                        print('Fix citation space')
                        carrotline = warnlines[2]
                        pos = carrotline.find('^')
                        modified_lines.append(lineno)
                        line = buffer[lineno]
                        if line[pos] == ' ':
                            pre_, post_ = line[:pos], line[pos + 1:]
                            newline = (pre_ + '~' + post_).rstrip(' ')
                        else:
                            pre_, post_ = line[:pos + 1], line[pos + 1:]
                            newline = (pre_ + '~' + post_).rstrip(' ')
                            print(warn)
                            print(line[pos])
                            assert False
                            #assert line[pos] == ' ', '%r' % line[pos]
                            break
                        if len(pre_.strip()) == 0:
                            new_prevline = buffer[
                                lineno - 1].rstrip() + newline.lstrip(' ')
                            buffer[lineno - 1] = new_prevline
                            delete_linenos.append(lineno)
                        else:
                            #print('newline   = %r' % (newline,))
                            buffer[lineno] = newline
                    #print(warn)

            if len(delete_linenos) > 0:
                mask = ut.index_to_boolmask(delete_linenos, len(buffer))
                buffer = ut.compress(buffer, ut.not_list(mask))
            newtext = '\n'.join(buffer)

            #ut.dump_autogen_code(fpath, newtext, 'tex', fullprint=False)
            ut.print_difftext(
                ut.get_textdiff(text, newtext, num_context_lines=4))
            if ut.get_argflag('-w'):
                ut.writeto(fpath, newtext)
            else:
                print('Specify -w to finalize change')
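The warning-header parsing relies on the parse package; a standalone sketch on a made-up but representative chktex line:

import parse

warnline = 'Warning 1 in main.tex line 12: Command terminated with space.'
pres = parse.parse('Warning {num} in {fpath} line {lineno}: {warnmsg}', warnline)
print(pres['fpath'], int(pres['lineno']) - 1, pres['warnmsg'])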
Example No. 31
            pattern = re.escape(pat)
            found_lines, found_lxs = ut.grepfile(fpath, pattern)
            # DID NOT FIND ENCODING LINE
            if len(found_lines) == 0:
                need_encoding_fpaths.append(fpath)

    print('The following fpaths need encoding lines: ' +
          ut.list_str(need_encoding_fpaths, strvals=True))

    if do_write or show_diff:
        for fpath in need_encoding_fpaths:
            print('\n-----------------\nFound file without encoding line: ' +
                  fpath)
            line_list = ut.read_lines_from(fpath)
            linenum = find_encoding_insert_position(line_list)
            if linenum is not None:
                #print(' * linenum = %r' % (linenum,))
                new_lines = line_list[:linenum] + [encoding_line + '\n'
                                                   ] + line_list[linenum:]
                new_text = ''.join(new_lines)
                if show_diff:
                    old_text = ''.join(line_list)
                    textdiff = ut.get_textdiff(old_text,
                                               new_text,
                                               num_context_lines=1)
                    print('Diff:')
                    ut.print_difftext(textdiff)
                if do_write:
                    ut.writeto(fpath, new_text)
                    pass
Example No. 32
def install_wildbook(verbose=ut.NOT_QUIET):
    """
    Script to setup wildbook on a unix based system
    (hopefully eventually this will generalize to win32)

    CommandLine:
        # Reset
        python -m ibeis --tf reset_local_wildbook
        # Setup
        python -m ibeis --tf install_wildbook
        # Startup
        python -m ibeis --tf startup_wildbook_server --show --exec-mode

        # Reset
        python -m ibeis.control.manual_wildbook_funcs --test-reset_local_wildbook
        # Setup
        python -m ibeis.control.manual_wildbook_funcs --test-install_wildbook
        # Startup
        python -m ibeis.control.manual_wildbook_funcs --test-startup_wildbook_server --show --exec-mode


    Example:
        >>> # SCRIPT
        >>> from ibeis.control.manual_wildbook_funcs import *  # NOQA
        >>> verbose = True
        >>> result = install_wildbook()
        >>> print(result)
    """
    # TODO: allow custom specified tomcat directory
    from os.path import basename, splitext, join
    import time
    import re
    import subprocess
    try:
        output = subprocess.check_output(['java', '-version'],
                                         stderr=subprocess.STDOUT)
        _java_version = output.split('\n')[0]
        _java_version = _java_version.replace('java version ', '')
        java_version = _java_version.replace('"', '')
        print('java_version = %r' % (java_version,))
        if not java_version.startswith('1.7'):
            print('Warning wildbook is only supported for java 1.7')
    except OSError:
        output = None
    if output is None:
        raise ImportError(
            'Cannot find java on this machine. '
            'Please install java: http://www.java.com/en/download/')

    tomcat_dpath = find_or_download_tomcat()
    assert tomcat_dpath is not None, 'Could not find tomcat'
    war_fpath = find_or_download_wilbook_warfile()
    war_fname = basename(war_fpath)
    wb_target = splitext(war_fname)[0]

    # Ensure environment variables
    #os.environ['JAVA_HOME'] = find_java_jvm()
    #os.environ['TOMCAT_HOME'] = tomcat_dpath
    #os.environ['CATALINA_HOME'] = tomcat_dpath

    # Move the war file to tomcat webapps if not there
    webapps_dpath = join(tomcat_dpath, 'webapps')
    deploy_war_fpath = join(webapps_dpath, war_fname)
    if not ut.checkpath(deploy_war_fpath, verbose=verbose):
        ut.copy(war_fpath, deploy_war_fpath)

    # Ensure that the war file has been unpacked

    unpacked_war_dpath = join(webapps_dpath, wb_target)
    if not ut.checkpath(unpacked_war_dpath, verbose=verbose):
        # Need to make sure you start catalina in the same directory otherwise
        # the derby database gets put in cwd
        tomcat_startup_dir = get_tomcat_startup_tmpdir()
        with ut.ChdirContext(tomcat_startup_dir):
            # Starting and stoping catalina should be sufficient to unpack the
            # war
            startup_fpath  = join(tomcat_dpath, 'bin', 'startup.sh')
            shutdown_fpath = join(tomcat_dpath, 'bin', 'shutdown.sh')
            ut.cmd(ut.quote_single_command(startup_fpath))
            print('It is NOT ok if the startup.sh fails\n')

            # wait for the war to be unpacked
            for retry_count in range(0, 6):
                time.sleep(1)
                if ut.checkpath(unpacked_war_dpath, verbose=True):
                    break
                else:
                    print('Retrying')

            # ensure that the server is running
            import requests
            print('Checking if we can ping the server')
            response = requests.get('http://localhost:8080')
            if response is None or response.status_code != 200:
                print('There may be an error starting the server')
            else:
                print('Seem able to ping the server')

            # assert that the war was unpacked
            ut.assertpath(unpacked_war_dpath, (
                'Wildbook war might have not unpacked correctly.  This may '
                'be ok. Try again. If it fails a second time, then there is a '
                'problem.'), verbose=True)

            # shutdown the server
            ut.cmd(ut.quote_single_command(shutdown_fpath))
            print('It is ok if the shutdown.sh fails')
            time.sleep(.5)

    # Make sure permissions are correctly set in wildbook
    # Comment out the line that requires authentication
    permission_fpath = join(unpacked_war_dpath, 'WEB-INF/web.xml')
    ut.assertpath(permission_fpath)
    permission_text = ut.readfrom(permission_fpath)
    lines_to_remove = [
        '/EncounterSetMarkedIndividual = authc, roles[admin]'
    ]
    new_permission_text = permission_text[:]
    for line in lines_to_remove:
        re.search(re.escape(line), permission_text)
        prefix = ut.named_field('prefix', '\\s*')
        suffix = ut.named_field('suffix', '\\s*\n')
        pattern = ('^' + prefix + re.escape(line) + suffix)
        match = re.search(pattern, permission_text,
                          flags=re.MULTILINE | re.DOTALL)
        if match is None:
            continue
        newline = '<!--%s -->' % (line,)
        repl = ut.bref_field('prefix') + newline + ut.bref_field('suffix')
        new_permission_text = re.sub(pattern, repl, permission_text,
                                     flags=re.MULTILINE | re.DOTALL)
        assert new_permission_text != permission_text, (
            'text should have changed')
    if new_permission_text != permission_text:
        print('Need to write new permission texts')
        ut.writeto(permission_fpath, new_permission_text)
    else:
        print('Permission file seems to be ok')

    print('Wildbook is installed and waiting to be started')
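
The permission patch above hinges on one regex substitution: find the exact line (with its surrounding whitespace captured in named groups) and wrap it in an XML comment. Below is a standalone sketch using plain re, under the assumption that ut.named_field and ut.bref_field expand to ordinary (?P<name>...) groups and \g<name> backreferences.

import re

line = '/EncounterSetMarkedIndividual = authc, roles[admin]'
permission_text = ('before\n'
                   '    /EncounterSetMarkedIndividual = authc, roles[admin]\n'
                   'after\n')

# prefix/suffix capture the whitespace around the target line
pattern = '^(?P<prefix>\\s*)' + re.escape(line) + '(?P<suffix>\\s*\n)'
repl = '\\g<prefix>' + '<!--%s -->' % (line,) + '\\g<suffix>'
new_text = re.sub(pattern, repl, permission_text, flags=re.MULTILINE)
print(new_text)  # the target line is now commented out in place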
Exemplo n.º 33
0
def autogen_ipynb(ibs, launch=None, run=None):
    r"""
    Autogenerates standard IBEIS Image Analysis IPython notebooks.

    CommandLine:
        python -m wbia autogen_ipynb --run --db lynx

        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST -p :proot=smk,num_words=64000 default
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST --asreport
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST --noexample --withtags
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST

        python -m wbia autogen_ipynb --ipynb --db STS_SandTigers

        python -m wbia autogen_ipynb --db PZ_MTEST
        # TODO: Add support for dbdir to be specified
        python -m wbia autogen_ipynb --db ~/work/PZ_MTEST

        python -m wbia autogen_ipynb --ipynb --db Oxford -a default:qhas_any=\(query,\),dpername=1,exclude_reference=True,dminqual=good
        python -m wbia autogen_ipynb --ipynb --db PZ_MTEST -a default -t best:lnbnn_normalizer=[None,normlnbnn-test]

        python -m wbia.templates.generate_notebook --exec-autogen_ipynb --db wd_peter_blinston --ipynb

        python -m wbia autogen_ipynb --db PZ_Master1 --ipynb
        python -m wbia autogen_ipynb --db PZ_Master1 -a timectrl:qindex=0:100 -t best best:normsum=True --ipynb --noexample
        python -m wbia autogen_ipynb --db PZ_Master1 -a timectrl --run
        jupyter-notebook Experiments-lynx.ipynb
        killall python

        python -m wbia autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW -a default:has_any=hasnotch
        python -m wbia autogen_ipynb --db humpbacks --ipynb -t default:proot=BC_DTW default:proot=vsmany -a default:has_any=hasnotch,mingt=2,qindex=0:50 --noexample

        python -m wbia autogen_ipynb --db testdb_curvrank --ipynb -t default:proot=CurvRankDorsal
        python -m wbia autogen_ipynb --db testdb_curvrank --ipynb -t default:proot=CurvRankFluke
        python -m wbia autogen_ipynb --db PW_Master --ipynb -t default:proot=CurvRankDorsal

        python -m wbia autogen_ipynb --db testdb_identification --ipynb -t default:proot=Deepsense

    Ignore:
        python -m wbia autogen_ipynb --db WS_ALL

    Example:
        >>> # SCRIPT
        >>> from wbia.templates.generate_notebook import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='testdb1')
        >>> result = autogen_ipynb(ibs)
        >>> print(result)
    """
    dbname = ibs.get_dbname()
    fname = 'Experiments-' + dbname
    nb_fpath = fname + '.ipynb'
    if ut.get_argflag('--cells'):
        notebook_cells = make_wbia_cell_list(ibs)
        print('\n# ---- \n'.join(notebook_cells))
        return
    # TODO: Add support for dbdir to be specified
    notebook_str = make_wbia_notebook(ibs)
    ut.writeto(nb_fpath, notebook_str)
    run = ut.get_argflag('--run') if run is None else run
    launch = launch if launch is not None else ut.get_argflag('--ipynb')
    if run:
        run_nb = ut.run_ipython_notebook(notebook_str)
        output_fpath = ut.export_notebook(run_nb, fname)
        ut.startfile(output_fpath)
    elif launch:
        command = ' '.join(
            [
                'jupyter-notebook',
                '--NotebookApp.iopub_data_rate_limit=10000000',
                '--NotebookApp.token=',
                nb_fpath,
            ]
        )
        ut.cmd2(command, detatch=True, verbose=True)
    else:
        print('notebook_str =\n%s' % (notebook_str,))
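
The launch branch above just shells out. Below is a rough equivalent of that detached launch using only the standard library; it assumes ut.cmd2 with detatch=True behaves roughly like a non-blocking Popen, and that jupyter-notebook is on PATH. The notebook path is hypothetical.

import subprocess

nb_fpath = 'Experiments-testdb1.ipynb'   # hypothetical notebook path
command = [
    'jupyter-notebook',
    '--NotebookApp.iopub_data_rate_limit=10000000',
    '--NotebookApp.token=',
    nb_fpath,
]
# Popen returns immediately, leaving the notebook server running detached.
proc = subprocess.Popen(command)
print('launched jupyter with pid %d' % (proc.pid,))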
Exemplo n.º 34
0
def make_application_icon(exe_fpath, dry=True, props={}):
    r"""
    CommandLine:
        python -m utool.util_ubuntu --exec-make_application_icon --exe=cockatrice --icon=/home/joncrall/code/Cockatrice/cockatrice/resources/cockatrice.png
        python -m utool.util_ubuntu --exec-make_application_icon --exe=/opt/zotero/zotero --icon=/opt/zotero/chrome/icons/default/main-window.ico

        python -m utool.util_ubuntu --exec-make_application_icon --exe "env WINEPREFIX="/home/joncrall/.wine" wine C:\\\\windows\\\\command\\\\start.exe /Unix /home/joncrall/.wine32-dotnet45/dosdevices/c:/users/Public/Desktop/Hearthstone.lnk" --path "/home/joncrall/.wine/dosdevices/c:/Program Files (x86)/Hearthstone"
        # Exec=env WINEPREFIX="/home/joncrall/.wine" wine /home/joncrall/.wine/drive_c/Program\ Files\ \(x86\)/Battle.net/Battle.net.exe

        --icon=/opt/zotero/chrome/icons/default/main-window.ico

        python -m utool.util_ubuntu --exec-make_application_icon --exe=/home/joncrall/code/build-ArenaTracker-Desktop_Qt_5_6_1_GCC_64bit-Debug

        update-desktop-database ~/.local/share/applications


    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_ubuntu import *  # NOQA
        >>> import utool as ut
        >>> exe_fpath = ut.get_argval('--exe', default='cockatrice')
        >>> icon = ut.get_argval('--icon', default=None)
        >>> dry = not ut.get_argflag(('--write', '-w'))
        >>> props = {'terminal': False, 'icon': icon}
        >>> result = make_application_icon(exe_fpath, dry, props)
        >>> print(result)
    """
    import utool as ut
    exe_fname_noext = splitext(basename(exe_fpath))[0]
    app_name = exe_fname_noext.replace('_', '-')
    nice_name = ' '.join(
        [word[0].upper() + word[1:].lower()
         for word in app_name.replace('-', ' ').split(' ')]
    )
    lines = [
        '[Desktop Entry]',
        'Name={nice_name}',
        'Exec={exe_fpath}',
    ]

    if 'mime_name' in props:
        lines += ['MimeType=application/x-{mime_name}']

    if 'icon' in props:
        lines += ['Icon={icon}']

    if props.get('path'):
        lines += ['Path={path}']

    # if props.get('comment'):
    #     lines += ['Path={comment}']

    lines += [
        'Terminal={terminal}',
        'Type=Application',
        'Categories=Utility;Application;',
        'Comment=Custom App',
    ]
    fmtdict = locals()
    fmtdict.update(props)

    prefix = ut.truepath('~/.local/share')
    app_codeblock = '\n'.join(lines).format(**fmtdict)
    app_dpath = join(prefix, 'applications')
    app_fpath = join(app_dpath, '{app_name}.desktop'.format(**locals()))

    print(app_codeblock)
    print('---')
    print(app_fpath)
    print('L___')

    if not dry:
        ut.writeto(app_fpath, app_codeblock, verbose=ut.NOT_QUIET, n=None)
        ut.cmd('update-desktop-database ~/.local/share/applications')
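
The function fills a brace-style template from locals() plus props. A small sketch of what the rendered .desktop text looks like; all values here are made up for illustration.

# Sketch of the template rendering step with hypothetical values.
fmtdict = {
    'nice_name': 'Cockatrice',
    'exe_fpath': '/usr/bin/cockatrice',
    'icon': '/usr/share/icons/cockatrice.png',
    'terminal': False,
}
lines = [
    '[Desktop Entry]',
    'Name={nice_name}',
    'Exec={exe_fpath}',
    'Icon={icon}',
    'Terminal={terminal}',
    'Type=Application',
    'Categories=Utility;Application;',
    'Comment=Custom App',
]
print('\n'.join(lines).format(**fmtdict))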
Exemplo n.º 35
0
def add_new_mimetype_association(ext, mime_name, exe_fpath=None, dry=True):
    """
    TODO: move to external manager and generalize

    Args:
        ext (str): extension to associate
        mime_name (str): the name of the mime_name to create (defaults to ext)
        exe_fpath (str): executable location if this is for one specific file

    References:
        https://wiki.archlinux.org/index.php/Default_applications#Custom_file_associations

    CommandLine:
        python -m utool.util_ubuntu --exec-add_new_mimetype_association
        # Add ability to open ipython notebooks via double click
        python -m utool.util_ubuntu --exec-add_new_mimetype_association --mime-name=ipynb+json --ext=.ipynb --exe-fpath=/usr/local/bin/ipynb
        python -m utool.util_ubuntu --exec-add_new_mimetype_association --mime-name=ipynb+json --ext=.ipynb --exe-fpath=jupyter-notebook --force

        python -m utool.util_ubuntu --exec-add_new_mimetype_association --mime-name=sqlite --ext=.sqlite --exe-fpath=sqlitebrowser

    Example:
        >>> # SCRIPT
        >>> from utool.util_ubuntu import *  # NOQA
        >>> import utool as ut
        >>> ext = ut.get_argval('--ext', type_=str, default=None)
        >>> mime_name = ut.get_argval('--mime_name', type_=str, default=None)
        >>> exe_fpath = ut.get_argval('--exe_fpath', type_=str, default=None)
        >>> dry = not ut.get_argflag('--force')
        >>> result = add_new_mimetype_association(ext, mime_name, exe_fpath, dry)
        >>> print(result)
    """
    import utool as ut
    terminal = True

    mime_codeblock = ut.codeblock(
        '''
        <?xml version="1.0" encoding="UTF-8"?>
        <mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
            <mime-type type="application/x-{mime_name}">
                <glob-deleteall/>
                <glob pattern="*{ext}"/>
            </mime-type>
        </mime-info>
        '''
    ).format(**locals())

    prefix = ut.truepath('~/.local/share')
    mime_dpath = join(prefix, 'mime/packages')
    mime_fpath = join(mime_dpath, 'application-x-{mime_name}.xml'.format(**locals()))

    print(mime_codeblock)
    print('---')
    print(mime_fpath)
    print('L___')

    if exe_fpath is not None:
        exe_fname_noext = splitext(basename(exe_fpath))[0]
        app_name = exe_fname_noext.replace('_', '-')
        nice_name = ' '.join(
            [word[0].upper() + word[1:].lower()
             for word in app_name.replace('-', ' ').split(' ')]
        )
        app_codeblock = ut.codeblock(
            '''
            [Desktop Entry]
            Name={nice_name}
            Exec={exe_fpath}
            MimeType=application/x-{mime_name}
            Terminal={terminal}
            Type=Application
            Categories=Utility;Application;
            Comment=Custom App
            '''
        ).format(**locals())
        app_dpath = join(prefix, 'applications')
        app_fpath = join(app_dpath, '{app_name}.desktop'.format(**locals()))

        print(app_codeblock)
        print('---')
        print(app_fpath)
        print('L___')

    # WRITE FILES
    if not dry:
        ut.ensuredir(mime_dpath)
        if exe_fpath is not None:
            ut.ensuredir(app_dpath)

        ut.writeto(mime_fpath, mime_codeblock, verbose=ut.NOT_QUIET, n=None)
        if exe_fpath is not None:
            ut.writeto(app_fpath, app_codeblock, verbose=ut.NOT_QUIET, n=None)

        # UPDATE BACKENDS

        #ut.cmd('update-mime-database /usr/share/mime')
        #~/.local/share/applications/mimeapps.list
        print(ut.codeblock(
            '''
            Run these commands:
            update-desktop-database ~/.local/share/applications
            update-mime-database ~/.local/share/mime
            '''
        ))
        if exe_fpath is not None:
            ut.cmd('update-desktop-database ~/.local/share/applications')
        ut.cmd('update-mime-database ~/.local/share/mime')
    else:
        print('dry_run')
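
For reference, a sketch of what the mime package XML above renders to; the ext and mime_name values are made up for illustration, and the template mirrors the one in the function.

ext = '.ipynb'
mime_name = 'ipynb+json'
mime_codeblock = '''\
<?xml version="1.0" encoding="UTF-8"?>
<mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
    <mime-type type="application/x-{mime_name}">
        <glob-deleteall/>
        <glob pattern="*{ext}"/>
    </mime-type>
</mime-info>
'''.format(mime_name=mime_name, ext=ext)
print(mime_codeblock)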
Exemplo n.º 36
0
def add_new_mimetype_association(ext, mime_name, exe_fpath=None, dry=True):
    """
    TODO: move to external manager and generalize

    Args:
        ext (str): extension to associate
        mime_name (str): the name of the mime_name to create (defaults to ext)
        exe_fpath (str): executable location if this is for one specific file

    References:
        https://wiki.archlinux.org/index.php/Default_applications#Custom_file_associations

    CommandLine:
        python -m utool.util_ubuntu --exec-add_new_mimetype_association
        # Add ability to open ipython notebooks via double click
        python -m utool.util_ubuntu --exec-add_new_mimetype_association --mime-name=ipynb+json --ext=.ipynb --exe-fpath=/usr/local/bin/ipynb
        python -m utool.util_ubuntu --exec-add_new_mimetype_association --mime-name=ipynb+json --ext=.ipynb --exe-fpath=jupyter-notebook --force

        python -m utool.util_ubuntu --exec-add_new_mimetype_association --mime-name=sqlite --ext=.sqlite --exe-fpath=sqlitebrowser

    Example:
        >>> # SCRIPT
        >>> from utool.util_ubuntu import *  # NOQA
        >>> import utool as ut
        >>> ext = ut.get_argval('--ext', type_=str, default=None)
        >>> mime_name = ut.get_argval('--mime_name', type_=str, default=None)
        >>> exe_fpath = ut.get_argval('--exe_fpath', type_=str, default=None)
        >>> dry = not ut.get_argflag('--force')
        >>> result = add_new_mimetype_association(ext, mime_name, exe_fpath, dry)
        >>> print(result)
    """
    import utool as ut
    terminal = True

    mime_codeblock = ut.codeblock(
        '''
        <?xml version="1.0" encoding="UTF-8"?>
        <mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
            <mime-type type="application/x-{mime_name}">
                <glob-deleteall/>
                <glob pattern="*{ext}"/>
            </mime-type>
        </mime-info>
        '''
    ).format(**locals())

    prefix = ut.truepath('~/.local/share')
    mime_dpath = join(prefix, 'mime/packages')
    mime_fpath = join(mime_dpath, 'application-x-{mime_name}.xml'.format(**locals()))

    print(mime_codeblock)
    print('---')
    print(mime_fpath)
    print('L___')

    if exe_fpath is not None:
        exe_fname_noext = splitext(basename(exe_fpath))[0]
        app_name = exe_fname_noext.replace('_', '-')
        nice_name = ' '.join(
            [word[0].upper() + word[1:].lower()
             for word in app_name.replace('-', ' ').split(' ')]
        )
        app_codeblock = ut.codeblock(
            '''
            [Desktop Entry]
            Name={nice_name}
            Exec={exe_fpath}
            MimeType=application/x-{mime_name}
            Terminal={terminal}
            Type=Application
            Categories=Utility;Application;
            Comment=Custom App
            '''
        ).format(**locals())
        app_dpath = join(prefix, 'applications')
        app_fpath = join(app_dpath, '{app_name}.desktop'.format(**locals()))

        print(app_codeblock)
        print('---')
        print(app_fpath)
        print('L___')

    # WRITE FILES
    if not dry:
        ut.ensuredir(mime_dpath)
        if exe_fpath is not None:
            ut.ensuredir(app_dpath)

        ut.writeto(mime_fpath, mime_codeblock, verbose=ut.NOT_QUIET, n=None)
        if exe_fpath is not None:
            ut.writeto(app_fpath, app_codeblock, verbose=ut.NOT_QUIET, n=None)

        # UPDATE BACKENDS

        #ut.cmd('update-mime-database /usr/share/mime')
        #~/.local/share/applications/mimeapps.list
        print(ut.codeblock(
            '''
            Run these commands:
            update-desktop-database ~/.local/share/applications
            update-mime-database ~/.local/share/mime
            '''
        ))
        if exe_fpath is not None:
            ut.cmd('update-desktop-database ~/.local/share/applications')
        ut.cmd('update-mime-database ~/.local/share/mime')
    else:
        print('dry_run')
Exemplo n.º 37
0
def make_individual_latex_figures(ibs, fpaths_list, flat_case_labels,
                                  cfgx2_shortlbl, case_figdir,
                                  analysis_fpath_list):
    # HACK MAKE LATEX CONVENIENCE STUFF
    #print('LATEX HACK')
    if len(fpaths_list) == 0:
        print('nothing to render')
        return
    RENDER = ut.get_argflag('--render')
    DUMP_FIGDEF = ut.get_argflag(('--figdump', '--dump-figdef', '--figdef'))

    if not (DUMP_FIGDEF or RENDER):  # HACK
        return

    latex_code_blocks = []
    latex_block_keys = []

    caption_prefix = ut.get_argval('--cappref', type_=str, default='')
    caption_suffix = ut.get_argval('--capsuf', type_=str, default='')
    cmdaug = ut.get_argval('--cmdaug', type_=str, default='custom')

    selected = None

    for case_idx, (fpaths, labels) in enumerate(zip(fpaths_list, flat_case_labels)):
        if labels is None:
            labels = [cmdaug]
        if len(fpaths) < 4:
            nCols = len(fpaths)
        else:
            nCols = 2

        _cmdname = ibs.get_dbname() + ' Case ' + ' '.join(labels) + '_' + str(case_idx)
        #print('_cmdname = %r' % (_cmdname,))
        cmdname = ut.latex_sanitize_command_name(_cmdname)
        label_str = cmdname
        if len(caption_prefix) == 0:
            caption_str = ut.escape_latex('Casetags: ' +
                                          ut.list_str(labels, nl=False, strvals=True) +
                                          ', db=' + ibs.get_dbname() + '. ')
        else:
            caption_str = ''

        use_sublbls = len(cfgx2_shortlbl) > 1
        if use_sublbls:
            caption_str += ut.escape_latex('Each figure shows a different configuration: ')
            sublbls = ['(' + chr(97 + count) + ') ' for count in range(len(cfgx2_shortlbl))]
        else:
            #caption_str += ut.escape_latex('This figure depicts correct and
            #incorrect matches from configuration: ')
            sublbls = [''] * len(cfgx2_shortlbl)
        def wrap_tt(text):
            return r'{\tt ' + text + '}'
        _shortlbls = cfgx2_shortlbl
        _shortlbls = list(map(ut.escape_latex, _shortlbls))
        # Adjust spacing for breaks
        #tex_small_space = r''
        tex_small_space = r'\hspace{0pt}'
        # Remove query specific config flags in individual results
        _shortlbls = [re.sub('\\bq[^,]*,?', '', shortlbl) for shortlbl in _shortlbls]
        # Let config strings be broken over newlines
        _shortlbls = [re.sub('\\+', tex_small_space + '+' + tex_small_space, shortlbl)
                      for shortlbl in _shortlbls]
        _shortlbls = [re.sub(', *', ',' + tex_small_space, shortlbl)
                      for shortlbl in _shortlbls]
        _shortlbls = list(map(wrap_tt, _shortlbls))
        cfgx2_texshortlbl = ['\n    ' + lbl + shortlbl
                             for lbl, shortlbl in zip(sublbls, _shortlbls)]

        caption_str += ut.conj_phrase(cfgx2_texshortlbl, 'and') + '.\n    '
        caption_str = '\n    ' + caption_prefix + caption_str + caption_suffix
        caption_str = caption_str.rstrip()
        figure_str  = ut.get_latex_figure_str(fpaths,
                                                nCols=nCols,
                                                label_str=label_str,
                                                caption_str=caption_str,
                                                use_sublbls=None,
                                                use_frame=True)
        latex_block = ut.latex_newcommand(cmdname, figure_str)
        latex_block = '\n%----------\n' + latex_block
        latex_code_blocks.append(latex_block)
        latex_block_keys.append(cmdname)

    # HACK
    remove_fpath = ut.truepath('~/latex/crall-candidacy-2015') + '/'

    latex_fpath = join(case_figdir, 'latex_cases.tex')

    if selected is not None:
        selected_keys = selected
    else:
        selected_keys = latex_block_keys

    selected_blocks = ut.dict_take(dict(zip(latex_block_keys, latex_code_blocks)), selected_keys)

    figdef_block = '\n'.join(selected_blocks)
    figcmd_block = '\n'.join(['\\' + key for key in latex_block_keys])

    selected_block = figdef_block + '\n\n' + figcmd_block

    # HACK: need full paths to render
    selected_block_renderable = selected_block
    selected_block = selected_block.replace(remove_fpath, '')
    if RENDER:
        ut.render_latex_text(selected_block_renderable)

    if DUMP_FIGDEF:
        ut.writeto(latex_fpath, selected_block)

    #if NOT DUMP AND NOT RENDER:
    #    print('STANDARD LATEX RESULTS')
    #    cmdname = ibs.get_dbname() + 'Results'
    #    latex_block  = ut.get_latex_figure_str2(analysis_fpath_list, cmdname, nCols=1)
    #    ut.print_code(latex_block, 'latex')
    if DUMP_FIGDEF or RENDER:
        ut.print_code(selected_block, 'latex')
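
A rough sketch of the figure-definition block being generated per case; this only approximates the general shape, and is not the exact output of ut.get_latex_figure_str or ut.latex_newcommand. The command name, paths, and caption are hypothetical.

cmdname = 'DBCaseOne'
fpaths = ['figures/case1_a.png', 'figures/case1_b.png']
caption_str = 'Casetags: example, db=testdb.'

includes = '\n        '.join(
    '\\includegraphics[width=0.48\\textwidth]{%s}' % (fpath,) for fpath in fpaths)
figure_str = (
    '\\begin{figure}[h]\n'
    '    \\centering\n'
    '        ' + includes + '\n'
    '    \\caption{' + caption_str + '}\n'
    '    \\label{fig:' + cmdname + '}\n'
    '\\end{figure}')
latex_block = '\\newcommand{\\%s}{%s}' % (cmdname, figure_str)
print(latex_block)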
Exemplo n.º 38
0
def make_application_icon(exe_fpath, dry=True, props={}):
    r"""
    CommandLine:
        python -m utool.util_ubuntu --exec-make_application_icon --exe=cockatrice --icon=/home/joncrall/code/Cockatrice/cockatrice/resources/cockatrice.png
        python -m utool.util_ubuntu --exec-make_application_icon --exe=/opt/zotero/zotero --icon=/opt/zotero/chrome/icons/default/main-window.ico

        python -m utool.util_ubuntu --exec-make_application_icon --exe "env WINEPREFIX="/home/joncrall/.wine" wine C:\\\\windows\\\\command\\\\start.exe /Unix /home/joncrall/.wine32-dotnet45/dosdevices/c:/users/Public/Desktop/Hearthstone.lnk" --path "/home/joncrall/.wine/dosdevices/c:/Program Files (x86)/Hearthstone"
        # Exec=env WINEPREFIX="/home/joncrall/.wine" wine /home/joncrall/.wine/drive_c/Program\ Files\ \(x86\)/Battle.net/Battle.net.exe

        --icon=/opt/zotero/chrome/icons/default/main-window.ico

        python -m utool.util_ubuntu --exec-make_application_icon --exe=/home/joncrall/code/build-ArenaTracker-Desktop_Qt_5_6_1_GCC_64bit-Debug

        update-desktop-database ~/.local/share/applications


    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_ubuntu import *  # NOQA
        >>> import utool as ut
        >>> exe_fpath = ut.get_argval('--exe', default='cockatrice')
        >>> icon = ut.get_argval('--icon', default=None)
        >>> dry = not ut.get_argflag(('--write', '-w'))
        >>> props = {'terminal': False, 'icon': icon}
        >>> result = make_application_icon(exe_fpath, dry, props)
        >>> print(result)
    """
    import utool as ut
    exe_fname_noext = splitext(basename(exe_fpath))[0]
    app_name = exe_fname_noext.replace('_', '-')
    nice_name = ' '.join(
        [word[0].upper() + word[1:].lower()
         for word in app_name.replace('-', ' ').split(' ')]
    )
    lines = [
        '[Desktop Entry]',
        'Name={nice_name}',
        'Exec={exe_fpath}',
    ]

    if 'mime_name' in props:
        lines += ['MimeType=application/x-{mime_name}']

    if 'icon' in props:
        lines += ['Icon={icon}']

    if props.get('path'):
        lines += ['Path={path}']

    # if props.get('comment'):
    #     lines += ['Path={comment}']

    lines += [
        'Terminal={terminal}',
        'Type=Application',
        'Categories=Utility;Application;',
        'Comment=Custom App',
    ]
    fmtdict = locals()
    fmtdict.update(props)

    prefix = ut.truepath('~/.local/share')
    app_codeblock = '\n'.join(lines).format(**fmtdict)
    app_dpath = join(prefix, 'applications')
    app_fpath = join(app_dpath, '{app_name}.desktop'.format(**locals()))

    print(app_codeblock)
    print('---')
    print(app_fpath)
    print('L___')

    if not dry:
        ut.writeto(app_fpath, app_codeblock, verbose=ut.NOT_QUIET, n=None)
        ut.cmd('update-desktop-database ~/.local/share/applications')
Exemplo n.º 39
0
def _inject_getter_attrs(metaself,
                         objname,
                         attrs,
                         configurable_attrs,
                         depc_name=None,
                         depcache_attrs=None,
                         settable_attrs=None,
                         aliased_attrs=None):
    """
    Used by the metaclass to inject methods and properties into the class
    inheriting from ObjectList1D
    """

    if settable_attrs is None:
        settable_attrs = []
    settable_attrs = set(settable_attrs)

    # Inform the class of which variables will be injected
    metaself._settable_attrs = settable_attrs
    metaself._attrs = attrs
    metaself._configurable_attrs = configurable_attrs
    if depcache_attrs is None:
        metaself._depcache_attrs = []
    else:
        metaself._depcache_attrs = [
            '%s_%s' % (tbl, col) for tbl, col in depcache_attrs
        ]
    if aliased_attrs is not None:
        metaself._attrs_aliases = aliased_attrs
    else:
        metaself._attrs_aliases = {}

    # if not getattr(metaself, '__needs_inject__', True):
    #     return

    attr_to_aliases = ut.invert_dict(metaself._attrs_aliases,
                                     unique_vals=False)

    # What is the difference between configurable and depcache getters?
    # Could depcache getters just be made configurable?
    # I guess it's just an efficiency thing. Actually it's config2_-vs-config
    # FIXME: rectify differences between normal / configurable / depcache
    # getter

    def _make_caching_setter(attrname, _rowid_setter):
        def _setter(self, values, *args, **kwargs):
            if self._ibs is None:
                self._internal_attrs[attrname] = values
            else:
                if self._caching and attrname in self._internal_attrs:
                    self._internal_attrs[attrname] = values
                _rowid_setter(self, self._rowids, values)

        ut.set_funcname(_setter, '_set_' + attrname)
        return _setter

    def _make_caching_getter(attrname, _rowid_getter):
        def _getter(self):
            if self._ibs is None or (self._caching
                                     and attrname in self._internal_attrs):
                data = self._internal_attrs[attrname]
            else:
                data = _rowid_getter(self, self._rowids)
                if self._caching:
                    self._internal_attrs[attrname] = data
            return data

        ut.set_funcname(_getter, '_get_' + attrname)
        return _getter

    # make default version use implicit rowids and another
    # that takes explicit rowids.

    def _make_setters(objname, attrname):
        ibs_funcname = 'set_%s_%s' % (objname, attrname)

        def _rowid_setter(self, rowids, values, *args, **kwargs):
            ibs_callable = getattr(self._ibs, ibs_funcname)
            ibs_callable(rowids, values, *args, **kwargs)

        ut.set_funcname(_rowid_setter, '_rowid_set_' + attrname)
        _setter = _make_caching_setter(attrname, _rowid_setter)
        return _rowid_setter, _setter

    # ---

    def _make_getters(objname, attrname):
        ibs_funcname = 'get_%s_%s' % (objname, attrname)

        def _rowid_getter(self, rowids):
            ibs_callable = getattr(self._ibs, ibs_funcname)
            data = ibs_callable(rowids)
            if self._asarray:
                data = np.array(data)
            return data

        ut.set_funcname(_rowid_getter, '_rowid_get_' + attrname)
        _getter = _make_caching_getter(attrname, _rowid_getter)
        return _rowid_getter, _getter

    def _make_cfg_getters(objname, attrname):
        ibs_funcname = 'get_%s_%s' % (objname, attrname)

        def _rowid_getter(self, rowids):
            ibs_callable = getattr(self._ibs, ibs_funcname)
            data = ibs_callable(rowids, config2_=self._config)
            if self._asarray:
                data = np.array(data)
            return data

        ut.set_funcname(_rowid_getter, '_rowid_get_' + attrname)
        _getter = _make_caching_getter(attrname, _rowid_getter)
        return _rowid_getter, _getter

    def _make_depc_getters(depc_name, attrname, tbl, col):
        def _rowid_getter(self, rowids):
            depc = getattr(self._ibs, depc_name)
            data = depc.get(tbl, rowids, col, config=self._config)
            if self._asarray:
                data = np.array(data)
            return data

        ut.set_funcname(_rowid_getter, '_rowid_get_' + attrname)
        _getter = _make_caching_getter(attrname, _rowid_getter)
        return _rowid_getter, _getter

    # Collect setter / getter functions and properties
    rowid_getters = []
    getters = []
    setters = []
    properties = []
    for attrname in attrs:
        _rowid_getter, _getter = _make_getters(objname, attrname)
        if attrname in settable_attrs:
            _rowid_setter, _setter = _make_setters(objname, attrname)
            setters.append(_setter)
        else:
            _setter = None
        prop = property(fget=_getter, fset=_setter)
        rowid_getters.append((attrname, _rowid_getter))
        getters.append(_getter)
        properties.append((attrname, prop))

    for attrname in configurable_attrs:
        _rowid_getter, _getter = _make_cfg_getters(objname, attrname)
        prop = property(fget=_getter)
        rowid_getters.append((attrname, _rowid_getter))
        getters.append(_getter)
        properties.append((attrname, prop))

    if depcache_attrs is not None:
        for tbl, col in depcache_attrs:
            attrname = '%s_%s' % (tbl, col)
            _rowid_getter, _getter = _make_depc_getters(
                depc_name, attrname, tbl, col)
            prop = property(fget=_getter, fset=None)
            rowid_getters.append((attrname, _rowid_getter))
            getters.append(_getter)
            properties.append((attrname, prop))

    aliases = []

    # Inject all gathered information
    for attrname, func in rowid_getters:
        funcname = ut.get_funcname(func)
        setattr(metaself, funcname, func)
        # ensure aliases have rowid getters
        for alias in attr_to_aliases.get(attrname, []):
            alias_funcname = '_rowid_get_' + alias
            setattr(metaself, alias_funcname, func)

    for func in getters:
        funcname = ut.get_funcname(func)
        setattr(metaself, funcname, func)

    for func in setters:
        funcname = ut.get_funcname(func)
        setattr(metaself, funcname, func)

    for attrname, prop in properties:
        setattr(metaself, attrname, prop)
        for alias in attr_to_aliases.pop(attrname, []):
            aliases.append((alias, attrname))
            setattr(metaself, alias, prop)

    if ut.get_argflag('--autogen-core'):
        # TODO: turn on autogeneration given a flag
        def expand_closure_source(funcname, func):
            source = ut.get_func_sourcecode(func)
            closure_vars = [
                (k, v.cell_contents)
                for k, v in zip(func.func_code.co_freevars, func.func_closure)
            ]
            source = ut.unindent(source)
            import re
            for k, v in closure_vars:
                source = re.sub('\\b' + k + '\\b', ut.repr2(v), source)
            source = re.sub('def .*\(self', 'def ' + funcname + '(self',
                            source)
            source = ut.indent(source.strip(), '    ') + '\n'
            return source

        explicit_lines = []
        # build explicit version for jedi?
        for func in getters:
            funcname = ut.get_funcname(func)
            source = expand_closure_source(funcname, func)
            explicit_lines.append(source)
        # build explicit version for jedi?
        for func in setters:
            funcname = ut.get_funcname(func)
            source = expand_closure_source(funcname, func)
            explicit_lines.append(source)

        for attrname, prop in properties:
            getter_name = None if prop.fget is None else ut.get_funcname(
                prop.fget)
            setter_name = None if prop.fset is None else ut.get_funcname(
                prop.fset)
            source = '    %s = property(%s, %s)' % (attrname, getter_name,
                                                    setter_name)
            explicit_lines.append(source)

        for alias, attrname in aliases:
            source = '    %s = %s' % (alias, attrname)
            explicit_lines.append(source)

        explicit_source = '\n'.join([
            'from ibeis import _ibeis_object',
            '',
            '',
            'class _%s_base_class(_ibeis_object.ObjectList1D):',
            '    __needs_inject__ = False',
            '',
        ]) % (objname, )
        explicit_source += '\n'.join(explicit_lines)
        explicit_fname = '_autogen_%s_base.py' % (objname, )
        from os.path import dirname, join
        ut.writeto(join(dirname(__file__), explicit_fname),
                   explicit_source + '\n')

    if attr_to_aliases:
        raise AssertionError('Unmapped aliases %r' % (attr_to_aliases, ))
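
Stripped of the caching, config, and depcache layers, the injection above boils down to building one closure per attribute name and attaching it to the class as a property with setattr. A minimal standalone sketch of that pattern; the backend dict here is a hypothetical stand-in for the ibs controller.

class Annots(object):
    def __init__(self, rowids, backend):
        self._rowids = rowids
        self._backend = backend   # stands in for the ibs controller

def _make_getter(attrname):
    def _getter(self):
        # analogous to calling ibs.get_annot_<attrname>(rowids)
        return self._backend['get_annot_' + attrname](self._rowids)
    _getter.__name__ = '_get_' + attrname
    return _getter

backend = {'get_annot_species': lambda rowids: ['zebra'] * len(rowids)}
for attrname in ['species']:
    setattr(Annots, attrname, property(fget=_make_getter(attrname)))

annots = Annots([1, 2, 3], backend)
print(annots.species)   # -> ['zebra', 'zebra', 'zebra']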
Exemplo n.º 40
0
def update_wildbook_install_config(webapps_dpath, unpacked_war_dpath):
    """
    CommandLine:
        python -m ibeis ensure_local_war
        python -m ibeis update_wildbook_install_config
        python -m ibeis update_wildbook_install_config --show

    Example:
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> import ibeis
        >>> tomcat_dpath = find_installed_tomcat()
        >>> webapps_dpath = join(tomcat_dpath, 'webapps')
        >>> wb_target = ibeis.const.WILDBOOK_TARGET
        >>> unpacked_war_dpath = join(webapps_dpath, wb_target)
        >>> locals_ = ut.exec_func_src(update_wildbook_install_config, globals())
        >>> #update_wildbook_install_config(webapps_dpath, unpacked_war_dpath)
        >>> ut.quit_if_noshow()
        >>> ut.vd(unpacked_war_dpath)
        >>> ut.editfile(locals_['permission_fpath'])
        >>> ut.editfile(locals_['jdoconfig_fpath'])
        >>> ut.editfile(locals_['asset_store_fpath'])
    """
    mysql_mode = not ut.get_argflag('--nomysql')

    #if ut.get_argflag('--vd'):
    #    ut.vd(unpacked_war_dpath)
    #find_installed_tomcat
    # Make sure permissions are correctly set in wildbook
    # Comment out the line that requires authentication
    permission_fpath = join(unpacked_war_dpath, 'WEB-INF/web.xml')
    ut.assertpath(permission_fpath)
    permission_text = ut.readfrom(permission_fpath)
    lines_to_remove = [
        # '/ImageSetSetMarkedIndividual = authc, roles[admin]'
        '/EncounterSetMarkedIndividual = authc, roles[admin]'
    ]
    new_permission_text = permission_text[:]
    for line in lines_to_remove:
        re.search(re.escape(line), permission_text)
        prefix = ut.named_field('prefix', '\\s*')
        suffix = ut.named_field('suffix', '\\s*\n')
        pattern = ('^' + prefix + re.escape(line) + suffix)
        match = re.search(pattern, permission_text,
                          flags=re.MULTILINE | re.DOTALL)
        if match is None:
            continue
        newline = '<!--%s -->' % (line,)
        repl = ut.bref_field('prefix') + newline + ut.bref_field('suffix')
        new_permission_text = re.sub(pattern, repl, permission_text,
                                     flags=re.MULTILINE | re.DOTALL)
        assert new_permission_text != permission_text, (
            'text should have changed')
    if new_permission_text != permission_text:
        print('Need to write new permission texts')
        ut.writeto(permission_fpath, new_permission_text)
    else:
        print('Permission file seems to be ok')

    # Make sure we are using a non-process based database
    jdoconfig_fpath = join(unpacked_war_dpath,
                           'WEB-INF/classes/bundles/jdoconfig.properties')
    print('Fixing backend database config')
    print('jdoconfig_fpath = %r' % (jdoconfig_fpath,))
    ut.assertpath(jdoconfig_fpath)
    jdoconfig_text = ut.readfrom(jdoconfig_fpath)
    #ut.vd(dirname(jdoconfig_fpath))
    #ut.editfile(jdoconfig_fpath)

    if mysql_mode:
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'mysql', False)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'derby', 1)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'sqlite', 1)
        mysql_user = '******'
        mysql_passwd = 'somepassword'
        mysql_dbname = 'ibeiswbtestdb'
        # Use mysql
        jdoconfig_text = re.sub(
            'datanucleus.ConnectionUserName = .*$',
            'datanucleus.ConnectionUserName = ' + mysql_user,
            jdoconfig_text, flags=re.MULTILINE)
        jdoconfig_text = re.sub(
            'datanucleus.ConnectionPassword = .*$',
            'datanucleus.ConnectionPassword = ' + mysql_passwd,
            jdoconfig_text, flags=re.MULTILINE)
        jdoconfig_text = re.sub(
            'datanucleus.ConnectionURL *= *jdbc:mysql:.*$',
            'datanucleus.ConnectionURL = jdbc:mysql://localhost:3306/' + mysql_dbname,
            jdoconfig_text, flags=re.MULTILINE)
        jdoconfig_text = re.sub(
            '^.*jdbc:mysql://localhost:3306/shepherd.*$', '',
            jdoconfig_text, flags=re.MULTILINE)
    else:
        # Use SQLite
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'derby', 1)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'mysql', 1)
        jdoconfig_text = ut.toggle_comment_lines(jdoconfig_text, 'sqlite', False)
    ut.writeto(jdoconfig_fpath, jdoconfig_text)

    # Need to make sure wildbook can store information in a reasonable place
    #tomcat_data_dir = join(tomcat_startup_dir, 'webapps', 'wildbook_data_dir')
    tomcat_data_dir = join(webapps_dpath, 'wildbook_data_dir')
    ut.ensuredir(tomcat_data_dir)
    ut.writeto(join(tomcat_data_dir, 'test.txt'), 'A hosted test file')
    asset_store_fpath = join(unpacked_war_dpath, 'createAssetStore.jsp')
    asset_store_text = ut.read_from(asset_store_fpath)
    #data_path_pat = ut.named_field('data_path', 'new File(".*?").toPath')
    new_line = 'LocalAssetStore as = new LocalAssetStore("example Local AssetStore", new File("%s").toPath(), "%s", true);' % (
        tomcat_data_dir,
        'http://localhost:8080/' + basename(tomcat_data_dir)
    )
    # HACKY
    asset_store_text2 = re.sub('^LocalAssetStore as = .*$', new_line, asset_store_text, flags=re.MULTILINE)
    ut.writeto(asset_store_fpath, asset_store_text2)
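
The jdoconfig rewrite above combines line-anchored re.sub edits with ut.toggle_comment_lines. The latter's exact behavior is not shown here; the sketch below assumes it roughly means "comment out lines mentioning a keyword when the flag is truthy, uncomment them when it is falsy", which may differ from the real utool helper.

# Sketch of keyword-based comment toggling over a .properties-style text.
def toggle_comment_lines(text, keyword, comment_out, comment_char='#'):
    new_lines = []
    for line in text.splitlines():
        if keyword in line:
            stripped = line.lstrip()
            if comment_out and not stripped.startswith(comment_char):
                line = comment_char + ' ' + line
            elif not comment_out and stripped.startswith(comment_char):
                line = stripped[len(comment_char):].lstrip()
        new_lines.append(line)
    return '\n'.join(new_lines)

text = 'db=mysql\n# db=sqlite\n'
print(toggle_comment_lines(text, 'sqlite', False))  # uncomment the sqlite line
print(toggle_comment_lines(text, 'mysql', True))    # comment out the mysql line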
Exemplo n.º 41
0
def update_bindings():
    r"""
    Returns:
        dict: matchtups

    CommandLine:
        python ~/local/build_scripts/flannscripts/autogen_bindings.py --exec-update_bindings
        utprof.py ~/local/build_scripts/flannscripts/autogen_bindings.py --exec-update_bindings

    Example:
        >>> # DISABLE_DOCTEST
        >>> from autogen_bindings import *  # NOQA
        >>> import sys
        >>> import utool as ut
        >>> sys.path.append(ut.truepath('~/local/build_scripts/flannscripts'))
        >>> matchtups = update_bindings()
        >>> result = ('matchtups = %s' % (ut.repr2(matchtups),))
        >>> print(result)
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> ut.show_if_requested()
    """
    from os.path import basename
    import difflib
    import numpy as np
    import re
    binding_names = [
        'build_index',
        'used_memory',
        'add_points',
        'remove_point',
        'compute_cluster_centers',
        'load_index',
        'save_index',
        'find_nearest_neighbors',
        'radius_search',
        'remove_points',
        'free_index',
        'find_nearest_neighbors_index',

        # 'size',
        # 'veclen',
        # 'get_point',
        # 'flann_get_distance_order',
        # 'flann_get_distance_type',
        # 'flann_log_verbosity',

        # 'clean_removed_points',
    ]

    _places = [
        '~/code/flann/src/cpp/flann/flann.cpp',
        '~/code/flann/src/cpp/flann/flann.h',
        '~/code/flann/src/python/pyflann/flann_ctypes.py',
        '~/code/flann/src/python/pyflann/index.py',
    ]

    eof_sentinals = {
        # 'flann_ctypes.py': '# END DEFINE BINDINGS',
        'flann_ctypes.py': 'def ensure_2d_array(arr',
        # 'flann.h': '// END DEFINE BINDINGS',
        'flann.h': '#ifdef __cplusplus',
        'flann.cpp': None,
        'index.py': None,
    }
    block_sentinals = {
        'flann.h': re.escape('/**'),
        'flann.cpp': 'template *<typename Distance>',
        # 'flann_ctypes.py': '\n',
        'flann_ctypes.py': 'flann\.[a-z_.]* =',
        # 'index.py': '    def .*',
        'index.py': '    [^ ].*',
    }
    places = {
        basename(fpath): fpath
        for fpath in ut.lmap(ut.truepath, _places)
    }
    text_dict = ut.map_dict_vals(ut.readfrom, places)
    lines_dict = {key: val.split('\n') for key, val in text_dict.items()}
    orig_texts = text_dict.copy()  # NOQA
    binding_defs = {}
    named_blocks = {}

    print('binding_names = %r' % (binding_names, ))
    for binding_name in binding_names:
        blocks, defs = autogen_parts(binding_name)
        binding_defs[binding_name] = defs
        named_blocks[binding_name] = blocks

    for binding_name in ut.ProgIter(binding_names):
        ut.colorprint('+--- GENERATE BINDING %s -----' % (binding_name, ),
                      'yellow')
        blocks_dict = named_blocks[binding_name]
        for key in places.keys():
            ut.colorprint(
                '---- generating %s for %s -----' % (
                    binding_name,
                    key,
                ), 'yellow')
            # key = 'flann_ctypes.py'
            # print(text_dict[key])
            old_text = text_dict[key]
            line_list = lines_dict[key]
            #text = old_text
            block = blocks_dict[key]

            debug = ut.get_argflag('--debug')
            # debug = True
            # if debug:
            #     print(ut.highlight_code(block, splitext(key)[1]))

            # Find a place in the code that already exists

            searchblock = block
            if key.endswith('.cpp') or key.endswith('.h'):
                searchblock = re.sub(ut.REGEX_C_COMMENT,
                                     '',
                                     searchblock,
                                     flags=re.MULTILINE | re.DOTALL)
            searchblock = '\n'.join(searchblock.splitlines()[0:3])

            # @ut.cached_func(verbose=False)
            def cached_match(old_text, searchblock):
                def isjunk(x):
                    return False
                    return x in ' \t,*()'

                def isjunk2(x):
                    return x in ' \t,*()'

                # Not sure why the first one just doesn't find it
                # isjunk = None
                sm = difflib.SequenceMatcher(isjunk,
                                             old_text,
                                             searchblock,
                                             autojunk=False)
                sm0 = difflib.SequenceMatcher(isjunk,
                                              old_text,
                                              searchblock,
                                              autojunk=True)
                sm1 = difflib.SequenceMatcher(isjunk2,
                                              old_text,
                                              searchblock,
                                              autojunk=False)
                sm2 = difflib.SequenceMatcher(isjunk2,
                                              old_text,
                                              searchblock,
                                              autojunk=True)
                matchtups = (sm.get_matching_blocks() +
                             sm0.get_matching_blocks() +
                             sm1.get_matching_blocks() +
                             sm2.get_matching_blocks())
                return matchtups

            matchtups = cached_match(old_text, searchblock)
            # Find a reasonable match in matchtups

            found = False
            if debug:
                # print('searchblock =\n%s' % (searchblock,))
                print('searchblock = %r' % (searchblock, ))
            for (a, b, size) in matchtups:
                matchtext = old_text[a:a + size]
                pybind = binding_defs[binding_name]['py_binding_name']
                if re.search(binding_name + '\\b', matchtext) or re.search(
                        pybind + '\\b', matchtext):
                    found = True
                    pos = a + size
                    if debug:
                        print('MATCHING TEXT')
                        print(matchtext)
                    break
                else:
                    if debug and 0:
                        print('Not matching')
                        print('matchtext = %r' % (matchtext, ))
                        matchtext2 = old_text[a - 10:a + size + 20]
                        print('matchtext2 = %r' % (matchtext2, ))

            if found:
                linelens = np.array(ut.lmap(len, line_list)) + 1
                sumlen = np.cumsum(linelens)
                row = np.where(sumlen < pos)[0][-1] + 1
                #print(line_list[row])
                # Search for extents of the block to overwrite
                block_sentinal = block_sentinals[key]
                row1 = ut.find_block_end(row, line_list, block_sentinal,
                                         -1) - 1
                row2 = ut.find_block_end(row + 1, line_list, block_sentinal,
                                         +1)
                eof_sentinal = eof_sentinals[key]
                if eof_sentinal is not None:
                    print('eof_sentinal = %r' % (eof_sentinal, ))
                    row2 = min([
                        count for count, line in enumerate(line_list)
                        if line.startswith(eof_sentinal)
                    ][-1], row2)
                nr = len((block + '\n\n').splitlines())
                new_line_list = ut.insert_block_between_lines(
                    block + '\n', row1, row2, line_list)
                rtext1 = '\n'.join(line_list[row1:row2])
                rtext2 = '\n'.join(new_line_list[row1:row1 + nr])
                if debug:
                    print('-----')
                    ut.colorprint('FOUND AND REPLACING %s' % (binding_name, ),
                                  'yellow')
                    print(ut.highlight_code(rtext1))
                if debug:
                    print('-----')
                    ut.colorprint(
                        'FOUND AND REPLACED WITH %s' % (binding_name, ),
                        'yellow')
                    print(ut.highlight_code(rtext2))
                if not ut.get_argflag('--diff') and not debug:
                    print(
                        ut.color_diff_text(
                            ut.difftext(rtext1,
                                        rtext2,
                                        num_context_lines=7,
                                        ignore_whitespace=True)))
            else:
                # Append to end of the file
                eof_sentinal = eof_sentinals[key]
                if eof_sentinal is None:
                    row2 = len(line_list) - 1
                else:
                    row2_choice = [
                        count for count, line in enumerate(line_list)
                        if line.startswith(eof_sentinal)
                    ]
                    if len(row2_choice) == 0:
                        row2 = len(line_list) - 1
                        assert False
                    else:
                        row2 = row2_choice[-1] - 1

                # row1 = row2 - 1
                # row2 = row2 - 1
                row1 = row2

                new_line_list = ut.insert_block_between_lines(
                    block + '\n', row1, row2, line_list)
                # block + '\n\n\n', row1, row2, line_list)

                rtext1 = '\n'.join(line_list[row1:row2])
                nr = len((block + '\n\n').splitlines())
                rtext2 = '\n'.join(new_line_list[row1:row1 + nr])

                if debug:
                    print('-----')
                    ut.colorprint(
                        'NOT FOUND AND REPLACING %s' % (binding_name, ),
                        'yellow')
                    print(ut.highlight_code(rtext1))
                if debug:
                    print('-----')
                    ut.colorprint(
                        'NOT FOUND AND REPLACED WITH %s' % (binding_name, ),
                        'yellow')
                    print(ut.highlight_code(rtext2))

                if not ut.get_argflag('--diff') and not debug:
                    print(
                        ut.color_diff_text(
                            ut.difftext(rtext1,
                                        rtext2,
                                        num_context_lines=7,
                                        ignore_whitespace=True)))
            text_dict[key] = '\n'.join(new_line_list)
            lines_dict[key] = new_line_list
        ut.colorprint('L___  GENERATED BINDING %s ___' % (binding_name, ),
                      'yellow')

    for key in places:
        new_text = '\n'.join(lines_dict[key])
        #ut.writeto(ut.augpath(places[key], '.new'), new_text)
        ut.writeto(ut.augpath(places[key]), new_text)

    for key in places:
        if ut.get_argflag('--diff'):
            difftext = ut.get_textdiff(orig_texts[key],
                                       text_dict[key],
                                       num_context_lines=7,
                                       ignore_whitespace=True)
            difftext = ut.color_diff_text(difftext)
            print(difftext)
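
The binding placement above relies on difflib to locate where an approximately matching declaration already lives in the old text. A minimal sketch of that lookup; the C declarations are hypothetical examples, not real flann source.

import difflib

old_text = 'int used_memory(flann_index_t index);\nint other_fn(void);\n'
searchblock = 'int used_memory(flann_index_t index_ptr);'

# get_matching_blocks yields (a, b, size) tuples; scan them for one that
# actually contains the binding name, as update_bindings does above.
sm = difflib.SequenceMatcher(None, old_text, searchblock, autojunk=False)
for a, b, size in sm.get_matching_blocks():
    matchtext = old_text[a:a + size]
    if 'used_memory' in matchtext:
        print('found near offset %d: %r' % (a, matchtext))
        break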
Exemplo n.º 42
0
 def save(bibman):
     text2 = bibman.dumps()
     ut.writeto(bibman.fpath, text2)
Exemplo n.º 43
0
def install_wildbook(verbose=ut.NOT_QUIET):
    """
    Script to set up wildbook on a unix-based system
    (hopefully eventually this will generalize to win32)

    CommandLine:
        # Reset
        ibeis purge_local_wildbook
        ibeis ensure_wb_mysql
        ibeis ensure_local_war
        # Setup
        ibeis install_wildbook
        # ibeis install_wildbook --nomysql
        # Startup
        ibeis startup_wildbook_server --show

        Alternates:
            ibeis install_wildbook --redownload-war
            ibeis install_wildbook --assets
            ibeis startup_wildbook_server --show

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> verbose = True
        >>> result = install_wildbook()
        >>> print(result)
    """
    import requests
    # Ensure that the war file has been unpacked
    tomcat_dpath, webapps_dpath, wb_target = ensure_local_war()

    unpacked_war_dpath = join(webapps_dpath, wb_target)
    tomcat_startup_dir = get_tomcat_startup_tmpdir()
    fresh_install = not ut.checkpath(unpacked_war_dpath, verbose=verbose)
    if fresh_install:
        # Need to make sure you start catalina in the same directory otherwise
        # the derby database gets put in cwd
        with ut.ChdirContext(tomcat_startup_dir):
            # Starting and stoping catalina should be sufficient to unpack the
            # war
            startup_fpath = join(tomcat_dpath, 'bin', 'startup.sh')
            #shutdown_fpath = join(tomcat_dpath, 'bin', 'shutdown.sh')
            ut.cmd(ut.quote_single_command(startup_fpath))
            print('It is NOT ok if the startup.sh fails\n')

            # wait for the war to be unpacked
            for retry_count in range(0, 6):
                time.sleep(1)
                if ut.checkpath(unpacked_war_dpath, verbose=True):
                    break
                else:
                    print('Retrying')

            # ensure that the server is running
            print('Checking if we can ping the server')
            response = requests.get('http://localhost:8080')
            if response is None or response.status_code != 200:
                print('There may be an error starting the server')
            else:
                print('Seem able to ping the server')

            # assert that the war was unpacked
            ut.assertpath(unpacked_war_dpath, (
                'Wildbook war might have not unpacked correctly.  This may '
                'be ok. Try again. If it fails a second time, then there is a '
                'problem.'),
                          verbose=True)

            # Don't shutdown just yet. Need to create assets

    update_wildbook_install_config(webapps_dpath, unpacked_war_dpath)
    asset_flag_fpath = join(tomcat_startup_dir, 'made_assets.flag')

    # Pinging the server to create asset store
    # Ensuring that createAssetStore exists
    if not ut.checkpath(asset_flag_fpath):
        if not fresh_install:
            startup_wildbook_server()
        #web_url = startup_wildbook_server(verbose=False)
        print('Creating asset store')
        wb_url = 'http://localhost:8080/' + wb_target
        response = requests.get(wb_url + '/createAssetStore.jsp')
        if response is None or response.status_code != 200:
            print('There may be an error starting the server')
            #if response.status_code == 500:
            print(response.text)
            assert False, 'response error'
        else:
            print('Created asset store')
            # Create file signaling we did this
            ut.writeto(asset_flag_fpath, 'True')
        shutdown_wildbook_server(verbose=False)
        print('It is ok if the shutdown fails')
    elif fresh_install:
        shutdown_wildbook_server(verbose=False)

    #127.0.0.1:8080/wildbook_data_dir/test.txt
    print('Wildbook is installed and waiting to be started')
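The made_assets.flag handling above is a sentinel-file pattern: do an expensive step once and record completion with ut.writeto. A reduced, standalone sketch of that pattern follows; the function and path names are illustrative, not part of the wildbook code.

from os.path import join
import utool as ut

def run_once(workdir, taskname, task_func):
    # Sentinel-file pattern: only run task_func if its flag file is absent.
    flag_fpath = join(workdir, taskname + '.flag')
    if not ut.checkpath(flag_fpath):
        task_func()
        # Record completion so subsequent calls become no-ops.
        ut.writeto(flag_fpath, 'True')
    return flag_fpath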
Exemplo n.º 44
0
def main(bib_fpath=None):
    r"""
    Entry point to the fixbib script

    CommandLine:
        fixbib
        python -m fixtex bib
        python -m fixtex bib --dryrun
        python -m fixtex bib --dryrun --debug
    """

    if bib_fpath is None:
        bib_fpath = 'My Library.bib'

    # DEBUG = ub.argflag('--debug')
    # Read in text and ensure ascii format
    dirty_text = ut.readfrom(bib_fpath)

    from fixtex.fix_tex import find_used_citations, testdata_fpaths

    if exists('custom_extra.bib'):
        extra_parser = bparser.BibTexParser(ignore_nonstandard_types=False)
        parser = bparser.BibTexParser()
        ut.delete_keys(parser.alt_dict, ['url', 'urls'])
        print('Parsing extra bibtex file')
        extra_text = ut.readfrom('custom_extra.bib')
        extra_database = extra_parser.parse(extra_text, partial=False)
        print('Finished parsing extra')
        extra_dict = extra_database.get_entry_dict()
    else:
        extra_dict = None

    #udata = dirty_text.decode("utf-8")
    #dirty_text = udata.encode("ascii", "ignore")
    #dirty_text = udata

    # parser = bparser.BibTexParser()
    # bib_database = parser.parse(dirty_text)
    # d = bib_database.get_entry_dict()

    print('BIBTEXPARSER LOAD')
    parser = bparser.BibTexParser(ignore_nonstandard_types=False,
                                  common_strings=True)
    ut.delete_keys(parser.alt_dict, ['url', 'urls'])
    print('Parsing bibtex file')
    bib_database = parser.parse(dirty_text, partial=False)
    print('Finished parsing')

    bibtex_dict = bib_database.get_entry_dict()
    old_keys = list(bibtex_dict.keys())
    new_keys = []
    for key in ub.ProgIter(old_keys, label='fixing keys'):
        new_key = key
        new_key = new_key.replace(':', '')
        new_key = new_key.replace('-', '_')
        new_key = re.sub('__*', '_', new_key)
        new_keys.append(new_key)

    # assert len(ut.find_duplicate_items(new_keys)) == 0, 'new keys created conflict'
    assert len(ub.find_duplicates(new_keys)) == 0, 'new keys created conflict'

    for key, new_key in zip(old_keys, new_keys):
        if key != new_key:
            entry = bibtex_dict[key]
            entry['ID'] = new_key
            bibtex_dict[new_key] = entry
            del bibtex_dict[key]

    # The bibtex is now clean. Print it to stdout
    #print(clean_text)
    verbose = None
    if verbose is None:
        verbose = 1

    # Find citations from the tex documents
    key_list = None
    if key_list is None:
        cacher = ub.Cacher('texcite1', enabled=0)
        data = cacher.tryload()
        if data is None:
            fpaths = testdata_fpaths()
            key_list, inverse = find_used_citations(fpaths,
                                                    return_inverse=True)
            # ignore = ['JP', '?', 'hendrick']
            # for item in ignore:
            #     try:
            #         key_list.remove(item)
            #     except ValueError:
            #         pass
            if verbose:
                print('Found %d citations used in the document' %
                      (len(key_list), ))
            data = key_list, inverse
            cacher.save(data)
        key_list, inverse = data

    # else:
    #     key_list = None

    unknown_pubkeys = []
    debug_author = ub.argval('--debug-author', default=None)
    # ./fix_bib.py --debug_author=Kappes

    if verbose:
        print('Fixing %d/%d bibtex entries' %
              (len(key_list), len(bibtex_dict)))

    # debug = True
    debug = False
    if debug_author is not None:
        debug = False

    known_keys = list(bibtex_dict.keys())
    missing_keys = set(key_list) - set(known_keys)
    if extra_dict is not None:
        missing_keys.difference_update(set(extra_dict.keys()))

    if missing_keys:
        print('The library is missing keys found in tex files %s' %
              (ub.repr2(missing_keys), ))

    # Search for possible typos:
    candidate_typos = {}
    sedlines = []
    for key in missing_keys:
        candidates = ut.closet_words(key, known_keys, num=3, subset=True)
        if len(candidates) > 1:
            top = candidates[0]
            if ut.edit_distance(key, top) == 1:
                # "sed -i -e 's/{}/{}/g' *.tex".format(key, top)
                import os
                replpaths = ' '.join(
                    [relpath(p, os.getcwd()) for p in inverse[key]])
                sedlines.append("sed -i -e 's/{}/{}/g' {}".format(
                    key, top, replpaths))
        candidate_typos[key] = candidates
        print('Cannot find key = %r' % (key, ))
        print('Did you mean? %r' % (candidates, ))

    print('Quick fixes')
    print('\n'.join(sedlines))

    # group by file
    just = max([0] + list(map(len, missing_keys)))
    missing_fpaths = [inverse[key] for key in missing_keys]
    for fpath in sorted(set(ub.flatten(missing_fpaths))):
        # ut.fix_embed_globals()
        subkeys = [k for k in missing_keys if fpath in inverse[k]]
        print('')
        ut.cprint('--- Missing Keys ---', 'blue')
        ut.cprint('fpath = %r' % (fpath, ), 'blue')
        ut.cprint('{} | {}'.format('Missing'.ljust(just), 'Did you mean?'),
                  'blue')
        for key in subkeys:
            print('{} | {}'.format(ut.highlight_text(key.ljust(just), 'red'),
                                   ' '.join(candidate_typos[key])))

    # for key in list(bibtex_dict.keys()):

    if extra_dict is not None:
        # The extra database takes precedence over the regular one
        key_list = list(ut.unique(key_list + list(extra_dict.keys())))
        for k, v in extra_dict.items():
            bibtex_dict[k] = v

    full = ub.argflag('--full')

    for key in key_list:
        try:
            entry = bibtex_dict[key]
        except KeyError:
            continue
        self = BibTexCleaner(key, entry, full=full)

        if debug_author is not None:
            debug = debug_author in entry.get('author', '')

        if debug:
            ut.cprint(' --- ENTRY ---', 'yellow')
            print(ub.repr2(entry, nl=1))

        entry = self.fix()
        # self.clip_abstract()
        # self.shorten_keys()
        # self.fix_authors()
        # self.fix_year()
        # old_pubval = self.fix_pubkey()
        # if old_pubval:
        #     unknown_pubkeys.append(old_pubval)
        # self.fix_arxiv()
        # self.fix_general()
        # self.fix_paper_types()

        if debug:
            print(ub.repr2(entry, nl=1))
            ut.cprint(' --- END ENTRY ---', 'yellow')
        bibtex_dict[key] = entry

    unwanted_keys = set(bibtex_dict.keys()) - set(key_list)
    if verbose:
        print('Removing %d unwanted entries' % (len(unwanted_keys)))
    ut.delete_dict_keys(bibtex_dict, unwanted_keys)

    if 0:
        d1 = bibtex_dict.copy()
        full = True
        for key, entry in d1.items():
            self = BibTexCleaner(key, entry, full=full)
            pub = self.publication()
            if pub is None:
                print(self.entry['ENTRYTYPE'])

            old = self.fix_pubkey()
            x1 = self._pubval()
            x2 = self.standard_pubval(full=full)
            # if x2 is not None and len(x2) > 5:
            #     print(ub.repr2(self.entry))

            if x1 != x2:
                print('x2 = %r' % (x2, ))
                print('x1 = %r' % (x1, ))
                print(ub.repr2(self.entry))

            # if 'CVPR' in self.entry.get('booktitle', ''):
            #     if 'CVPR' != self.entry.get('booktitle', ''):
            #         break
            if old:
                print('old = %r' % (old, ))
            d1[key] = self.entry

    if full:
        d1 = bibtex_dict.copy()

        import numpy as np
        import pandas as pd
        df = pd.DataFrame.from_dict(d1, orient='index')

        paged_items = df[~pd.isnull(df['pub_accro'])]
        has_pages = ~pd.isnull(paged_items['pages'])
        print('have pages {} / {}'.format(has_pages.sum(), len(has_pages)))
        print(ub.repr2(paged_items[~has_pages]['title'].values.tolist()))

        entrytypes = dict(list(df.groupby('pub_type')))
        if False:
            # entrytypes['misc']
            g = entrytypes['online']
            g = g[g.columns[~np.all(pd.isnull(g), axis=0)]]

            entrytypes['book']
            entrytypes['thesis']
            g = entrytypes['article']
            g = entrytypes['incollection']
            g = entrytypes['conference']

        def lookup_pub(e):
            if e == 'article':
                return 'journal', 'journal'
            elif e == 'incollection':
                return 'booksection', 'booktitle'
            elif e == 'conference':
                return 'conference', 'booktitle'
            return None, None

        for e, g in entrytypes.items():
            print('e = %r' % (e, ))
            g = g[g.columns[~np.all(pd.isnull(g), axis=0)]]
            if 'pub_full' in g.columns:
                place_title = g['pub_full'].tolist()
                print(ub.repr2(ub.dict_hist(place_title)))
            else:
                print('Unknown publications')

        if 'report' in entrytypes:
            g = entrytypes['report']
            missing = g[pd.isnull(g['title'])]
            if len(missing):
                print('Missing Title')
                print(ub.repr2(missing[['title', 'author']].values.tolist()))

        if 'journal' in entrytypes:
            g = entrytypes['journal']
            g = g[g.columns[~np.all(pd.isnull(g), axis=0)]]

            missing = g[pd.isnull(g['journal'])]
            if len(missing):
                print('Missing Journal')
                print(ub.repr2(missing[['title', 'author']].values.tolist()))

        if 'conference' in entrytypes:
            g = entrytypes['conference']
            g = g[g.columns[~np.all(pd.isnull(g), axis=0)]]

            missing = g[pd.isnull(g['booktitle'])]
            if len(missing):
                print('Missing Booktitle')
                print(ub.repr2(missing[['title', 'author']].values.tolist()))

        if 'incollection' in entrytypes:
            g = entrytypes['incollection']
            g = g[g.columns[~np.all(pd.isnull(g), axis=0)]]

            missing = g[pd.isnull(g['booktitle'])]
            if len(missing):
                print('Missing Booktitle')
                print(ub.repr2(missing[['title', 'author']].values.tolist()))

        if 'thesis' in entrytypes:
            g = entrytypes['thesis']
            g = g[g.columns[~np.all(pd.isnull(g), axis=0)]]
            missing = g[pd.isnull(g['institution'])]
            if len(missing):
                print('Missing Institution')
                print(ub.repr2(missing[['title', 'author']].values.tolist()))

        # import utool
        # utool.embed()

    # Overwrite BibDatabase structure
    bib_database._entries_dict = bibtex_dict
    bib_database.entries = list(bibtex_dict.values())

    #conftitle_to_types_set_hist = {key: set(val) for key, val in conftitle_to_types_hist.items()}
    #print(ub.repr2(conftitle_to_types_set_hist))

    print('Unknown conference keys:')
    print(ub.repr2(sorted(unknown_pubkeys)))
    print('len(unknown_pubkeys) = %r' % (len(unknown_pubkeys), ))

    writer = BibTexWriter()
    writer.contents = ['comments', 'entries']
    writer.indent = '  '
    writer.order_entries_by = ('type', 'author', 'year')

    new_bibtex_str = bibtexparser.dumps(bib_database, writer)

    # Need to check
    #jegou_aggregating_2012

    # Fix the Journal Abbreviations
    # References:
    # https://www.ieee.org/documents/trans_journal_names.pdf

    # Write out clean bibfile in ascii format
    clean_bib_fpath = ub.augpath(bib_fpath.replace(' ', '_'), suffix='_clean')

    if not ub.argflag('--dryrun'):
        ut.writeto(clean_bib_fpath, new_bibtex_str)
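The closing lines show a dry-run guard around ut.writeto. A minimal isolated sketch of that guard is below; out_fpath and clean_text are placeholders, and ubelt/utool are assumed to be importable as in the snippet above.

import ubelt as ub
import utool as ut

def write_output(out_fpath, clean_text):
    # Respect --dryrun: report what would be written instead of writing it.
    if ub.argflag('--dryrun'):
        print('dryrun: would write %d chars to %r' % (len(clean_text), out_fpath))
    else:
        ut.writeto(out_fpath, clean_text)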
Exemplo n.º 45
0
def make_individual_latex_figures(ibs, fpaths_list, flat_case_labels,
                                  cfgx2_shortlbl, case_figdir,
                                  analysis_fpath_list):
    # HACK: MAKE LATEX CONVENIENCE STUFF
    #print('LATEX HACK')
    if len(fpaths_list) == 0:
        print('nothing to render')
        return
    RENDER = ut.get_argflag('--render')
    DUMP_FIGDEF = ut.get_argflag(('--figdump', '--dump-figdef', '--figdef'))

    if not (DUMP_FIGDEF or RENDER):  # HACK
        return

    latex_code_blocks = []
    latex_block_keys = []

    caption_prefix = ut.get_argval('--cappref', type_=str, default='')
    caption_suffix = ut.get_argval('--capsuf', type_=str, default='')
    cmdaug = ut.get_argval('--cmdaug', type_=str, default='custom')

    selected = None

    for case_idx, (fpaths,
                   labels) in enumerate(zip(fpaths_list, flat_case_labels)):
        if labels is None:
            labels = [cmdaug]
        if len(fpaths) < 4:
            nCols = len(fpaths)
        else:
            nCols = 2

        _cmdname = ibs.get_dbname() + ' Case ' + ' '.join(labels) + '_' + str(
            case_idx)
        #print('_cmdname = %r' % (_cmdname,))
        cmdname = ut.latex_sanitize_command_name(_cmdname)
        label_str = cmdname
        if len(caption_prefix) == 0:
            caption_str = ut.escape_latex(
                'Casetags: ' + ut.list_str(labels, nl=False, strvals=True) +
                ', db=' + ibs.get_dbname() + '. ')
        else:
            caption_str = ''

        use_sublbls = len(cfgx2_shortlbl) > 1
        if use_sublbls:
            caption_str += ut.escape_latex(
                'Each figure shows a different configuration: ')
            sublbls = [
                '(' + chr(97 + count) + ') '
                for count in range(len(cfgx2_shortlbl))
            ]
        else:
            #caption_str += ut.escape_latex('This figure depicts correct and
            #incorrect matches from configuration: ')
            sublbls = [''] * len(cfgx2_shortlbl)

        def wrap_tt(text):
            return r'{\tt ' + text + '}'

        _shortlbls = cfgx2_shortlbl
        _shortlbls = list(map(ut.escape_latex, _shortlbls))
        # Adjust spacing for breaks
        #tex_small_space = r''
        tex_small_space = r'\hspace{0pt}'
        # Remove query specific config flags in individual results
        _shortlbls = [
            re.sub('\\bq[^,]*,?', '', shortlbl) for shortlbl in _shortlbls
        ]
        # Let config strings be broken over newlines
        _shortlbls = [
            re.sub('\\+', tex_small_space + '+' + tex_small_space, shortlbl)
            for shortlbl in _shortlbls
        ]
        _shortlbls = [
            re.sub(', *', ',' + tex_small_space, shortlbl)
            for shortlbl in _shortlbls
        ]
        _shortlbls = list(map(wrap_tt, _shortlbls))
        cfgx2_texshortlbl = [
            '\n    ' + lbl + shortlbl
            for lbl, shortlbl in zip(sublbls, _shortlbls)
        ]

        caption_str += ut.conj_phrase(cfgx2_texshortlbl, 'and') + '.\n    '
        caption_str = '\n    ' + caption_prefix + caption_str + caption_suffix
        caption_str = caption_str.rstrip()
        figure_str = ut.get_latex_figure_str(fpaths,
                                             nCols=nCols,
                                             label_str=label_str,
                                             caption_str=caption_str,
                                             use_sublbls=None,
                                             use_frame=True)
        latex_block = ut.latex_newcommand(cmdname, figure_str)
        latex_block = '\n%----------\n' + latex_block
        latex_code_blocks.append(latex_block)
        latex_block_keys.append(cmdname)

    # HACK
    remove_fpath = ut.truepath('~/latex/crall-candidacy-2015') + '/'

    latex_fpath = join(case_figdir, 'latex_cases.tex')

    if selected is not None:
        selected_keys = selected
    else:
        selected_keys = latex_block_keys

    selected_blocks = ut.dict_take(
        dict(zip(latex_block_keys, latex_code_blocks)), selected_keys)

    figdef_block = '\n'.join(selected_blocks)
    figcmd_block = '\n'.join(['\\' + key for key in latex_block_keys])

    selected_block = figdef_block + '\n\n' + figcmd_block

    # HACK: need full paths to render
    selected_block_renderable = selected_block
    selected_block = selected_block.replace(remove_fpath, '')
    if RENDER:
        ut.render_latex_text(selected_block_renderable)

    if DUMP_FIGDEF:
        ut.writeto(latex_fpath, selected_block)

    #if NOT DUMP AND NOT RENDER:
    #    print('STANDARD LATEX RESULTS')
    #    cmdname = ibs.get_dbname() + 'Results'
    #    latex_block  = ut.get_latex_figure_str2(analysis_fpath_list, cmdname, nCols=1)
    #    ut.print_code(latex_block, 'latex')
    if DUMP_FIGDEF or RENDER:
        ut.print_code(selected_block, 'latex')
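As a reduced illustration of the figure-definition dump above, here is a hedged sketch that builds a single LaTeX figure command and writes it with ut.writeto; the command name, caption, and output location are made up for this sketch.

from os.path import join
import utool as ut

def dump_single_figdef(fpaths, case_figdir):
    # Build one LaTeX figure command and write it next to the case figures.
    figure_str = ut.get_latex_figure_str(fpaths, nCols=2,
                                         label_str='ExampleCase',
                                         caption_str='Example case figure.',
                                         use_frame=True)
    latex_block = ut.latex_newcommand('ExampleCase', figure_str)
    ut.writeto(join(case_figdir, 'latex_cases.tex'), latex_block)
    return latex_block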