def update_wildbook_config(ibs, wildbook_tomcat_path, dryrun=False):
    """
    Point Wildbook's commonConfiguration.properties at *this* IBEIS database
    by rewriting its IBEIS_DB_path / IBEIS_image_path entries.

    Args:
        ibs: IBEIS controller providing the database and image directories
        wildbook_tomcat_path (str): path to the deployed Wildbook webapp
        dryrun (bool): if True, skip the sudo write when elevation is needed

    Side effects:
        May rewrite the properties file in place, via ``sudo sh -c`` when the
        file is not writable by the current user.
    """
    wildbook_properteis_dpath = join(wildbook_tomcat_path, 'WEB-INF/classes/bundles/')
    # BUGFIX: log tag previously said wildbook_signal_eid_list(); use the
    # actual function name so grepping logs leads to the right code.
    print('[ibs.update_wildbook_config()] Wildbook properties=%r' % (
        wildbook_properteis_dpath, ))
    # The src file is non-standard. It should be removed here as well
    wildbook_config_fpath_dst = join(wildbook_properteis_dpath,
                                     'commonConfiguration.properties')
    ut.assert_exists(wildbook_properteis_dpath)
    # for some reason the .default file is not there, that should be ok though
    orig_content = ut.read_from(wildbook_config_fpath_dst)
    content = orig_content
    content = re.sub('IBEIS_DB_path = .*',
                     'IBEIS_DB_path = ' + ibs.get_db_core_path(), content)
    content = re.sub('IBEIS_image_path = .*',
                     'IBEIS_image_path = ' + ibs.get_imgdir(), content)
    # Write to the configuration if it is different
    if orig_content != content:
        need_sudo = not ut.is_file_writable(wildbook_config_fpath_dst)
        if need_sudo:
            quoted_content = '"%s"' % (content, )
            print('Attempting to gain sudo access to update wildbook config')
            # NOTE(review): this quoting breaks if content contains double
            # quotes, backslashes, or $ — consider `sudo tee` with a pipe.
            command = ['sudo', 'sh', '-c', '\'', 'echo', quoted_content,
                       '>', wildbook_config_fpath_dst, '\'']
            # ut.cmd(command, sudo=True)
            command = ' '.join(command)
            if not dryrun:
                os.system(command)
        else:
            ut.write_to(wildbook_config_fpath_dst, content)
def std_build_command(repo='.'):
    """
    Run my standard build script for the current platform.

    Calls mingw_build.bat on windows and unix_build.sh on unix.
    NOTE(review): two later duplicate definitions of this function exist in
    this file; the last one defined wins at import time.
    """
    import utool as ut
    print("+**** stdbuild *******")
    print('repo = %r' % (repo,))
    # Pick the platform-specific build script
    if sys.platform.startswith('win32'):
        #scriptname = './mingw_build.bat'
        # vtool --rebuild-sver didnt work with this line
        scriptname = 'mingw_build.bat'
    else:
        scriptname = './unix_build.sh'
    if repo == '':
        # default to cwd
        repo = '.'
    else:
        os.chdir(repo)
    ut.assert_exists(scriptname)
    rmbuild_off = '--no-rmbuild'
    if ut.get_argflag(rmbuild_off):
        scriptname = ' '.join([scriptname, rmbuild_off])
    # Execute build
    ut.cmd(scriptname)
    #os.system(scriptname)
    print("L**** stdbuild *******")
def std_build_command(repo="."): """ DEPRICATE My standard build script names. Calls mingw_build.bat on windows and unix_build.sh on unix """ import utool as ut print("+**** stdbuild *******") print("repo = %r" % (repo,)) if sys.platform.startswith("win32"): # vtool --rebuild-sver didnt work with this line # scriptname = './mingw_build.bat' scriptname = "mingw_build.bat" else: scriptname = "./unix_build.sh" if repo == "": # default to cwd repo = "." else: os.chdir(repo) ut.assert_exists(scriptname) normbuild_flag = "--no-rmbuild" if ut.get_argflag(normbuild_flag): scriptname += " " + normbuild_flag # Execute build ut.cmd(scriptname) # os.system(scriptname) print("L**** stdbuild *******")
def std_build_command(repo='.'):
    """
    DEPRICATE

    My standard build script names: mingw_build.bat on windows,
    unix_build.sh on unix.
    """
    import utool as ut
    print('+**** stdbuild *******')
    print('repo = %r' % (repo,))
    # Platform dispatch for the build script name
    is_win32 = sys.platform.startswith('win32')
    # vtool --rebuild-sver didnt work with this line
    #scriptname = './mingw_build.bat'
    scriptname = 'mingw_build.bat' if is_win32 else './unix_build.sh'
    if repo == '':
        # default to cwd
        repo = '.'
    else:
        os.chdir(repo)
    ut.assert_exists(scriptname)
    flag = '--no-rmbuild'
    if ut.get_argflag(flag):
        scriptname += ' ' + flag
    # Execute build
    ut.cmd(scriptname)
    #os.system(scriptname)
    print('L**** stdbuild *******')
def run_suite_test():
    """Launch the built application binary and run its utool test suite."""
    app_fpath = get_dist_app_fpath()
    ut.assert_exists(app_fpath, 'app fpath must exist', info=True, verbose=True)
    suite_cmd = app_fpath + ' --run-utool-tests'
    ut.cmd(suite_cmd)
def update_wildbook_ia_config(ibs, wildbook_tomcat_path, dryrun=False):
    """
    Rewrite Wildbook's commonConfiguration.properties so its IBEISIARestUrl
    entries point at the IA web server this controller is serving.

    #if use_config_file and wildbook_tomcat_path:
    #    # Update the Wildbook configuration to see *THIS* wbia database
    #    with lockfile.LockFile(lock_fpath):
    #        update_wildbook_ia_config(ibs, wildbook_tomcat_path, dryrun)
    """
    wildbook_properteis_dpath = join(wildbook_tomcat_path, 'WEB-INF/classes/bundles/')
    logger.info('[ibs.update_wildbook_ia_config()] Wildbook properties=%r' %
                (wildbook_properteis_dpath, ))
    # The src file is non-standard. It should be removed here as well
    wildbook_config_fpath_dst = join(wildbook_properteis_dpath,
                                     'commonConfiguration.properties')
    ut.assert_exists(wildbook_properteis_dpath)
    # for some reason the .default file is not there, that should be ok though
    orig_content = ut.read_from(wildbook_config_fpath_dst)
    content = orig_content
    # Make sure wildbook knows where to find us
    if False:
        # Old way of telling WB where to find IA (kept for reference; dead code)
        content = re.sub('IBEIS_DB_path = .*',
                         'IBEIS_DB_path = ' + ibs.get_db_core_path(), content)
        content = re.sub('IBEIS_image_path = .*',
                         'IBEIS_image_path = ' + ibs.get_imgdir(), content)
    # Discover which port the IA web server is actually listening on
    web_port = ibs.get_web_port_via_scan()
    if web_port is None:
        raise ValueError('IA web server is not running on any expected port')
    ia_hostport = 'http://localhost:%s' % (web_port, )
    # Replace only the http://host:port part of each IBEISIARestUrl entry,
    # keeping the key prefix via a backreference.
    ia_rest_prefix = ut.named_field('prefix', 'IBEISIARestUrl.*')
    host_port = ut.named_field('host_port', 'http://.*?:[0-9]+')
    content = re.sub(ia_rest_prefix + host_port,
                     ut.bref_field('prefix') + ia_hostport, content)
    # Write to the configuration if it is different
    if orig_content != content:
        need_sudo = not ut.is_file_writable(wildbook_config_fpath_dst)
        if need_sudo:
            quoted_content = '"%s"' % (content, )
            logger.info(
                'Attempting to gain sudo access to update wildbook config')
            # NOTE(review): assumes content has no embedded double quotes /
            # shell metacharacters — confirm, or switch to `sudo tee`.
            command = [
                'sudo', 'sh', '-c', "'", 'echo', quoted_content, '>',
                wildbook_config_fpath_dst, "'",
            ]
            # ut.cmd(command, sudo=True)
            command = ' '.join(command)
            if not dryrun:
                os.system(command)
        else:
            ut.write_to(wildbook_config_fpath_dst, content)
def get_wildbook_info(ibs, tomcat_dpath=None, wb_target=None):
    """
    Locate the Wildbook deployment for this controller.

    Returns:
        tuple: (wildbook_base_url, wildbook_tomcat_path)
    """
    # TODO: Clean this up
    base_url = ibs.get_wildbook_base_url(wb_target)
    tomcat_path = ibs.get_wildbook_tomcat_path(tomcat_dpath, wb_target)
    # Setup
    print('Looking for WildBook installation: %r' % (tomcat_path, ))
    ut.assert_exists(tomcat_path,
                     'Wildbook is not installed on this machine', info=True)
    return base_url, tomcat_path
def ensure_model(model, redownload=False):
    """
    Fetch (or reuse a cached copy of) a named CNN model file.

    Falls back to a pre-placed file in the app resource dir when the model
    key is not in MODEL_URLS.
    """
    try:
        remote_url = MODEL_DOMAIN + MODEL_URLS[model]
        extracted_fpath = ut.grab_file_url(remote_url, appname='ibeis_cnn',
                                           redownload=redownload,
                                           check_hash=True)
    except KeyError as ex:
        # Unknown model key: look for a manually-installed copy instead
        ut.printex(ex, 'model is not uploaded', iswarning=True)
        extracted_fpath = ut.unixjoin(ut.get_app_resource_dir('ibeis_cnn'), model)
    ut.assert_exists(extracted_fpath)
    return extracted_fpath
def build_sphinx_apidoc_cmdstr():
    """
    Construct the sphinx-apidoc command line for the single package in cwd.

    Returns:
        tuple: (apidoc_cmdstr, modpath, outputdir)
    """
    print('')
    print('if this fails try: sudo pip install sphinx')
    print('')
    apidoc = 'sphinx-apidoc'
    # On windows the exe lives under the Scripts dir and needs its suffix
    if ut.WIN32:
        exe_fpath = 'C:/Python27/Scripts/' + apidoc + '.exe'
    else:
        exe_fpath = apidoc
    argfmt_parts = [
        exe_fpath,
        '--force',
        '--full',
        '--maxdepth="{maxdepth}"',
        '--doc-author="{author}"',
        '--doc-version="{doc_version}"',
        '--doc-release="{doc_release}"',
        '--output-dir="_doc"',
        #'--separate',  # Put documentation for each module on its own page
        '--private',  # Include "_private" modules
        '{pkgdir}',
    ]
    outputdir = '_doc'
    author = ut.parse_author()
    packages = ut.find_packages(maxdepth=1)
    assert len(packages) != 0, 'directory must contain at least one package'
    if len(packages) > 1:
        assert len(packages) == 1,\
            ('FIXME I dont know what to do with more than one root package: %r'
             % (packages,))
    pkgdir = packages[0]
    version = ut.parse_package_for_version(pkgdir)
    modpath = dirname(ut.truepath(pkgdir))
    fmt_kwargs = {
        'author': author,
        'maxdepth': '8',
        'pkgdir': pkgdir,
        'doc_version': version,
        'doc_release': version,
        'outputdir': outputdir,
    }
    ut.assert_exists('setup.py')
    ut.ensuredir('_doc')
    apidoc_cmdstr = ' '.join(argfmt_parts).format(**fmt_kwargs)
    print('[util_setup] autogenerate sphinx docs for %r' % (pkgdir, ))
    if ut.VERBOSE:
        print(ut.dict_str(fmt_kwargs))
    return apidoc_cmdstr, modpath, outputdir
def __init__(drive, root_dpath=None, state_fpath=None):
    """
    Register the directory tree under ``root_dpath`` with a shelf-backed cache.
    """
    drive.root_dpath = ut.truepath(ut.ensure_unicode(root_dpath))
    print('Initializing drive %s' % (drive.root_dpath,))
    ut.assert_exists(drive.root_dpath)
    # Mapping from dpath strings to fpath indexes
    assert state_fpath is None, 'not yet supported for external analysis'
    drive.cache_fname = join(drive.root_dpath, 'ut_pathreg_cache.shelf')
    # Lazily-populated index state; filled in by later passes
    for lazy_attr in ('fpath_bytes_list_', 'dpath_to_fidx_',
                      'fpath_hashX_list_', 'hash_to_fidxs_'):
        setattr(drive, lazy_attr, None)
    drive.cache = ut.ShelfCacher(drive.cache_fname)
def __init__(drive, root_dpath=None, state_fpath=None):
    """
    Set up a drive index rooted at ``root_dpath`` backed by a shelf cache.
    """
    root = ut.truepath(ut.ensure_unicode(root_dpath))
    print('Initializing drive %s' % (root, ))
    ut.assert_exists(root)
    drive.root_dpath = root
    # Mapping from dpath strings to fpath indexes
    assert state_fpath is None, 'not yet supported for external analysis'
    drive.cache_fname = join(root, 'ut_pathreg_cache.shelf')
    # Index state is computed lazily; start everything as None
    drive.fpath_bytes_list_ = None
    drive.dpath_to_fidx_ = None
    drive.fpath_hashX_list_ = None
    drive.hash_to_fidxs_ = None
    drive.cache = ut.ShelfCacher(drive.cache_fname)
def build_sphinx_apidoc_cmdstr():
    """
    Assemble the sphinx-apidoc invocation string for the package in cwd.

    Returns:
        tuple: (apidoc_cmdstr, modpath, outputdir)
    """
    print('')
    print('if this fails try: sudo pip install sphinx')
    print('')
    apidoc = 'sphinx-apidoc'
    sphinx_apidoc_exe = ('C:/Python27/Scripts/' + apidoc + '.exe'
                         if ut.WIN32 else apidoc)
    apidoc_argfmt_list = [
        sphinx_apidoc_exe,
        '--force',
        '--full',
        '--maxdepth="{maxdepth}"',
        '--doc-author="{author}"',
        '--doc-version="{doc_version}"',
        '--doc-release="{doc_release}"',
        '--output-dir="_doc"',
        #'--separate',  # Put documentation for each module on its own page
        '--private',  # Include "_private" modules
        '{pkgdir}',
    ]
    outputdir = '_doc'
    author = ut.parse_author()
    # Exactly one root package is supported
    packages = ut.find_packages(maxdepth=1)
    assert len(packages) != 0, 'directory must contain at least one package'
    if len(packages) > 1:
        assert len(packages) == 1,\
            ('FIXME I dont know what to do with more than one root package: %r'
             % (packages,))
    pkgdir = packages[0]
    version = ut.parse_package_for_version(pkgdir)
    modpath = dirname(ut.truepath(pkgdir))
    apidoc_fmtdict = {
        'author': author,
        'maxdepth': '8',
        'pkgdir': pkgdir,
        'doc_version': version,
        'doc_release': version,
        'outputdir': outputdir,
    }
    ut.assert_exists('setup.py')
    ut.ensuredir('_doc')
    apidoc_fmtstr = ' '.join(apidoc_argfmt_list)
    apidoc_cmdstr = apidoc_fmtstr.format(**apidoc_fmtdict)
    print('[util_setup] autogenerate sphinx docs for %r' % (pkgdir,))
    if ut.VERBOSE:
        print(ut.dict_str(apidoc_fmtdict))
    return apidoc_cmdstr, modpath, outputdir
def grab_selenium_chromedriver(redownload=False):
    r"""
    Automatically download selenium chrome driver if needed

    Downloads the latest chromedriver into ~/bin (which must already be on
    PATH), marks it executable, and exports its location via the
    ``webdriver.chrome.driver`` environment variable.

    CommandLine:
        python -m utool.util_grabdata --test-grab_selenium_chromedriver:1

    Example:
        >>> # DISABLE_DOCTEST
        >>> ut.grab_selenium_chromedriver()
        >>> import selenium.webdriver
        >>> driver = selenium.webdriver.Chrome()
        >>> driver.get('http://www.google.com')
        >>> search_field = driver.find_element_by_name('q')
        >>> search_field.send_keys('puppies')
        >>> search_field.send_keys(selenium.webdriver.common.keys.Keys.ENTER)

    Example1:
        >>> # DISABLE_DOCTEST
        >>> import selenium.webdriver
        >>> driver = selenium.webdriver.Firefox()
        >>> driver.get('http://www.google.com')
        >>> search_field = driver.find_element_by_name('q')
        >>> search_field.send_keys('puppies')
        >>> search_field.send_keys(selenium.webdriver.common.keys.Keys.ENTER)
    """
    import utool as ut
    import os
    import stat
    # TODO: use a better download dir (but it must be in the PATh or selenium freaks out)
    chromedriver_dpath = ut.ensuredir(ut.truepath('~/bin'))
    chromedriver_fpath = join(chromedriver_dpath, 'chromedriver')
    if not ut.checkpath(chromedriver_fpath) or redownload:
        # Selenium finds the driver via PATH, so fail early if ~/bin is not on it
        assert chromedriver_dpath in os.environ['PATH'].split(os.pathsep)
        # TODO: make this work for windows as well
        if ut.LINUX and ut.util_cplat.is64bit_python():
            import requests
            # LATEST_RELEASE is a plain-text version string served by the bucket
            rsp = requests.get('http://chromedriver.storage.googleapis.com/LATEST_RELEASE')
            assert rsp.status_code == 200
            url = 'http://chromedriver.storage.googleapis.com/' + rsp.text.strip() + '/chromedriver_linux64.zip'
            ut.grab_zipped_url(url, download_dir=chromedriver_dpath, redownload=True)
        else:
            raise AssertionError('unsupported chrome driver getter script')
        if not ut.WIN32:
            # presumably the zip extraction drops the exec bit — restore it
            st = os.stat(chromedriver_fpath)
            os.chmod(chromedriver_fpath, st.st_mode | stat.S_IEXEC)
    ut.assert_exists(chromedriver_fpath)
    os.environ['webdriver.chrome.driver'] = chromedriver_fpath
    return chromedriver_fpath
def update_wildbook_ia_config(ibs, wildbook_tomcat_path, dryrun=False, ia_port=5000):
    """
    Rewrite Wildbook's commonConfiguration.properties so its IBEISIARestUrl
    entries point at the local IA web server.

    #if use_config_file and wildbook_tomcat_path:
    #    # Update the Wildbook configuration to see *THIS* ibeis database
    #    with lockfile.LockFile(lock_fpath):
    #        update_wildbook_ia_config(ibs, wildbook_tomcat_path, dryrun)

    Args:
        ibs: IBEIS controller (used only by the disabled legacy branch)
        wildbook_tomcat_path (str): path to the deployed Wildbook webapp
        dryrun (bool): if True, skip the sudo write when elevation is needed
        ia_port (int): port of the IA web server (default 5000; previously
            hard-coded — parameterized for non-default deployments)
    """
    wildbook_properteis_dpath = join(wildbook_tomcat_path, 'WEB-INF/classes/bundles/')
    print('[ibs.update_wildbook_ia_config()] Wildbook properties=%r' % (
        wildbook_properteis_dpath, ))
    # The src file is non-standard. It should be removed here as well
    wildbook_config_fpath_dst = join(wildbook_properteis_dpath,
                                     'commonConfiguration.properties')
    ut.assert_exists(wildbook_properteis_dpath)
    # for some reason the .default file is not there, that should be ok though
    orig_content = ut.read_from(wildbook_config_fpath_dst)
    content = orig_content
    # Make sure wildbook knows where to find us
    if False:
        # Old way of telling WB where to find IA
        content = re.sub('IBEIS_DB_path = .*',
                         'IBEIS_DB_path = ' + ibs.get_db_core_path(), content)
        content = re.sub('IBEIS_image_path = .*',
                         'IBEIS_image_path = ' + ibs.get_imgdir(), content)
    ia_hostport = 'http://localhost:%s' % (ia_port, )
    # Keep the IBEISIARestUrl key prefix, replace only the http://host:port part
    ia_rest_prefix = ut.named_field('prefix', 'IBEISIARestUrl.*')
    host_port = ut.named_field('host_port', 'http://.*?:[0-9]+')
    content = re.sub(ia_rest_prefix + host_port,
                     ut.bref_field('prefix') + ia_hostport, content)
    # Write to the configuration if it is different
    if orig_content != content:
        need_sudo = not ut.is_file_writable(wildbook_config_fpath_dst)
        if need_sudo:
            quoted_content = '"%s"' % (content, )
            print('Attempting to gain sudo access to update wildbook config')
            command = ['sudo', 'sh', '-c', '\'', 'echo', quoted_content,
                       '>', wildbook_config_fpath_dst, '\'']
            # ut.cmd(command, sudo=True)
            command = ' '.join(command)
            if not dryrun:
                os.system(command)
        else:
            ut.write_to(wildbook_config_fpath_dst, content)
def run_app_test():
    """Execute the installed app"""
    print('[installer] +--- TEST_APP ---')
    app_fpath = get_dist_app_fpath()
    ut.assert_exists(app_fpath, 'app fpath must exist', info=True, verbose=True)
    if ut.DARWIN:
        #ut.cmd('open ' + ut.unixpath('dist/IBEIS.app'))
        # Manual install / launch scratch notes for the macOS bundle:
        """
        rm -rf ~/Desktop/IBEIS.app
        rm -rf /Applications/IBEIS.app
        ls /Applications/IBEIS.app
        cd /Volumes/IBEIS
        ib
        cd dist
        # Install to /Applications
        hdiutil attach ~/code/ibeis/dist/IBEIS.dmg
        cp -R /Volumes/IBEIS/IBEIS.app /Applications/IBEIS.app
        hdiutil unmount /Volumes/IBEIS
        open -a /Applications/IBEIS.app
        chmod +x /Applications/IBEIS.app/Contents/MacOS/IBEISApp
        cp -R /Volumes/IBEIS/IBEIS.app ~/Desktop
        open -a ~/Desktop/IBEIS.app
        chmod +x ~/code/ibeis/dist/IBEIS.app/Contents/MacOS/IBEISApp
        open -a ~/code/ibeis/dist/IBEIS.app
        open ~/code/ibeis/dist/IBEIS.app/Contents/MacOS/IBEISApp
        open ~/Desktop/IBEIS.app
        ./dist/IBEIS.app/Contents/MacOS/IBEISApp --run-tests
        """
    # Both branches launched the app identically; do it once
    ut.cmd(app_fpath)
    print('[installer] L___ FINISH TEST_APP ___')
def test_run_app():
    """Execute the installed app"""
    print('[installer] +--- TEST_APP ---')
    app_fpath = get_dist_app_fpath()
    ut.assert_exists(app_fpath, 'app fpath must exist', info=True, verbose=True)
    if ut.DARWIN:
        #ut.cmd('open ' + ut.unixpath('dist/IBEIS.app'))
        # Scratch notes for installing / debugging the .app bundle by hand:
        """
        rm -rf ~/Desktop/IBEIS.app
        rm -rf /Applications/IBEIS.app
        ls /Applications/IBEIS.app
        cd /Volumes/IBEIS
        ib
        cd dist
        # Install to /Applications
        hdiutil attach ~/code/ibeis/dist/IBEIS.dmg
        cp -R /Volumes/IBEIS/IBEIS.app /Applications/IBEIS.app
        hdiutil unmount /Volumes/IBEIS
        open -a /Applications/IBEIS.app
        chmod +x /Applications/IBEIS.app/Contents/MacOS/IBEISApp
        cp -R /Volumes/IBEIS/IBEIS.app ~/Desktop
        open -a ~/Desktop/IBEIS.app
        chmod +x ~/code/ibeis/dist/IBEIS.app/Contents/MacOS/IBEISApp
        open -a ~/code/ibeis/dist/IBEIS.app
        open ~/code/ibeis/dist/IBEIS.app/Contents/MacOS/IBEISApp
        open ~/Desktop/IBEIS.app
        ./dist/IBEIS.app/Contents/MacOS/IBEISApp --run-tests
        """
    # The darwin and non-darwin paths ran the same command; collapsed here
    ut.cmd(app_fpath)
    print('[installer] L___ FINISH TEST_APP ___')
def sync_ibeisdb(remote_uri, dbname, mode='pull', workdir=None, port=22, dryrun=False):
    """
    syncs an ibeisdb without syncing the cache or the chip directory
    (or the top level image directory because it shouldnt exist unless it
    is an old hots database)

    Args:
        remote_uri (str): rsync-style remote location (e.g. user@host:/path)
        dbname (str): name of the database directory to sync
        mode (str): 'pull' (remote -> local) or 'push' (local -> remote)
        workdir (str): local work directory; defaults to the ibeis workdir
        port (int): ssh port passed through to rsync
        dryrun (bool): if True, pass the dry-run flag through to rsync
    """
    print('[sync_ibeisdb] Syncing')
    print(' * dbname=%r ' % (dbname,))
    print(' * remote_uri=%r' % (remote_uri,))
    # FIX: wrap mode in a tuple; '%r' % mode misformats if mode is a tuple
    print(' * mode=%r' % (mode,))
    import ibeis
    # Consistency with sync_wbiadb: fail fast on a missing database name
    assert dbname is not None, 'must specify a database name'
    # Excluded temporary and cached data
    exclude_dirs = list(map(ut.ensure_unixslash, ibeis.const.EXCLUDE_COPY_REL_DIRS))
    # Specify local workdir
    if workdir is None:
        workdir = ibeis.sysres.get_workdir()
    local_uri = ut.ensure_unixslash(workdir)
    if ut.WIN32:
        # fix for mingw rsync
        local_uri = ut.ensure_mingw_drive(local_uri)
    if mode == 'pull':
        # pull remote to local
        remote_src = ut.unixjoin(remote_uri, dbname)
        ut.assert_exists(local_uri)
        rsync(remote_src, local_uri, exclude_dirs, port, dryrun=dryrun)
    elif mode == 'push':
        # push local to remote
        local_src = ut.unixjoin(local_uri, dbname)
        # NOTE(review): the existence check runs before rsync on a real run
        # but after rsync on a dry run — preserved as-is; verify intent.
        if not dryrun:
            ut.assert_exists(local_src)
        rsync(local_src, remote_uri, exclude_dirs, port, dryrun=dryrun)
        if dryrun:
            ut.assert_exists(local_src)
    else:
        raise AssertionError('unknown mode=%r' % (mode,))
def sync_wbiadb(remote_uri, dbname, mode='pull', workdir=None, port=22, dryrun=False):
    """
    Mirror a wbia database between a remote uri and the local workdir using
    rsync, skipping the cache, chip directory, and the legacy top-level
    image directory.
    """
    logger.info('[sync_wbiadb] Syncing')
    logger.info(' * dbname=%r ' % (dbname, ))
    logger.info(' * remote_uri=%r' % (remote_uri, ))
    logger.info(' * mode=%r' % (mode))
    import wbia
    assert dbname is not None, 'must specify a database name'
    # Temporary and cached data is never transferred
    exclude_dirs = [ut.ensure_unixslash(rel)
                    for rel in wbia.const.EXCLUDE_COPY_REL_DIRS]
    # Resolve the local side of the transfer
    if workdir is None:
        workdir = wbia.sysres.get_workdir()
    local_uri = ut.ensure_unixslash(workdir)
    if ut.WIN32:
        # fix for mingw rsync
        local_uri = ut.ensure_mingw_drive(local_uri)
    if mode == 'pull':
        # remote -> local
        remote_src = ut.unixjoin(remote_uri, dbname)
        ut.assert_exists(local_uri)
        ut.rsync(remote_src, local_uri, exclude_dirs, port, dryrun=dryrun)
    elif mode == 'push':
        # local -> remote
        local_src = ut.unixjoin(local_uri, dbname)
        if not dryrun:
            ut.assert_exists(local_src)
        ut.rsync(local_src, remote_uri, exclude_dirs, port, dryrun=dryrun)
        if dryrun:
            ut.assert_exists(local_src)
    else:
        raise AssertionError('unknown mode=%r' % (mode, ))
def autogen_sphinx_apidoc():
    r"""
    Autogenerate sphinx API docs for the package in the cwd, then patch the
    generated conf.py (theme, extensions, sys.path header, IGNORE filter)
    and run ``make html``.

    autogen_sphinx_docs.py

    Ignore:
        C:\Python27\Scripts\autogen_sphinx_docs.py
        autogen_sphinx_docs.py

        pip uninstall sphinx
        pip install sphinx
        pip install sphinxcontrib-napoleon
        cd C:\Python27\Scripts
        ls C:\Python27\Scripts

    """
    import utool as ut
    # TODO: assert sphinx-apidoc exe is found
    # TODO: make find_exe word?
    print('')
    print('if this fails try: sudo pip install sphinx')
    print('')
    winprefix = 'C:/Python27/Scripts/'
    apidoc = 'sphinx-apidoc'
    sphinx_apidoc_exe = apidoc if not ut.WIN32 else winprefix + apidoc + '.exe'
    apidoc_argfmt_list = [
        sphinx_apidoc_exe,
        '--force',
        '--full',
        '--maxdepth="{maxdepth}"',
        '--doc-author="{author}"',
        '--doc-version="{doc_version}"',
        '--doc-release="{doc_release}"',
        '--output-dir="_doc"',
        #'--separate',  # Put documentation for each module on its own page
        '--private',  # Include "_private" modules
        '{pkgdir}',
    ]
    outputdir = '_doc'
    author = ut.parse_author()
    packages = ut.find_packages(maxdepth=1)
    assert len(packages) != 0, 'directory must contain at least one package'
    if len(packages) > 1:
        assert len(packages) == 1,\
            ('FIXME I dont know what to do with more than one root package: %r'
             % (packages,))
    pkgdir = packages[0]
    version = ut.parse_package_for_version(pkgdir)
    modpath = dirname(ut.truepath(pkgdir))
    apidoc_fmtdict = {
        'author': author,
        'maxdepth': '8',
        'pkgdir': pkgdir,
        'doc_version': version,
        'doc_release': version,
        'outputdir': outputdir,
    }
    ut.assert_exists('setup.py')
    ut.ensuredir('_doc')
    apidoc_fmtstr = ' '.join(apidoc_argfmt_list)
    apidoc_cmdstr = apidoc_fmtstr.format(**apidoc_fmtdict)
    # sphinx-apidoc outputs conf.py to <outputdir>, add custom commands
    print('[util_setup] autogenerate sphinx docs for %r' % (pkgdir,))
    if ut.VERBOSE:
        print(ut.dict_str(apidoc_fmtdict))
    ut.cmd(apidoc_cmdstr, shell=True)
    #
    # Change dir to <outputdir>
    print('chdir' + outputdir)
    os.chdir(outputdir)
    #
    # Make custom edits to conf.py
    # FIXME:
    #ext_search_text = ut.unindent(
    #    r'''
    #    extensions = [
    #    [^\]]*
    #    ]
    #    ''')
    # Regex matching the generated `extensions = [...]` block in conf.py
    ext_search_text = r'extensions = \[[^/]*\]'
    # TODO: http://sphinx-doc.org/ext/math.html#module-sphinx.ext.pngmath
    #'sphinx.ext.mathjax',
    exclude_modules = []  # ['ibeis.all_imports']
    # Replacement extensions block (also mocks out excluded modules)
    ext_repl_text = ut.codeblock(
        '''
        MOCK_MODULES = {exclude_modules}
        if len(MOCK_MODULES) > 0:
            import mock
            for mod_name in MOCK_MODULES:
                sys.modules[mod_name] = mock.Mock()

        extensions = [
            'sphinx.ext.autodoc',
            'sphinx.ext.viewcode',
            # For LaTeX
            'sphinx.ext.pngmath',
            # For Google Sytle Docstrs
            # https://pypi.python.org/pypi/sphinxcontrib-napoleon
            'sphinxcontrib.napoleon',
            #'sphinx.ext.napoleon',
        ]
        '''
    ).format(exclude_modules=str(exclude_modules))
    # Swap the default theme for the read-the-docs theme
    theme_search = 'html_theme = \'default\''
    theme_repl = ut.codeblock(
        '''
        import sphinx_rtd_theme
        html_theme = "sphinx_rtd_theme"
        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
        ''')
    # Header prepended to conf.py: imports, env flags, and sys.path setup
    head_text = ut.codeblock(
        '''
        from sphinx.ext.autodoc import between
        import sphinx_rtd_theme
        import sys
        import os

        # Dont parse IBEIS args
        os.environ['IBIES_PARSE_ARGS'] = 'OFF'
        os.environ['UTOOL_AUTOGEN_SPHINX_RUNNING'] = 'ON'

        sys.path.append('{modpath}')
        sys.path.append(sys.path.insert(0, os.path.abspath("../")))

        autosummary_generate = True

        modindex_common_prefix = ['_']
        '''
    ).format(modpath=ut.truepath(modpath))
    # Footer appended to conf.py: strips docstring sections marked IGNORE
    tail_text = ut.codeblock(
        '''
        def setup(app):
            # Register a sphinx.ext.autodoc.between listener to ignore everything
            # between lines that contain the word IGNORE
            app.connect('autodoc-process-docstring', between('^.*IGNORE.*$', exclude=True))
            return app
        '''
    )
    conf_fname = 'conf.py'
    conf_text = ut.read_from(conf_fname)
    # NOQA-tag the generated imports so the prepended header can re-import them
    conf_text = conf_text.replace('import sys', 'import sys  # NOQA')
    conf_text = conf_text.replace('import os', 'import os  # NOQA')
    conf_text = ut.regex_replace(theme_search, theme_repl, conf_text)
    conf_text = ut.regex_replace(ext_search_text, ext_repl_text, conf_text)
    conf_text = head_text + '\n' + conf_text + tail_text
    ut.write_to(conf_fname, conf_text)
    # Make the documentation
    #if ut.LINUX:
    #    ut.cmd('make html', shell=True)
    #if ut.WIN32:
    #raw_input('waiting')
    ut.cmd('make', 'html', shell=True)
def test_suite():
    """Run the utool test suite of the built application binary."""
    app_fpath = get_dist_app_fpath()
    ut.assert_exists(app_fpath, 'app fpath must exist', info=True, verbose=True)
    ut.cmd('%s --run-utool-tests' % (app_fpath,))