def build_openlayers():
    """ Task to install and build a compressed version of OpenLayers from github """
    info('Installing Openlayers')
    with pushd(javascript_folder):
        sh('git clone git://github.com/openlayers/openlayers.git', capture=True)
        with pushd('openlayers/build/'):
            info('Building OpenLayers into a compressed file')
            sh('python build.py', capture=True)
            shutil.copy('OpenLayers.js', '../')

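# Note: the snippets in this section rely on a `pushd` context manager that
# temporarily changes the working directory (in most of these examples it is
# imported from `paver.easy`, alongside helpers such as `sh`, `info` and
# `path`). As an illustration only -- not the library's actual source -- a
# minimal equivalent could look like the sketch below; the name
# `_pushd_sketch` is hypothetical.
import contextlib
import os


@contextlib.contextmanager
def _pushd_sketch(new_dir):
    """Temporarily chdir into `new_dir`, yielding the previous directory."""
    old_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield old_dir
    finally:
        # Always restore the original working directory, even on error.
        os.chdir(old_dir)
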
def checkup_app():
    suite = path("src") / "opengeo-suite"
    options(app_resources=suite / "Resources")
    if not suite.exists():
        with pushd("src"):
            info("Checking out opengeo suite application")
            sh("git clone [email protected]:whitmo/opengeo-suite.git")
        return
    info("Updating opengeo suite application")
    with pushd(suite):
        sh("git pull")

def publish_docs():
    """publish documentation to http://geopython.github.io/GeoHealthCheck"""
    with pushd(options.base.tmp):
        sh('git clone [email protected]:geopython/GeoHealthCheck.git')
        with pushd('GeoHealthCheck'):
            sh('git checkout gh-pages')
            sh('cp -rp %s/docs/_build/html/* .' % options.base.home)
            sh('git add .')
            sh('git commit -am "update live docs [ci skip]"')
            sh('git push origin gh-pages')
    shutil.rmtree(options.base.tmp)

def gen_tests_html():
    """Generate tests/index.html for online testing"""
    with pushd("tests"):
        # ensure manager testsuite is writeable
        os.chmod(os.path.join("suites", "manager", "data"), 0777)
        os.chmod(os.path.join("suites", "manager", "data", "records.db"), 0666)
        sh("python gen_html.py > index.html")

def gen_tests_html():
    """Generate tests/index.html for online testing"""
    with pushd('tests'):
        # ensure manager testsuite is writeable
        os.chmod(os.path.join('suites', 'manager', 'data'), 0777)
        os.chmod(os.path.join('suites', 'manager', 'data', 'records.db'), 0666)
        sh('python gen_html.py > index.html')

def tarball_unpack(fpath, dest, overwrite=False):
    """
    Dumbly unpack tarballs and zips

    fpath -- filepath of tarball
    dest -- folder to unpack into

    @return name of folder created by unpacking
    """
    dest = path(dest)
    filename = fpath.split("/")[-1]
    newfile = dest / filename
    old = set(os.listdir(dest))
    dest_folder = dest / filename.split(".tar.")[0]
    if not dest_folder.exists() and overwrite:
        shutil.copyfile(str(fpath), str(newfile))
        with pushd(dest):
            catcmd = "zcat"
            if filename.endswith("bz2") or filename.endswith("bz"):
                catcmd = "bzcat"
            cat = subprocess.Popen([catcmd, filename], stdout=subprocess.PIPE)
            untar = subprocess.Popen(["tar", "-xf", "-"],
                                     stdin=cat.stdout, stdout=subprocess.PIPE)
            info("Unpacking %s" % filename)
            untar.communicate()
            os.remove(newfile)
    return dest_folder

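# Hypothetical usage of tarball_unpack above (archive name and destination
# are illustrative, not taken from the original code):
#
#     unpacked = tarball_unpack('downloads/example-1.0.tar.gz', 'src',
#                               overwrite=True)
#
# This copies the archive into `src/`, streams it through zcat/bzcat into
# tar, removes the copied archive, and returns the `src/example-1.0` path.
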
def debug(options):
    '''Run the buildslave without forking in background.'''
    # Set buildslave name to be used in buildbot.tac.
    sys.buildslave_name = pave.getOption(
        options, 'debug', 'name', default_value=pave.getHostname())

    argv = [
        'twistd',
        '--no_save',
        '--nodaemon',
        '--logfile=-',
        '--python=buildbot.tac',
        ]
    sys.argv = argv

    try:
        from setproctitle import setproctitle
        setproctitle  # Shut up the linter.
    except ImportError:
        setproctitle = lambda t: None
    setproctitle('buildbot-slave')

    from twisted.scripts import twistd
    with pushd(pave.fs.join([pave.path.build, 'slave'])):
        twistd.run()

def refresh_docs():
    """Build sphinx docs from scratch"""
    make = sphinx_make()
    with pushd(options.base.docs):
        sh('%s clean' % make)
        sh('%s html' % make)

def coverage_publish():
    """
    Send the coverage report.

    It expects that the GITHUB_PULL_ID environment variable is set.
    """
    from pkg_resources import load_entry_point
    import coverage

    codecov_main = load_entry_point('codecov', 'console_scripts', 'codecov')
    builder_name = os.environ.get('BUILDER_NAME', pave.getHostname())
    github_pull_id = os.environ.get('GITHUB_PULL_ID', '')

    with pushd(pave.path.build):
        cov = coverage.Coverage()
        cov.load()
        cov.report(show_missing=False)
        cov.xml_report(outfile='coverage.xml')

        sys.argv = [
            'codecov',
            '--build', builder_name,
            '--file', 'coverage.xml',
            ]
        if github_pull_id:
            # We are publishing for a PR.
            sys.argv.extend(['--pr', github_pull_id])

        codecov_main()

def gen_tests_html():
    """Generate tests/index.html for online testing"""
    with pushd('tests'):
        # ensure manager testsuite is writeable
        os.chmod(os.path.join('functionaltests', 'suites', 'manager', 'data'), 0o777)
        sh('python3 gen_html.py > index.html')

def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('directory')
    # opts = parser.parse_args()

    summary = Summary()
    figs_dir = 'summary_figs'
    os.path.exists(figs_dir) or os.mkdir(figs_dir)

    with pushd(figs_dir):
        summary.a_rand_index(summary.sample_letters, summary.input_data, 'l')
        summary.a_rand_index(summary.sample_states, summary.input_data2, 's')

        with open('word_list.txt', "w") as f:
            for num, key in enumerate(summary.word_list):
                f.write("iter%d:: " % num)
                for num2, key2 in enumerate(key):
                    f.write("%d:" % num2 + str(key2) + " ")
                f.write("\n")

        # plot sample states and letters
        for idx in range(summary.data_size):
            summary.plot_states(idx)
            plt.savefig('sample_states_%d.png' % idx)
            summary.plot_state_boundaries(idx)
            plt.savefig('state_boundary_%d.png' % idx)
            summary.plot_letters(idx)
            plt.savefig('sample_letters_%d.png' % idx)
            plt.clf()

def package_tar_gz(options):
    """Package a .tar.gz distribution"""
    import tarfile

    package_name = options.get('package_name', None)
    if package_name is None:
        raise Exception('Package name required')

    filename = path('%s/%s.tar.gz' % (STAGE_DIR, package_name))
    if filename.exists():
        info('Package %s already exists' % filename)
        return

    with pushd(STAGE_DIR):
        stage_path = '%s/%s' % (STAGE_DIR, package_name)
        if not path(stage_path).exists():
            raise Exception('Directory %s does not exist' % stage_path)
        tar = tarfile.open(filename, 'w:gz')
        tar.add(package_name)
        tar.close()

def build_slick_grid():
    """ Task to install slick grid from github """
    info('Install SlickGrid')
    with pushd(javascript_folder):
        sh('git clone https://github.com/mleibman/SlickGrid.git', capture=True)

def build_d3():
    """ Task to install d3.js, a graphing lib, from github. """
    info('Installing d3.js')
    with pushd(javascript_folder):
        sh('git clone https://github.com/mbostock/d3.git', capture=True)

def coverator_publish():
    """
    Send the coverage report to coverator.

    It expects that the GITHUB_PULL_ID environment variable is set and that
    the repository origin points to the GitHub URL.

    Also expects the 'coverator_url' configuration to be set.
    """
    from coverator.client import upload_coverage

    repository = re.split(r'github.com[/:]', SETUP['repository']['github'])[1]
    builder_name = os.environ.get('BUILDER_NAME', pave.getHostname())
    github_pull_id = os.environ.get('GITHUB_PULL_ID', '')
    branch_name = os.environ.get('BRANCH', '')
    revision = pave.git.revision

    with pushd(pave.path.build):
        args = ['.coverage', repository, builder_name, revision]
        if branch_name:
            # We know the branch name from the env.
            args.append(branch_name)
        if github_pull_id:
            # We are publishing for a PR.
            args.append(github_pull_id)
        try:
            upload_coverage(*args, url=SETUP['test']['coverator_url'])
        except Exception as error:  # noqa: cover
            print('Failed to upload coverage data: %s' % error)

def package_tar_gz(options):
    """Package a .tar.gz distribution"""
    import tarfile

    package_name = options.get("package_name", None)
    if package_name is None:
        raise Exception("Package name required")

    filename = path("%s/%s.tar.gz" % (STAGE_DIR, package_name))
    if filename.exists():
        info("Package %s already exists" % filename)
        return

    with pushd(STAGE_DIR):
        stage_path = "%s/%s" % (STAGE_DIR, package_name)
        if not path(stage_path).exists():
            raise Exception("Directory %s does not exist" % stage_path)
        tar = tarfile.open(filename, "w:gz")
        tar.add(package_name)
        tar.close()

def make_env(quiet=True):
    """ Build a virtual environment with petlib installed. """
    tell("Make a virtualenv")
    if os.path.exists("test_env"):
        return
    os.mkdir("test_env")
    with pushd('test_env') as old_dir:
        sh("virtualenv pltest", capture=quiet)

def buildslave(args):
    '''Run the buildslave command.'''
    from buildslave.scripts import runner
    new_args = ['buildslave']
    new_args.extend(args)
    sys.argv = new_args
    with pushd(pave.fs.join(pave.path.build, 'slave')):
        runner.run()

def clean():
    "take out the trash"
    src_dir = easy.options.setdefault("docs", {}).get("src_dir", None)
    if src_dir is None:
        src_dir = "src" if easy.path("src").exists() else "."
    with easy.pushd(src_dir):
        for pkg in set(easy.options.setup.packages) | set(("tests",)):
            for filename in glob.glob(pkg.replace(".", os.sep) + "/*.py[oc~]"):
                easy.path(filename).remove()

def clean():
    "take out the trash"
    src_dir = easy.options.setdefault("docs", {}).get('src_dir', None)
    if src_dir is None:
        src_dir = 'src' if easy.path('src').exists() else '.'
    with easy.pushd(src_dir):
        for pkg in set(easy.options.setup.packages) | set(("tests",)):
            for filename in glob.glob(pkg.replace('.', os.sep) + "/*.py[oc~]"):
                easy.path(filename).remove()

def download(url, dest=".download"):
    filename = url.split("/")[-1]
    ppath = path(dest)
    fpath = ppath / filename
    if not (ppath / filename).exists():
        oldnames = set(ppath.listdir())
        with pushd(ppath) as old_dir:
            sh("wget %s" % url)
    return fpath

def compile_messages():
    """generate .qm/.mo files"""
    # generate UI .qm file
    sh('lrelease MetaSearch.pro')

    # generate all .mo files
    locales = options.base.plugin / 'locale'
    for locale_dir in os.listdir(locales):
        with pushd(locales / locale_dir):
            for filename in os.listdir('LC_MESSAGES'):
                if filename.endswith('.po'):
                    with pushd('LC_MESSAGES'):
                        sh('msgfmt %s -o %s' % (filename,
                                                filename.replace('.po', '.mo')))

    # generate docs .mo files
    with pushd(options.base.docs):
        sh('sphinx-intl build')

def pyinstaller():
    """ Build Python.zip and deploy it to final_pyinstaller """
    reset_dir('export\\final_pyinstaller')
    reset_dir('python\\build')
    reset_dir('python\\dist')
    with pushd('python'):
        sh('pyinstaller run.py')
    zipdir('python\\dist\\run', 'export\\final_pyinstaller\\Python.zip')

def localedata():
    """Install custom locale data for Babel."""
    import yaml, babel, copy, cPickle as pickle
    for source in path('relvlast/localedata').files('*.yml'):
        data = copy.deepcopy(babel.localedata.load('en'))
        babel.localedata.merge(data, yaml.load(source.bytes()))
        with pushd(babel.localedata._dirname):
            target = source.stripext().basename() + '.dat'
            with open(target, 'wb') as stream:
                info('writing ' + target)
                pickle.dump(data, stream, -1)

def run():
    """
    Generate content to be opened using local file URL.
    """
    print('Listening on http://localhost:8080. Ctrl+C to stop.')
    with pushd('deploy'):
        import SocketServer
        SocketServer.TCPServer.allow_reuse_address = True
        sys.argv = ['pelican-server', '8080']
        # Side-effect import.
        from pelican import server

def apidoc():
    '''Generates automatic API documentation files.'''
    module = 'chevah.' + SETUP['folders']['source']
    with pushd(pave.path.build):
        pave.sphinx.apidoc(module=module, destination=['doc', 'api'])

    pave.fs.copyFile(
        source=['apidoc_conf.py'],
        destination=[pave.path.build, 'doc', 'conf.py'],
        )
    pave.sphinx.createHTML()

def testing_db():
    from gateway.models import initialize_sql
    drop_and_create_db('testing')
    config = ConfigParser.ConfigParser()
    config.readfp(open('testing.ini'))
    db_string = config.get('app:gateway', 'db_string')
    initialize_sql(db_string)
    with pushd('migrations'):
        puts('--------------------')
        puts('Creating test tables')
        puts('Loading test data')
        sh('psql -d testing -f load_testing_env.sql')

def refresh_docs():
    """Build sphinx docs from scratch"""
    get_translations()
    make = sphinx_make()
    with pushd(options.base.docs):
        sh('%s clean' % make)
        sh('sphinx-intl build')
        for lang in os.listdir(options.base.docs / 'locale'):
            builddir = '%s/_build/%s' % (options.base.docs, lang)
            sh('%s -e SPHINXOPTS="-D language=\'%s\'" -e BUILDDIR="%s" html' %
               (make, lang, builddir))

def run():
    """
    Generate content to be opened using local file URL.
    """
    print("Listening on http://localhost:8080. Ctrl+C to stop.")
    with pushd("deploy"):
        import SocketServer
        SocketServer.TCPServer.allow_reuse_address = True
        sys.argv = ["pelican-server", "8080"]
        # Side-effect import.
        from pelican import server

def test(options):
    """Run unit tests"""
    url = options.get('url', None)
    if url is None:
        # run against default server
        call_task('start')
        url = 'http://localhost:8000'
    with pushd('tests'):
        sh('python run_tests.py %s' % url)

def refresh_docs():
    """Build sphinx docs from scratch"""
    make = sphinx_make()
    if os.path.exists(options.base.static_docs):
        shutil.rmtree(options.base.static_docs)
    with pushd(options.base.docs):
        sh('%s clean' % make)
        sh('%s html' % make)
    source_html_dir = path('%s/docs/_build/html' % BASEDIR)
    source_html_dir.copytree(options.base.static_docs)

def clean():
    """ Task to remove javascript folders. """
    with pushd(javascript_folder):
        if path.exists('openlayers'):
            warning('Removing openlayers')
            shutil.rmtree('openlayers')
        if path.exists('d3'):
            warning('Removing d3')
            shutil.rmtree('d3')
        if path.exists('SlickGrid'):
            warning('Removing SlickGrid')
            shutil.rmtree('SlickGrid')

def refresh_docs():
    """Build sphinx docs from scratch"""
    make = sphinx_make()
    if os.path.exists(options.base.static_docs):
        shutil.rmtree(options.base.static_docs)
    with pushd(options.base.docs):
        sh('%s clean' % make)
        sh('%s html' % make)
    sh('mkdir %s' % options.base.static_docs)
    sh('cp -rp %s/docs/_build/html/* %s' % (BASEDIR, options.base.static_docs))

def publish_docs():
    """this script publishes Sphinx outputs to github pages"""
    tempdir = options.base.tmp / 'tempdocs'
    sh('git clone [email protected]:geopython/MetaSearch.git %s' % tempdir)
    with pushd(tempdir):
        sh('git checkout gh-pages')
        sh('cp -rp %s/docs/_build/html/* .' % options.base.home)
        sh('git add .')
        sh('git commit -am "update live docs [ci skip]"')
        sh('git push origin gh-pages')
    tempdir.rmtree()

def test_interop_load_eced(args):
    """
    Run the SSH key interoperability tests for loading external
    ECDSA and Ed25519 keys.
    """
    try:
        os.mkdir('build')
    except OSError:
        """Already exists"""

    exit_code = 1
    with pushd('build'):
        exit_code = call(
            "../chevah/keycert/tests/ssh_load_keys_tests.sh ecdsa ed25519",
            shell=True)

    sys.exit(exit_code)

def test_interop_generate(args):
    """
    Run the SSH key interoperability tests for internally-generated keys.
    """
    try:
        os.mkdir('build')
    except OSError:
        """Already exists"""

    exit_code = 1
    with pushd('build'):
        exit_code = call(
            "../chevah/keycert/tests/ssh_gen_keys_tests.sh",
            shell=True)

    sys.exit(exit_code)

def clean():
    """clean environment"""
    if os.path.exists(options.base.install):
        if os.path.islink(options.base.install):
            os.unlink(options.base.install)
        else:
            shutil.rmtree(options.base.install)
    if os.path.exists(options.base.tmp):
        shutil.rmtree(options.base.tmp)
    if os.path.exists(options.base.ext_libs):
        shutil.rmtree(options.base.ext_libs)

    with pushd(options.base.docs):
        sh('%s clean' % sphinx_make())

    for ui_file in os.listdir(options.base.ui):
        if ui_file.endswith('.py') and ui_file != '__init__.py':
            os.remove(options.base.plugin / 'ui' / ui_file)

    sh('git clean -dxf')

def examples():
    """ Populate final_examples with Examples.zip and unpacked examples """
    to_remove = [
        'Examples.lvproj', 'Examples.aliases', 'Examples.lvlps',
        'StandaloneDemo.vi', 'ExportDemos.vi', 'Performance.vi',
        'Icon.ico', 'IconInstall.ico', 'EndUserLicenseAgreement.rtf',
        'About.vi', 'Error.vi', 'Splash.vi']

    reset_dir('export\\final_examples')
    reset_dir('export\\final_examples\\unpacked', unlink=True)
    #os.unlink('export\\final_examples\\unpacked')
    shutil.copytree('examples', 'export\\final_examples\\unpacked')

    # Don't publicly release project file, etc.
    with pushd('export\\final_examples\\unpacked'):
        for f in to_remove:
            os.unlink(f)

    # Zip file containing all examples
    zipdir('export\\final_examples\\unpacked',
           'export\\final_examples\\Examples.zip')

def docs_html():
    """ Build HTML documentation and deploy in export directory """
    staging_path = 'export\\staging_docs_html'
    reset_dir(staging_path)
    os.rmdir(staging_path)  # for shutil.copytree
    shutil.copytree('docs', staging_path)
    shutil.copytree('export\\final_examples\\unpacked',
                    op.join(staging_path, 'examples'))
    shutil.copy('export\\final_examples\\Examples.zip',
                op.join(staging_path, 'examples'))

    with pushd(staging_path):
        sh('make clean')
        sh('make html')

    html_path = 'export\\final_docs_html'
    reset_dir(html_path)
    os.rmdir(html_path)  # shutil.copytree demands to create it. WHY NOT.
    shutil.copytree(op.join(staging_path, '_build\\html'), html_path)

def extract_messages():
    """generate .pot/.ts files from sources"""
    # generate UI .ts file
    sh('pylupdate4 -noobsolete MetaSearch.pro')

    # generate .po file from plugin templates
    env = Environment(extensions=['jinja2.ext.i18n'],
                      loader=FileSystemLoader(options.base.plugin))
    msg_strings = []
    for tfile in ['service_metadata.html', 'record_metadata_dc.html']:
        html_file = options.base.plugin / 'resources/templates' / tfile
        for msg in env.extract_translations(open(html_file).read()):
            if msg[2] not in msg_strings:
                msg_strings.append(msg[2])

    po_file = options.base.plugin / 'locale/en/LC_MESSAGES/templates.po'
    with open(po_file, 'w') as po_file_obj:
        po_file_obj.write('\nmsgid ""\n'
                          'msgstr ""\n'
                          '"Project-Id-Version: MetaSearch 0.1-dev\\n"\n'
                          '"Report-Msgid-Bugs-To: \\n"\n'
                          '"POT-Creation-Date: 2014-02-25 12:58-0500\\n"\n'
                          '"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n"\n'
                          '"Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n"\n'
                          '"Language-Team: LANGUAGE <*****@*****.**>\\n"\n'
                          '"MIME-Version: 1.0\\n"\n'
                          '"Content-Type: text/plain; charset=UTF-8\\n"\n'
                          '"Content-Transfer-Encoding: 8bit\\n"\n\n')
        for msg in msg_strings:
            po_file_obj.write('msgid "%s"\nmsgstr ""\n\n' % msg)

    # generate docs .po files
    with pushd(options.base.docs):
        sh('make gettext')
        locales_arg = ''
        for lang in os.listdir('locale'):
            locales_arg = '%s -l %s' % (locales_arg, lang)
        sh('sphinx-intl update -p _build/locale %s' % locales_arg)

def main():
    # result_file make#
    parser = argparse.ArgumentParser()
    parser.add_argument('directory')
    # opts = parser.parse_args()

    figs_dir = 'summary_figs'
    os.path.exists(figs_dir) or os.mkdir(figs_dir)
    summary = Summary()

    # evaluation_result save#
    with pushd(figs_dir):
        # gen confused matrix
        summary.letter_confused_matrix()
        summary.state_confused_matrix()

        # gen PER and WER
        summary.culPER()
        summary.culWER()

        # gen adjusted rand index
        summary.a_rand_index(summary.sample_letters, summary.input_data, 'l')
        summary.a_rand_index(summary.sample_states, summary.input_data2, 's')

        # gen word list
        with open('WordList.txt', "w") as f:
            for num, key in enumerate(summary.word_list):
                f.write("iter%d:: " % num)
                for num2, key2 in enumerate(key):
                    f.write("%d:" % num2 + str(key2) + " ")
                f.write("\n")

        # multi plot sample states and letters#
        print "--------------------------------------plot process start--------------------------------------"
        count = multiprocessing.Value('i', 0)
        for idx in range(summary.data_size):
            pr = multiprocessing.Process(target=multi_plot_object,
                                         args=(summary, idx, count))
            pr.start()
            time.sleep(0.1)

        # charm...!!! (without this we can't break out of the loop)
        while (1):
            if count.value > 55:
                time.sleep(1)
                print "--------------------------------------plot process completed!!--------------------------------------"
                break

def docs_chm():
    """ Build CHM documentation and deploy in export directory """
    staging_path = 'export\\staging_docs_chm'
    reset_dir(staging_path)
    os.rmdir(staging_path)  # for shutil.copytree
    shutil.copytree('docs', staging_path)
    shutil.copytree('export\\final_examples\\unpacked',
                    op.join(staging_path, 'examples'))
    shutil.copy('export\\final_examples\\Examples.zip',
                op.join(staging_path, 'examples'))

    # Disable in-page nav boxes
    shutil.copy(op.join(staging_path, '_templates\\_layout_chm.html'),
                op.join(staging_path, '_templates\\layout.html'))

    with pushd(staging_path):
        sh('make clean')
        sh('make htmlhelp')
        sh('%HHC% {}'.format(
            op.join(staging_path, '_build\\htmlhelp\\AdvancedPlottingToolkit.hhp')),
           ignore_error=True)

    reset_dir('export\\final_docs_chm')
    shutil.copy(op.join(staging_path, '_build\\htmlhelp\\AdvancedPlottingToolkit.chm'),
                'export\\final_docs_chm')

def publish_docs(options):
    """Publish dev docs to production"""
    local_path = '_build/html'
    remote_host = 'pycsw.org'
    remote_path = '/osgeo/pycsw/pycsw-web/docs/latest'

    user = options.get('user', False)
    if not user:
        raise Exception('OSGeo userid required')

    call_task('refresh_docs')

    with pushd(DOCS):
        # change privs to be group writeable
        for root, dirs, files in os.walk(local_path):
            for dfile in files:
                os.chmod(os.path.join(root, dfile), 0o664)
            for ddir in dirs:
                os.chmod(os.path.join(root, ddir), 0o775)

        # copy documentation
        sh('scp -r %s%s* %s@%s:%s' % (local_path, os.sep, user,
                                      remote_host, remote_path))

def publish_docs():
    """this script publishes Sphinx outputs to github pages"""
    tempdir = options.base.tmp / 'tempdocs'
    sh('git clone [email protected]:geopython/MetaSearch.git %s' % tempdir)

    with pushd(tempdir):
        sh('git checkout gh-pages')
        # copy English to root
        sh('cp -rp %s/docs/_build/en/html/* .' % options.base.home)
        # copy all other languages to their own dir
        for lang in os.listdir(options.base.docs / '_build'):
            if lang != 'en':
                # point all resources to english
                for res in ['_static', '_sources', '_images']:
                    sh('rm -fr %s/docs/_build/%s/html/%s' %
                       (options.base.home, lang, res))
                # update .html files to point to English
                for dfile in os.listdir(options.base.docs / '_build/%s/html' % lang):
                    if dfile.endswith('.html'):
                        lfile = options.base.docs / '_build/%s/html/%s' % \
                            (lang, dfile)
                        source = open(lfile).read()
                        for res in ['_static', '_sources', '_images']:
                            source = source.replace(res, '../%s' % res)
                        with open(lfile, 'w') as fhl:
                            fhl.write(source)
                sh('mkdir -p %s' % lang)
                sh('cp -rp %s/docs/_build/%s/html/* %s' %
                   (options.base.home, lang, lang))
        sh('git add .')
        sh('git commit -am "update live docs [ci skip]"')
        sh('git push origin gh-pages')

    tempdir.rmtree()

def main():
    # result_file make#
    parser = argparse.ArgumentParser()
    parser.add_argument('directory')
    # opts = parser.parse_args()

    figs_dir = 'summary_figs'
    os.path.exists(figs_dir) or os.mkdir(figs_dir)
    summary = Summary()

    # evaluation_result save#
    with pushd(figs_dir):
        # gen confused matrix
        summary.letter_confused_matrix()
        summary.state_confused_matrix()

        # gen PER and WER
        summary.culPER()
        summary.culWER()

        # gen adjusted rand index
        summary.a_rand_index(summary.sample_letters, summary.input_data, 'l')
        summary.a_rand_index(summary.sample_states, summary.input_data2, 's')

        # gen word list
        with open('WordList.txt', "w") as f:
            for num, key in enumerate(summary.word_list):
                f.write("iter%d:: " % num)
                for num2, key2 in enumerate(key):
                    f.write("%d:" % num2 + str(key2) + " ")
                f.write("\n")

        # multi plot sample states and letters#
        print "--------------------------------------plot process start--------------------------------------"
        pr_l = []
        for idx in range(summary.data_size):
            pr = multiprocessing.Process(target=multi_plot_object,
                                         args=(summary, idx))
            pr_l.append(pr)
            pr.start()
        for p in pr_l:
            p.join()
        print "--------------------------------------plot process completed!!--------------------------------------"

def start_geoserver(options):
    """
    Start GeoServer with GeoNode extensions
    """
    from geonode.settings import GEOSERVER_BASE_URL

    with pushd('geoserver/bin/'):
        sh(('JAVA_OPTS="-Xmx512m -XX:MaxPermSize=256m"'
            ' JAVA_HOME="/usr"'
            ' sh startup.sh'
            ' > /dev/null &'
            ))

    info('Starting GeoServer on %s' % GEOSERVER_BASE_URL)

    # wait for GeoServer to start
    started = waitfor(GEOSERVER_BASE_URL)
    if not started:
        # If applications did not start in time we will give the user a chance
        # to inspect them and stop them manually.
        info(('GeoServer never started properly or timed out.'
              ' It may still be running in the background.'))
        info('The logs are available at geoserver-geonode-ext/jetty.log')
        sys.exit(1)

def refresh_docs():
    """Build sphinx docs from scratch"""
    with pushd(DOCS):
        sh('make clean')
        sh('make html')

def test(options):
    """Run unit tests"""
    db_setup = False
    db_conn = None
    cfg_files = []
    status = 0

    url = options.get('url', None)
    suites = options.get('suites', None)
    database = options.get('database', 'SQLite3')
    remote = options.get('remote')
    timems = options.get('time', None)

    if url is None:
        # run against default server
        call_task('stop')
        call_task('reset')
        if database == 'SQLite3':
            call_task('setup_testdata')
        call_task('start')
        url = 'http://*****:*****@localhost/%s' % (user, password, temp_db)
        if password:
            sh('set PGPASSWORD=%s' % password)
        sh('createdb %s -U %s' % (temp_db, user))
        sh('createlang --dbname=%s plpythonu -U %s' % (temp_db, user))

        # update all default.cfg files to point to test DB
        cfg_files = glob.glob('tests%ssuites%s*%s*.cfg' % (3 * (os.sep,)))
        for cfg in cfg_files:
            # generate table
            suite = cfg.split(os.sep)[2]
            tablename = 'records_cite'
            if suite == 'manager':
                tablename = 'records_manager'
            elif suite == 'apiso':
                tablename = 'records_apiso'
            config = configparser.SafeConfigParser()
            with open(cfg) as read_data:
                config.readfp(read_data)
            config.set('repository', 'database', db_conn)
            config.set('repository', 'table', tablename)
            with open(cfg, 'wb') as config2:
                config.write(config2)
            if suite in ['cite', 'manager', 'apiso']:
                # setup tables
                setup_db(db_conn, tablename, home, init_sfsql, init_sfsql)
                init_sfsql = False
            if suite in ['cite', 'apiso']:
                # load test data
                dirname = '%s%sdata' % (os.path.dirname(cfg), os.sep)
                load_records(context, db_conn, tablename, dirname)
    else:
        raise Exception('Invalid database specified')

    with pushd('tests'):
        try:
            sh(cmd)
        except BuildFailure as err:
            status = 1

    # stop pycsw instance
    call_task('stop')

    if db_setup:
        # tearDown
        for cfg in cfg_files:
            sh('git checkout %s' % cfg)
        if database == 'PostgreSQL':
            sh("psql -c \"select pg_terminate_backend(procpid) from pg_stat_activity where datname='%s';\" -U %s" % (temp_db, user))
            sh('dropdb %s -U %s' % (temp_db, user))
            sh('unset PGPASSWORD')

    sys.exit(status)

def upgrade():
    """upgrade database if changed; be sure to backup first!"""
    info('Upgrading database...')
    with pushd(path('%s/GeoHealthCheck' % BASEDIR)):
        sh('python manage.py db upgrade')