def post_install(self):
    env.logger.info("Starting post-install")
    env.logger.info("Load Scalability tests")
    if exists('Scalability'):
        with cd('Scalability'):
            run('git pull')
    else:
        _fetch_and_unpack("git clone git://github.com/pjotrp/Scalability.git")
    # Now run a post installation routine (for the heck of it)
    run('./Scalability/scripts/hello.sh')
    env.logger.info("Load Cross-language tests")
    if exists('Cross-language-interfacing'):
        with cd('Cross-language-interfacing'):
            run('git pull')
    else:
        _fetch_and_unpack("git clone git://github.com/pjotrp/Cross-language-interfacing.git")
    # Special installs for the tests
    with cd('Cross-language-interfacing'):
        sudo('./scripts/install-packages-root.sh')
        run('./scripts/install-packages.sh')
        run('./scripts/create_test_files.rb')

def install_augustus(env):
    default_version = "2.7"
    version = env.get('tool_version', default_version)
    url = "http://bioinf.uni-greifswald.de/augustus/binaries/augustus.%s.tar.gz" % version
    install_dir = env.system_install
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            _fetch_and_unpack(url, need_dir=False)
            env.safe_sudo("mkdir -p '%s'" % install_dir)
            env.safe_sudo("mv augustus.%s/* '%s'" % (version, install_dir))

def install_augustus(env):
    version = env.tool_version
    url = "http://bioinf.uni-greifswald.de/augustus/binaries/augustus.%s.tar.gz" % version
    install_dir = env.system_install
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            _fetch_and_unpack(url, need_dir=False)
            env.safe_sudo("mkdir -p '%s'" % install_dir)
            env.safe_sudo("mv * '%s'" % install_dir)
    env.safe_sudo("echo 'PATH=%s/bin:%s/scripts:$PATH' > %s/env.sh" %
                  (install_dir, install_dir, install_dir))
    env.safe_sudo("echo 'export AUGUSTUS_CONFIG_PATH=%s/config' >> %s/env.sh" %
                  (install_dir, install_dir))
    _update_default(env, install_dir)

def _install_from_url(env, cpanm_cmd, package):
    """Check version of a dependency and download and install with cpanm if not up to date.

    Packages installed via URL have the package name, target version and URL
    separated with '=='. They can also optionally have a build directory or
    dependency to remove.
    """
    parts = package.split("==")
    package, target_version, url = parts[:3]
    args = {}
    if len(parts) > 3:
        for key, value in (x.split("=") for x in parts[3:]):
            args[key] = value
    with settings(warn_only=True):
        cur_version = env.safe_run_output(
            "export PERL5LIB=%s/lib/perl5:${PERL5LIB} && " % env.system_install +
            """perl -le 'eval "require $ARGV[0]" and print $ARGV[0]->VERSION' %s""" % package)
    if cur_version != target_version:
        with cshared._make_tmp_dir() as work_dir:
            with cd(work_dir):
                dl_dir = cshared._fetch_and_unpack(url)
                if args.get("build"):
                    dl_dir = os.path.join(dl_dir, args["build"])
                with cd(dl_dir):
                    if args.get("depremove"):
                        for fname in ["Makefile.PL", "MYMETA.json", "MYMETA.yml"]:
                            env.safe_run(r"""sed -i.bak -e '/^.*%s.*/s/^/#/' %s""" %
                                         (args["depremove"], fname))
                    env.safe_run("%s -i --notest --local-lib=%s ." % (cpanm_cmd, env.system_install))

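# Hedged illustration (not from the source): a hypothetical '=='-separated
# package spec of the kind the _install_from_url docstring describes, and a
# minimal parse that mirrors the splitting logic above. The module name,
# version, URL, build directory and dependency below are placeholders only.
def _parse_cpanm_spec(spec):
    parts = spec.split("==")
    name, target_version, url = parts[:3]
    # optional trailing entries such as build=<subdir> or depremove=<module>
    opts = dict(x.split("=") for x in parts[3:])
    return name, target_version, url, opts

# Example spec (placeholder values):
# _parse_cpanm_spec("Some::Module==1.23==http://example.org/Some-Module-1.23.tar.gz"
#                   "==build=Some-Module-1.23==depremove=Other::Dep")
# -> ("Some::Module", "1.23", "http://example.org/Some-Module-1.23.tar.gz",
#     {"build": "Some-Module-1.23", "depremove": "Other::Dep"})
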
def install_jboss(base_dir):
    version = "5.1.0"
    url = "http://downloads.sourceforge.net/project/jboss/JBoss/JBoss-%s.GA/jboss-%s.GA.zip" % \
          (version, version)
    java_dir = _java_dir(base_dir)
    with cd(java_dir):
        jboss_dir = _fetch_and_unpack(url)
    return os.path.join(java_dir, jboss_dir)

def _install_galaxy_search(dirs, config):
    """Full text search capability with Lucene.
    """
    search_url = "git clone git://github.com/chapmanb/kwd-doc-find.git"
    with cd(dirs["base"]):
        lucene_dir = _fetch_and_unpack(search_url)
    lucene_dir = os.path.join(dirs["base"], lucene_dir)
    return ("fulltext_search", lucene_dir,
            "lein deps && lein run :web 8081", "run :web 8081", False)

def install_maven(base_dir):
    version = "2.2.1"
    mirror = "mirror.cc.columbia.edu/pub/software/apache"
    java_dir = _java_dir(base_dir)
    url = "http://%s/maven/binaries/apache-maven-%s-bin.tar.gz" % (mirror, version)
    with cd(java_dir):
        mvn_dir = _fetch_and_unpack(url)
    install_mvn = os.path.join(base_dir, "bin", "mvn")
    dl_mvn = os.path.join(java_dir, mvn_dir, "bin", "mvn")
    sudo("ln -s %s %s" % (dl_mvn, install_mvn))

def _install_bii_tools(config):
    bii_git = "git clone git://github.com/ISA-tools/BioInvIndex.git"
    mgr_git = "git clone git://github.com/ISA-tools/ISAvalidator-ISAconverter-BIImanager.git"
    base_dir = os.path.join(config["base_install"], config["bii_dirname"])
    if not exists(base_dir):
        sudo("mkdir -p %s" % base_dir)
        sudo("chown %s %s" % (env.user, base_dir))
    with cd(base_dir):
        bii_dir = _fetch_and_unpack(bii_git)
    bii_dir = os.path.join(base_dir, bii_dir)
    with cd(base_dir):
        mgr_base_dir = _fetch_and_unpack(mgr_git)
        mgr_build_dir = "val_conv_manager_gui"
        with cd(os.path.join(mgr_base_dir, mgr_build_dir)):
            target_dir = "target"
            if not exists(target_dir):
                # need artifacts from BII for tool build
                with cd(bii_dir):
                    run("mvn clean install -Dmaven.test.skip=true")
                    run("rm -rf ear/target")
                # now build BII data manager
                run("sh package.sh")
                with cd(target_dir):
                    run("unzip BII-data-mgr-*.zip")
    mgr_dir = os.path.join(base_dir, mgr_base_dir, mgr_build_dir, target_dir)
    with cd(mgr_dir):
        with settings(hide("everything"), warn_only=True):
            result = run("ls -1d BII-data-mgr-*")
    data_mgr_dir = result.split()[0].strip()
    mgr_dir = os.path.join(mgr_dir, data_mgr_dir)
    index_dir = os.path.join(base_dir, config["bii_lucene_index"])
    if not exists(index_dir):
        run("mkdir -p %s" % index_dir)
    bii_data_dir = os.path.join(base_dir, config["bii_data_dirname"])
    if not exists(bii_data_dir):
        run("mkdir -p %s" % bii_data_dir)
    return dict(base=base_dir, bii=bii_dir, mgr=mgr_dir,
                index=index_dir, bii_data=bii_data_dir)

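# Hedged sketch (not from the source): the configuration keys that
# _install_bii_tools reads from its ``config`` dict. Only the key names come
# from the function above; the paths and directory names are made-up
# placeholders for illustration.
example_bii_config = {
    "base_install": "/usr/local/share",     # hypothetical install root
    "bii_dirname": "bii",                   # subdirectory holding the BII checkouts
    "bii_lucene_index": "bii_luceneindex",  # Lucene index dir, relative to base_dir
    "bii_data_dirname": "bii_data",         # data dir, relative to base_dir
}
# bii_dirs = _install_bii_tools(example_bii_config)
# bii_dirs["mgr"] would then point at the unpacked BII-data-mgr-* directory.
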
def _install_galaxy_code(dirs, config):
    with cd(dirs["base"]):
        code_dir = _fetch_and_unpack("hg clone %s" % config["galaxy_repo"])
    code_dir = os.path.join(dirs["base"], code_dir)
    library_dir = os.path.join(dirs["base"], config["galaxy_datalib"])
    config_file = "universe_wsgi.ini"
    remote_config = os.path.join(code_dir, config_file)
    if not exists(remote_config):
        local_config = os.path.join(config["fab_base_dir"], "install_files", config_file)
        origs = ["DBUSER", "DBPASSWD", "DBNAME", "ADMINUSERS", "LIBRARYDIR"]
        vals = [config["db_user"], config["db_pass"], config["galaxy_dbname"],
                config["galaxy_adminusers"], library_dir]
        put(local_config, remote_config)
        for old, new in zip(origs, vals):
            sed(remote_config, old, new)
    return ("galaxy", code_dir, "sh run.sh",
            "paster.py serve universe_wsgi.ini", False)
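
# Hedged sketch (not from the source): the ``config`` keys that
# _install_galaxy_code expects. Key names are taken from the function above;
# the repository URL, credentials and paths are invented placeholders.
example_galaxy_config = {
    "galaxy_repo": "https://example.org/hg/galaxy-dist",  # hg repository to clone
    "galaxy_datalib": "galaxy_data",           # data library dir under dirs["base"]
    "fab_base_dir": "/path/to/fabfile",        # contains install_files/universe_wsgi.ini
    "db_user": "galaxy",                       # substituted for DBUSER
    "db_pass": "CHANGE_ME",                    # substituted for DBPASSWD
    "galaxy_dbname": "galaxy",                 # substituted for DBNAME
    "galaxy_adminusers": "admin@example.org",  # substituted for ADMINUSERS
}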