def manage(cmd=''):
    """Run ``manage.py <cmd>`` remotely inside the project source directory."""
    _dynamic_env()
    invocation = '%s manage.py %s' % (env.python, cmd)
    with env.cd(env.source_path):
        env.run(invocation)
def install():
    """Install postgresql server (and client, where the distro ships one) in a
    distro-agnostic way, then bootstrap and start it if it is not running."""
    pkg = 'fabric_colors.distro.{0}.postgresql'.format(env.distro)
    mod = __import__(pkg, fromlist=['{0}.postgresql'.format(env.distro)])
    PKG_INSTALL = getattr(mod, 'PKG_INSTALL')
    PKG_CLIENT_INSTALL = getattr(mod, 'PKG_CLIENT_INSTALL')
    result1, result2 = installed()
    if not result1:
        env.run(PKG_INSTALL)
    # The "No client" sentinel means this distro bundles the client with the
    # server, so there is nothing extra to install.
    if result2 != "No client" and not result2:
        env.run(PKG_CLIENT_INSTALL)
    if not status():
        if not chk_data_dir():
            print(green("Setting up postgresql for the first time on this host."))
            setup_data_dir()
            initializer()
        start()
def db():
    """Create the deploy target's database user and database, both named after
    ``env.project_fullname``."""
    _dynamic_env()
    create_user = """psql template1 -c "CREATE USER %(project_fullname)s WITH ENCRYPTED PASSWORD '%(db_password)s';" """ % env  # NOQA
    create_database = "psql template1 -c 'CREATE DATABASE %(project_fullname)s OWNER %(project_fullname)s;'" % env  # NOQA
    env.run(create_user)
    env.run(create_database)
def collect_static_files():
    """Run django's ``collectstatic`` inside the project environment."""
    set_env = prefix(COMMANDS['set_environment'])
    activate = prefix(COMMANDS['activate_virtualenv'])
    with env.cd(settings.PROJECT_PATH), set_env, activate:
        env.run('python rnacentral/manage.py collectstatic --noinput')
def deploy_webapp(fresh=True):
    """Build and deploy the appserver docker image on the target host.

    :param fresh: forwarded to ``handle_prerequisites``; when True the
        prerequisites are (re)installed from scratch.
    """
    handle_prerequisites(fresh)
    with cd("{{ cookiecutter.repo_name }}"):
        # Deploy keys are kept out of the repo, so push them explicitly.
        put("src/{{ cookiecutter.repo_name }}/config/keys/*",
            "src/{{ cookiecutter.repo_name }}/config/keys/")
        env.run("sudo docker build -t {{ cookiecutter.repo_name }}_appserver_deploy .")
        # BUGFIX: corrected "Succesfully" typo in the user-facing message.
        print(green("Successfully deployed the {{ cookiecutter.repo_name }} appserver. "
                    "Feel free to run 'fab run_webapp_container' ;)."))
def start_supervisord():
    """Launch supervisord using the conf file kept under ``env.confs``."""
    with virtualenv():
        conf = os.path.join(env.confs, 'supervisord.conf')
        env.run('supervisord -c %s' % conf)
def supervisorctl(action, process):
    """Run ``supervisorctl <action> <process>``, e.g. supervisorctl start uwsgi."""
    with virtualenv():
        conf = os.path.join(env.confs, 'supervisord.conf')
        env.run('supervisorctl -c {0} {1} {2}'.format(conf, action, process))
def installed():
    """Distro-agnostic check whether the postgresql server (and, where the
    distro ships one, the client package) is installed.

    Returns:
        tuple: ``(result1, result2)`` — ``result1`` is truthy when the server
        package is present; ``result2`` is either the sentinel string
        "No client" or the (string) output of the client check command.
    """
    mod = __import__('fabric_colors.distro.{0}.postgresql'.format(env.distro),
                     fromlist=['{0}.postgresql'.format(env.distro)])
    cmd1 = getattr(mod, 'PKG_INSTALLED_CMD')
    cmd2 = getattr(mod, 'PKG_CLIENT_INSTALLED_CMD')
    result2 = "No client"
    if env.run == local:
        result1 = local(cmd1, capture=True)
        if cmd2:
            result2 = local(cmd2, capture=True)
    else:
        result1 = env.run(cmd1)
        if cmd2:
            result2 = env.run(cmd2)
    if result1:
        print(green("Server already installed."))
    else:
        print(red("Server not yet installed."))
    if result2 == "No client":
        print(green("This distro does not need a client package for postgresql"))
    # BUGFIX: int(result2) raised ValueError whenever the client check
    # produced empty output; compare the stripped string instead.
    elif result2.strip() == "1":
        print(green("Client already installed."))
    else:
        print(red("Client not yet installed."))
    return result1, result2
def gen_supervisor_conf(conf_file='local/supervisord.conf.tmp'):
    """Generate a supervisord conf file from the django-supervisor template.

    The previous conf file (if any) is backed up before being overwritten.
    """
    _backup_file(conf_file)
    env.run('python manage.py supervisor getconfig > %s' % conf_file)
    print(green("Wrote supervisor conf file to %s" % conf_file))
def stop_supervisord():
    """Shut down the supervisord instance driven by our conf file."""
    with virtualenv():
        conf = os.path.join(env.confs, 'supervisord.conf')
        env.run('supervisorctl -c %s shutdown' % conf)
def rsync_task():
    """Task wrapper around fabric's ``rsync_project``.

    NOTE(jshrake): local_dir must end in a trailing slash.
    From http://docs.fabfile.org/en/1.11/api/contrib/project.html
    - If local_dir ends with a trailing slash, the files will be dropped
      inside of remote_dir.
    - If local_dir does not end with a trailing slash, remote_dir is
      effectively the "parent" directory, and a new directory named after
      local_dir will be created inside of it.
    """
    fabric.api.local(env.config.get("pre-rsync-cmd", ""))
    env.run("mkdir -p {0}".format(env.project_dir))
    return fabric.contrib.project.rsync_project(
        local_dir=env.local_project_dir + "/",
        remote_dir=env.project_dir,
        delete=True,
        exclude=env.config.get("rsync-excludes", []),
        capture=True)
def clean_task():
    """obi clean: remove the build directory (or run the configured clean-cmd)."""
    fallback = "rm -rf {0} || true".format(env.build_dir)
    env.run(env.config.get("clean-cmd", fallback))
def _get_latest_source(source_folder):
    """Bring ``source_folder`` on the server up to date with the git commit
    currently checked out locally.

    Clones the repo on first deploy, otherwise fetches; then hard-resets the
    remote working tree to the local HEAD commit.
    """
    if env.exists(source_folder + '/.git'):
        env.run('cd {source_folder} && git fetch'.format(source_folder=source_folder,))
    else:
        env.run('git clone {github_url} {source_folder}'.format(github_url=REPO_URL,
                                                                source_folder=source_folder))
    current_commit = local('git log -n 1 --format=%H', capture=True)
    # BUGFIX: current_commit was computed but never used, so after a fetch the
    # remote checkout was never actually moved to the deployed commit.
    env.run('cd {source_folder} && git reset --hard {commit}'.format(
        source_folder=source_folder, commit=current_commit))
def index_dump(index_types=('mapping', 'data')):
    """Export the specified index types from the production ES cluster.

    Uses elasticdump (https://github.com/taskrabbit/elasticsearch-dump) to
    dump each requested type to a json file, then archives the dumps with xz.

    :param index_types: the export index types, e.g. ('mapping', 'data')
    :type index_types: iterable
    """
    # FIX: mutable default argument replaced with an immutable tuple.
    esinfo = _get_config()
    push_json_archive = []
    workspace = esinfo['esdump_dir']
    with cd(workspace):
        logging.info("Preparing to dump the index from production.")
        for t in index_types:
            esinfo.update({'type': t})
            esinfo.update({'file_archive': esinfo['es_prod_index'] + '_' + t + '.json'})
            try:
                env.run('rm -f %(file_archive)s' % esinfo)
                env.run('elasticdump \
                    --input=http://%(es_prod_addr)s:9200/%(es_prod_index)s \
                    --type=%(type)s \
                    --output=%(file_archive)s' % esinfo)
                push_json_archive.append(esinfo['file_archive'])
            # BUGFIX: "except Exception, e" is py2-only syntax; "as" works on
            # both python 2.6+ and python 3.
            except Exception as e:
                logging.error('dump the indies failure from production, caused of %s', str(e))
        try:
            env.run('tar c ' + ' '.join(push_json_archive) + '| xz -0 > %(new_esdump_file)s' % esinfo)
            env.run('rm -f %(esdump_file)s' % esinfo)
            env.run('mv %(new_esdump_file)s %(esdump_file)s' % esinfo)
            logging.info("dump the index successful!")
        except Exception as e:
            logging.error('archive the dump the index failure for %s', str(e))
def deploy_webapp(fresh=True):
    """Build and deploy the djagolb appserver docker image.

    :param fresh: forwarded to ``handle_prerequisites``; when True the
        prerequisites are (re)installed from scratch.
    """
    handle_prerequisites(fresh)
    with cd("djagolb"):
        # Deploy keys are kept out of the repo, so push them explicitly.
        put("src/djagolb/config/keys/*", "src/djagolb/config/keys/")
        env.run("sudo docker build -t djagolb_appserver_deploy .")
        # BUGFIX: corrected "Succesfully" typo in the user-facing message.
        print(green("Successfully deployed the djagolb appserver. "
                    "Feel free to run 'fab run_webapp_container' ;)."))
def syncdb():
    """Run syncdb (plus any pending south migrations) under the venv."""
    with virtualenv():
        env.run('DJANGO_SETTINGS_MODULE=%s ./manage.py syncdb --noinput' % env.settings)
def collectstatic():
    """Run django's collectstatic under the venv."""
    with virtualenv():
        env.run('DJANGO_SETTINGS_MODULE=%s ./manage.py collectstatic --noinput' % env.settings)
def compress_static_files():
    """Run django compressor inside the project environment."""
    set_env = prefix(COMMANDS['set_environment'])
    activate = prefix(COMMANDS['activate_virtualenv'])
    with env.cd(settings.PROJECT_PATH), set_env, activate:
        env.run('python rnacentral/manage.py compress')
def refresh_pg():
    """Refresh the delphix PG virtual database from its latest snapshot and
    announce the refresh on slack."""
    snapshot = env.run("sudo -u dxrnacen /nfs/dbtools/delphix/postgres/ebi_list_snapshots.sh -d pgsql-dlvmpub1-010.ebi.ac.uk | tail -1")
    # bash curses about absence of /home directory, thus we have to filter
    # out the snapshot timestamp, e.g. "2018-06-26 11:16".
    # FIX: raw string literal — "\d" etc. are invalid escape sequences in a
    # plain string (DeprecationWarning on python >= 3.6).
    snapshot = re.search(r"\d\d\d\d\-\d\d\-\d\d\s\d\d\:\d\d$", snapshot).group(0)
    env.run("sudo -u dxrnacen /nfs/dbtools/delphix/postgres/ebi_refresh_vdb.sh -d pgsql-dlvmpub1-010.ebi.ac.uk -S '%s'" % snapshot)
    slack("Refreshed PG database from '%s' snapshot" % snapshot)
def install_django_requirements():
    """Run pip install for the project's requirements file."""
    set_env = prefix(COMMANDS['set_environment'])
    activate = prefix(COMMANDS['activate_virtualenv'])
    with env.cd(settings.PROJECT_PATH), set_env, activate:
        env.run('pip install --upgrade -r rnacentral/requirements.txt')
def sh_pip_install(pkgs, cmd_path='pip', sudo=True):
    """Install ``pkgs`` via pip, optionally under sudo.

    A pip download cache directory is created and passed along when the env
    configures one.
    """
    args = [cmd_path, 'install']
    # '--use-mirrors' deliberately omitted: unavailable on deb6 pip.
    if env.pypi_package_download_cache:
        cache_dir = env.pypi_package_download_cache.format(
            username=users.get_username()
        )
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        args.append('--download-cache={0}'.format(cache_dir))
    # pip complains about duplicate packages, so de-dup first.
    args.extend(set(pkgs))
    command = ' '.join(args)
    if sudo:
        env.sudo(command)
    else:
        env.run(command)
def restart_django(restart_url=None):
    """Restart the django process (by touching wsgi.py) and, when given a
    ``restart_url``, hit the site to warm it up."""
    with env.cd(settings.PROJECT_PATH):
        env.run('touch rnacentral/rnacentral/wsgi.py')
    if restart_url:
        requests.get(restart_url)
def restart():
    """Distro-agnostic restart of postgresql."""
    print(green("Restarting postgresql"))
    pkg = 'fabric_colors.distro.{0}.postgresql'.format(env.distro)
    mod = __import__(pkg, fromlist=['{0}.postgresql'.format(env.distro)])
    env.run(getattr(mod, 'RESTART'))
def flush_memcached():
    """Delete all cached data from memcached."""
    host, port = settings.CACHES['default']['LOCATION'].split(':')
    with warn_only():
        env.run('echo flush_all | nc {host} {port} -vv'.format(host=host, port=port))
def deploy_db(fresh=True):
    """Build and deploy the djagolb database container.

    :param fresh: forwarded to ``handle_prerequisites``; when True the
        prerequisites are (re)installed from scratch.
    """
    handle_prerequisites(fresh)
    with cd("djagolb"):
        put("src/djagolb/config/keys/*", "src/djagolb/config/keys/")
        env.run("bash init.sh")
        env.run("bin/fab build_db_dockerfile")
        # BUGFIX: corrected "Succesfully" typo in the user-facing message.
        print(green("Successfully deployed the djagolb database. "
                    "Feel free to run 'fab run_db_container' ;)."))
def initializer():
    """Distro-agnostic: install the initialization script for postgresql."""
    print(green("Set-up initialization script for postgresql"))
    pkg = 'fabric_colors.distro.{0}.postgresql'.format(env.distro)
    mod = __import__(pkg, fromlist=['{0}.postgresql'.format(env.distro)])
    env.run(getattr(mod, 'INITIALIZER'))
def supervisorctl(action, process):
    """Perform ``action`` (start|stop|restart) on ``process``, as the process
    is named in supervisord.conf."""
    conf = os.path.join(env.confs, 'supervisord.conf')
    env.run('supervisorctl -c {0} {1} {2}'.format(conf, action, process))
def start_supervisor():
    """Start supervisord and the memcached process on the production machine."""
    set_env = prefix(COMMANDS['set_environment'])
    activate = prefix(COMMANDS['activate_virtualenv'])
    with env.cd(settings.PROJECT_PATH), set_env, activate:
        env.run('supervisord -c supervisor/supervisor.conf')
        env.run('supervisorctl -c supervisor/supervisor.conf start memcached')
def deploy_db(fresh=True):
    """Build and deploy the database container.

    :param fresh: forwarded to ``handle_prerequisites``; when True the
        prerequisites are (re)installed from scratch.
    """
    handle_prerequisites(fresh)
    with cd("{{ cookiecutter.repo_name }}"):
        put("src/{{ cookiecutter.repo_name }}/config/keys/*",
            "src/{{ cookiecutter.repo_name }}/config/keys/")
        env.run("bash init.sh")
        env.run("bin/fab build_db_dockerfile")
        # BUGFIX: corrected "Succesfully" typo in the user-facing message.
        print(green("Successfully deployed the {{ cookiecutter.repo_name }} database. "
                    "Feel free to run 'fab run_db_container' ;)."))
def _web_pid():
    """Return the pid of the supervisord-managed web process."""
    with quiet(), _virtualenv():
        # supervisorctl needs a conf file, so render a throwaway one first.
        env.run('python manage.py supervisor getconfig > local/.tmpsupervisord.conf')
        pid = env.run('supervisorctl -c local/.tmpsupervisord.conf pid web', capture=True)
        env.run('rm local/.tmpsupervisord.conf')
        return pid
def _env_set_distro():
    """Detect the host OS/distro and store it on ``env.distro``.

    Returns:
        str or None: "mac" on Darwin, the ``ID`` value from /etc/*-release on
        Linux, otherwise None.
    """
    with hide('running', 'stdout'):
        if env.run == local:
            name = local('uname -s', capture=True)
        else:
            name = env.run('uname -s')
        env.distro = None
        if name == "Darwin":
            env.distro = "mac"
        elif name == "Linux":
            result = env.run('cat /etc/*-release')
            import re
            # /etc/*-release holds KEY=VALUE pairs; the ID line names the distro.
            for key, value in re.findall(r'([^=\s,]+)=([^=\s,]+)', result):
                if key == 'ID':
                    env.distro = value
                    break
        # BUGFIX: removed a duplicated, unreachable second `return env.distro`.
        return env.distro
def _install_nginx():
    """Download, build and install nginx under ./bin/nginx, then fetch its
    config. Skips the build when the binary already exists."""
    nginx_file = _download_nginx()
    if os.path.isfile('%s/bin/nginx/sbin/nginx' % _get_pwd()):
        print('nginx Already installed.')
    else:
        with env.cd(os.path.dirname(nginx_file)):
            env.run('tar xf %s' % os.path.basename(nginx_file))
            with env.cd('nginx-%s' % nginx_version):
                install_dir = '%s/bin/nginx' % _get_pwd()
                if not os.path.isdir(install_dir):
                    os.makedirs(install_dir)
                env.run(
                    './configure --prefix="%s" --with-http_gzip_static_module --with-http_mp4_module --with-http_auth_request_module --with-http_dav_module' % install_dir)
                env.run('make')
                env.run('make install')
                env.run('make clean')
        assert os.path.isfile('%s/bin/nginx/sbin/nginx' % _get_pwd())
        _get_nginx_config()
def update_gtsvn():
    """``svn up`` the various ~/gtsvn/ directories, then rebuild giella-core.

    Honors ``env.no_svn_up`` to skip the svn update entirely.
    """
    if env.no_svn_up:
        print(yellow("** skipping svn up **"))
        return
    with cd(env.svn_path):
        config = read_config(env.current_dict)
        svn_langs = [l.get('iso') for l in config.get('Languages')
                     if not l.get('variant', False)]
        svn_lang_paths = ['langs/%s' % l for l in svn_langs]
        # TODO: replace langs with specific list of langs from config file
        paths = [
            'giella-core/',
            'words/',
            'art/dicts/',
        ] + svn_lang_paths
        print(cyan("** svn up **"))
        for p in paths:
            _p = os.path.join(env.svn_path, p)
            with cd(_p):
                try:
                    env.run('svn up ' + _p)
                # BUGFIX: a bare ``except:`` also swallowed KeyboardInterrupt
                # and SystemExit; catch ordinary errors only.
                except Exception:
                    abort(
                        red("\n* svn up failed in <%s>. Prehaps the tree is locked?" % _p) + '\n' +
                        red(" Correct this (maybe with `svn cleanup`) and rerun the command, or run with `no_svn_up`.")
                    )
    # TODO: necessary to run autogen just in case?
    print(cyan("** Compiling giella-core **"))
    giella_core = os.path.join(env.svn_path, 'giella-core')
    with cd(giella_core):
        make_file = env.svn_path + '/giella-core/Makefile'
        make_ = "make -C %s -f %s" % (giella_core, make_file)
        env.run(make_)
def compile_strings():
    """ Compile .po translations to .mo strings for use in the live server.

    When ``env.current_dict`` is set, the project's locale list is read from
    its config file and each locale is compiled individually; otherwise a
    bare ``pybabel compile`` is attempted and its stderr inspected on failure.
    """
    if hasattr(env, 'current_dict'):
        config = 'configs/%s.config.yaml.in' % env.current_dict
        with open(config, 'r') as F:
            # NOTE(review): yaml.load without an explicit Loader is deprecated
            # in PyYAML >= 5.1 and unsafe on untrusted input; these configs
            # are project-owned, but consider yaml.safe_load.
            _y = yaml.load(F.read())
        langs = _y.get('ApplicationSettings', {}).get('locales_available')
        for lang in langs:
            # run for each language
            cmd = "pybabel compile -d translations -l %s" % lang
            compile_cmd = env.run(cmd)
            if compile_cmd.failed:
                print(red("** Compilation failed, aborting."))
            else:
                print(green("** Compilation successful."))
    else:
        cmd = "pybabel compile -d translations"
        # warn_only so a pybabel failure doesn't abort the whole fab run;
        # the failure is reported and explained below instead.
        with settings(warn_only=True):
            compile_cmd = env.run(cmd, capture=True)
        if compile_cmd.failed:
            # An unknown locale aborts pybabel entirely; surface only the
            # relevant stderr lines and hint at the likely fixes.
            if 'babel.core.UnknownLocaleError' in compile_cmd.stderr:
                error_line = [l for l in compile_cmd.stderr.splitlines(
                ) if 'babel.core.UnknownLocaleError' in l]
                print(red("** String compilation failed, aborting: ") +
                      cyan(''.join(error_line)))
                print("")
                print(yellow(" Either: "))
                print(yellow(
                    " * rerun the command with the project name, i.e., `fab PROJNAME compile_strings`."))
                print(
                    yellow(" * Troubleshoot missing locale. (see Troubleshooting doc)"))
            else:
                print(compile_cmd.stderr)
                print(red("** Compilation failed, aborting."))
        else:
            print(compile_cmd.stdout)
            print(green("** Compilation successful."))
def get_ipdb():
    """Fetch and unpack the latest GeoLite2 city ip database from maxmind."""
    steps = (
        'mkdir adomattic/ipdb',
        'wget -P adomattic/ipdb/ http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz',
        'gunzip adomattic/ipdb/GeoLite2-City.mmdb.gz',
    )
    with env.cd(env.project_root):
        for step in steps:
            env.run(step)
def coverage(settings='test', reuse='0'):
    """Run the tests under coverage and write an html report.

    :param settings: muckrock settings module suffix to test under
    :param reuse: value for REUSE_DB; '1' reuses the test database
    """
    with env.cd(env.base_path):
        env.run('coverage erase')
        env.run('REUSE_DB=%s coverage run --branch --source muckrock '
                '--omit="*/migrations/*" manage.py test '
                '--settings=muckrock.settings.%s' % (reuse, settings))
        env.run('coverage html')
def coverage(settings='test'):
    """Run the tests under coverage and write an html report.

    :param settings: muckrock settings module suffix to test under
    """
    with env.cd(env.base_path):
        env.run('coverage erase')
        env.run('coverage run --branch --source muckrock manage.py test '
                '--settings=muckrock.settings.%s' % settings)
        env.run('coverage html')
def virtualenv_setup():
    """The third step: create the virtualenv and seed the log directory."""
    env.run("/usr/bin/virtualenv --no-site-packages %(venv_root)s" % env)
    with env.cd(env.project_root):
        env.run("mkdir logs")
        env.run("touch logs/error-django.log")
def bootstrap():
    """Upload the project and rebuild its virtualenv from scratch."""
    upload()
    # BUGFIX: env.cd() returns a context manager; calling it bare was a no-op,
    # so the commands below did not actually run inside env.path.
    with env.cd(env.path):
        env.run('rm -rf {0}'.format(env.environment))
        env.run('mkdir -p {0}'.format(env.environment))
        env.run('{0} {1} --no-site-packages'.format(env.virtualenv, env.environment))
    update_requirements()
def get_zarafa_schema():
    """Convert zarafa.schema into an ldif suitable for cn=config loading.

    Runs slaptest over a temporary schema_convert.conf, strips the
    slapd-generated operational attributes from the resulting ldif, and
    rewrites its dn/cn to the configured custom schema entry.

    Returns:
        str: the cleaned ldif content, stripped of surrounding whitespace.
    """
    ldap_env = LdapEnv.get()
    with fs.mkdtemp() as d:
        shutil.copyfile(join(env.ldap_schema_dir, 'zarafa.schema'),
                        join(d, 'zarafa.schema'))
        context = {
            r'include\s+zarafa\.schema': 'include {0}'.format(join(d, 'zarafa.schema')),
            '__LDAP_ETC_PATH__': ldap_env.etc_path_orig,
        }
        conv_fp = join(d, 'schema_convert.conf')
        text.patch_file(context, join(env.ldap_schema_dir, 'schema_convert.conf.in'),
                        dest=conv_fp, literal=False)
        # debian6: fails to find slaptest without abs path
        env.run('/usr/sbin/slaptest -f {0} -F {1}'.format(conv_fp, d))
        ldif_file = join(d, 'cn=config', 'cn=schema', 'cn={4}zarafa.ldif')
        # Drop slapd's operational attributes and point the entry at our
        # custom schema dn/cn.
        context = {
            '(?m)^structuralObjectClass: .*$': '',
            '(?m)^entryUUID: .*$': '',
            '(?m)^creatorsName: .*$': '',
            '(?m)^createTimestamp: .*$': '',
            '(?m)^entryCSN: .*$': '',
            '(?m)^modifiersName: .*$': '',
            '(?m)^modifyTimestamp: .*$': '',
            '(?m)^dn: .*': 'dn: {0}'.format(env.ldap_custom_schema_dn),
            '(?m)^cn: .*': 'cn: {0}'.format(env.ldap_custom_schema_cn),
        }
        text.patch_file(context, ldif_file, dest=ldif_file, literal=False)
        # BUGFIX: the file handle was leaked; read it via a context manager.
        with open(ldif_file, 'rt') as fp:
            content = fp.read()
        return content.strip()
def mysql_cmd(command, dbname=None, capture=False, no_ascii=False):
    """Runs a mysql command without specifying which database to run it on.
    Badly named.

    :param command: SQL passed to ``mysql -e``
    :param dbname: optional database to run against
    :param capture: forwarded to env.run
    :param no_ascii: append -B -N for bare, tab-separated output
    """
    hostname, port = db_hostname_port()
    user, password = db_auth()
    # NOTE(review): command and credentials are interpolated into a shell
    # string unescaped; quotes in either will break or inject.
    if dbname:
        cmd = 'mysql --default-character-set=utf8 -u%s -p%s %s -h %s -P %s -e "%s"' % (
            user, password, dbname, hostname, port, command)
    else:
        cmd = 'mysql --default-character-set=utf8 -u%s -p%s -h %s -P %s -e "%s"' % (
            user, password, hostname, port, command)
    if no_ascii:
        cmd += ' -B -N'
    return env.run(cmd, capture=capture)
def _get_supervisord_config():
    """Render the fearless supervisor program block, combine it with the stock
    supervisord skeleton, and symlink the result from /etc/supervisord.conf."""
    pwd = _get_pwd()
    template = Template(filename='%s/config/supervisor/fearless.conf.tml' % pwd)
    conf = template.render(dir=pwd)
    with open('%s/config/supervisor/fearless.conf' % pwd, 'wb') as f:
        f.write(conf)
    # Prepend echo_supervisord_conf's default skeleton to our program block.
    output = env.run('echo_supervisord_conf', capture=True)
    with open('%s/config/supervisor/supervisord.conf' % pwd, 'wb') as f:
        f.write(output + conf)
    sudo('rm -f /etc/supervisord.conf')
    sudo('ln -s %s/config/supervisor/supervisord.conf /etc/supervisord.conf' % pwd)
def _install_redis():
    """Download, build and install redis under ./bin/redis. Skips the build
    when the binary already exists."""
    redis_file = _download_redis()
    if os.path.isfile('%s/bin/redis/bin/redis-server' % _get_pwd()):
        print('redis Already installed.')
    else:
        with env.cd(os.path.dirname(redis_file)):
            env.run('tar xf %s' % os.path.basename(redis_file))
            with env.cd('redis-%s' % redis_version):
                install_dir = '%s/bin/redis' % _get_pwd()
                if not os.path.isdir(install_dir):
                    os.makedirs(install_dir)
                env.run('make')
                env.run('make PREFIX="%s" install' % install_dir)
                env.run('make clean')
        assert os.path.isfile('%s/bin/redis/bin/redis-server' % _get_pwd())
def git_updates(git_branch=None):
    """Check out ``git_branch`` (when given), then pull and update submodules."""
    with env.cd(settings.PROJECT_PATH):
        if git_branch:
            env.run('git checkout {branch}'.format(branch=git_branch))
        env.run('git pull')
        env.run('git submodule update')
def provision():
    """Copy the provision script from the repo to the host and execute it."""
    # Refresh the scripts directory on the host before uploading.
    env.run('rm -rf codalab_scripts/*')
    env.run('mkdir -p codalab_scripts')
    put(pathjoin(PROJECT_DIR, 'scripts/ubuntu/'), 'codalab_scripts/')
    env.run('chmod a+x codalab_scripts/ubuntu/provision')
    sudo('codalab_scripts/ubuntu/provision %s' % env.DEPLOY_USER)
def status():
    """Print the status of all supervisor-managed processes."""
    with hide('running'), _virtualenv():
        # supervisorctl needs a conf file, so render a throwaway one.
        env.run(
            'python manage.py supervisor getconfig > local/.tmpsupervisord.conf'
        )
        env.run('supervisorctl -c local/.tmpsupervisord.conf status')
        env.run('rm local/.tmpsupervisord.conf')
def install_corenlp_server():
    """Interactively install the Wordseer stanford-corenlp-python server.

    Prompts for an install directory, clones the wrapper repo, then downloads
    and installs the corenlp distribution inside it.
    """
    directory = raw_input(
        "Please enter the full path of the directory where you want to "
        "install corenlp server, Please dont slash afterwards : ")
    with cd(directory):
        env.run(
            "git clone https://github.com/Wordseer/stanford-corenlp-python.git"
        )
        # BUGFIX: the directory name was written as the bare expression
        # `stanford - corenlp - python`, which raised NameError at runtime;
        # it must be a string literal.
        with cd("stanford-corenlp-python"):
            env.run(
                "wget http://nlp.stanford.edu/software/stanford-corenlp-full-2014-08-27.zip"
            )
            env.run("unzip stanford-corenlp-full-2014-08-27.zip")
            env.run("python setup.py install")
    return
def virtual_env():
    """Create the virtualenv (if missing), clone the MadMachinesNLP01 repo into
    it, and optionally install its requirements inside the activated env."""
    if not exists(VIRTUAL_ENVIRONMENT, use_sudo=True):
        run("virtualenv MadMachinesNLP01")
    with cd(VIRTUAL_ENVIRONMENT):
        env.run(
            "git clone https://github.com/kaali-python/MadMachinesNLP01.git"
        )
        with prefix("source bin/activate"):
            if confirm("Do you want to install requirements.txt again??"):
                for install_cmd in (
                        "pip install pyopenssl ndg-httpsclient pyasn1",
                        "pip install numpy",
                        "pip install -r MadMachinesNLP01/requirements.txt"):
                    env.run(install_cmd)
def restart_running():
    """Restart every running nds- service on this host, reporting any that
    fail to stop or start cleanly."""
    find_running()
    with cd(env.neahtta_path):
        running_services = search_running()
        failures = []
        for s, pid in running_services:
            print(cyan("** Restarting service for <%s> **" % s))
            stop = env.run("sudo service nds-%s stop" % s)
            if not stop.failed:
                start = env.run("sudo service nds-%s start" % s)
                if not start.failed:
                    print(green("** <%s> Service has restarted successfully **" % s))
                else:
                    failures.append((s, pid))
            else:
                failures.append((s, pid))
        if len(failures) > 0:
            print(red("** something went wrong while restarting the following **"))
            # BUGFIX: the loop printed the stale loop variables (s, pid) from
            # the restart loop above instead of each failed entry.
            for f in failures:
                print(f)
def update_app():
    """Apply migrations and regenerate static assets on the server."""
    with _virtualenv():
        print(green("Running migrations..."))
        env.run('python manage.py migrate')
        print(green("Gathering and preprocessing static files..."))
        env.run('python manage.py collectstatic --noinput')
        env.run('python manage.py compress')
def git_pull():
    """ pull from git

    Usage:
        fab <env> git_pull
    """
    with env.cd(env.project_root):
        # FIX: 'git fetch' and the submodule command contain no %-placeholders,
        # so the old trailing "% env" was a pointless no-op (and would crash on
        # any stray '%' added later).
        env.run('git fetch')
        env.run('git checkout %(branch)s; git pull' % env)
        env.run('git submodule update --init --recursive')
def status():
    """Distro-agnostic check whether a postgresql server process is running.

    Returns:
        bool: True when a postgres process is found, False otherwise.
    """
    cmd = 'if ps aux | grep -v grep | grep -i "postgres"; then echo 1; else echo 0; fi'
    if env.run == local:
        result = local(cmd, capture=True)
    else:
        result = env.run(cmd)
    # BUGFIX: the command always produces output ("0" when not running), and
    # the non-empty string "0" is truthy, so `if result:` reported the server
    # as running unconditionally. Inspect the echoed flag (last line) instead.
    lines = result.strip().splitlines()
    running = bool(lines) and lines[-1].strip() == '1'
    if running:
        print(green("Postgresql server is running."))
    else:
        print(red("Postgresql server is not running."))
    return running
def installed():
    """Distro-agnostic check whether postgis2 is installed.

    Returns the (truthy when installed) output of the distro's check command.
    """
    pkg = 'fabric_colors.distro.{0}.postgis'.format(env.distro)
    mod = __import__(pkg, fromlist=['{0}.postgis'.format(env.distro)])
    check_cmd = getattr(mod, 'PKG_INSTALLED_CMD')
    if env.run == local:
        result = local(check_cmd, capture=True)
    else:
        result = env.run(check_cmd)
    if result:
        print(red("Already installed."))
    return result
def detect_distro():
    """Detect the host distribution via lsb_release, falling back to
    /etc/redhat-release.

    Returns:
        Distro or None: the detected name/release pair, or None when the
        distribution cannot be determined.
    """
    distro = None
    if fs.sh_which('lsb_release'):
        stdout = env.run('lsb_release -a', capture=True)
        # FIX: raw string literals — '\s' is an invalid escape sequence in a
        # plain string (DeprecationWarning on python >= 3.6).
        name = text.safefind(r'(?m)^Distributor ID:\s*(.*)$', stdout)
        release = text.safefind(r'(?m)^Release:\s*(.*)$', stdout)
        distro = Distro(name, release)
    else:
        redhat_release = '/etc/redhat-release'
        if os.path.exists(redhat_release):
            content = fs.sh_cat(redhat_release)
            name, release = text.safefind('(.*) release (.*) [(].*[)]', content)
            distro = Distro(name, release)
    return distro
def update(skipreq=True):
    """Update the remote target.

    By default requirements checking is skipped; invoke as
    ``update:skipreq=False`` to force updating requirements.
    """
    with prefix('workon %(venv)s' % env):
        env.run('git pull')
        if env.less:
            run('lessc -x %(projectdir)s/static/less/theme-default/style.less > %(projectdir)s/static/css/theme-default/style.css' % env)
        # skipreq arrives from the fab CLI as a string, not a boolean, so the
        # comparison has to be against the string forms of False.
        if skipreq in ("False", "false"):
            execute(update_requirements)
        env.run('./manage.py collectstatic --noinput --no-default-ignore')
        env.run('cp fcgi/django_%(settings)s.fcgi fcgi/django.fcgi' % env)
def update(tag=None):
    """Pull the latest code (optionally moving to ``tag``), refresh
    requirements, regenerate config, and sync the database and static files."""
    with virtualenv(env.venvpath):
        with cd(env.DEPLOY_PATH):
            env.run('git pull')
            if tag:
                update_to_tag(tag=tag)
            requirements()
            with cd('codalab'):
                config_gen(config=env.DJANGO_CONFIG,
                           settings_module=env.SETTINGS_MODULE)
                env.run('./manage syncdb --noinput')
                # When South is enabled:
                # env.run('./manage migrate')
                env.run('./manage collectstatic --noinput')
def chk_data_dir():
    """Distro-agnostic check that postgresql's data directory exists.

    Returns a truthy string when the directory is present, empty otherwise.
    """
    pkg = 'fabric_colors.distro.{0}.postgresql'.format(env.distro)
    mod = __import__(pkg, fromlist=['{0}.postgresql'.format(env.distro)])
    DEFAULT_DATA_DIR = getattr(mod, 'DEFAULT_DATA_DIR')
    cmd = """if [ -e "{0}" ]; then echo 1; else echo ""; fi""".format(DEFAULT_DATA_DIR)
    if env.run == local:
        result = local(cmd, capture=True)
    else:
        result = env.run(cmd)
    if result:
        print(green("Postgresql server's data directory is present."))
    else:
        print(red("Postgresql server's data directory is not present."))
    return result
def deploy_remotely(git_branch=None, restart_url='http://rnacentral.org', quick=False):
    """Run the full deployment on the remote host.

    :param git_branch: branch to deploy; defaults to the currently checked-out one
    :param restart_url: url visited after restarting django to warm up the site
    :param quick: when True, skip the rsync/static-file regeneration steps
    """
    git_updates(git_branch)
    update_npm()
    if not quick:
        rsync_local_files()
        collect_static_files()
        compress_static_files()
    flush_memcached()
    restart_django(restart_url)
    if not git_branch:
        with env.cd(settings.PROJECT_PATH):
            git_branch = env.run('git rev-parse --abbrev-ref HEAD')
    slack("Deployed '%s' at %s: <http://rnacentral.org|rnacentral.org>" % (git_branch, env.host))
def runserver(send_sigusr1=False):
    """ Run the development server.

    Prefers the production config for the current project; falls back to the
    development config (*.in) when that is missing — unless this host is a
    production server, in which case it exits instead of guessing.

    :param send_sigusr1: when True, forward any SIGUSR1 this process receives
        to its parent (so the caller can learn when the server signals).
    """
    _path = 'configs/%s.config.yaml' % env.current_dict
    try:
        # Probe-read the production config purely to detect its absence.
        open(_path, 'r').read()
    except IOError:
        if env.real_hostname not in running_service:
            _path = 'configs/%s.config.yaml.in' % env.current_dict
            print(yellow("** Production config not found, using development (*.in)"))
        else:
            print(
                red("** Production config not found, and on a production server. Exiting."))
            sys.exit(-1)
    compile_strings()
    # TODO: option to turn on or off reloader.
    cmd = ['NDS_CONFIG=%s' % _path, 'python neahtta.py dev']
    if send_sigusr1:
        cmd.append('--send-sigusr1')

    # when instructed to send SIGUSR1, the parent of this process must know
    # about it.
    def forward_sigusr1(signum, _frame):
        assert signum == SIGUSR1
        os.kill(os.getppid(), SIGUSR1)

    # The handler is installed only for the duration of the server run.
    with temporary_signal_handler(SIGUSR1, forward_sigusr1):
        print(green("** Starting development server."))
        run_cmd = env.run(' '.join(cmd))
        if run_cmd.failed:
            print(red("** Starting failed for some reason."))
            sys.exit(-1)
def deploy_locally(git_branch=None, restart_url='http://rnacentral.org', quick=False):
    """Run the deployment steps on the local test machine.

    :param git_branch: branch to deploy; defaults to the current HEAD
    :param restart_url: url visited after restarting django to warm up the site
    :param quick: when True, skip reinstalling the python requirements
    """
    git_updates(git_branch)
    update_npm()
    collect_static_files()
    compress_static_files()
    if not quick:
        install_django_requirements()
    flush_memcached()
    restart_django(restart_url)
    if not git_branch:
        with env.cd(settings.PROJECT_PATH):
            # env.run == local here, so it accepts the capture argument.
            git_branch = env.run(
                'git rev-parse --abbrev-ref HEAD', capture=True)
    slack(
        "Deployed '%s' at ves-hx-a4: <http://test.rnacentral.org|test.rnacentral.org>" % git_branch)