def setup_celery():
    require('virtualenv_root', provided_by=env.deployments)
    user = env.unique_identifier
    vhost = env.unique_identifier
    allowed_chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
    password = ''.join(random.choice(allowed_chars) for i in range(10))
    _add_to_dotenv('BROKER_URL', 'amqp://%s:%s@localhost:5672/%s' % (user, password, vhost))
    _add_to_dotenv('CELERY_RESULT_BACKEND', 'redis://127.0.0.1:6379/0')
    print magenta("Create Monit File")
    with cd('/home/www-data/etc/monit.d/'):
        run('echo "check process %s_celery with pidfile /home/www-data/projects/%s/tmp/celery.pid" > %s_celery'
            % (env.unique_identifier, env.project, env.unique_identifier))
        run('echo "\t#!/bin/sh" >> %s_celery' % env.unique_identifier)
        run("echo '\t%s' >> %s_celery"
            % ('start program = "/home/www-data/projects/%s/startstop.sh start celery"' % env.project,
               env.unique_identifier))
        run("echo '\t%s' >> %s_celery"
            % ('stop program = "/home/www-data/projects/%s/startstop.sh stop celery"' % env.project,
               env.unique_identifier))
        run('echo "\tif 5 restarts within 5 cycles then timeout" >> %s_celery' % env.unique_identifier)
        run('echo "\tif totalmemory > 30000 kb for 5 cycles then alert" >> %s_celery' % env.unique_identifier)
        run('echo "\tif totalmemory > 210.0 MB for 10 cycles then restart" >> %s_celery' % env.unique_identifier)
        run('echo "\tif totalcpu > 50%% for 5 cycles then alert" >> %s_celery' % env.unique_identifier)
        run('echo "\tif totalcpu > 50%% for 10 cycles then restart" >> %s_celery' % env.unique_identifier)
    print yellow("Create a rabbitmq user and a rabbitmq vhost in Web GUI (Tip:Lastpass)")
    print yellow("username: %s" % user)
    print yellow("pw: %s" % password)
    print yellow("vhost: %s" % vhost)
    print yellow("Give user (%s) all permissions for vhost (%s)" % (user, vhost))
    print yellow("On server: monit reload")

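# Hedged alternative sketch (not part of the original tasks): the echo-append
# chain above can also be written with fabric.contrib.files.append from
# Fabric 1.x, which appends each line only if it is not already present.
# The stanza mirrors a subset of the run() calls in setup_celery(); the
# helper name is hypothetical.
def _append_celery_monit_stanza():
    from fabric.contrib.files import append
    monit_file = '/home/www-data/etc/monit.d/%s_celery' % env.unique_identifier
    stanza = [
        'check process %s_celery with pidfile /home/www-data/projects/%s/tmp/celery.pid'
        % (env.unique_identifier, env.project),
        '\tstart program = "/home/www-data/projects/%s/startstop.sh start celery"' % env.project,
        '\tstop program = "/home/www-data/projects/%s/startstop.sh stop celery"' % env.project,
        '\tif 5 restarts within 5 cycles then timeout',
    ]
    append(monit_file, stanza)
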
def color_test():
    for x in range(0, 2):
        print colors.blue('Blue text', bold=False) + '\n'
        time.sleep(0.2)
        print colors.cyan('cyan text', bold=False)
        time.sleep(0.2)
        print colors.green('green text', bold=False)
        time.sleep(0.2)
        print colors.magenta('magenta text', bold=False)
        time.sleep(0.2)
        print colors.red('red text', bold=False)
        time.sleep(0.2)
        print colors.white('white text', bold=False)
        time.sleep(0.2)
        print colors.yellow('yellow text', bold=False)
        time.sleep(0.2)
        print colors.blue('Blue text bold', bold=True)
        time.sleep(0.2)
        print colors.cyan('cyan text bold', bold=True)
        time.sleep(0.2)
        print colors.green('green text bold', bold=True)
        time.sleep(0.2)
        print colors.magenta('magenta text bold', bold=True)
        time.sleep(0.2)
        print colors.red('red text bold', bold=True)
        time.sleep(0.2)
        print colors.white('white text bold', bold=True)
        time.sleep(0.2)
        print colors.yellow('yellow text bold', bold=True)
        time.sleep(0.2)

def tag_commit(self):
    if env.offline:
        self._offline_tag_commit()
        return

    pattern = ".*-{}-.*".format(re.escape(self._environment))
    github = _get_github()
    repo = github.repository('dimagi', 'commcare-hq')
    for tag in repo.tags(self._max_tags):
        if re.match(pattern, tag.name):
            self._last_tag = tag.name
            break
    if not self._last_tag:
        print magenta(
            'Warning: No previous tag found in last {} tags for {}'.format(
                self._max_tags, self._environment))
    tag_name = "{}-{}-deploy".format(self.timestamp, self._environment)
    msg = "{} deploy at {}".format(self._environment, self.timestamp)
    user = github.me()
    repo.create_tag(
        tag=tag_name,
        message=msg,
        sha=self.deploy_ref,
        obj_type='commit',
        tagger={
            'name': user.login,
            'email': user.email or '{}@dimagi.com'.format(user.login),
            'date': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
        })
    self._deploy_tag = tag_name

def syncdb():
    """runs syncdb on the remote host"""
    require('project_root', provided_by=env.deployments)
    print magenta("Synchronize database")
    with cd(env.project_root):
        with prefix('source env/bin/activate'):
            run('./manage.py syncdb --noinput')

def push(self, directory=None):
    """
    Push configuration files that have changes, given user confirmation.
    """
    host_generated_dir = self._get_host_generated_dir(directory)
    host_remotes_dir = self._get_host_remotes_dir(directory)

    for conffile in self.conffiles:
        conffile.pull(host_remotes_dir)

    for conffile in self.conffiles:
        conffile.generate(host_generated_dir)

    has_diff = lambda conffile: conffile.diff(host_generated_dir, host_remotes_dir, True)
    with_diffs = filter(has_diff, self.conffiles)

    if not with_diffs:
        print(magenta('No configuration files to push for {host}'
                      .format(host=self.host)))
        return

    print(magenta('The following configuration files have changed for {host}:'
                  .format(host=self.host)))
    print
    for conffile in with_diffs:
        print(magenta('\t' + conffile.remote))

    if options.assume_yes or confirm('Push configuration files to {host}?'
                                     .format(host=self.host), default=False):
        for conffile in with_diffs:
            conffile.push(host_generated_dir)

def bootstrap():
    """ initialize remote host environment (virtualenv, deploy, update) """
    require('root', provided_by=env.deployments)
    print magenta("Cloning Repository")
    with cd(env.root):
        run("git clone %s" % env.git_repository)

    # some one time setup things
    with cd(env.project_root):
        if env.git_branch != 'master':
            run('git checkout %s' % (env.git_branch,))
        run('mkdir static')
        run('mkdir media')

    with cd(env.code_root):
        run('ln -sf settings_%s.py settings.py' % env.environment)

    # create virtualenv and install all the requirements
    execute('create_virtualenv')
    execute('update_requirements')
    execute('create_database')
    execute('syncdb')
    execute('migrate')

    print magenta("Load initial data")
    with cd(env.project_root), prefix('source env/bin/activate'):
        run('./manage.py loaddata allink_user.json')

    # only compile messages if locale folder is present
    if os.path.isdir('locale'):
        execute('compilemessages')
    execute('collectstatic')

def style_check():
    """Runs Python static code checkers against the code.

    Although more for style reasons, these are quite helpful in
    identifying problems with the code. A file will be generated at
    ./.log/style.log for perusal.

    Due to how pylint works it must be invoked manually.
    """
    utils.fastprint("Checking Python code style ... ")
    with api.settings(api.hide('warnings'), warn_only=True):
        pep8 = api.local('pep8 .', True)
        pyflakes = api.local('pyflakes .', True)
    # Print them out to a file so we can peruse them later.
    with open('./.log/style.log', 'w') as log:
        log.write("pep8:\n%s\n\npyflakes:\n%s" % (pep8, pyflakes))
    if pep8:
        print(colors.magenta("fail", True))
    elif pyflakes:
        print(colors.magenta("fail", True))
    else:
        print(colors.green(" ok ", True))
    if pep8 or pyflakes:
        print(colors.magenta("Please check ./.log/style.log.", True))
    print(colors.yellow("Please be sure to run pylint manually.", True))
    return (pep8 or pyflakes)

def service(name, action, check_status=True):
    c = fabric.context_managers
    with sudo('root'), c.settings(c.hide('running', 'stdout', 'stderr', 'warnings'),
                                  warn_only=True):
        info('Service: {} {}', name, action)
        if check_status:
            output = run('service {} status'.format(name), pty=False, combine_stderr=True)
            if output.return_code != 0:
                puts(indent(magenta(output)))
                return
            elif action in output:
                puts(indent('...has status {}'.format(
                    magenta(output[len(name) + 1:]))))
                return
        output = run('service {} {}'.format(name, action), pty=False, combine_stderr=True)
        if output.return_code != 0:
            puts(indent(magenta(output)))

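# Usage sketch for service() (the service names here are hypothetical
# examples): with check_status on, the action is only issued when the
# service is not already in the reported state.
#
#     service('nginx', 'restart')
#     service('redis-server', 'start', check_status=False)
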
def push(self, generated_dir, remotes_dir):
    """
    Push configuration files that have changes, given user confirmation.
    """
    host_generated_dir = os.sep.join([generated_dir, options.get_hostname()])
    host_remotes_dir = os.sep.join([remotes_dir, options.get_hostname()])

    for conffile in self.conffiles:
        conffile.pull(host_remotes_dir)

    for conffile in self.conffiles:
        conffile.generate(host_generated_dir)

    has_diff = lambda conffile: conffile.diff(host_generated_dir, host_remotes_dir, True)
    with_diffs = filter(has_diff, self.conffiles)

    if not with_diffs:
        print(magenta('No configuration files to push for {host}'.format(host=options.get_hostname())))
        return

    print(magenta('The following configuration files have changed for {host}:'.format(host=options.get_hostname())))
    print
    for conffile in with_diffs:
        print(magenta('\t' + conffile.remote))

    if confirm('Push configuration files to {host}?'.format(host=options.get_hostname()), default=False):
        for conffile in with_diffs:
            conffile.push(host_generated_dir)

def link_settings(self):
    # link production settings
    settings_dict = self.ini.get("settings")
    self.base = '%s/%s/src/%s/%s/' % (
        self.ini.get('remote_projects_dir'),
        self.ini.get('project_address'),
        self.ini.get('project_appname'),
        self.ini.get('project_appname'))
    # if None given use production settings
    filename = settings_dict.get("active_setting", "production") + '.py'
    self.original = '%sconfigs/%s' % (self.base, filename)
    self.target = '%ssettings.py' % (self.base)
    if not exists(self.original):
        raise ImproperlyConfigured(
            red('original settings file does not exist, '
                'check active_setting key @legend'))
    if exists(self.target) and not settings_dict.get('overwrite_settings', False):
        print yellow('skipping linking, '
                     'since target file already exists')
    else:
        print green('linking settings file \'%s\'' % filename)
        if exists(self.target):
            print magenta('removing old link')
            run('rm %s' % self.target)
        run('ln -s %s %s' % (self.original, self.target))

def configure_nginx():
    # Do this for new servers only
    run("sudo /etc/init.d/nginx start")
    print green("Copying nginx.config virtual host file for ajibika.org to the sites-available directory")
    with settings(warn_only=True):
        if file_exists("/etc/nginx/sites-available/www.ajibika.org"):
            run("sudo rm /etc/nginx/sites-enabled/www.ajibika.org")
        result = put("conf/www.ajibika.org", "/etc/nginx/sites-available/", use_sudo=True)
        if result.failed and not confirm("Unable to copy www.ajibika.org to sites-enabled dir. Continue anyway?"):
            abort("Aborting at user request.")
    print green("conf/www.ajibika.org has been copied")
    print red("Removing old nginx configs")
    if file_exists("/etc/nginx/sites-enabled/default"):
        result = run("sudo rm /etc/nginx/sites-enabled/default")
        if result.failed and not confirm("Unable to remove old nginx configs. Continue anyway?"):
            abort("Aborting at user request.")
    print magenta("Now symlinking the ajibika virtual host file to sites-enabled")
    if not file_exists("/etc/nginx/sites-enabled/www.ajibika.org"):
        with settings(warn_only=True):
            result = run("sudo ln -s /etc/nginx/sites-available/www.ajibika.org /etc/nginx/sites-enabled/www.ajibika.org")
            if result.failed and not confirm("Unable to symlink the ajibika virtual host file to sites-enabled. Continue anyway?"):
                abort("Aborting at user request.")
    print "sudo reload nginx"
    run("sudo /etc/init.d/nginx reload")

def rm():
    with hide('running'):
        print magenta("[1/2] Stop all services...", bold=True)
        print green(local("docker-compose stop", capture=True))
        print magenta("[2/2] Remove all containers...", bold=True)
        print green(local("docker-compose rm -f", capture=True))

def provision():
    print(magenta('Starting Provisioning'))
    message = 'Waiting for puppet to become available'
    with hide('everything'):
        with settings(warn_only=True):
            while 1:
                sys.stdout.write("\r" + magenta(message) + " ")
                sys.stdout.flush()
                # we don't have a puppet master here
                # so we need to poll
                if run("which puppet").succeeded:
                    sys.stdout.write("\n")
                    sys.stdout.flush()
                    break
                message = message + white('.')
                time.sleep(2)
    # this AMI does not let you log in as root.
    # we need to be sure the agent-forwarding is active
    # when we provision, so we pass -E on top of the default
    # fabric sudo prefix. The default rackspace images
    # allow you to ssh as root
    sudo_prefix = "sudo -S -E -p '%(sudo_prompt)s' " % env
    with settings(sudo_prefix=sudo_prefix):
        sudo("puppet apply --modulepath '/home/ubuntu/configuration/modules' /home/ubuntu/configuration/site.pp")

def destroy(branch='', source=''):
    global project
    warnings.simplefilter('ignore')
    if len(source) == 0:
        folder = branch.split('/')[-1]
    else:
        folder = source
    if len(folder) == 0:
        print red("The folder was not found")
        return
    print green("Start removing folder")
    source = project['path'] + folder
    with cd(project['path']):
        with settings(hide('warnings', 'running', 'stdout', 'stderr'), warn_only=True):
            print magenta("Check that the folder to delete exists")
            print yellow("RUN: ls -la | grep %s" % folder)
            result = run("ls -la | grep %s" % folder)
            if result:
                print yellow("RUN: rm -rf %s" % folder)
                sudo("rm -rf %s" % folder)
                print green("The folder %s was deleted successfully" % folder)
            else:
                print green("The folder %s does not exist on the server" % folder)
    print green("Completed removing folder")

def setup(replace=False):
    """Set up the GAE environment.

    1. Create the directory that will hold the zip file
    2. Download the google_appengine SDK zip file
    3. Unpack the downloaded zip file

    :replace: if True, delete the google_appengine directory created by
        unpacking the zip file and replace it with the new SDK
    """
    require("gae_download_url", "gae_lib_dir")
    if not contrib.files.exists(env.gae_lib_dir):
        run("mkdir %s" % env.gae_lib_dir)
    with cd(env.gae_lib_dir):
        base = os.path.basename(env.gae_download_url)
        if contrib.files.exists(base):
            print(magenta("you already downloaded %s." % base))
        else:
            run("wget %s" % env.gae_download_url)
        if replace:
            run("rm -fr google_appengine")
        if contrib.files.exists("google_appengine"):
            print(magenta("google_appengine already exists."))
        else:
            run("unzip %s" % base)

def delete_pyc():
    print magenta("Delete *.pyc files")
    command = 'find . -name \*.pyc -print0 | xargs -0 rm'
    if env.is_local:
        run_local(command)
    else:
        with cd(env.project_root):
            run(command)

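# Hedged aside: on GNU find the same cleanup works without xargs, which also
# avoids `rm` erroring out when no .pyc files match:
#
#     command = "find . -name '*.pyc' -delete"
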
def compilemessages():
    """compiles all translations"""
    print magenta("Compile messages")
    if env.is_local:
        run_local('./manage.py compilemessages')
    else:
        with cd(env.project_root), prefix('source env/bin/activate'):
            run('./manage.py compilemessages')

def update_requirements():
    """update external dependencies on remote host"""
    require('root', provided_by=('local',) + env.deployments)
    print magenta("Update requirements")
    if env.is_local:
        run_local('pip install --requirement REQUIREMENTS_LOCAL')
    else:
        _update_requirements_remote()

def update_html5():
    '''Update HTML5-Boilerplate.'''
    local("bash setup/copy_bootstrap.bash .")
    puts(colors.magenta("Showing git status, if there are no updates, then the subsequent commit will fail:"))
    local("git status")
    puts(colors.magenta("Committing..."))
    local("git commit -a -m 'Updated Bootstrap'")
    puts(colors.magenta("Updated HTML5-Boilerplate"))

def ps(state='run'):
    with hide('running'):
        if state == 'all':
            print magenta("[All process state]", bold=True)
            print yellow(local("docker ps -a", capture=True))
        else:
            print magenta("[Running process state]", bold=True)
            print yellow(local("docker ps", capture=True))

def create_database():
    database_name = env.django_settings.UNIQUE_PREFIX
    print magenta("Create database")
    if env.is_local:
        run_local('mysql --user=$MYSQL_USER -p$MYSQL_PASSWORD -e "CREATE DATABASE %s;"' % database_name)
    else:
        database = env.django_settings.UNIQUE_PREFIX
        run('mysql --user=$MYSQL_USER -p$MYSQL_PASSWORD -e "CREATE DATABASE db_%s;"' % database)

def test_lexicon_definitions(self):
    skw = {
        'split_compounds': True,
        'non_compounds_only': False,
        'no_derivations': False,
        'return_raw_data': True,
    }
    _str_norm = 'string(normalize-space(%s))'

    def test_the_case(case, result):
        # test multiple results
        expect_in = case.get('expected_definitions', False)
        unexpect_in = case.get('unexpected_definitions', False)
        if expect_in:
            test_func = self.assertIn
            err_msg = "Could not find definition."
            _in = expect_in
            print " expect: " + cyan(repr(_in))
            print " result: " + magenta(repr(result))
        if unexpect_in:
            test_func = self.assertNotIn
            err_msg = "Unexpected definition."
            _in = unexpect_in
            print " DONT expect: " + cyan(repr(_in))
            print " result: " + magenta(repr(result))
        if not expect_in and not unexpect_in:
            _in = []
            print yellow(" Not expecting any result.")
            print " result: " + magenta(repr(result))
            print " " + green("PASSED")
        for _i in _in:
            passed = True
            try:
                test_func(_i, result)
            except Exception, exc:
                passed = False
            if passed:
                print " " + green("PASSED")
            else:
                print " " + red("FAILED") + ': ' + repr(_i)
                if err_msg:
                    print " > " + yellow(err_msg)
                failuretrack.add(
                    "LexiconDefinitions",
                    exc,
                    _i,
                    ' '.join(_in),
                    ' '.join(result),
                    err_msg,
                )

def update_js_requirements():
    """update external javascript dependencies on remote host"""
    require('root', provided_by=('local',) + env.deployments)
    print magenta("Install javascript requirements")
    if env.is_local:
        run_local('npm install')
    else:
        with cd(env.project_root):
            run('npm install')

def test_color():
    print blue('blue', bold=True)
    print red('red')
    print green('green')
    print yellow('yellow')
    print white('white')
    print cyan('cyan')
    print magenta('magenta')
    print black('black')

def check_test_server():
    """Verify that test server is running."""
    try:
        urllib2.urlopen(TEST_URL)
    except urllib2.URLError:
        print magenta("\nTest server is not running. Run `fab serve_test_server`.")
        sys.exit(1)

def up():
    with hide('running'):
        for i in xrange(len(container)):
            print magenta('[' + str(i + 1) + '/' + str(len(container)) + '] Start up ' + container_name[i] + '...', bold=True)
            print green(local(startup_command[i], capture=True))
            if i == 0:
                for wait in xrange(10, 0, -1):
                    print yellow('[info] Wait ' + str(wait) + ' second for service ' + container_name[i])
                    time.sleep(1)

def update_html5():
    '''Update HTML5-Boilerplate.'''
    local("cd html5 && git pull origin master")
    local("bash setup/copy_html5.bash .")
    puts(colors.magenta("Showing git status, if there are no updates, then the subsequent commit will fail:"))
    local("git status")
    puts(colors.magenta("Committing..."))
    local("git commit -a -m 'Updated HTML5'")
    puts(colors.magenta("Updated HTML5-Boilerplate"))

def setenv(**kwargs):
    """
    Task to set 'env' variables from the command line.
    """
    for (key, value) in kwargs.iteritems():
        if key in env:
            print "%s: env['%s'] = %s (was: %s)" % (colors.red("SetEnv"),
                                                    colors.magenta(key),
                                                    colors.cyan(value),
                                                    colors.cyan(env[key]))
        else:
            print "%s: env['%s'] = %s" % (colors.green("SetEnv"),
                                          colors.magenta(key),
                                          colors.cyan(value))
        env[key] = value

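# Usage sketch from the command line (the trailing task name is a
# hypothetical example); Fabric parses key=value pairs after the colon
# into **kwargs:
#
#     fab setenv:git_branch=staging,use_ssl=True deploy
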
def update_requirements():
    """update external dependencies on remote host"""
    require('project_root', provided_by=('local',) + env.deployments)
    print magenta("Update requirements")
    if env.is_local:
        run_local('pip install --requirement REQUIREMENTS_LOCAL')
    else:
        with cd(env.project_root):
            with prefix('source env/bin/activate'):
                run('pip install --requirement REQUIREMENTS_SERVER')

def makemessages(**kwargs):
    """pulls out all strings marked for translation"""
    require('root', provided_by=('local',))
    if not env.is_local:
        utils.abort('runs on local env only. usage: fab local makemessages:lang=fr')
    if 'lang' not in kwargs:
        utils.abort('missing language. usage: fab local makemessages:lang=fr')
    print magenta("Make messages")
    cmd = './manage.py makemessages --domain=%s --locale=%s --ignore=env/* --ignore=node_modules/*'
    run_local(cmd % ('django', kwargs['lang']))
    utils.puts('If you have javascript translations, don\'t forget to run:')
    utils.puts(cmd % ('djangojs', kwargs['lang']))

def deploy_commcare(confirm="yes", resume='no', offline='no', skip_record='no'):
    """Preindex and deploy if it completes quickly enough, otherwise abort

    fab <env> deploy_commcare:confirm=no  # do not confirm
    fab <env> deploy_commcare:resume=yes  # resume from previous deploy
    fab <env> deploy_commcare:offline=yes  # offline deploy
    fab <env> deploy_commcare:skip_record=yes  # skip record_successful_release
    """
    _require_target()
    if strtobool(confirm) and (
            not _confirm_translated() or
            not console.confirm(
                'Are you sure you want to preindex and deploy to '
                '{env.deploy_env}?'.format(env=env), default=False)):
        utils.abort('Deployment aborted.')

    env.full_deploy = True

    if resume == 'yes':
        try:
            cached_payload = retrieve_cached_deploy_env()
            checkpoint_index = retrieve_cached_deploy_checkpoint()
        except Exception:
            print(red('Unable to resume deploy, please start anew'))
            raise
        env.update(cached_payload)
        env.resume = True
        env.checkpoint_index = checkpoint_index or 0
        print(magenta('You are about to resume the deploy in {}'.format(env.code_root)))

    env.offline = offline == 'yes'
    if env.offline:
        print(magenta('You are about to run an offline deploy. '
                      'Ensure that you have run `fab prepare_offline_deploy`.'))
        offline_ops.check_ready()
        if not console.confirm('Are you sure you want to do an offline deploy?',
                               default=False):
            utils.abort('Task aborted')

        # Force ansible user and prompt for password
        env.user = '******'
        env.password = getpass('Enter the password for the ansible user: ')

    _deploy_without_asking(skip_record)

def create_database():
    database_name = env.unique_identifier
    print magenta("Create database")
    if env.is_local:
        run_local('psql -U $PGUSER -d postgres -c "CREATE DATABASE %s;"' % database_name)
    else:
        user = env.unique_identifier
        database = env.unique_identifier
        allowed_chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
        password = ''.join(random.choice(allowed_chars) for i in range(10))
        run('sudo nine-manage-databases database create --user=nmd_%s --password=%s nmd_%s'
            % (user, password, database))
        _add_to_dotenv('DATABASE_URL', 'postgres://nmd_%s:%s@localhost/nmd_%s' % (user, password, database))
        _add_to_dotenv('PG_USER', 'nmd_%s' % (user))
        _add_to_dotenv('PG_PASSWORD', '%s' % (password))

def help():
    """Prints help."""
    show(green("Available options:"))
    show(red("conf_file") + ": " + yellow("JSON configuration file to use"))
    show(red("instance") + ": " + yellow("name of the instance (can be "
                                         "specified in the settings)"))
    show(magenta("setup_environment") + ": " + yellow("if a full environment "
                                                      "configuration should be performed (default: False)"))
    show(magenta("requirements") + ": " + yellow("if requirements should be "
                                                 "installed (default: True)"))
    show(blue("locals_path") + ": " + yellow("path to local settings"))
    show(blue("branch") + ": " + yellow("repository branch to use"))
    show(blue("commit") + ": " + yellow("repository commit to use"))

def bounce_services(*args, **kwargs):
    """
    Restarts the services on HOST from the BOUNCE_SERVICES list of the settings.

    runs:
        sudo service X restart
    (where X is each member of the BOUNCE_SERVICES list)

    :restart_nginx=True will also restart nginx
    """
    if not env.deploy_settings.BOUNCE_SERVICES:
        return None

    STATUS = {'+': 'Running', '-': 'Stopped/Waiting', '?': 'Unknown'}
    BSOIR = env.deploy_settings.BOUNCE_SERVICES_ONLY_IF_RUNNING
    print cyan("Bouncing processes...{0}").format(
        "(BOUNCING_SERVICES_ONLY_IF_RUNNING)" if BSOIR else "")
    the_services = env.deploy_settings.BOUNCE_SERVICES
    print cyan(the_services)

    there = []
    not_there = []
    for service in env.deploy_settings.BOUNCE_SERVICES:
        status = sudo('service %s status' % service, quiet=True)
        if re.search(r'unrecognized service', status):
            not_there.append(service)
            continue
        if re.search(r'{} stop/waiting'.format(service), status):
            sglyph = '-'
        elif re.search(r'{} start/running'.format(service), status):
            sglyph = '+'
        else:
            sglyph = '?'
        there.append((sglyph, service))

    for status, service in there:
        print green("{0}: {1}".format(service, STATUS[status]))
        if status != '+' and BSOIR:
            print red("{} NOT bouncing".format(service))
            continue
        sudo('service %s restart' % service)

    for s in not_there:
        print magenta("{0} not found on {1}".format(s, env.deploy_settings.HOST))

    if bool_opt('restart_nginx', kwargs, default=False):
        restart_nginx()

def create_database():
    database_name = env.unique_identifier
    print magenta("Create database")
    if env.is_local:
        run_local('psql -U $PGUSER -d postgres -c "CREATE DATABASE %s;"' % database_name)
    else:
        user = env.unique_identifier
        database = env.unique_identifier
        allowed_chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
        password = ''.join(random.choice(allowed_chars) for i in range(10))
        run('psql -U $PGUSER -d postgres -c "CREATE USER %s WITH PASSWORD \'%s\';"' % (user, password))
        run('psql -U $PGUSER -d postgres -c "CREATE DATABASE %s;"' % database)
        run('psql -U $PGUSER -d postgres -c "GRANT ALL PRIVILEGES ON DATABASE %s to %s;"' % (database, user))
        run('psql -U $PGUSER -d postgres -c "GRANT %s to $PGUSER;"' % user)
        _add_to_dotenv('DATABASE_URL', 'postgres://%s:%s@localhost/%s' % (user, password, database))

def create_monit_file():
    require('virtualenv_root', provided_by=env.deployments)
    print magenta("Create Monit File")
    with cd('/home/www-data/etc/monit.d/'):
        run('echo "check process %s_gunicorn with pidfile /home/www-data/projects/%s/tmp/gunicorn.pid" > %s_gunicorn'
            % (env.unique_identifier, env.project, env.unique_identifier))
        run('echo "\t#!/bin/sh" >> %s_gunicorn' % env.unique_identifier)
        run("echo '\t%s' >> %s_gunicorn"
            % ('start program = "/home/www-data/projects/%s/startstop.sh start gunicorn"' % env.project,
               env.unique_identifier))
        run("echo '\t%s' >> %s_gunicorn"
            % ('stop program = "/home/www-data/projects/%s/startstop.sh stop gunicorn"' % env.project,
               env.unique_identifier))
        run('echo "\t%s" >> %s_gunicorn'
            % ("if failed unixsocket /home/www-data/projects/%s/tmp/gunicorn.sock then restart" % env.project,
               env.unique_identifier))
        run('echo "\tif 5 restarts within 5 cycles then timeout" >> %s_gunicorn' % env.unique_identifier)
        run('echo "\tif totalmemory > 200.0 MB for 5 cycles then alert" >> %s_gunicorn' % env.unique_identifier)
        run('echo "\tif totalmemory > 210.0 MB for 10 cycles then restart" >> %s_gunicorn' % env.unique_identifier)
        run('echo "\tif totalcpu > 50%% for 5 cycles then alert" >> %s_gunicorn' % env.unique_identifier)
        run('echo "\tif totalcpu > 50%% for 10 cycles then restart" >> %s_gunicorn' % env.unique_identifier)
    run('monit reload')

def bzr_wc_target_exists_plain_no_force():
    """
    Test working copy when target is an already existing plain directory and
    force was not specified.
    """
    test = 'bzr_wc_target_exists_plain_no_force'
    wt = '%s-test-%s' % (DIR, test)
    puts(magenta('Executing test: %s' % test))

    from fabric.api import run
    from fabtools.files import is_dir
    from fabtools import require

    run('mkdir %s' % wt)
    assert not is_dir(path.join(wt, '.bzr'))

    try:
        require.bazaar.working_copy(REMOTE_URL, wt)
    except SystemExit:
        pass
    else:
        assert False, "working_copy didn't raise exception"

    assert not is_dir(path.join(wt, '.bzr'))

def bzr_wc_target_exists_local_mods_no_force():
    """
    Test working copy when a target already exists and has local modifications
    but force was not specified.
    """
    test = 'bzr_wc_target_exists_local_mods_no_force'
    wt = '%s-test-%s' % (DIR, test)
    puts(magenta('Executing test: %s' % test))

    from fabric.api import cd, run
    from fabtools.files import is_dir
    from fabtools import require

    require.bazaar.working_copy(REMOTE_URL, wt)
    assert is_dir(wt)

    with cd(wt):
        assert run('bzr status') == ''
        run('echo "# a new comment" >> __init__.py')
        assert run('bzr status') != ''

    try:
        require.bazaar.working_copy(REMOTE_URL, wt)
    except SystemExit:
        pass
    else:
        assert False, "working_copy didn't raise exception"

def bzr_wc_target_exists_local_mods_force():
    """
    Test working copy when a target already exists and has local modifications
    and force was specified.
    """
    test = 'bzr_wc_target_exists_local_mods_force'
    wt = '%s-test-%s' % (DIR, test)
    puts(magenta('Executing test: %s' % test))

    from fabric.api import cd, run
    from fabtools.files import is_dir
    from fabtools import require

    require.bazaar.working_copy(REMOTE_URL, wt)
    assert is_dir(wt)

    with cd(wt):
        assert run('bzr status') == ''
        run('echo "# a new comment" >> __init__.py')
        assert run('bzr status') != ''

    require.bazaar.working_copy(REMOTE_URL, wt, force=True)
    assert run('bzr status %s' % wt) == ''

def smartputs(prefix):
    if env.host_string in env.roledefs['django']:
        sputs(prefix, green('[Django app server] [{}]'.format(env.host_string)))
    elif env.host_string in env.roledefs['java']:
        sputs(prefix, red('[Java app server] [{}]'.format(env.host_string)))
    else:
        sputs(prefix, magenta('🌵 [Unknown server type] [{}]'.format(env.host_string)))

def _get_github():
    login_or_token, password = _get_github_credentials()
    if env.tag_deploy_commits and not login_or_token:
        print(magenta("Warning: Creation of release tags is disabled. "
                      "Provide Github auth details to enable release tags."))
    return Github(login_or_token=login_or_token, password=password)

def deploy():
    '''Sync code from here to the servers'''
    global env
    global SITE_NAME

    # Two separate calculations because Mac has HOME=/Users/swaroop and
    # Linux has HOME=/home/swaroop and therefore cannot use the same dirname.
    local_dir = os.path.join(os.getenv('HOME'), 'web', SITE_NAME, 'private', SITE_NAME)
    remote_dir = os.path.join('/home', os.getlogin(), 'web', SITE_NAME, 'private', SITE_NAME)

    _transfer_files(local_dir, env.host + ':' + remote_dir, ssh_port=env.port)
    sudo('apache2ctl graceful')

    try:
        urllib2.urlopen('http://' + env.host_string)
    except urllib2.HTTPError as x:
        warn(colors.red("Failed! Code deployment was a disaster. "
                        "Apache is throwing {0}.".format(x)))
        showlogs()
        return

    puts(colors.magenta('Success! The {0} server has been updated.'.format(env.host_string)))

class ColorFormatter(logging.Formatter):
    """
    Format log message based on the message level
    http://stackoverflow.com/questions/1343227/can-pythons-logging-format-be-modified-depending-on-the-message-log-level
    """
    # Setup formatters for each of the levels
    err_fmt = red("ERR [%(filename)s(%(lineno)d)] %(msg)s")
    warn_fmt = magenta("WARN [%(filename)s(%(lineno)d)]: %(msg)s")
    dbg_fmt = yellow("DBG [%(filename)s]: %(msg)s")
    info_fmt = green("INFO: %(msg)s")

    def __init__(self, fmt="%(name)s %(levelname)s: %(msg)s"):
        logging.Formatter.__init__(self, fmt)

    def format(self, record):
        # Save the original format configured by the user
        # when the logger formatter was instantiated
        format_orig = self._fmt

        # Replace the original format with one customized by logging level
        if record.levelno == 10:    # DEBUG
            self._fmt = ColorFormatter.dbg_fmt
        elif record.levelno == 20:  # INFO
            self._fmt = ColorFormatter.info_fmt
        elif record.levelno == 30:  # WARN
            self._fmt = ColorFormatter.warn_fmt
        elif record.levelno == 40:  # ERROR
            self._fmt = ColorFormatter.err_fmt

        # Call the original formatter class to do the grunt work
        result = logging.Formatter.format(self, record)

        # Restore the original format configured by the user
        self._fmt = format_orig

        return result

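# A minimal wiring sketch for ColorFormatter using only the standard logging
# module; the logger name and helper name are arbitrary examples.
def _build_color_logger():
    logger = logging.getLogger('fabfile')
    handler = logging.StreamHandler()
    handler.setFormatter(ColorFormatter())
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    return logger
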
def configuration_deliver():
    print(magenta('Delivering Configuration'))
    path = env.real_fabfile
    put('{0}/configuration/configuration.tgz'.format(path), 'configuration.tgz')
    run('rm -rf ./configuration')
    run('tar -xzf ./configuration.tgz')