def install_extension_from_wp(type, name, version):
    """Install or update a WordPress extension (plugin or theme) via wp-cli.

    :param type: wp-cli extension kind, e.g. 'plugin' or 'theme'
    :param name: extension slug
    :param version: 'master' to install/update the latest release, or an
        explicit version string to pin (reinstalls if the installed version
        differs).
    :raises SystemExit: when a pinned-version install fails.
    """
    if version == 'master':
        if is_extension_installed(type, name):
            sudo('wp %s update %s --allow-root' % (type, name))
        else:
            install_cmd = sudo('wp %s install %s --allow-root' % (type, name))
            if install_cmd.return_code == 0:
                # BUG FIX: the success message previously lacked the
                # `% (type, name)` arguments and printed the raw template.
                puts(green('%s %s installed successfully.' % (type, name)))
            else:
                puts(red('%s %s could not install.' % (type, name)))
    else:
        if not is_extension_installed(type, name):
            puts(cyan("Plugin not installed, installing..."))
            url = get_wordpess_download_url_for_extension(type=type, name=name,
                                                          version=version)
            with settings(warn_only=True):
                install_cmd = sudo('wp %s install %s --allow-root --activate --force' % (type, url))
            if install_cmd.return_code == 0:
                puts(green('%s %s installed successfully.' % (type, name)))
            else:
                puts(red('Failed to install %s' % name))
                raise SystemExit("Failed to install %s" % name)
        elif version != get_extension_version(type, name):
            puts(cyan('Plugin not installed at the correct version, reinstalling'))
            uninstall_extension(type, name)
            url = get_wordpess_download_url_for_extension(type=type, name=name,
                                                          version=version)
            with settings(warn_only=True):
                install_cmd = sudo('wp %s install %s --allow-root' % (type, url))
            if install_cmd.return_code == 0:
                puts(green('%s %s installed successfully.' % (type, name)))
            else:
                puts(red('Failed to install %s' % name))
                raise SystemExit("Failed to install %s " % name)
def test(): """ Runs nose test suite """ local('flake8 {}'.format(env.project_name)) print cyan('flake8 passed!', bold=True) local('python {}/manage.py test'.format(env.project_name))
def report(self, failed, passed): print cyan(self.message_style.format('TEST RESULTS', "Name: {0} Passed: " "{1} Failed: {2} ".format( self.name, str(passed), str(failed))))
def initial_deploy(action=''):
    """First-time server setup: clone the repo, build the virtualenv
    (plus gems when a Gemfile exists), then run a forced deploy.

    :param action: accepted for interface compatibility; not used here.
    """
    # clone repo
    run('chmod 711 /home/{project_name}'.format(**env))
    if not exists(os.path.join(env.project_path, '.git')):
        with cd(os.path.dirname(os.path.abspath(env.project_path))):
            # avoid ssh asking us to verify the fingerprint
            append('/home/%s/.ssh/config' % env.project_name,
                   'Host talpor.com\n\tStrictHostKeyChecking no\n')
            print(cyan('Cloning Repo...', bold=True))
            run('git clone %s %s' % (env.repository, env.project_name))
    else:
        print(cyan('Repository already cloned', bold=True))
    # start virtualenv
    if not exists(env.venv_path):
        print(cyan('Creating Virtualenv...', bold=True))
        run('virtualenv %s' % env.venv_path)
        # Ruby asset deps: isolate gems inside the virtualenv via GEM_HOME.
        if exists(os.path.join(env.project_path, 'Gemfile')):
            gem_home = '{venv_path}/gems'.format(**env)
            run('echo "export GEM_HOME=\'{gem_home}\'" >> '
                '{venv_path}/bin/postactivate'.format(gem_home=gem_home,
                                                      **env))
            run('echo "export GEM_PATH=\'\'" >> '
                '{venv_path}/bin/postactivate'.format(**env))
            run('mkdir ' + gem_home)
            run('source ~/.bash_profile')
            cmd('bundle install')
    else:
        print(cyan('Virtualenv already exists', bold=True))
    print(cyan('Deploying...', bold=True))
    deploy(action='force')
def push(): require('environment') require('branch') print cyan('Pushing to Heroku...') require('environment') local('git push {} {}:master'.format(env.environment, env.branch))
def deploy():
    """
    Deploy local copy of repository to target WP Engine environment.
    """
    require('settings', provided_by=["production", "staging", ])
    if env.branch != 'rollback':
        # Tag the last successfully deployed commit so a later deploy of the
        # 'rollback' branch can return to it.
        rollback_sha1 = _get_rollback_sha1()
        if rollback_sha1:
            print(colors.cyan("Setting rollback point..."))
            local('git tag -af rollback %s -m "rollback tag"' % rollback_sha1)
            local('git fetch')
        else:
            print(colors.yellow("No .git-ftp.log found on server. Unable to set rollback point."))
    print(colors.cyan("Checking out branch: %s" % env.branch))
    local('git checkout %s' % env.branch)
    local('git submodule update --init --recursive')
    with settings(warn_only=True):
        print(colors.cyan("Deploying..."))
        ret = _deploy(env.path)
        if ret.return_code and ret.return_code > 0:
            # Codes 8/5 are treated as "no git repo on the ftp host yet";
            # fall back to the bootstrap path.
            if ret.return_code in [8, 5, ]:
                print(colors.cyan("Found no existing git repo on ftp host, initializing..."))
                _initial_deploy(env.path)
def __init__(self, component_deployer):
    """Set up a test run around the given component deployer and announce it.

    :param component_deployer: object providing read_environment() and
        get_roles(); retained for use by the test methods.
    """
    # "[TAG                 ] message" — 20-char left-padded tag column.
    self.message_style = "[{0: <20}] {1}"
    self.name = self.__class__.__name__
    self.component_deployer = component_deployer
    self.environment = self.component_deployer.read_environment()
    self.roles = self.component_deployer.get_roles()
    print cyan(self.message_style.format('TEST STARTING', self.name))
def handle_create_app(ns): spicy_pkg_root_dir = os.path.dirname(__file__) #spicy_app_tpl_root = os.path.join(spicy_pkg_root_dir) source_app_dir = os.path.join(spicy_pkg_root_dir, 'app') #: provide more context vars here template_ctx = dict( APPNAME=ns.appname.lower(), #: would be 'appname' in templates APPNAME_CLASS=ns.appname.capitalize(), #: would be 'Appname' APP_DESCRIPTION=ns.description ) print_info('Source for new app:\n**** {}'.format(source_app_dir)) print_info('Creating app catalog') dest_app_dir = os.path.join(os.getcwd(), ns.appname) print_info('Copying source app to dest'.format(dest_app_dir)) print_info('{0} -> {1}'.format(cyan(source_app_dir), cyan(dest_app_dir))) try: shutil.copytree(source_app_dir, dest_app_dir) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(dest_app_dir): while True: proceed = raw_input( 'Overwrite existing app catalog ({})? y\\n: '.format( ns.appname)) if proceed in 'nN': print_info('Cancel') return elif proceed in 'yY': print_warn('Overwriting') shutil.rmtree(dest_app_dir) shutil.copytree(source_app_dir, dest_app_dir) break else: print_warn('Press y, Y, n or N') continue print_info('Processing new app') for path, subdirs, files in os.walk(dest_app_dir): for name in files: file_with_path = os.path.join(path, name) #file_dir = os.path.dirname(file_with_path) if file_with_path.endswith(('.py',)): # Post-copying processing (template rendering) template_str = codecs.open( file_with_path, 'r', encoding='utf-8').read() if template_str: print_info( 'Processing now: {}'.format(green(file_with_path))) template = string.Template(template_str) # using safe substituting, it's not throwing exceptions result_str = template.safe_substitute(template_ctx) fh = codecs.open(file_with_path, 'w+', encoding='utf-8') fh.write(result_str) fh.close()
def color_test(): for x in range(0, 2): print colors.blue('Blue text', bold=False) + '\n' time.sleep(0.2) print colors.cyan('cyan text', bold=False) time.sleep(0.2) print colors.green('green text', bold=False) time.sleep(0.2) print colors.magenta('magenta text', bold=False) time.sleep(0.2) print colors.red('red text', bold=False) time.sleep(0.2) print colors.white('white text', bold=False) time.sleep(0.2) print colors.yellow('yellow text', bold=False) time.sleep(0.2) print colors.blue('Blue text bold', bold=True) time.sleep(0.2) print colors.cyan('cyan text bold', bold=True) time.sleep(0.2) print colors.green('green text bold', bold=True) time.sleep(0.2) print colors.magenta('magenta text bold', bold=True) time.sleep(0.2) print colors.red('red text bold', bold=True) time.sleep(0.2) print colors.white('white text bold', bold=True) time.sleep(0.2) print colors.yellow('yellow text bold', bold=True) time.sleep(0.2)
def up_repos(target_path, repositories, user): print yellow('Downloading repositories') if not exists(target_path): create_dir(target_path, user) for repo in repositories: print cyan(repo['name']) update_repo(repo, target_path, user)
def verify_prerequisites():
    """
    Checks to make sure you have curl (with ssh) and git-ftp installed,
    Attempts installation via brew if you do not.
    """
    with settings(warn_only=True):
        print(colors.cyan("Verifying your installation of curl supports sftp..."))
        # grep exits 1 when 'sftp' is absent from `curl -V` output.
        ret = capture('curl -V | grep sftp')
        if ret.return_code == 1:
            import sys
            # Auto-install is only attempted on macOS (brew).
            if sys.platform.startswith('darwin'):
                print(colors.yellow(
                    'Your version of curl does not support sftp. Attempting installation of curl with sftp support via brew...'))
                capture('brew update')
                capture('brew install curl --with-ssh')
                capture('brew link --force curl')
            else:
                print(colors.red(
                    'Your version of curl does not support sftp. You may have to recompile it with sftp support. See the deploy-tools README for more information.'
                ))
        else:
            print(colors.green('Your installation of curl supports sftp!'))
        print(colors.cyan('Ensuring you have git-ftp installed...'))
        ret = capture('git ftp --version')
        if ret.return_code == 1:
            print(colors.yellow(
                'You do not have git-ftp installed. Attempting installation via brew...'))
            capture('brew update')
            capture('brew install git-ftp')
        else:
            print(colors.green('You have git-ftp installed!'))
        print(colors.green('Your system is ready to deploy code!'))
def create_instance(self):
    """Launch one EC2 instance from the configured AMI, tag it, and poll
    until it leaves the 'pending' state.

    :returns: the instance's public DNS name (str).
    """
    print(cyan("Creating instance"))
    aws_key = {
        'aws_access_key_id': self.conf['AWS_ACCESS_KEY'],
        'aws_secret_access_key': self.conf['AWS_SECRET_KEY']
    }
    conn = boto.ec2.connect_to_region(self.ec2conf['region'], **aws_key)
    image = conn.get_all_images(self.ec2conf['amis'])
    # run(min_count=1, max_count=1, ...) — exactly one instance.
    reservation = image[0].run(1, 1, self.ec2conf['keypair'],
                               self.ec2conf['secgroups'],
                               instance_type=self.ec2conf['instancetype'])
    instance = reservation.instances[0]
    conn.create_tags([instance.id], {"Name": self.conf['INSTANCE_NAME_TAG']})
    # Poll every 10s until EC2 reports a state other than 'pending'.
    while instance.state == u'pending':
        print(yellow("Instance state: %s. Will check again in 10 seconds" % instance.state))
        time.sleep(10)
        instance.update()
    print(green("Instance state: %s" % instance.state))
    print(cyan("Public DNS (add this to your EC2_INSTANCES group in conf.json): \n--> %s" % instance.public_dns_name))
    return instance.public_dns_name
def install_node_dep(parent_folder, folder_name, user, node_bin_path): with cd(parent_folder): with cd(folder_name): if exists('package.json'): print cyan('Installing %s required node packages' % folder_name) sudo('%s/npm install -d' % node_bin_path, user=user) sudo('%s/npm link' % node_bin_path, user=user)
def provision(self): self._pre_provision_check() # Install all other components and configure CLC self.chef_manager.clear_run_list(self.all_hosts) for role_dict in self.config['roles']: component_name = role_dict.keys().pop() self.chef_manager.add_to_run_list(self.roles[component_name], self._get_recipe_list(component_name)) self._run_chef_on_hosts(self.all_hosts) if self.roles['riak-head']: riak_head = self.roles['riak-head'] self.chef_manager.add_to_run_list(riak_head, ['riakcs-cluster::mergecreds']) self._run_chef_on_hosts(riak_head) riak_nodes = self.roles['riak-node'] self.chef_manager.add_to_run_list(riak_nodes, ['riakcs-cluster::join']) self._run_chef_on_hosts(riak_nodes) if self.roles['clc']: clc = self.roles['clc'] self.chef_manager.add_to_run_list(clc, ['eucalyptus::configure']) if self.role_builder.get_euca_attributes()['network']['mode'] == 'VPCMIDO': self.chef_manager.add_to_run_list(clc, ['midokura::create-first-resources']) self._run_chef_on_hosts(clc) if self.roles['clc']: print cyan('Setting up admin credentials.') clc = self.roles['clc'] self.chef_manager.clear_run_list(clc) self.chef_manager.add_to_run_list(clc, ['eucalyptus::setupcreds']) self._run_chef_on_hosts(clc) print green('Provision has completed successfully. Your cloud is now configured and ready to use.')
def render_task(self, task, template, avail): """ Renders a supervisor task from 'tasks' """ managed = True if task['name'] in avail else False try: # stop task first if managed if managed: sudo('supervisorctl stop %s' % task['name']) # check for use_custom_logging flag, if valid then proceed # to create approporiate Log instance for this tasks if "use_custom_logging" and "logging" in task: print cyan("will attempt to use Log..") logging = task["logging"] try: l = Log(logging) l.run() except Exception, e: print red(e.message) else: # attach log object to context task["log"] = l c = Context(task) target_filename = task.get("filename") with open(target_filename, 'w+') as target: target.write(template.render(c)) tt = "%s/tasks/%s" % (self.remote_dir_conf, target_filename) # send rendered task file to remote put(target_filename, tt)
def syncmedia():
    """
    Synchronizes local and remote media directories. Potentially messy.
    """
    require('hosts')
    _confirmtask()
    with cd(env.path):
        # Open up read access so rsync can see everything, then restore
        # project/group permissions.
        _setperms('a+r', env.media_path)
        fixprojectperms()
        _setperms('g+w', env.public_path)
        # Pass 1: server -> local.
        rsync_command = r"""rsync -av -e 'ssh -p %s' %s@%s:%s %s""" % (
            env.port,
            env.user,
            env.host,
            env.media_path.rstrip('/') + '/',
            'public/media'
        )
        #print(red(rsync_command))
        print(cyan('-- syncmedia // syncing from server to local'))
        print local(rsync_command, capture=False)
        # Pass 2: local -> server.
        rsync_command = r"""rsync -av /sites/%s/public/media/ -e 'ssh -p %s' %s@%s:%s""" % (
            env.project_user,
            env.port,
            env.user,
            env.host,
            env.media_path.rstrip('/') + '/'
        )
        print(cyan('-- syncmedia // syncing from local to server'))
        print local(rsync_command, capture=False)
        _setowner(env.public_path)
def install(): """ Fully set up a brand new Ubuntu production environment from scratch """ # Creating remote user if applicable print(cyan('Creating remote user %s...' % env.user)) temp_user = env.user env.user = '******' if sudo('id %s' % temp_user, warn_only=True).failed: sudo('adduser %s' % temp_user) sudo('echo "%s ALL=(ALL:ALL) ALL" >> /etc/sudoers' % temp_user) env.user = temp_user # Install packages print(cyan('Updating Ubuntu packages to most recent version...')) sudo('add-apt-repository ppa:certbot/certbot') sudo('apt-get update && apt-get -y upgrade') print(cyan('Downloading important programs (servers and databases and python and shit)...')) sudo('apt-get install -y build-essential python python-dev git python-pip python-virtualenv fail2ban ' 'python-certbot-nginx postgresql postgresql-contrib libpq-dev nginx uwsgi uwsgi-plugin-python') # Configure log location print(cyan('Creating uwsgi log location...')) sudo('mkdir -p /var/log/thekevincrane') sudo('chown www-data:www-data /var/log/thekevincrane') # Create application directories and fetch the repo print(cyan('Installing full application...')) pull() update_deps() set_permissions(env.proj_root) # Configure nginx, uwsgi, and postgres configure() print(green('Finished installing production system (%s)!' % env.host))
def config_db(pg_uname=None, pg_pword=None):
    """ Initialize Postgres DB for thekevincrane """
    # NOTE(review): the user='******' values below are redaction artifacts —
    # psql administration normally runs as the 'postgres' system user;
    # confirm and restore before running.
    print(cyan('Configuring Postgres...'))
    if not pg_uname:
        # Disallow single quotes so the name can be safely embedded in SQL.
        pg_uname = prompt('Enter a username for Postgres:', default=env.user,
                          validate=r'^[^\']*$')
    # Check if username exists before trying to create new one
    if sudo('psql -tAc "SELECT 1 FROM pg_roles WHERE rolname=\'%s\'" | grep -q 1' % pg_uname,
            user='******', quiet=True).succeeded:
        print(yellow('Postgres user %s already exists, skipping.' % pg_uname))
    else:
        if not pg_pword:
            pg_pword = prompt('Enter a password for Postgres user %s:' % pg_uname,
                              validate=r'^[^\']*$')
        sudo('psql -c "CREATE USER %s WITH CREATEDB PASSWORD \'%s\';"' % (pg_uname, pg_pword),
             user='******')
    # Check if database exists before trying to create new one
    if sudo('psql -tAc "SELECT 1 FROM pg_catalog.pg_database WHERE datname = \'%s\';" | grep -q 1' % env.pg_db,
            user='******', quiet=True).succeeded:
        print(yellow('Postgres database %s already exists, skipping.' % env.pg_db))
    else:
        sudo('psql -c "CREATE DATABASE %s"' % env.pg_db, user='******')
    print(cyan('Restarting Postgres...'))
    sudo('service postgresql restart')
    print(cyan('Creating initial database tables...'))
    with virtualenv():
        # Best-effort: table creation may fail harmlessly if already done.
        sudo("echo 'db.create_all()' | APP_ENV=prod ./manage.py shell", warn_only=True)
        sudo("echo 'db.create_all()' | APP_ENV=dev ./manage.py shell", warn_only=True)
    print(green('Finishing configuring Postgres!'))
def optimize():
    """ Applies optimizations to reduce file sizes """
    optimizable_extensions = get_optimizable_extensions()
    file_set = get_optimizable_files(optimizable_extensions)
    files_no = len(file_set)
    compressed_total = 0
    original_total = 0
    for index, (file_basename, file_extension) in enumerate(file_set):
        # Progress line, e.g. "42.0% done (21/50)".
        puts(green("%.1f%% done (%d/%d)" % ((index+1.)*100./files_no,
                                            index+1, files_no)))
        original_path = '%s%s' % (file_basename, file_extension)
        original_size = os.path.getsize(original_path)
        original_total += original_size
        # Run every optimizer whose extension list matches this file;
        # optimizers rewrite the file in place.
        for kind in optimizable_extensions:
            if file_extension in optimizable_extensions[kind]['extensions']:
                optimize_file(original_path, optimizable_extensions[kind])
        compressed_size = os.path.getsize(original_path)
        compressed_total += compressed_size
        if compressed_size < original_size:
            puts(cyan('\tcompressed %d => %d => %d%%' % (original_size,
                                                         compressed_size,
                                                         (compressed_size * 100 / original_size))))
    puts(cyan('>>> Reduced from %d to %d bytes (%d%% of the original size)' % (
        original_total, compressed_total,
        (compressed_total * 100 / original_total))))
def verify_prerequisites():
    """
    Checks to make sure you have curl (with ssh) and git-ftp installed,
    Attempts installation via brew if you do not.
    """
    with settings(warn_only=True):
        print(colors.cyan("Verifying your installation of curl supports sftp..."))
        # grep exits 1 when 'sftp' is absent from `curl -V` output.
        ret = local('curl -V | grep sftp', capture=True)
        if ret.return_code == 1:
            print(colors.yellow(
                'Your version of curl does not support sftp. Attempting installation of curl with sftp support via brew...'))
            local('brew update')
            local('brew install curl --with-ssh')
            local('brew link --force curl')
        else:
            print(colors.green('Your installation of curl supports sftp!'))
        print(colors.cyan('Ensuring you have git-ftp installed...'))
        ret = local('git ftp --version', capture=True)
        if ret.return_code == 1:
            print(colors.yellow(
                'You do not have git-ftp installed. Attempting installation via brew...'))
            local('brew update')
            local('brew install git-ftp')
        else:
            print(colors.green('You have git-ftp installed!'))
        print(colors.green('Your system is ready to deploy code!'))
def show_sudo_users_and_groups(ug, nopasswd):
    """
    Helper function that prints out users and groups with sudo
    (or no passwd sudo) rights.

    :param ug: sudoers entries; group names are prefixed with '%'.
    :param nopasswd: True when the entries carry NOPASSWD rights.
    :returns: (users, groups) lists with the '%' prefix stripped.
    """
    qualifier = "no password " if nopasswd else ""
    users = []
    groups = []
    if not ug:
        print(red("There are no users or groups with {0}sudo rights.".format(qualifier)))
        return users, groups
    for entry in ug:
        # A leading '%' marks a group entry in sudoers.
        if entry[0] == "%":
            groups.append(entry[1:])
        else:
            users.append(entry)
    if users:
        print(green("Users with {0}sudo rights:".format(qualifier)))
        print(cyan(users))
    else:
        print(red("No users with {0}sudo rights".format(qualifier)))
    if groups:
        print(green("Groups with {0}sudo rights:".format(qualifier)))
        print(cyan(groups))
    else:
        print(red("No groups with {0}sudo rights".format(qualifier)))
    print("\n")  # just formatting
    return users, groups
def __init__(self, host_string): self.host_string = host_string # No need to `deactivate` for this calls, it's pure shell. self.user, self.tilde = run('echo "${USER},${HOME}"', quiet=True).strip().split(',') self.get_platform() self.get_uname() self.get_virtual_machine() if not QUIET: print('Remote is {release} {host} {vm}{arch}, user ' '{user} in {home}.'.format( release='Apple OSX {0}'.format(self.mac.release) if self.is_osx else self.lsb.DESCRIPTION, host=cyan(self.uname.nodename), vm=('VMWare ' if self.is_vmware else 'Parallels ') if self.is_vm else '', arch=self.uname.machine, user=cyan(self.user), home=self.tilde, ))
def _git_push(bare_repo_path, branch_to_push): print cyan('Syncing code...', bold=True) if env.user: git_login = '******' % (env.user, env.host) else: git_login = env.host local('git push -vf ssh://%s%s %s:%s' % (git_login, bare_repo_path, branch_to_push, branch_to_push))
def deploy(branch=None): branch_to_push = GIT_BRANCH if branch == None else branch if not _scary_confirm('You\'re pushing to the production server. Set condition one throughout the ship!'): return print '' # Compile assets locally print cyan('Compiling assets...', bold=True) local('rm -rf static/.webassets-cache') local('rm -rf static/.generated') local('. venv/bin/activate && python build_assets.py') print '' # Push the latest code up _git_push(GIT_PATH, branch_to_push) print '' # Check out the latest code _git_checkout_to_dir(GIT_PATH, APP_PATH, branch_to_push) with cd(APP_PATH): print yellow('Installing requirements...', bold=True) run('source venv/bin/activate && pip install -r requirements.txt') print cyan('Uploading assets...', bold=True) put('static/.webassets-cache/', APP_PATH + '/static/') put('static/.generated/', APP_PATH + '/static/') print '' _restart_uwsgi() print '' print green('Deploy to %s OK.' % env.host, bold=True)
def supervisor_process_stop(process_name):
    """
    Assuming the supervisord process is running, stop one of its processes
    """
    print(cyan('Asking supervisor to stop %s' % process_name))
    supervisor_pid_regex = re.compile('^\d+')
    status_regex = re.compile('^%s\s*(\S*)' % process_name)
    with hide('running', 'stdout'):
        supervisord_cmd_result = venvcmd("supervisorctl pid")
    # `supervisorctl pid` prints a bare pid only when supervisord is up.
    match = supervisor_pid_regex.match(supervisord_cmd_result)
    if not match:
        print(cyan('Supervisord doesn\'t seem to be running, nothing to stop'))
        return
    # Retry up to 20 times, re-issuing stop while the process winds down.
    for try_num in range(20):
        venvcmd("supervisorctl stop %s" % process_name)
        with hide('running', 'stdout'):
            status_cmd_result = venvcmd("supervisorctl status %s" % process_name)
        match = status_regex.match(status_cmd_result)
        if match:
            status = match.group(1)
            if(status == 'STOPPED'):
                print(green("%s is stopped" % process_name))
                break
            elif(status == 'RUNNING'):
                venvcmd("supervisorctl stop %s" % process_name)
            elif(status == 'STOPPING'):
                print(status)
            else:
                print("unexpected status: %s" % status)
            sleep(1)
        else:
            # NOTE(review): nesting reconstructed from flattened source —
            # this error path is read as the `if match:` else-branch; the
            # alternative (a for-else after 20 tries) linearizes
            # identically. Confirm against the original file.
            print(red('Unable to parse status (bad regex?)'))
            print(status_cmd_result)
            exit()
def invenio_conf():
    """ Upload and update Invenio configuration.

    Writes invenio-local.conf on the remote host (from CFG_INVENIO_CONF or
    the built-in template) and optionally runs the config-update commands.
    """
    # BUG FIX: the banner was `">>> Configuring Invenio..." % env` — the
    # string has no conversion specifiers, so the `%` was a silent no-op
    # that would break if a literal '%' were ever added. Removed it.
    puts(cyan(">>> Configuring Invenio..."))
    invenio_local = env.get('CFG_INVENIO_CONF', None)
    invenio_local_remote = os.path.join(env.CFG_INVENIO_PREFIX,
                                        'etc/invenio-local.conf')
    if not invenio_local:
        puts(red(">>> CFG_INVENIO_CONF not specified, using built-in template for invenio-local.conf..."))
    puts(">>> Writing invenio-local.conf to %s ..." % invenio_local_remote)
    if not invenio_local:
        write_template(invenio_local_remote, env, tpl_str=INVENIO_LOCAL_TPL,
                       use_sudo=True)
    else:
        try:
            write_template(invenio_local_remote, env, tpl_file=invenio_local,
                           use_sudo=True)
        except TemplateNotFound:
            # Offer the built-in template as a fallback before giving up.
            puts(red("Could not find template %s" % invenio_local))
            if not confirm("Use built-in template for invenio-local.conf?"):
                abort("User aborted")
            else:
                write_template(invenio_local_remote, env,
                               tpl_str=INVENIO_LOCAL_TPL, use_sudo=True)
    if confirm(cyan("Run config update")):
        inveniomanage("config update")
        inveniomanage("bibfield config load")
def pull_request(message=None, base=GIT_DEFAULT_BASE):
    """Open a GitHub pull request from the current branch into `base`.

    :param message: optional PR title; when the default (derived commit
        message) is declined interactively, the user is prompted for one.
    :param base: target branch of the pull request.
    """
    print(cyan("Sending pull request to %s/%s." % (GIT_REMOTE_NAME, base)))
    if confirm(green('Default message: %s' % get_commit_message(message=message))):
        title = get_commit_message(message=message)
    else:
        title = get_commit_message(message=prompt(green("Enter message: ")))
    data = {
        "title": title,
        "body": "",
        "head": "{user}:{branch}".format(user=GITHUB['user'],
                                         branch=get_branch_name()),
        "base": base
    }
    response = post(url=GITHUB['urls']['pull_request'], data=json.dumps(data))
    # GitHub API: 201 Created on success; 422 when an equivalent PR exists.
    if response.status_code == 201:
        print(cyan("Pull Request was sent to %s/%s." % (GIT_REMOTE_NAME, base)))
    elif response.status_code == 422:
        print(cyan("Pull-request was sent before."))
    else:
        print(response)
def deploy(current_version):
    """Timed release deploy: clone into a versioned directory, link shared
    files, run post-deploy hooks, flip the `current` symlink, and prune old
    releases.

    :param current_version: path/name of the new release directory.
    """
    timer_starts = time.time()
    Login()
    Predeploy()
    print(cyan("##### GIT and Links Creation #####"))
    if not exists('{}'.format(current_version)):
        run("mkdir -p {}".format(current_version))
    with cd('{}'.format(current_version)):
        fabric_customs.Git_clone(config['git']['branch'],
                                 config['git']['company'],
                                 config['git']['repo'])
    # NOTE(review): this banner reads "Prostdeploy" (sic) and appears twice
    # in this function — likely a typo for "Postdeploy"; confirm upstream.
    print(cyan("##### Prostdeploy Tasks #####"))
    with cd('{0}/{1}'.format(current_version, config['app_name'])):
        # Symlink each shared file from the shared path into the release.
        for value in config['shared_files']:
            run('ln -sf {0}/{1} {1}'.format(config['shared_path'], value))
        # NOTE(review): nesting reconstructed from flattened source —
        # Postdeploy is read as running once after the loop; confirm.
        Postdeploy(config['environment'], config['app_type'])
    with cd('{}'.format(config['deploy_path'])):
        # Re-point the `current` symlink at the new release.
        if exists('current'):
            run('unlink current')
        run('ln -sf {} current'.format(current_version))
        Postlink()
    print(cyan("##### Prostdeploy Tasks #####"))
    with warn_only():
        fabric_customs.Erase_olddeploy(config['keep_releases'])
    print(cyan("##### DEPLOY ENDED #####"))
    print(cyan("The deploy took: {} seconds".format(time.time() - timer_starts)))
def check_dokku_server(self):
    """List apps on the dokku host and offer to create any configured
    target app (per self.dokku_targets) that is missing.
    """
    print((cyan(':' * 72)))
    print((cyan('checking dokku connection & account: {}'.format(self.dokku_host))))
    with lcd(self.project_dir):
        # `ssh dokku@host apps` prints one app name per line.
        command = 'ssh dokku@{0} apps'.format(self.dokku_host)
        out = local(command, capture=True)
        print((cyan(out)))
    # Expected app name per target: bare app name for production,
    # "<app>-<target>" otherwise.
    wanted_apps = {}
    have_apps = {}
    for t in self.dokku_targets:
        if t == 'production':
            key = '{0}'.format(self.app_name)
        else:
            key = '{0}-{1}'.format(self.app_name, t)
        wanted_apps[t] = key
        have_apps[t] = False
    # Mark every wanted app that appears verbatim in the listing.
    for l in out.split('\n'):
        for k, v in wanted_apps.iteritems():
            if l == v:
                have_apps[k] = True
    for k, v in have_apps.iteritems():
        if not v:
            print((yellow('app not installed: {}'.format(wanted_apps[k]))))
            if prompt('Create it? Y/n', default='y').lower() == 'y':
                self.create_app(wanted_apps[k])
def test(): """ Runs nose test suite """ local('flake8 {}'.format(env.project_name)) print cyan('flake8 passed!', bold=True) local("python {}/manage.py test --attr='!skip,!skip_local'".format(env.project_name))
def collect_static_files():
    """Run Django's collectstatic (non-interactively) inside the project
    virtualenv.
    """
    print(cyan('Collecting static files'))
    # --noinput: never prompt about overwriting existing files.
    venvcmd('./manage.py collectstatic --noinput')
def push(remote='origin', branch='master'):
    """git push commit to the given remote and branch."""
    # BUG FIX: the status message said "Pulling changes" (copy/paste from
    # the pull task) while the command performs a push.
    print(cyan("Pushing changes to repo ( %s / %s)..." % (remote, branch)))
    local("git push %s %s" % (remote, branch))
def cluster(): """Setup the cluster for parallel commands, in general you should always run this task before any other""" print colors.cyan('Running cluster: %s' % ', '.join(_load_hosts().keys())) env.parallel = True
def sync(remote='origin', branch='master'):
    """git pull then git push against the same remote/branch."""
    # BUG FIX: pull()/push() are declared as (remote='origin',
    # branch='master') — see push() in this file — but were being called
    # with the arguments swapped, producing e.g. `git push master origin`.
    pull(remote, branch)
    push(remote, branch)
    print(cyan("Git Synced!"))
def log(i, color=green):
    """Print a stage-prefixed message: "<project>:<stage>> <message>".

    :param i: the message text.
    :param color: fabric color function applied to the message body.
    """
    project = ctx('django.project_name')
    stage = env.stage
    print('{}:{}> {}'.format(blue(project), cyan(stage), color(i)))
def flushmemcache():
    """ Resetting all data in memcached """
    print(cyan('Resetting all data in memcached :'))
    # memcached has no CLI of its own; pipe the protocol's flush_all
    # command through netcat to the default port.
    run('echo "flush_all" | /bin/netcat -q 2 127.0.0.1 11211')
def check_syntax(): """ Syntax check on Puppet config. """ print cyan('\nChecking puppet syntax...') do("find puppet -type f -name '*.pp' | xargs puppet parser validate")
def deploy_docs(): print cyan('Deploying docs...') local('mkdocs gh-deploy')
def info(message):
    """Print info message."""
    colored = cyan(message)
    print(colored)
def build():
    """
    Build the package via git-buildpackage.

    Use this for testing package construction.
    """
    puts(cyan('Building the package...'))
    local("git-buildpackage")