def cleanup_dumps_instance(args, name): print "" print "Clean up dumps for %s." % name instance = args.get_instance(name) config = instance.config dsn = parse_dsn(config['dsn']) host = dsn['host'] ls = shell_capture('ssh %s ls backup' % host) fnames = [name for name in ls.split() if name.startswith(dsn['dbname']) and name.endswith('.dump')] if len(fnames) <= args.keep: print "Nothing to remove." return fnames.sort() trash = fnames[:-args.keep] print "The following dump files will be deleted:" for fname in trash: print "\t%s" % fname print "" answer = '' while answer not in ('y', 'n', 'yes', 'no'): answer = raw_input("Delete these folders? (y or n) ").lower() if answer in ('y', 'yes'): for fname in trash: shell("ssh %s rm backup/%s" % (host, fname)) count = len(trash) if count == 1: print "One file deleted." else: print "%d files deleted." % count else: print "No files deleted."
def _require_externals(*exes):
    """Warn about each listed external program that cannot be found on PATH.

    Purely advisory: missing programs produce log warnings, never an error.
    """
    for exe in exes:
        found = shell_capture('which %s' % exe).strip()
        if found and os.path.exists(found):
            continue
        log.warn("Missing external program: %s", exe)
        log.warn("Some functionality may not work.")
def upgrade(args):
    """Build the next release next to the current one and switch over to it.

    Steps: check out the next build (svn or git), run virtualenv/bootstrap/
    buildout in it, back up and evolve the instance databases if the new
    code requires an evolution step, repoint the ``current`` symlink, then
    shut down the old supervisor and start the new one.

    Must be run from the current build; errors out (via ``args.parser``)
    otherwise, or if the next build directory already exists.
    """
    # Calculate paths and do some sanity checking
    _get_paths(args)
    if args.this_build != args.current_build:
        args.parser.error("Upgrade must be run from current build.")
    if os.path.exists(args.next_build):
        args.parser.error("Next build directory already exists: %s" %
                          args.next_build)

    # Check out the next build and run the buildout.  svn wins if both
    # svn_url and git_url are configured.
    svn_url = args.get_setting('svn_url', None)
    if svn_url:
        shell('svn co %s %s' % (svn_url, args.next_build))
    else:
        git_url = args.get_setting('git_url')
        branch = args.get_setting('git_branch', 'master')
        shell('git clone --branch %s %s %s' % (
            branch, git_url, args.next_build))
    os.chdir(args.next_build)
    venv_cmd = 'virtualenv -p python2.6 --no-site-packages .'
    venv_cmd = args.get_setting('build.virtualenv', venv_cmd)
    shell(venv_cmd)
    shell('bin/python bootstrap.py')
    shell('bin/buildout')

    # See whether this update requires an evolution step.  If upgrade requires
    # an evolution step, we make backups of the instance databases before
    # running evolve.
    evolve_output = shell_capture('bin/karlserve evolve')
    # NOTE(review): reading "Not evolving" in the output as "evolution is
    # required" looks inverted at first glance; this presumably matches the
    # exact wording karlserve prints when there are pending-but-unapplied
    # steps -- confirm against karlserve's evolve output before changing.
    needs_evolution = 'Not evolving' in evolve_output
    if needs_evolution:
        log.info("Evolution required.")

        # Put current build into readonly mode so the dumps are consistent.
        os.chdir(args.current_build)
        set_mode('readonly')

        # Dump each instance database to a timestamped file on its db host.
        for name in args.instances:
            instance = args.get_instance(name)
            dbargs = parse_dsn(instance.config['dsn'])
            dumpfile = 'backup/%s-%s.dump' % (dbargs['dbname'],
                datetime.datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
            shell('ssh %s pg_dump -h localhost -U %s -f %s -F c -Z 0 %s' %
                  (dbargs['host'], dbargs['user'], dumpfile,
                   dbargs['dbname']))

        # Run evolution step in next build
        os.chdir(args.next_build)
        shell('bin/karlserve evolve --latest')
    else:
        log.info("Evolution not required.")

    # Update symlink pointer to make next build the current build
    link = os.path.join(os.path.dirname(args.next_build), 'current')
    os.remove(link)
    os.symlink(args.next_build, link)

    # Restart new build in normal mode
    shell('bin/karlserve mode -s normal')
    os.chdir(args.current_build)
    shell('bin/supervisorctl shutdown')
    log.info("Waiting for supervisor to shutdown...")
    time.sleep(1)
    # supervisord removes its pid file on exit; poll until it is gone so the
    # new supervisord does not race the old one.
    while os.path.exists('var/supervisord.pid'):
        log.info("Waiting...")
        time.sleep(1)
    os.chdir(args.next_build)
    shell('bin/supervisord')