def restart_webserver(verbosity=False):
    """Restart the webserver via an ssh call to host(makana)

    This function assumes that this file that you are reading is in the
    exact same location in the second host machine makana.  In the BPG
    setup, these hosts share the remoted network file system, so this is
    the case.

    Essentially, one of these two commands is executed, depending upon
    the verbosity given:

        ssh -tt makana <this_file_name>.py --restart_helper
        ssh -tt makana <this_file_name>.py --restart_helper --verbose
    """
    # Run this very script on makana with the --restart_helper flag.
    remote_command = [
        'ssh',
        '-tt',
        'makana',
        get_self_executable(os.path.abspath(__file__)),
        '--restart_helper',
    ]
    if verbosity:
        remote_command.append('--verbose')
    _custom_command("Calling restart on makana", remote_command,
                    verbosity=verbosity)
    # No need to clutter/say we're sending email
    send_email(from_address, admin_emails,
               subject="Webserver Restarted",
               message="FYI: Makana webserver was just restarted",
               verbosity=False)
    if verbosity:
        press_enter()
def vacuum_all(verbosity=False):
    """Vacuum every database returned by get_databases().

    Each per-database vacuum runs quietly (verbosity=False); progress is
    recorded in the log, and echoed to stdout only when verbosity is on.
    """
    if verbosity:
        sys.stdout.write("Beginning vacuuming of databases...\n")
    logging.info('%s: Beginning vacuuming of databases' % datetime.now())
    for database in get_databases():
        vacuum_database(database, verbosity=False)
    logging.info('%s: Vacuuming complete.' % datetime.now())
    if verbosity:
        sys.stdout.write("Vacuuming complete.\n")
        press_enter()
def check_families(input_file, verbosity=True):
    """Read list of families and iterate call check_one_family on each

    input_file -- path to a text file holding one family name per line.
    verbosity  -- when True, print a summary and wait for Enter.

    The number of corrupt families is always written to the log.
    """
    logging.info("Beginning families check.")
    num_corrupt = 0
    # 'with' guarantees the file is closed even if check_one_family
    # raises (the original leaked the handle on any exception).
    with open(input_file, 'r') as family_list_file:
        for family in family_list_file:
            if not check_one_family(family.strip(), verbosity):
                num_corrupt += 1
    logging.info(
        "Finished checking families. %d families corrupt" % num_corrupt)
    if verbosity:
        sys.stdout.write("\n\t%d Families were corrupted.\n\n" % num_corrupt)
        press_enter()
def check_disk_space(verbosity=False):
    """Ssh into every node in 'nodes' and run this script's '-ms' check.

    Failures are echoed to stdout and logged as errors; successes are
    only logged.  With verbosity on, progress is printed and the user is
    prompted at the end.
    """
    # Loop-invariant: resolve our own executable path once, not per node.
    self_executable = get_self_executable(__file__)
    for node in nodes:
        if verbosity:
            print("Checking %s" % node)
        retcode = subprocess.call(
            ['/usr/bin/ssh', node, self_executable, '-ms'])
        if retcode != 0:
            print("ERROR trying to check on %s..." % node)
            logging.error("Error trying to check on the %s..." % node)
        else:
            # Typo fixed in the log message: 'succesfully' -> 'successfully'.
            logging.info("Node %s successfully checked." % node)
    if verbosity:
        press_enter()
def export(base_dir, repository, repo_name, verbosity=False,
           has_local_settings=False):
    """Export 'repository' into base_dir/repo_name with a near-atomic swap.

    The new tree is exported next to the live one as <repo_name>_new,
    then the live tree is renamed to <repo_name>_old, the new tree is
    renamed into place, and the old tree is removed.

    base_dir           -- directory that holds the exported trees
                          (created if missing).
    repository         -- source repository handed to _export().
    repo_name          -- name of the live directory under base_dir.
    verbosity          -- forwarded to info()/error() and helpers.
    has_local_settings -- when True, carry local_settings_file over from
                          the live tree (an empty file is created if the
                          live tree has none).

    Returns False on a recoverable failure; exits the process if the
    directory swap itself fails (server would be inconsistent).
    """
    _OLD = '_old'
    _NEW = '_new'
    # Step 1 of 5: Preliminary checks
    if not os.path.exists(base_dir):
        info(verbosity, "%s doesn't exist. Trying to create." % base_dir)
        try:
            os.makedirs(base_dir)
        except OSError:
            error(verbosity, "Unable to create directory: %s" % base_dir)
            return False
    exported_orig = os.path.join(base_dir, repo_name)
    exported_old = os.path.join(base_dir, "%s%s" % (repo_name, _OLD))
    exported_new = os.path.join(base_dir, "%s%s" % (repo_name, _NEW))
    # Leftover _old/_new directories from a crashed previous run.
    check_stale_dir(exported_old, verbosity=verbosity)
    check_stale_dir(exported_new, verbosity=verbosity)
    # Step 2 of 5: Export _new copy
    if not _export(base_dir, repository, "%s%s" % (repo_name, _NEW),
                   verbosity=verbosity):
        return False
    # Step 3 of 5 (optional): Copy local_settings.py
    if has_local_settings:
        local_filename = os.path.join(exported_orig, local_settings_file)
        local_new_filename = os.path.join(exported_new, local_settings_file)
        if os.path.exists(local_filename):
            info(verbosity, "\tCopying local settings.")
            shutil.copyfile(local_filename, local_new_filename)
        else:
            error(verbosity, "No local settings. Creating empty file")
            open(local_new_filename, 'w').close()
    # Step 4 of 5: Put new directories in place.
    # Any failure here leaves the server in an inconsistent state, so we
    # bail out hard.  Catch OSError (what os.rename raises) rather than
    # the original bare 'except:', which also swallowed
    # KeyboardInterrupt/SystemExit.
    try:
        info(verbosity, "\tPutting changes in place: Renaming directories.")
        # Move original to old
        if os.path.exists(exported_orig):
            os.rename(exported_orig, exported_old)
        # Move new to original
        os.rename(exported_new, exported_orig)
    except OSError:
        error(verbosity,
              "Rename of directories failed. Webserver in unstable state.")
        sys.exit(1)
    # Step 5 of 5: Delete original (now called _old)
    if os.path.exists(exported_old):
        info(verbosity, "\tRemoving old files.")
        shutil.rmtree(exported_old)
    info(verbosity, "Update completed")
    if verbosity:
        press_enter()
# This will be removed once we establish a solid working procedure that # has been tested. paranoid_file = open(os.path.expanduser('~/webserver_clean.out'), 'w') paranoid_file.write(data) paranoid_file.close() # This will also be removed when systems are guaranteed to be running well send_email('*****@*****.**', [ '*****@*****.**', ], subject="Results of Webserver Temporary Cleaning", message=data, verbosity=False) if verbosity: press_enter() def main(): """Options are passed and possibly silent scripting called""" parser = OptionParser(version='%prog 0.3') parser.add_option( '-v', '--verbose', dest='verbose', action="store_true", help="Give verbose output (default is no output or interaction)", default=False) parser.add_option('-c',
def copy_logs(verbosity=False):
    """Show this script's executable path and wait for Enter.

    NOTE(review): despite the name, no log copying happens here yet —
    the body only prints the path; presumably a placeholder.
    """
    executable_path = get_self_executable(__file__)
    print(executable_path)
    press_enter()
def take_db_backup(verbosity=False):
    """Dump each PostgreSQL database on host 'db' into db_root, gzipped.

    Creates db_root, connects to the server, lists every database not in
    EXCLUDE_DB, runs pg_dump per database, then tars/gzips each dump and
    removes the raw .sql file.  Timing and failures go to the log.

    Returns False if the backup directory cannot be created; otherwise
    returns None.
    """
    if verbosity:
        print("Starting database backup...")
    # Create directory to put backup
    try:
        os.makedirs(db_root)
    except OSError:
        # Renamed from 'error' so it no longer shadows the module-level
        # error() helper used elsewhere in this file.
        error_msg = "ERROR: Unable to make the backup directory '%s'" % db_root
        sys.stderr.write('\n%s...\n\n' % error_msg)
        if verbosity:
            press_enter()
        logging.error('%s: %s' % (datetime.now(), error_msg))
        return False
    # Connect to DB
    conn = connect("dbname=postgres host=db")
    try:
        # Decide on type of backup
        backup_type = current_date.day % len(BACKUP_TYPES)
        backup_type = INDIVIDUAL_DB  # HERE: hacked
        if backup_type == FULL:
            if verbosity:
                print("Full backup")
        else:
            # If INDIVIDUAL_DB or ROWS, iterate over all databases
            curs = conn.cursor()
            curs.execute("SELECT datname from pg_database")
            rows = curs.fetchall()
            databases = [db[0] for db in rows if db[0] not in EXCLUDE_DB]
            if verbosity:
                print("Getting databases to backup...")
            logging.info('%s: Beginning backup of databases' % datetime.now())
            for db in databases:
                if backup_type == INDIVIDUAL_DB:
                    filename = "%s_%s.sql" % (db, stamp)
                    filepath = os.path.join(db_root, filename)
                    # Dumping database
                    if verbosity:
                        print("\tDumping %s database..." % db)
                    logging.info('%s: Dumping %s database'
                                 % (datetime.now(), db))
                    begin_time = datetime.now()
                    retcode = subprocess.call(
                        ['pg_dump', '-h', 'db', '--file=%s' % filepath,
                         db, '-o'])
                    if retcode != 0:
                        logging.error('%s: Dump failed!' % datetime.now())
                        if verbosity:
                            sys.stderr.write("Dump failed!")
                    end_time = datetime.now()
                    diff = (end_time - begin_time).seconds
                    if verbosity:
                        print("\tDump complete.")
                    logging.info('%s: Completed in %d seconds'
                                 % (datetime.now(), diff))
                    # Create compressed tar file from original
                    if verbosity:
                        print("\tCompressing %s database..." % db)
                    os.chdir(db_root)
                    compressed_filename = "%s_%s.sql.tar.gz" % (db, stamp)
                    tar = tarfile.open(compressed_filename, 'w:gz')
                    try:
                        tar.add(filename)
                    finally:
                        # Close even if add() fails, so the archive (and
                        # the gzip stream) is flushed or released.
                        tar.close()
                    # At least on case, from the command line, we've seen the
                    # compressed file not created (if it's too small)
                    if os.path.exists(compressed_filename):
                        os.remove(filename)
                    if verbosity:
                        print("\tCompression complete...")
                if backup_type == ROWS:
                    pass
            # close() releases the server-side cursor; the original
            # merely 'del curs', which only dropped the local reference.
            curs.close()
    finally:
        # The original never closed the connection at all.
        conn.close()
    logging.info('%s: Completed backup of databases' % datetime.now())
    if verbosity:
        print("Completed backup of databases...")
        press_enter()