def main():
    send_gmail.init_gmail()

    try:
        if username is None or apikey is None:
            raise Exception("Username and/or API key is not set!")

        #test_selexor_alive()
        # Not working, might be that selexor's not performing HTTP requests
        # correctly
        acquired_vessel = test_selexor_acquire()
        # test_selexor_release(acquired_vessel)
    except:
        integrationtestlib.handle_exception(
            "Exception occurred when contacting selexor", ERROR_EMAIL_SUBJECT)
        exit()

def main():
    # initialize the gmail module
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    # add Eric Kimbrel to the email notify list
    integrationtestlib.notify_list.append("*****@*****.**")

    try:
        integrationtestlib.log("Looking up time_servers")

        # verify that there are at least 8 time servers running
        servers = advertise_lookup("time_server")
        if len(servers) < 8:
            integrationtestlib.log('WARNING: only ' + str(len(servers)) + ' timeservers are running!')
            integrationtestlib.notify('WARNING: test_time_servers_running.py FAILED, only ' + str(len(servers)) + ' timeservers are running!', "test_time_servers_running test failed")

        integrationtestlib.log("Finished looking up test servers... Test Passed")
        print "........................................................\n"
    except:
        integrationtestlib.notify("Test failed for an unknown reason:\n" + traceback.format_exc(), "test_time_servers_running test failed")

def main():
    """
    <Purpose>
      Initialize the gmail info that is needed to send email notifications.
      Call the function to check if enough opendht servers are running.

    <Arguments>
      None

    <Exceptions>
      None

    <Side_Effects>
      None

    <Return>
      None
    """
    # Setup the gmail user/password to use when sending email.
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    # Run the test to see how many servers are up.
    check_opendht_servers()

def main():
    # initialize the gmail module
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    # PART 1 verify that there are at least 10 nat forwarders running on
    # each key
    notify_str = ''
    for nat_forwarder_key in nat_forwarder_keys:
        integrationtestlib.log("Looking up nat forwarders for " + repr(nat_forwarder_key))

        nat_forwarders = []
        try:
            nat_forwarders = advertise.advertise_lookup(nat_forwarder_key)
        except Exception, e:
            integrationtestlib.handle_exception(
                "Got exception when looking up nat forwarders", NAT_TEST_FAIL_NOTICE)
            return

        if len(nat_forwarders) < 10:
            notify_str += ('WARNING: only ' + str(len(nat_forwarders)) +
                ' nat forwarders are advertising under the key: ' +
                repr(nat_forwarder_key) + '\n' +
                "Advertising forwarders: " + str(nat_forwarders) + '\n')

def init():
    """
    Returns an XMLRPC Client instance for a valid username and privatekey,
    sets up the mail notification system
    """
    try:
        private_key_string = open(PRIVATE_KEY_FILE).read()
    except IOError:
        print 'Please provide a valid private key file. You may obtain a ' + \
            'private key by registering a new account on the SeattleGENI website.'
        sys.exit(1)

    try:
        client = seattleclearinghouse_xmlrpc.SeattleClearinghouseClient(
            username=USERNAME,
            private_key_string=private_key_string,
            allow_ssl_insecure=ALLOW_SSL_INSECURE)

        success, msg = send_gmail.init_gmail()
        if not success:
            raise Exception(msg)

        return client
    except Exception, e:
        _log('SeattleGENI test failed. Error: ' + str(e))
        sys.exit(2)

def main():
    # initialize the gmail module
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    notify_str = ""
    connected_server = ""

    integrationtestlib.log("Starting test_time_tcp.py...")

    # Whenever we fail to do tcp_time_updatetime, we add the exception string
    exception_string_list = []

    # get the time 5 times and make sure they are reasonably close
    test_start = getruntime()
    print test_start

    times = []
    # Keep a list of servers we connected to or tried to connect to for time.
    server_list = []

    # Connect to 5 servers and retrieve the time.
    for i in range(5):
        try:
            connected_server = tcp_time_updatetime(12345)
            current_time = time_gettime()
            times.append(current_time)
            server_list.append(connected_server)
            integrationtestlib.log("Calling time_gettime(). Retrieved time: " + str(current_time))
        except Exception, e:
            exception_string_list.append(
                {"Server": connected_server,
                 "Exception": str(e),
                 "Traceback": str(traceback.format_exc())})
            pass

def main():
    # Modify the global versions of these variables.
    global TEMP_DIR, XMLRPC_PROXY

    # Setup the integration test library. This must be done before changing
    # directories.
    success, explanation = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log("Failed to execute init_gmail(): " + explanation)
        sys.exit(1)

    # Add any extra error log recipients.
    integrationtestlib.notify_list.extend(NOTIFY_LIST)

    # Establish a temporary directory to do our work in.
    TEMP_DIR = tempfile.mkdtemp()
    os.chdir(TEMP_DIR)

    # Each test can reuse this proxy.
    XMLRPC_PROXY = xmlrpclib.ServerProxy(XMLRPC_PROXY_URL)

    # The main event!
    try:
        test_results = run_tests()
        report_results(test_results)
    finally:
        # Remove the temporary directory we've been working in.
        shutil.rmtree(TEMP_DIR)

def main():
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    command = "df -h | grep '/dev/sda' | awk '{print $5}'"
    command_output_fd = os.popen(command)

    # Get the output and get rid of the extra lines and % sign.
    disk_use_percent = int(command_output_fd.read().split()[0][:-1])
    command_output_fd.close()

    hostname = socket.gethostname() + ".cs.washington.edu"
    subject = "High disk usage"

    if disk_use_percent > 95:
        message = "CRITICAL: Very High Disk Usage on %s: %s percent used" % (hostname, disk_use_percent)
        integrationtestlib.notify(message, subject)
        irc_seattlebot.send_msg(message)
    elif disk_use_percent > 90:
        message = "WARNING: High disk usage on %s: %s percent used" % (hostname, disk_use_percent)
        integrationtestlib.notify(message, subject)

def main():
    """
    <Purpose>
      Program's main.

    <Arguments>
      None.

    <Exceptions>
      All exceptions are caught.

    <Side Effects>
      None.

    <Returns>
      None.
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    key = random.randint(4, 2**30)
    value = random.randint(4, 2**30)
    ttlval = 60

    # put(key,value) with ttlval into the Centralized HT
    integrationtestlib.log("calling centralizedadvertise_announce(key: " + str(key) + ", val: " + str(value) + ", ttl: " + str(ttlval) + ")")
    try:
        centralizedadvertise_announce(key, value, ttlval)
    except:
        integrationtestlib.handle_exception("centralizedadvertise_announce() failed")
        sys.exit(0)

    # a 30 second timer to email the notify_list on slow lookups
    lookup_timedout_timer = threading.Timer(30, lookup_timedout)
    # start the lookup timer
    lookup_timedout_timer.start()

    # get(key) from the centralized HT
    integrationtestlib.log("calling centralizedadvertise_lookup(key: " + str(key) + ")")
    try:
        ret_value = centralizedadvertise_lookup(key)
        print ret_value
        # Check if the value being returned is the one we want
        if value not in ret_value:
            raise Exception("incorrect value returned")
        # Check in case random.randint() produces same key again.
        elif len(ret_value) > 1:
            raise Exception("Multiple copies of same key")
    except:
        integrationtestlib.handle_exception("centralizedadvertise_lookup() failed", "centralizedputget monitor script failure")
        sys.exit(0)

    lookup_timedout_timer.cancel()
    lookup_done_event.set()

    return

def main():
    """
    <Purpose>
      Program's main.

    <Arguments>
      None.

    <Exceptions>
      All exceptions are caught.

    <Side Effects>
      None.

    <Returns>
      None.
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    key = random.randint(4, 2**30)
    value = random.randint(4, 2**30)
    ttlval = 60

    # put(key,value) with ttlval into the Centralized HT
    integrationtestlib.log("calling centralizedadvertise_announce(key: " + str(key) + ", val: " + str(value) + ", ttl: " + str(ttlval) + ")")
    try:
        centralizedadvertise_announce(key, value, ttlval)
    except:
        integrationtestlib.handle_exception("centralizedadvertise_announce() failed")
        sys.exit(0)

    # a 30 second timer to email the notify_list on slow lookups
    lookup_timedout_timer = threading.Timer(30, lookup_timedout)
    # start the lookup timer
    lookup_timedout_timer.start()

    # get(key) from the centralized HT
    integrationtestlib.log("calling centralizedadvertise_lookup(key: " + str(key) + ")")
    try:
        ret_value = centralizedadvertise_lookup(key)
        print ret_value
        # TODO: check the return value as well
        ret_value = int(ret_value[0])
        if (ret_value != value):
            integrationtestlib.handle_exception("ret_value is incorrect")
            print ("ret_value is incorrect")
    except:
        integrationtestlib.handle_exception("centralizedadvertise_lookup() failed")
        sys.exit(0)

    lookup_timedout_timer.cancel()
    lookup_done_event.set()

    return

def main():
    """
    <Purpose>
      Call check_nodes with the two different servers: opendht and central.
      Retrieve the result and then notify developers if result is unusual

    <Exceptions>
      None

    <Side Effects>
      May send out an email or notify on irc.

    <Return>
      None
    """
    # setup the gmail for sending notification
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    notification_subject = "test_lookup_node_states() failed"

    max_acceptdonation_nodes = 50
    max_canonical_nodes = 50
    max_movingtoonepercent = 20
    min_onepercentmanyevents_nodes = 300

    central_results = check_nodes('central')
    integrationtestlib.log("Lookup results for central: " + str(central_results))

    # check to see if any of the results is not normal, and send notifications accordingly
    # also send a message to irc.
    if central_results['acceptdonation'] > max_acceptdonation_nodes:
        message = "Too many nodes in acceptdonation state: " + str(central_results['acceptdonation']) + "\nResults from 'central' server:\n" + str(central_results)
        print message
        integrationtestlib.notify(message, notification_subject)
    elif central_results['canonical'] > max_canonical_nodes:
        message = "Too many nodes in canonical state: " + str(central_results['canonical']) + "\nResults from 'central' server:\n" + str(central_results)
        print message
        integrationtestlib.notify(message, notification_subject)
    elif central_results['onepercent_manyevent'] < min_onepercentmanyevents_nodes:
        message = "Too few nodes in onepercentmanyevents state: " + str(central_results['onepercent_manyevent']) + "\nResults from 'central' server:\n" + str(central_results)
        print message
        integrationtestlib.notify(message, notification_subject)

    opendht_results = check_nodes('opendht')
    print opendht_results

def main():
    # Initialize the gmail setup.
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    print "Beginning query."
    success = True

    try:
        # Query zenodotus
        if not _dns_mapping_exists(zenodotus_servername, zenodotus_ipaddr):
            print "Zenodotus failed to respond properly!"
            # Query is invalid!
            success = False
            integrationtestlib.notify(
                "Error: Zenodotus has failed to correctly respond; the machine has likely been rebooted. Please restart the zenodotus server on zenodotus@blackbox. This report will be re-sent hourly while the problem persists.",
                "Cron: Zenodotus failure")

        # Check that advertised values work
        # Map an entirely random IP to a random DNS name. The mapped IP does
        # not have to actually exist (but should still be valid).
        random_ip_address = _generate_random_ip_address()

        random_publickey = rsa_gen_pubpriv_keys(1024)[0]
        random_publickey_string = rsa_publickey_to_string(random_publickey)
        random_subdomain = "test-" + sha_hexhash(random_publickey_string)
        random_dns_entry = random_subdomain + "." + zenodotus_servername

        print "Announcing", random_dns_entry, random_ip_address
        advertise_announce(random_dns_entry, random_ip_address, 60)

        if not _dns_mapping_exists(random_dns_entry, random_ip_address):
            print "Zenodotus failed to respond properly to advertised subdomain!"
            # Query is invalid!
            success = False
            integrationtestlib.notify(
                "Error: Zenodotus has failed to correctly respond to an advertised subdomain; there might be something wrong with the advertise server. This report will be re-sent hourly while the problem persists.",
                "Cron: Zenodotus failure")

    except Exception, e:
        print "Unknown error!"
        print str(e)
        success = False
        integrationtestlib.notify(
            "Error: Zenodotus seems to be down! Error data: " + str(e),
            "Cron: Zenodotus failure")

def main():
    """
    <Purpose>
      Call check_nodes with the two different servers: opendht and central.
      Retrieve the result and then notify developers if result is unusual

    <Exceptions>
      None

    <Side Effects>
      May send out an email or notify on irc.

    <Return>
      None
    """
    # setup the gmail for sending notification
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    notification_subject = "test_lookup_node_states() failed"

    max_acceptdonation_nodes = 50
    max_canonical_nodes = 50
    max_movingtotwopercent = 20
    min_twopercent_nodes = 300

    central_results = check_nodes('central')
    integrationtestlib.log("Lookup results for central: " + str(central_results))

    # check to see if any of the results is not normal, and send notifications accordingly
    # also send a message to irc.
    if central_results['acceptdonation'] > max_acceptdonation_nodes:
        message = "Too many nodes in acceptdonation state: " + str(central_results['acceptdonation']) + "\nResults from 'central' server:\n" + str(central_results)
        print message
        integrationtestlib.notify(message, notification_subject)
    elif central_results['canonical'] > max_canonical_nodes:
        message = "Too many nodes in canonical state: " + str(central_results['canonical']) + "\nResults from 'central' server:\n" + str(central_results)
        print message
        integrationtestlib.notify(message, notification_subject)
    elif central_results['twopercent'] < min_twopercent_nodes:
        message = "Too few nodes in twopercent state: " + str(central_results['twopercent']) + "\nResults from 'central' server:\n" + str(central_results)
        print message
        integrationtestlib.notify(message, notification_subject)

def main():
    """
    <Purpose>
      Runs at regular time intervals to make sure that critical processes
      on a machine are still up and running. If a critical process is not
      running then system admins are sent an email, as well as a message
      is posted on the IRC.

    <Exceptions>
      None

    <Usage>
      This script takes one argument, either -seattle or
      -seattleclearinghouse, to select which machine's processes to check.
      A typical use of this script is to have it run periodically using
      something like the following crontab line:
      */15 * * * * export GMAIL_USER='******' && export GMAIL_PWD='password' && /usr/bin/python /home/seattle/monitor_scripts/monitor_processes.py > /home/seattle/monitor_scripts/cron_log.monitor_processes
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    #integrationtestlib.notify_list.append("*****@*****.**")

    # processes that should be running on seattle server
    seattle_process_list = ['advertiseserver.py']

    # The commands that should be run on seattle to get all the required processes
    seattle_command = ["ps auwx | grep python | grep -v grep | grep geni | awk '{print $14}'"]

    # processes that should be running on seattleclearinghouse server
    seattleclearinghouse_process_list = ['transition_donation_to_canonical.py',
        'transition_onepercentmanyevents_to_canonical.py',
        'transition_canonical_to_twopercent.py',
        'transition_twopercent_to_twopercent.py',
        'check_active_db_nodes.py',
        'apache2',
        '/usr/sbin/mysqld',
        'backend_daemon.py',
        'lockserver_daemon.py']

    # The commands that should be run on seattleclearinghouse to get all the required processes
    seattleclearinghouse_command = ["ps auwx | grep python | grep -v grep | grep clearinghouse | awk '{print $12}'"]
    seattleclearinghouse_command.append("ps auwx | grep apache | grep -v grep | grep root | awk '{print $11}'")
    seattleclearinghouse_command.append("ps auwx | grep mysqld | grep -v grep | awk '{print $11}'")
    seattleclearinghouse_command.append("ps auwx | grep python | grep -v grep | grep justinc | awk '{print $12}'")

    # run monitor processes with the right command
    if sys.argv[1] == '-seattle':
        monitor_processes(seattle_process_list, seattle_command, "seattle")
    elif sys.argv[1] == '-seattleclearinghouse':
        monitor_processes(seattleclearinghouse_process_list, seattleclearinghouse_command, "seattleclearinghouse")

def main():
    """
    <Purpose>
      Runs at regular time intervals to make sure that critical processes
      on a machine are still up and running. If a critical process is not
      running then system admins are sent an email, as well as a message
      is posted on the IRC.

    <Exceptions>
      None

    <Usage>
      This script takes one argument, either -seattle or
      -seattleclearinghouse, to select which machine's processes to check.
      A typical use of this script is to have it run periodically using
      something like the following crontab line:
      */15 * * * * export GMAIL_USER='******' && export GMAIL_PWD='password' && /usr/bin/python /home/seattle/monitor_scripts/monitor_processes.py > /home/seattle/monitor_scripts/cron_log.monitor_processes
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    # processes that should be running on seattle server
    seattle_process_list = ['advertiseserver.py']

    # The commands that should be run on seattle to get all the required processes
    seattle_command = ["ps auwx | grep python | grep -v grep | grep geni | awk '{print $14}'"]

    # processes that should be running on seattleclearinghouse server
    seattleclearinghouse_process_list = ['transition_donation_to_canonical.py',
        'transition_onepercentmanyevents_to_canonical.py',
        'transition_canonical_to_twopercent.py',
        'transition_twopercent_to_twopercent.py',
        'check_active_db_nodes.py',
        'apache2',
        '/usr/sbin/mysqld',
        'backend_daemon.py',
        'lockserver_daemon.py']

    # The commands that should be run on seattleclearinghouse to get all the required processes
    seattleclearinghouse_command = ["ps auwx | grep python | grep -v grep | grep geni | awk '{print $12}'"]
    seattleclearinghouse_command.append("ps auwx | grep apache | grep -v grep | grep root | awk '{print $11}'")
    seattleclearinghouse_command.append("ps auwx |grep mysqld |grep mysql | awk '{print $11}'")
    seattleclearinghouse_command.append("ps auwx | grep python | grep -v grep | grep justinc | awk '{print $12}'")

    # run monitor processes with the right command
    if sys.argv[1] == '-seattle':
        monitor_processes(seattle_process_list, seattle_command, "seattle")
    elif sys.argv[1] == '-seattleclearinghouse':
        monitor_processes(seattleclearinghouse_process_list, seattleclearinghouse_command, "seattleclearinghouse")

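# Note: the process monitors above find their targets by chaining ps through
# several greps and awk, which is fragile if column positions or command
# lines change. Below is a minimal alternative sketch, assuming a POSIX
# system with pgrep installed; the helper names (process_is_running,
# find_missing_processes) are illustrative and are not part of the
# monitoring scripts above.
import os
import subprocess

def process_is_running(pattern):
    # pgrep -f matches the pattern against full command lines; an exit
    # status of 0 means at least one matching process exists.
    devnull = open(os.devnull, 'w')
    try:
        return subprocess.call(['pgrep', '-f', pattern], stdout=devnull) == 0
    finally:
        devnull.close()

def find_missing_processes(process_list):
    # Return the expected processes that could not be found.
    return [name for name in process_list if not process_is_running(name)]
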
def main():
    # Make the XML-RPC proxy accessible across the whole program.
    global XMLRPC_PROXY

    # Each test can reuse this proxy.
    XMLRPC_PROXY = xmlrpclib.ServerProxy(XMLRPC_PROXY_URL)

    # Setup the integration test library.
    success, explanation = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log('Failed to execute init_gmail(): ' + explanation)
        sys.exit(1)

    # Add any extra error log recipients.
    integrationtestlib.notify_list.extend(NOTIFY_LIST)

    # The main event!
    run_tests()
    report_results()

def main():
    """
    <Purpose>
      Call check_nodes and then notify developers if result is unusual.

    <Exceptions>
      None

    <Side Effects>
      May send out a notification email.

    <Return>
      None
    """
    # Setup the gmail lib for sending notification
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    notification_subject = "test_lookup_node_states() failed"

    results = check_nodes()
    integrationtestlib.log("Lookup results: " + str(results))

    # Check to see if any of the results is not normal, and
    # send notifications accordingly.
    message = "Too many nodes in state "
    if results['acceptdonation'] > max_acceptdonation_nodes:
        message += "acceptdonation: " + str(results['acceptdonation'])
    elif results['canonical'] > max_canonical_nodes:
        message += "canonical: " + str(results['canonical'])

    if results['twopercent'] < min_twopercent_nodes:
        message = "Too few nodes in state twopercent: " + str(results['twopercent'])

    message += "\nLookup results:\n" + str(results)
    print message
    integrationtestlib.notify(message, notification_subject)

def main():
    # initialize the gmail module
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    # add Eric Kimbrel to the email notify list
    integrationtestlib.notify_list.append("*****@*****.**")

    notify_str = ""

    # PART 1 verify that there are at least 10 nat forwarders running
    integrationtestlib.log("Looking up nat forwarders")
    try:
        nodes = nat_forwarder_list_lookup()
        total_nodes = advertise_lookup("NATFORWARDERRUNNING")
    except:
        # make sure we fail if there was an exception
        nodes = []
        total_nodes = []

    if len(nodes) < 10:
        integrationtestlib.log("WARNING: only " + str(len(nodes)) + " forwarders are available")
        integrationtestlib.log("WARNING: only " + str(len(total_nodes)) + " forwarders are running")
        notify_str += ("WARNING: test_nat_servers_running.py FAILED, only " +
            str(len(nodes)) + " nat forwarders are available!, " +
            str(len(total_nodes)) + " are running.")

    # PART 2 check that nat forwarders are responsive
    integrationtestlib.log("Checking that we can talk to a nat forwarder")
    try:
        response = nat_check_bi_directional(getmyip(), random.randint(20000, 62000))
    except Exception, e:
        notify_str += "WARNING: could not get a response from nat forwarders: " + str(e)
        integrationtestlib.log("WARNING: could not get a response from nat forwarders " + str(e))

def main():
    success, explanation_str = send_gmail.init_gmail()
    #integrationtestlib.notify_list=['*****@*****.**']
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    command = "df -h | grep '/dev/sda' | awk '{print $5}'"
    command_output_fd = os.popen(command)

    # Get the output and get rid of the extra lines and % sign.
    disk_use_percent = int(command_output_fd.read().split()[0][:-1])
    command_output_fd.close()

    disk_free_command = "df -h | grep '/dev/sda' | awk '{print $4}'"
    disk_free_fd = os.popen(disk_free_command)
    free_space = disk_free_fd.read()
    disk_free_fd.close()

    hostname = socket.gethostname() + ".poly.edu"
    subject = "High disk usage"

    if disk_use_percent >= 95:
        message = "CRITICAL: Very High Disk Usage on %s: %s percent used.\n" % (hostname, disk_use_percent)
        message += "Disk space free: %s" % free_space
        integrationtestlib.log(message)
        integrationtestlib.notify(message, subject)
        irc_seattlebot.send_msg(message)
    elif disk_use_percent > 90:
        message = "WARNING: High disk usage on %s: %s percent used.\n" % (hostname, disk_use_percent)
        message += "Disk space free: %s" % free_space
        integrationtestlib.log(message)
        integrationtestlib.notify(message, subject)

    print "Current disk usage: %s percent" % disk_use_percent
    print "Free disk space: %s" % free_space

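# Note: the disk checks above shell out to df and parse its columns, which
# breaks if the device name ('/dev/sda') or df's output layout changes. A
# minimal sketch of the same measurement using os.statvfs from the standard
# library; the path argument and helper name are illustrative assumptions,
# not taken from the scripts above.
import os

def disk_usage_percent(path='/'):
    # Sizes are in units of f_frsize bytes. Like df, count the space
    # reserved for root as used and measure availability with f_bavail,
    # so the result approximates df's "Use%" column.
    stats = os.statvfs(path)
    used = (stats.f_blocks - stats.f_bfree) * stats.f_frsize
    available = stats.f_bavail * stats.f_frsize
    return int(round(100.0 * used / (used + available)))
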
def main():
    """
    <Purpose>
      This is the main launching point for the integration tests. We check
      both our servers to ensure that both geoip servers are up and running.
    """
    # List of all the geoip servers.
    geoip_server_list = ['http://geoipserver.poly.edu:12679',
                         'http://geoipserver2.poly.edu:12679']

    # Keep track of stats.
    servers_down = []
    servers_running_properly = []
    servers_with_bad_response = []

    # Initialize the integration test lib.
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    # Contact each server to ensure they are running properly.
    for server in geoip_server_list:
        success, error_type, error_str = test_server(server)

        if success:
            servers_running_properly.append(server)
        else:
            if error_type == INCORRECT_ASSERTION:
                # If we got the wrong answer, we note what the error was.
                servers_with_bad_response.append((server, error_str))
            elif error_type == SERVER_DOWN:
                servers_down.append((server, error_str))

    # Initialize the email text and subject.
    email_subject = ""
    email_text = ""

    # If all the servers are not running properly we send out emails.
    if len(servers_running_properly) != len(geoip_server_list):
        # If all servers are down, then we are in trouble.
        if len(servers_running_properly) == 0:
            email_subject = "CRITICAL: All GeoIP servers are down!"
            email_text = "CRITICAL: All GeoIP servers are down or are not running properly.\n\n"
        # Only some servers are not functioning properly.
        else:
            email_subject = "WARNING: Some GeoIP servers are down or are not running properly."
            email_text = "Not all the GeoIP servers are running properly!\n\n"
            email_text += "Servers that are up and running properly:\n"
            for server_name in servers_running_properly:
                email_text += "\t%s\n" % server_name

        # If servers are unresponsive.
        if len(servers_down) > 0:
            email_text += "Servers that are down:\n"
            for (server_name, server_error) in servers_down:
                email_text += "\t%s, Error: %s\n" % (server_name, server_error)

        # If servers are giving bad response.
        if len(servers_with_bad_response) > 0:
            email_text += "Servers with bad response:\n"
            for (server_name, server_error) in servers_with_bad_response:
                email_text += "\t%s, Error: %s\n" % (server_name, server_error)

        # Send out the notification email.
        integrationtestlib.notify(email_text, email_subject)

    else:
        email_text += "Servers that are up and running properly:\n"
        for server_name in servers_running_properly:
            email_text += "\t%s\n" % server_name

        integrationtestlib.log(email_text)

def main():
    """
    <Purpose>
      Checks out milestones.txt from the repository. Interprets it and
      sends email if anyone is late on their sprints.

    <Arguments>
      None

    <Exceptions>
      Not sure.

    <Side Effects>
      Sends email.

    <Returns>
      Returns 1 if anyone is late. Otherwise returns 0.
    """
    global notify_list
    global svn_repo_path

    # grab latest milestones revision from the repo
    milestones_txt = command_output("svn cat http://seattle.cs.washington.edu/svn/seattle/trunk/milestones.txt")
    # failure example:
    # milestones_txt = """
    #:sprint
    #01/01/2009
    #eye candy
    #789 alper parallelize resource acquisition and release
    #sean redesign GENI portal
    #"""

    # setup gmail lib
    if enable_email:
        gmail_user, gmail_pwd = open("/var/local/svn/hooks/gmail_account", "r").read().strip().split()
        send_gmail.init_gmail(gmail_user=gmail_user, gmail_pwd=gmail_pwd)

    # check if any of the sprint personnel are past deadline
    sprints = parse_sprints(milestones_txt)
    if sprints is None:
        # syntax error in parsing milestones_txt
        print "syntax error in parsing milestones file"
        if enable_email:
            # send email to notify_list members
            for email in notify_list:
                send_gmail.send_gmail(email, "svn-hook milestones-checker syntax error in milestones file", "", "")
        return 1

    for sprint in sprints:
        sprint_date = datetime.strptime("%s 00:00:00" % (sprint['date']), "%m/%d/%Y %H:%M:%S")
        # print sprint_date
        if sprint_date <= datetime.now():
            if sprint['date'] == datetime.now().strftime("%m/%d/%Y"):
                # sprint due today
                notify = True
            else:
                # sprint in the past
                notify = False

            notify_str = '''
For the %s sprint for the %s strike force:
''' % (sprint['date'], sprint['force'])

            # check if we need to notify
            for user in sprint['users']:
                try:
                    rev = "Completed as of revision %i" % (int(user.split(' ')[0]))
                    task = ' '.join(user.split(' ')[2:])
                    user = user.split(' ')[1]
                except ValueError:
                    rev = "Failed to complete"
                    # always notify when someone failed in a sprint
                    notify = True
                    task = ' '.join(user.split(' ')[1:])
                    user = user.split(' ')[0]

                notify_str += '''
User  %s
Task  %s
%s''' % (user, task, rev) + '\n\n'

            if notify:
                print notify_str
                if enable_email:
                    # send email to notify_list members
                    for email in notify_list:
                        send_gmail.send_gmail(email, "[status] strike force: %s, sprint: %s" % (sprint['force'], sprint['date']), notify_str, "")

    return 0

def main():
    """
    <Purpose>
      Program's main.

    <Arguments>
      None.

    <Exceptions>
      All exceptions are caught.

    <Side Effects>
      None.

    <Returns>
      None.
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    integrationtestlib.notify_list.append("*****@*****.**")

    ttlval = 60
    subject = "opendht with repy test failed"
    fail_count = 0
    message = "openDHTadvertise_lookup() or openDHTadvertise_announce() failed 5 times."

    for i in range(5):
        key = random.randint(4, 2**30)
        value = random.randint(4, 2**30)

        # put(key,value) with ttlval into OpenDHT
        integrationtestlib.log("calling openDHTadvertise_announce(key: " + str(key) + ", val: " + str(value) + ", ttl: " + str(ttlval) + ")")
        try:
            openDHTadvertise_announce(key, value, ttlval, proxiestocheck=10)
        except:
            fail_count += 1
            message = message + "\nAnnouncing with key: " + str(key) + ", value: " + str(value) + ", ttlval: " + str(ttlval)
            if fail_count >= 5:
                integrationtestlib.handle_exception(message, subject)
                sys.exit(0)
            continue

        # a 60 second timer to email the notify_list on slow lookups
        lookup_timedout_timer = threading.Timer(60, lookup_timedout)
        # start the lookup timer
        lookup_timedout_timer.start()

        # get(key) from OpenDHT
        integrationtestlib.log("calling openDHTadvertise_lookup(key: " + str(key) + ")")
        try:
            ret_value = openDHTadvertise_lookup(key, proxiestocheck=10)
            # TODO: check the return value as well
            # ret_value = int(ret_value[0])
        except:
            fail_count += 1
            message = message + "Looking up with key: " + str(key)
            if fail_count >= 5:
                integrationtestlib.handle_exception(message, subject)
                sys.exit(0)

        lookup_timedout_timer.cancel()
        lookup_done_event.set()

    return

def main():
    """
    <Purpose>
      Program's main.

    <Arguments>
      None.

    <Exceptions>
      All exceptions are caught.

    <Side Effects>
      None.

    <Returns>
      None.
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    integrationtestlib.notify_list.append("*****@*****.**")

    key = str(random.randint(4, 2**30))
    value = str(random.randint(4, 2**30))
    ttlval = 60
    subject = "DOR with repy test failed"

    # put(key,value) with ttlval into DOR
    integrationtestlib.log("calling DORadvertise_announce(key: " + str(key) + ", val: " + str(value) + ", ttl: " + str(ttlval) + ")")
    try:
        DORadvertise_announce(key, value, ttlval)
    except:
        message = "DORadvertise_announce() failed.\nFailed while doing DORadvertise_announce(). "
        message = message + "Announcing with key: " + key + ", value: " + value + ", ttlval: " + str(ttlval)
        integrationtestlib.handle_exception("DORadvertise_announce() failed", subject)
        sys.exit(0)

    # a 60 second timer to email the notify_list on slow lookups
    lookup_timedout_timer = threading.Timer(60, lookup_timedout)
    # start the lookup timer
    lookup_timedout_timer.start()

    # get(key) from DOR
    integrationtestlib.log("calling DORadvertise_lookup(key: " + str(key) + ")")
    try:
        ret_value = DORadvertise_lookup(key)
        # TODO: check the return value as well
        # ret_value = int(ret_value[0])
    except:
        message = "DORadvertise_lookup() failed.\nFailed while doing DORadvertise_lookup(). "
        message = message + "Looking up with key: " + key
        integrationtestlib.handle_exception(message, subject)
        sys.exit(0)

    lookup_timedout_timer.cancel()
    lookup_done_event.set()

    return

def main():
    """
    <Purpose>
      Program's main.

    <Arguments>
      None.

    <Exceptions>
      All exceptions are caught.

    <Side Effects>
      None.

    <Returns>
      None.
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    # download and install Seattle
    download_and_install()

    # sleep for a while, giving Clearinghouse time to process this new node
    integrationtestlib.log("sleeping for 30 minutes...")
    time.sleep(1800)

    # retrieve the vesseldict from installed seattle
    integrationtestlib.log("retrieving vesseldict from installed Seattle")
    dict = {}
    try:
        f = open(prefix + "/seattle/seattle_repy/vesseldict", "r")
        lines = f.readlines()
        f.close()
        dict = eval(lines[0])
    except:
        integrationtestlib.handle_exception(
            "failed to open/read/eval vesseldict file",
            "seattle downloadandinstall failed!")
        # uninstall Seattle and remove its dir
        uninstall_remove()
        sys.exit(0)

    # check if the vesseldict conforms to expectations
    integrationtestlib.log("checking for twopercent pubkey in vessels..")
    passed = False
    try:
        for vname, vdata in dict.items():
            for k in vdata['userkeys']:
                if k == twopercent_publickey:
                    integrationtestlib.log("passed")
                    passed = True
                    break
            if passed:
                break
    except Exception, e:
        integrationtestlib.handle_exception(
            "failed in checking for twopercent key\n\nvesseldict is: " + str(dict),
            "seattle downloadandinstall failed!")
        # uninstall Seattle and remove its dir
        uninstall_remove()
        sys.exit(0)

    # if vesseldict not as expected, notify some people
    if not passed:
        text = "check for twopercent key:\n" + str(twopercent_publickey) + "..\n\nfailed\n\nvesseldict is: " + str(dict)
        integrationtestlib.log(text)
        integrationtestlib.notify(text, "seattle downloadandinstall failed!")

    # uninstall Seattle and remove its dir
    uninstall_remove()
    return

def main():
    """
    <Purpose>
      Ping all the machines to see if they are up

    <Exceptions>
      none

    <Side Effects>
      Prints the ping result

    <Returns>
      None.
    """
    # setup the gmail user/password to use when sending email
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    integrationtestlib.log("pinging critical machines")

    # list of machines that are getting pinged
    pinglist = []
    # list that contains all the ping results
    result_queue = []

    # create a thread for each machine and ping them
    for host in machine_list:
        ping_current_machine = ping(str(host), result_queue)
        pinglist.append(ping_current_machine)
        ping_current_machine.start()

    # join all the threads
    for ping_host in pinglist:
        ping_host.join()

    # variable that keeps track if any machines are down
    ALL_MACHINES_RUNNING = True

    error_message = "WARNING: Seattle machines are down! Seattle developers please check on the machines.\n"
    error_message += "Displaying ping result:\n"

    # check to see if all the results were successful
    # on failures notify the admins and send a message to the irc
    for (success, ping_result) in result_queue:
        if not success:
            ALL_MACHINES_RUNNING = False
            error_message += ping_result + "\n"

    # if all machines were pinged successfully, notify on irc if option -m was used to run ping_machines.py
    if ALL_MACHINES_RUNNING:
        if len(sys.argv) >= 2 and sys.argv[1] == '-m':
            irc_seattlebot.send_msg("The machines: " + str(machine_list) + " were pinged successfully")
    else:
        integrationtestlib.notify(error_message, "Seattle machines down!")
        irc_seattlebot.send_msg(error_message)

    print time.ctime() + " : Done pinging all machines."
    print "--------------------------------------------"

def main():
    # Read the gmail credentials used for notifications from the info file
    # and initialize send_gmail with them, using the same credential-file
    # pattern as the other monitor scripts in this collection.
    gmail_file_name = "/home/abhishek/monitor_script/seattle_gmail_info"
    gmail_user, gmail_pwd = open(gmail_file_name, "r").read().strip().split()
    result = send_gmail.init_gmail(gmail_user=gmail_user, gmail_pwd=gmail_pwd)

def main():
    success, explanation_str = send_gmail.init_gmail()
    if not success:
        integrationtestlib.log(explanation_str)
        sys.exit(0)

    updater_pid = None

    repy_path = test_directory + os.sep + REPY_RELATIVE_PATH
    logfile_name = "softwareupdater.old"
    logfile_path = repy_path + logfile_name

    # Wrap in a try...except so that the test stops running once we run
    # into an error. We also want to unconditionally cleanup the test
    # files, so that subsequent tests start fresh.
    try:
        download_and_unpack_seattle()

        # We want to make sure that the installed versions can update, so we
        # need to get the URL from the softwareupdater from within the
        # installer. Because of this, we have no choice but to delay the
        # import until now, otherwise the module would not yet exist.
        sys.path.insert(0, REPY_RELATIVE_PATH)
        import softwareupdater

        metainfo_url = softwareupdater.softwareurl + 'metainfo'

        # Retrieve the update metainfo file.
        metainfo = urllib2.urlopen(metainfo_url).read()

        # The metainfo file may be useful for debugging purposes if
        # something goes wrong.
        open(test_directory + os.sep + 'metainfo', 'w').write(metainfo)

        hashes = get_metainfo_hashes(metainfo)

        # I think it's safe to assume that each release will always have this
        # file...
        file_to_modify_name = "nmmain.py"
        file_to_modify_path = repy_path + file_to_modify_name

        # We don't particularly care how this file is modified, as long as
        # its hash changes. It should be okay if we append some data to the
        # end of the file.
        file_to_modify = open(file_to_modify_path, 'a')
        file_to_modify.seek(0, 2)
        file_to_modify.write("\n# An update should remove this line\n")
        file_to_modify.close()

        # The updater will take some time to run...
        updater_pid = start_updater_process()
        integrationtestlib.log("sleeping for 60 minutes...")
        time.sleep(60 * 60)

        # We have to assume that the metainfo file didn't change since we
        # downloaded it, so this may give us some false negatives if a push
        # was done after we retrieved the metainfo file.
        if softwareupdater.get_file_hash(file_to_modify_path) != hashes[file_to_modify_name]:
            # Maybe the signature expired?
            updater_log = open(logfile_path, 'r')
            last_line = updater_log.readlines()[-1].strip()
            updater_log.close()

            if "[do_rsync] Something is wrong with the metainfo: ['Expired signature', 'Expired signature']" in last_line:
                # Not sure if we should notify the administrators about this one...
                raise Exception("Metainfo signature expired!")
            elif "Another software updater old process" in last_line:
                raise Exception("Seattle failed to update because another software updater is currently running! Please investigate what is causing the extra updater to run.")
            else:
                # Something really bad happened...
                raise Exception("Seattle failed to restore the modified file to the updated version. If there was a new release of Seattle done in the last hour, then it is possible that this is a false negative.")

        # Make sure that we actually got a chance to run, since only one
        # instance of the software updater is supposed to be running on each
        # machine.
        updater_log = open(logfile_path, 'r')
        last_line = updater_log.readlines()[-1].strip()
        updater_log.close()

    except Exception, e:
        backup_directory_name = "backup-" + time.strftime("%Y-%m-%d-%H:%M:%S")

        text = "The updater integration test failed!\n"
        text += "The seattle directory will be backed up to: '" + backup_directory_name + "'\n"
        text += "In addition, the metainfo file and the downloaded seattle_linux.tgz will also be in the backup directory.\n"
        text += "----------------------------\n"
        text += traceback.format_exc()

        integrationtestlib.log(text)
        integrationtestlib.notify(text, "Seattle Update Test Failed!")

        # Since something bad happened, we want to keep the directory
        # intact, in case it helps with any debugging.
        backup_seattle_directory_path = backup_directory_name + os.sep + "seattle"
        os.makedirs(backup_seattle_directory_path)
        os.rename("seattle", backup_seattle_directory_path)
        os.rename("metainfo", backup_directory_name + os.sep + "metainfo")
        os.rename("seattle_linux.tgz", backup_directory_name + os.sep + "seattle_linux.tgz")

        # We do some cleaning up after, so update the log path
        logfile_path = backup_seattle_directory_path + os.sep + "seattle_repy/" + logfile_name
