def main(): """Main entry.""" # Clear all loggers to make sure the following basicConfig take effect. logging.shutdown() reload(logging) logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %H:%M:%S', level=logging.DEBUG) with ts_mon_config.SetupTsMonGlobalState(service_name='kill_slow_queries', indirect=True): count = 0 parser, options, args = parse_options() if not verify_options_and_args(options, args): parser.print_help() return 1 try: while True: result_log_strs, count = kill_slow_queries( options.user, options.password, options.timeout) if result_log_strs: gmail_lib.send_email( options.mail, 'Successfully killed slow autotest db queries', 'Below are killed queries:\n%s' % result_log_strs) m = 'chromeos/autotest/afe_db/killed_slow_queries' metrics.Counter(m).increment_by(count) time.sleep(options.timeout) except Exception as e: m = 'chromeos/autotest/afe_db/failed_to_kill_query' metrics.Counter(m).increment() logging.error('Failed to kill slow db queries.\n%s', e) raise
def send_email(bug, bug_template):
    """Email the owner and cc list about a TestBug.

    Recipients come from the bug's own cc list and owner, plus any extra
    'cc'/'owner' entries supplied by the template.  Every attempt bumps
    the 'total' email counter; a send failure additionally bumps 'fail'
    before the exception is re-raised.

    @param bug: TestBug instance.
    @param bug_template: A template dictionary specifying the default bug
                         filing options for failures in this suite.
    """
    autotest_stats.Counter(EMAIL_COUNT_KEY % 'total').increment()

    # Merge recipients from the bug itself and from the template.
    addresses = set(bug.cc or [])
    if bug.owner:
        addresses.add(bug.owner)
    template_cc = bug_template.get('cc')
    if template_cc:
        addresses.update(template_cc)
    template_owner = bug_template.get('owner')
    if template_owner:
        addresses.add(template_owner)
    recipients = ', '.join(addresses)

    try:
        gmail_lib.send_email(
                recipients, bug.title(), bug.summary(), retry=False,
                creds_path=site_utils.get_creds_abspath(EMAIL_CREDS_FILE))
    except Exception:
        autotest_stats.Counter(EMAIL_COUNT_KEY % 'fail').increment()
        raise
def send_email(bug, bug_template):
    """Email the owner and cc list about a TestBug.

    Recipients are gathered from the bug plus any extra 'cc'/'owner'
    entries in the template.  If no recipient can be found, the mail is
    skipped with a warning.  The outcome of every actual send attempt is
    recorded in the send_bug_email metric.

    @param bug: TestBug instance.
    @param bug_template: A template dictionary specifying the default bug
                         filing options for failures in this suite.
    """
    # Merge recipients from the bug itself and from the template.
    addresses = set(bug.cc or [])
    if bug.owner:
        addresses.add(bug.owner)
    template_cc = bug_template.get('cc')
    if template_cc:
        addresses.update(template_cc)
    template_owner = bug_template.get('owner')
    if template_owner:
        addresses.add(template_owner)

    recipients = ', '.join(addresses)
    if not recipients:
        logging.warning('No owner/cc found. Will skip sending a mail.')
        return

    # The finally clause records the attempt whether or not send_email
    # raised; `success` flips to True only after a clean send.
    success = False
    try:
        gmail_lib.send_email(
                recipients, bug.title(), bug.summary(), retry=False,
                creds_path=site_utils.get_creds_abspath(EMAIL_CREDS_FILE))
        success = True
    finally:
        metrics.Counter('chromeos/autotest/errors/send_bug_email').increment(
                fields={'success': success})
def main():
    """Main entry.

    Repeatedly kills slow autotest db queries, emailing the results of
    each pass and reporting kill counts to ts_mon.  On any unhandled
    error an alert email is sent and the exception is re-raised.
    Returns 1 on bad command-line options.
    """
    logging.basicConfig(format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %H:%M:%S', level=logging.DEBUG)
    # The metrics.Counter call below requires ts_mon global state to be
    # initialized for the lifetime of this process.
    with ts_mon_config.SetupTsMonGlobalState(service_name='kill_slow_queries',
                                             indirect=True):
        count = 0
        parser, options, args = parse_options()
        if not verify_options_and_args(options, args):
            parser.print_help()
            return 1

        try:
            # Poll forever: each pass kills queries running longer than
            # options.timeout, then sleeps for the same interval.
            while True:
                result_log_strs, count = kill_slow_queries(
                        options.user, options.password, options.timeout)
                if result_log_strs:
                    gmail_lib.send_email(
                            options.mail,
                            'Successfully killed slow autotest db queries',
                            'Below are killed queries:\n%s' % result_log_strs)
                    # Record how many queries were killed in this pass.
                    m = 'chromeos/autotest/afe_db/killed_slow_queries'
                    metrics.Counter(m).increment_by(count)
                time.sleep(options.timeout)
        except Exception as e:
            # Alert by email before propagating the failure.
            logging.error('Failed to kill slow db queries.\n%s', e)
            gmail_lib.send_email(
                    options.mail,
                    'Failed to kill slow autotest db queries.',
                    ('Error occurred during killing slow db queries:\n%s\n'
                     'Detailed logs can be found in /var/log/slow_queries.log on db'
                     ' backup server.\nTo avoid db crash, please check ASAP.') % e)
            raise
def main():
    """Main entry.

    Performs a single pass of killing slow autotest db queries: parses
    options, kills queries that exceeded the configured timeout, and
    emails a report.  On any failure an alert email is sent and the
    exception is re-raised.  Returns 1 on bad command-line options.
    """
    logging.basicConfig(format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %H:%M:%S', level=logging.DEBUG)

    parser, options, args = parse_options()
    if not verify_options_and_args(options, args):
        parser.print_help()
        return 1

    try:
        killed_queries = kill_slow_queries(options.user, options.password,
                                           options.timeout)
        # Only mail a report when something was actually killed.
        if killed_queries:
            gmail_lib.send_email(
                    options.mail,
                    'Successfully killed slow autotest db queries',
                    'Below are killed queries:\n%s' % killed_queries)
    except Exception as err:
        # Alert by email before propagating the failure.
        logging.error('Failed to kill slow db queries.\n%s', err)
        gmail_lib.send_email(
                options.mail,
                'Failed to kill slow autotest db queries.',
                ('Error occurred during killing slow db queries:\n%s\n'
                 'Detailed logs can be found in /var/log/slow_queries.log on db '
                 'backup server.\nTo avoid db crash, please check ASAP.') % err)
        raise
def send_notification_email(email_list, title, msg):
    """Send a notification email to every address in email_list.

    @param email_list: list of recipient email addresses; callers may
                       build it from the '--email'/'-e' config options,
                       so multiple addresses are supported.
    @param title: the title of the email to be sent.
    @param msg: the content of the email to be sent.
    """
    recipients = ','.join(email_list)
    gmail_lib.send_email(recipients, title, msg)
def main():
    """Collect and report per-board/per-pool host stats.

    Computes the collection window from --span (ending at midnight for a
    24-hour span, otherwise at the start of the current hour), runs
    report_stats() for every (board, pool) combination found in metadb,
    and emails accumulated errors to --email if one was given.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--span', type=int, dest='span', default=1,
                        help=('Number of hours that stats should be collected. '
                              'If it is set to 24, the end time of stats being '
                              'collected will set to the mid of the night. '
                              'Default is set to 1 hour.'))
    parser.add_argument('-e', '--email', dest='email', default=None,
                        help='Email any errors to the given email address.')
    options = parser.parse_args()

    boards = host_label_utils.get_all_boards()
    pools = ['bvt', 'suites', 'cq']

    # Anchor the end of the window: midnight today for a full-day span,
    # otherwise the start of the current hour.
    if options.span == 24:
        today = datetime.combine(date.today(), datetime.min.time())
        end_time = time_utils.to_epoch_time(today)
    else:
        now = datetime.now()
        end_time = datetime(year=now.year, month=now.month, day=now.day,
                            hour=now.hour)
        end_time = time_utils.to_epoch_time(end_time)

    start_time = end_time - timedelta(hours=options.span).total_seconds()
    print('Collecting host stats from %s to %s...' %
          (time_utils.epoch_time_to_date_string(start_time),
           time_utils.epoch_time_to_date_string(end_time)))

    errors = []
    if not boards:
        errors.append('Error! No board found in metadb.')
    for board in boards:
        for pool in pools:
            error = report_stats(board, pool, start_time, end_time,
                                 options.span)
            if error:
                errors.append(error)
    if options.email and errors:
        # Subject typo fixed: 'occured' -> 'occurred'.
        gmail_lib.send_email(options.email,
                             'Error occurred when collecting host stats.',
                             '\n'.join(errors))
def send_email(self, to_string, subject, body):
    """Mails out emails to the addresses listed in to_string.

    Silently returns if to_string yields no addresses; a send failure is
    logged rather than raised.

    @param to_string: is split into a list which can be delimited by any of:
                      ';', ',', ':' or any whitespace.
    @param subject: String, email subject.
    @param body: String, message body.
    """
    # Create list from string removing empty strings from the list.
    # Raw-string character class replaces the non-raw alternation
    # '\s|,|;|:' whose '\s' is an invalid escape (DeprecationWarning in
    # Python 3.6+); the split behavior is identical.
    to_list = [x for x in re.split(r'[\s,;:]', to_string) if x]
    if not to_list:
        return
    to_string = ','.join(to_list)
    try:
        gmail_lib.send_email(to_string, subject, body)
    except Exception:
        # Best-effort notification: log and swallow so the caller
        # is not interrupted by a mail failure.
        logging.exception('Sending email failed:')
def _send_email(arguments, tag, subject, recipients, body): """Send an inventory e-mail message. The message is logged in the selected log directory using `tag` for the file name. If the --print option was requested, the message is neither logged nor sent, but merely printed on stdout. @param arguments Parsed command-line options. @param tag Tag identifying the inventory for logging purposes. @param subject E-mail Subject: header line. @param recipients E-mail addresses for the To: header line. @param body E-mail message body. """ logging.debug('Generating email: "%s"', subject) all_recipients = ', '.join(recipients) report_body = '\n'.join([ 'To: %s' % all_recipients, 'Subject: %s' % subject, '', body, '']) if arguments.debug: print report_body else: filename = os.path.join(arguments.logdir, tag) try: report_file = open(filename, 'w') report_file.write(report_body) report_file.close() except EnvironmentError as e: logging.error('Failed to write %s: %s', filename, e) try: gmail_lib.send_email(all_recipients, subject, body) except Exception as e: logging.error('Failed to send e-mail to %s: %s', all_recipients, e)
def main():
    """Entry point for test_push script.

    Runs the push-to-prod qualification suites (prod, AU, asynchronous
    dummy, and testbed) in parallel child processes, monitoring their
    results via a shared queue.  If everything passes, the prod-next
    branch is pushed for UPDATED_REPOS and a success email is sent;
    on failure running suite jobs are aborted (unless
    --continue_on_failure), a do-not-push email is sent, and the
    exception is re-raised.  DUTs are re-verified on exit either way.
    """
    arguments = parse_arguments()
    updated_repo_heads = get_head_of_repos(UPDATED_REPOS)
    updated_repo_msg = '\n'.join(
            ['%s: %s' % (k, v) for k, v in updated_repo_heads.iteritems()])

    try:
        # Use daemon flag will kill child processes when parent process fails.
        use_daemon = not arguments.continue_on_failure
        # Verify all the DUTs at the beginning of testing push.
        reverify_all_push_duts()
        time.sleep(15)  # Wait 15 secs for the verify test to start.
        check_dut_inventory(arguments.num_duts, arguments.pool)
        # Children report their suite results through this queue.
        queue = multiprocessing.Queue()

        push_to_prod_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, PUSH_TO_PROD_SUITE, EXPECTED_TEST_RESULTS,
                      arguments))
        push_to_prod_suite.daemon = use_daemon
        push_to_prod_suite.start()

        # TODO(dshi): Remove following line after crbug.com/267644 is fixed.
        # Also, merge EXPECTED_TEST_RESULTS_AU to EXPECTED_TEST_RESULTS
        # AU suite will be on shard until crbug.com/634049 is fixed.
        au_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, AU_SUITE, EXPECTED_TEST_RESULTS_AU,
                      arguments, True))
        au_suite.daemon = use_daemon
        au_suite.start()

        # suite test with --create_and_return flag
        asynchronous_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, DUMMY_SUITE, EXPECTED_TEST_RESULTS_DUMMY,
                      arguments, False, True))
        asynchronous_suite.daemon = True
        asynchronous_suite.start()

        # Test suite for testbed
        testbed_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, TESTBED_SUITE, EXPECTED_TEST_RESULTS_TESTBED,
                      arguments, False, False, True))
        testbed_suite.daemon = use_daemon
        testbed_suite.start()

        # Poll results while any child is alive; a final check_queue
        # drains anything reported between the last poll and the joins.
        while (push_to_prod_suite.is_alive() or au_suite.is_alive() or
               asynchronous_suite.is_alive() or testbed_suite.is_alive()):
            check_queue(queue)
            time.sleep(5)
        check_queue(queue)

        push_to_prod_suite.join()
        au_suite.join()
        asynchronous_suite.join()
        testbed_suite.join()

        # All tests pass, push prod-next branch for UPDATED_REPOS.
        push_prod_next_branch(updated_repo_heads)
    except Exception as e:
        print 'Test for pushing to prod failed:\n'
        print str(e)
        # Abort running jobs when choose not to continue when there is failure.
        # NOTE(review): all_suite_ids and run_suite_output appear to be
        # module-level globals maintained elsewhere — confirm.
        if not arguments.continue_on_failure:
            for suite_id in all_suite_ids:
                if AFE.get_jobs(id=suite_id, finished=False):
                    AFE.run('abort_host_queue_entries', job=suite_id)
        # Send out email about the test failure.
        if arguments.email:
            gmail_lib.send_email(
                    arguments.email,
                    'Test for pushing to prod failed. Do NOT push!',
                    ('Test CLs of the following repos failed. Below are the '
                     'repos and the corresponding test HEAD.\n\n%s\n\n.'
                     'Error occurred during test:\n\n%s\n\n'
                     'All logs have been saved to '
                     '/var/log/test_push/test_push.log on push master. Detail '
                     'debugging info can be found at go/push-to-prod' %
                     (updated_repo_msg, str(e)) + '\n'.join(run_suite_output)))
        raise
    finally:
        # Reverify all the hosts
        reverify_all_push_duts()

    message = (
            '\nAll tests are completed successfully, the prod branch of the '
            'following repos ready to be pushed to the hash list below.\n'
            '%s\n\n\nInstructions for pushing to prod are available at '
            'https://goto.google.com/autotest-to-prod' % updated_repo_msg)
    print message
    # Send out email about test completed successfully.
    if arguments.email:
        gmail_lib.send_email(
                arguments.email,
                'Test for pushing to prod completed successfully',
                message)
def main():
    """Entry point for test_push script.

    Runs the push-to-prod qualification suites (prod, AU, shard dummy,
    and asynchronous dummy) in parallel child processes, monitoring
    their results via a shared queue and verifying bug filing/deduping
    once the prod suite finishes.  On any failure a do-not-push email
    is sent and the exception is re-raised; otherwise a success email
    is sent.
    """
    arguments = parse_arguments()

    try:
        # Close existing bugs. New bug should be filed in dummy_Fail test.
        old_issue_ids = close_bug()
        # Children report their suite results through this queue.
        queue = multiprocessing.Queue()
        push_to_prod_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, PUSH_TO_PROD_SUITE, EXPECTED_TEST_RESULTS,
                      arguments))
        push_to_prod_suite.start()
        # TODO(dshi): Remove following line after crbug.com/267644 is fixed.
        # Also, merge EXPECTED_TEST_RESULTS_AU to EXPECTED_TEST_RESULTS
        au_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, AU_SUITE, EXPECTED_TEST_RESULTS_AU, arguments))
        au_suite.start()
        shard_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, DUMMY_SUITE, EXPECTED_TEST_RESULTS_DUMMY,
                      arguments, True))
        shard_suite.start()
        # suite test with --create_and_return flag
        asynchronous_suite = multiprocessing.Process(
                target=test_suite_wrapper,
                args=(queue, DUMMY_SUITE, EXPECTED_TEST_RESULTS_DUMMY,
                      arguments, True, True))
        asynchronous_suite.start()

        bug_filing_checked = False
        # Poll results while any child is alive; a final check_queue
        # drains anything reported between the last poll and the joins.
        while (push_to_prod_suite.is_alive() or au_suite.is_alive() or
               shard_suite.is_alive() or asynchronous_suite.is_alive()):
            check_queue(queue)
            # Check bug filing results to fail early if bug filing failed.
            if not bug_filing_checked and not push_to_prod_suite.is_alive():
                check_bug_filed_and_deduped(old_issue_ids)
                bug_filing_checked = True
            time.sleep(5)
        check_queue(queue)

        push_to_prod_suite.join()
        au_suite.join()
        shard_suite.join()
        asynchronous_suite.join()
    except Exception as e:
        print 'Test for pushing to prod failed:\n'
        print str(e)
        # Send out email about the test failure.
        # NOTE(review): run_suite_output appears to be a module-level
        # global maintained elsewhere — confirm.
        if arguments.email:
            gmail_lib.send_email(
                    arguments.email,
                    'Test for pushing to prod failed. Do NOT push!',
                    ('Errors occurred during the test:\n\n%s\n\n' % str(e) +
                     'run_suite output:\n\n%s' % '\n'.join(run_suite_output)))
        raise

    message = ('\nAll tests are completed successfully, prod branch is ready to'
               ' be pushed.')
    print message
    # Send out email about test completed successfully.
    if arguments.email:
        gmail_lib.send_email(
                arguments.email,
                'Test for pushing to prod completed successfully',
                message)