def test_notifications_for_cancelled_jobs():

    print "Creating proxy ..."
    cream_testing.create_proxy("sarabINFN","dteam")

    print "Creating jdl"
    jdl_fname = cream_testing.sleep_jdl("dteam","300", "/tmp")

    print "Submitting job " + jdl_fname
    cream_job_id = cream_testing.submit_job(jdl_fname, "cream-06.pd.infn.it:8443/cream-pbs-cert" )
    print cream_job_id
    time.sleep(60)

    print "Verifying status of job " + cream_job_id
    cream_regression.job_status_should_be_in(cream_job_id, ['PENDING', 'RUNNING', 'REALLY-RUNNING'])

    print "Cancelling job " + cream_job_id
    cream_testing.cancel_job(cream_job_id)

    print "Sleeping 200sec"
    time.sleep(200)

    job_num = blah_testing.get_job_num_from_jid(cream_job_id)
    #job_num = "985342878"
    blah_parser_log_file_name = blah_testing.get_blah_parser_log_file_name()
    local_blah_parser_log_file = cream_regression.get_file_from_ce(blah_parser_log_file_name, "/tmp")
    #local_blah_parser_log_file = "/tmp/local_copy_of_a_cream_file"
    time.sleep(200)

    notifications_list = blah_testing.get_notifications_in_blah_parser_log(local_blah_parser_log_file, job_num)

    print notifications_list

    print blah_testing.check_notifications_for_cancelled(notifications_list)
Example #2
def test_notifications_for_normally_finished_jobs():

    # Create a VOMS proxy for the dteam VO
    cream_testing.create_proxy("sarabINFN", "dteam")

    # Create a sleep JDL (duration 100) for the dteam VO
    jdl_fname = cream_testing.sleep_jdl("dteam", "100", "/tmp")

    # Submit the job to the CREAM CE
    cream_job_id = cream_testing.submit_job(
        jdl_fname, "cream-06.pd.infn.it:8443/cream-pbs-cert")
    print cream_job_id
    time.sleep(5)

    # Get the final job status
    print "Getting final job status ... "
    final_job_status = cream_testing.get_final_status(cream_job_id)
    print "Final job status = " + final_job_status

    time.sleep(200)

    job_num = blah_testing.get_job_num_from_jid(cream_job_id)

    blah_parser_log_file_name = blah_testing.get_blah_parser_log_file_name()
    local_blah_parser_log_file = cream_regression.get_file_from_ce(
        blah_parser_log_file_name, "/tmp")

    notifications_list = blah_testing.get_notifications_in_blah_parser_log(
        local_blah_parser_log_file, job_num)

    print notifications_list

    print blah_testing.check_notifications_for_normally_finished(
        notifications_list)
Example #5
def saturate_batch_system(jdl_file_name='empty'):
    '''
       | Description: | Reads the total CPU number of the batch cluster from the test suite        |
       |              | configuration file and submits an equal number of jobs in order to         |
       |              | saturate the batch system. Submission parameters are read from the         |
       |              | configuration file as well.                                                 |
       | Arguments:   | jdl_file_name | optional JDL file to submit; if left to 'empty' a sleep     |
       |              |               | JDL is created in the configured tmp directory               |
       | Returns:     | The list of cream job ids of the submitted jobs                             |
       | Exceptions:  | TestsuiteError | if an error is present in parameters read from config       |
    '''
    my_conf = cream_testsuite_conf.CreamTestsuiteConfSingleton()
    tot_cpu_in_batch_cluster = my_conf.getParam('batch_system', 'tot_cpu_num')
    vo = my_conf.getParam('submission_info', 'vo')
    proxy_pass = my_conf.getParam('submission_info', 'proxy_pass')
    ce_endpoint = my_conf.getParam('submission_info', 'ce_endpoint')
    cream_queue = my_conf.getParam('submission_info', 'cream_queue')
    #output_dir = my_conf.getParam('testsuite_behaviour','tmp_dir')
    output_dir = regression_vars.tmp_dir

    ce = ce_endpoint + "/" + cream_queue

    if len(tot_cpu_in_batch_cluster) == 0:
        raise testsuite_exception.TestsuiteError(
            "Mandatory parameter tot_cpu_num is empty. Check testsuite configuration"
        )
    if len(vo) == 0:
        raise testsuite_exception.TestsuiteError(
            "Mandatory parameter vo is empty. Check testsuite configuration")
    if len(output_dir) == 0:
        raise testsuite_exception.TestsuiteError(
            "Mandatory parameter tmp_dir is empty. Check testsuite configuration"
        )

    print "Creating proxy ..."
    cream_testing.create_proxy(proxy_pass, vo)

    jdl_fname = ""
    if jdl_file_name == 'empty':
        print "Creating jdl"
        jdl_fname = cream_testing.sleep_jdl(vo, "300", output_dir)
    else:
        jdl_fname = jdl_file_name

    print "Submitting " + tot_cpu_in_batch_cluster + " jobs ..."
    cream_job_ids = submit_n_jobs(tot_cpu_in_batch_cluster, jdl_fname)

    print cream_job_ids

    return cream_job_ids
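
The sketch below is not part of the test suite; it only illustrates how the saturating jobs could be cleaned up reliably even if the step run against the saturated cluster fails. The helper name is hypothetical, and it assumes that saturate_batch_system() and cancel_list_of_jobs() live in the same module (the tests above call them as blah_testing.saturate_batch_system and blah_testing.cancel_list_of_jobs).

def with_saturated_batch_system(action):
    '''Hypothetical helper: run "action" while the batch system is saturated,
       then always cancel the saturating jobs.'''
    job_ids = saturate_batch_system()       # may raise TestsuiteError on bad config
    try:
        return action()                     # e.g. submit and suspend the job under test
    finally:
        cancel_list_of_jobs(job_ids)        # free the batch system in any case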
Example #7
import cream_regression, cream_testing, batch_sys_mng, time, datetime, sys

print "Creating proxy"
cream_testing.create_proxy("sarabINFN","dteam")
print "proxy created"

print "Creating jdl"
jdl_fname = cream_regression.create_jdl_CREAM_111("/tmp")
print "jdl created"

print "Submitting job"
#  submit_job(jdl_path,ce_endpoint,delegId=None)
#cream_job_id = cream_testing.submit_job(jdl_fname, "cream-06.pd.infn.it:8443/cream-pbs-cert" )
cream_job_id = cream_testing.submit_job(jdl_fname, "cream-23.pd.infn.it:8443/cream-lsf-cert" )
print "Job submitted"


print "waiting until job is finished"
final_job_status = cream_testing.get_final_status(cream_job_id)

print "Job finished. Job final status = " + final_job_status

print "Getting job output"
output_path = cream_regression.get_job_output("/tmp", cream_job_id)

print "Checking job output"
result = cream_regression.check_job_out_CREAM_111(output_path)

print result
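
The signature noted in the comment above, submit_job(jdl_path, ce_endpoint, delegId=None), indicates a third, optional delegation argument. A minimal sketch of passing it explicitly follows; the delegation id value is purely illustrative and this variant is not part of the original script.

# Hypothetical variant: pass an explicit delegation id as the third argument.
deleg_id = "CREAM_111_deleg"   # illustrative value, not taken from the test suite
cream_job_id = cream_testing.submit_job(jdl_fname, "cream-23.pd.infn.it:8443/cream-lsf-cert", deleg_id)
print "Job submitted with delegation id " + deleg_id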
Example #9
def test_notifications_for_suspended_resumed_jobs():

    print "Creating proxy ..."
    cream_testing.create_proxy("sarabINFN", "dteam")

    print "Creating jdl"
    jdl_fname = cream_testing.sleep_jdl("dteam", "300", "/tmp")

    # With PBS and SGE it is possible to suspend only IDLE (i.e. not yet running) jobs.
    # To perform the test, first saturate the batch system, then submit the job that
    # will be under test.
    # At the end of the test, cancel all submitted jobs to free the batch system.
    print "Saturating batch system"
    job_ids_list = blah_testing.saturate_batch_system()

    print "Submitting job " + jdl_fname
    cream_job_id = cream_testing.submit_job(
        jdl_fname, "cream-06.pd.infn.it:8443/cream-pbs-cert")
    print cream_job_id
    print "Sleeping 1min"
    time.sleep(60)

    print "Verifying status of job " + cream_job_id
    cream_regression.job_status_should_be_in(cream_job_id, ['IDLE'])

    print "Suspending job ..."
    cream_testing.suspend_job(cream_job_id)

    print "Sleeping 10sec"
    time.sleep(10)

    # Check that the job is suspended on the batch system: it is enough to check the status and verify it is 'HELD'.
    print "Verifying status of job " + cream_job_id + ". Should be in 'HELD'"
    cream_regression.job_status_should_be_in(cream_job_id, ['HELD'])

    #count = int(len(job_ids_list)/4)
    #print "Sleep " + str(count) + "sec"
    #time.sleep(count)

    print "Cancel job saturating batch system"
    blah_testing.cancel_list_of_jobs(job_ids_list)

    print "Sleeping 5 min ..."
    time.sleep(300)

    print "Resuming job ..."
    cream_testing.resume_job(cream_job_id)

    print "Getting final job status ... "
    final_job_status = cream_testing.get_final_status(cream_job_id)
    print "Final job status = " + final_job_status

    print "Sleeping 30sec"
    time.sleep(30)

    job_num = blah_testing.get_job_num_from_jid(cream_job_id)

    blah_parser_log_file_name = blah_testing.get_blah_parser_log_file_name()
    local_blah_parser_log_file = cream_regression.get_file_from_ce(
        blah_parser_log_file_name, "/tmp")

    print "Sleeping 6min."
    time.sleep(360)

    print "Get notifications list in blah parser log file local copy"
    notifications_list = blah_testing.get_notifications_in_blah_parser_log(
        local_blah_parser_log_file, job_num)

    print notifications_list

    print blah_testing.check_notifications_for_resumed(notifications_list)

    print "Cancelling all jobs..."
    cream_testing.cancel_all_jobs("cream-06.pd.infn.it:8443")
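
Every test above repeats the same four steps to collect the BLAH parser notifications for a job: resolve the batch job number from the CREAM job id, look up the blah parser log file name, copy the log from the CE, and extract the notifications for that job number. A sketch of a hypothetical helper factoring out this pattern is shown below; the helper name and the default local directory are assumptions, while the individual calls are exactly those used in the tests and assume blah_testing and cream_regression are imported as in the examples above.

def get_blah_notifications_for_job(cream_job_id, local_dir="/tmp"):
    '''Hypothetical helper: fetch the BLAH parser log from the CE and return
       the notifications recorded for the given CREAM job id.'''
    job_num = blah_testing.get_job_num_from_jid(cream_job_id)
    blah_parser_log_file_name = blah_testing.get_blah_parser_log_file_name()
    local_blah_parser_log_file = cream_regression.get_file_from_ce(blah_parser_log_file_name, local_dir)
    return blah_testing.get_notifications_in_blah_parser_log(local_blah_parser_log_file, job_num)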