def test_flow_scope_job(api_type):
    """Invoking a job on a flow controller whose scope has ended raises FlowScopeException.

    Neither job is expected to be invoked (expect_invocations=0) - every invoke below
    happens out of scope, aborting the flow before any job runs.
    """
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j1', max_fails=0, expect_invocations=0, expect_order=None)
        api.job('j2', max_fails=0, expect_invocations=0, expect_order=None)

        with raises(FlowScopeException):
            with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            # ctrl1's scope has ended - invoking now must raise
            ctrl1.invoke('j1')

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            ctrl1.invoke('j1')

        with raises(FlowScopeException):
            with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.parallel() as ctrl2:
                    pass
            # Nested controller used after the whole flow has exited
            ctrl2.invoke('j2')

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.serial() as ctrl2:
                    ctrl2.invoke('j1')
                # NOTE(review): indentation reconstructed - assumed inside ctrl1 but after
                # ctrl2's scope closed, so the flow aborts before running (j1 expects 0 invocations)
                ctrl2.invoke('j2')
def test_flow_scope_parallel(api_type):
    """Creating nested flows on an out-of-scope parallel controller raises FlowScopeException."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            # ctrl1's scope has ended - creating a nested flow must raise
            with ctrl1.parallel(1):
                pass

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            with ctrl1.serial(1):
                pass

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.parallel() as ctrl2:
                    ctrl2.parallel(1)
                # NOTE(review): indentation reconstructed - this call is assumed to be
                # after ctrl2's scope ended, while ctrl1 is still open
                ctrl2.parallel(1)

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.parallel() as ctrl2:
                    # Creating a new sub-flow on ctrl1 while ctrl2 is the open scope
                    ctrl1.parallel(1)
def test_flow_scope_job():
    """Mock-api variant: invoking on an out-of-scope controller raises FlowScopeException.

    "j1" expects one invocation: the last flow below completes (invoking j1) before the
    out-of-scope ctrl2.invoke("j2") raises. "j2" is never invoked.
    """
    with mock_api.api(__file__) as api:
        api.flow_job()
        api.job("j1", 0.01, max_fails=0, expect_invocations=1, expect_order=1)
        api.job("j2", 0.01, max_fails=0, expect_invocations=0, expect_order=None)

        with raises(FlowScopeException):
            with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            # Out of scope - the 'with' block has exited
            ctrl1.invoke("j1")

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            ctrl1.invoke("j1")

        with raises(FlowScopeException):
            with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.parallel() as ctrl2:
                    pass
            ctrl2.invoke("j2")

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.serial() as ctrl2:
                    ctrl2.invoke("j1")
            # NOTE(review): indentation reconstructed - assumed after the flow has run
            # (hence j1's expect_invocations=1), then this out-of-scope invoke raises
            ctrl2.invoke("j2")
def test_flow_scope_parallel():
    """Mock-api variant: nested flows on an out-of-scope controller raise FlowScopeException."""
    with mock_api.api(__file__) as api:
        api.flow_job()

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            # ctrl1's scope has ended
            with ctrl1.parallel(1):
                pass

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                pass
            with ctrl1.serial(1):
                pass

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.parallel() as ctrl2:
                    ctrl2.parallel(1)
                # NOTE(review): indentation reconstructed - assumed out of ctrl2's scope
                ctrl2.parallel(1)

        with raises(FlowScopeException):
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
                with ctrl1.parallel() as ctrl2:
                    # New sub-flow on ctrl1 while ctrl2 is the open scope
                    ctrl1.parallel(1)
def test_invoke_unchecked_parallel(api_type):
    """Unchecked invocations run alongside a checked job in a parallel flow."""
    with api_select.api(__file__, api_type, login=True) as api:
        # The long-running unchecked job outlives the flow: result stays unknown.
        api.job('j11_unchecked', max_fails=0, expect_invocations=1, expect_order=None, exec_time=30, unknown_result=True)
        api.job('j12', max_fails=0, expect_invocations=1, expect_order=1, exec_time=5)
        api.job('j13_unchecked', max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as flow:
            flow.invoke_unchecked('j11_unchecked')
            flow.invoke('j12')
            flow.invoke_unchecked('j13_unchecked')
def test_propagation_warn_only_nested_parallel_serial(api_type):
    """A failing job in a nested serial flow with FAILURE_TO_UNSTABLE propagation
    makes the final flow result UNSTABLE instead of FAILURE.
    """
    with api_select.api(__file__, api_type, login=True) as api:
        api.flow_job()
        api.job('j11', max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j21', max_fails=0, expect_invocations=1, expect_order=1)
        # j22_fail fails once; j23 is never reached because the serial flow stops on failure
        api.job('j22_fail', max_fails=1, expect_invocations=1, expect_order=1, serial=True)
        api.job('j23', max_fails=0, expect_invocations=0, expect_order=None)

        with raises(FinalResultException) as exinfo:
            with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
                ctrl1.invoke('j11')
                with ctrl1.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
                    ctrl2.invoke('j21')
                    ctrl2.invoke('j22_fail')
                    ctrl2.invoke('j23')

        assert exinfo.value.result == BuildResult.UNSTABLE

    # NOTE(review): a pytest test's return value is normally ignored - presumably a
    # marker consumed by a custom driver/wrapper; verify against the test runner
    return 77
def test_timeout_inner_level_serial(api_type):
    """Timeout on a nested serial flow raises FlowTimeoutException naming the unfinished jobs."""
    with api_select.api(__file__, api_type, login=True) as api:
        api.job('quick11', max_fails=0, expect_invocations=1, expect_order=None)
        api.job('quick21', max_fails=0, expect_invocations=1, expect_order=None)
        # Runs longer than the inner flow's 8s timeout; its final result stays unknown
        api.job('wait20', max_fails=0, expect_invocations=1, expect_order=None, exec_time=20, unknown_result=True, serial=True)

        with raises(FlowTimeoutException) as exinfo:
            # Outer timeout is deliberately huge - only the inner serial flow times out
            with parallel(api, timeout=3000, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
                ctrl1.invoke('quick11')
                with ctrl1.serial(timeout=8) as ctrl2:
                    ctrl2.invoke('quick21')
                    ctrl2.invoke('wait20')

        assert "Timeout after:" in str(exinfo.value)
        assert ", in flow ['jenkinsflow_test__timeout_inner_level_serial__quick21', 'jenkinsflow_test__timeout_inner_level_serial__wait20']. Unfinished jobs:['jenkinsflow_test__timeout_inner_level_serial__wait20']" in str(
            exinfo.value)
def test_single_level_errors_parallel():
    """Failing children in a flat parallel flow raise FailedChildJobsException."""
    with mock_api.api(__file__) as api:
        api.flow_job()
        # One parameterized job plus paired ok/fail jobs of varying duration.
        api.job(
            "quick",
            exec_time=0.01,
            max_fails=0,
            expect_invocations=1,
            expect_order=1,
            params=(("s1", "", "desc"), ("c1", "false", "desc")),
        )
        api.job("quick_fail", exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
        api.job("wait10", exec_time=10, max_fails=0, expect_invocations=1, expect_order=1)
        api.job("wait10_fail", exec_time=10, max_fails=1, expect_invocations=1, expect_order=1)
        api.job("wait5", exec_time=5, max_fails=0, expect_invocations=1, expect_order=1)
        api.job("wait5_fail", exec_time=5, max_fails=1, expect_invocations=1, expect_order=1)

        with raises(FailedChildJobsException):
            with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as flow:
                flow.invoke("quick", s1="", c1=False)
                flow.invoke("quick_fail")
                flow.invoke("wait10")
                flow.invoke("wait10_fail")
                flow.invoke("wait5")
                flow.invoke("wait5_fail")
def test_timeout_inner_level_serial():
    """Mock-api variant: inner serial flow timeout raises FlowTimeoutException.

    Checks the exception's .message attribute (older-style API than str(exc)).
    """
    with mock_api.api(__file__) as api:
        api.job("quick11", exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=None)
        api.job("quick21", exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=None)
        # Exceeds the inner 8s timeout; final result stays unknown
        api.job(
            "wait20",
            exec_time=20,
            max_fails=0,
            expect_invocations=1,
            expect_order=None,
            unknown_result=True,
            serial=True,
        )

        with raises(FlowTimeoutException) as exinfo:
            # Outer timeout is deliberately huge - only the inner serial flow times out
            with parallel(api, timeout=3000, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
                ctrl1.invoke("quick11")
                with ctrl1.serial(timeout=8) as ctrl2:
                    ctrl2.invoke("quick21")
                    ctrl2.invoke("wait20")

        assert "Timeout after:" in exinfo.value.message
        assert (
            ", in flow ['jenkinsflow_test__timeout_inner_level_serial__quick21', 'jenkinsflow_test__timeout_inner_level_serial__wait20']. Unfinished jobs:['jenkinsflow_test__timeout_inner_level_serial__wait20']"
            in exinfo.value.message
        )
def test_reporting_invocation_parallel(api_type, capsys):
    """Flow/job invocation reporting lines appear on stdout in the expected order."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j11', max_fails=0, expect_invocations=1, expect_order=1)
        # j12 starts late and has a non-default initial build number (7 -> build 8 expected)
        api.job('j12', max_fails=0, expect_invocations=1, invocation_delay=1.0, exec_time=1.5, initial_buildno=7, expect_order=2)

        # report_interval is scaled down by the api speedup so reports still happen
        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=0.5/api.speedup) as ctrl1:
            ctrl1.invoke('j11')
            ctrl1.invoke('j12')

        sout, _ = capsys.readouterr()
        # Flow header and both job invocation lines
        assert lines_in(
            api_type, sout,
            "^Flow Invocation (1/1,1/1): ('jenkinsflow_test__reporting_invocation_parallel__j11', 'jenkinsflow_test__reporting_invocation_parallel__j12')",
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j11",
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j12",
        )
        # Each job's invocation line is followed by its build-started message
        assert lines_in(
            api_type, sout,
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j11",
            build_started_msg(api, "jenkinsflow_test__reporting_invocation_parallel__j11", 1),
        )
        assert lines_in(
            api_type, sout,
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j12",
            build_started_msg(api, "jenkinsflow_test__reporting_invocation_parallel__j12", 8)
        )
def test_abort(capsys):
    """An externally aborted build reports ABORTED and fails the parallel flow."""
    with api_select.api(__file__, login=True) as api:
        # Script api cannot be aborted this way - skip
        if api.api_type == ApiType.SCRIPT:
            return
        api.flow_job()
        api.job('quick', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('wait10_abort', exec_time=10, max_fails=0, expect_invocations=1, expect_order=1, final_result='ABORTED')
        api.job('wait1_fail', exec_time=1, max_fails=1, expect_invocations=1, expect_order=1)

        if api.api_type != ApiType.MOCK:
            # Side-band process aborts the running 'wait10_abort' build while the flow waits
            subprocess32.Popen([sys.executable, jp(here, "abort_job.py"), __file__, 'abort', 'wait10_abort'])

        with raises(FailedChildJobsException) as exinfo:
            with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl:
                ctrl.invoke('quick')
                ctrl.invoke('wait10_abort')
                ctrl.invoke('wait1_fail')

        assert "wait10_abort" in exinfo.value.message
        assert "wait1_fail" in exinfo.value.message

        sout, _ = capsys.readouterr()
        assert_lines_in(
            sout,
            re.compile("^ABORTED: 'jenkinsflow_test__abort__wait10_abort' - build: .*/jenkinsflow_test__abort__wait10_abort.* after:"),
        )
def test_single_level_errors_parallel(capsys):
    """Failing children raise FailedChildJobsException listing each failed job,
    and the console output contains the FAILURE report line for 'quick_fail'."""
    with mock_api.api(__file__) as api:
        api.flow_job()
        api.job('quick', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1, params=(('s1', '', 'desc'), ('c1', 'false', 'desc')))
        api.job('quick_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
        api.job('wait10', exec_time=10, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('wait10_fail', exec_time=10, max_fails=1, expect_invocations=1, expect_order=1)
        api.job('wait5', exec_time=5, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('wait5_fail', exec_time=5, max_fails=1, expect_invocations=1, expect_order=1)

        with raises(FailedChildJobsException) as exinfo:
            with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl:
                ctrl.invoke('quick', s1='', c1=False)
                ctrl.invoke('quick_fail')
                ctrl.invoke('wait10')
                ctrl.invoke('wait10_fail')
                ctrl.invoke('wait5')
                ctrl.invoke('wait5_fail')

        # All three failing jobs must be named in the exception
        assert "quick_fail" in exinfo.value.message
        assert "wait10_fail" in exinfo.value.message
        assert "wait5_fail" in exinfo.value.message

        sout, _ = capsys.readouterr()
        assert "FAILURE: 'jenkinsflow_test__single_level_errors_parallel__quick_fail' - build: " in sout
        assert "job/jenkinsflow_test__single_level_errors_parallel__quick_fail/" in sout
        assert "/console after:" in sout
def test_single_level_errors_parallel(api_type, capsys):
    """api_type variant: failing children raise FailedChildJobsException and a
    regex-matched FAILURE line appears in the captured output."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('quick', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1, params=(('s1', '', 'desc'), ('c1', 'false', 'desc')))
        api.job('quick_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
        api.job('wait10', exec_time=10, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('wait10_fail', exec_time=10, max_fails=1, expect_invocations=1, expect_order=1)
        api.job('wait5', exec_time=5, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('wait5_fail', exec_time=5, max_fails=1, expect_invocations=1, expect_order=1)

        with raises(FailedChildJobsException) as exinfo:
            with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl:
                ctrl.invoke('quick', s1='', c1=False)
                ctrl.invoke('quick_fail')
                ctrl.invoke('wait10')
                ctrl.invoke('wait10_fail')
                ctrl.invoke('wait5')
                ctrl.invoke('wait5_fail')

        assert "quick_fail" in str(exinfo.value)
        assert "wait10_fail" in str(exinfo.value)
        assert "wait5_fail" in str(exinfo.value)

        sout, _ = capsys.readouterr()
        assert_lines_in(
            api_type, sout,
            re.compile("^FAILURE: 'jenkinsflow_test__single_level_errors_parallel__quick_fail' - build: .*/jenkinsflow_test__single_level_errors_parallel__quick_fail.* after:"),
        )
def test_replace_invocation_class_log_override(api_type, capsys):
    """A custom Invocation subclass overriding console_url() is used by the flow,
    so its return value shows up in the flow's console output."""
    # Pick the Invocation base matching the api type and subclass it
    if api_type == ApiType.JENKINS:
        from jenkinsflow.jenkins_api import Invocation

        class LogInvocation(Invocation):
            def console_url(self):
                return "HELLO LOG"

    elif api_type == ApiType.SCRIPT:
        from jenkinsflow.script_api import Invocation

        class LogInvocation(Invocation):
            def console_url(self):
                return "HELLO LOG"

    else:
        raise Exception("Invalid ApiType: " + repr(api_type))

    with api_select.api(__file__, api_type, invocation_class=LogInvocation) as api:
        api.flow_job()
        api.job('j1', max_fails=0, expect_invocations=1, expect_order=1, params=(('s1', '', 'desc'), ('c1', 'false', 'desc')))
        api.job('j2', max_fails=0, expect_invocations=1, expect_order=1, params=(('s1', '', 'desc'), ('c1', 'false', 'desc')))

        with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl:
            ctrl.invoke('j1', s1='', c1=False)
            ctrl.invoke('j2', s1='', c1=False)

        sout, _ = capsys.readouterr()
        # The overridden console_url() output must appear in the report
        assert "HELLO LOG" in sout
def test_multiple_invocations_parallel_same_flow_no_args_singlequeued(capsys):
    """ Jenkins automatically throws away queued builds of parameterless jobs when another build is invoked, so that a max of one build can be queued """
    with api_select.api(__file__) as api:
        if api.api_type in (ApiType.MOCK, ApiType.SCRIPT):
            # TODO
            return

        is_hudson = os.environ.get('HUDSON_URL')
        if is_hudson:
            # TODO investigate why this test fails in Hudson
            xfail("Doesn't pass in Hudson")
            return

        api.flow_job()
        num_j1_invocations = 20
        api.job('j1', exec_time=3, max_fails=0, expect_invocations=num_j1_invocations, expect_order=1)

        # Invoke the same parameterless job many times; Jenkins supersedes queued builds
        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
            for _ in range(0, num_j1_invocations):
                ctrl1.invoke('j1')

        # Note: This output order depends on the job NOT allowing concurrent builds, AND on the order of polling in jenkins_api!
        sout, _ = capsys.readouterr()
        assert_lines_in(
            sout,
            "^Invoking Job (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1",
            "^Invoking Job (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1",
            "^Invoking Job (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1",
            build_started_msg(api, "jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1", 1),
            "^SUPERSEDED: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1'",
            "^SUPERSEDED: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1'",
            "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Status RUNNING - build:",
            "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Status QUEUED - ",
            "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' stopped running",
            "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Status IDLE - build: #",
            "^SUCCESS: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1'",
            build_started_msg(api, "jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1", 2),
            "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' stopped running",
            "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Status IDLE - build: #",
            "^SUCCESS: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1'",
            "^parallel flow: (",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUCCESS",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUPERSEDED",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUPERSEDED",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUPERSEDED",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUCCESS",
            "^)",
        )

        # Make sure that first and last are SUCCESS
        # NOTE(review): these two re.match() results are discarded, so they assert
        # nothing; re.match also anchors at the start of sout, where these patterns
        # are unlikely to occur - probably 'assert re.search(...)' was intended. Verify.
        re.match("parallel flow: \\(\n *job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUCCESS", sout)
        re.match("job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUCCESS\n *\\)", sout)
def _test_demo(demo, api_type):
    """Load the demo's jobs, then run its top-level '0flow' job in a parallel flow."""
    load_demo_jobs(demo, api_type)
    flow_job_name = demo.__name__ + "__0flow"
    with api_select.api(__file__, api_type, fixed_prefix="jenkinsflow_demo__") as api:
        api.job(flow_job_name, 0.01, max_fails=0, expect_invocations=1, expect_order=1)
        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix) as flow:
            flow.invoke(flow_job_name)
def _test_demo(demo, api_type):
    """Load the demo's jobs and execute its '0flow' top-level job once."""
    load_demo_jobs(demo, api_type)
    flow_job_name = demo.__name__ + "__0flow"
    with api_select.api(__file__, api_type, fixed_prefix="jenkinsflow_demo__") as api:
        api.job(flow_job_name, max_fails=0, expect_invocations=1, expect_order=1)
        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix) as flow:
            flow.invoke(flow_job_name)
def test_propagation_warn_only_parallel(env_base_url, fake_java, capfd):
    """FAILURE_TO_UNSTABLE propagation: a failing child does not fail the flow."""
    with api_select.api(__file__) as api:
        api.flow_job()
        api.job('j1_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
        api.job('j2', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)

        # NOTE(review): 'username' and 'password' are presumably module-level globals
        # (not visible in this chunk) - verify they are defined at file scope
        with parallel(api, timeout=70, username=username, password=password, job_name_prefix=api.job_name_prefix, report_interval=3,
                      propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl1:
            ctrl1.invoke('j1_fail')
            ctrl1.invoke('j2')
def test_invoke_unchecked_dont_wait_parallel():
    """The flow finishes without waiting for a slow unchecked invocation."""
    with api_select.api(__file__, login=True) as api:
        api.flow_job()
        # Slow unchecked job (100s) far exceeds the 50s flow timeout; result stays unknown.
        api.job('j11_slow_unchecked', exec_time=100, max_fails=0, expect_invocations=1, expect_order=1, unknown_result=True)
        api.job('j12', exec_time=5, max_fails=0, expect_invocations=1, expect_order=2)

        with parallel(api, timeout=50, job_name_prefix=api.job_name_prefix, report_interval=1) as flow:
            flow.invoke_unchecked('j11_slow_unchecked')
            flow.invoke('j12')
def test_timeout_top_level_parallel():
    """A top-level parallel flow that exceeds its timeout raises FlowTimeoutException."""
    with api_select.api(__file__) as api:
        api.job('quick', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=None, params=(('s1', '', 'desc'), ('c1', 'false', 'desc')))
        # Runs well past the 8s flow timeout; its result is never known.
        api.job('wait20', exec_time=20, max_fails=0, expect_invocations=1, expect_order=None, unknown_result=True)

        with raises(FlowTimeoutException):
            with parallel(api, timeout=8, job_name_prefix=api.job_name_prefix, report_interval=3) as flow:
                flow.invoke('quick', s1='', c1=False)
                flow.invoke('wait20')
def test_propagate_unstable_parallel_single_unstable(env_base_url, fake_java, capfd):
    """A single UNSTABLE child build makes the whole parallel flow UNSTABLE."""
    with mock_api.api(__file__) as api:
        api.flow_job()
        api.job('j11_unstable', 0.01, max_fails=0, expect_invocations=1, expect_order=1, final_result='unstable')

        with parallel(api, timeout=70, username=username, password=password, job_name_prefix=api.job_name_prefix) as flow:
            flow.invoke('j11_unstable')

        # The unstable child result propagates to the flow result
        assert flow.result == BuildResult.UNSTABLE
def test_single_level_errors_parallel(api_type, capsys):
    """Variant using exec_time kwargs and lines_in(): failing children raise
    FailedChildJobsException and the FAILURE report line appears in output."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('quick', max_fails=0, expect_invocations=1, expect_order=1, params=(('s1', '', 'desc'), ('c1', 'false', 'desc')))
        api.job('quick_fail', max_fails=1, expect_invocations=1, expect_order=1)
        api.job('wait10', max_fails=0, expect_invocations=1, expect_order=1, exec_time=10)
        api.job('wait10_fail', max_fails=1, expect_invocations=1, expect_order=1, exec_time=10)
        api.job('wait5', max_fails=0, expect_invocations=1, expect_order=1, exec_time=5)
        api.job('wait5_fail', max_fails=1, expect_invocations=1, expect_order=1, exec_time=5)

        with raises(FailedChildJobsException) as exinfo:
            with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl:
                ctrl.invoke('quick', s1='', c1=False)
                ctrl.invoke('quick_fail')
                ctrl.invoke('wait10')
                ctrl.invoke('wait10_fail')
                ctrl.invoke('wait5')
                ctrl.invoke('wait5_fail')

        # All three failing jobs must be named in the exception
        assert "quick_fail" in str(exinfo.value)
        assert "wait10_fail" in str(exinfo.value)
        assert "wait5_fail" in str(exinfo.value)

        sout, _ = capsys.readouterr()
        assert lines_in(
            api_type, sout,
            re.compile(
                "^FAILURE: 'jenkinsflow_test__single_level_errors_parallel__quick_fail' - build: .*/jenkinsflow_test__single_level_errors_parallel__quick_fail.* after:"
            ),
        )
def test_multiple_invocations_parallel_same_flow_no_args_singlequeued(api_type, capsys):
    """ Jenkins automatically throws away queued builds of parameterless jobs when another build is invoked, so that a max of one build can be queued """
    with api_select.api(__file__, api_type) as api:
        is_hudson = os.environ.get('HUDSON_URL')
        if is_hudson:
            # TODO investigate why this test fails in Hudson
            xfail("Doesn't pass in Hudson")
            return

        api.flow_job()
        num_inv = 20
        api.job('j1', max_fails=0, expect_invocations=num_inv, expect_order=1, exec_time=15)

        # Invoke the same parameterless job many times; queued builds get SUPERSEDED
        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=0.1, poll_interval=0.1) as ctrl1:
            for _ in range(0, num_inv):
                ctrl1.invoke('j1')

        sout, _ = capsys.readouterr()
        any_superseeded = re.compile(" +job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-[0-9]+ SUPERSEDED")
        assert lines_in(
            api_type, sout,
            "^Job Invocation-1 (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1",
            "^Job Invocation-5 (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1",
            "^Job Invocation-{} (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1".format(num_inv),
            (
                build_started_msg(api, "jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1", 1, invocation_number=1),
                "^SUPERSEDED: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1'",
                "^SUPERSEDED: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1'",
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-1 Status RUNNING - build:",
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-{} Status QUEUED - ".format(num_inv),
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-1 stopped running",
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-1 Status IDLE - build: #",
                "^SUCCESS: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-1",
                build_started_msg(api, "jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1", 2, invocation_number=num_inv),
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-{} stopped running".format(num_inv),
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-{} Status IDLE - build: #".format(num_inv),
                "^SUCCESS: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-{}".format(num_inv),
            ),
            "^parallel flow: (",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-1 SUCCESS",
            # python3 TODO: *[any_superseeded for _ in range(0, num_inv)]
            any_superseeded,
            any_superseeded,
            any_superseeded,
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' Invocation-{} SUCCESS".format(num_inv),
            "^)",
        )

        # Make sure that first and last are SUCCESS
        # NOTE(review): these re.match() results are discarded, so nothing is asserted;
        # re.match also anchors at the start of sout, and the patterns omit the
        # "Invocation-N" token present in the summary lines above - likely dead code
        # carried over from the non-invocation-numbered variant of this test. Verify.
        re.match("parallel flow: \\(\n *job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUCCESS", sout)
        re.match("job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_no_args_singlequeued__j1' SUCCESS\n *\\)", sout)
def test_invoke_unchecked_parallel():
    """Unchecked invocations (slow and quick) coexist with a checked job in a parallel flow."""
    with api_select.api(__file__, login=True) as api:
        # The 30s unchecked job outlives the flow; its result stays unknown.
        api.job('j11_unchecked', exec_time=30, max_fails=0, expect_invocations=1, expect_order=None, unknown_result=True)
        api.job('j12', exec_time=5, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j13_unchecked', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as flow:
            flow.invoke_unchecked('j11_unchecked')
            flow.invoke('j12')
            flow.invoke_unchecked('j13_unchecked')
def test_single_level_parallel(api_type):
    """Three jobs of different durations all succeed in one flat parallel flow."""
    with api_select.api(__file__, api_type) as api:
        api.job('quick', 0.01, max_fails=0, expect_invocations=1, expect_order=1,
                params=(('s1', 'Hi', 'desc'), ('c1', ('true', 'maybe', 'false'), 'desc')))
        api.job('wait10', 10, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('wait5', 5, max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=20, job_name_prefix=api.job_name_prefix, report_interval=3) as flow:
            # Parameter overrides: password masked, choice param set to 'maybe'
            flow.invoke('quick', password='******', s1='WORLD', c1='maybe')
            flow.invoke('wait10')
            flow.invoke('wait5')
def test_demos_with_errors(api_type):
    """The 'errors' demo's 0flow job fails, raising JobControlFailException."""
    demo = errors
    load_demo_jobs(demo, api_type)
    flow_job_name = demo.__name__ + "__0flow"
    with api_select.api(__file__, api_type, fixed_prefix="jenkinsflow_demo__") as api:
        api.job(flow_job_name, 0.01, max_fails=1, expect_invocations=1, expect_order=1)
        with raises(JobControlFailException):
            with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix) as flow:
                flow.invoke(flow_job_name)
def test_demos_with_errors(api_type):
    """Running the failing 'errors' demo raises JobControlFailException."""
    demo = errors
    load_demo_jobs(demo, api_type)
    flow_job_name = demo.__name__ + "__0flow"
    with api_select.api(__file__, api_type, fixed_prefix="jenkinsflow_demo__") as api:
        api.job(flow_job_name, max_fails=1, expect_invocations=1, expect_order=1)
        with raises(JobControlFailException):
            with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix) as flow:
                flow.invoke(flow_job_name)
def test_single_level_parallel(api_type):
    """Three jobs of different durations all succeed in one flat parallel flow (kwarg exec_time variant)."""
    with api_select.api(__file__, api_type) as api:
        api.job('quick', max_fails=0, expect_invocations=1, expect_order=1,
                params=(('s1', 'Hi', 'desc'), ('c1', ('true', 'maybe', 'false'), 'desc')))
        api.job('wait10', max_fails=0, expect_invocations=1, expect_order=1, exec_time=10)
        api.job('wait5', max_fails=0, expect_invocations=1, expect_order=1, exec_time=5)

        with parallel(api, timeout=20, job_name_prefix=api.job_name_prefix, report_interval=3) as flow:
            flow.invoke('quick', password='******', s1='WORLD', c1='maybe')
            flow.invoke('wait10')
            flow.invoke('wait5')
def test_propagate_unstable_parallel_single_unstable(api_type):
    """With raise_if_unsuccessful=False an UNSTABLE child leaves the flow UNSTABLE without raising."""
    with api_select.api(__file__, api_type, login=True) as api:
        api.flow_job()
        api.job('j11_unstable', max_fails=0, expect_invocations=1, expect_order=1, final_result='unstable')

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, raise_if_unsuccessful=False) as flow:
            flow.invoke('j11_unstable')

        assert flow.result == BuildResult.UNSTABLE
    return 77
def test_propagation_warn_only_parallel(api_type, fake_java):
    """FAILURE_TO_UNSTABLE propagation: a failing child does not fail the flow (fake-cli variant)."""
    with api_select.api(__file__, api_type, login=True) as api:
        pre_existing_fake_cli(api_type)
        api.flow_job()
        api.job('j1_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
        api.job('j2', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3,
                      propagation=Propagation.FAILURE_TO_UNSTABLE) as flow:
            flow.invoke('j1_fail')
            flow.invoke('j2')
def test_propagate_unstable_parallel_single_unstable(api_type, fake_java):
    """An UNSTABLE child build makes the whole parallel flow UNSTABLE (fake-cli variant)."""
    with api_select.api(__file__, api_type, login=True) as api:
        pre_existing_fake_cli(api_type)
        api.flow_job()
        api.job('j11_unstable', 0.01, max_fails=0, expect_invocations=1, expect_order=1, final_result='unstable')

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix) as flow:
            flow.invoke('j11_unstable')

        assert flow.result == BuildResult.UNSTABLE
def test_missing_jobs_allowed_still_missing_parallel(api_type):
    """allow_missing_jobs lets the flow be defined, but a still-missing job fails it at run time."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j1', max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j2', max_fails=0, expect_invocations=1, expect_order=1)

        with raises(FailedChildJobsException):
            with parallel(api, 20, job_name_prefix=api.job_name_prefix, allow_missing_jobs=True) as flow:
                flow.invoke('j1')
                # 'missingA' was never created - invoking it is allowed at definition
                # time, but the flow fails when it runs
                flow.invoke('missingA')
                flow.invoke('j2')
def test_retry_parallel_toplevel(api_type):
    """A job failing once is retried (max_tries=2) and the flow succeeds."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j11', max_fails=0, expect_invocations=1, expect_order=1)
        # Fails on the first try, so it is invoked twice in total.
        api.job('j12_fail', max_fails=1, expect_invocations=2, expect_order=1)
        api.job('j13', max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, max_tries=2) as flow:
            flow.invoke('j11')
            flow.invoke('j12_fail')
            flow.invoke('j13')
def test_retry_unchecked_quick_parallel_toplevel():
    """Quick unchecked jobs are also retried when the flow allows max_tries=2."""
    with api_select.api(__file__) as api:
        api.flow_job()
        # Both failing jobs fail once, so each is invoked twice.
        api.job('j11_unchecked', 0.1, max_fails=1, expect_invocations=2, expect_order=1)
        api.job('j12_fail', 1, max_fails=1, expect_invocations=2, expect_order=1)
        api.job('j13', 0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, max_tries=2) as flow:
            flow.invoke_unchecked('j11_unchecked')
            flow.invoke('j12_fail')
            flow.invoke('j13')
def test_empty_flow_parallel_serial(api_type):
    """An empty nested serial flow inside a parallel flow is harmless."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j1', max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j2', max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as flow:
            flow.invoke('j1')
            # Empty nested flow - defines nothing, must not break the outer flow
            with flow.serial():
                pass
            flow.invoke('j2')
def test_missing_jobs_allowed_still_missing_parallel():
    """Mock-api variant: a still-missing job fails the flow even with allow_missing_jobs."""
    with mock_api.api(__file__) as api:
        api.flow_job()
        api.job('j1', 0.01, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j2', 0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with raises(FailedChildJobsException):
            with parallel(api, 20, job_name_prefix=api.job_name_prefix, allow_missing_jobs=True) as flow:
                flow.invoke('j1')
                # Never created - allowed at definition time, fails at run time
                flow.invoke('missingA')
                flow.invoke('j2')
def test_empty_flow_parallel_serial():
    """Mock-api variant: an empty nested serial flow is harmless."""
    with mock_api.api(__file__) as api:
        api.flow_job()
        api.job("j1", exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
        api.job("j2", exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as flow:
            flow.invoke("j1")
            # Empty nested flow - must not break the outer flow
            with flow.serial():
                pass
            flow.invoke("j2")
def test_retry_unchecked_long_running_parallel_toplevel():
    """A long-running unchecked job is NOT retried - only the checked failing job is."""
    with api_select.api(__file__, login=True) as api:
        api.flow_job()
        # Still running when the flow ends, so only one invocation and no known result.
        api.job('j11_unchecked', 20, max_fails=1, expect_invocations=1, expect_order=1, unknown_result=True)
        api.job('j12_fail', 0.01, max_fails=1, expect_invocations=2, expect_order=1)
        api.job('j13', 0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, max_tries=2) as flow:
            flow.invoke_unchecked('j11_unchecked')
            flow.invoke('j12_fail')
            flow.invoke('j13')
def test_empty_flow_parallel_serial(api_type):
    """api_type variant with exec_time: an empty nested serial flow is harmless."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j2', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as flow:
            flow.invoke('j1')
            # Empty nested flow - must not break the outer flow
            with flow.serial():
                pass
            flow.invoke('j2')
def test_retry_parallel_toplevel_fail():
    """A job that fails more times than max_tries allows still fails the flow."""
    with mock_api.api(__file__) as api:
        api.flow_job()
        api.job('j11', 0.01, max_fails=0, expect_invocations=1, expect_order=1)
        # Fails twice but only 2 tries are allowed -> both invocations fail.
        api.job('j12_fail', 0.01, max_fails=2, expect_invocations=2, expect_order=1)
        api.job('j13', 0.01, max_fails=0, expect_invocations=1, expect_order=1)

        with raises(FailedChildJobsException):
            with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, max_tries=2) as flow:
                flow.invoke('j11')
                flow.invoke('j12_fail')
                flow.invoke('j13')
def _test_demo(demo):
    """Load the given demo module's jobs and run its top-level flow job once.

    Helper (leading underscore: not collected by pytest directly).
    """
    # TODO: script api is not configured to run demos
    if test_cfg.selected_api() == ApiType.SCRIPT:
        return

    load_demo_jobs(demo)
    flow_job_name = demo.__name__ + "__0flow"
    with api_select.api(__file__, fixed_prefix="jenkinsflow_demo__") as api:
        api.job(flow_job_name, 0.01, max_fails=0, expect_invocations=1, expect_order=1)
        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix) as flow:
            flow.invoke(flow_job_name)
def test_flow_setup_exception_job(api_type):
    """An exception raised inside the flow's `with` body propagates unchanged.

    The invoke() after the raise is unreachable, so 'j1' must never be built
    (expect_invocations=0). Checked for both serial and parallel top levels.
    """
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j1', max_fails=0, expect_invocations=0, expect_order=None)

        with raises(Exception) as excinfo:
            with serial(api, 10, job_name_prefix=api.job_name_prefix) as flow:
                raise Exception("Not good")
                flow.invoke('j1')  # pylint: disable=unreachable
        assert str(excinfo.value) == "Not good"

        with raises(Exception) as excinfo:
            with parallel(api, 10, job_name_prefix=api.job_name_prefix) as flow:
                raise Exception("Not good")
                flow.invoke('j1')  # pylint: disable=unreachable
        assert str(excinfo.value) == "Not good"
def test_multiple_invocations_parallel_same_flow_queued(api_type, capsys):
    """Invoke the same job three times in one parallel flow and verify the queued-invocation reporting.

    'j1' is expected to be built three times (expect_invocations=3); the
    captured stdout must show each Invocation-N being scheduled, started,
    finishing with SUCCESS, and the final parallel-flow summary.
    """
    with api_select.api(__file__, api_type) as api:
        is_hudson = os.environ.get('HUDSON_URL')
        if is_hudson:
            # TODO investigate why this test fails in Hudson
            xfail("Doesn't pass when using Hudson")
            return
        api.flow_job()
        _params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
        # One job definition, three queued invocations of it.
        api.job('j1', max_fails=0, expect_invocations=3, expect_order=1, exec_time=3, params=_params)
        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
            ctrl1.invoke('j1', password='******', s1='invocation1')
            ctrl1.invoke('j1', password='******', s1='invocation2')
            ctrl1.invoke('j1', password='******', s1='invocation3')
        sout, _ = capsys.readouterr()
        # lines_in checks that the given patterns appear in order; a nested
        # tuple means "any one of these orderings" for the grouped lines.
        assert lines_in(
            api_type, sout,
            "^Job Invocation-1 (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1",
            "^Job Invocation-2 (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1",
            "^Job Invocation-3 (1/1,1/1): http://x.x/job/jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1",
            (
                build_started_msg(api, "jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1", 1, invocation_number=1),
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-1 stopped running",
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-1 Status IDLE - build: #",
                "^SUCCESS: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-1",
                build_started_msg(api, "jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1", 2, invocation_number=2),
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-2 stopped running",
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-2 Status IDLE - build: #",
                "^SUCCESS: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-2",
                build_started_msg(api, "jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1", 3, invocation_number=3),
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-3 stopped running",
                "^job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-3 Status IDLE - build: #",
                "^SUCCESS: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-3",
            ),
            "^parallel flow: (",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-1 SUCCESS",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-2 SUCCESS",
            "job: 'jenkinsflow_test__multiple_invocations_parallel_same_flow_queued__j1' Invocation-3 SUCCESS",
            "^)",
        )
def test_reporting_invocation_parallel(api_type, capsys):
    """Check the invocation/progress lines printed for a two-job parallel flow.

    j12 starts from build number 7, so its started-message reports build 8.
    """
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        api.job('j11', max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j12', max_fails=0, expect_invocations=1, invocation_delay=1.0, exec_time=1.5, initial_buildno=7, expect_order=2)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=0.5 / api.speedup) as flow:
            flow.invoke('j11')
            flow.invoke('j12')

        sout, _ = capsys.readouterr()

        # All three scheduling lines must be present, in order.
        assert lines_in(
            api_type, sout,
            "^Flow Invocation (1/1,1/1): ('jenkinsflow_test__reporting_invocation_parallel__j11', 'jenkinsflow_test__reporting_invocation_parallel__j12')",
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j11",
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j12",
        )
        # Each job's scheduling line must be followed by its build-started message.
        assert lines_in(
            api_type, sout,
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j11",
            build_started_msg(api, "jenkinsflow_test__reporting_invocation_parallel__j11", 1),
        )
        assert lines_in(
            api_type, sout,
            "^Job Invocation (1/1,1/1): http://x.x/job/jenkinsflow_test__reporting_invocation_parallel__j12",
            build_started_msg(api, "jenkinsflow_test__reporting_invocation_parallel__j12", 8),
        )
# NOTE(review): shares its name with an earlier zero-argument variant in this
# file; this later definition shadows the earlier one - confirm intent.
def test_retry_unchecked_long_running_parallel_toplevel(api_type):
    """Retrying a parallel flow must not re-trigger a still-running unchecked job.

    j12_fail fails once and is retried (max_tries=2); the long-running
    unchecked job j11 is only invoked a single time.
    """
    with api_select.api(__file__, api_type, login=True) as api:
        api.flow_job()
        api.job('j11_unchecked', max_fails=1, expect_invocations=1, expect_order=1, exec_time=20, unknown_result=True)
        api.job('j12_fail', max_fails=1, expect_invocations=2, expect_order=1)
        api.job('j13', max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, max_tries=2) as flow:
            flow.invoke_unchecked('j11_unchecked')
            flow.invoke('j12_fail')
            flow.invoke('j13')
def test_timeout_top_level_parallel(api_type):
    """A top-level parallel flow raises FlowTimeoutException when a child outlives the timeout.

    'wait20' runs ~20s against a flow timeout of 8s; 'quick' completes.
    """
    with api_select.api(__file__, api_type, login=True) as api:
        api.job(
            'quick', max_fails=0, expect_invocations=1, expect_order=None,
            params=(('s1', '', 'desc'), ('c1', 'false', 'desc')),
        )
        # Outlives the flow timeout; its result is never known to the flow.
        api.job('wait20', max_fails=0, expect_invocations=1, expect_order=None, exec_time=20, unknown_result=True)

        with raises(FlowTimeoutException):
            with parallel(api, timeout=8, job_name_prefix=api.job_name_prefix, report_interval=3) as flow:
                flow.invoke('quick', s1='', c1=False)
                flow.invoke('wait20')
def test_propagation_warn_only_nested_parallel_serial_continue_fail(api_type):
    """FAILURE_TO_UNSTABLE on an inner scope lets the serial flow continue to the next job.

    j22_fail's failure is downgraded to unstable, so the enclosing serial
    scope proceeds to j23_fail, whose (non-downgraded) failure makes the
    top-level flow raise FailedChildJobsException.
    """
    with api_select.api(__file__, api_type, login=True) as api:
        api.flow_job()
        api.job('j11', max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j21', max_fails=0, expect_invocations=1, expect_order=1)
        api.job('j22_fail', max_fails=1, expect_invocations=1, expect_order=1)
        api.job('j23_fail', max_fails=1, expect_invocations=1, expect_order=1)

        with raises(FailedChildJobsException):
            with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as top:
                top.invoke('j11')
                with top.serial() as inner_serial:
                    inner_serial.invoke('j21')
                    # Downgraded scope: failure here must not stop the serial flow.
                    with inner_serial.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as warn_only:
                        warn_only.invoke('j22_fail')
                    inner_serial.invoke('j23_fail')
def test_retry_unchecked_quick_parallel_outer_level(api_type):
    """Retries at both the outer and nested parallel level re-run a quick unchecked job.

    With max_tries=2 on each of the two nested levels, the failing jobs are
    invoked up to 2*2-1 = 3 times (expect_invocations=3).
    """
    with api_select.api(__file__, api_type, login=True) as api:
        api.flow_job()
        api.job('j11_unchecked', max_fails=2, expect_invocations=3, expect_order=1)
        api.job('j12_fail', max_fails=2, expect_invocations=3, expect_order=1, exec_time=1)
        api.job('j13', max_fails=0, expect_invocations=1, expect_order=1)

        with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, max_tries=2) as outer:
            with outer.parallel(max_tries=2) as inner:
                inner.invoke_unchecked('j11_unchecked')
                inner.invoke('j12_fail')
                inner.invoke('j13')
def test_abort(api_type, capsys):
    """Aborting a running job fails the parallel flow and is reported as ABORTED.

    A background abort of 'wait10_abort' is scheduled after ~10s; both it and
    'wait1_fail' must appear in the FailedChildJobsException message, and the
    captured output must contain the ABORTED report line.
    """
    with api_select.api(__file__, api_type, login=True) as api:
        api.flow_job()
        api.job('quick', max_fails=0, expect_invocations=1, expect_order=1)
        api.job('wait10_abort', max_fails=0, expect_invocations=1, expect_order=1, exec_time=20, final_result='ABORTED')
        api.job('wait1_fail', max_fails=1, expect_invocations=1, expect_order=1, exec_time=1)

        # Schedule the out-of-band abort before starting the flow.
        abort(api, 'wait10_abort', 10)

        with raises(FailedChildJobsException) as excinfo:
            with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as flow:
                flow.invoke('quick')
                flow.invoke('wait10_abort')
                flow.invoke('wait1_fail')

        failure_message = str(excinfo.value)
        assert "wait10_abort" in failure_message
        assert "wait1_fail" in failure_message

        sout, _ = capsys.readouterr()
        assert lines_in(
            api_type, sout,
            re.compile(
                "^ABORTED: 'jenkinsflow_test__abort__wait10_abort' - build: .*/jenkinsflow_test__abort__wait10_abort.* after:"
            ),
        )
def test_empty_flow_top_level_parallel(api_type):
    """A completely empty top-level parallel flow must complete without error."""
    with api_select.api(__file__, api_type) as api:
        api.flow_job()
        # No job_name_prefix and no invocations: the flow has nothing to run.
        with parallel(api, timeout=70):
            pass