Example #1
import time

from parsl import set_stream_logger
from parsl.dataflow.dflow import DataFlowKernel

# `config` is supplied by the test's parsl configuration module (elided in this excerpt).


def test_220():
    """Test async usage_tracking behavior for issue #220."""

    print("This test assumes /etc/resolv.conf is misconfigured")
    with open("/etc/resolv.conf", 'r') as f:
        for line in f.readlines():
            line = line.strip()
            print("Line: [{}]".format(line))
            if line.startswith("nameserver") and line != "nameserver 2.2.2.2":
                assert False, "/etc/resolv.conf should be misconfigured"

    start = time.time()
    set_stream_logger()
    dfk = DataFlowKernel(config=config)
    delta = time.time() - start
    print("Time taken : ", delta)
    assert delta < 1, "DFK took too much time to start, delta:{}".format(delta)
    dfk.cleanup()
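
A minimal variation (a sketch, not part of the original test): wrapping the timing check in try/finally guarantees that dfk.cleanup() runs even when the startup-time assertion fails, so a slow start does not leave the DFK running. It assumes the same config object as above.

def test_220_with_guaranteed_cleanup():
    """Same startup-time check, but cleanup always runs."""
    start = time.time()
    set_stream_logger()
    dfk = DataFlowKernel(config=config)
    try:
        delta = time.time() - start
        print("Time taken : ", delta)
        assert delta < 1, "DFK took too much time to start, delta:{}".format(delta)
    finally:
        # Runs whether or not the assertion above fails
        dfk.cleanup()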
Example #2
from parsl.app.app import App
from parsl.dataflow.dflow import DataFlowKernel


def run_checkpointed(n=2, mode="task_exit"):
    """Run n apps that fail with a division-by-zero error, followed by one app
    that succeeds. The checkpoint should contain only the single successful task.
    """

    from parsl.tests.configs.local_threads import config
    config["globals"]["checkpointMode"] = mode
    dfk = DataFlowKernel(config=config)

    @App('python', dfk, cache=True)
    def cached_rand(x):
        import random
        return random.randint(0, 10000)

    @App('python', dfk, cache=True)
    def cached_failing(x):
        5 / 0
        return 1

    items = []
    for _ in range(n):
        x = cached_failing(0)
        items.append(x)
        try:
            x.result()
        except Exception:
            print("Ignoring failure of task")

    x = cached_rand(1)
    print(x.result())
    rundir = dfk.rundir
    # Call cleanup *only* for dfk_exit to ensure that a checkpoint is written
    # at all
    if mode == "dfk_exit":
        dfk.cleanup()
    return rundir
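
A hypothetical driver (not in the original module) sketching how run_checkpointed might be exercised with both checkpoint modes; the returned run directory is where that run's checkpoint is expected to be written.

if __name__ == "__main__":
    for checkpoint_mode in ("task_exit", "dfk_exit"):
        rundir = run_checkpointed(n=2, mode=checkpoint_mode)
        print("mode={} rundir={}".format(checkpoint_mode, rundir))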
Example #3
def test_z_cleanup():
    dfk = DataFlowKernel(config=config)
    dfk.cleanup()
Example #4
    MAXTIMEOUT = 200
    # MAXTIMEOUT = 50
    MINTIMEOUT = 60

    from random import randrange
    r = randrange

    a1, b1 = [
        cpu_stress(workers=1, timeout=r(MINTIMEOUT, MAXTIMEOUT)),
        cpu_stress(workers=1, timeout=r(MINTIMEOUT, MAXTIMEOUT))
    ]
    a1.result()
    b1.result()

    ins = [
        cpu_stress(workers=1, timeout=r(MINTIMEOUT, MAXTIMEOUT), inputs=[a1]),
        cpu_stress(workers=1, timeout=r(MINTIMEOUT, MAXTIMEOUT), inputs=[b1]),
        cpu_stress(workers=1, timeout=r(MINTIMEOUT, MAXTIMEOUT), inputs=[b1]),
        cpu_stress(workers=1, timeout=r(MINTIMEOUT, MAXTIMEOUT), inputs=[b1])
    ]

    a3 = cpu_stress(workers=1,
                    timeout=r(MINTIMEOUT, MAXTIMEOUT),
                    inputs=[*ins])
    print(a3.result())
    a4 = cpu_stress_fail(workers=1,
                         timeout=r(MINTIMEOUT, MAXTIMEOUT),
                         inputs=[a1])
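
    # Hypothetical addition (not in the original snippet): retrieve a4's result
    # before cleanup so the expected failure of cpu_stress_fail is actually
    # observed rather than silently dropped.
    try:
        a4.result()
    except Exception as e:
        print("cpu_stress_fail raised as expected: {}".format(e))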

    dfk.cleanup()