Example no. 1
0
def test_job_arg_name_files(tmpdir, fancyname):
    """Regression test for issue #47.

    When a named argument is added to a Job and the Job is built with
    fancyname=True, the Job submit file and the error/log/output files
    for the argument must start with the same index,
    e.g. job_(date)_01.submit, job_(date)_01.error, etc.
    """
    submit_dir = str(tmpdir.mkdir('submit'))

    job = Job('testjob', example_script, submit=submit_dir)
    job.add_arg('arg', name='argname')
    dagman = Dagman('exampledagman', submit=submit_dir)
    dagman.add_job(job)
    dagman.build(fancyname=fancyname)

    with open(dagman.submit_file, 'r') as dag_file:
        dag_lines = dag_file.readlines()

    # Root of the dagman submit file: its basename without the .submit ext
    first_line = dag_lines[0]
    submit_basename = first_line.split(os.sep)[-1].rstrip()
    submit_file_root, _ = os.path.splitext(submit_basename)
    # job_name variable (used to build the error/log/output file basenames);
    # it is the second-to-last double-quoted field on the third line
    jobname = dag_lines[2].split('"')[-2]
    other_file_root = '_'.join(jobname.split('_')[:-1])

    assert submit_file_root == other_file_root
Example no. 2
0
def test_add_job_int_fail():
    """Dagman.add_job must raise TypeError for a non-Job/Dagman argument.

    Also checks that the error message names the offending object's type.
    """
    # Construct the Dagman OUTSIDE the raises block: the context manager
    # should only cover the single call expected to raise, so an unrelated
    # failure in Dagman() cannot be mistaken for the expected TypeError.
    dag = Dagman('dagname')
    with pytest.raises(TypeError) as excinfo:
        dag.add_job(50)
    error = 'Expecting a Job or Dagman. ' + \
            'Got an object of type {}'.format(type(50))
    assert error == str(excinfo.value)
Example no. 3
0
def test_add_job_dag_parameter_equality(tmpdir):
    """Attaching a Job via the dag= keyword matches Dagman.add_job."""
    submit_dir = str(tmpdir.join('submit'))
    dag = Dagman('dagman', submit=submit_dir)
    # One job registered through the constructor keyword ...
    first_job = Job('job_1', example_script, dag=dag)
    # ... and one attached explicitly afterwards.
    second_job = Job('job_2', example_script)
    dag.add_job(second_job)

    # Both registration paths must land the jobs in dag.nodes, in order
    assert dag.nodes == [first_job, second_job]
Example no. 4
0
def test_dagman_env_variable_dir(tmpdir, monkeypatch):
    """Dagman should honor the PYCONDOR_SUBMIT_DIR environment variable."""
    # Point the pycondor environment variable at a fresh directory
    submit_dir = str(tmpdir.mkdir('submit'))
    monkeypatch.setenv('PYCONDOR_SUBMIT_DIR', submit_dir)

    dagman = Dagman('testdagman')
    dagman.add_job(Job('jobname', example_script))
    dagman.build()

    # The built submit file must live inside the env-var directory
    assert os.path.dirname(dagman.submit_file) == submit_dir
Example no. 5
0
def test_dagman_job_order(tmpdir):
    """Regression test for issue #57.

    The order in which Jobs are added to a Dagman must not change the
    Dagman submit file that is built.
    """
    submit_dir = str(tmpdir.mkdir('submit'))

    collected_lines = []
    for parent_first in (True, False):
        dagman = Dagman('testdagman', submit=submit_dir)
        job_child = Job('childjob', example_script, submit=submit_dir)
        job_child.add_arg('--length 200', name='200jobname')
        job_child.add_arg('--length 400', retry=3)

        job_parent = Job('parentjob', example_script, submit=submit_dir)
        job_parent.add_arg('--length 100')
        job_parent.add_child(job_child)

        # Register the two jobs in both possible orders
        if parent_first:
            ordering = (job_parent, job_child)
        else:
            ordering = (job_child, job_parent)
        for job in ordering:
            dagman.add_job(job)

        dagman.build(fancyname=False)
        # Collect the resulting submit-file lines for later comparison
        with open(dagman.submit_file, 'r') as dag_submit_file:
            collected_lines.append(dag_submit_file.readlines())

    # The same multiset of lines must occur in the Dagman submit file
    # regardless of the order the parent/child jobs were added
    assert Counter(collected_lines[0]) == Counter(collected_lines[1])
Example no. 6
0
def test_dagman_has_bad_node_names(tmpdir):
    """_has_bad_node_names must flag job/arg names containing '.' or '+'."""
    submit_dir = str(tmpdir.mkdir('submit'))

    # (job name, argument name, expected _has_bad_node_names) combinations
    cases = [
        ('testjob', 'argname', False),
        ('testjob.', 'argname', True),
        ('testjob', 'argname+', True),
        ('testjob+', 'argname.', True),
    ]
    for job_name, arg_name, expected in cases:
        job = Job(job_name, example_script, submit=submit_dir)
        job.add_arg('arg', name=arg_name)
        dagman = Dagman('testdagman', submit=submit_dir)
        dagman.add_job(job)
        dagman.build()
        assert dagman._has_bad_node_names == expected
Example no. 7
0
    arguments += " --number " + str(args.number)
    arguments += " --size " + str(args.size)
    arguments += " --time " + str(args.time)
    arguments += " --dt " + str(args.dt)
    arguments += " --rms " + str(args.rms)
    generator_job.add_arg(arguments)

    if add_file_indices:
        files = [
            filename + "_" + suffix + ".npz" for suffix in output_suffixes
        ]
    else:
        files = [filename + ".npz"]
    arguments = " ".join(files)
    arguments += " --output " + str(args.envelope)
    arguments += " --amplification " + str(args.amplification)
    processor_job.add_arg(arguments)

# Create job dependencies:
# processor_job doesn't start until generator_job has finished
generator_job.add_child(processor_job)

# Set up a dagman to own both jobs
# (basename and submit are defined earlier in this script)
dagman = Dagman("generate_" + basename, submit=submit, verbose=2)
# Add jobs to dagman
dagman.add_job(generator_job)
dagman.add_job(processor_job)

# Write all necessary submit files and submit the dagman to Condor
dagman.build_submit()
Example no. 8
0
# Collect the unique file basenames (everything before the final "_")
# that start with `basename`, preserving first-seen (sorted) order.
# NOTE(review): rindex raises ValueError if a filename has no "_" —
# presumably all files in dirname follow a name_suffix convention; verify.
for filename in sorted(os.listdir(dirname)):
    filename = filename[:filename.rindex("_")]
    if filename.startswith(basename) and not (filename in file_bases):
        file_bases.append(filename)

# One calculator argument string per file base, forwarding the CLI options
for base in file_bases:
    arguments = os.path.join(os.path.dirname(args.noise_file_basename), base)
    arguments += " " + str(args.outfile)
    arguments += " --range " + str(args.range[0]) + " " + str(args.range[1])
    arguments += " --stations " + str(args.stations)
    arguments += " --geometry " + str(args.geometry)
    arguments += " --threshold " + str(args.threshold)
    arguments += " --tot " + str(args.tot)
    arguments += " --antennas_hit " + str(args.antennas_hit)
    calculator_job.add_arg(arguments)

# The culminator only needs the shared output file
culminator_job.add_arg(args.outfile)

# Create job dependencies:
# culminator_job doesn't start until calculator_job has finished
calculator_job.add_child(culminator_job)

# Set up a dagman to own both jobs
dagman = Dagman("full_calculation_" + basename, submit=submit, verbose=2)
# Add jobs to dagman
dagman.add_job(calculator_job)
dagman.add_job(culminator_job)

# Write all necessary submit files and submit the dagman to Condor
dagman.build_submit()
Example no. 9
0
            replaced_name = logfile_name.replace("OUTPUT", o[:o.rindex(".")])
            replaced_name = replaced_name.replace("ENERGY", energy)
            replaced_name = replaced_name.replace("ITERATION",
                                                  str(i).zfill(zfill_amount))
            args.args[logfile_index] = replaced_name
            transfer_files.append(replaced_name)
            file_remaps.append(replaced_name + '=' +
                               os.path.join(logfile_dirname, replaced_name))
        job = Job(
            descriptive_name + "_" + energy + "_" + str(i).zfill(zfill_amount),
            executable=script_file,
            output=output,
            error=error,
            log=log,
            submit=submit,  #request_memory="5GB",
            extra_lines=[
                "should_transfer_files = YES",
                "transfer_output_files = " + ", ".join(transfer_files),
                'transfer_output_remaps = "' + '; '.join(file_remaps) + '"',
                "when_to_transfer_output = ON_EXIT"
            ],
            verbose=2 if args.verbose else 0)
        job.add_arg(" ".join([energy] + args.args))
        dag.add_job(job)

# Write all necessary submit files and submit the dagman to Condor,
# optionally throttling concurrency via condor_submit_dag's -maxjobs flag
# (a non-positive args.maxjobs means "no limit")
if args.maxjobs > 0:
    dag.build_submit(submit_options="-maxjobs " + str(args.maxjobs))
else:
    dag.build_submit()