Beispiel #1
0
def test_job_arg_name_files(tmpdir, fancyname):
    """Regression test for issue #47.

    When a named argument is added to a Job and the Job is built with
    fancyname=True, the submit file and the argument's error/log/output
    files must share the same index, e.g. job_(date)_01.submit and
    job_(date)_01.error.
    """
    submit_dir = str(tmpdir.mkdir('submit'))

    job = Job('testjob', example_script, submit=submit_dir)
    job.add_arg('arg', name='argname')
    dagman = Dagman('exampledagman', submit=submit_dir)
    dagman.add_job(job)
    dagman.build(fancyname=fancyname)

    with open(dagman.submit_file, 'r') as f:
        lines = f.readlines()

    # Root of the Job submit file: basename without the '.submit' extension
    submit_basename = lines[0].split(os.sep)[-1].rstrip()
    submit_root, _ = os.path.splitext(submit_basename)
    # job_name variable (used to build the error/log/output file basenames)
    job_name = lines[2].split('"')[-2]
    file_root = '_'.join(job_name.split('_')[:-1])

    assert submit_root == file_root
Beispiel #2
0
def test_iter_job_args_fail(tmpdir):
    """Exercise all three failure modes of _iter_job_args."""
    submit_dir = str(tmpdir.mkdir('submit'))

    # An unbuilt Job raises ValueError when iterated
    job = Job('testjob', example_script, submit=submit_dir)
    with pytest.raises(ValueError) as excinfo:
        next(_iter_job_args(job))
    expected = ('Job {} must be built before adding it to a '
                'Dagman'.format(job.name))
    assert expected == str(excinfo.value)

    # A built Job without args exhausts immediately (StopIteration)
    job.build()
    with pytest.raises(StopIteration):
        next(_iter_job_args(job))

    # Anything that is not a Job raises TypeError
    not_job = 'thisisastring'
    with pytest.raises(TypeError) as excinfo:
        next(_iter_job_args(not_job))
    expected = 'Expecting a Job object, got {}'.format(type(not_job))
    assert expected == str(excinfo.value)
Beispiel #3
0
def test_build_executeable_not_found_fail(tmpdir):
    """Building a Job whose executable does not exist raises IOError."""
    submit_dir = str(tmpdir.mkdir('submit'))
    missing_exe = '/path/to/executable'
    with pytest.raises(IOError) as excinfo:
        Job('jobname', missing_exe, submit=submit_dir).build(makedirs=False)
    assert str(excinfo.value) == (
        'The executable {} does not exist'.format(missing_exe))
Beispiel #4
0
def test_add_job_dag_parameter_equality(tmpdir):
    """Jobs registered via dag= and via add_job both become Dagman nodes."""
    submit_dir = str(tmpdir.join('submit'))
    dag = Dagman('dagman', submit=submit_dir)
    first_job = Job('job_1', example_script, dag=dag)
    second_job = Job('job_2', example_script)
    dag.add_job(second_job)

    assert dag.nodes == [first_job, second_job]
Beispiel #5
0
def test_init_arguments_type_fail():
    """A non-string, non-iterable arguments value raises TypeError on build."""
    with pytest.raises(TypeError) as excinfo:
        Job(name='jobname',
            executable=example_script,
            arguments=50).build()
    assert str(excinfo.value) == 'arguments must be a string or an iterable'
Beispiel #6
0
def test_job_submit_env_variable_dir(tmpdir, monkeypatch):
    """A Job built without an explicit submit dir honors PYCONDOR_SUBMIT_DIR.

    The directory of ``job.submit_file`` must be exactly the directory
    exported through the environment variable.
    """
    # Use monkeypatch fixture to set the pycondor environment variable
    submit_dir = str(tmpdir.mkdir('submit'))
    monkeypatch.setenv('PYCONDOR_SUBMIT_DIR', submit_dir)

    job = Job('jobname', example_script)
    job.build()
    # getattr with a literal attribute name was needless indirection, and the
    # expected path was recomputed instead of reusing the exported directory.
    assert os.path.dirname(job.submit_file) == submit_dir
Beispiel #7
0
def test_job_env_variable_dir(tmpdir, monkeypatch, env_var):
    """PYCONDOR_<VAR>_DIR environment variables control the per-file dirs."""
    submit_dir = str(tmpdir)
    # Point the pycondor environment variable at a fresh subdirectory
    env_dir = str(tmpdir.mkdir(env_var))
    monkeypatch.setenv('PYCONDOR_{}_DIR'.format(env_var.upper()), env_dir)

    job = Job('jobname', example_script, submit=submit_dir)
    job.build()
    expected = os.path.join(str(tmpdir), env_var)
    # The attribute name is dynamic (e.g. 'error_file'), so getattr is needed
    actual = os.path.dirname(getattr(job, '{}_file'.format(env_var)))
    assert expected == actual
Beispiel #8
0
def test_job_subdag_build(tmpdir):
    """Every entry of extra_lines is written verbatim to the submit file."""
    submit_dir = str(tmpdir.join('submit'))

    extra_lines = ['first extra line', 'second extra line']
    job = Job('job',
              example_script,
              submit=submit_dir,
              extra_lines=extra_lines)
    job.build()

    with open(job.submit_file, 'r') as f:
        written = {line.rstrip('\n') for line in f}
    assert set(extra_lines) <= written
Beispiel #9
0
def test_repr():
    """repr() of a Job shows name, executable, and non-default parameters."""
    # Default Job: only name, executable, and submit appear
    job = Job('jobname', example_script)
    expected = ('Job(name=jobname, executable=example_script.py, '
                'submit={})'.format(os.getcwd()))
    assert repr(job) == expected

    # A non-default queue value shows up in the repr as well
    job = Job('jobname', example_script, queue=2)
    expected = ('Job(name=jobname, executable=example_script.py, '
                'queue=2, submit={})'.format(os.getcwd()))
    assert repr(job) == expected
Beispiel #10
0
def test_repr():
    """repr() of a Job includes the default getenv/notification/universe."""
    # Default Job
    job = Job('jobname', example_script)
    expected = ('Job(name=jobname, executable=example_script.py, '
                'getenv=True, notification=never, submit={}, '
                'universe=vanilla)'.format(os.getcwd()))
    assert repr(job) == expected

    # A non-default queue value is added to the repr
    job = Job('jobname', example_script, queue=2)
    expected = ('Job(name=jobname, executable=example_script.py, '
                'getenv=True, notification=never, queue=2, submit={}, '
                'universe=vanilla)'.format(os.getcwd()))
    assert repr(job) == expected
Beispiel #11
0
def test_dagman_has_bad_node_names(tmpdir):
    """'.' or '+' in a job or argument name must set _has_bad_node_names."""
    submit_dir = str(tmpdir.mkdir('submit'))

    # (job name, argument name, expected flag) combinations
    cases = [
        ('testjob', 'argname', False),
        ('testjob.', 'argname', True),
        ('testjob', 'argname+', True),
        ('testjob+', 'argname.', True),
    ]
    for job_name, arg_name, expected in cases:
        job = Job(job_name, example_script, submit=submit_dir)
        job.add_arg('arg', name=arg_name)
        dagman = Dagman('testdagman', submit=submit_dir)
        dagman.add_job(job)
        dagman.build()
        assert dagman._has_bad_node_names == expected
Beispiel #12
0
def test_queue_written_to_submit_file(tmpdir):
    """The queue parameter must be written to the submit file (issue #38)."""
    submit_dir = str(tmpdir.mkdir('submit'))

    # Build Job object with queue=5
    job = Job('jobname', example_script, submit=submit_dir, queue=5)
    job.build(fancyname=False)

    # Read the built submit file and check that a 'queue 5' line is present.
    # readlines() keeps trailing newlines, so the previous membership test
    # ('queue 5' in lines) only matched when the line happened to be the
    # final, newline-less line of the file; strip line endings first.
    with open(job.submit_file, 'r') as f:
        lines = [line.rstrip('\n') for line in f]
    assert 'queue 5' in lines
Beispiel #13
0
def test_submit_job_parents_raises(tmpdir, monkeypatch_condor_submit):
    """submit_job on a Job with parents (outside a Dagman) raises ValueError."""
    submit_dir = str(tmpdir)
    child = Job('jobname', example_script, submit=submit_dir)
    parent = Job('parent_jobname', example_script, submit=submit_dir)
    child.add_parent(parent)
    child.build()
    with pytest.raises(ValueError) as excinfo:
        child.submit_job()
    expected = ('Attempting to submit a Job with parents. '
                'Interjob relationships requires Dagman.')
    assert expected == str(excinfo.value)
Beispiel #14
0
def test_dagman_add_node_ignores_duplicates(tmpdir, dagman):
    """Adding the same Job twice leaves a single node in the Dagman."""
    submit_dir = str(tmpdir.join('submit'))
    job = Job('job', example_script, submit=submit_dir)
    for _ in range(2):
        dagman.add_job(job)

    assert dagman.nodes == [job]
Beispiel #15
0
def test_get_job_arg_lines_not_built_raises():
    """_get_job_arg_lines rejects Jobs that have not been built yet."""
    job = Job('testjob', example_script)
    with pytest.raises(ValueError) as excinfo:
        Dagman('dag_name')._get_job_arg_lines(job, fancyname=True)
    expected = ('Job {} must be built before adding it to a '
                'Dagman'.format(job.name))
    assert str(excinfo.value) == expected
Beispiel #16
0
def test_job_dag_parameter(tmpdir):
    """Passing dag= at construction registers the Job with that Dagman."""
    submit_dir = str(tmpdir.join('submit'))
    dag = Dagman('dagman', submit=submit_dir)
    new_job = Job('job', example_script, dag=dag)

    assert new_job in dag
Beispiel #17
0
def test_add_args():
    """add_args is equivalent to calling add_arg once per argument."""
    filenames = ['file_{}.hdf'.format(i) for i in range(10)]

    job_one_by_one = Job('job1', example_script)
    for filename in filenames:
        job_one_by_one.add_arg(filename)

    job_all_at_once = Job('job2', example_script)
    job_all_at_once.add_args(filenames)

    assert job_one_by_one.args == job_all_at_once.args
Beispiel #18
0
def test_iter_job_args(tmpdir):
    """_iter_job_args yields node names based on the arg name or its index."""
    submit_dir = str(tmpdir.mkdir('submit'))

    job = Job('testjob', example_script, submit=submit_dir)
    job.add_arg('argument1', name='arg1')
    job.add_arg('argument2')
    job.build()
    for idx, (node_name, jobarg) in enumerate(_iter_job_args(job)):
        if jobarg.name is None:
            # Unnamed args fall back to a positional 'arg_<index>' suffix
            expected = '{}_arg_{}'.format(job.submit_name, idx)
        else:
            expected = '{}_{}'.format(job.submit_name, jobarg.name)
        assert node_name == expected
Beispiel #19
0
def test_dagman_env_variable_dir(tmpdir, monkeypatch):
    """A Dagman without an explicit submit dir honors PYCONDOR_SUBMIT_DIR."""
    # Export the pycondor environment variable
    submit_dir = str(tmpdir.mkdir('submit'))
    monkeypatch.setenv('PYCONDOR_SUBMIT_DIR', submit_dir)

    dagman = Dagman('testdagman')
    dagman.add_job(Job('jobname', example_script))
    dagman.build()

    assert os.path.dirname(dagman.submit_file) == submit_dir
Beispiel #20
0
def test_job_dag_submit_file_same(tmpdir, dagman):
    """A no-argument Job's submit file is identical whether it is built
    standalone or as part of a Dagman (issue #38)."""
    submit_dir = str(tmpdir.mkdir('submit'))

    def make_job():
        # Both Jobs must be configured identically for the comparison to hold
        return Job('test_job', example_script, submit=submit_dir, queue=5)

    # Job built outside of any Dagman
    standalone = make_job()
    standalone.build(fancyname=False)

    # Identically-configured Job built through a Dagman
    managed = make_job()
    dagman.add_job(managed)
    dagman.build(fancyname=False)

    # Byte-for-byte comparison of the two submit files
    assert filecmp.cmp(standalone.submit_file,
                       managed.submit_file,
                       shallow=False)
Beispiel #21
0
def test_init_retry():
    """The Job-level retry is the default for args without their own retry,
    and an explicit per-arg retry takes precedence."""
    job = Job(name='jobname', executable=example_script, retry=7)
    job.add_arg('arg1')            # inherits the Job-level retry=7
    job.add_arg('arg2', retry=3)   # explicit retry wins

    assert len(job.args) == 2
    for jobarg, expected_retry in zip(job.args, (7, 3)):
        assert jobarg.retry == expected_retry
Beispiel #22
0
def test_job_args_and_queue_raises(tmpdir):
    """Multiple args combined with queue > 1 is only supported via Dagman."""
    submit_dir = str(tmpdir.join('submit'))

    with pytest.raises(NotImplementedError) as excinfo:
        job = Job('job', example_script, submit=submit_dir, queue=2)
        job.add_args(str(i) for i in range(10))
        job.build()
    expected = ('At this time multiple arguments and queue values '
                'are only supported through Dagman')
    assert str(excinfo.value) == expected
Beispiel #23
0
    def ppplots(self):
        """
        Set up job to create PP plots.
        """

        from pycondor import Job

        # locate the PP-plot executable on the PATH
        executable = shutil.which("cwinpy_pe_generate_pp_plots")

        # optional scheduler accounting information
        extra_lines = []
        if self.accountgroup is not None:
            extra_lines.append(
                "accounting_group = {}".format(self.accountgroup))
        if self.accountuser is not None:
            extra_lines.append(
                "accounting_group_user = {}".format(self.accountuser))

        # create the cwinpy_pe PP-plot Job; log/error/output all go to the
        # parameter-estimation log directory
        inputs = self.runner.dag.inputs
        job = Job(
            "cwinpy_pe_pp_plots",
            executable,
            error=inputs.pe_log_directory,
            log=inputs.pe_log_directory,
            output=inputs.pe_log_directory,
            submit=inputs.submit_directory,
            universe="vanilla",
            request_memory=inputs.request_memory,
            getenv=self.getenv,
            queue=1,
            requirements=inputs.requirements,
            retry=inputs.retry,
            extra_lines=extra_lines,
            dag=self.runner.dag.pycondor_dag,
        )

        # command-line arguments: glob over all results, fixed output name
        arguments = "--path '{}' ".format(
            os.path.join(self.basedir, "results", "*", "*"))
        arguments += "--output {} ".format(
            os.path.join(self.basedir, "ppplot.png"))
        if self.outputsnr:
            arguments += "--snrs "
        job.add_arg(arguments)

        # run after every other node; exclude the cwinpy_pe_pp_plots job itself
        job.add_parents(self.runner.dag.pycondor_dag.nodes[:-1])
        self.runner.dag.build()
Beispiel #24
0
# Scripts run by the generator and processor jobs
generator_script = "/home/fasig/scalable_radio_array/noise_writer.sh"
processor_script = "/home/fasig/scalable_radio_array/envelope_processor.sh"

# All Condor job files (error, output, log, submit) share one directory
error = output = log = submit = '/data/user/fasig/pycondor'

# Set up the PyCondor Jobs
generator_job = Job("make_noise_" + basename,
                    generator_script,
                    error=error,
                    output=output,
                    log=log,
                    submit=submit,
                    verbose=2)
processor_job = Job("process_noise_" + basename,
                    processor_script,
                    error=error,
                    output=output,
                    log=log,
                    submit=submit,
                    verbose=2)

# Zero-padded suffix for every output file index
output_suffixes = [
    str(i).zfill(file_zero_padding) for i in range(max_file_index + 1)
]
Beispiel #25
0
# Script run by both jobs
script = "/home/fasig/scalable_radio_array/station_triggers_parallel.sh"

# All Condor job files (error, output, log, submit) share one directory
error = output = log = submit = '/data/user/fasig/pycondor'

# One GB per (started) hundred stations, plus one GB headroom
memory_requirement = "{}GB".format(int(args.stations / 100) + 1)

# Set up the PyCondor Jobs
calculator_job = Job("calculate_" + basename,
                     script,
                     error=error,
                     output=output,
                     log=log,
                     submit=submit,
                     verbose=2,
                     request_memory=memory_requirement)
culminator_job = Job("culminate_" + basename,
                     script,
                     error=error,
                     output=output,
                     log=log,
                     submit=submit,
                     verbose=2)

# Add arguments to jobs; an empty dirname means the current directory
dirname = os.path.dirname(args.noise_file_basename) or "."
Beispiel #26
0
            o = args.args[output_index]
            replaced_name = logfile_name.replace("OUTPUT", o[:o.rindex(".")])
            replaced_name = replaced_name.replace("ENERGY", energy)
            replaced_name = replaced_name.replace("ITERATION",
                                                  str(i).zfill(zfill_amount))
            args.args[logfile_index] = replaced_name
            transfer_files.append(replaced_name)
            file_remaps.append(replaced_name + '=' +
                               os.path.join(logfile_dirname, replaced_name))
        job = Job(
            descriptive_name + "_" + energy + "_" + str(i).zfill(zfill_amount),
            executable=script_file,
            output=output,
            error=error,
            log=log,
            submit=submit,  #request_memory="5GB",
            extra_lines=[
                "should_transfer_files = YES",
                "transfer_output_files = " + ", ".join(transfer_files),
                'transfer_output_remaps = "' + '; '.join(file_remaps) + '"',
                "when_to_transfer_output = ON_EXIT"
            ],
            verbose=2 if args.verbose else 0)
        job.add_arg(" ".join([energy] + args.args))
        dag.add_job(job)

# Write all necessary submit files and submit the dagman to Condor,
# capping concurrently-running jobs when a positive limit was given
if args.maxjobs > 0:
    dag.build_submit(submit_options="-maxjobs {}".format(args.maxjobs))
else:
    dag.build_submit()
Beispiel #27
0
def generate_dag(times, flags=None, tag='gwdetchar-omega-batch',
                 submit=False, outdir=None, universe='vanilla',
                 condor_commands=None):
    """Construct a Directed Acyclic Graph (DAG) for a batch of omega scans

    Parameters
    ----------
    times : `list` of `float`
        list of GPS times to scan

    flags : `list` of `str`, optional
        a list of command line flags to run for each job, defaults to an
        empty list

    tag : `str`, optional
        a helpful string to use to name the DAG,
        default: `'gwdetchar-omega-batch'`

    submit : `bool`, optional
        submit the DAG to condor, default: `False`

    outdir : `str`, optional
        the output directory in which to store files, will result in
        sub-directories called `'condor'` and `'logs'`, defaults to the
        current working directory at call time

    universe : `str`, optional
        condor universe to run in, default: `'vanilla'`

    condor_commands : `list` of `str`, optional
        list of condor settings to process with, defaults to the output of
        `get_condor_arguments`

    Returns
    -------
    dagman : `~pycondor.Dagman`
        the fully built DAG object
    """
    # Resolve defaults at call time: a literal `flags=[]` default would be
    # shared across calls, and `os.getcwd()` / `get_condor_arguments()` in
    # the signature would be evaluated once at import instead of per call.
    if flags is None:
        flags = []
    if outdir is None:
        outdir = os.getcwd()
    if condor_commands is None:
        condor_commands = get_condor_arguments()
    logdir = os.path.join(outdir, 'logs')
    subdir = os.path.join(outdir, 'condor')
    executable = find_executable('gwdetchar-omega')
    # create DAG and jobs
    dagman = Dagman(name=tag, submit=subdir)
    job = Job(
        dag=dagman,
        name=os.path.basename(executable),
        executable=executable,
        universe=universe,
        submit=subdir,
        error=logdir,
        output=logdir,
        getenv=True,
        # the local universe runs on the submit host, so skip the request
        request_memory=4096 if universe != "local" else None,
        extra_lines=condor_commands
    )
    # make a node in the workflow for each event time
    for t in times:
        cmd = " ".join([str(t)] + [
            "--output-directory", os.path.join(outdir, str(t))] + flags)
        job.add_arg(cmd, name=str(t).replace(".", "_"))
    # write and submit the DAG
    dagman.build(fancyname=False)
    print("Workflow generated for {} times".format(len(times)))
    if submit:
        dagman.submit_dag(submit_options="-force")
        print(
            "Submitted to condor, check status via:\n\n"
            "$ condor_q {}".format(getuser())
        )
    else:
        print(
            "Submit to condor via:\n\n"
            "$ condor_submit_dag {0.submit_file}".format(dagman),
        )
    return dagman
Beispiel #28
0
def test_job_default_param_future_warning():
    """Constructing a Job with default params emits the 0.5.0 FutureWarning."""
    expected_msg = ('The default values for the universe, getenv, and '
                    'notification Job parameters will be changed to None '
                    'in release version 0.5.0.')
    with pytest.warns(FutureWarning, match=expected_msg):
        Job(name='jobname', executable=example_script)
Beispiel #29
0
def test_init_retry_type_fail():
    """A non-int retry value raises TypeError when the Job is built."""
    with pytest.raises(TypeError) as excinfo:
        Job('jobname', example_script, retry='20').build()
    assert str(excinfo.value) == 'retry must be an int'
Beispiel #30
0
def test_init_arguments_iterable():
    """An iterable of arguments becomes one JobArg per entry, in order."""
    arguments = ['arg{}'.format(i) for i in range(10)]
    job = Job(name='jobname', executable=example_script, arguments=arguments)
    assert len(job.args) == len(arguments)
    for idx, jobarg in enumerate(job.args):
        assert jobarg.arg == arguments[idx]
Beispiel #31
0
def test_init_arguments():
    """A single string argument becomes exactly one JobArg."""
    argument = 'my special argument'
    job = Job(name='jobname', executable=example_script, arguments=argument)
    assert len(job.args) == 1
    assert job.args[0].arg == argument