# Forward the simulation parameters to the generator executable.
# (`arguments` already holds earlier options built before this point.)
for option in ("number", "size", "time", "dt", "rms"):
    arguments += " --" + option + " " + str(getattr(args, option))
generator_job.add_arg(arguments)

# The processor consumes the generator's .npz output file(s).
if add_file_indices:
    files = [filename + "_" + suffix + ".npz" for suffix in output_suffixes]
else:
    files = [filename + ".npz"]

arguments = " ".join(files)
arguments += " --output " + str(args.envelope)
arguments += " --amplification " + str(args.amplification)
processor_job.add_arg(arguments)

# Create job dependencies:
# processor_job doesn't start until generator_job has finished.
generator_job.add_child(processor_job)

# Group both jobs under one DAG so Condor enforces the ordering.
dagman = Dagman("generate_" + basename, submit=submit, verbose=2)
dagman.add_job(generator_job)
dagman.add_job(processor_job)

# Write all necessary submit files and submit the DAG to Condor.
dagman.build_submit()
# NOTE(review): this chunk uses loop variables (`energy`, `i`, `o`) and
# accumulators (`transfer_files`, `file_remaps`) introduced before the visible
# region — presumably inside per-energy / per-iteration loops. Confirm the
# indentation levels against the full file.

# Zero-padded iteration label, used both in the logfile name and the job name.
iteration_tag = str(i).zfill(zfill_amount)

# Substitute the placeholders in the logfile-name template.
replaced_name = (
    logfile_name
    .replace("OUTPUT", o[:o.rindex(".")])
    .replace("ENERGY", energy)
    .replace("ITERATION", iteration_tag)
)
args.args[logfile_index] = replaced_name
transfer_files.append(replaced_name)
file_remaps.append(
    replaced_name + '=' + os.path.join(logfile_dirname, replaced_name))

# One Condor job per (energy, iteration) pair: Condor transfers the listed
# output files back on exit and remaps them into the logfile directory.
job = Job(
    descriptive_name + "_" + energy + "_" + iteration_tag,
    executable=script_file,
    output=output,
    error=error,
    log=log,
    submit=submit,
    # request_memory="5GB",
    extra_lines=[
        "should_transfer_files = YES",
        "transfer_output_files = " + ", ".join(transfer_files),
        'transfer_output_remaps = "' + '; '.join(file_remaps) + '"',
        "when_to_transfer_output = ON_EXIT",
    ],
    verbose=2 if args.verbose else 0)
job.add_arg(" ".join([energy] + args.args))
dag.add_job(job)

# Write all necessary submit files and submit the dagman to Condor,
# optionally capping the number of concurrently queued jobs.
if args.maxjobs > 0:
    dag.build_submit(submit_options="-maxjobs " + str(args.maxjobs))
else:
    dag.build_submit()