LOG = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='%(asctime)-15s:' + logging.BASIC_FORMAT)

# Worker script: one PBS array task extracts all cubelets for its x-slab
# from the full simulation cube stored in an HDF5 file.
parser = argparse.ArgumentParser()
parser.add_argument('output_directory', action=WriteableDir, nargs=1, help='where the fits files will be written')
parser.add_argument('hdf5_file', nargs=1, help='the file to provide the input data')
parser.add_argument('array_id', nargs=1, type=int, help='the array id from the PDS job')
args = vars(parser.parse_args())

# NOTE(review): hdf5_file and array_id are unwrapped with [0] (nargs=1 stores a
# one-element list) but output_directory is not — presumably the WriteableDir
# action already stores the bare string; confirm, otherwise this needs [0] too.
output_dir = args['output_directory']  # The directory where the chopped files are written
input_file = args['hdf5_file'][0]
i = args['array_id'][0]

# Open the HDF5 file read-only; the context manager guarantees it is closed
# even if extraction raises part-way through (the original leaked the handle
# on the error path).
with h5py.File(input_file, 'r') as h5_file:
    data_set = h5_file['full_cube']

    # One output directory per array task (1-based in the directory name).
    path = os.path.join(output_dir, "cubelets_{0}".format(i + 1))
    mkdir_p(path)

    for j in range(len(start_y)):
        for k in range(len(start_z)):
            # only extract cubes relevant to DINGO ultradeep
            if start_z[k] + offset_z[k] < 6972:
                continue
            LOG.info('Processing %d - %d of %d - %d of %d', i, j, len(start_y) - 1, k, len(start_z) - 1)
            out_name = os.path.join(path, 'askap_cube_{0}_{1}_{2}.fits'.format(i + 1, j + 1, k + 1))
            extract_subcube(data_set, start_x[i], offset_x[i], start_y[j], offset_y[j], start_z[k], offset_z[k], out_name)
"program": program, "resources": resources, } file.write(string) file.close() def run_pbs_job(filename, job_name, array_size, home_dir, program, resources, queue_name="workq"): try: build_pbs_file(filename, job_name, array_size, home_dir, program, resources) command = "/usr/bin/qsub -q {0} {1}".format(queue_name, filename) LOG.info("Running: %s", str(command)) os.system(command) except Exception as e: LOG.exception(e) pbs_output = os.path.join(pbs_directory, "pbs_output") mkdir_p(pbs_output) run_pbs_job( os.path.join(pbs_directory, "chop_askap_sim.pbs"), "chop_askap_sim", "0-" + str(len(start_x)), pbs_output, "python /home/kevin/chop_askap/py/chop_askap_sim_kv.py {0} {1}".format(output_dir, input_file), "nodes=1:ppn=1:compute,pmem=10gb,walltime=36:00:00", queue_name="usmall", )