Example #1
def submit_rm_cor_rvm(run_params):
    """
    Runs the RM correction on the dspsr archive and writes the result to a text file.
    Relaunches the stokes_fold script afterwards for plotting

    Parameters:
    -----------
    run_params: object
        The run_params object from data_processing_pipeline.py
    """
    os.chdir(run_params.pointing_dir)
    filenames_dict = create_filenames(run_params)
    #Look for files in pointing dir
    is_rmsynth = isfile(filenames_dict["rmsynth"])
    is_rmfit = isfile(filenames_dict["rmfit"])

    #Correct for RM
    #Initialise RM so the catalogue fallback is reached when neither result file exists
    RM = None
    if is_rmsynth:
        logger.info("Using RM synthesis result for correction")
        rm_dict = rm_synthesis.read_rmsynth_out(filenames_dict["rmsynth"])
        RM = rm_dict["0"]["rm"]
    elif is_rmfit:
        logger.info("Using rmfit result for correction")
        RM = find_RM_from_file(filenames_dict["rmfit"])[0]
    if not RM:
        #Fall back to the catalogue value when no measurement is available
        RM = find_RM_from_cat(run_params.pulsar)[0]
    run_params.RM = RM
    commands = add_rm_cor_to_commands(run_params.pointing_dir, run_params.RM,
                                      filenames_dict["archive1"],
                                      filenames_dict["archive2"],
                                      filenames_dict["ascii"])

    #RVM fitting
    commands = add_rvm_to_commands(run_params.pointing_dir, filenames_dict["archive2"], rvmfile=filenames_dict["rvmfit"], chimap=filenames_dict["chimap"],\
               commands=commands, res=run_params.rvmres)

    #relaunch
    if not run_params.stop:
        launch_line = dpp.stokes_launch_line(run_params)
        commands.append(launch_line)

    job_name = "ipfb_RMcor_RVM_{}".format(run_params.file_prefix)
    if not run_params.ipfb:
        job_name = job_name[5:]
    batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                       run_params.obsid)
    job_id = submit_slurm(job_name, commands,\
                        batch_dir=batch_dir,\
                        slurm_kwargs={"time": "06:00:00"},\
                        module_list=["mwa_search/{0}".format(run_params.mwa_search),
                                    "psrchive/master"],\
                        submit=True, vcstools_version=run_params.vcs_tools, mem="")

    return job_id
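
A minimal usage sketch for submit_rm_cor_rvm. The real run_params object is built by data_processing_pipeline.py; the SimpleNamespace and every attribute value below are placeholders standing in only for the attributes this function reads.

#Hypothetical illustration: mimic the run_params attributes used above
from types import SimpleNamespace

run_params = SimpleNamespace(
    pointing_dir="/path/to/pointing",      #directory holding the dspsr archives and RM results
    pulsar="J0000-0000",                   #used for the catalogue RM fallback
    obsid=1234567890,                      #used to build the batch directory
    file_prefix="1234567890_J0000-0000",   #prefix for the job name and output files
    ipfb=False,                            #if False the "ipfb_" job-name prefix is dropped
    stop=False,                            #if False the stokes_fold script is relaunched for plotting
    rvmres=90,                             #resolution passed to the RVM fitting step
    mwa_search="master",                   #mwa_search module version to load
    vcs_tools="master",                    #vcstools version passed to submit_slurm
    RM=None)

job_id = submit_rm_cor_rvm(run_params)
print("Submitted Slurm job:", job_id)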
Example #2
def submit_to_db_and_continue(run_params, best_bins):
    """
    Submits the best fold profile to the pulsar database. Will also submit .ppps, .png and .pfd

    Parameters:
    -----------
    run_params: object
        The run_params object defined in data_processing_pipeline
    best_bins: int
        The number of bins of the best fold, used to find the matching .pfd, .bestprof, .ps and .png products
    """
    ppps = "*_{}*_bins*.pfd.ps".format(best_bins)
    ppps = glob.glob(os.path.join(run_params.pointing_dir, ppps))[0]
    bestprof = "*_{}*_bins*.pfd.bestprof".format(best_bins)
    bestprof = glob.glob(os.path.join(run_params.pointing_dir, bestprof))[0]
    png = "*_{}*_bins*.png".format(best_bins)
    png = glob.glob(os.path.join(run_params.pointing_dir, png))[0]
    pfd = "*_{}*_bins*.pfd".format(best_bins)
    pfd = glob.glob(os.path.join(run_params.pointing_dir, pfd))[0]

    commands = []
    commands.append("cd {}".format(run_params.pointing_dir))
    commands.append(
        "echo 'Submitting profile to database with {} bins'".format(best_bins))
    commands.append('submit_to_database.py -o {0} --cal_id {1} -p {2} --bestprof {3} --ppps {4}'\
    .format(run_params.obsid, run_params.cal_id, run_params.pulsar, bestprof, ppps))

    #Make a nice plot
    plotting_toolkit.plot_bestprof(os.path.join(run_params.pointing_dir, bestprof),\
                                    out_dir=run_params.pointing_dir)

    bin_lim = bin_sampling_limit(run_params.pulsar)
    if bin_lim > 100:
        b_standard = 100
    else:
        b_standard = 50

    if best_bins != b_standard:
        #do the same for 100/50 bin profiles depending on whether this is an msp or not
        ppps = "*_{}*_bins*.pfd.ps".format(b_standard)
        ppps = glob.glob(os.path.join(run_params.pointing_dir, ppps))[0]
        bestprof = "*_{}*_bins*.pfd.bestprof".format(b_standard)
        bestprof = glob.glob(os.path.join(run_params.pointing_dir,
                                          bestprof))[0]
        png = "*_{}*_bins*.png".format(b_standard)
        png = glob.glob(os.path.join(run_params.pointing_dir, png))[0]
        pfd = "*_{}*_bins*.pfd".format(b_standard)
        pfd = glob.glob(os.path.join(run_params.pointing_dir, pfd))[0]

        commands.append(
            "echo 'Submitting profile to database with {} bins'".format(
                b_standard))
        commands.append('submit_to_database.py -o {0} --cal_id {1} -p {2} --bestprof {3} --ppps {4}'\
                        .format(run_params.obsid, run_params.cal_id, run_params.pulsar, bestprof, ppps))

    if run_params.stokes_dep:
        #submit inverse pfb profile if it exists
        ipfb_archive = os.path.join(
            run_params.pointing_dir,
            "{0}_{1}_ipfb_archive.txt".format(run_params.obsid,
                                              run_params.pulsar))
        commands.append("echo 'Submitting inverse PFB profile to database'")
        commands.append("submit_to_database.py -o {0} --cal_id {1} -p {2} --ascii {3} --ppps {4} --start {5} --stop {6}"\
                        .format(run_params.obsid, run_params.cal_id, run_params.pulsar, ipfb_archive, ppps,\
                        run_params.beg, run_params.end))

    #Move the pointing directory
    move_loc = os.path.join(comp_config["base_product_dir"], run_params.obsid,
                            "data_products")
    pointing = run_params.pointing_dir.split("/")
    pointing = [i for i in pointing if i != ""]
    new_pointing_dir = os.path.join(move_loc, pointing[-1])
    logger.info("New pointing directory: {}".format(new_pointing_dir))

    #If any of the previous commands failed, abort before moving anything
    commands.append("errorcode=$?")
    commands.append("echo 'errorcode' $errorcode")
    commands.append('if [ "$errorcode" != "0" ]; then')
    commands.append("   echo 'Submission Failure!'")
    commands.append("   exit $errorcode")
    commands.append("fi")
    commands.append(
        "echo 'submitted profile to database: {0}'".format(bestprof))
    commands.append("echo 'Moving directory {0} to location {1}'".format(
        run_params.pointing_dir, move_loc))
    commands.append("mkdir -p {}".format(move_loc))
    if run_params.pulsar[-1].isalpha():
        commands.append("cp -ru {0} {1}".format(run_params.pointing_dir,
                                                new_pointing_dir))
    else:
        commands.append("mv {0} {1}".format(run_params.pointing_dir,
                                            new_pointing_dir))

    #submit job
    name = "Submit_db_{0}_{1}".format(run_params.pulsar, run_params.obsid)
    batch_dir = os.path.join(comp_config['base_product_dir'], run_params.obsid,
                             "batch")
    logger.info(
        "Submitting submission script for profile: {0}".format(bestprof))
    logger.info("Job name: {}".format(name))

    dep_id = submit_slurm(name, commands,\
                batch_dir=batch_dir,\
                slurm_kwargs={"time": "04:00:00"},\
                depend=run_params.stokes_dep,
                module_list=['mwa_search/{0}'.format(run_params.mwa_search)],\
                submit=True, vcstools_version="{0}".format(run_params.vcs_tools))

    if not run_params.stop:
        #Run stokes fold
        run_params.stokes_bins = best_bins
        launch_line = dpp.stokes_launch_line(run_params,
                                             dpp=True,
                                             custom_pointing=new_pointing_dir)
        commands = [launch_line]

        name = "dpp_stokes_{0}_{1}".format(run_params.pulsar, run_params.obsid)
        batch_dir = os.path.join(comp_config['base_product_dir'],
                                 run_params.obsid, "batch")
        logger.info("Submitting Stokes Fold script")
        logger.info("Job Name: {}".format(name))

        #wait for pfb inversion if it exists
        dep_ids = [dep_id]
        if run_params.stokes_dep:
            dep_ids.append(run_params.stokes_dep)

        submit_slurm(name, commands,\
                    batch_dir=batch_dir,\
                    slurm_kwargs={"time": "00:20:00"},\
                    depend=dep_ids, depend_type="afterany",\
                    module_list=['mwa_search/{0}'.format(run_params.mwa_search)],\
                    submit=True, vcstools_version="{0}".format(run_params.vcs_tools))
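
An illustrative call only: best_bins normally comes from the upstream bin-finding step of the pipeline, and run_params from data_processing_pipeline.py; the value below is a placeholder.

#Hypothetical value for demonstration
best_bins = 128   #number of bins of the best detection profile, used in the glob patterns above
submit_to_db_and_continue(run_params, best_bins)
#Unless run_params.stop is set, this also queues a follow-up Stokes fold job
#in the relocated pointing directory.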
Example #3
def submit_dspsr_rmfit(run_params):
    """
    Runs dspsr on fits files and relaunches the stokes fold script

    Parameters:
    -----------
    run_params: object
        The run_params object from data_processing_pipeline.py
    """
    if not run_params.cand:
        enter, leave, _ = binfinder.find_fold_times(run_params.pulsar,
                                                    run_params.obsid,
                                                    run_params.beg,
                                                    run_params.end,
                                                    min_z_power=[0.3, 0.1])
        obs_int = run_params.end - run_params.beg
        if enter is None or leave is None:
            logger.warning(
                "{} not in beam for given times. Will use entire integration time to fold."
                .format(run_params.pulsar))
            logger.warning("Used the following parameters:")
            logger.warning("pulsar: {}".format(run_params.pulsar))
            logger.warning("obsid: {}".format(run_params.obsid))
            logger.warning("beg: {}".format(run_params.beg))
            logger.warning("end: {}".format(run_params.end))
            enter_sec = 0
            duration = obs_int
        else:
            duration = (leave - enter) * obs_int
            enter_sec = enter * obs_int
            logger.info("{0} enters beam at {1} and leaves at {2}".format(
                run_params.pulsar, enter, leave))
            logger.info("Integration time: {}".format(duration))
    else:
        enter_sec = None
        duration = None

    filenames_dict = create_filenames(run_params)
    #dspsr command
    commands = add_dspsr_fold_to_commands(run_params.pulsar, run_params.pointing_dir, run_params.stokes_bins, out_pref=run_params.file_prefix,\
                                        seek=enter_sec, total=duration, subint=run_params.subint, dspsr_ops=run_params.dspsr_ops,\
                                        dm=run_params.dm, period=run_params.period)
    #rmfit command
    commands = add_rm_fit_to_commands(run_params.pulsar,
                                      run_params.pointing_dir,
                                      filenames_dict["archive1"],
                                      out_name=filenames_dict["rmfit"],
                                      commands=commands)

    #rmsynth command
    commands = add_rmsynth_to_commands(run_params.pointing_dir,
                                       filenames_dict["archive1"],
                                       label=run_params.file_prefix,
                                       write=True,
                                       plot=True,
                                       keep_QUV=False,
                                       commands=commands)

    #rerun the script
    if not run_params.stop:
        launch_line = dpp.stokes_launch_line(run_params)
        commands.append(launch_line)

    name = "DSPSR_RMfit_{0}".format(run_params.file_prefix)
    batch_dir = "{}".format(
        ospj(comp_config['base_product_dir'], run_params.obsid, "batch"))
    job_id = submit_slurm(name, commands,\
                        batch_dir=batch_dir,\
                        slurm_kwargs={"time": "08:00:00"},\
                        module_list=["mwa_search/{0}".format(run_params.mwa_search),\
                                    "dspsr/master", "psrchive/master"],\
                        submit=True, vcstools_version=run_params.vcs_tools, mem="")

    logger.info("Job submitted using\n\
                pointing directory:         {0}\n\
                pulsar:                     {1}"\
                .format(run_params.pointing_dir, run_params.pulsar))

    return job_id
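
A short worked example of the beam-window arithmetic used above (all numbers are made up): enter and leave are fractions of the observation returned by binfinder.find_fold_times, and they are converted to seconds before being passed as the seek and total arguments of add_dspsr_fold_to_commands.

#Illustrative numbers only
beg, end = 1110000000, 1110004800     #observation start/end in seconds
obs_int = end - beg                   #4800 s total integration
enter, leave = 0.25, 0.75             #fractional times the pulsar is in the beam
enter_sec = enter * obs_int           #seek 1200 s into the observation
duration = (leave - enter) * obs_int  #fold 2400 s of data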
Example #4
def submit_inverse_pfb_fold(run_params, stop=False):
    """
    Submits the inverse pfb folding script and fits RM

    Parameters:
    -----------
    run_params: object
        The run_params object defined in data_processing_pipeline
    stop: boolean
        OPTIONAL - When both this and run_params.stop are True, stokes_fold.py is not relaunched afterwards. Default: False

    Returns:
    --------
    job_id: int
        The id of the submitted job
    """
    #Find beam coverage for known pulsars
    if not run_params.cand:
        enter, leave, _ = binfinder.find_fold_times(run_params.pulsar,
                                                    run_params.obsid,
                                                    run_params.beg,
                                                    run_params.end,
                                                    min_z_power=[0.3, 0.1])
        obs_int = run_params.end - run_params.beg
        if enter is None or leave is None:
            logger.warning(
                "{} not in beam for given times. Will use entire integration time to fold."
                .format(run_params.pulsar))
            logger.warning("Used the following parameters:")
            logger.warning("pulsar: {}".format(run_params.pulsar))
            logger.warning("obsid: {}".format(run_params.obsid))
            logger.warning("beg: {}".format(run_params.beg))
            logger.warning("end: {}".format(run_params.end))
            enter_sec = 0
            duration = obs_int
        else:
            duration = (leave - enter) * obs_int
            enter_sec = enter * obs_int
            logger.info("{0} enters beam at {1} and leaves at {2}".format(
                run_params.pulsar, enter, leave))
            logger.info("Integration time: {}".format(duration))
    else:
        enter_sec = None
        duration = None

    #pfb inversion
    filenames_dict = create_filenames(run_params)
    commands = add_pfb_inversion_to_commands(run_params.pointing_dir, run_params.pulsar, run_params.obsid, filenames_dict["archive1"], filenames_dict["ascii"],\
               seek=enter_sec, total=duration, tscrunch=duration, dm=run_params.dm, period=run_params.period)

    #launch RM fitting
    commands = add_rm_fit_to_commands(run_params.pulsar,
                                      run_params.pointing_dir,
                                      filenames_dict["archive1"],
                                      out_name=filenames_dict["rmfit"],
                                      commands=commands)

    #launch RM synthesis
    commands = add_rmsynth_to_commands(run_params.pointing_dir,
                                       filenames_dict["archive1"],
                                       write=True,
                                       plot=True,
                                       keep_QUV=False,
                                       label=run_params.file_prefix,
                                       commands=commands)

    #Relaunch stokes_fold.py unless both stop and run_params.stop are set
    if not stop or not run_params.stop:
        launch_line = dpp.stokes_launch_line(run_params)
        commands.append(launch_line)

    batch_dir = ospj(comp_config['base_product_dir'], run_params.obsid,
                     "batch")
    name = "inverse_pfb_{0}".format(run_params.file_prefix)

    logger.info("Submitting inverse pfb job:")
    logger.info("Pointing directory: {}".format(run_params.pointing_dir))
    logger.info("Pulsar name: {}".format(run_params.pulsar))
    logger.info("Job name: {}".format(name))
    job_id = submit_slurm(name, commands,\
                        batch_dir=batch_dir,\
                        slurm_kwargs={"time": "10:00:00"},\
                        module_list=['mwa_search/{0}'.format(run_params.mwa_search),\
                                    "dspsr", "psrchive"],\
                        submit=True, vcstools_version="{0}".format(run_params.vcs_tools))

    return job_id
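
A minimal chaining sketch, assuming the same run_params object as in the earlier examples: the returned Slurm job ID can be stored as run_params.stokes_dep so that later submissions (such as submit_to_db_and_continue above) wait on the inverse-PFB fold through their depend argument.

#Hypothetical chaining example
ipfb_job_id = submit_inverse_pfb_fold(run_params, stop=True)
run_params.stokes_dep = ipfb_job_id   #downstream jobs pass this as a Slurm dependency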