Example #1
def find_pulsars_in_fov_main(kwargs):
    import csv
    # Option parsing
    if not kwargs["obsid"]:
        raise ValueError(
            "Please input observation id by setting -o or --obsid. Exiting")

    if not (kwargs["begin"] and kwargs["end"]):
        # Fill in any missing start/end times from the observation metadata
        kwargs["begin"], kwargs["end"] = obs_max_min(kwargs["obsid"])

    output_list = find_pulsars_in_fov(
        kwargs["obsid"],
        kwargs["begin"],
        kwargs["end"],
        fwhm=kwargs["fwhm"],
        search_radius=kwargs["search_radius"],
        no_known_pulsars=kwargs["no_known_pulsars"],
        no_search_cands=kwargs["no_search_cands"])
    if kwargs['n_pointings'] is None:
        with open(f"{kwargs['obsid']}_fov_sources.csv", 'w',
                  newline='') as csvfile:
            spamwriter = csv.writer(csvfile, delimiter=',')
            for ol in output_list:
                if len(ol) == 0:
                    # Write a space to the line, which prevents Nextflow formatting errors
                    #spamwriter.writerow([" "])
                    csvfile.write(" \n")
                else:
                    spamwriter.writerow(ol)
    else:
        # Pair up the (name_list, pointing_list) entries for each output type
        name_pointing_pairs = [
            output_list[i:i + 2] for i in range(0, len(output_list), 2)
        ]
        pair_names = ["pulsar", "vdif", "pulsar_search", "single_pulse"]
        # Loop over each output type
        for name, (name_list, pointing_list) in zip(pair_names,
                                                    name_pointing_pairs):
            if len(name_list) != 0:
                # Split the pointing list into chunks of the required length
                pointing_list_chunks = [
                    pointing_list[x:x + kwargs['n_pointings']] for x in range(
                        0, len(pointing_list), kwargs['n_pointings'])
                ]
                for ci in range(len(pointing_list_chunks)):
                    first_id = ci * kwargs['n_pointings'] + 1
                    last_id = ci * kwargs['n_pointings'] + len(
                        pointing_list_chunks[ci])
                    out_file_name = f"{kwargs['obsid']}_fov_{name}_sources_{first_id}_{last_id}.txt"
                    print(f"Recording {name} sources in {out_file_name}")
                    with open(out_file_name, 'w') as out_file:
                        for out_pointing in pointing_list_chunks[ci]:
                            out_file.write(f"{out_pointing}\n")
Example #2
            logger.info(version.__version__)
            sys.exit(0)
        except ImportError as IE:
            logger.error("Couldn't import version.py - have you installed vcstools?")
            logger.error("ImportError: {0}".format(IE))
            sys.exit(0)

    # Option parsing
    if not args.obs:
        logger.error("Observation ID required, please put in with -o or --obs")
        sys.exit(0)
    if args.all and (args.begin or args.end):
        logger.error("Please specify EITHER (-b,-e) OR -a")
        sys.exit(0)
    elif args.all:
        args.begin, args.end = meta.obs_max_min(
            args.cal_obs if args.mode == 'download_cal' else args.obs)
    elif args.mode != 'download_cal' and (not args.begin or not args.end):
        logger.debug(args.mode)
        logger.error("Please specify EITHER (-b,-e) OR -a")
        sys.exit(0)
    # Cap the increment at the observation length so we can process
    # increments smaller than 64 seconds when not in a calibration-related mode
    if args.mode != 'download_cal':
        if args.end - args.begin + 1 < args.increment:
            args.increment = args.end - args.begin + 1
    e_mail = ""
    if args.mail:
        e_mail = get_user_email()
        logger.info("Sending info to {0}".format(e_mail))
    if not args.mode:
        logger.error("Mode required, please specify with -m or --mode. "
                     "Available modes: {0}".format(modes))
        sys.exit(0)
    # set up the logger for stand-alone execution
    logger = setup_logger(logger, log_level=loglevels[args.loglvl])

    # get metadata
    common_metadata = get_common_obs_metadata(args.obsid)

    # Option parsing
    if not args.obsid:
        logger.error(
            "Observation ID required, please put in with -o or --obsid")
        sys.exit(0)
    if args.all and (args.begin or args.end):
        logger.error("Please specify EITHER (-b,-e) OR -a")
        sys.exit(0)
    elif args.all:
        args.begin, args.end = obs_max_min(args.obsid)
    if args.begin and args.end:
        if args.begin > args.end:
            logger.error("Starting time is after end time")
            sys.exit(0)
    if args.beam_model not in beam_models:
        logger.error(
            "Unknown beam model. Please use one of {0}. Exiting.".format(
                beam_models))
        quit()

    # Default parsing
    if args.freq is None:
        args.freq = common_metadata[5] * 1e6  # Hz
        logger.info("Using the observation's centre frequency: {} MHz".format(
            args.freq / 1e6))
def test_obs_max_min():
    """Test the obs_max_min function."""
    if obs_max_min(1133775752) != (1133775759, 1133780672):
        raise AssertionError
        data_dict["metadata"]["ra_pointing"]))
    print("DEC Pointing (deg): {:6.2f}".format(
        data_dict["metadata"]["dec_pointing"]))
    print("Centrefreq (MHz):   {}".format(centre_freq))
    print("Channels:           {}".format(
        data_dict["rfstreams"]["0"]["frequencies"]))
    if array_phase != 'OTH':
        print("~FWHM (arcminute):  {:4.2f}".format(
            calc_ta_fwhm(centre_freq, array_phase=array_phase) * 60.))
    print("FoV (square deg):   {:5.1f}".format(fov))

    # Perform file metadata calls
    files_meta_data = getmeta(servicetype='metadata',
                              service='data_files',
                              params={'obs_id': str(args.obsid)})
    start, stop = obs_max_min(args.obsid, files_meta_data=files_meta_data)
    # Check available files
    available_files, all_files = files_available(
        args.obsid, files_meta_data=files_meta_data)
    # Split into raw and combined files to give a clearer idea of files available
    available_comb = []
    available_raw = []
    available_ics = []
    all_comb = []
    all_raw = []
    all_ics = []
    for file_name in all_files:
        if 'combined' in file_name:
            all_comb.append(file_name)
            if file_name in available_files:
                available_comb.append(file_name)
def source_beam_coverage_and_times(obsid,
                                   pulsar,
                                   p_ra=None,
                                   p_dec=None,
                                   obs_beg=None,
                                   obs_end=None,
                                   files_beg=None,
                                   files_end=None,
                                   min_z_power=0.3,
                                   dt_input=100,
                                   common_metadata=None,
                                   query=None,
                                   beam='analytic'):
    """Finds the normalised time that a pulsar is in the beam for a given obsid.
    If pulsar is not in beam, returns None, None

    Parameters
    ----------
    obsid : `int`
        The observation ID
    pulsar : `str`
        The pulsar's J name
    p_ra, p_dec : `str`, optional
        The target's right ascension and declination in sexagesimal format.
        If not supplied will use the values from the ATNF catalogue.
    obs_beg, obs_end : `int`, optional
        Beginning and end GPS time of the observation.
        If not supplied will use :py:meth:`vcstools.metadb_utils.obs_max_min` to find it.
    files_beg, files_end : `int`, optional
        Beginning and end GPS time of the (fits or VCS) files.
        If not supplied will assume the full observation is available.
    min_z_power : `float`, optional
        Zenith normalised power cut off. |br| Default: 0.3.
    common_metadata : `list`, optional
        The list of common metadata generated from :py:meth:`vcstools.metadb_utils.get_common_obs_metadata`
    query : psrqpy object, optional
        A previous psrqpy query. Can be supplied to prevent performing a new query.
    beam : `str`, optional
        The primary beam model to use out of [analytic, advanced, full_EE]. |br| Default: analytic.

    Returns
    -------
    dect_beg, dect_end : `float`
        GPS times at which the source enters and exits the beam.
    dect_beg_norm, dect_end_norm : `float`
        Normalised times (between 0 and 1) at which the source enters and exits the beam,
        relative to the observation.
    files_beg_norm, files_end_norm : `float`
        Normalised times at which the source enters and exits the beam, relative to the
        files on disk. Both are None if the source is not in the beam for those files.
    obs_beg, obs_end, obs_dur : `int`
        Beginning GPS time, end GPS time and duration (in seconds) of the observation.
    """
    # Perform required metadata calls
    if query is None:
        query = psrqpy.QueryATNF(psrs=pulsar,
                                 loadfromdb=data_load.ATNF_LOC).pandas
    if p_ra is None or p_dec is None:
        # Get some basic pulsar and obs info
        query_id = list(query['PSRJ']).index(pulsar)
        p_ra = query["RAJ"][query_id]
        p_dec = query["DECJ"][query_id]
    if not common_metadata:
        common_metadata = get_common_obs_metadata(obsid)
    if obs_beg is None or obs_end is None:
        obs_beg, obs_end = obs_max_min(obsid)
    obs_dur = obs_end - obs_beg + 1
    if not files_beg:
        files_beg = obs_beg
    if not files_end:
        files_end = obs_end
    files_dur = files_end - files_beg + 1

    beam_coverage = source_beam_coverage(
        [obsid], [[pulsar, p_ra, p_dec]],
        common_metadata_list=[common_metadata],
        dt_input=dt_input,
        beam=beam,
        min_z_power=min_z_power)
    if pulsar not in beam_coverage[obsid].keys():
        # Source is not in the beam, exit early
        return None, None, None, None, None, None, None, None, None
    dect_beg_norm, dect_end_norm, _ = beam_coverage[obsid][pulsar]

    # GPS times the source enters and exits beam
    dect_beg = obs_beg + obs_dur * dect_beg_norm
    dect_end = obs_beg + obs_dur * dect_end_norm

    # Normalised time the source enters/exits the beam in the files (used for Presto commands)
    files_beg_norm = (dect_beg - files_beg) / files_dur
    files_end_norm = (dect_end - files_beg) / files_dur

    if files_beg_norm > 1. or files_end_norm < 0.:
        logger.debug(
            "source {0} is not in the beam for the files on disk".format(
                pulsar))
        files_beg_norm = None
        files_end_norm = None
    else:
        if files_beg_norm < 0.:
            files_beg_norm = 0.
        if files_end_norm > 1.:
            files_end_norm = 1.

    return dect_beg, dect_end, dect_beg_norm, dect_end_norm, files_beg_norm, files_end_norm, obs_beg, obs_end, obs_dur
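A minimal usage sketch (not from the source), assuming the function above is in scope; the obsid and pulsar name are illustrative only:

(dect_beg, dect_end,
 dect_beg_norm, dect_end_norm,
 files_beg_norm, files_end_norm,
 obs_beg, obs_end, obs_dur) = source_beam_coverage_and_times(
     1133775752, "J0437-4715", min_z_power=0.3, beam='analytic')

if dect_beg is None:
    # All nine values are None when the pulsar never enters the beam
    print("Pulsar is not in the beam for this observation")
else:
    print("In beam from GPS {0:.0f} to {1:.0f} "
          "({2:.2f}-{3:.2f} of the observation)".format(
              dect_beg, dect_end, dect_beg_norm, dect_end_norm))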
Example #7
    # Remove pointings outside of the RA or Dec range
    if args.dec_range != [-90, 90] or args.ra_range != [0, 360]:
        print("Removing pointings outside of the RA and Dec ranges")
        radls = []
        decdls = []
        for i in range(len(rads)):
            if (args.dec_range[0] < float(decds[i]) < args.dec_range[1]) and \
               (args.ra_range[0] < float(rads[i]) < args.ra_range[1]):
                radls.append(rads[i])
                decdls.append(decds[i])
        rads = radls
        decds = decdls

    if args.all_pointings:
        # Calculate powers
        obeg, oend = meta.obs_max_min(obs)
        if args.begin:
            # Offset of the requested start time from the observation start
            start_time = args.begin - obeg
        else:
            start_time = 0
        if args.end and args.begin:
            duration = args.end - args.begin
        elif args.end:
            duration = args.end - obeg
        else:
            # Otherwise default to the full observation length
            duration = oend - obeg
        obs_metadata = [obs, ra, dec, duration, xdelays, centrefreq, channels]
        names_ra_dec = []
        for ni in range(len(rads)):
            if float(decds[ni]) < -90.:
                continue
            names_ra_dec.append(["name", rads[ni], decds[ni]])
        names_ra_dec = np.array(names_ra_dec)
Example #8
def multi_psr_snfe(pulsar_list, obsid, obs_beg, obs_end,
                   common_metadata=None, full_metadata=None,
                   query=None, plot_flux=False,
                   min_z_power=0.3, trcvr=data_load.TRCVR_FILE):
    """Runs :py:meth:`vcstools.sn_flux_utils.est_pulsar_sn` for multiple pulsars in the same MWA observation.

    Parameters
    ----------
    pulsar_list : `list`
        A list of the pulsar Jnames.
    obsid : `int`
        The MWA Observation ID.
    obs_beg, obs_end : `int`
        Beginning and end GPS time of the observation.
        If not supplied will use :py:meth:`vcstools.metadb_utils.obs_max_min` to find it.
    common_metadata : `list`, optional
        The list of common metadata generated from :py:meth:`vcstools.metadb_utils.get_common_obs_metadata`
    full_metadata : `dict`, optional
        The dictionary of metadata generated from :py:meth:`vcstools.metadb_utils.getmeta`
    query : psrqpy object, optional
        A previous psrqpy.QueryATNF query. Can be supplied to prevent performing a new query.
    plot_flux : `boolean`, optional
        If `True` will produce a plot of the flux estimation. |br| Default: False
    min_z_power : `float`, optional
        Zenith normalised power cut off. |br| Default: 0.3.
    trcvr : `str`, optional
        The location of the MWA receiver temp csv file. |br| Default: <vcstools_data_dir>MWA_Trcvr_tile_56.csv

    Returns
    -------
    sn_dict : `dict`
        A dictionary where each key is the pulsar Jname and contains a list of the following:
        sn_dict[pulsar] = [sn, sn_e, s, s_e]
        sn : `float`
            The expected signal to noise ratio for the given inputs.
        sn_e : `float`
            The uncertainty in the signal to noise ratio.
        s : `float`
            The expected flux density of the pulsar.
        s_e : `float`
            The uncertainty in the expected flux density of the pulsar.
    """
    logger.info("""This script may use estimations where data is missing.
    For full verbosity, use the DEBUG logger (ie. -L DEBUG)""")

    if common_metadata is None or full_metadata is None:
        logger.debug("Obtaining obs metadata")
        common_metadata, full_metadata = get_common_obs_metadata(obsid, return_all=True, full_metadata=full_metadata)

    if obs_beg is None or obs_end is None:
        obs_beg, obs_end = obs_max_min(obsid)

    mega_query = psrqpy.QueryATNF(psrs=pulsar_list, loadfromdb=data_load.ATNF_LOC).pandas
    sn_dict = {}
    for i, pulsar in enumerate(progress_bar(mega_query["PSRJ"], "Calculating pulsar SN: ")):
        psr_query = {}
        for key in mega_query.keys():
            psr_query[key] = [mega_query[key][i]]

        sn, sn_e, s, s_e = est_pulsar_sn(pulsar, obsid,
                                         obs_beg=obs_beg, obs_end=obs_end,
                                         common_metadata=common_metadata, full_metadata=full_metadata,
                                         plot_flux=plot_flux, query=psr_query,
                                         min_z_power=min_z_power, trcvr=trcvr)

        sn_dict[pulsar] = [sn, sn_e, s, s_e]

    return sn_dict
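A minimal usage sketch (not from the source), assuming the function above is in scope; the pulsar names are illustrative, and the obsid and start/end GPS times reuse the values from the obs_max_min test earlier on this page:

sn_dict = multi_psr_snfe(["J0437-4715", "J0630-2834"], 1133775752,
                         1133775759, 1133780672)
for jname, (sn, sn_e, s, s_e) in sn_dict.items():
    print("{0}: S/N = {1} +/- {2}, flux density = {3} +/- {4}".format(
        jname, sn, sn_e, s, s_e))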