Example #1
def pipeline_seg2(st, segment, cfile=None, vys_timeout=vys_timeout_default):
    """ Submit pipeline processing of a single segment to scheduler.
    No multi-threading or scheduling.
    """

    # plan fft
    wisdom = search.set_wisdom(st.npixx, st.npixy)

    data = source.read_segment(st, segment, timeout=vys_timeout, cfile=cfile)
    data_prep = source.data_prep(st, data)

    for dmind in range(len(st.dmarr)):
        delay = util.calc_delay(st.freq, st.freq.max(), st.dmarr[dmind],
                                st.inttime)
        data_dm = search.dedisperse(data_prep, delay)

        for dtind in range(len(st.dtarr)):
            data_dmdt = search.resample(data_dm, st.dtarr[dtind])
            canddatalist = search.search_thresh(st,
                                                data_dmdt,
                                                segment,
                                                dmind,
                                                dtind,
                                                wisdom=wisdom)

            features = candidates.calc_features(canddatalist)
            search.save_cands(st, features, canddatalist)
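
A minimal usage sketch for pipeline_seg2 above. The SDM path and telcal gain file are placeholders, the state construction mirrors the prepare_data example further down, and the function itself is assumed to be defined or imported as shown above.

# Usage sketch (assumption: 'my_obs.sdm' and 'my_obs.GN' are placeholder paths).
from rfpipe import state

# Build a search state for scan 1 of the SDM, pointing at the telcal gain file.
st = state.State(sdmfile='my_obs.sdm', sdmscan=1,
                 inprefs={'gainfile': 'my_obs.GN', 'workdir': '.'},
                 showsummary=False)

# Search the first segment directly, with no scheduler involved.
pipeline_seg2(st, segment=0)
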
Example #2
def pipeline_seg(st, segment, cfile=None, vys_timeout=vys_timeout_default, devicenum=None):
    """ Submit pipeline processing of a single segment on a single node.
    state/preference has fftmode that will determine functions used here.
    """

    data = source.read_segment(st, segment, timeout=vys_timeout, cfile=cfile)
    candcollection = prep_and_search(st, segment, data, devicenum=devicenum)

    return candcollection
Example #3
def pipeline_seg(st, segment, cfile=None, vys_timeout=vys_timeout_default, devicenum=None):
    """ Submit pipeline processing of a single segment on a single node.
    state/preference has fftmode that will determine functions used here.
    """

    from rfpipe import source

    data = source.read_segment(st, segment, timeout=vys_timeout, cfile=cfile)
    candcollection = prep_and_search(st, segment, data, devicenum=devicenum)

    return candcollection
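
A hypothetical call to pipeline_seg, with the same placeholder paths. The module-level call pipeline.pipeline_seg follows the usage in inject_one below; devicenum selects the GPU device depending on the fftmode preference, and None keeps the signature's default.

# Usage sketch (placeholder paths; devicenum left at its default).
from rfpipe import state, pipeline

st = state.State(sdmfile='my_obs.sdm', sdmscan=1,
                 inprefs={'gainfile': 'my_obs.GN', 'workdir': '.'},
                 showsummary=False)

cc = pipeline.pipeline_seg(st=st, segment=0, devicenum=None)
print('{0} candidates found'.format(len(cc)))
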
Example #4
def pipeline_dataprep(st, candloc):
    """ Prepare (read, cal, flag) data for a given state and candloc.
    """

    segment, candint, dmind, dtind, beamnum = candloc.astype(int)

    # prep data
    data = source.read_segment(st, segment)
    data_prep = source.data_prep(st, data)

    return data_prep
Example #5
def pipeline_dataprep(st, candloc):
    """ Prepare (read, cal, flag) data for a given state and candloc.
    """

    segment, candint, dmind, dtind, beamnum = candloc

    # propagate through to new candcollection
    st.prefs.segmenttimes = st._segmenttimes.tolist()

    # prep data
    data = source.read_segment(st, segment)
    flagversion = "rtpipe" if hasattr(st, "rtpipe_version") else "latest"
    data_prep = source.data_prep(st, segment, data, flagversion=flagversion)

    return data_prep
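
A sketch of driving pipeline_dataprep from a candidate location. The candloc layout (segment, candint, dmind, dtind, beamnum) matches the unpacking above; the numeric values and paths are hypothetical.

# Usage sketch (hypothetical candidate location and placeholder paths).
import numpy as np
from rfpipe import state

st = state.State(sdmfile='my_obs.sdm', sdmscan=1,
                 inprefs={'gainfile': 'my_obs.GN', 'workdir': '.'},
                 showsummary=False)

# (segment, candint, dmind, dtind, beamnum), as unpacked in pipeline_dataprep
candloc = np.array([0, 112, 2, 1, 0])
data_prep = pipeline_dataprep(st, candloc)
print(data_prep.shape)  # expected axes: (integrations, baselines, channels, pols)
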
Example #6
def prepare_data(sdmfile,
                 gainfile,
                 delta_l,
                 delta_m,
                 segment=0,
                 dm=0,
                 dt=1,
                 spws=None):
    """
    
    Applies Calibration, flagging, dedispersion and other data preparation steps
    from rfpipe. Then phaseshifts the data to the location of the candidate. 
    
    """
    st = state.State(sdmfile=sdmfile,
                     sdmscan=1,
                     inprefs={
                         'gainfile': gainfile,
                         'workdir': '.',
                         'maxdm': 0,
                         'flaglist': []
                     },
                     showsummary=False)
    if spws:
        st.prefs.spw = spws

    data = source.read_segment(st, segment)

    takepol = [st.metadata.pols_orig.index(pol) for pol in st.pols]
    takebls = [
        st.metadata.blarr_orig.tolist().index(list(bl)) for bl in st.blarr
    ]
    datap = np.require(data, requirements='W').take(takepol, axis=3).take(
        st.chans, axis=2).take(takebls, axis=1)
    datap = source.prep_standard(st, segment, datap)
    datap = calibration.apply_telcal(st, datap)
    datap = flagging.flag_data(st, datap)

    delay = calc_delay(st.freq, st.freq.max(), dm, st.inttime)
    data_dmdt = dedisperseresample(datap, delay, dt)

    print(f'shape of data_dmdt is {data_dmdt.shape}')

    uvw = get_uvw_segment(st, segment)
    phase_shift(data_dmdt, uvw=uvw, dl=delta_l, dm=delta_m)

    dataret = data_dmdt
    return dataret, st
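
A hypothetical call to prepare_data. The offsets delta_l and delta_m are in radians, matching the math.radians conversion used for injected candidate positions below; the paths, offsets, and DM value are placeholders.

# Usage sketch (placeholder paths and values; offsets given in radians).
import math

data_dmdt, st = prepare_data('my_obs.sdm', 'my_obs.GN',
                             delta_l=math.radians(0.01),
                             delta_m=math.radians(-0.005),
                             segment=0, dm=56.8, dt=1)
print(data_dmdt.shape)
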
Example #7
def pipeline_dataprep(st, candloc):
    """ Prepare (read, cal, flag) data for a given state and candloc.
    """

    from rfpipe import source

    segment, candint, dmind, dtind, beamnum = candloc

    # propagate through to new candcollection
    st.prefs.segmenttimes = st._segmenttimes.tolist()

    # prep data
    data = source.read_segment(st, segment)
    flagversion = "rtpipe" if hasattr(st, "rtpipe_version") else "latest"
    data_prep = source.data_prep(st, segment, data, flagversion=flagversion)

    return data_prep

st = state.State(sdmfile=sdmname,
                 sdmscan=scannum,
                 preffile=preffile,
                 name='NRAOdefault' + band,
                 showsummary=False,
                 inprefs=prefs)

st.prefs.gainfile = gainfile
st.prefs.workdir = '/hyrule/data/users/kshitij/fetchrf/sim_frbs/'  #fetch_data_dir+datasetId
logging.info('Working directory set to {0}'.format(st.prefs.workdir))

nseg = st.nsegment
# Random segment choice
segment = random.randint(0, nseg - 1)

data = source.read_segment(st, segment)

# if not any(data):  # TODO: fix "axis 3 is out of bounds for array of dimension 1" error that occurred here
#     continue

# Random location of candidate in the radio image
l = math.radians(random.uniform(-st.fieldsize_deg / 2, st.fieldsize_deg / 2))
m = math.radians(random.uniform(-st.fieldsize_deg / 2, st.fieldsize_deg / 2))

# Random SNR choice
snr = 0
while snr < 8:
    snr = np.random.lognormal(2.5, 1)
#     snr  = random.uniform(10,100)

def inject_one(preffile, devicenum, outdir):
    """
    Inject one simulated FRB into simulated data and save unclustered candidates.

    :param preffile: Preference file with search preferences
    :param devicenum: GPU device number
    :param outdir: Output directory
    :return:
    """
    configs = ["A", "B", "C", "D"]
    bands = ["L", "S", "X", "C"]
    config = configs[np.random.randint(len(configs))]
    band = bands[np.random.randint(len(bands))]

    t0 = time.Time.now().mjd
    meta = metadata.mock_metadata(
        t0,
        t0 + 10 / (24 * 3600),
        20,
        11,
        32 * 4 * 2,
        2,
        5e3,
        scan=1,
        datasource="sim",
        antconfig=config,
        band=band,
    )

    dataset = meta["datasetId"] + "_config_" + config + "_band_" + band

    workdir = outdir + "/" + dataset

    try:
        os.mkdir(workdir)
    except FileExistsError:
        logging.info("Directory {0} exists, using it.".format(workdir))
    except OSError:
        logging.info("Can't create directory {0}".format(workdir))
    else:
        logging.info("Created directory {0}".format(workdir))

    prefs = {}
    prefs["workdir"] = workdir
    prefs["savenoise"] = False
    prefs["fftmode"] = "fftw"
    prefs["nthread"] = 10
    prefs["flaglist"] = [
        ("badchtslide", 4.0, 20),
        ("badchtslide", 4, 20),
        ("badspw", 4.0),
        ("blstd", 3.5, 0.008),
    ]

    st = state.State(
        inmeta=meta,
        showsummary=False,
        preffile=preffile,
        name="NRAOdefault" + band,
        inprefs=prefs,
    )
    segment = 0
    data = source.read_segment(st, segment)
    dmind = None
    dtind = None
    snr = np.random.uniform(low=10, high=40)
    mock = util.make_transient_params(st,
                                      snr=snr,
                                      segment=segment,
                                      data=data,
                                      ntr=1,
                                      lm=-1,
                                      dmind=dmind,
                                      dtind=dtind)

    st.clearcache()
    st.prefs.simulated_transient = mock

    cc = pipeline.pipeline_seg(st=st, segment=segment, devicenum=devicenum)

    if not len(cc):
        logging.info(
            "No candidate found. Deleting the empty pickle, and trying with a higher SNR now."
        )
        pkl = glob.glob(cc.state.prefs.workdir + "/*pkl")[0]
        try:
            os.remove(pkl)
        except OSError:
            pass
        snr = snr + 5
        mock = util.make_transient_params(
            st,
            snr=snr,
            segment=segment,
            data=data,
            ntr=1,
            lm=-1,
            dmind=dmind,
            dtind=dtind,
        )

        st.clearcache()
        st.prefs.simulated_transient = mock
        cc = pipeline.pipeline_seg(st=st, segment=segment, devicenum=devicenum)
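
A minimal driver sketch for inject_one. The preference file name and output directory are placeholders; devicenum is passed through to pipeline.pipeline_seg exactly as in the function body above.

# Driver sketch (placeholder preference file and output directory).
if __name__ == '__main__':
    inject_one(preffile='realfast.yml', devicenum=0, outdir='/tmp/injections')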