Example #1
    def test_crossgti3(self):
        """A more complicated example of intersection of GTIs."""
        gti1 = np.array([[1, 2], [4, 5], [7, 10]])
        newgti = cross_gtis([gti1])

        assert np.allclose(newgti, gti1), \
            'GTIs do not coincide!'
Example #2
    def test_crossgti1(self):
        """Test the basic working of the intersection of GTIs."""
        gti1 = np.array([[1, 4]])
        gti2 = np.array([[2, 5]])
        newgti = cross_gtis([gti1, gti2])

        assert np.allclose(newgti, [[2, 4]]), 'GTIs do not coincide!'
Example #3
    def test_crossgti2(self):
        """A more complicated example of intersection of GTIs."""
        gti1 = np.array([[1, 2], [4, 5], [7, 10], [11, 11.2], [12.2, 13.2]])
        gti2 = np.array([[2, 5], [6, 9], [11.4, 14]])
        newgti = cross_gtis([gti1, gti2])

        assert np.allclose(newgti, [[4.0, 5.0], [7.0, 9.0], [12.2, 13.2]]), \
            'GTIs do not coincide!'
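
The results asserted in these tests can be reproduced with a minimal pairwise interval intersection. The sketch below is only an illustration of the expected behaviour, not the actual implementation of `cross_gtis`; `intersect_two_gtis` is a made-up helper name.

import numpy as np

def intersect_two_gtis(gti_a, gti_b):
    """Naive O(n*m) intersection of two GTI lists (illustrative only)."""
    out = []
    for a0, a1 in gti_a:
        for b0, b1 in gti_b:
            lo, hi = max(a0, b0), min(a1, b1)
            if lo < hi:  # keep only overlaps of non-zero length
                out.append([lo, hi])
    return np.array(out)

gti1 = np.array([[1, 2], [4, 5], [7, 10], [11, 11.2], [12.2, 13.2]])
gti2 = np.array([[2, 5], [6, 9], [11.4, 14]])
print(intersect_two_gtis(gti1, gti2))
# [[4., 5.], [7., 9.], [12.2, 13.2]], as asserted in test_crossgti2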
Example #4
def scrunch_lightcurves(lcfilelist,
                        outfile='out_scrlc' + HEN_FILE_EXTENSION,
                        save_joint=False):
    """Create a single light curve from input light curves.

    Light curves are appended when they cover different times, and summed when
    they fall in the same time range. This is done regardless of the channel
    or the instrument.

    Parameters
    ----------
    lcfilelist : list of str
        The list of light curve files to scrunch

    Returns
    -------
    lc : :class:`Lightcurve` object
        The scrunched light curve

    Other Parameters
    ----------------
    outfile : str
        The output file name
    save_joint : bool
        If True, save the per-channel joint light curves

    See Also
    --------
    join_lightcurves : Join light curves from different files
    """
    if save_joint:
        lcdata = join_lightcurves(lcfilelist)
    else:
        lcdata = join_lightcurves(lcfilelist, outfile=None)

    instrs = list(lcdata.keys())
    # Intersect the GTIs of all the input light curves
    gti_lists = [lcdata[inst].gti for inst in instrs]
    gti = cross_gtis(gti_lists)

    # Sum the light curves of all instruments into a single one
    lc0 = lcdata[instrs[0]]
    for inst in instrs[1:]:
        lc0 = lc0 + lcdata[inst]

    lc0.instr = ",".join(instrs)

    logging.info('Saving scrunched light curve to %s' % outfile)
    save_lcurve(lc0, outfile)

    return lc0
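
A hedged usage sketch: the file names are hypothetical, and the files are assumed to be HENDRICS light curves loadable by `join_lightcurves`.

# Sum two light curves from different instruments; their GTIs are
# intersected with cross_gtis and the counts are summed
lcfiles = ['obs_FPMA_lc.nc', 'obs_FPMB_lc.nc']  # hypothetical file names
scrunched_lc = scrunch_lightcurves(lcfiles,
                                   outfile='obs_scrunched_lc.nc',
                                   save_joint=False)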
Example #5
def _get_gti_from_all_extensions(lchdulist,
                                 accepted_gtistrings=['GTI'],
                                 det_numbers=None):
    if det_numbers is None:
        return _get_gti_from_extension(lchdulist, accepted_gtistrings)

    # For each detector, look for its own GTI extension (e.g. GTI00, GTI01)
    # and collect the resulting GTI lists
    gti_lists = []
    for i in det_numbers:
        acc_gti_str = [x + '{:02d}'.format(i) for x in accepted_gtistrings]
        gti_lists.append(_get_gti_from_extension(lchdulist, acc_gti_str))

    return cross_gtis(gti_lists)
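
The per-detector extension names are built by appending a zero-padded detector number to each accepted GTI string, as this short snippet shows ('STDGTI' is just an example of an alternative accepted name):

accepted_gtistrings = ['GTI', 'STDGTI']
for i in [0, 3]:
    print([x + '{:02d}'.format(i) for x in accepted_gtistrings])
# ['GTI00', 'STDGTI00']
# ['GTI03', 'STDGTI03']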
Example #6
def apply_gti(fname, gti, outname=None, minimum_length=0):
    """Apply a GTI list to the data contained in a file.

    File MUST have a GTI extension already, and an extension called `time`.
    """
    ftype, data = get_file_type(fname, raw_data=True)

    try:
        datagti = data['gti']
        newgtis = cross_gtis([gti, datagti])
    except Exception:  # pragma: no cover
        logging.warning('Data have no GTI extension')
        newgtis = gti

    newgtis = filter_gti_by_length(newgtis, minimum_length)

    data['__sr__class__type__'] = 'gti'
    data['gti'] = newgtis
    good = create_gti_mask(data['time'], newgtis)

    data['time'] = data['time'][good]
    if ftype == 'lc':
        data['counts'] = data['counts'][good]
        data['counts_err'] = data['counts_err'][good]
    elif ftype == 'events':
        data['PI'] = data['PI'][good]
        if data['instr'] == 'PCA':  # pragma: no cover
            data['PCU'] = data['PCU'][good]

    newext = '_gtifilt' + HEN_FILE_EXTENSION
    outname = _assign_value_if_none(
        outname,
        fname.replace(HEN_FILE_EXTENSION, '') + newext)
    save_data(data, outname)

    return newgtis
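
A usage sketch with made-up file name and times: keep only the data falling inside a single interval, discarding resulting GTIs shorter than 100 s.

import numpy as np

new_gti = apply_gti('obs_ev.nc',  # hypothetical event file
                    np.array([[80000000., 80002000.]]),
                    outname='obs_ev_gtifilt.nc',
                    minimum_length=100)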
Example #7
def calc_cpds(lcfile1, lcfile2, fftlen,
              save_dyn=False,
              bintime=1,
              pdsrebin=1,
              outname='cpds' + HEN_FILE_EXTENSION,
              normalization='leahy',
              back_ctrate=0.,
              noclobber=False):
    """Calculate the CPDS from a pair of input light curve files.

    Parameters
    ----------
    lcfile1 : str
        The first light curve file
    lcfile2 : str
        The second light curve file
    fftlen : float
        The length of the chunks over which FFTs will be calculated, in seconds

    Other Parameters
    ----------------
    save_dyn : bool
        If True, save the dynamical power spectrum
    bintime : float
        The bin time. If different from that of the light curve, a rebinning is
        performed
    pdsrebin : int
        Rebin the PDS of this factor.
    normalization : str
        'Leahy', 'frac', 'rms', or any normalization accepted by ``stingray``.
        Default 'Leahy'
    back_ctrate : float
        The non-source count rate
    noclobber : bool
        If True, do not overwrite existing files
    outname : str
        Output file name for the cpds. Default: cpds.[nc|p]
    """
    if noclobber and os.path.exists(outname):
        print('File exists, and noclobber option used. Skipping')
        return

    logging.info("Loading file %s..." % lcfile1)
    lc1 = load_lcurve(lcfile1)
    logging.info("Loading file %s..." % lcfile2)
    lc2 = load_lcurve(lcfile2)
    instr1 = lc1.instr
    instr2 = lc2.instr
    gti = cross_gtis([lc1.gti, lc2.gti])

    assert lc1.dt == lc2.dt, 'Light curves are sampled differently'
    dt = lc1.dt

    lc1.gti = gti
    lc2.gti = gti
    lc1._apply_gtis()
    lc2._apply_gtis()
    if lc1.tseg != lc2.tseg:  # compatibility with old versions of stingray
        lc1.tseg = np.max(gti) - np.min(gti)
        lc2.tseg = np.max(gti) - np.min(gti)

    if bintime > dt:
        lcrebin = np.rint(bintime / dt)
        logging.info("Rebinning lcs by a factor %d" % lcrebin)
        lc1 = lc1.rebin(lcrebin)
        lc1.instr = instr1
        lc2 = lc2.rebin(lcrebin)
        lc2.instr = instr2

    if lc1.mjdref != lc2.mjdref:
        lc2 = lc2.change_mjdref(lc1.mjdref)

    ctrate = np.sqrt(lc1.meanrate * lc2.meanrate)

    cpds = AveragedCrossspectrum(lc1, lc2, segment_size=fftlen,
                                 norm=normalization.lower())

    if pdsrebin is not None and pdsrebin != 1:
        cpds = cpds.rebin(pdsrebin)

    cpds.instrs = instr1 + ',' + instr2
    cpds.fftlen = fftlen
    cpds.back_phots = back_ctrate * fftlen
    cpds.mjdref = lc1.mjdref
    lags, lags_err = cpds.time_lag()
    cpds.lag = lags
    cpds.lag_err = lags_err

    logging.info('Saving CPDS to %s' % outname)
    save_pds(cpds, outname)
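
An example call, with hypothetical per-instrument light-curve files, computing the CPDS over 512-s segments with fractional rms normalization:

calc_cpds('obs_FPMA_lc.nc', 'obs_FPMB_lc.nc',  # hypothetical input files
          fftlen=512,
          bintime=0.01,
          outname='obs_cpds.nc',
          normalization='frac',
          noclobber=True)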