Example #1
    def test_compute_exptime_1(self):
        skypos = (170.04661625965298, 9.85073479305886)
        trange = (891394895.0, 891394925.0)
        self.assertAlmostEqual(dbt.compute_exptime('NUV', trange),
                               dbt.compute_exptime('NUV',
                                                   trange,
                                                   skypos=skypos),
                               places=12)
Example #2
    def test_compute_exptime_2(self):
        skypos = [176.919525856024, 0.255696872807351]
        trange = [766525332.995, 766526576.995]
        self.assertAlmostEqual(dbt.compute_exptime('NUV', trange),
                               dbt.compute_exptime('NUV',
                                                   trange,
                                                   skypos=skypos),
                               places=12)
Example #3
def movie_tbl(band, tranges, verbose=0, framesz=0., retries=100):
    """
    Initialize a FITS table to contain movie frame information.

    :param band: The band to use, either 'FUV' or 'NUV'.

    :type band: str

    :param tranges: Set of time ranges to retrieve the photon events, in
        GALEX time seconds.

    :type tranges: list

    :param verbose: Verbosity level, a value of 0 is minimum verbosity.

    :type verbose: int

    :param framesz: The time bin size (depth) to use per frame, in seconds.

    :type framesz: float

    :param retries: Number of query retries to attempt before giving up.

    :type retries: int

    :returns: astropy.io.fits.BinTableHDU object -- The set of frames as an
        HDU object.
    """

    if verbose:
        mc.print_inline('Populating exposure time table.')

    tstarts, tstops, exptimes = [], [], []

    for trange in tranges:
        stepsz = framesz if framesz else trange[1]-trange[0]
        steps = np.ceil((trange[1]-trange[0])/stepsz)
        for i, t0 in enumerate(np.arange(trange[0], trange[1], stepsz)):
            t1 = trange[1] if i == steps - 1 else t0+stepsz
            tstarts.append(t0)
            tstops.append(t1)
            exptimes.append(dbt.compute_exptime(band, [t0, t1],
                                                verbose=verbose))
    col1 = pyfits.Column(name='tstart', format='E', array=np.array(tstarts))
    col2 = pyfits.Column(name='tstop', format='E', array=np.array(tstops))
    col3 = pyfits.Column(name='exptime', format='E', array=np.array(exptimes))
    cols = pyfits.ColDefs([col1, col2, col3])
    tbl = pyfits.BinTableHDU.from_columns(cols)

    return tbl
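
A minimal usage sketch for movie_tbl (the time range is borrowed from the tests above and is illustrative only; it assumes this module's imports, e.g. numpy as np, astropy.io.fits as pyfits, and the dbt/mc helpers, are available):

# Frame table for one hypothetical NUV visit, binned into 30-second frames.
tranges = [[766525332.995, 766526576.995]]   # GALEX time seconds
tbl = movie_tbl('NUV', tranges, framesz=30., verbose=1)
print(tbl.columns.names)      # ['tstart', 'tstop', 'exptime']
print(tbl.data['exptime'])    # effective exposure time per frame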
Example #4
def quickmag(band, ra0, dec0, tranges, radius, annulus=None, stepsz=None,
             verbose=0, detsize=1.25, coadd=False):
    """
    Primary wrapper function for generating and synthesizing all of the
        parameters and calculations necessary to create light curves.

    :param band: The band being used, either 'FUV' or 'NUV'.

    :type band: str

    :param ra0: Right ascension, in degrees, of the target position.

    :type ra0: float

    :param dec0: Declination, in degrees, of the target position.

    :type dec0: float

    :param tranges: Set of time ranges to query within in GALEX time seconds.

    :type tranges: list

    :param radius: The radius of the photometric aperture, in degrees.

    :type radius: float

    :param annulus: Radii of the inner and outer extents of the background
        annulus, in degrees.

    :type annulus: list

    :param stepsz: The size of the time bins to use, in seconds.

    :type stepsz: float

    :param verbose: Verbosity level, a value of 0 is minimum verbosity.

    :type verbose: int

    :param detsize: Effective diameter, in degrees, of the field-of-view.

    :type detsize: float

    :param coadd: Set to True if calculating a total flux instead of flux
        from each time bin.

    :type coadd: bool

    :returns: dict -- The light curve, including input parameters.
    """

    if verbose:
        mc.print_inline("Retrieving all of the target events.")
    searchradius = radius if annulus is None else annulus[1]
    data = pullphotons(band, ra0, dec0, tranges, searchradius, verbose=verbose,
                       detsize=detsize)
    if not data:
        return None

    if verbose:
        mc.print_inline("Binning data according to requested depth.")

    # Multiple ways of defining bins
    try:
        trange = [np.array(tranges).min(), np.array(tranges).max()]
    except ValueError:
        trange = tranges

    if coadd:
        bins = np.array(trange)
    elif stepsz:
        bins = np.append(np.arange(trange[0], trange[1], stepsz), max(trange))
    else:
        bins = np.unique(np.array(tranges).flatten())

    lcurve = {'params':gphot_params(band, [ra0, dec0], radius, annulus=annulus,
                                    verbose=verbose, detsize=detsize,
                                    stepsz=stepsz, trange=trange)}

    # This is equivalent in function to np.digitize(data['t'],bins) except
    # that it's much, much faster. See numpy issue #2656 at
    # https://github.com/numpy/numpy/issues/2656
    bin_ix = np.searchsorted(bins, data['t'], "right")
    try:
        lcurve['t0'] = bins[np.unique(bin_ix)-1]
        lcurve['t1'] = bins[np.unique(bin_ix)]
        lcurve['exptime'] = np.array(
            dbt.compute_exptime(band,
                                tranges if coadd else list(zip(
                                    lcurve['t0'], lcurve['t1'])),
                                verbose=verbose, coadd=coadd, detsize=detsize,
                                skypos=[ra0, dec0]))
    except IndexError:
        if np.all(np.isnan(data['t'])):
            if verbose:
                mc.print_inline(
                    "No valid data available in {t}".format(t=tranges))
        lcurve['t0'] = np.array([np.nan])
        lcurve['t1'] = np.array([np.nan])
        lcurve['exptime'] = np.array([0])

    angSep = mc.angularSeparation(ra0, dec0, data['ra'], data['dec'])

    aper_ix = np.where(angSep <= radius)
    lcurve['t0_data'] = reduce_lcurve(bin_ix, aper_ix, data['t'], np.min)
    lcurve['t1_data'] = reduce_lcurve(bin_ix, aper_ix, data['t'], np.max)
    lcurve['t_mean'] = reduce_lcurve(bin_ix, aper_ix, data['t'], np.mean)
    lcurve['q_mean'] = reduce_lcurve(bin_ix, aper_ix, data['q'], np.mean)
    lcurve['counts'] = reduce_lcurve(bin_ix, aper_ix, data['t'], len)
    lcurve['flat_counts'] = reduce_lcurve(bin_ix, aper_ix,
                                          1./data['response'], np.sum)
    lcurve['responses'] = reduce_lcurve(bin_ix, aper_ix, data['response'],
                                        np.mean)
    lcurve['detxs'] = reduce_lcurve(bin_ix, aper_ix, data['col'], np.mean)
    lcurve['detys'] = reduce_lcurve(bin_ix, aper_ix, data['row'], np.mean)
    lcurve['detrad'] = mc.distance(lcurve['detxs'], lcurve['detys'], 400, 400)
    lcurve['racent'] = reduce_lcurve(bin_ix, aper_ix, data['ra'], np.mean)
    lcurve['deccent'] = reduce_lcurve(bin_ix, aper_ix, data['dec'], np.mean)

    skybgmcatdata = dbt.get_mcat_data([ra0, dec0], radius)
    lcurve['mcat_bg'] = lcurve['exptime']*np.array(
        [dbt.mcat_skybg(band, [ra0, dec0], radius, trange=tr,
                        mcat=skybgmcatdata, verbose=verbose)
         for tr in zip(lcurve['t0'], lcurve['t1'])])

    if annulus is not None:
        annu_ix = np.where((angSep > annulus[0]) & (angSep <= annulus[1]))
        lcurve['bg_counts'] = reduce_lcurve(bin_ix, annu_ix, data['t'], len)
        lcurve['bg_flat_counts'] = reduce_lcurve(
            bin_ix, annu_ix, data['response'], np.sum)
        lcurve['bg'] = (mc.area(radius)*lcurve['bg_flat_counts'] /
                        (mc.area(annulus[1])-mc.area(annulus[0])))
    else:
        lcurve['bg_counts'] = np.zeros(len(lcurve['counts']))
        lcurve['bg_flat_counts'] = np.zeros(len(lcurve['counts']))
        lcurve['bg'] = np.zeros(len(lcurve['counts']))

    lcurve['cps'] = lcurve['flat_counts']/lcurve['exptime']
    lcurve['cps_err'] = aperture_error(lcurve['flat_counts'], lcurve['exptime'])
    lcurve['cps_bgsub'] = (lcurve['flat_counts']-
                           lcurve['bg'])/lcurve['exptime']
    lcurve['cps_bgsub_err'] = aperture_error(
        lcurve['flat_counts'], lcurve['exptime'], bgcounts=lcurve['bg'])
    lcurve['cps_mcatbgsub'] = (lcurve['flat_counts']-
                               lcurve['mcat_bg'])/lcurve['exptime']
    lcurve['cps_mcatbgsub_err'] = aperture_error(
        lcurve['flat_counts'], lcurve['exptime'], bgcounts=lcurve['mcat_bg'])
    lcurve['flux'] = gxt.counts2flux(lcurve['cps'], band)
    lcurve['flux_err'] = gxt.counts2flux(lcurve['cps_err'], band)
    lcurve['flux_bgsub'] = gxt.counts2flux(lcurve['cps_bgsub'], band)
    lcurve['flux_bgsub_err'] = gxt.counts2flux(lcurve['cps_bgsub_err'], band)
    lcurve['flux_mcatbgsub'] = gxt.counts2flux(lcurve['cps_mcatbgsub'], band)
    lcurve['flux_mcatbgsub_err'] = gxt.counts2flux(
        lcurve['cps_mcatbgsub_err'], band)

    # NOTE: These conversions to mag can throw logarithm warnings if the
    # background is brighter than the source, resulting in a negative cps which
    # then gets propagated as a magnitude of NaN.
    lcurve['mag'] = gxt.counts2mag(lcurve['cps'], band)
    lcurve['mag_err_1'] = (lcurve['mag'] -
                           gxt.counts2mag(lcurve['cps'] + lcurve['cps_err'],
                                          band))
    lcurve['mag_err_2'] = (gxt.counts2mag(lcurve['cps'] -
                                          lcurve['cps_err'], band) -
                           lcurve['mag'])
    lcurve['mag_bgsub'] = gxt.counts2mag(lcurve['cps_bgsub'], band)
    lcurve['mag_bgsub_err_1'] = (lcurve['mag_bgsub'] -
                                 gxt.counts2mag(lcurve['cps_bgsub'] +
                                                lcurve['cps_bgsub_err'], band))
    lcurve['mag_bgsub_err_2'] = (gxt.counts2mag(lcurve['cps_bgsub'] -
                                                lcurve['cps_bgsub_err'], band) -
                                 lcurve['mag_bgsub'])
    lcurve['mag_mcatbgsub'] = gxt.counts2mag(lcurve['cps_mcatbgsub'], band)
    lcurve['mag_mcatbgsub_err_1'] = (lcurve['mag_mcatbgsub'] -
                                     gxt.counts2mag(lcurve['cps_mcatbgsub'] +
                                                    lcurve['cps_mcatbgsub_err'],
                                                    band))
    lcurve['mag_mcatbgsub_err_2'] = (gxt.counts2mag(lcurve['cps_mcatbgsub'] -
                                                    lcurve['cps_mcatbgsub_err'],
                                                    band) -
                                     lcurve['mag_mcatbgsub'])

    lcurve['photons'] = data

    lcurve['flags'] = getflags(band, bin_ix, lcurve, verbose=verbose)

    return lcurve
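
A hedged usage sketch for quickmag (the coordinates, aperture, and annulus below are placeholders rather than recommended values; it assumes the surrounding module's imports and helpers such as pullphotons, reduce_lcurve, and getflags):

# 10-second light curve of a hypothetical target with a background annulus.
lc = quickmag('NUV', 176.919525856024, 0.255696872807351,
              [[766525332.995, 766526576.995]], 0.005,
              annulus=[0.01, 0.02], stepsz=10., verbose=1)
if lc is not None:   # quickmag returns None when no photon data is found
    print(lc['t_mean'][:3], lc['cps_bgsub'][:3], lc['flags'][:3])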
Example #5
def integrate_map(band, skypos, tranges, skyrange, verbose=0, memlight=None,
                  hdu=None, retries=100, response=False, detsize=1.1):
    """
    Integrate an image over some number of time ranges. Use a reduced-memory
        optimization (at the expense of more web queries) if requested.

    :param band: The band to use, either 'FUV' or 'NUV'.

    :type band: str

    :param skypos: The right ascension and declination, in degrees.

    :type skypos: list

    :param tranges: Set of time ranges to retrieve the photon events, in
        GALEX time seconds.

    :type tranges: list

    :param skyrange: RA and Dec extents of the region of interest in degrees.

    :type skyrange: list

    :param verbose: Verbosity level, a value of 0 is minimum verbosity.

    :type verbose: int

    :param memlight: Reduce memory usage by breaking query into smaller
        segments of this size in seconds.

    :type memlight: float

    :param hdu: An existing HDU to modify.

    :type hdu: astropy.io.fits HDU object or None

    :param retries: Number of query retries to attempt before giving up.

    :type retries: int

    :param response: Apply the response correction.

    :type response: bool

    :param detsize: Effective diameter, in degrees, of the field-of-view.

    :type detsize: float

    :returns: numpy.ndarray -- The integrated image.
    """

    imsz = gxt.deg2pix(skypos, skyrange)
    img = np.zeros(np.array(imsz, dtype='int32'))

    for trange in tranges:
        img += makemap(band, skypos, trange, skyrange, response=response,
                       verbose=verbose, detsize=detsize)

    if response:  # Intensity maps == average countrate maps.
        expt = np.sum([dbt.compute_exptime(band, trange)
                       for trange in tranges])
        if expt > 0:
            img /= expt

    return img
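
A short usage sketch for integrate_map (hypothetical sky position, extent, and time range; assumes the module-level imports used above, e.g. numpy as np and the gxt/dbt helpers):

# Response-corrected (intensity) map over a 0.02 x 0.02 degree region.
skypos = [176.919525856024, 0.255696872807351]
skyrange = [0.02, 0.02]
tranges = [[766525332.995, 766526576.995]]
img = integrate_map('NUV', skypos, tranges, skyrange, response=True,
                    verbose=1)
print(img.shape, img.max())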
Example #6
def quickmag(band,
             ra0,
             dec0,
             tranges,
             radius,
             annulus=None,
             stepsz=None,
             verbose=0,
             detsize=1.25,
             coadd=False):
    """
    Primary wrapper function for generating and synthesizing all of the
        parameters and calculations necessary to create light curves.

    :param band: The band being used, either 'FUV' or 'NUV'.

    :type band: str

    :param ra0: Right ascension, in degrees, of the target position.

    :type ra0: float

    :param dec0: Declination, in degrees, of the target position.

    :type dec0: float

    :param tranges: Set of time ranges to query within in GALEX time seconds.

    :type tranges: list

    :param radius: The radius of the photometric aperture, in degrees.

    :type radius: float

    :param annulus: Radii of the inner and outer extents of the background
        annulus, in degrees.

    :type annulus: list

    :param stepsz: The size of the time bins to use, in seconds.

    :type stepsz: float

    :param verbose: Verbosity level, a value of 0 is minimum verbosity.

    :type verbose: int

    :param detsize: Effective diameter, in degrees, of the field-of-view.

    :type detsize: float

    :param coadd: Set to True if calculating a total flux instead of flux
        from each time bin.

    :type coadd: bool

    :returns: dict -- The light curve, including input parameters.
    """

    if verbose:
        mc.print_inline("Retrieving all of the target events.")
    searchradius = radius if annulus is None else annulus[1]
    data = pullphotons(band,
                       ra0,
                       dec0,
                       tranges,
                       searchradius,
                       verbose=verbose,
                       detsize=detsize)
    if not data:
        return None

    if verbose:
        mc.print_inline("Binning data according to requested depth.")

    # Multiple ways of defining bins
    try:
        trange = [np.array(tranges).min(), np.array(tranges).max()]
    except ValueError:
        trange = tranges

    if coadd:
        bins = np.array(trange)
    elif stepsz:
        bins = np.append(np.arange(trange[0], trange[1], stepsz), max(trange))
    else:
        bins = np.unique(np.array(tranges).flatten())

    lcurve = {
        'params':
        gphot_params(band, [ra0, dec0],
                     radius,
                     annulus=annulus,
                     verbose=verbose,
                     detsize=detsize,
                     stepsz=stepsz,
                     trange=trange)
    }

    # This is equivalent in function to np.digitize(data['t'],bins) except
    # that it's much, much faster. See numpy issue #2656 at
    # https://github.com/numpy/numpy/issues/2656
    bin_ix = np.searchsorted(bins, data['t'], "right")
    try:
        lcurve['t0'] = bins[np.unique(bin_ix) - 1]
        lcurve['t1'] = bins[np.unique(bin_ix)]
        lcurve['exptime'] = np.array(
            dbt.compute_exptime(
                band,
                tranges if coadd else list(zip(lcurve['t0'], lcurve['t1'])),
                verbose=verbose,
                coadd=coadd,
                detsize=detsize,
                skypos=[ra0, dec0]))
    except IndexError:
        if np.all(np.isnan(data['t'])):
            if verbose:
                mc.print_inline(
                    "No valid data available in {t}".format(t=tranges))
        lcurve['t0'] = np.array([np.nan])
        lcurve['t1'] = np.array([np.nan])
        lcurve['exptime'] = np.array([0])

    angSep = mc.angularSeparation(ra0, dec0, data['ra'], data['dec'])

    aper_ix = np.where(angSep <= radius)
    lcurve['t0_data'] = reduce_lcurve(bin_ix, aper_ix, data['t'], np.min)
    lcurve['t1_data'] = reduce_lcurve(bin_ix, aper_ix, data['t'], np.max)
    lcurve['t_mean'] = reduce_lcurve(bin_ix, aper_ix, data['t'], np.mean)
    lcurve['q_mean'] = reduce_lcurve(bin_ix, aper_ix, data['q'], np.mean)
    lcurve['counts'] = reduce_lcurve(bin_ix, aper_ix, data['t'], len)
    lcurve['flat_counts'] = reduce_lcurve(bin_ix, aper_ix,
                                          1. / data['response'], np.sum)
    lcurve['responses'] = reduce_lcurve(bin_ix, aper_ix, data['response'],
                                        np.mean)
    lcurve['detxs'] = reduce_lcurve(bin_ix, aper_ix, data['col'], np.mean)
    lcurve['detys'] = reduce_lcurve(bin_ix, aper_ix, data['row'], np.mean)
    lcurve['detrad'] = mc.distance(lcurve['detxs'], lcurve['detys'], 400, 400)
    lcurve['racent'] = reduce_lcurve(bin_ix, aper_ix, data['ra'], np.mean)
    lcurve['deccent'] = reduce_lcurve(bin_ix, aper_ix, data['dec'], np.mean)

    skybgmcatdata = dbt.get_mcat_data([ra0, dec0], radius)
    lcurve['mcat_bg'] = lcurve['exptime'] * np.array([
        dbt.mcat_skybg(band, [ra0, dec0],
                       radius,
                       trange=tr,
                       mcat=skybgmcatdata,
                       verbose=verbose)
        for tr in zip(lcurve['t0'], lcurve['t1'])
    ])

    if annulus is not None:
        annu_ix = np.where((angSep > annulus[0]) & (angSep <= annulus[1]))
        lcurve['bg_counts'] = reduce_lcurve(bin_ix, annu_ix, data['t'], len)
        lcurve['bg_flat_counts'] = reduce_lcurve(bin_ix, annu_ix,
                                                 data['response'], np.sum)
        lcurve['bg'] = (mc.area(radius) * lcurve['bg_flat_counts'] /
                        (mc.area(annulus[1]) - mc.area(annulus[0])))
    else:
        lcurve['bg_counts'] = np.zeros(len(lcurve['counts']))
        lcurve['bg_flat_counts'] = np.zeros(len(lcurve['counts']))
        lcurve['bg'] = np.zeros(len(lcurve['counts']))

    lcurve['cps'] = lcurve['flat_counts'] / lcurve['exptime']
    lcurve['cps_err'] = aperture_error(lcurve['flat_counts'],
                                       lcurve['exptime'])
    lcurve['cps_bgsub'] = (lcurve['flat_counts'] -
                           lcurve['bg']) / lcurve['exptime']
    lcurve['cps_bgsub_err'] = aperture_error(lcurve['flat_counts'],
                                             lcurve['exptime'],
                                             bgcounts=lcurve['bg'])
    lcurve['cps_mcatbgsub'] = (lcurve['flat_counts'] -
                               lcurve['mcat_bg']) / lcurve['exptime']
    lcurve['cps_mcatbgsub_err'] = aperture_error(lcurve['flat_counts'],
                                                 lcurve['exptime'],
                                                 bgcounts=lcurve['mcat_bg'])
    lcurve['flux'] = gxt.counts2flux(lcurve['cps'], band)
    lcurve['flux_err'] = gxt.counts2flux(lcurve['cps_err'], band)
    lcurve['flux_bgsub'] = gxt.counts2flux(lcurve['cps_bgsub'], band)
    lcurve['flux_bgsub_err'] = gxt.counts2flux(lcurve['cps_bgsub_err'], band)
    lcurve['flux_mcatbgsub'] = gxt.counts2flux(lcurve['cps_mcatbgsub'], band)
    lcurve['flux_mcatbgsub_err'] = gxt.counts2flux(lcurve['cps_mcatbgsub_err'],
                                                   band)

    # NOTE: These conversions to mag can throw logarithm warnings if the
    # background is brighter than the source, resulting in a negative cps which
    # then gets propagated as a magnitude of NaN.
    lcurve['mag'] = gxt.counts2mag(lcurve['cps'], band)
    lcurve['mag_err_1'] = (
        lcurve['mag'] -
        gxt.counts2mag(lcurve['cps'] + lcurve['cps_err'], band))
    lcurve['mag_err_2'] = (
        gxt.counts2mag(lcurve['cps'] - lcurve['cps_err'], band) -
        lcurve['mag'])
    lcurve['mag_bgsub'] = gxt.counts2mag(lcurve['cps_bgsub'], band)
    lcurve['mag_bgsub_err_1'] = (
        lcurve['mag_bgsub'] -
        gxt.counts2mag(lcurve['cps_bgsub'] + lcurve['cps_bgsub_err'], band))
    lcurve['mag_bgsub_err_2'] = (
        gxt.counts2mag(lcurve['cps_bgsub'] - lcurve['cps_bgsub_err'], band) -
        lcurve['mag_bgsub'])
    lcurve['mag_mcatbgsub'] = gxt.counts2mag(lcurve['cps_mcatbgsub'], band)
    lcurve['mag_mcatbgsub_err_1'] = (lcurve['mag_mcatbgsub'] - gxt.counts2mag(
        lcurve['cps_mcatbgsub'] + lcurve['cps_mcatbgsub_err'], band))
    lcurve['mag_mcatbgsub_err_2'] = (gxt.counts2mag(
        lcurve['cps_mcatbgsub'] - lcurve['cps_mcatbgsub_err'], band) -
                                     lcurve['mag_mcatbgsub'])

    lcurve['photons'] = data

    lcurve['flags'] = getflags(band, bin_ix, lcurve, verbose=verbose)

    return lcurve
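
The binning comment in both quickmag listings asserts that np.searchsorted(bins, t, "right") reproduces np.digitize(t, bins) for monotonically increasing bins; here is a small standalone check of that equivalence (synthetic data, independent of gPhoton):

import numpy as np

# Synthetic event times and bin edges.
t = np.sort(np.random.uniform(0., 100., 1000))
bins = np.linspace(0., 100., 11)

# The faster searchsorted form used above versus the reference digitize call.
assert np.array_equal(np.searchsorted(bins, t, "right"), np.digitize(t, bins))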
Example #7
    def test_compute_exptime_regression_1(self):
        trange = (891394895.0, 891394925.0)
        self.assertAlmostEqual(dbt.compute_exptime('NUV', trange)[0],
                               0.8842699997336518,
                               places=12)
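
The [0] index in this regression test implies that compute_exptime returns a sequence of effective exposure times rather than a bare float; a hedged illustration of that assumed return shape (one value per requested time range):

# Assumed behavior, not verified against every gPhoton version: a single
# time range yields a one-element result.
trange = (891394895.0, 891394925.0)
exptimes = dbt.compute_exptime('NUV', trange)
print(len(exptimes), exptimes[0])   # per the regression value above, ~0.8843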