Example No. 1
    def _parse_fits(filepath):
        """
        This method parses GBM CSPEC data files to create summary lightcurves.
        """
        hdulist = fits.open(filepath)
        header = OrderedDict(hdulist[0].header)
        # These GBM files have three FITS extensions:
        # ext 1 - the energy range for each of the 128 energy bins
        # ext 2 - the data, e.g. counts, exposure time, time of observation
        # ext 3 - eclipse times?
        energy_bins = hdulist[1].data
        count_data = hdulist[2].data
        misc = hdulist[3].data


        # Rebin the 128 energy channels into summary ranges:
        # 4-15 keV, 15-25 keV, 25-50 keV, 50-100 keV, 100-300 keV, 300-800 keV, 800-2000 keV
        # and put the data in units of counts/s/keV
        summary_counts = _bin_data_for_summary(energy_bins, count_data)

        gbm_times = []
        # Get the time information in datetime format with the correct MET adjustment
        for t in count_data['time']:
            gbm_times.append(fermi.met_to_utc(t))
        column_labels = ['4-15 keV', '15-25 keV', '25-50 keV', '50-100 keV', '100-300 keV',
                         '300-800 keV', '800-2000 keV']
        return header, pandas.DataFrame(summary_counts, columns=column_labels, index=gbm_times)
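For orientation, here is a minimal sketch of opening a CSPEC file with astropy alone and inspecting the three extensions described in the comments above; the filename is a placeholder and the printed column names should be checked rather than assumed.

from astropy.io import fits

# Hypothetical local CSPEC filename; substitute a real GBM daily CSPEC file.
with fits.open("glg_cspec_n5_120215_v00.pha") as hdulist:
    hdulist.info()                    # primary HDU plus the three extensions
    energy_bins = hdulist[1].data     # energy edges of the 128 channels
    count_data = hdulist[2].data      # counts, exposure time, observation times
    print(count_data.columns.names)   # confirm column names before indexing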
Example No. 2
def get_scx_scz_at_time(time, file):
    """
    Read a downloaded FERMI weekly pointing file and extract scx, scz for a
    single time.

    Parameters
    ----------

    time : `datetime.datetime`
        A datetime object or other time format understood by the parse_time function.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        download_weekly_pointing_file function).
    """

    time = parse_time(time)
    hdulist = fits.open(file)
    timesinutc = []
    for tim in hdulist[1].data['START']:
        timesinutc.append(met_to_utc(tim))
    ind = np.searchsorted(timesinutc, time)

    scx_radec = (Longitude(hdulist[1].data['RA_SCX'][ind] * u.deg),
                 Latitude(hdulist[1].data['DEC_SCX'][ind] * u.deg))
    scz_radec = (Longitude(hdulist[1].data['RA_SCZ'][ind] * u.deg),
                 Latitude(hdulist[1].data['DEC_SCZ'][ind] * u.deg))

    return scx_radec, scz_radec, timesinutc[ind]
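A usage sketch for the function above, assuming it is importable alongside download_weekly_pointing_file from sunpy.instr.fermi (newer releases moved both into sunkit_instruments.fermi); the observation time is arbitrary and the pointing file is downloaded on the fly.

from datetime import datetime
from sunpy.instr import fermi

# Fetch the weekly pointing file covering the requested time, then query it.
obs_time = datetime(2012, 2, 15, 2, 30)
pointing_file = fermi.download_weekly_pointing_file(obs_time)
scx, scz, matched_time = fermi.get_scx_scz_at_time(obs_time, pointing_file)
print(scx)           # (Longitude, Latitude) of the spacecraft x-axis
print(matched_time)  # time of the pointing-table entry that was used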
Example No. 3
def get_scx_scz_in_timerange(timerange, file):
    """
    Read a downloaded FERMI weekly pointing file and extract scx, scz for a
    timerange.

    Parameters
    ----------

    timerange : `sunpy.time.TimeRange`
        A SunPy TimeRange object.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        download_weekly_pointing_file function).
    """

    hdulist = fits.open(file)
    timesinutc = []
    for tim in hdulist[1].data['START']:
        timesinutc.append(met_to_utc(tim))

    startind = np.searchsorted(timesinutc, timerange.start)
    endind = np.searchsorted(timesinutc, timerange.end)

    scx_radec = []
    scz_radec = []
    for i in range(startind, endind):
        scx_radec.append((Longitude(hdulist[1].data['RA_SCX'][i] * u.deg),
                          Latitude(hdulist[1].data['DEC_SCX'][i] * u.deg)))
        scz_radec.append((Longitude(hdulist[1].data['RA_SCZ'][i] * u.deg),
                          Latitude(hdulist[1].data['DEC_SCZ'][i] * u.deg)))
    return scx_radec, scz_radec, timesinutc[startind:endind]
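A companion sketch for the timerange variant, under the same module-location assumption; the two-hour window is a placeholder.

from sunpy.instr import fermi
from sunpy.time import TimeRange

# Query every pointing-table entry that falls inside the window.
tr = TimeRange("2012-02-15 02:00", "2012-02-15 04:00")
pointing_file = fermi.download_weekly_pointing_file(tr.start)
scx_list, scz_list, times = fermi.get_scx_scz_in_timerange(tr, pointing_file)
print(len(scx_list), len(times))  # one (Longitude, Latitude) pair per entry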
Example No. 4
def get_scx_scz_at_time(time, file):
    """
    Read a downloaded FERMI weekly pointing file and extract scx, scz for a single time.

    Parameters
    ----------

    time : `datetime.datetime`
        A datetime object or other time format understood by the parse_time function.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        download_weekly_pointing_file function).
    """
    
    time = parse_time(time)
    hdulist = fits.open(file)
    timesinutc = []
    for tim in hdulist[1].data['START']:
        timesinutc.append(met_to_utc(tim))
    ind = np.searchsorted(timesinutc, time)

    scx_radec = (Longitude(hdulist[1].data['RA_SCX'][ind] * u.deg),
                 Latitude(hdulist[1].data['DEC_SCX'][ind] * u.deg))
    scz_radec = (Longitude(hdulist[1].data['RA_SCZ'][ind] * u.deg),
                 Latitude(hdulist[1].data['DEC_SCZ'][ind] * u.deg))

    return scx_radec, scz_radec, timesinutc[ind]
Example No. 5
def get_scx_scz_in_timerange(timerange, file):
    """
    Read a downloaded FERMI weekly pointing file and extract scx, scz for a timerange.

    Parameters
    ----------

    timerange : `sunpy.time.TimeRange`
        A SunPy TimeRange object.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        download_weekly_pointing_file function).
    """
    
    hdulist = fits.open(file)
    timesinutc = []
    for tim in hdulist[1].data['START']:
        timesinutc.append(met_to_utc(tim))

    startind = np.searchsorted(timesinutc, timerange.start)
    endind = np.searchsorted(timesinutc, timerange.end)

    scx_radec = []
    scz_radec = []
    for i in range(startind, endind):
        scx_radec.append((Longitude(hdulist[1].data['RA_SCX'][i] * u.deg),
                          Latitude(hdulist[1].data['DEC_SCX'][i] * u.deg)))
        scz_radec.append((Longitude(hdulist[1].data['RA_SCZ'][i] * u.deg),
                          Latitude(hdulist[1].data['DEC_SCZ'][i] * u.deg)))
    return scx_radec, scz_radec, timesinutc[startind:endind]
Example No. 6
def get_scx_scz_at_time(time, file):
    """
    Read a downloaded FERMI weekly pointing file and extract "scx", "scz" for a
    single time.

    Parameters
    ----------
    time : {parse_time_types}
        A time specified as a parse_time-compatible
        time string, number, or a datetime object.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        `~sunpy.instr.fermi.download_weekly_pointing_file` function).

    Returns
    -------
    `tuple`, `tuple`, `list`:
        The pointing coordinates, each as a (`~astropy.coordinates.Longitude`,
        `~astropy.coordinates.Latitude`) `tuple`, and the matching time.
    """

    time = parse_time(time)
    hdulist = fits.open(file)
    timesinutc = []
    for tim in hdulist[1].data['START']:
        timesinutc.append(met_to_utc(tim))
    ind = np.searchsorted(timesinutc, time)

    scx_radec = (Longitude(hdulist[1].data['RA_SCX'][ind] * u.deg),
                 Latitude(hdulist[1].data['DEC_SCX'][ind] * u.deg))
    scz_radec = (Longitude(hdulist[1].data['RA_SCZ'][ind] * u.deg),
                 Latitude(hdulist[1].data['DEC_SCZ'][ind] * u.deg))

    return scx_radec, scz_radec, timesinutc[ind]
Example No. 7
def get_scx_scz_in_timerange(timerange, file):
    """
    Read a downloaded FERMI weekly pointing file and extract scx, scz for a
    timerange.

    Parameters
    ----------

    timerange : `sunpy.time.TimeRange`
        A SunPy TimeRange object.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        download_weekly_pointing_file function).
    """

    hdulist = fits.open(file)
    timesinutc = []
    for tim in hdulist[1].data["START"]:
        timesinutc.append(met_to_utc(tim))

    startind = np.searchsorted(timesinutc, timerange.start)
    endind = np.searchsorted(timesinutc, timerange.end)

    scx_radec = []
    scz_radec = []
    for i in range(startind, endind):
        scx_radec.append(
            (Longitude(hdulist[1].data["RA_SCX"][i] * u.deg), Latitude(hdulist[1].data["DEC_SCX"][i] * u.deg))
        )
        scz_radec.append(
            (Longitude(hdulist[1].data["RA_SCZ"][i] * u.deg), Latitude(hdulist[1].data["DEC_SCZ"][i] * u.deg))
        )
    return scx_radec, scz_radec, timesinutc[startind:endind]
Example No. 8
    def from_fits_file(cls, filename):
        r"""
        Create a new visibility object from a fits file.

        Parameters
        ----------
        filename : `str`
            The path/filename of the fits file to read

        Returns
        -------
        `Visibility`
            The new visibility object

        Raises
        ------
        TypeError
            If the fits file is not from a supported instrument

        """
        with fits.open(filename) as hdu_list:
            primary_header = hdu_list[0].header
            if primary_header.get('source') == 'xrayvision':
                return Visibility.from_fits(hdu_list)
            elif primary_header.get('TELESCOP') == 'RHESSI' and \
                    primary_header.get('INSTRUME') == 'RHESSI':
                return RHESSIVisibility.from_fits_old(hdu_list=hdu_list)
            else:
                raise TypeError(
                    "This type of fits visibility file is not supported")
Example No. 9
def get_scx_scz_in_timerange(timerange, file):
    """
    Read a downloaded FERMI weekly pointing file and extract scx, scz for a
    timerange.

    Parameters
    ----------
    timerange : `sunpy.time.TimeRange`
        A SunPy `~sunpy.time.TimeRange`.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        `~sunkit_instruments.fermi.download_weekly_pointing_file` function).

    Returns
    -------
    `list`, `list`, `list`:
        The pointing coordinates as lists of (`~astropy.coordinates.Longitude`,
        `~astropy.coordinates.Latitude`) tuples, and the corresponding times.
    """

    hdulist = fits.open(file)
    timesinutc = []
    for tim in hdulist[1].data["START"]:
        timesinutc.append(met_to_utc(tim))

    startind = np.searchsorted(timesinutc, timerange.start)
    endind = np.searchsorted(timesinutc, timerange.end)

    scx_radec = []
    scz_radec = []
    for i in range(startind, endind):
        scx_radec.append((
            Longitude(hdulist[1].data["RA_SCX"][i] * u.deg),
            Latitude(hdulist[1].data["DEC_SCX"][i] * u.deg),
        ))
        scz_radec.append((
            Longitude(hdulist[1].data["RA_SCZ"][i] * u.deg),
            Latitude(hdulist[1].data["DEC_SCZ"][i] * u.deg),
        ))
    return scx_radec, scz_radec, timesinutc[startind:endind]
Example No. 10
    def from_fits_file(path):
        """
        Creates RHESSIVisibility objects from compatible fits files

        Parameters
        ----------
        path : `str`
            Path where the fits file can be found

        Examples
        --------

        Notes
        -----
        It separates the Visibility data based on the time and energy
        ranges.
        """
        hdulist = fits.open(path)
        for i in hdulist:
            if i.name == "VISIBILITY":
                # Checking how many data structures we have
                data_sort = {}
                erange = i.data["erange"]
                erange_unique = np.unique(erange, axis=0)
                trange = i.data["trange"]
                trange_unique = np.unique(trange, axis=0)

                def find_erange(e):
                    for i, j in enumerate(erange_unique):
                        if np.allclose(j, e):
                            return i

                def find_trange(t):
                    for i, j in enumerate(trange_unique):
                        if np.allclose(j, t, rtol=1e-15):
                            return i

                for j, k in enumerate(erange_unique):
                    data_sort[j] = {}

                for j, k in enumerate(trange):
                    eind = find_erange(erange[j])
                    tind = find_trange(k)
                    if tind not in data_sort[eind]:
                        data_sort[eind][tind] = [j]
                    else:
                        data_sort[eind][tind].append(j)

                # Creating the RHESSIVisibilities
                visibilities = []
                for j, k in data_sort.items():
                    for l, m in k.items():
                        visibilities.append(
                            RHESSIVisibility(np.array([]),
                                             np.array([[], []]),
                                             erange=erange_unique[j],
                                             trange=trange_unique[l]))
                        u = np.take(i.data["u"], m)
                        v = np.take(i.data["v"], m)
                        visibilities[-1].uv = np.array([u, v])
                        if "XYOFFSET" in i.header.values():
                            visibilities[-1].xyoffset = i.data["xyoffset"][
                                m[0]]
                        if "ISC" in i.header.values():
                            visibilities[-1].isc = np.take(i.data["isc"], m)
                        if "HARM" in i.header.values():
                            visibilities[-1].harm = i.data["harm"][m[0]]
                        if "OBSVIS" in i.header.values():
                            visibilities[-1].vis = np.take(i.data["obsvis"], m)
                        if "TOTFLUX" in i.header.values():
                            visibilities[-1].totflux = np.take(
                                i.data["totflux"], m)
                        if "SIGAMP" in i.header.values():
                            visibilities[-1].sigamp = np.take(
                                i.data["sigamp"], m)
                        if "CHI2" in i.header.values():
                            visibilities[-1].chi2 = np.take(i.data["chi2"], m)
                        if "TYPE" in i.header.values():
                            visibilities[-1].type_string = i.data["type"][m[0]]
                        if "UNITS" in i.header.values():
                            string = RHESSIVisibility.convert_units_to_tex(
                                i.data["units"][m[0]])
                            visibilities[-1].units = string
                        if "ATTEN_STATE" in i.header.values():
                            visibilities[-1].atten_state = i.data[
                                "atten_state"][m[0]]
                        if "COUNT" in i.header.values():
                            visibilities[-1].count = np.take(
                                i.data["count"], m)
                return visibilities
        return None
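The index bookkeeping above (find_erange, find_trange and the nested data_sort dict) amounts to grouping visibility rows by their unique energy-range/time-range pair; here is a small self-contained sketch of the same idea on toy arrays rather than a real RHESSI file.

import numpy as np

# Toy stand-ins for the "erange" and "trange" columns of the VISIBILITY extension.
erange = np.array([[4.0, 8.0], [4.0, 8.0], [8.0, 12.0], [8.0, 12.0]])
trange = np.array([[0.0, 4.0], [4.0, 8.0], [0.0, 4.0], [4.0, 8.0]])

# Bucket row indices by their (energy range, time range) pair; each bucket
# corresponds to one RHESSIVisibility in the example above.
buckets = {}
for row, (e, t) in enumerate(zip(erange, trange)):
    key = (tuple(e), tuple(t))
    buckets.setdefault(key, []).append(row)

for key, rows in buckets.items():
    print(key, rows)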