Example #1
def main(source_data='.jp2',
         time_range=TimeRange('2011/10/01 09:45:00', '2011/10/01 10:15:59'),
         algorithm='hough',
         feed_directory='~/Data/eitwave/jp2/20111001_jp2/',
         use_pickle=None,
         diff_type='running',
         data_savedir=None):
    '''
    This is the main executable for the Automated EUV Wave Analysis and Reduction (AWARE)
    code. The procedure is as follows:
        - Query the HEK to find whether any flares were observed in SDO/AIA 211A during the input time range
        - If yes, then read in (or download) a sequence of solar images corresponding to the time range
        - Transform these images from Helioprojective Coordinates to Heliographic Coordinates,
          with the origin centered at the flare origin
        - Create a sequence of difference maps from these transformed maps
        - Use a threshold method to create a binary map from the difference maps.
        - Apply the Hough Transform to the binary map to search for strong lines in the image
        - Use the results of the Hough Transform to detect whether an EUV wave is present
        - Fit an appropriate function (e.g. Gaussian) to the detected wavefront as a function of longitude
        - Record fit results and return data products
    
    Parameters
    ----------    
    source_data : string
        Description of the type of data being input. Allowed values are '.jp2', '.fits', or 'test';
        AWARE will look for Helioviewer JP2 files, FITS files, or load the test data, respectively.

    time_range : a TimeRange object
        time range within which to search for EUV waves

    feed_directory : string
        A directory containing data files to be analysed. If set, AWARE will assume the data files
        have already been downloaded and will search in this directory instead. Assumes that all
        files in the directory with the appropriate extension (e.g. .jp2, .fits) are relevant to
        the flare detection.

    use_pickle : string
        Currently bugged and not supported; always set to None.

    diff_type : string
        The type of image differencing to apply to the image sequence. Allowed values are
        'running' (running difference) or 'base' (base difference). Default is 'running'.

    data_savedir : string
        Directory in which to save JP2 files downloaded from Helioviewer. If None, AWARE will
        construct a directory based on the start time of the query.

    Returns
    -------

    Saves a pickle file containing the detected wavefront model maps (transformed back to the
    original HPC coordinates), and returns a tuple of the intermediate and final data products.
    '''

    if feed_directory is not None:
        feed_directory = os.path.expanduser(feed_directory)

    #Check which type of data is being analysed, and establish the directory to store downloaded files,
    #if appropriate
    if source_data == 'test':
        maps = test_wave2d()
    elif source_data == '.jp2' and data_savedir is None:
        data_savedir = '~/aware_data/jp2/' + time_range.start().strftime(
            '%Y%m%d_%H%M')
    elif source_data == '.fits' and data_savedir is None:
        data_savedir = '~/aware_data/fits/' + time_range.start().strftime(
            '%Y%m%d_%H%M')

    # data_savedir stays None for test data, so guard before creating it
    if data_savedir is not None and not os.path.exists(os.path.expanduser(data_savedir)):
        os.makedirs(os.path.expanduser(data_savedir))

    # Query the HEK to see whether there were any flares during the time range specified
    # Concentrate on the AIA 211A channel as it has clearest observations of global waves
    client = hek.HEKClient()
    hek_result = client.query(hek.attrs.Time(time_range.t1, time_range.t2),
                              hek.attrs.EventType('FL'),
                              hek.attrs.OBS.ChannelID == '211')
    if hek_result is None:
        # if no flares were found, no analysis is possible
        print('No flares found in HEK database during specified time range.')
        print('No analysis possible. Returning.')
        return None

    # Otherwise, we have found at least one flare
    print('Number of flares found = ' + str(len(hek_result)))

    #assume the first result of the HEK query has the correct information
    for flare in hek_result[0:1]:

        if feed_directory is None:
            print('Acquiring data for flare')
            filelist = aware_utils.acquire_data(data_savedir, source_data,
                                                flare)
        else:
            # Assumes that the necessary files are already present
            filelist = aware_utils.listdir_fullpath(feed_directory,
                                                    filetype=source_data)

        # filter to only grab the data files with the source_data extension
        # in the directory (this partly duplicates listdir_fullpath)
        files_tmp = []
        for f in filelist:
            if f.endswith(source_data):
                files_tmp.append(f)

        # reduce the number of files to those that happen after the flare has
        # started
        files = []
        if source_data == '.jp2':
            for f in files_tmp:
                fhv = f.split(os.sep)[-1]
                if aware_utils.hv_filename2datetime(fhv) > \
                parse_time(flare['event_starttime']):
                    files.append(f)
            print('Number of files: ' + str(len(files)))
            if len(files) == 0:
                print('No files found.  Returning.')
                return None
        else:
            files = files_tmp

        # Define the transform parameters
        params = aware_utils.params(flare)

        # read in files and accumulate them
        if use_pickle is not None:
            # load in a pickle file of the data
            pfile = open(os.path.join(feed_directory, use_pickle), 'rb')
            a = pickle.load(pfile)
            maps = a[0]
            new_maps = a[1]
            diffs = a[2]
            pfile.close()
        else:
            maps = aware_utils.accumulate(files[6:30],
                                          accum=1,
                                          nsuper=4,
                                          verbose=True)

            #temporary fix for exposure control and S/N changes
            long_maps = []
            for m in maps:
                if m.exposure_time > 2.0:
                    long_maps.append(m)
            maps = long_maps

            # Unravel the maps
            new_maps = aware_utils.map_unravel(maps, params, verbose=True)
            #return new_maps

            #sometimes unravelling maps leads to slight variations in the unraveled
            #image dimensions.  check dimensions of maps and resample to dimensions
            #of first image in sequence if need be.
            #new_maps[0].peek()
            new_maps = aware_utils.check_dims(new_maps)

            # calculate the differences
            if diff_type == 'base':
                diffs = aware_utils.map_basediff(new_maps)
            else:
                diffs = aware_utils.map_diff(new_maps)

        #generate persistence maps - currently bugged, so skip this step
        #persistence_maps = eitwaveutils.map_persistence(diffs)
        persistence_maps = []

        #determine the threshold to apply to the difference maps.
        #diffs > diff_thresh will be 1, otherwise 0.
        threshold_maps = aware_utils.map_threshold(new_maps, factor=0.2)
        #return threshold_maps

        # transform difference maps into binary maps
        binary_maps = aware_utils.map_binary(diffs, threshold_maps)

        if algorithm == 'hough':
            # detection based on the hough transform
            detection = aware_utils.hough_detect(binary_maps, vote_thresh=10)
        elif algorithm == 'prob_hough':
            # detection based on the probabilistic hough transform.  Takes the
            # keywords of the probabilistic hough transform - see the documentation
            # of skimage.transform.probabilistic_hough (scikit-image.org)
            detection = aware_utils.prob_hough_detect(binary_maps,
                                                      threshold=10)

        # Remove areas that are too small or that don't have enough detections
        detection = aware_utils.cleanup(detection,
                                        size_thresh=50,
                                        inv_thresh=8)

        detection_maps = copy.deepcopy(binary_maps)
        for i in range(0, len(detection)):
            detection_maps[i].data = detection[i]
        #If there is anything left in 'detection', fit a function to the original
        #diffmaps in the region defined by 'detection'. Currently fits a
        #Gaussian in the y-direction for each x
        #use 'detection' to guess starting fit parameters.

        #get just the positive elements of the difference map. Perform fitting on
        #these positive diffmaps.
        posdiffs = copy.deepcopy(diffs)
        for i in range(0, len(diffs)):
            temp = diffs[i].data < 0
            posdiffs[i].data[temp] = 0

        #fit a function to the difference maps in the cases where there has been a
        #detection
        fitparams, wavefront = aware_utils.fit_wavefront(posdiffs, detection)

        #transform the detected model wavefront back into heliocentric coordinates so it can be overlaid
        wavefront_hc = aware_utils.map_reravel(wavefront, params, verbose=True)

        #strip out the velocity information from the wavefront fitting
        velocity = aware_utils.wavefront_velocity(fitparams)

        #strip out the position and width information from the wavefront fitting
        pos_width = aware_utils.wavefront_position_and_width(fitparams)

        #now save products we have created in a pickle file for future reference
        #Will save output in ~/aware_results
        extn = time_range.start().strftime('%Y%m%d_%H%M')
        save_path = os.path.expanduser('~/aware_results/')
        save_file = 'aware_results_' + extn + '.pickle'

        if not os.path.exists(save_path):
            os.makedirs(save_path)

        output = open(save_path + save_file, 'wb')
        print('Saving result products to: ' + save_path + save_file)

        pickle.dump(wavefront_hc, output)
        output.close()

        #visualize the model wavefront
        visualize(wavefront_hc)

    return maps, new_maps, diffs, threshold_maps, binary_maps, detection_maps, wavefront, velocity, pos_width, persistence_maps, wavefront_hc
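
A minimal usage sketch for the pipeline above, assuming the same sunpy imports and aware_utils module the function itself relies on; the unpacking follows the order of the return statement.

from sunpy.time import TimeRange

products = main(source_data='.jp2',
                time_range=TimeRange('2011/10/01 09:45:00', '2011/10/01 10:15:59'),
                algorithm='hough')
if products is not None:
    # wavefront model maps in the original HPC coordinates are the last element
    wavefront_hc = products[-1]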
Example #2
def test_rhessi_invalid_peek(rhessi_test_ts):
    a = rhessi_test_ts.time_range.start - TimeDelta(2*u.day)
    b = rhessi_test_ts.time_range.start - TimeDelta(1*u.day)
    empty_ts = rhessi_test_ts.truncate(TimeRange(a, b))
    with pytest.raises(ValueError):
        empty_ts.peek()
Example #3
def test_generic_ts_invalid_peek(generic_ts):
    a = generic_ts.time_range.start - TimeDelta(2*u.day)
    b = generic_ts.time_range.start - TimeDelta(1*u.day)
    empty_ts = generic_ts.truncate(TimeRange(a, b))
    with pytest.raises(ValueError):
        empty_ts.peek()
Example #4
"""
NOAA LightCurve Tests
"""
from __future__ import absolute_import

import pytest
import sunpy.lightcurve
from sunpy.time import TimeRange

timerange_a = TimeRange('2004/01/01', '2007/01/01')


class TestNOAAIndicesLightCurve(object):
    @pytest.mark.online
    def test_create(self):
        lc = sunpy.lightcurve.NOAAIndicesLightCurve.create()
        assert isinstance(lc, sunpy.lightcurve.NOAAIndicesLightCurve)

    @pytest.mark.online
    def test_isempty(self):
        lc = sunpy.lightcurve.NOAAIndicesLightCurve.create()
        assert not lc.data.empty

    @pytest.mark.online
    def test_url(self):
        """Test creation with url"""
        url = 'ftp://ftp.swpc.noaa.gov/pub/weekly/RecentIndices.txt'
        lc1 = sunpy.lightcurve.NOAAIndicesLightCurve.create(url)
        assert isinstance(lc1, sunpy.lightcurve.NOAAIndicesLightCurve)

    @pytest.mark.online
Example #5
def backprojection(calibrated_event_list,
                   pixel_size=(1., 1.) * u.arcsec,
                   image_dim=(64, 64) * u.pix):
    """
    Given a stacked calibrated event list fits file create a back
    projection image.

    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : string
        filename of a RHESSI calibrated event list
    pixel_size : `~astropy.units.Quantity` instance
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : `~astropy.units.Quantity` instance
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    >>> import sunpy.data
    >>> import sunpy.data.sample
    >>> import sunpy.instr.rhessi as rhessi
    >>> sunpy.data.download_sample_data(overwrite=False)   # doctest: +SKIP
    >>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)   # doctest: +SKIP
    >>> map.peek()   # doctest: +SKIP

    """
    if not isinstance(pixel_size, u.Quantity):
        raise ValueError("pixel_size must be an astropy Quantity in arcseconds")
    try:
        pixel_size = pixel_size.to(u.arcsec)
    except u.UnitConversionError:
        raise ValueError("'{0}' is not a valid pixel_size unit".format(
            pixel_size.unit))
    if not (isinstance(image_dim, u.Quantity) and image_dim.unit == 'pix'):
        raise ValueError("image_dim must be an astropy Quantity in pixels")

    try:
        import sunpy.data.sample
    except ImportError:
        import sunpy.data
        sunpy.data.download_sample_data()
        import sunpy.data.sample
    # This may need to be moved up to data from sample
    calibrated_event_list = sunpy.data.sample.RHESSI_EVENT_LIST

    afits = fits.open(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(
        info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0])

    image = np.zeros(image_dim.value.astype(int))

    # find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size.value,
                                         image_dim=image_dim.value)

    dict_header = {
        "DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1": pixel_size[0],
        "NAXIS1": image_dim[0],
        "CRVAL1": xyoffset[0],
        "CRPIX1": image_dim[0].value / 2 + 0.5,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": pixel_size[1],
        "NAXIS2": image_dim[1],
        "CRVAL2": xyoffset[1],
        "CRPIX2": image_dim[1].value / 2 + 0.5,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
        "RSUN_OBS": solar_semidiameter_angular_size(time_range.center()).value,
        "RSUN_REF": sunpy.sun.constants.radius.value,
        "DSUN_OBS": sunearth_distance(time_range.center()) * sunpy.sun.constants.au.value
    }

    header = sunpy.map.MapMeta(dict_header)
    result_map = sunpy.map.Map(image, header)

    return result_map
Example #6
 def timerange_a(self):
     return TimeRange('2008/06/01', '2008/06/02')
Example #7
    def _get_goes_sat_num(start, end):
        """
        Parses the query time to determine which GOES satellite to use.

        Parameters
        ----------
        start : time-like
            The start of the query time range.
        end : time-like
            The end of the query time range.
        """
        goes_operational = {
            2: TimeRange('1980-01-04', '1983-05-01'),
            5: TimeRange('1983-05-02', '1984-08-01'),
            6: TimeRange('1983-06-01', '1994-08-19'),
            7: TimeRange('1994-01-01', '1996-08-14'),
            8: TimeRange('1996-03-21', '2003-06-19'),
            9: TimeRange('1997-01-01', '1998-09-09'),
            10: TimeRange('1998-07-10', '2009-12-02'),
            11: TimeRange('2006-06-20', '2008-02-16'),
            12: TimeRange('2002-12-13', '2007-05-09'),
            13: TimeRange('2006-08-01', '2006-08-01'),
            14: TimeRange('2009-12-02', '2010-11-05'),
            15: TimeRange('2010-09-01', Time.now()),
        }

        sat_list = []
        for sat_num in goes_operational:
            if (goes_operational[sat_num].start <= start <=
                    goes_operational[sat_num].end
                    and goes_operational[sat_num].start <= end <=
                    goes_operational[sat_num].end):
                # if true then the satellite with sat_num is available
                sat_list.append(sat_num)

        if not sat_list:
            # if no satellites were found then raise an exception
            raise Exception('No operational GOES satellites within time range')
        else:
            return sat_list
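
A brief usage sketch for _get_goes_sat_num above (shown called as a plain function), assuming astropy Time objects for the query endpoints; the dates are illustrative.

from astropy.time import Time

start = Time('2011-06-07 06:00')
end = Time('2011-06-07 10:00')
print(_get_goes_sat_num(start, end))  # [15]: only GOES-15 covers mid-2011 here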
Example #8
def testDirectoryRangeFalse():
    s = Scraper('%Y%m%d/%Y%m%d_%H.fit.gz')
    directory_list = ['20091230/', '20091231/', '20100101/',
                      '20090102/', '20090103/']
    timerange = TimeRange('2009/12/30', '2010/01/03')
    assert s.range(timerange) != directory_list
Example #9
def backprojection(calibrated_event_list,
                   pixel_size=(1., 1.),
                   image_dim=(64, 64)):
    """
    Given a stacked calibrated event list fits file create a back 
    projection image.
    
    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : string
        filename of a RHESSI calibrated event list
    pixel_size : 2-tuple
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : 2-tuple
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    >>> import sunpy.instr.rhessi as rhessi
    >>> map = rhessi.backprojection(sunpy.RHESSI_EVENT_LIST)
    >>> map.show()

    """

    # NOTE: this overrides the input argument with the bundled sample event list
    calibrated_event_list = sunpy.RHESSI_EVENT_LIST
    afits = fits.open(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(
        info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0])

    image = np.zeros(image_dim)

    #find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size,
                                         image_dim=image_dim)

    dict_header = {
        "DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1": pixel_size[0],
        "NAXIS1": image_dim[0],
        "CRVAL1": xyoffset[0],
        "CRPIX1": image_dim[0] / 2 + 0.5,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": pixel_size[1],
        "NAXIS2": image_dim[1],
        "CRVAL2": xyoffset[1],
        "CRPIX2": image_dim[0] / 2 + 0.5,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
        "RSUN_OBS": solar_semidiameter_angular_size(time_range.center()),
        "RSUN_REF": sun.radius,
        "DSUN_OBS":
        sunearth_distance(time_range.center()) * sunpy.sun.constants.au
    }

    header = sunpy.map.MapHeader(dict_header)
    result_map = sunpy.map.Map(image, header)

    return result_map
Example #10
def DownloadData():
    [tst, ted] = gettime()
    if ted.mjd <= tst.mjd:
        Div_JSOC_info.text = '''Error: start time must occur earlier than end time. Please re-enter start time and end time!!!'''
    elif len(Wavelngth_checkbox.active) == 0:
        Div_JSOC_info.text = '''Error: choose at least one wavelength!!!'''
    else:
        Div_JSOC_info.text = ''''''
        c = drms.Client(verbose=True)
        export_protocol = 'fits'
        if not Text_email.value:
            Div_JSOC_info.text = '''Error: provide your JSOC registered email address!!!'''
            raise RuntimeError('Email address is required.')
        else:
            if not c.check_email(Text_email.value):
                Div_JSOC_info.text = '''Error: <b>Email address</b> is not valid or not registered!!!'''
                raise RuntimeError(
                    'Email address is not valid or not registered.')
            else:
                config_main['core']['JSOC_reg_email'] = Text_email.value
                DButil.updatejsonfile(suncasa_dir + 'DataBrowser/config.json',
                                      config_main)

        labelsactive = [
            Wavelngth_checkbox.labels[ll] for ll in Wavelngth_checkbox.active
        ]
        if 'goes' in labelsactive:
            global goes
            tr = TimeRange(tst.iso, ted.iso)
            goes = GOESLightCurve.create(tr)
            fout = database_dir + 'goes-' + Text_PlotID.value
            with open(fout, 'wb') as fp:
                pickle.dump(goes, fp)
            Div_JSOC_info.text = """<p>{} saved.</p>""".format(fout)
            MkPlot_args_dict['goesfile'] = os.path.basename(fout)
            labelsactive.pop(labelsactive.index('goes'))
        for series in ['hmi.M_45s', 'aia.lev1_uv_24s', 'aia.lev1_euv_12s']:
            waves = []
            for ll in labelsactive:
                if serieslist[ll] == series:
                    waves.append(ll)
            if len(waves) > 0:
                # try:
                tsel = tst.iso.replace(' ', 'T') + '_TAI-' + ted.iso.replace(
                    ' ', 'T') + '_TAI'
                wave = ','.join(waves)
                cadence = Text_Cadence.value
                if cadence[-1] == 's' and get_num(cadence) < 12:
                    cadence = '12s'
                    Text_Cadence.value = cadence
                if series == 'aia.lev1_uv_24s':
                    if cadence[-1] == 's' and get_num(cadence) < 24:
                        cadence = '24s'
                if series == 'hmi.M_45s':
                    wave = ''
                    if cadence[-1] == 's' and get_num(cadence) < 45:
                        cadence = '45s'
                segments = 'image'
                qstr = '%s[%s@%s][%s]{%s}' % (series, tsel, cadence, wave,
                                              segments)
                print(qstr)
                r = c.export(qstr,
                             method='url',
                             protocol=export_protocol,
                             email=Text_email.value)
                Div_JSOC_info.text = Div_JSOC_info.text + """<p>Submitting export request <b>{}</b>...</p>""".format(
                    qstr)
                Div_JSOC_info.text = Div_JSOC_info.text + """<p>Request URL: {}</p>""".format(
                    r.request_url)
                Div_JSOC_info.text = Div_JSOC_info.text + """<p>{:d} file(s) available for download.</p>""".format(
                    len(r.urls))
                idx2download = DButil.FileNotInList(
                    r.data['filename'],
                    DButil.readsdofile(datadir=SDOdir,
                                       wavelength=wave,
                                       jdtime=[tst.jd, ted.jd],
                                       isexists=True))
                if len(idx2download) > 0:
                    Div_JSOC_info.text = Div_JSOC_info.text + """<p><b>Downloading</b>....</p>"""
                    r.download(SDOdir, index=idx2download)
                else:
                    Div_JSOC_info.text = Div_JSOC_info.text + """<p>Target file(s) existed.</p>"""

                filename = glob.glob(SDOdir + '*.fits')
                if len(filename) > 0:
                    dirs = DButil.getsdodir(filename)
                    for ll, dd in enumerate(dirs['dir']):
                        if not os.path.exists(SDOdir + dd):
                            os.makedirs(SDOdir + dd)
                        os.system('mv {}/*{}*.fits {}{}'.format(
                            SDOdir, dirs['timstr'][ll], SDOdir, dd))

                Div_JSOC_info.text = Div_JSOC_info.text + """<p>Download <b>finished</b>.</p>"""
                Div_JSOC_info.text = Div_JSOC_info.text + """<p>file(s) downloaded to <b>{}</b></p>""".format(
                    os.path.abspath(SDOdir))
Example #11
def testDirectoryRange():
    s = Scraper('%Y/%m/%d/%Y%m%d_%H.fit.gz')
    directory_list = ['2009/12/30/', '2009/12/31/', '2010/01/01/',
                      '2010/01/02/', '2010/01/03/']
    timerange = TimeRange('2009-12-30', '2010-01-03')
    assert s.range(timerange) == directory_list
Example #12
 def time_range(self):
     """Returns the start and end times of the LightCurve as a TimeRange
     object"""
     return TimeRange(self.data.index[0], self.data.index[-1])
Example #13
def lightcurves(timerange, outdir='./', specfile=None, goes=True, hessifile=None, fermifile=None, ylog=False, hessi_smoth=0, dspec_cmap='cubehelix',
                vmax=None, vmin=None):
    from sunpy.lightcurve import GOESLightCurve
    from sunpy.time import TimeRange, parse_time
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    from astropy.time import Time
    import numpy as np
    import numpy.ma as ma
    from scipy.signal import medfilt
    import matplotlib.colors as colors
    from scipy.io import readsav
    from suncasa.utils import DButil
    import os

    timerange = Time(timerange)
    if hessifile:
        if not os.path.exists(hessifile):
            hessi_script = 'HESSI_lc.pro'
            print('Run the script {} in SSWIDL to download RHESSI summary file first!'.format(hessi_script))
            fi = open(hessi_script, 'w')
            fi.write("time_range = ['{}','{}'] \n".format(timerange[0].datetime.strftime('%Y-%b-%d %H:%M:%S'),
                                                          timerange[1].datetime.strftime('%Y-%b-%d %H:%M:%S')))
            fi.write('obs_obj = hsi_obs_summary(obs_time_interval=time_range) \n')
            fi.write('data = obs_obj->getdata() \n')
            fi.write('info = obs_obj->get( / info) \n')
            fi.write('obs_data = data.countrate \n')
            fi.write('obs_times = obs_obj->getdata(/time) \n')
            fi.write('obs_times_str  = anytim(obs_times,/CCSDS) \n')
            fi.write('obs_energies  = fltarr(2,n_elements(info.energy_edges)-1) \n')
            fi.write('for ll=0,n_elements(info.energy_edges)-2 do begin \n')
            fi.write('    obs_energies[0,ll]=info.energy_edges[ll] \n')
            fi.write('    obs_energies[1,ll]=info.energy_edges[ll+1] \n')
            fi.write('endfor \n')
            fi.write('save,filename="{}",OBS_DATA,OBS_ENERGIES,obs_times_str \n'.format(hessifile))
            fi.write('end \n')
            fi.close()
            return -1
        hessi = readsav(hessifile)
        hessi_tim = Time(list(hessi['obs_times_str']))
        hessi_tim_plt = hessi_tim.plot_date

    specdata = np.load(specfile)
    spec = specdata['spec']
    if len(spec.shape) == 4:
        (npol, nbl, nfreq, ntim) = spec.shape
    else:
        (nfreq, ntim) = spec.shape
    spec = np.mean(np.mean(spec, axis=0), axis=0)
    freq = specdata['freq']
    freqghz = freq / 1e9
    spec_tim = Time(specdata['tim'] / 3600. / 24., format='mjd')

    fidx_plt = (np.linspace(0, nfreq - nfreq / 8.0 / 2.0, 8) + nfreq / 8.0 / 2.0).astype(int)

    try:
        plt.style.use('seaborn-bright')
        params = {'font.size': 8, 'axes.grid': False, 'axes.facecolor': 'w', 'xtick.color': '#555555', 'ytick.color': '#555555',
                  'xtick.major.size': 2.0, 'ytick.major.size': 2.0, 'xtick.minor.size': 1.0, 'ytick.minor.size': 1.0, 'axes.axisbelow': False,
                  'axes.xmargin': 0.0, 'axes.ymargin': 0.0, 'axes.linewidth': 0.5, 'xtick.major.pad': 1.5, 'ytick.major.pad': 1.5,
                  'lines.linewidth': 1.0}
        mpl.rcParams.update(params)
    except Exception:
        pass

    if goes:
        tr = TimeRange(timerange.iso)
        goes = GOESLightCurve.create(tr)
        dates = mpl.dates.date2num(parse_time(goes.data.index))

    tr_plt = Time(timerange)

    fig, axs = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(8, 6))
    ax = axs[0]
    tidx_hessi, = np.where((hessi_tim >= tr_plt[0]) & (hessi_tim <= tr_plt[1]))
    for idx, eg in enumerate(hessi['obs_energies']):
        flux_hessi = ma.masked_array(hessi['obs_data'][tidx_hessi, idx])
        if hessi_smoth > 0:
            flux_hessi = DButil.smooth(flux_hessi, 20)
            flux_hessi = flux_hessi / np.nanmax(flux_hessi)
            ax.step(hessi_tim_plt[tidx_hessi], DButil.smooth(flux_hessi, hessi_smoth), label='{:.0f}-{:.0f} keV'.format(eg[0], eg[1]))
        else:
            ax.step(hessi_tim_plt[tidx_hessi], flux_hessi, label='{:.0f}-{:.0f} keV'.format(eg[0], eg[1]))

    if ylog:
        ax.set_yscale("log", nonposy='clip')
    else:
        ax.set_yscale("linear", nonposy='clip')
    ax.legend()
    ax.set_ylabel('Count Rate [s$^{-1}$ detector$^{-1}$ ]')

    ax = axs[1]
    if goes:
        # 'dates' is only defined when GOES data were loaded above
        tidx_goes, = np.where((dates >= tr_plt[0].plot_date) & (dates <= tr_plt[1].plot_date))
        ax.plot(dates[tidx_goes], goes.data['xrsb'][tidx_goes] / np.nanmax(goes.data['xrsb'][tidx_goes]), label=r'GOES 1.0--8.0 $\AA$')
        ax.plot(dates[tidx_goes], goes.data['xrsa'][tidx_goes] / np.nanmax(goes.data['xrsa'][tidx_goes]), label=r'GOES 0.5--4.0 $\AA$')
    tidx_spec, = np.where((spec_tim >= tr_plt[0]) & (spec_tim <= tr_plt[1]))
    if len(tidx_spec) > 1:
        spec_tim_plt = spec_tim[tidx_spec[0]:tidx_spec[-1]].plot_date
        flux_colors = []
        for idx, fidx in enumerate(fidx_plt):
            flux_plt = medfilt(spec[fidx, tidx_spec[0]:tidx_spec[-1]], 7)
            p = ax.plot(spec_tim_plt, flux_plt / np.nanmax(flux_plt), label='{:.2f} GHz'.format(freqghz[fidx]))
            flux_colors.append(p[0].get_color())
        ax.set_ylabel('Flux (Normalized)')
        ax.set_ylim(0, 1.1)
        ax.legend()

        ax = axs[2]
        spec_plt = spec[:, tidx_spec[0]:tidx_spec[-1]]
        ax.pcolormesh(spec_tim_plt, freqghz, spec_plt, cmap=dspec_cmap, norm=colors.LogNorm(vmax=vmax, vmin=vmin))
        ax.set_ylabel('Frequency [GHz]')
        formatter = mpl.dates.DateFormatter('%H:%M:%S')
        ax.xaxis.set_major_formatter(formatter)
        ax.fmt_xdata = formatter
        ax.set_xlim(tr_plt.plot_date)
        ax.set_xlabel('Start time ({})'.format(tr_plt[0].datetime.strftime('%d-%b-%y %H:%M:%S')))
        for idx, fidx in enumerate(fidx_plt):
            ax.axhline(freqghz[fidx], color=flux_colors[idx], ls=':')
    else:
        print('Warning: No radio data in the timerange. Proceed without dynamic spectrum.')

    fig.tight_layout()
    fig.subplots_adjust(hspace=0.06)
    imgdir = outdir + '/fig01_{}-{}.png'.format(tr_plt[0].datetime.strftime('%H%M%S'), tr_plt[1].datetime.strftime('%H%M%S'))
    fig.savefig(imgdir, dpi=200)
    print('Save image to ' + imgdir)
Example #14
    def _get_goes_sat_num(self, start, end):
        """Parses the query time to determine which GOES satellite to use."""

        goes_operational = {
            2: TimeRange('1981-01-01', '1983-04-30'),
            5: TimeRange('1983-05-02', '1984-07-31'),
            6: TimeRange('1983-06-01', '1994-08-18'),
            7: TimeRange('1994-01-01', '1996-08-13'),
            8: TimeRange('1996-03-21', '2003-06-18'),
            9: TimeRange('1997-01-01', '1998-09-08'),
            10: TimeRange('1998-07-10', '2009-12-01'),
            11: TimeRange('2006-06-20', '2008-02-15'),
            12: TimeRange('2002-12-13', '2007-05-08'),
            13: TimeRange('2006-08-01', '2006-08-01'),
            14: TimeRange('2009-12-02', '2010-10-04'),
            15: TimeRange('2010-09-01', datetime.datetime.utcnow())
        }

        sat_list = []
        for sat_num in goes_operational:
            if ((start > goes_operational[sat_num].start
                 and start < goes_operational[sat_num].end
                 and (end > goes_operational[sat_num].start
                      and end < goes_operational[sat_num].end))):
                # if true then the satellite with sat_num is available
                sat_list.append(sat_num)

        if not sat_list:
            # if no satellites were found then raise an exception
            raise Exception('No operational GOES satellites within time range')
        else:
            return sat_list
Example #15
 def time_range(self):
     """
     Returns the time-span for which records are available
     """
     return TimeRange(min(qrblock.time.start for qrblock in self),
                      max(qrblock.time.end for qrblock in self))
Example #16
def test_ftp():
    pattern = 'ftp://solar-pub.nao.ac.jp/pub/nsro/norh/data/tcx/%Y/%m/tca%y%m%d'
    s = Scraper(pattern)
    timerange = TimeRange('2016/5/18 15:28:00', '2016/5/20 16:30:50')
    assert len(s.filelist(timerange)) == 2
Example #17

import warnings
warnings.filterwarnings('ignore')

"""
import matplotlib.pyplot as plt
from sunpy.timeseries import TimeSeries
from sunpy.time import TimeRange, parse_time
from sunpy.net import hek, Fido, attrs as a
import numpy as np

###############################################################################
# Let's first grab GOES XRS data for a particular time of interest

tr = TimeRange(['2011-06-07 06:00', '2011-06-07 10:00'])
results = Fido.search(a.Time(tr), a.Instrument('XRS'))
results

###############################################################################
# Then download the data and load it into a TimeSeries

files = Fido.fetch(results)
goes = TimeSeries(files)

###############################################################################
# Next lets grab the HEK data for this time from the NOAA Space Weather
# Prediction Center (SWPC)

client = hek.HEKClient()
flares_hek = client.search(hek.attrs.Time(tr.start, tr.end), hek.attrs.FL,
                           hek.attrs.FRM.Name == 'SWPC')
Example #18
def testNoDateDirectory():
    s = Scraper('mySpacecraft/myInstrument/xMinutes/aaa%y%b.ext')
    directory_list = ['mySpacecraft/myInstrument/xMinutes/']
    timerange = TimeRange('2009/11/20', '2010/01/03')
    assert s.range(timerange) == directory_list
Example #19
 def timerange_b(self):
     return TimeRange('2004/06/03', '2004/06/04')
Example #20
def testDirectoryRangeHours():
    s = Scraper('%Y%m%d_%H/%H%M.csv')
    timerange = TimeRange('2009-12-31T23:40:00', '2010-01-01T01:15:00')
    assert len(s.range(timerange)) == 3  # 3 directories (1 per hour)
Example #21
import urllib.request
from bs4 import BeautifulSoup
from sunpy.util.scraper import Scraper
from sunpy.time import TimeRange

save_dir = "/Users/lahayes/QPP/interesting_event_2014-611/ssw_fits/"

timerange = TimeRange('2014-06-11 05:30:00', '2014-06-11 05:36:00')

url = 'https://hesperia.gsfc.nasa.gov/sdo/aia/2014/06/11/20140611_0528-0547/'

resp = urllib.request.urlopen(url)
soup = BeautifulSoup(resp, 'html.parser')


def find_url_waves(wave):
    file_link = []
    for link in soup.find_all('a', href=True):
        if link['href'].endswith('{:d}_.fts'.format(wave)):
            file_link.append(link['href'])
    return file_link


# Use Scraper!


def list_files(url):

    resp = urllib.request.urlopen(url)
    soup = BeautifulSoup(resp, 'html.parser')
Example #22
def testDirectoryRange_single():
    s = Scraper('%Y%m%d/%H_%M.csv')
    startdate = datetime.datetime(2010, 10, 10, 5, 0)
    enddate = datetime.datetime(2010, 10, 10, 7, 0)
    timerange = TimeRange(startdate, enddate)
    assert len(s.range(timerange)) == 1
Example #23
def backprojection(calibrated_event_list,
                   pixel_size=(1., 1.) * u.arcsec,
                   image_dim=(64, 64) * u.pix):
    """
    Given a stacked calibrated event list fits file create a back
    projection image.

    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : str
        filename of a RHESSI calibrated event list
    pixel_size : `~astropy.units.Quantity` instance
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : `~astropy.units.Quantity` instance
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    This example is broken.
    >>> import sunpy.data
    >>> import sunpy.data.sample # doctest: +REMOTE_DATA
    >>> import sunpy.instr.rhessi as rhessi
    >>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)   # doctest: +SKIP
    >>> map.peek()   # doctest: +SKIP

    """
    # import sunpy.map in here so that net and timeseries don't end up importing map
    import sunpy.map

    pixel_size = pixel_size.to(u.arcsec)
    image_dim = np.array(image_dim.to(u.pix).value, dtype=int)

    afits = sunpy.io.read_file(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(
        info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0], format='utime')

    image = np.zeros(image_dim)

    # find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size.value,
                                         image_dim=image_dim)

    dict_header = {
        "DATE-OBS": time_range.center.strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1": pixel_size[0],
        "NAXIS1": image_dim[0],
        "CRVAL1": xyoffset[0],
        "CRPIX1": image_dim[0] / 2 + 0.5,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": pixel_size[1],
        "NAXIS2": image_dim[1],
        "CRVAL2": xyoffset[1],
        "CRPIX2": image_dim[1] / 2 + 0.5,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
        "RSUN_OBS": solar_semidiameter_angular_size(time_range.center).value,
        "RSUN_REF": sunpy.sun.constants.radius.value,
        "DSUN_OBS": get_sunearth_distance(time_range.center).value * sunpy.sun.constants.au.value
    }

    result_map = sunpy.map.Map(image, dict_header)

    return result_map
Example #24
def testNoDirectory():
    s = Scraper('files/%Y%m%d_%H%M.dat')
    startdate = datetime.datetime(2010, 1, 10, 20, 30)
    enddate = datetime.datetime(2010, 1, 20, 20, 30)
    timerange = TimeRange(startdate, enddate)
    assert len(s.range(timerange)) == 1
Example #25
def test_fermi_gbm_invalid_peek(fermi_gbm_test_ts):
    a = fermi_gbm_test_ts.time_range.start - TimeDelta(2*u.day)
    b = fermi_gbm_test_ts.time_range.start - TimeDelta(1*u.day)
    empty_ts = fermi_gbm_test_ts.truncate(TimeRange(a, b))
    with pytest.raises(ValueError):
        empty_ts.peek()
Example #26
    def _get_goes_sat_num(self, date):
        """
        Determines the satellite number for a given date.

        Parameters
        ----------

        date : `datetime.datetime`
            The date to determine which satellite is active.
        """
        goes_operational = {
            2: TimeRange('1981-01-01', '1983-04-30'),
            5: TimeRange('1983-05-02', '1984-07-31'),
            6: TimeRange('1983-06-01', '1994-08-18'),
            7: TimeRange('1994-01-01', '1996-08-13'),
            8: TimeRange('1996-03-21', '2003-06-18'),
            9: TimeRange('1997-01-01', '1998-09-08'),
            10: TimeRange('1998-07-10', '2009-12-01'),
            11: TimeRange('2006-06-20', '2008-02-15'),
            12: TimeRange('2002-12-13', '2007-05-08'),
            13: TimeRange('2006-08-01', '2006-08-01'),
            14: TimeRange('2009-12-02', '2010-10-04'),
            15: TimeRange('2010-09-01', datetime.datetime.utcnow())
        }

        results = []
        for sat_num in goes_operational:
            if date in goes_operational[sat_num]:
                # if true then the satellite with sat_num is available
                results.append(sat_num)

        if results:
            # Return the newest satellite
            return max(results)
        else:
            # if no satellites were found then raise an exception
            raise ValueError('No operational GOES satellites on {}'.format(
                date.strftime(TIME_FORMAT)))
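
A brief usage sketch for the date-based lookup above; because it is a method, the client instance here is hypothetical.

import datetime

client = GOESClient()  # hypothetical instance of the class defining this method
print(client._get_goes_sat_num(datetime.datetime(2011, 6, 7)))  # -> 15, the newest match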
Example #27
def test_noaa_pre_invalid_peek(noaa_pre_test_ts):
    a = noaa_pre_test_ts.time_range.start - TimeDelta(2*u.day)
    b = noaa_pre_test_ts.time_range.start - TimeDelta(1*u.day)
    empty_ts = noaa_pre_test_ts.truncate(TimeRange(a, b))
    with pytest.raises(ValueError):
        empty_ts.peek()
Example #28
ts_eve.peek(subplots=True)

##############################################################################
# An individual column can be extracted from a TimeSeries:
ts_eve_extract = ts_eve.extract('CMLon')
# Note: no matter the source type of the original TimeSeries, the extracted
# TimeSeries is always generic.

##############################################################################
# You can truncate a TimeSeries using the truncate() method.
# This can use string datetime arguments, a SunPy TimeRange or integer value
# arguments (similar to slicing, but using function notation).
# Using integers we can get every other entry using:
ts_goes_trunc = ts_goes.truncate(0, 100000, 2)
# Or using a TimeRange:
tr = TimeRange('2011-06-07 05:00', '2011-06-07 06:30')
ts_goes_trunc = ts_goes.truncate(tr)
# Or using strings:
ts_goes_trunc = ts_goes.truncate('2011-06-07 05:00', '2011-06-07 06:30')
fig, ax = plt.subplots()
ts_goes_trunc.plot()
# Note: the strings are parsed using SunPy's string parser.
# Debate: how should we deal with metadata when truncating?

##############################################################################
# You can use Pandas resample method, for example to downsample:
df_downsampled = ts_goes_trunc.to_dataframe().resample('10T').mean()
ts_downsampled = sunpy.timeseries.TimeSeries(df_downsampled,
                                             ts_goes_trunc.meta,
                                             ts_goes_trunc.units)
fig, ax = plt.subplots()
Example #29
def test_Time_timerange():
    t = va.Time(TimeRange('2012/1/1', '2012/1/2'))
    assert isinstance(t, va.Time)
    assert t.min == parse_time((2012, 1, 1))
    assert t.max == parse_time((2012, 1, 2))
Example #30
def get_detector_sun_angles_for_date(date, file):
    """
    Get the GBM detector angles vs the Sun as a function of time for a given
    date.

    Parameters
    ----------
    date : parse_time-compatible
        A date specified as a parse_time-compatible
        time string, number, or a datetime object.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        download_weekly_pointing_file function).

    Returns
    -------
    `tuple`:
        A tuple of all the detector angles.
    """

    date = parse_time(date)
    tran = TimeRange(date, date + TimeDelta(1 * u.day))
    scx, scz, times = get_scx_scz_in_timerange(tran, file)

    # retrieve the detector angle information in spacecraft coordinates
    detectors = nai_detector_angles()

    detector_to_sun_angles = []
    # get the detector vs Sun angles for each t and store in a list of
    # dictionaries.
    for i in range(len(scx)):
        detector_radecs = nai_detector_radecs(detectors, scx[i], scz[i],
                                              times[i])

        # this gets the sun position with RA in hours in decimal format
        # (e.g. 4.3). DEC is already in degrees
        sunpos_ra_not_in_deg = [
            sun.apparent_rightascension(times[i]),
            sun.apparent_declination(times[i])
        ]
        # now Sun position with RA in degrees
        sun_pos = [sunpos_ra_not_in_deg[0].to('deg'), sunpos_ra_not_in_deg[1]]
        # now get the angle between each detector and the Sun
        detector_to_sun_angles.append(
            get_detector_separation_angles(detector_radecs, sun_pos))

    # slice the list of dictionaries to get the angles for each detector in a
    # list form
    angles = OrderedDict()
    key_list = [
        'n0', 'n1', 'n2', 'n3', 'n4', 'n5', 'n6', 'n7', 'n8', 'n9', 'n10',
        'n11', 'time'
    ]
    for i in range(13):
        if not key_list[i] == 'time':
            angles[key_list[i]] = [
                item[key_list[i]].value for item in detector_to_sun_angles
            ] * u.deg
        else:
            angles[key_list[i]] = [
                item[key_list[i]] for item in detector_to_sun_angles
            ]

    return angles
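
A brief usage sketch for get_detector_sun_angles_for_date above; the weekly pointing file name is hypothetical and assumed to be already downloaded (e.g. via the download_weekly_pointing_file function mentioned in the docstring).

pointing_file = 'lat_spacecraft_weekly_w522_p202_v001.fits'  # hypothetical local file
angles = get_detector_sun_angles_for_date('2017-09-10', pointing_file)
print(angles['n0'][:5])  # Sun angles for NaI detector n0 at the first few times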