def _multi_request(self, **kwargs):
    """
    Make a series of requests to avoid the 100GB limit.

    Keyword Arguments
    -----------------
    start_time, end_time : time-like
        Bounds of the query; normalized via ``self._process_time``.
    series : str
        JSOC series name.

    Returns
    -------
    list
        The JSOC responses, one per sub-request.  Empty if any of the
        three required keyword arguments is missing.
    """
    start_time = kwargs.pop('start_time', None)
    end_time = kwargs.pop('end_time', None)
    series = kwargs.pop('series', None)
    if any(x is None for x in (start_time, end_time, series)):
        return []
    start_time = self._process_time(start_time)
    end_time = self._process_time(end_time)
    tr = TimeRange(start_time, end_time)
    returns = []
    response, json_response = self._send_jsoc_request(start_time, end_time,
                                                      series, **kwargs)
    # We skip these lines because a massive request is not a practical test.
    error_response = 'Request exceeds max byte limit of 100000MB'
    if (json_response['status'] == 3 and
            json_response['error'] == error_response):  # pragma: no cover
        # BUG FIX: the recursive calls previously passed positional
        # arguments, but this method accepts keyword arguments only, so
        # splitting an oversized request raised TypeError.  Bisect the
        # time range and pass each half as keywords instead.
        returns.append(self._multi_request(start_time=tr.start(),
                                           end_time=tr.center(),
                                           series=series,
                                           **kwargs)[0])  # pragma: no cover
        returns.append(self._multi_request(start_time=tr.center(),
                                           end_time=tr.end(),
                                           series=series,
                                           **kwargs)[0])  # pragma: no cover
    else:
        returns.append(response)
    return returns
def truncate(self, a, b=None):
    """Return a copy of the timeseries restricted to the given times.

    Parameters
    ----------
    a : `sunpy.time.TimeRange` or time-like
        Either a complete time range, or the start time.
    b : time-like, optional
        The end time; only used when *a* is not a `TimeRange`.
    """
    span = a if isinstance(a, TimeRange) else TimeRange(a, b)
    clipped = self.data.truncate(span.start(), span.end())
    return self.__class__.create(clipped, self.meta.copy())
def truncate(self, a, b=None):
    """Return a truncated copy of the lightcurve.

    Accepts either a single `TimeRange`, or start and end times.
    """
    if isinstance(a, TimeRange):
        span = a
    else:
        span = TimeRange(a, b)
    clipped = self.data.truncate(span.start(), span.end())
    return LightCurve(clipped, self.header.copy())
def _multi_request(self, start_time, end_time, series, **kwargs):
    """
    Make a series of requests to avoid the 100GB limit.

    If JSOC rejects the query for exceeding the byte limit, the time
    range is bisected and each half is requested recursively.
    """
    span = TimeRange(start_time, end_time)
    response, json_response = self._send_jsoc_request(start_time, end_time,
                                                      series, **kwargs)
    over_limit = (json_response['status'] == 3 and
                  json_response['error'] == 'Request exceeds max byte limit of 100000MB')
    results = []
    if over_limit:
        # Recurse on each half, keeping the first response of each.
        for lo, hi in ((span.start(), span.center()),
                       (span.center(), span.end())):
            results.append(self._multi_request(lo, hi, series, **kwargs)[0])
    else:
        results.append(response)
    return results
def _get_url_for_date_range(*args, **kwargs):
    """Returns a URL to the RHESSI data for the specified date range.

    Parameters
    ----------
    args : TimeRange, datetimes, date strings
        Date range should be specified using a TimeRange, or start
        and end dates as datetime instances or date strings.

    Raises
    ------
    ValueError
        If the arguments cannot be interpreted as a time range, or if
        the start time is after the end time.
    """
    if len(args) == 1 and isinstance(args[0], TimeRange):
        time_range = args[0]
    elif len(args) == 2:
        time_range = TimeRange(parse_time(args[0]), parse_time(args[1]))
    else:
        # BUG FIX: previously any other argument shape fell through with
        # `time_range` unbound, raising a confusing UnboundLocalError.
        raise ValueError('must provide a TimeRange or start and end times')
    if time_range.end() < time_range.start():
        raise ValueError('start time > end time')
    url = rhessi.get_obssum_filename(time_range)
    return url
def test_rhessi_invalid_peek(rhessi_test_ts):
    """peek() on a truncated-to-empty RHESSI timeseries raises ValueError."""
    start = rhessi_test_ts.time_range.start
    window = TimeRange(start - TimeDelta(2 * u.day),
                       start - TimeDelta(1 * u.day))
    empty_ts = rhessi_test_ts.truncate(window)
    with pytest.raises(ValueError):
        empty_ts.peek()
query_string = '%s[%s][%s]%s{%s}' % (series, time_str, wave_str, filter_str, ','.join(segments)) keys, segs = client.query(query_string, key=keystr, seg=segstr) return keys, segs # test a time interval time_start = Time('2011-02-01T00:00:00.000', scale='utc') time_end = Time('2011-02-04T00:00:00.000', scale='utc') interval_cadence = 2 * u.min aia_search_cadence = 12 * u.second # generate the list of time intervals full_range = TimeRange(time_start, time_end) time_ranges = full_range.window(interval_cadence, interval_cadence) # pick a time range to experiement with time_range = time_ranges[0] print(len(time_ranges)) print(time_range) #print(jsoc_build_time_string_from_range(time_range,aia_search_cadence)) keys, segs = jsoc_query_time_interval(time_range, 193, aia_search_cadence) # = client.query(query_string, key=keystr, seg=segstr) if len(keys) > 0:
def test_basic_ascending_append_tr(basic_1_md, basic_3_md, basic_ascending_append_md):
    """Appended metadata spans from the first block's start to the last's end."""
    expected = TimeRange(basic_1_md.time_range.start,
                         basic_3_md.time_range.end)
    assert basic_ascending_append_md.time_range == expected
def test_truncated_end_tr(basic_ascending_append_md, truncated_end_md):
    """End-truncated metadata keeps its start but ends at the cut time."""
    expected = TimeRange(truncated_end_md.time_range.start,
                         '2010-01-03 1:59:56.091999')
    assert truncated_end_md.time_range == expected
def truncated_new_tr_all_after_md(basic_ascending_append_md):
    """Fixture: metadata truncated to a window entirely after the data."""
    window = TimeRange('2010-01-04 01:01:01.000000',
                       '2010-01-04 02:01:01.000000')
    clipped = copy.deepcopy(basic_ascending_append_md)
    clipped._truncate(window)
    return clipped
def truncated_none_md(basic_ascending_append_md):
    """Fixture: truncation window covers all the metadata, so no change."""
    window = TimeRange('2010-01-01 1:59:57.468999',
                       '2010-01-04 23:59:56.091999')
    clipped = copy.deepcopy(basic_ascending_append_md)
    clipped._truncate(window)
    return clipped
def test_Time_timerange():
    """A TimeRange argument yields a va.Time spanning the same interval."""
    span = TimeRange('2012/1/1', '2012/1/2')
    result = va.Time(span)
    assert isinstance(result, va.Time)
    expected_min = datetime.datetime(2012, 1, 1)
    expected_max = datetime.datetime(2012, 1, 2)
    assert result.min == expected_min
    assert result.max == expected_max
def _get_goes_sat_num(self, date):
    """
    Determines the satellite number for a given date.

    Parameters
    ----------
    date : `astropy.time.Time`
        The date to determine which satellite is active.

    Returns
    -------
    int
        The newest satellite number operational on that date.

    Raises
    ------
    ValueError
        If no GOES satellite was operational on the date.
    """
    # Operational windows per satellite number.
    goes_operational = {
        2: TimeRange("1981-01-01", "1983-04-30"),
        5: TimeRange("1983-05-02", "1984-07-31"),
        6: TimeRange("1983-06-01", "1994-08-18"),
        7: TimeRange("1994-01-01", "1996-08-13"),
        8: TimeRange("1996-03-21", "2003-06-18"),
        9: TimeRange("1997-01-01", "1998-09-08"),
        10: TimeRange("1998-07-10", "2009-12-01"),
        11: TimeRange("2006-06-20", "2008-02-15"),
        12: TimeRange("2002-12-13", "2007-05-08"),
        13: TimeRange("2006-08-01", "2006-08-01"),
        14: TimeRange("2009-12-02", "2010-10-04"),
        15: TimeRange("2010-09-01", parse_time("now")),
    }
    # Collect every satellite whose operational window contains the date.
    matches = [sat_num for sat_num, period in goes_operational.items()
               if date in period]
    if not matches:
        raise ValueError("No operational GOES satellites on {}".format(
            date.strftime(TIME_FORMAT)))
    # Prefer the newest available satellite.
    return max(matches)
def _get_goes_sat_num(start, end):
    """
    Parses the query time to determine which GOES satellite to use.

    Parameters
    ----------
    start : time-like
        Start of the query time range.
    end : time-like
        End of the query time range.

    Returns
    -------
    list
        Satellite numbers whose operational window contains the whole
        ``[start, end]`` interval.

    Raises
    ------
    ValueError
        If no satellite covers the full time range.

    Notes
    -----
    BUG FIX: the docstring previously documented a nonexistent
    ``filepath`` parameter.
    """
    goes_operational = {
        2: TimeRange('1980-01-04', '1983-05-01'),
        5: TimeRange('1983-05-02', '1984-08-01'),
        6: TimeRange('1983-06-01', '1994-08-19'),
        7: TimeRange('1994-01-01', '1996-08-14'),
        8: TimeRange('1996-03-21', '2003-06-19'),
        9: TimeRange('1997-01-01', '1998-09-09'),
        10: TimeRange('1998-07-10', '2009-12-02'),
        11: TimeRange('2006-06-20', '2008-02-16'),
        12: TimeRange('2002-12-13', '2007-05-09'),
        13: TimeRange('2006-08-01', '2006-08-01'),
        14: TimeRange('2009-12-02', '2010-11-05'),
        15: TimeRange('2010-09-01', Time.now()),
    }
    # A satellite qualifies only if both endpoints of the query fall
    # inside its operational window.
    sat_list = [
        sat_num for sat_num, period in goes_operational.items()
        if (period.start <= start <= period.end and
            period.start <= end <= period.end)
    ]
    if not sat_list:
        # Narrowed from a bare `Exception` to ValueError; any caller
        # handling Exception still catches this.
        raise ValueError('No operational GOES satellites within time range')
    return sat_list
def backprojection(calibrated_event_list, pixel_size=(1., 1.) * u.arcsec,
                   image_dim=(64, 64) * u.pix):
    """
    Given a stacked calibrated event list fits file create a back
    projection image.

    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : string
        filename of a RHESSI calibrated event list
    pixel_size : `~astropy.units.Quantity` instance
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : `~astropy.units.Quantity` instance
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    >>> import sunpy.data
    >>> import sunpy.data.sample
    >>> import sunpy.instr.rhessi as rhessi
    >>> sunpy.data.download_sample_data(overwrite=False)   # doctest: +SKIP
    >>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)   # doctest: +SKIP
    >>> map.peek()   # doctest: +SKIP
    """
    if not isinstance(pixel_size, u.Quantity):
        raise ValueError("Must be astropy Quantity in arcseconds")
    try:
        pixel_size = pixel_size.to(u.arcsec)
    except u.UnitsError:
        # BUG FIX: was a bare `except:`, which also swallowed unrelated
        # errors (including KeyboardInterrupt/SystemExit); only a unit
        # conversion failure should be reported as an invalid pixel_size.
        raise ValueError("'{0}' is not a valid pixel_size unit".format(
            pixel_size.unit))
    if not (isinstance(image_dim, u.Quantity) and image_dim.unit == 'pix'):
        raise ValueError("Must be astropy Quantity in pixels")

    try:
        import sunpy.data.sample
    except ImportError:
        import sunpy.data
        sunpy.data.download_sample()
    # This may need to be moved up to data from sample
    # NOTE(review): the input filename is unconditionally replaced by the
    # sample event list here — confirm this is intended.
    calibrated_event_list = sunpy.data.sample.RHESSI_EVENT_LIST

    afits = fits.open(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(
        info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0])

    image = np.zeros(image_dim.value)

    # find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size.value,
                                         image_dim=image_dim.value)

    # NOTE(review): CRPIX2 is derived from image_dim[0], not image_dim[1];
    # this only matters for non-square images — confirm against upstream.
    dict_header = {
        "DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1": pixel_size[0],
        "NAXIS1": image_dim[0],
        "CRVAL1": xyoffset[0],
        "CRPIX1": image_dim[0].value / 2 + 0.5,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": pixel_size[1],
        "NAXIS2": image_dim[1],
        "CRVAL2": xyoffset[1],
        "CRPIX2": image_dim[0].value / 2 + 0.5,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
        "RSUN_OBS": solar_semidiameter_angular_size(time_range.center()).value,
        "RSUN_REF": sunpy.sun.constants.radius.value,
        "DSUN_OBS": sunearth_distance(time_range.center()) * sunpy.sun.constants.au.value
    }

    header = sunpy.map.MapMeta(dict_header)
    result_map = sunpy.map.Map(image, header)
    return result_map
def backprojection(calibrated_event_list, pixel_size=(1.,1.) * u.arcsec,
                   image_dim=(64,64) * u.pix):
    """
    Given a stacked calibrated event list fits file create a back
    projection image.

    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : string
        filename of a RHESSI calibrated event list
    detector : int
        the detector number
    pixel_size : `~astropy.units.Quantity` instance
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : `~astropy.units.Quantity` instance
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    >>> import sunpy.instr.rhessi as rhessi
    >>> map = rhessi.backprojection(sunpy.RHESSI_EVENT_LIST)
    >>> map.peek()
    """
    if not isinstance(pixel_size, u.Quantity):
        raise ValueError("Must be astropy Quantity in arcseconds")
    try:
        pixel_size = pixel_size.to(u.arcsec)
    except u.UnitsError:
        # BUG FIX: was a bare `except:`, which also swallowed unrelated
        # errors (including KeyboardInterrupt/SystemExit).
        raise ValueError("'{0}' is not a valid pixel_size unit".format(pixel_size.unit))
    if not (isinstance(image_dim, u.Quantity) and image_dim.unit == 'pix'):
        raise ValueError("Must be astropy Quantity in pixels")

    # NOTE(review): the input filename is unconditionally replaced by the
    # bundled event list — confirm this is intended.
    calibrated_event_list = sunpy.RHESSI_EVENT_LIST
    afits = fits.open(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0])

    image = np.zeros(image_dim.value)

    # find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    detector_list = (np.arange(9)+1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size.value,
                                         image_dim=image_dim.value)

    # NOTE(review): CRPIX2 uses image_dim[0] rather than image_dim[1];
    # only matters for non-square images — confirm.
    dict_header = {
        "DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1": pixel_size[0],
        "NAXIS1": image_dim[0],
        "CRVAL1": xyoffset[0],
        "CRPIX1": image_dim[0].value/2 + 0.5,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": pixel_size[1],
        "NAXIS2": image_dim[1],
        "CRVAL2": xyoffset[1],
        "CRPIX2": image_dim[0].value/2 + 0.5,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
        "RSUN_OBS": solar_semidiameter_angular_size(time_range.center()).value,
        "RSUN_REF": sunpy.sun.constants.radius.value,
        "DSUN_OBS": sunearth_distance(time_range.center()) * sunpy.sun.constants.au.value
    }

    header = sunpy.map.MapMeta(dict_header)
    result_map = sunpy.map.Map(image, header)
    return result_map
""" NOAA LightCurve Tests """ from __future__ import absolute_import import pytest import sunpy.lightcurve from sunpy.time import TimeRange timerange_a = TimeRange('2004/01/01', '2007/01/01') class TestNOAAIndicesLightCurve(object): @pytest.mark.online def test_create(self): lc = sunpy.lightcurve.NOAAIndicesLightCurve.create() assert isinstance(lc, sunpy.lightcurve.NOAAIndicesLightCurve) @pytest.mark.online def test_isempty(self): lc = sunpy.lightcurve.NOAAIndicesLightCurve.create() assert lc.data.empty == False @pytest.mark.online def test_url(self): """Test creation with url""" url = 'ftp://ftp.swpc.noaa.gov/pub/weekly/RecentIndices.txt' lc1 = sunpy.lightcurve.NOAAIndicesLightCurve.create(url) assert isinstance(lc1, sunpy.lightcurve.NOAAIndicesLightCurve) @pytest.mark.online
import urllib from bs4 import BeautifulSoup from sunpy.util.scraper import Scraper from sunpy.time import TimeRange save_dir = "/Users/lahayes/QPP/interesting_event_2014-611/ssw_fits/" timerange = TimeRange('2014-06-11 05:30:00', '2014-06-11 05:36:00') url = 'https://hesperia.gsfc.nasa.gov/sdo/aia/2014/06/11/20140611_0528-0547/' resp = urllib.request.urlopen(url) soup = BeautifulSoup(resp) def find_url_waves(wave): file_link = [] for link in soup.find_all('a', href=True): if link['href'].endswith('{:d}_.fts'.format(wave)): file_link.append(link['href']) return file_link # Use Scraper! def list_files(url): resp = urllib.request.urlopen(url) soup = BeautifulSoup(resp)
def timerange_b(self):
    """Fixture: a one-day time range in June 2004."""
    return TimeRange('2004/06/03', '2004/06/04')
=============================== GOES Flare and HEK Plot Example =============================== An example showing how to combine GOES and HEK data """ import matplotlib.pyplot as plt from sunpy.lightcurve import GOESLightCurve from sunpy.time import TimeRange, parse_time from sunpy.net import hek ############################################################################### # Let's first grab GOES XRS data for a particular time of interest tr = TimeRange(['2011-06-07 04:00', '2011-06-07 12:00']) goes = GOESLightCurve.create(tr) ############################################################################### # Next lets grab the HEK data for this time from the NOAA Space Weather Prediction Center (SWPC) client = hek.HEKClient() flares_hek = client.query(hek.attrs.Time(tr.start, tr.end), hek.attrs.FL, hek.attrs.FRM.Name == 'SWPC') ############################################################################### # Finally lets plot everything together goes.peek() plt.axvline(parse_time(flares_hek[0].get('event_peaktime'))) plt.axvspan(parse_time(flares_hek[0].get('event_starttime')),
def timerange_a(self):
    """Fixture: a one-day time range in June 2008."""
    return TimeRange('2008/06/01', '2008/06/02')
def backprojection(calibrated_event_list, pixel_size=(1.,1.), image_dim=(64,64)):
    """Given a stacked calibrated event list fits file create a back
    projection image.

    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : string
        filename of a RHESSI calibrated event list
    detector : int
        the detector number
    pixel_size : 2-tuple
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : 2-tuple
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    >>> import sunpy.instr.rhessi as rhessi
    >>> map = rhessi.backprojection(sunpy.RHESSI_EVENT_LIST)
    >>> map.show()
    """
    # NOTE(review): the caller-supplied filename is unconditionally
    # replaced by the bundled event list — confirm this is intended.
    calibrated_event_list = sunpy.RHESSI_EVENT_LIST
    # NOTE(review): local name `fits` shadows any module named fits.
    fits = pyfits.open(calibrated_event_list)
    info_parameters = fits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0])

    image = np.zeros(image_dim)

    #find out what detectors were used
    det_index_mask = fits[1].data.field('det_index_mask')[0]
    # Detector numbers 1..9 where the mask is nonzero; zeros are skipped below.
    detector_list = (np.arange(9)+1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            # Accumulate the per-detector back projections into one image.
            image = image + _backproject(calibrated_event_list, detector=detector,
                                         pixel_size=pixel_size, image_dim=image_dim)

    # NOTE(review): CRPIX2 is computed from image_dim[0], not image_dim[1];
    # this only matters for non-square images — confirm.
    dict_header = {
        "DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1": pixel_size[0],
        "NAXIS1": image_dim[0],
        "CRVAL1": xyoffset[0],
        "CRPIX1": image_dim[0]/2 + 0.5,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": pixel_size[1],
        "NAXIS2": image_dim[1],
        "CRVAL2": xyoffset[1],
        "CRPIX2": image_dim[0]/2 + 0.5,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
        "RSUN_OBS": solar_semidiameter_angular_size(time_range.center()),
        "RSUN_REF": sun.radius,
        "DSUN_OBS": sunearth_distance(time_range.center()) * sunpy.sun.constants.au
    }

    header = sunpy.map.MapHeader(dict_header)
    result_map = sunpy.map.Map(image, header)
    return result_map
def time_range(self):
    """Returns the start and end times of the TimeSeries as a
    `~sunpy.time.TimeRange` object"""
    index = self.data.index
    return TimeRange(index.min(), index.max())
def test_fermi_gbm_invalid_peek(fermi_gbm_test_ts):
    """peek() on a truncated-to-empty GBM timeseries raises ValueError."""
    start = fermi_gbm_test_ts.time_range.start
    window = TimeRange(start - TimeDelta(2 * u.day),
                       start - TimeDelta(1 * u.day))
    empty_ts = fermi_gbm_test_ts.truncate(window)
    with pytest.raises(ValueError):
        empty_ts.peek()
def truncated_both_md(basic_ascending_append_md):
    """Fixture: metadata truncated at both the start and the end."""
    window = TimeRange('2010-01-02 20:59:57.468999',
                       '2010-01-03 1:59:56.091999')
    clipped = copy.deepcopy(basic_ascending_append_md)
    clipped._truncate(window)
    return clipped
def test_generic_ts_invalid_peek(generic_ts):
    """peek() on a truncated-to-empty generic timeseries raises ValueError."""
    start = generic_ts.time_range.start
    window = TimeRange(start - TimeDelta(2 * u.day),
                       start - TimeDelta(1 * u.day))
    empty_ts = generic_ts.truncate(window)
    with pytest.raises(ValueError):
        empty_ts.peek()
def test_truncated_start_tr(truncated_start_md):
    """Start-truncated metadata begins at the truncation time."""
    expected = TimeRange('2010-01-02 20:59:57.468999',
                         truncated_start_md.time_range.end)
    assert truncated_start_md.time_range == expected
import matplotlib as mpl
import datetime as datetime
from datetime import datetime, timedelta, date
import time
import matplotlib.pyplot as plt
import matplotlib.dates as dates

# NOTE(review): TimeRange, Fido, a, TimeSeries and hek are used below but
# not imported in this view — presumably imported elsewhere; confirm.

mpl.rc('font', size=12)

begin_time = datetime(2017, 9, 2, 15, 00, 14)
fin_time = datetime(2017, 9, 2, 17, 30, 14)

# Candidate tick times, one per minute of the window (only used by the
# commented-out plt.xticks call below).
firsts = []
for i in range(begin_time.minute, fin_time.minute):
    firsts.append(datetime(2017, 9, 2, 15, i, 14))
    # BUG FIX: removed the dead statement `i + 10`, which computed a
    # value and discarded it (a no-op).

plt.figure(figsize=(12, 9))

# Fetch GOES XRS data for the event window via Fido.
tr = TimeRange(['2017-09-02 10:25:00', '2017-09-02 19:05:00'])
results = Fido.search(a.Time(tr), a.Instrument('XRS'))
files = Fido.fetch(results)
goes = TimeSeries(files)

# SWPC flare listings for the same interval from the HEK.
client = hek.HEKClient()
flares_hek = client.search(hek.attrs.Time(tr.start, tr.end),
                           hek.attrs.FL, hek.attrs.FRM.Name == 'SWPC')

goes.peek()
#plt.xticks(firsts)
#plt.gca().xaxis.set_major_locator(dates.HourLocator())
plt.gca().xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))
plt.xlim(begin_time, fin_time)
plt.savefig("goes_02092017.png")
def test_truncated_both_tr(truncated_both_md):
    """Both-ends truncation yields exactly the requested window."""
    expected = TimeRange('2010-01-02 20:59:57.468999',
                         '2010-01-03 1:59:56.091999')
    assert truncated_both_md.time_range == expected
from bokeh.plotting import figure
from bokeh.resources import INLINE
from bokeh.util.string import encode_utf8
from bokeh.layouts import Column
from bokeh.models.formatters import DatetimeTickFormatter
from bokeh.models import ColumnDataSource, CustomJS, HoverTool

import sunpy.lightcurve as lc
from sunpy.time import TimeRange, parse_time
import datetime

# NOTE(review): `flask` is used below but not imported in this view —
# presumably imported earlier in the file; confirm.
app = flask.Flask(__name__)

# set some defaults
DEFAULT_TR = TimeRange(['2011-06-07 00:00', '2011-06-07 12:00'])
PLOT_HEIGHT = 300
PLOT_WIDTH = 900
TOOLS = 'pan,box_zoom,box_select,crosshair,undo,redo,save,reset'
ONE_HOUR = datetime.timedelta(seconds=60 * 60)
ONE_DAY = datetime.timedelta(days=1)
formatter = DatetimeTickFormatter(hours="%F %H:%M")
#stats = PreText(text='', width=PLOT_WIDTH)

# Module-level load of GOES data for the default time range.
goes = lc.GOESLightCurve.create(DEFAULT_TR)
# add time string for display of hover tool
goes.data['time_str'] = goes.data.index.strftime('%F %H:%M:%S')
source = ColumnDataSource(data=goes.data)
source_static = ColumnDataSource(data=goes.data)
def test_complex_append_tr(basic_1_md, basic_4_md, complex_append_md):
    """Complex append spans from the first block's start to the last's end."""
    expected = TimeRange(basic_1_md.time_range.start,
                         basic_4_md.time_range.end)
    assert complex_append_md.time_range == expected
from sunpy.time import TimeRange
from sunpy.lightcurve import GOESLightCurve

# Survey script: attempt to download one GOES lightcurve per day over the
# mission span and record the days for which no file is available.
dt = TimeRange('1981/01/10 00:00', '2014/04/18 23:00')

tr_not_found = []
time_ranges = dt.window(60*60*24, 60*60*24)  # one-day windows
total_days = len(time_ranges)
total_fails = 0

# missing files http://umbra.nascom.nasa.gov/goes/fits/2005/go1220051116.fits
# http://umbra.nascom.nasa.gov/goes/fits/2005/go1220051116.fits
for time_range in time_ranges:
    print(time_range.start())
    try:
        goes = GOESLightCurve.create(time_range)
        print(goes.data['xrsa'].max())
        print(goes.data['xrsb'].max())
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt, making this long-running loop impossible
        # to interrupt cleanly.
        print("File Not Found!")
        tr_not_found.append(time_range)
        total_fails = total_fails + 1

print('Number of fails:%i' % total_fails)
print('Number of tries:%i' % total_days)
print('Percent Fail: %d' % (float(total_fails)/total_days * 100))
for tr in tr_not_found:
    print(tr.start())
def backprojection(calibrated_event_list, pixel_size: u.arcsec=(1., 1.) * u.arcsec,
                   image_dim: u.pix=(64, 64) * u.pix):
    """
    Given a stacked calibrated event list fits file create a back
    projection image.

    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : str
        filename of a RHESSI calibrated event list
    pixel_size : `~astropy.units.Quantity` instance
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : `~astropy.units.Quantity` instance
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    This example is broken.
    >>> import sunpy.data
    >>> import sunpy.data.sample  # doctest: +REMOTE_DATA
    >>> import sunpy.instr.rhessi as rhessi
    >>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)   # doctest: +SKIP
    >>> map.peek()   # doctest: +SKIP
    """
    # import sunpy.map in here so that net and timeseries don't end up importing map
    import sunpy.map

    pixel_size = pixel_size.to(u.arcsec)
    # Integer pixel dimensions for array allocation below.
    image_dim = np.array(image_dim.to(u.pix).value, dtype=int)

    afits = sunpy.io.read_file(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0],
                           format='utime')

    image = np.zeros(image_dim)

    # find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    # Detector numbers 1..9 where the mask is nonzero; zeros are skipped below.
    detector_list = (np.arange(9)+1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            # Accumulate the per-detector back projections into one image.
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size.value,
                                         image_dim=image_dim)

    # NOTE(review): CRPIX2 is computed from image_dim[0], not image_dim[1];
    # this only matters for non-square images — confirm against upstream.
    dict_header = {
        "DATE-OBS": time_range.center.strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1": pixel_size[0],
        "NAXIS1": image_dim[0],
        "CRVAL1": xyoffset[0],
        "CRPIX1": image_dim[0]/2 + 0.5,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": pixel_size[1],
        "NAXIS2": image_dim[1],
        "CRVAL2": xyoffset[1],
        "CRPIX2": image_dim[0]/2 + 0.5,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
        "RSUN_OBS": solar_semidiameter_angular_size(time_range.center).value,
        "RSUN_REF": sunpy.sun.constants.radius.value,
        "DSUN_OBS": get_sunearth_distance(time_range.center).value * sunpy.sun.constants.au.value
    }

    result_map = sunpy.map.Map(image, dict_header)
    return result_map
def time_range(self):
    """Returns the start and end times of the LightCurve as a TimeRange
    object"""
    idx = self.data.index
    return TimeRange(idx[0], idx[-1])
def test_noaa_pre_invalid_peek(noaa_pre_test_ts):
    """peek() on a truncated-to-empty NOAA timeseries raises ValueError."""
    start = noaa_pre_test_ts.time_range.start
    window = TimeRange(start - TimeDelta(2 * u.day),
                       start - TimeDelta(1 * u.day))
    empty_ts = noaa_pre_test_ts.truncate(window)
    with pytest.raises(ValueError):
        empty_ts.peek()
import datetime as dt import matplotlib.dates as mdates from mpl_toolkits.axes_grid1 import make_axes_locatable from sk import LofarRaw start = time.time() filename = sys.argv[1] #observation parameters frange = [15, 60] #fname = 'udpoutput/jupiter-stokesI_0_2020-10-13T17:47:00_19563125244140' sbs = np.arange(76, 320) beam1_sbs = np.arange(76, 198) obs_mode = 3 trange = TimeRange("2020-10-13T17:45:00", 15. * u.min) """ fname = 'udpoutput/jupiter-stokesI_0_2020-10-13T17:47:00_19563125244140' sbs = np.arange(76, 319) obs_mode = 3 trange = TimeRange("2020-10-13T17:47:00", 10.*u.minute) """ #just plotting parameters xlabel = "Time from 2020/10/13 17:45:00.0000" ylabel = "Frequency (MHz)" title = sys.argv[1].split('/')[1] # read data raw = LofarRaw(fname=filename, sbs=sbs, obs_mode=obs_mode, frange=frange) # time resolution was scrubbed x16 in udpPM
def index():
    """Very simple embedding of a lightcurve chart.

    Flask view: reads `_from`/`_to` (and optional navigation buttons) from
    the query string, downloads the matching GOES lightcurve, renders two
    linked bokeh plots (XRS-B and XRS-A) and embeds them in `embed.html`.
    """
    # FLASK
    # Grab the inputs arguments from the URL
    # This is automated by the button
    args = flask.request.args

    # Default to the module-level DEFAULT_TR when no range was supplied.
    _from = str(args.get('_from', str(DEFAULT_TR.start)))
    _to = str(args.get('_to', str(DEFAULT_TR.end)))

    tr = TimeRange(parse_time(_from), parse_time(_to))

    # Navigation buttons shift the requested window.
    if 'next' in args:
        tr = tr.next()
    if 'prev' in args:
        tr = tr.previous()
    if 'next_hour' in args:
        tr = TimeRange(tr.start + ONE_HOUR, tr.end + ONE_HOUR)
    if 'next_day' in args:
        tr = TimeRange(tr.start + ONE_DAY, tr.end + ONE_DAY)
    if 'prev_hour' in args:
        tr = TimeRange(tr.start - ONE_HOUR, tr.end - ONE_HOUR)
    if 'prev_day' in args:
        tr = TimeRange(tr.start - ONE_DAY, tr.end - ONE_DAY)

    _from = str(tr.start)
    _to = str(tr.end)

    # get the data
    goes = lc.GOESLightCurve.create(tr)
    # resample to reduce the number of points for debugging
    goes.data = goes.data.resample("1T").mean()
    # add time string for display of hover tool
    goes.data['time_str'] = goes.data.index.strftime('%F %H:%M:%S')
    source = ColumnDataSource(data=goes.data)
    source_static = ColumnDataSource(data=goes.data)

    # now create the bokeh plots
    # XRS-B Plot
    fig1 = figure(title="GOES", tools=TOOLS, plot_height=PLOT_HEIGHT,
                  width=PLOT_WIDTH, x_axis_type='datetime',
                  y_axis_type="log", y_range=(10**-9, 10**-2),
                  toolbar_location="right")
    fig1.xaxis.formatter = formatter
    # BUG FIX: the legend previously read "xrsa 1-8 Angstrom" although this
    # trace plots the xrsb (1-8 Angstrom) channel.
    fig1.line('index', 'xrsb', source=source_static, color='red',
              line_width=2, legend="xrsb 1-8 Angstrom")

    # XRS-A Plot
    fig2 = figure(title="GOES", tools=TOOLS, plot_height=PLOT_HEIGHT,
                  width=PLOT_WIDTH, x_axis_type='datetime',
                  y_axis_type="log", y_range=(10**-9, 10**-2))
    fig2.xaxis.formatter = formatter
    fig2.line('index', 'xrsa', source=source_static, color='blue',
              line_width=2, legend="xrsa 0.5-4.0 Angstrom")

    # link the x-range for common panning
    fig2.x_range = fig1.x_range

    fig = Column(fig1, fig2)

    source_static.callback = CustomJS(code="""
        var inds = cb_obj.selected['1d'].indices;
        var d1 = cb_obj.data;
        var m = 0;
        if (inds.length == 0) { return; }
        for (i = 0; i < inds.length; i++) {
            d1['color'][inds[i]] = "red"
            if (d1['y'][inds[i]] > m) { m = d1['y'][inds[i]] }
        }
        console.log(m);
        cb_obj.trigger('change');
    """)

    hover = HoverTool()
    hover.tooltips = [("time", "@time_str"), ("xrsb", "@xrsb"),
                      ("xrsa", "@xrsa")]
    fig1.add_tools(hover)

    hover2 = HoverTool()
    hover2.tooltips = [("time", "@time_str"), ("xrsb", "@xrsb"),
                       ("xrsa", "@xrsa")]
    fig2.add_tools(hover2)

    # Configure resources to include BokehJS inline in the document.
    # For more details see:
    # http://bokeh.pydata.org/en/latest/docs/reference/resources_embedding.html#bokeh-embed
    js_resources = INLINE.render_js()
    css_resources = INLINE.render_css()

    # For more details see:
    # http://bokeh.pydata.org/en/latest/docs/user_guide/embedding.html#components
    script, div = components(fig, INLINE)
    html = flask.render_template(
        'embed.html',
        plot_script=script,
        plot_div=div,
        js_resources=js_resources,
        css_resources=css_resources,
        _from=_from,
        _to=_to,
    )
    return encode_utf8(html)
import warnings warnings.filterwarnings('ignore') """ import matplotlib.pyplot as plt from sunpy.timeseries import TimeSeries from sunpy.time import TimeRange, parse_time from sunpy.net import hek, Fido, attrs as a import numpy as np ############################################################################### # Let's first grab GOES XRS data for a particular time of interest tr = TimeRange(['2011-06-07 06:00', '2011-06-07 10:00']) results = Fido.search(a.Time(tr), a.Instrument('XRS')) results ############################################################################### # Then download the data and load it into a TimeSeries files = Fido.fetch(results) goes = TimeSeries(files) ############################################################################### # Next lets grab the HEK data for this time from the NOAA Space Weather # Prediction Center (SWPC) client = hek.HEKClient() flares_hek = client.search(hek.attrs.Time(tr.start, tr.end), hek.attrs.FL,