def accumulate(filelist, accum=2, nsuper=4, verbose=False):
    """Add up data in time and space. Accumulate 'accum' files in time,
    and then rebin the summed images into nsuper x nsuper superpixels."""
    # counter for the number of files
    j = 0
    # storage for the returned maps
    maps = []
    nfiles = len(filelist)
    while j + accum <= nfiles:
        i = 0
        while i < accum:
            filename = filelist[i + j]
            if verbose:
                print('File %(#)i out of %(nfiles)i' % {'#': i + j,
                                                        'nfiles': nfiles})
                print('Reading in file ' + filename)
            map1 = (sunpy.make_map(filename)).superpixel((nsuper, nsuper))
            if i == 0:
                m = map1
            else:
                m = m + map1
            i = i + 1
        j = j + accum
        maps.append(m)
        if verbose:
            print('Accumulated map list has length %(#)i' % {'#': len(maps)})
    return maps
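# Usage sketch for accumulate() above, assuming a directory of FITS files;
# the path and glob pattern are hypothetical.
import glob

filelist = sorted(glob.glob('/path/to/aia/*.fits'))
# Sum pairs of files in time and rebin each summed image into 4x4 superpixels.
maps = accumulate(filelist, accum=2, nsuper=4, verbose=True)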
def onDateChange(self, qdatetime):
    """Updates the images when the date is changed"""
    dt = qdatetime.toPyDateTime()
    r = hv.get_jp2_image(dt, sourceId=self._datasources['304']['sourceId'])
    g = hv.get_jp2_image(dt, sourceId=self._datasources['193']['sourceId'])
    b = hv.get_jp2_image(dt, sourceId=self._datasources['171']['sourceId'])
    self.red = sunpy.make_map(r)
    self.green = sunpy.make_map(g)
    self.blue = sunpy.make_map(b)
    self._updateRedPreview()
    self._updateGreenPreview()
    self._updateBluePreview()
    self._updateCompositeImage()
def onDateChange(self, qdatetime):
    """Updates the images when the date is changed"""
    dt = qdatetime.toPyDateTime()
    r = self._hv.download_jp2(dt, sourceId=self._datasources["304"]["sourceId"])
    g = self._hv.download_jp2(dt, sourceId=self._datasources["193"]["sourceId"])
    b = self._hv.download_jp2(dt, sourceId=self._datasources["171"]["sourceId"])
    self.red = sunpy.make_map(r)
    self.green = sunpy.make_map(g)
    self.blue = sunpy.make_map(b)
    self._updateRedPreview()
    self._updateGreenPreview()
    self._updateBluePreview()
    self._updateCompositeImage()
def _load_defaults(self):
    """Load initial images"""
    now = datetime.datetime.utcnow()
    self.ui.dateTimeEdit.setDateTime(now)
    r = hv.get_jp2_image(now, sourceId=self._datasources['304']['sourceId'])
    g = hv.get_jp2_image(now, sourceId=self._datasources['193']['sourceId'])
    b = hv.get_jp2_image(now, sourceId=self._datasources['171']['sourceId'])
    self.red = sunpy.make_map(r)
    self.green = sunpy.make_map(g)
    self.blue = sunpy.make_map(b)
    self._updateRedPreview()
    self._updateGreenPreview()
    self._updateBluePreview()
    self._createCompositeImage()
def _load_defaults(self):
    """Load initial images"""
    now = datetime.datetime.utcnow()
    self.ui.dateTimeEdit.setDateTime(now)
    r = self._hv.download_jp2(now, sourceId=self._datasources['304']['sourceId'])
    g = self._hv.download_jp2(now, sourceId=self._datasources['193']['sourceId'])
    b = self._hv.download_jp2(now, sourceId=self._datasources['171']['sourceId'])
    self.red = sunpy.make_map(r)
    self.green = sunpy.make_map(g)
    self.blue = sunpy.make_map(b)
    self._updateRedPreview()
    self._updateGreenPreview()
    self._updateBluePreview()
    self._createCompositeImage()
def setup_class(self):
    self.file = sunpy.AIA_171_IMAGE
    self.map = sunpy.make_map(self.file)
    self.fits = pyfits.open(self.file)
    self.fits.verify("silentfix")
    # include full comment
    comment = "".join(self.fits[0].header.get_comment())
    self.fits[0].header.update("COMMENT", comment)
def test_download_jp2(self):
    """Tests getJP2Image API method"""
    filepath = self.client.download_jp2('2020/01/01', observatory='SOHO',
                                        instrument='MDI', detector='MDI',
                                        measurement='continuum')
    try:
        map_ = sunpy.make_map(filepath)
    except sunpy.io.jp2.MissingOpenJPEGBinaryError:
        # We can't test JP2 decoding if the OpenJPEG binary is not available
        pass
    else:
        assert isinstance(map_, sunpy.Map)
def map_hg_to_hpc(map, xbin=10, ybin=10):
    """Take a map in heliographic coordinates (HG) and convert it to
    helioprojective cartesian coordinates (HPC)."""
    lon, lat = sunpy.wcs.convert_pixel_to_data(
        map.shape[1], map.shape[0],
        map.scale["x"], map.scale["y"],
        map.reference_pixel["x"], map.reference_pixel["y"],
        map.reference_coordinate["x"], map.reference_coordinate["y"],
        map.coordinate_system["x"])

    x_map, y_map = sunpy.wcs.convert_hg_hpc(
        map.rsun_meters, map.dsun,
        map.heliographic_latitude, map.carrington_longitude,
        lon, lat, units="arcsec")

    x_range = (np.nanmin(x_map), np.nanmax(x_map))
    y_range = (np.nanmin(y_map), np.nanmax(y_map))
    x = np.arange(x_range[0], x_range[1], xbin)
    y = np.arange(y_range[0], y_range[1], ybin)
    newgrid = np.meshgrid(x, y)

    points = np.vstack((x_map.ravel(), y_map.ravel())).T
    values = np.array(map).ravel()
    newdata = griddata(points, values, newgrid, method="linear")

    dict_header = {
        "CDELT1": xbin,
        "NAXIS1": len(x),
        "CRVAL1": x.min(),
        "CRPIX1": 1,
        "CRPIX2": 1,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": ybin,
        "NAXIS2": len(y),
        "CRVAL2": y.min(),
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
    }
    header = sunpy.map.MapHeader(dict_header)
    transformed_map = sunpy.make_map(newdata, header)
    return transformed_map
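# Usage sketch for map_hg_to_hpc() above: 'hg_map' is a hypothetical map
# already in heliographic coordinates (for example, the output of
# map_hpc_to_hg later in this collection), reprojected onto a 10 arcsec
# HPC grid.
hpc_map = map_hg_to_hpc(hg_map, xbin=10, ybin=10)
hpc_map.show()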
def prob_hough_detect(diffs, **ph_kwargs):
    """Use the probabilistic Hough transform to detect regions in the data
    that we will flag as being part of the EIT wave front."""
    detection = []
    for img in diffs:
        invTransform = sunpy.make_map(np.zeros(img.shape),
                                      img._original_header)
        lines = probabilistic_hough(img, **ph_kwargs)
        if lines is not None:
            for line in lines:
                pos1 = line[0]
                pos2 = line[1]
                fillLine(pos1, pos2, invTransform)
        detection.append(invTransform)
    return detection
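# fillLine() is called above but not defined in this excerpt. A minimal
# sketch of what it appears to do, assuming skimage-style line endpoints
# ((x0, y0), (x1, y1)) in pixel coordinates: flag every pixel along the
# segment in the map's data array.
import numpy as np

def fillLine(pos1, pos2, img):
    x0, y0 = pos1
    x1, y1 = pos2
    # sample the segment densely enough to hit every pixel at least once
    npts = int(max(abs(x1 - x0), abs(y1 - y0))) + 1
    xs = np.linspace(x0, x1, npts).round().astype(int)
    ys = np.linspace(y0, y1, npts).round().astype(int)
    img[ys, xs] = 255
    return img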
def add_tab(self, file_path, tab_title):
    """Adds a new tab having title 'tab_title' containing a TabPage widget
    whose FigureCanvas displays the data in 'file_path'."""
    try:
        map_object = sunpy.make_map(file_path)
        tab_page = TabPage(map_object, self.tabWidget)
        self.tabWidget.addTab(tab_page, tab_title)
        # Focus new tab
        self.tabWidget.setCurrentIndex(self.tabWidget.count() - 1)
        # Set color options dialog appropriately
        self.initialize_color_options()
        if self.tabWidget.count() == 1:
            self.colorOptionsDockWidget.show()
    except TypeError as e:
        file_err = QMessageBox()
        file_err.setText(str(e) + '\n' + file_path)
        file_err.exec_()
def get_jp2_image(date, directory=None, **kwargs):
    """
    Downloads the JPEG 2000 image that most closely matches the specified
    time and data source.

    Parameters
    ----------
    date : mixed
        A string or datetime object for the desired date of the image
    directory : string
        Directory to download the JPEG 2000 image to.

    Returns
    -------
    mixed : Returns a map representation of the requested image, or a URI
        if the "jpip" parameter is set to True.
    """
    params = {
        "action": "getJP2Image",
        "date": parse_time(date).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + "Z"
    }
    params.update(kwargs)

    # Submit request
    response = _request(params)

    # JPIP URL response
    if 'jpip' in kwargs:
        return response.read()

    # JPEG 2000 image response
    if directory is None:
        import tempfile
        directory = tempfile.gettempdir()

    filename = response.info()['Content-Disposition'][22:-1]
    filepath = os.path.join(directory, filename)

    f = open(filepath, 'wb')
    f.write(response.read())
    f.close()

    return sunpy.make_map(filepath)
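# Usage sketch for get_jp2_image() above; the extra keywords are passed
# straight through to the Helioviewer getJP2Image API, so either a
# sourceId (the value 10 here is illustrative) or the
# observatory/instrument/measurement keywords can be supplied.
aia = get_jp2_image('2011/06/07 06:33:02', sourceId=10)
aia.show()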
def clean(params, wave_maps, verbose=False):
    """Cleans a list of maps"""
    wave_maps_clean = []
    for current_wave_map in wave_maps:
        if verbose:
            print("Cleaning map at " + str(current_wave_map.date))
        data = np.asarray(current_wave_map)
        if params.get("clean_nans"):
            data[np.isnan(data)] = 0.
        cleaned_wave_map = sunpy.make_map(data,
                                          current_wave_map._original_header)
        cleaned_wave_map.name = current_wave_map.name
        cleaned_wave_map.date = current_wave_map.date
        wave_maps_clean += [cleaned_wave_map]
    return wave_maps_clean
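# Usage sketch for clean() above: 'params' only needs the "clean_nans"
# key for this step, and 'wave_maps' is assumed to be a list of maps from
# an earlier stage of the pipeline.
params = {"clean_nans": True}
wave_maps_clean = clean(params, wave_maps, verbose=True)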
def setup_class(self):
    self.file = sunpy.AIA_171_IMAGE
    self.map = sunpy.make_map(self.file)
    self.fits = pyfits.open(self.file)
    self.fits.verify('silentfix')
    # include full comment
    fits_comment = self.fits[0].header.get_comment()
    if isinstance(fits_comment[0], basestring):
        # PyFITS 2.x returns the comment values directly
        comments = [val for val in fits_comment]
    else:
        # PyFITS 3.x returns card objects
        comments = [card.value for card in fits_comment]
    comment = "".join(comments).strip()
    # touch data to apply scaling up front
    self.fits[0].data
    self.fits[0].header.update('COMMENT', comment)
def hough_detect(diffs, vote_thresh=15):
    """Use the Hough detection method to detect lines in the data.
    With enough lines, you can fill in the wave front."""
    detection = []
    print("Performing Hough transform on binary maps...")
    for img in diffs:
        # Perform the Hough transform on each of the difference maps
        transform, theta, d = hough(img)

        # Filter the Hough transform results and find the best lines in
        # the data. Keep detections that exceed the Hough vote threshold.
        indices = (transform > vote_thresh).nonzero()
        distances = d[indices[0]]
        theta = theta[indices[1]]
        n = len(indices[1])
        print("Found " + str(n) + " lines.")

        # Perform the inverse transform to get a series of rectangular
        # images that show where the wavefront is. Create a map with the
        # same shape and header as the difference map.
        invTransform = sunpy.make_map(np.zeros(img.shape),
                                      img._original_header)

        # Add up all the detected lines on top of each other. The idea
        # behind adding up all the lines is that pixels with a larger
        # number of detections are more likely to be in the wavefront.
        # Note that we are using the Hough transform - which is designed
        # to detect lines - to detect and fill in a region. You might see
        # this as an abuse of the Hough transform!
        for i in range(0, len(indices[1])):
            nextLine = htLine(distances[i], theta[i],
                              np.zeros(shape=img.shape))
            invTransform = invTransform + nextLine

        detection.append(invTransform)

    return detection
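# Usage sketch for hough_detect() above, assuming 'diffs' is the list of
# binary difference maps produced earlier in the pipeline; the vote
# threshold is data dependent.
detection = hough_detect(diffs, vote_thresh=15)
detection[0].show()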
def main():
    directory = '/home/ireland/Data/AIA_Data/test2/'
    mc = sunpy.make_map(directory).derotate_by_center_of_fov()
    return mc
from __future__ import absolute_import

#pylint: disable=E1103

import pyfits
import sunpy
from sunpy.wcs import wcs as wcs
from numpy.testing import assert_array_almost_equal

fits = pyfits.open(sunpy.AIA_171_IMAGE)
header = fits[0].header
img = sunpy.make_map(sunpy.AIA_171_IMAGE)


def test_conv_hpc_hcc():
    coord = [40.0, 32.0]
    result = wcs.convert_hpc_hcc(img.rsun_arcseconds, img.dsun,
                                 img.units['x'], img.units['y'],
                                 coord[0], coord[1])
    assert_array_almost_equal(result, [28748691, 22998953], decimal=3)


def test_conv_hcc_hpc():
    coord = [34.0, 132.0]
    result = wcs.convert_hcc_hpc(img.rsun_arcseconds, img.dsun,
                                 coord[0], coord[1])
    assert_array_almost_equal(result, [1.3140782e-08, 5.1017152e-08],
                              decimal=2)


def test_conv_hcc_hg():
    coord = [13.0, 58.0]
    result = wcs.convert_hcc_hg(img.rsun_arcseconds,
                                img.heliographic_latitude,
                                img.heliographic_longitude,
                                coord[0], coord[1])
# The opening lines of htLine() are missing from this excerpt; the header
# below is reconstructed from the call sites, htLine(distance, angle, img),
# and the 'eps' tolerance is an assumed small value.
def htLine(distance, angle, img):
    ny, nx = img.shape
    eps = 1.0 / float(ny)
    if abs(np.sin(angle)) > eps:
        # line is not vertical: draw y = gradient * x + constant
        gradient = -np.cos(angle) / np.sin(angle)
        constant = distance / np.sin(angle)
        for x in range(0, nx):
            y = gradient * x + constant
            if y <= ny - 1 and y >= 0:
                img[y, x] = 255
    else:
        # line is vertical at x = distance
        img[:, distance] = 255
    return img

m2deg = 360.0 / (2 * 3.1415926 * 6.96e8)

cube = sunpy.make_map("/Users/schriste/Downloads/eitdata_19970512/*.fits",
                      type="cube")

dmap = cube[2] - cube[1]
dmap.show()

# need an even number of maps so get rid of one
cube = cube[0:4]

import util

tmap = util.map_hpc_to_hg(dmap)
ttmap = util.map_hpc_to_hg_rotate(dmap, epi_lon=9.5, epi_lat=20.44)

input_maps = []
for map in cube:
    print("Unraveling map at " + str(map.date))
print("Found " + str(n) + " lines.") # Perform the inverse transform to get a series of rectangular # images that show where the wavefront is. invTransform = sunpy.map.BaseMap(input_maps[i+1]) invTransform.data = np.zeros(imgShape) for i in range(0,n): nextLine = htLine( distances[i],theta[i], np.zeros(shape=imgShape) ) invTransform = invTransform + nextLine # Dump the inverse transform back into a series of maps detection.append(invTransform) visualize(diffs) visualize(detection) from matplotlib import cm from matplotlib import colors wmap = sunpy.make_map(wave_maps[max_steps/2], wave_maps[0], type = "composite") wmap.set_colors(1, cm.Reds) wmap.set_alpha(1,0.1) #wmap.set_norm(1, colors.Normalize(0.1,1)) wmap.show() pmap = sunpy.make_map(detection[max_steps/2],input_maps[max_steps/2], type ="composite") pmap.set_alpha(1,0.6) pmap.set_colors(0, cm.Blues) pmap.set_colors(1, cm.Reds) pmap.show()
# from the positive diffmap and the negative diffmap. May get more than
# 2 lines due to ties in the accumulator
#indices = ((transform == transform.max()) + (transform2 == transform2.max())).nonzero()
indices = ((transform > votethresh) + (transform2 > votethresh)).nonzero()
distances = d[indices[0]]
theta = theta[indices[1]]
n = len(indices[1])
print("Found " + str(n) + " lines.")

# Perform the inverse transform to get a series of rectangular
# images that show where the wavefront is.
invTransform = sunpy.make_map(np.zeros(imgShape),
                              input_maps[i + 1]._original_header)
for i in range(0, n):
    nextLine = htLine(distances[i], theta[i], np.zeros(shape=imgShape))
    invTransform = invTransform + nextLine

# Dump the inverse transform back into a series of maps
detection.append(invTransform)

visualize(detection)
from __future__ import absolute_import

#pylint: disable=E1103

import pyfits
import sunpy
from sunpy.wcs import wcs as wcs
from numpy.testing import assert_array_almost_equal
import numpy as np

fits = pyfits.open(sunpy.AIA_171_IMAGE)
header = fits[0].header
img = sunpy.make_map(sunpy.AIA_171_IMAGE)

# the following known_answers come from equivalent queries to the IDL
# WCS implementation (http://hesperia.gsfc.nasa.gov/ssw/gen/idl/wcs/)


def test_conv_hpc_hcc():
    coord = [40.0, 32.0]
    result = wcs.convert_hpc_hcc(img.rsun_meters, img.dsun,
                                 img.units['x'], img.units['y'],
                                 coord[0], coord[1])
    known_answer = [28748691, 22998953]
    magnitude = np.floor(np.log10(np.abs(known_answer)))
    assert_array_almost_equal(result * 10**(-magnitude),
                              known_answer * 10**(-magnitude), decimal=2)


def test_conv_hcc_hpc():
    coord = [34.0, 132.0]
def map_hg_to_hpc_rotate(map, epi_lon=90, epi_lat=0, xbin=2.4, ybin=2.4):
    """Transform raw data in HG' coordinates to HPC coordinates.

    HG' = HG, except centered at the wave epicenter.
    """
    # Origin grid, HG'
    lon_grid, lat_grid = sunpy.wcs.convert_pixel_to_data(
        map.shape[1], map.shape[0],
        map.scale['x'], map.scale['y'],
        map.reference_pixel['x'], map.reference_pixel['y'],
        map.reference_coordinate['x'], map.reference_coordinate['y'],
        map.coordinate_system['x'])

    # Origin grid, HG' to HCC'
    # HCC' = HCC, except centered at the wave epicenter
    x, y, z = sunpy.wcs.convert_hg_hcc_xyz(map.rsun_meters,
                                           map.heliographic_latitude,
                                           map.carrington_longitude,
                                           lon_grid, lat_grid)

    # Origin grid, HCC' to HCC''
    # Moves the wave epicenter to the initial conditions
    # HCC'' = HCC, except assuming that HGLT_OBS = 0
    zpp, xpp, ypp = euler_zyz((z, x, y), (epi_lon, 90. - epi_lat, 0.))

    # Origin grid, HCC to HPC (arcsec)
    #xx, yy = sunpy.wcs.convert_hcc_hpc(current_wave_map.header, xpp, ypp)
    xx, yy = sunpy.wcs.convert_hcc_hpc(map.rsun_meters, map.dsun, xpp, ypp)
    xx *= 3600
    yy *= 3600

    # Destination HPC grid
    hpcx_range = (np.nanmin(xx), np.nanmax(xx))
    hpcy_range = (np.nanmin(yy), np.nanmax(yy))
    hpcx = np.arange(hpcx_range[0], hpcx_range[1], xbin)
    hpcy = np.arange(hpcy_range[0], hpcy_range[1], ybin)
    newgrid = np.meshgrid(hpcx, hpcy)

    # Coordinate positions (HPC) with corresponding map data
    points = np.vstack((xx.ravel(), yy.ravel())).T
    values = np.array(map).ravel()

    # 2D interpolation from origin grid to destination grid
    newdata = griddata(points[zpp.ravel() >= 0], values[zpp.ravel() >= 0],
                       newgrid, method="linear")

    dict_header = {
        "CDELT1": xbin,
        "NAXIS1": len(hpcx),
        "CRVAL1": hpcx.min(),
        "CRPIX1": 1,  # this makes hpcx.min() the center of the first bin
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": ybin,
        "NAXIS2": len(hpcy),
        "CRVAL2": hpcy.min(),
        "CRPIX2": 1,  # this makes hpcy.min() the center of the first bin
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": 0,
        "HGLN_OBS": 0,
    }
    header = sunpy.map.MapHeader(dict_header)

    transformed_map = sunpy.make_map(newdata, header)
    transformed_map.name = map.name
    transformed_map.date = map.date

    return transformed_map
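# euler_zyz() is used by the coordinate transforms in this collection but
# is not defined in these excerpts. A minimal sketch of a Z-Y-Z Euler
# rotation of three stacked coordinate arrays by angles in degrees; the
# component ordering and sign conventions are assumptions based on the
# call sites.
import numpy as np

def euler_zyz(xyz, angles):
    a, b, c = np.deg2rad(angles)

    def rot_z(t):
        return np.array([[np.cos(t), -np.sin(t), 0.],
                         [np.sin(t), np.cos(t), 0.],
                         [0., 0., 1.]])

    def rot_y(t):
        return np.array([[np.cos(t), 0., np.sin(t)],
                         [0., 1., 0.],
                         [-np.sin(t), 0., np.cos(t)]])

    # compose the three rotations, then apply them to the flattened arrays
    rot = rot_z(a).dot(rot_y(b)).dot(rot_z(c))
    flat = np.vstack([np.asarray(v).ravel() for v in xyz])
    rotated = rot.dot(flat)
    return tuple(rotated[i].reshape(np.shape(xyz[i])) for i in range(3))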
###########################################################################
###########################################################################
def fcm_rm_sat_pix(map_list, threshold):
    """ """
###########################################################################
###########################################################################

fits_path = os.path.join(PATH_2_STACKS, "Working_old",
                         "ivo:__helio-informatics.org__FL_FlareDetective-TriggerModule_20120501_014529_2012-05-01T01:25:47.070_1",
                         "fits")
file_list = glob.glob(os.path.join(fits_path, '*.fits'))
map_obj = sunpy.make_map(file_list[0])

out_map_list = []
for files in file_list[0:50]:
    temp_map = sunpy.make_map(files)
    temp_map = temp_map.submap([bbc[0], bbc[2]], [bbc[1], bbc[3]])
    out_map_list.append(temp_map)

mask = abs(out_map_list[0].base.copy() * 0.)
mask[out_map_list[0].shape[1] - 300:out_map_list[0].shape[1] - 100,
     out_map_list[0].shape[0] - 100:out_map_list[0].shape[0] - 50] = AIA_sat_thresh
mask[:, out_map_list[0].shape[0] - 90:out_map_list[0].shape[0] - 75] = AIA_sat_thresh

for maps in out_map_list:
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import sunpy
import numpy as np
import matplotlib.pyplot as plt
from scipy import ndimage
from sunpy.map.sources.sdo import AIAMap

import util

"""Prototype code"""

cube = sunpy.make_map("/Users/schriste/Downloads/data2/AIA2010*",
                      type="cube")[:, 512:2048, 2048:3584]

# Blur the maps
blurred = []
for map_ in cube:
    blurred.append(AIAMap(ndimage.gaussian_filter(map_, 10), map_.header))

# Difference of two of the blurred maps
diff = blurred[2] - blurred[4]

tmap = util.map_hpc_to_hg(diff)

# Labeling
labels, nr_nuclei = ndimage.label(diff.clip(diff.min(), 0))
print("Number of nuclei found: %d" % nr_nuclei)

# Watershed
areas = np.array([(labels == s).sum() for s in np.arange(nr_nuclei) + 1])
# Program to test red noise stuff
import sunpy
import numpy as np
from matplotlib import pyplot as plt

mc = sunpy.make_map('~/Data/rednoise/test_171', type='cube')

# size of the map
sh = mc[0].shape

# pick an element
a = int(np.rint((sh[0] - 1) * np.random.uniform()))
b = int(np.rint((sh[1] - 1) * np.random.uniform()))

# 304 data
em1 = mc.get_lightcurve_by_array_index(a, b)
name = mc[0].name
d = em1.data[name]
pwr1 = (np.abs(np.fft.rfft(d)))**2
norm1 = np.array(np.var(d))[0][0]

# Number of elements
n = len(d)

# Frequencies
freq = np.fft.fftfreq(n, 12.0)
pfreq = freq[freq > 0]
#!/usr/bin/env python
"""Based on
http://matplotlib.sourceforge.net/examples/animation/dynamic_image2.html"""
import os
import sunpy
import matplotlib.pyplot as plt
import matplotlib.animation as animation

imagedir = '/home//hwinter/programs/Flare_Detective/test_files'
filenames = sorted(os.listdir(imagedir))

fig = plt.figure()

ims = []
for x in filenames:
    print("Processing %s" % x)
    im = sunpy.make_map(os.path.join(imagedir, x)).resample((1024, 1024))
    extent = im.xrange + im.yrange
    axes = plt.imshow(im, origin='lower', extent=extent, norm=im.norm(),
                      cmap=im.cmap)
    ims.append([axes])

ani = animation.ArtistAnimation(fig, ims, interval=50, blit=True,
                                repeat_delay=1000)
ani.save('output.mp4', fps=20)
plt.show()
def map_hpc_to_hg_rotate(map, epi_lon=0, epi_lat=0, xbin=1, ybin=1):
    """Take a map (like an AIA map) and convert it from HPC to HG."""
    x, y = sunpy.wcs.convert_pixel_to_data(
        map.shape[1], map.shape[0],
        map.scale["x"], map.scale["y"],
        map.reference_pixel["x"], map.reference_pixel["y"],
        map.reference_coordinate["x"], map.reference_coordinate["y"],
        map.coordinate_system["x"])

    hccx, hccy, hccz = wcs.convert_hpc_hcc_xyz(map.rsun_meters, map.dsun,
                                               map.units["x"], map.units["y"],
                                               x, y)
    rot_hccz, rot_hccx, rot_hccy = euler_zyz((hccz, hccx, hccy),
                                             (0.0, epi_lat - 90.0, -epi_lon))
    lon_map, lat_map = wcs.convert_hcc_hg(map.rsun_meters,
                                          map.heliographic_latitude,
                                          map.heliographic_longitude,
                                          rot_hccx, rot_hccy, z=rot_hccz)

    lon_bin = xbin
    lat_bin = ybin
    lon_range = (np.nanmin(lon_map), np.nanmax(lon_map))
    lat_range = (np.nanmin(lat_map), np.nanmax(lat_map))
    lon = np.arange(lon_range[0], lon_range[1], lon_bin)
    lat = np.arange(lat_range[0], lat_range[1], lat_bin)
    newgrid = np.meshgrid(lon, lat)

    ng_xyz = wcs.convert_hg_hcc_xyz(map.rsun_meters,
                                    map.heliographic_latitude,
                                    map.heliographic_longitude,
                                    newgrid[0], newgrid[1])
    ng_zp, ng_xp, ng_yp = euler_zyz((ng_xyz[2], ng_xyz[0], ng_xyz[1]),
                                    (epi_lon, 90.0 - epi_lat, 0.0))

    points = np.vstack((lon_map.ravel(), lat_map.ravel())).T
    values = np.array(map).ravel()

    # get rid of all of the bad (nan) indices (i.e. those off of the sun)
    index = np.isfinite(points[:, 0]) * np.isfinite(points[:, 1])
    points = points[index]
    values = values[index]

    newdata = griddata(points, values, newgrid, method="linear")
    newdata[ng_zp < 0] = np.nan

    dict_header = {
        "CDELT1": lon_bin,
        "NAXIS1": len(lon),
        "CRVAL1": lon.min(),
        "CRPIX1": 1,
        "CRPIX2": 1,
        "CUNIT1": "deg",
        "CTYPE1": "HG",
        "CDELT2": lat_bin,
        "NAXIS2": len(lat),
        "CRVAL2": lat.min(),
        "CUNIT2": "deg",
        "CTYPE2": "HG",
    }
    header = sunpy.map.MapHeader(dict_header)
    transformed_map = sunpy.make_map(newdata, header)
    return transformed_map
def transform(params, wave_maps, verbose=False):
    """Transform raw data in HG' coordinates to HPC coordinates.

    HG' = HG, except centered at the wave epicenter.
    """
    from scipy.interpolate import griddata

    hglt_obs = params["hglt_obs"]
    rotation = params["rotation"]

    epi_lat = params["epi_lat"]
    epi_lon = params["epi_lon"]

    hpcx_min = params["hpcx_min"]
    hpcx_max = params["hpcx_max"]
    hpcx_bin = params["hpcx_bin"]
    hpcy_min = params["hpcy_min"]
    hpcy_max = params["hpcy_max"]
    hpcy_bin = params["hpcy_bin"]

    hpcx_num = int(round((hpcx_max - hpcx_min) / hpcx_bin))
    hpcy_num = int(round((hpcy_max - hpcy_min) / hpcy_bin))

    wave_maps_transformed = []

    dict_header = {
        "CDELT1": hpcx_bin,
        "NAXIS1": hpcx_num,
        "CRVAL1": hpcx_min,
        "CRPIX1": 0.5,  # this makes hpcx_min the left edge of the first bin
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": hpcy_bin,
        "NAXIS2": hpcy_num,
        "CRVAL2": hpcy_min,
        "CRPIX2": 0.5,  # this makes hpcy_min the left edge of the first bin
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": hglt_obs,
        "HGLN_OBS": 0,
    }
    header = sunpy.map.MapHeader(dict_header)

    start_date = wave_maps[0].date

    # Origin grid, HG'
    #lon_grid, lat_grid = sunpy.wcs.convert_pixel_to_data(wave_maps[0].header)
    # Changed call to keep up to date with the updated wcs library
    lon_grid, lat_grid = sunpy.wcs.convert_pixel_to_data(
        size=[wave_maps[0].shape[1], wave_maps[0].shape[0]],
        scale=[wave_maps[0].scale['x'], wave_maps[0].scale['y']],
        reference_pixel=[wave_maps[0].reference_pixel['x'],
                         wave_maps[0].reference_pixel['y']],
        reference_coordinate=[wave_maps[0].reference_coordinate['x'],
                              wave_maps[0].reference_coordinate['y']])

    # Origin grid, HG' to HCC'
    # HCC' = HCC, except centered at the wave epicenter
    #x, y, z = sunpy.wcs.convert_hg_hcc_xyz(wave_maps[0].header,
    #                                       lon_grid, lat_grid)
    x, y, z = sunpy.wcs.convert_hg_hcc(
        lon_grid, lat_grid,
        b0_deg=wave_maps[0].heliographic_latitude,
        l0_deg=wave_maps[0].carrington_longitude,
        z=True)

    # Origin grid, HCC' to HCC''
    # Moves the wave epicenter to the initial conditions
    # HCC'' = HCC, except assuming that HGLT_OBS = 0
    zxy_p = euler_zyz((z, x, y), (epi_lon, 90. - epi_lat, 0.))

    # Destination HPC grid
    #hpcx_grid, hpcy_grid = sunpy.wcs.convert_pixel_to_data(header)
    # Updated to use the new wcs function calls
    size = [header['NAXIS1'], header['NAXIS2']]
    scale = [header['CDELT1'], header['CDELT2']]
    reference_pixel = [header['CRPIX1'], header['CRPIX2']]
    reference_coordinate = [header['CRVAL1'], header['CRVAL2']]
    hpcx_grid, hpcy_grid = sunpy.wcs.convert_pixel_to_data(
        size, scale, reference_pixel, reference_coordinate)
    # laura note - ok to here, same outputs as old!
    for current_wave_map in wave_maps:
        print("Transforming map at " + str(current_wave_map.date))

        # Origin grid, HCC'' to HCC
        # Moves the observer to HGLT_OBS and adds rigid solar rotation
        td = current_wave_map.date - start_date
        total_seconds = (td.microseconds +
                         (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
        zpp, xpp, ypp = euler_zyz(zxy_p,
                                  (0., hglt_obs, total_seconds * rotation))

        # Origin grid, HCC to HPC (arcsec)
        #xx, yy = sunpy.wcs.convert_hcc_hpc(current_wave_map.header, xpp, ypp)
        xx, yy = sunpy.wcs.convert_hcc_hpc(xpp, ypp, current_wave_map.dsun)

        # Coordinate positions (HPC) with corresponding map data
        points = np.vstack((xx.ravel(), yy.ravel())).T
        values = np.array(current_wave_map).ravel()

        # 2D interpolation from origin grid to destination grid
        grid = griddata(points[zpp.ravel() >= 0], values[zpp.ravel() >= 0],
                        (hpcx_grid, hpcy_grid), method="linear")

        transformed_wave_map = sunpy.make_map(grid, header)
        transformed_wave_map.name = current_wave_map.name
        transformed_wave_map.date = current_wave_map.date
        wave_maps_transformed += [transformed_wave_map]

    return wave_maps_transformed
else:
    extension = ''

roots = {"pickle": '~/ts/pickle' + extension,
         "image": '~/ts/img' + extension,
         "movie": '~/ts/movies' + extension}
save_locations = aia_specific.save_location_calculator(roots, branches)

ident = aia_specific.ident_creator(branches)

# Load in the single file we need
nt = 1800
layer_index = nt / 2

if wave == '171':
    filename = 'AIA20120923_025959_0171.fits'
    omap = sunpy.make_map(os.path.join(aia_data_location['aiadata'], filename))

if wave == '193':
    filename = 'AIA20120923_030006_0193.fits'
    omap = sunpy.make_map(os.path.join(aia_data_location['aiadata'], filename))

# Define regions in the datacube
# y locations are first, x locations second
if corename == 'shutdownfun6_6hr':
    print('Using %s and %f' % (corename, layer_index))
    regions = {'highlimb': [[100, 150], [50, 150]],
               'lowlimb': [[100, 150], [200, 250]],
               'crosslimb': [[100, 150], [285, 340]],
               'loopfootpoints1': [[90, 155], [515, 620]],
               'loopfootpoints2': [[20, 90], [793, 828]],
               'moss': [[45, 95], [888, 950]]}
# in the data
#indices = (transform > votethresh).nonzero()
#indices = (transform == transform.max()).nonzero()

# instead of getting all lines above some threshold, just get the
# *strongest* line only from the positive diffmap and the negative
# diffmap. May get more than 2 lines due to ties in the accumulator
indices = ((transform == transform.max()) +
           (transform2 == transform2.max())).nonzero()
distances = d[indices[0]]
theta = theta[indices[1]]
n = len(indices[1])
print("Found " + str(n) + " lines.")

# Perform the inverse transform to get a series of rectangular
# images that show where the wavefront is.
invTransform = sunpy.make_map(np.zeros(imgShape),
                              input_maps[i + 1]._original_header)
for i in range(0, n):
    nextLine = htLine(distances[i], theta[i], np.zeros(shape=imgShape))
    invTransform = invTransform + nextLine

# Dump the inverse transform back into a series of maps
detection.append(invTransform)

visualize(diffs)
visualize(detection)

from matplotlib import cm
from matplotlib import colors
def fit_wavefront(diffs, detection):
    """Fit the wavefront that has been detected by the Hough transform.
    The simplest case is to fit along the y-direction for some x or
    range of x."""
    dims = diffs[0].shape
    answers = []
    wavefront_maps = []
    for i in range(0, len(diffs)):
        if detection[i].max() == 0.0:
            # if the 'detection' array is empty then skip this image
            fit_map = sunpy.make_map(np.zeros(dims),
                                     diffs[0]._original_header)
            print("Nothing detected in image " + str(i) + ". Skipping.")
            answers.append([])
            wavefront_maps.append(fit_map)
        else:
            # if the 'detection' array is not empty, then fit the
            # wavefront in the image
            img = diffs[i]
            fit_map = np.zeros(dims)

            # get the independent variable for the columns in the image
            x = (np.linspace(0, dims[0], num=dims[0]) * img.scale['y']) + img.yrange[0]

            # use 'detection' to guess the centroid of the Gaussian fit
            # function
            guess_index = detection[i].argmax()
            guess_index = np.unravel_index(guess_index, detection[i].shape)
            guess_position = x[guess_index[0]]

            print("Analysing wavefront in image " + str(i))
            column_fits = []
            # for each column in the image, fit a function along the
            # y-direction to find the wave parameters
            for n in range(0, dims[1]):
                # guess the amplitude of the Gaussian fit from the
                # difference image
                guess_amp = np.float(img[guess_index[0], n])

                # put the guess input parameters into a vector
                guess_params = [guess_amp, guess_position, 5]

                # get the current image column
                y = img[:, n]
                y = y.flatten()

                # call Albert's fitting function
                result = util.fitfunc(x, y, 'Gaussian', guess_params)

                # define a Gaussian function. Messy - clean this up later
                gaussian = lambda p, x: p[0] / np.sqrt(2. * np.pi) / p[2] * np.exp(-((x - p[1]) / p[2])**2 / 2.)

                # Draw the Gaussian fit for the current column and save it
                # in fit_map; save the best-fit parameters in column_fits.
                # We only want to store the successful fits and discard
                # the others. 'result' contains a pass/fail integer:
                # keep successes (== 1).
                if result[1] == 1:
                    # if we got a pass integer, perform some other checks
                    # to eliminate unphysical values
                    result = check_fit(result)
                    column_fits.append(result)
                    if result != []:
                        fit_column = gaussian(result[0], x)
                    else:
                        fit_column = np.zeros(len(x))
                else:
                    # if the fit failed then save as zeros/null values
                    result = []
                    column_fits.append(result)
                    fit_column = np.zeros(len(x))

                # save the drawn column in fit_map
                fit_map[:, n] = fit_column

            # save the fit parameters for the image in 'answers' and the
            # drawn map in 'wavefront_maps'
            fit_map = sunpy.make_map(fit_map, diffs[0]._original_header)
            answers.append(column_fits)
            wavefront_maps.append(fit_map)

    # now get the mean values of the fitted wavefront, averaged over all x
    #average_fits = []
    #for ans in answers:
    #    cleaned_answers = []
    #    for k in range(0, len(ans)):
    #        # ans[k][1] contains a pass/fail integer. Keep successes
    #        # (== 1), discard the rest.
    #        if ans[k][1] == 1:
    #            tmp = ans[k][0]
    #            cleaned_answers.append(tmp)
    #        else:
    #            cleaned_answers.append([])
    #    # get the mean of each fit parameter for this image and store it
    #    #average_fits.append(np.mean(g, axis=0))

    return answers, wavefront_maps
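# Usage sketch for fit_wavefront() above: feed it the difference maps and
# the detections from an earlier Hough step; it returns the per-column
# Gaussian fit parameters and the drawn wavefront maps.
answers, wavefront_maps = fit_wavefront(diffs, detection)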
def map_hpc_to_hg_rotate(map, epi_lon=0, epi_lat=90, lon_bin=1, lat_bin=1):
    """Transform raw data in HPC coordinates to HG' coordinates.

    HG' = HG, except centered at the wave epicenter.
    """
    x, y = sunpy.wcs.convert_pixel_to_data(
        map.shape[1], map.shape[0],
        map.scale['x'], map.scale['y'],
        map.reference_pixel['x'], map.reference_pixel['y'],
        map.reference_coordinate['x'], map.reference_coordinate['y'],
        map.coordinate_system['x'])

    hccx, hccy, hccz = wcs.convert_hpc_hcc_xyz(map.rsun_meters, map.dsun,
                                               map.units['x'], map.units['y'],
                                               x, y)
    rot_hccz, rot_hccx, rot_hccy = euler_zyz((hccz, hccx, hccy),
                                             (0., epi_lat - 90., -epi_lon))
    lon_map, lat_map = wcs.convert_hcc_hg(map.rsun_meters,
                                          map.heliographic_latitude,
                                          map.heliographic_longitude,
                                          rot_hccx, rot_hccy, z=rot_hccz)

    lon_range = (np.nanmin(lon_map), np.nanmax(lon_map))
    lat_range = (np.nanmin(lat_map), np.nanmax(lat_map))
    lon = np.arange(lon_range[0], lon_range[1], lon_bin)
    lat = np.arange(lat_range[0], lat_range[1], lat_bin)
    newgrid = np.meshgrid(lon, lat)

    ng_xyz = wcs.convert_hg_hcc_xyz(map.rsun_meters,
                                    map.heliographic_latitude,
                                    map.heliographic_longitude,
                                    newgrid[0], newgrid[1])
    ng_zp, ng_xp, ng_yp = euler_zyz((ng_xyz[2], ng_xyz[0], ng_xyz[1]),
                                    (epi_lon, 90. - epi_lat, 0.))

    points = np.vstack((lon_map.ravel(), lat_map.ravel())).T
    values = np.array(map).ravel()

    # get rid of all of the bad (nan) indices (i.e. those off of the sun)
    index = np.isfinite(points[:, 0]) * np.isfinite(points[:, 1])
    points = points[index]
    values = values[index]

    newdata = griddata(points, values, newgrid, method="linear")
    newdata[ng_zp < 0] = np.nan

    dict_header = {
        'CDELT1': lon_bin,
        'NAXIS1': len(lon),
        'CRVAL1': lon.min(),
        'CRPIX1': 1,
        'CRPIX2': 1,
        'CUNIT1': "deg",
        'CTYPE1': "HG",
        'CDELT2': lat_bin,
        'NAXIS2': len(lat),
        'CRVAL2': lat.min(),
        'CUNIT2': "deg",
        'CTYPE2': "HG"
    }
    header = sunpy.map.MapHeader(dict_header)

    transformed_map = sunpy.make_map(newdata, header)
    transformed_map.name = map.name
    transformed_map.date = map.date

    return transformed_map
# The opening lines of htLine() are missing from this excerpt too; the
# header is reconstructed from the call sites as in the earlier copy of
# this script, with an assumed 'eps' tolerance.
def htLine(distance, angle, img):
    ny, nx = img.shape
    eps = 1.0 / float(ny)
    if abs(np.sin(angle)) > eps:
        gradient = -np.cos(angle) / np.sin(angle)
        constant = distance / np.sin(angle)
        for x in range(0, nx):
            y = gradient * x + constant
            if y <= ny - 1 and y >= 0:
                img[y, x] = 255
    else:
        img[:, distance] = 255
    return img

m2deg = 360. / (2 * 3.1415926 * 6.96e8)

cube = sunpy.make_map("/home/hayesla/fits/data/", type="cube")

dmap = cube[2] - cube[1]
dmap.show()

# need an even number of maps so get rid of one
cube = cube[0:4]

import util

tmap = util.map_hpc_to_hg(dmap)
ttmap = util.map_hpc_to_hg_rotate(dmap, epi_lon=9.5, epi_lat=20.44)

input_maps = []
for map in cube:
    print("Unraveling map at " + str(map.date))
def setup_class(self):
    self.file = sunpy.AIA_171_IMAGE
    self.map = sunpy.make_map(self.file)
    self.fits = pyfits.open(self.file)
    self.fits.verify('silentfix')
def map_hpc_to_hg(map, xbin=1, ybin=1):
    """Take a map (like an AIA map) and convert it from HPC to HG."""
    x, y = sunpy.wcs.convert_pixel_to_data(
        map.shape[1], map.shape[0],
        map.scale["x"], map.scale["y"],
        map.reference_pixel["x"], map.reference_pixel["y"],
        map.reference_coordinate["x"], map.reference_coordinate["y"],
        map.coordinate_system["x"])

    lon_map, lat_map = sunpy.wcs.convert_hpc_hg(
        map.rsun_meters, map.dsun,
        map.units["x"], map.units["y"],
        map.heliographic_latitude, map.carrington_longitude,
        x, y)

    lon_bin = xbin
    lat_bin = ybin
    lon_range = (np.nanmin(lon_map), np.nanmax(lon_map))
    lat_range = (np.nanmin(lat_map), np.nanmax(lat_map))
    lon = np.arange(lon_range[0], lon_range[1], lon_bin)
    lat = np.arange(lat_range[0], lat_range[1], lat_bin)
    newgrid = np.meshgrid(lon, lat)

    points = np.vstack((lon_map.ravel(), lat_map.ravel())).T
    values = np.array(map).ravel()

    # get rid of all of the bad (nan) indices (i.e. those off of the sun)
    index = np.isfinite(points[:, 0]) * np.isfinite(points[:, 1])
    points = np.vstack((points[index, 0], points[index, 1])).T
    values = values[index]

    newdata = griddata(points, values, newgrid, method="linear")

    dict_header = {
        "CDELT1": lon_bin,
        "NAXIS1": len(lon),
        "CRVAL1": lon.min(),
        "CRPIX1": 1,
        "CRPIX2": 1,
        "CUNIT1": "deg",
        "CTYPE1": "HG",
        "CDELT2": lat_bin,
        "NAXIS2": len(lat),
        "CRVAL2": lat.min(),
        "CUNIT2": "deg",
        "CTYPE2": "HG",
    }
    header = sunpy.map.MapHeader(dict_header)
    transformed_map = sunpy.make_map(newdata, header)
    transformed_map.heliographic_latitude = map.heliographic_latitude
    return transformed_map
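# Usage sketch for map_hpc_to_hg() above: unravel the sample AIA map onto
# a 1-degree heliographic grid.
aia = sunpy.make_map(sunpy.AIA_171_IMAGE)
hg_map = map_hpc_to_hg(aia, xbin=1, ybin=1)
hg_map.show()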