class SWavesSpectrogram(LinearTimeSpectrogram):
    """Spectrogram for STEREO S/WAVES radio data.

    Adds a per-frequency background row (``bg``) on top of the base
    linear-time spectrogram.
    """
    # Re-use the parent's conditional constructor dispatch so this class
    # gains the same `create` entry points as LinearTimeSpectrogram.
    _create = ConditionalDispatch.from_existing(LinearTimeSpectrogram._create)
    create = classmethod(_create.wrapper())
    # `bg` must survive copy-based construction alongside the inherited
    # properties; it is shared by reference, not copied.
    COPY_PROPERTIES = LinearTimeSpectrogram.COPY_PROPERTIES + [
        ('bg', REFERENCE)
    ]

    @staticmethod
    def swavesfile_to_date(filename):
        """Return the observation date encoded in an S/WAVES file name.

        The basename is expected to look like ``<x>_<y>_YYYYMMDD...``:
        the third underscore-separated field starts with the date.
        """
        _, name = os.path.split(filename)
        date = name.split('_')[2]
        return datetime.datetime(
            int(date[0:4]), int(date[4:6]), int(date[6:])
        )

    @classmethod
    def read(cls, filename, **kwargs):
        """Read an S/WAVES text file and return a new SWavesSpectrogram.

        The file is parsed twice with ``np.genfromtxt``: once skipping the
        two header rows (frequency axis and background) to get the data,
        and once skipping the data rows to get those two header rows.
        """
        data = np.genfromtxt(filename, skip_header=2)
        # First column is time in minutes; convert to seconds.
        time_axis = data[:, 0] * 60.
        # Remaining columns are intensities; transpose so rows = frequencies.
        data = data[:, 1:].transpose()
        header = np.genfromtxt(filename, skip_footer=time_axis.size)
        freq_axis = header[0, :]
        bg = header[1, :]
        start = cls.swavesfile_to_date(filename)
        end = start + datetime.timedelta(seconds=time_axis[-1])
        # NOTE(review): one-minute cadence is assumed here rather than
        # derived from time_axis — confirm against the data product.
        t_delt = 60.
        t_init = (start - get_day(start)).seconds
        content = ''
        t_label = 'Time [UT]'
        f_label = 'Frequency [KHz]'
        # Flip so the highest frequency is the first row, matching the
        # orientation the plotting code expects.
        freq_axis = freq_axis[::-1]
        data = data[::-1, :]

        return cls(data, time_axis, freq_axis, start, end, t_init, t_delt,
                   t_label, f_label, content, bg)

    def __init__(self, data, time_axis, freq_axis, start, end, t_init,
                 t_delt, t_label, f_label, content, bg):
        """Store the background row and delegate the rest to the parent.

        The instrument set is fixed to ``{"SWAVES"}``.
        """
        # Because of how object creation works, there is no avoiding
        # unused arguments in this case.
        # pylint: disable=W0613
        super(SWavesSpectrogram, self).__init__(
            data, time_axis, freq_axis, start, end,
            t_init, t_delt, t_label, f_label, content, set(["SWAVES"])
        )
        self.bg = bg
class Spectrogram(Parent):
    """
    Spectrogram Class.

    .. warning:: This module is under development! Use at your own risk.

    Attributes
    ----------
    data : `~numpy.ndarray`
        two-dimensional array of the image data of the spectrogram.
    time_axis : `~numpy.ndarray`
        one-dimensional array containing the offset from the start
        for each column of data.
    freq_axis : `~numpy.ndarray`
        one-dimensional array containing information about the
        frequencies each row of the image corresponds to.
    start : `~datetime.datetime`
        starting time of the measurement
    end : `~datetime.datetime`
        end time of the measurement
    t_init : int
        offset from the start of the day the measurement began. If None
        gets automatically set from start.
    t_label : str
        label for the time axis
    f_label : str
        label for the frequency axis
    content : str
        header for the image
    instruments : str array
        instruments that recorded the data, may be more than one if
        it was constructed using combine_frequencies or join_many.
    """
    # Contrary to what pylint may think, this is not an old-style class.
    # pylint: disable=E1002,W0142,R0902

    # This needs to list all attributes that need to be
    # copied to maintain the object and how to handle them.
    COPY_PROPERTIES = [
        ('time_axis', COPY),
        ('freq_axis', COPY),
        ('instruments', COPY),
        ('start', REFERENCE),
        ('end', REFERENCE),
        ('t_label', REFERENCE),
        ('f_label', REFERENCE),
        ('content', REFERENCE),
        ('t_init', REFERENCE),
    ]
    _create = ConditionalDispatch.from_existing(Parent._create)

    @property
    def shape(self):
        # Shape of the underlying 2D data array: (n_freqs, n_times).
        return self.data.shape

    @property
    def dtype(self):
        return self.data.dtype

    def _get_params(self):
        """Implementation detail: snapshot of all copyable attributes."""
        return dict(
            (name, getattr(self, name)) for name, _ in self.COPY_PROPERTIES
        )

    def _slice(self, y_range, x_range):
        """Return new spectrogram reduced to the values passed
        as slices.

        Implementation detail: the time axis is re-based so the first
        retained column is offset 0, and start/end/t_init are shifted
        to match.
        """
        data = self.data[y_range, x_range]
        params = self._get_params()

        soffset = 0 if x_range.start is None else x_range.start
        soffset = int(soffset)
        eoffset = self.shape[1] if x_range.stop is None else x_range.stop  # pylint: disable=E1101
        eoffset -= 1
        eoffset = int(eoffset)

        params.update({
            'time_axis': self.time_axis[
                x_range.start:x_range.stop:x_range.step
            ] - self.time_axis[soffset],
            'freq_axis': self.freq_axis[
                y_range.start:y_range.stop:y_range.step],
            'start': self.start + datetime.timedelta(
                seconds=self.time_axis[soffset]),
            'end': self.start + datetime.timedelta(
                seconds=self.time_axis[eoffset]),
            't_init': self.t_init + self.time_axis[soffset],
        })
        return self.__class__(data, **params)

    def _with_data(self, data):
        """Return a shallow copy of self with its data array replaced."""
        new = copy(self)
        new.data = data
        return new

    def __init__(self, data, time_axis, freq_axis, start, end,
                 t_init=None, t_label="Time", f_label="Frequency",
                 content="", instruments=None):
        # Because of how object creation works, there is no avoiding
        # unused arguments in this case.
        self.data = data

        if t_init is None:
            # Default: seconds elapsed since midnight of the start day.
            diff = start - get_day(start)
            t_init = diff.seconds
        if instruments is None:
            instruments = set()

        self.start = start
        self.end = end

        self.t_label = t_label
        self.f_label = f_label

        self.t_init = t_init

        self.time_axis = time_axis
        self.freq_axis = freq_axis

        self.content = content
        self.instruments = instruments

    def time_formatter(self, x, pos):
        """This returns the label for the tick of value x at
        a specified pos on the time axis."""
        # Callback, cannot avoid unused arguments.
        # pylint: disable=W0613
        x = int(x)
        if x >= len(self.time_axis) or x < 0:
            # Tick falls outside the data; draw no label.
            return ""

        return self.format_time(
            self.start + datetime.timedelta(
                seconds=float(self.time_axis[x])
            )
        )

    @staticmethod
    def format_time(time):
        """Override to configure default plotting."""
        return time.strftime("%H:%M:%S")

    @staticmethod
    def format_freq(freq):
        """Override to configure default plotting."""
        return "{freq:0.1f}".format(freq=freq)

    def peek(self, *args, **kwargs):
        """
        Plot spectrum onto current axes.

        Parameters
        ----------
        *args : dict

        **kwargs : dict
            Any additional plot arguments that should be used
            when plotting.

        Returns
        -------
        fig : `~matplotlib.Figure`
            A plot figure.
        """
        figure()
        ret = self.plot(*args, **kwargs)
        plt.show()
        return ret

    def plot(self, figure=None, overlays=[], colorbar=True, vmin=None,
             vmax=None, linear=True, showz=True, yres=DEFAULT_YRES,
             max_dist=None, **matplotlib_args):
        """
        Plot spectrogram onto figure.

        Parameters
        ----------
        figure : `~matplotlib.Figure`
            Figure to plot the spectrogram on. If None, new Figure is created.
        overlays : list
            List of overlays (functions that receive figure and axes and
            return new ones) to be applied after drawing.
        colorbar : bool
            Flag that determines whether or not to draw a colorbar. If
            existing figure is passed, it is attempted to overdraw old
            colorbar.
        vmin : float
            Clip intensities lower than vmin before drawing.
        vmax : float
            Clip intensities higher than vmax before drawing.
        linear : bool
            If set to True, "stretch" image to make frequency axis linear.
        showz : bool
            If set to True, the value of the pixel that is hovered with the
            mouse is shown in the bottom right corner.
        yres : int or None
            To be used in combination with linear=True. If None, sample the
            image with half the minimum frequency delta. Else, sample the
            image to be at most yres pixels in vertical dimension. Defaults
            to 1080 because that's a common screen size.
        max_dist : float or None
            If not None, mask elements that are further than max_dist away
            from actual data points (ie, frequencies that actually have data
            from the receiver and are not just nearest-neighbour interpolated).
        """
        # [] as default argument is okay here because it is only read.
        # pylint: disable=W0102,R0914
        if linear:
            delt = yres
            if delt is not None:
                # Sampling step: coarse enough to fit into yres pixels, but
                # never coarser than half the smallest frequency gap
                # (Nyquist).
                delt = max(
                    (self.freq_axis[0] - self.freq_axis[-1]) / (yres - 1),
                    _min_delt(self.freq_axis) / 2.
                )
                delt = float(delt)

            data = _LinearView(self.clip_values(vmin, vmax), delt)
            freqs = np.arange(
                self.freq_axis[0], self.freq_axis[-1], -data.delt
            )
        else:
            data = np.array(self.clip_values(vmin, vmax))
            freqs = self.freq_axis

        # NOTE(review): the `figure` parameter is ignored here — plotting
        # always targets the current pyplot figure. Confirm whether honoring
        # a passed figure was intended.
        figure = plt.gcf()

        if figure.axes:
            axes = figure.axes[0]
        else:
            axes = figure.add_subplot(111)

        params = {
            'origin': 'lower',
            'aspect': 'auto',
        }
        params.update(matplotlib_args)

        if linear and max_dist is not None:
            # Mask interpolated rows that are too far from real channels.
            toplot = ma.masked_array(data, mask=data.make_mask(max_dist))
            pass
        else:
            toplot = data
        im = axes.imshow(toplot, **params)

        xa = axes.get_xaxis()
        ya = axes.get_yaxis()

        xa.set_major_formatter(
            FuncFormatter(self.time_formatter)
        )

        if linear:
            # Start with a number that is divisible by 5.
            init = (self.freq_axis[0] % 5) / data.delt
            nticks = 15.
            # Calculate MHz difference between major ticks.
            dist = (self.freq_axis[0] - self.freq_axis[-1]) / nticks
            # Round to next multiple of 10, at least ten.
            dist = max(round(dist, -1), 10)
            # One pixel in image space is data.delt MHz, thus we can convert
            # our distance between the major ticks into image space by
            # dividing it by data.delt.

            ya.set_major_locator(
                IndexLocator(
                    dist / data.delt, init
                )
            )
            ya.set_minor_locator(
                IndexLocator(
                    dist / data.delt / 10, init
                )
            )

            def freq_fmt(x, pos):
                # This is necessary because matplotlib somehow tries to get
                # the mid-point of the row, which we do not need here.
                x = x + 0.5
                return self.format_freq(self.freq_axis[0] - x * data.delt)
        else:
            freq_fmt = _list_formatter(freqs, self.format_freq)
            ya.set_major_locator(MaxNLocator(integer=True, steps=[1, 5, 10]))

        ya.set_major_formatter(
            FuncFormatter(freq_fmt)
        )

        axes.set_xlabel(self.t_label)
        axes.set_ylabel(self.f_label)
        # figure.suptitle(self.content)

        figure.suptitle(
            ' '.join([
                get_day(self.start).strftime("%d %b %Y"),
                'Radio flux density',
                '(' + ', '.join(self.instruments) + ')',
            ])
        )

        for tl in xa.get_ticklabels():
            tl.set_fontsize(10)
            tl.set_rotation(30)
        figure.add_axes(axes)
        figure.subplots_adjust(bottom=0.2)
        figure.subplots_adjust(left=0.2)

        if showz:
            axes.format_coord = self._mk_format_coord(
                data, figure.gca().format_coord)

        if colorbar:
            if len(figure.axes) > 1:
                # Reuse the existing colorbar axes of a passed-in figure.
                Colorbar(figure.axes[1], im).set_label("Intensity")
            else:
                figure.colorbar(im).set_label("Intensity")

        for overlay in overlays:
            figure, axes = overlay(figure, axes)

        for ax in figure.axes:
            ax.autoscale()
        if isinstance(figure, SpectroFigure):
            figure._init(self, freqs)
        return axes

    def __getitem__(self, key):
        """Index like a 2D array; slicing both axes yields a new
        Spectrogram, slicing only the frequency axis yields a Spectrum."""
        only_y = not isinstance(key, tuple)

        if only_y:
            return self.data[int(key)]
        elif isinstance(key[0], slice) and isinstance(key[1], slice):
            return self._slice(key[0], key[1])
        elif isinstance(key[1], slice):
            # return Spectrum( # XXX: Right class
            #     super(Spectrogram, self).__getitem__(key),
            #     self.time_axis[key[1].start:key[1].stop:key[1].step]
            # )
            return np.array(self.data[key])
        elif isinstance(key[0], slice):
            return Spectrum(
                self.data[key],
                self.freq_axis[key[0].start:key[0].stop:key[0].step]
            )

        return self.data[int(key)]

    def clip_freq(self, vmin=None, vmax=None):
        """Return a new spectrogram only consisting of frequencies
        in the interval [vmin, vmax].

        Parameters
        ----------
        vmin : float
            All frequencies in the result are greater or equal to this.
        vmax : float
            All frequencies in the result are smaller or equal to this.
        """
        # freq_axis is assumed to be sorted in descending order, so
        # frequencies above vmax are at the top, below vmin at the bottom.
        left = 0
        if vmax is not None:
            while self.freq_axis[left] > vmax:
                left += 1

        right = len(self.freq_axis) - 1

        if vmin is not None:
            while self.freq_axis[right] < vmin:
                right -= 1

        return self[left:right + 1, :]

    def auto_find_background(self, amount=0.05):
        """Automatically find the background. This
        is done by first subtracting the average value in each channel and
        then finding those times which have the lowest standard deviation.

        Parameters
        ----------
        amount : float
            The percent amount (out of 1) of lowest standard deviation to
            consider.
        """
        # pylint: disable=E1101,E1103
        data = self.data.astype(to_signed(self.dtype))
        # Subtract average value from every frequency channel.
        tmp = (data - np.average(self.data, 1).reshape(self.shape[0], 1))
        # Get standard deviation at every point of time.
        # Need to convert because otherwise this class's __getitem__
        # is used which assumes two-dimensionality.
        sdevs = np.asarray(np.std(tmp, 0))

        # Get indices of values with lowest standard deviation.
        cand = sorted(
            range(self.shape[1]), key=lambda y: sdevs[y]
        )
        # Only consider the best 5 %.
        return cand[:max(1, int(amount * len(cand)))]

    def auto_const_bg(self):
        """Automatically determine background."""
        realcand = self.auto_find_background()
        bg = np.average(self.data[:, realcand], 1)
        return bg.reshape(self.shape[0], 1)

    def subtract_bg(self):
        """Perform constant background subtraction."""
        return self._with_data(self.data - self.auto_const_bg())

    def randomized_auto_const_bg(self, amount):
        """Automatically determine background. Only consider a randomly
        chosen subset of the image.

        Parameters
        ----------
        amount : int
            Size of random sample that is considered for calculation of
            the background.
        """
        cols = [randint(0, self.shape[1] - 1) for _ in range(amount)]

        # pylint: disable=E1101,E1103
        data = self.data.astype(to_signed(self.dtype))
        # Subtract average value from every frequency channel.
        tmp = (data - np.average(self.data, 1).reshape(self.shape[0], 1))
        # Get standard deviation at every point of time.
        # Need to convert because otherwise this class's __getitem__
        # is used which assumes two-dimensionality.
        tmp = tmp[:, cols]
        sdevs = np.asarray(np.std(tmp, 0))

        # Get indices of values with lowest standard deviation.
        cand = sorted(
            range(amount), key=lambda y: sdevs[y]
        )
        # Only consider the best 5 %.
        realcand = cand[:max(1, int(0.05 * len(cand)))]

        # Average the best 5 %
        bg = np.average(self[:, [cols[r] for r in realcand]], 1)

        return bg.reshape(self.shape[0], 1)

    def randomized_subtract_bg(self, amount):
        """Perform randomized constant background subtraction.
        Does not produce the same result every time it is run.

        Parameters
        ----------
        amount : int
            Size of random sample that is considered for calculation of
            the background.
        """
        return self._with_data(
            self.data - self.randomized_auto_const_bg(amount))

    def clip_values(self, vmin=None, vmax=None, out=None):
        """
        Clip intensities to be in the interval [vmin, vmax].

        Any values greater than the maximum will be assigned the maximum,
        any values lower than the minimum will be assigned the minimum.
        If either is left out or None, do not clip at that side of the
        interval.

        Parameters
        ----------
        vmin : int or float
            New minimum value for intensities.
        vmax : int or float
            New maximum value for intensities
        """
        # pylint: disable=E1101
        # NOTE(review): defaults are truncated to int even for float data —
        # confirm this coarsening is intentional.
        if vmin is None:
            vmin = int(self.data.min())

        if vmax is None:
            vmax = int(self.data.max())

        return self._with_data(self.data.clip(vmin, vmax, out))

    def rescale(self, vmin=0, vmax=1, dtype=np.dtype('float32')):
        """
        Rescale intensities to [vmin, vmax].
        Note that vmin ≠ vmax and spectrogram.min() ≠ spectrogram.max().

        Parameters
        ----------
        vmin : float or int
            New minimum value in the resulting spectrogram.
        vmax : float or int
            New maximum value in the resulting spectrogram.
        dtype : `numpy.dtype`
            Data-type of the resulting spectrogram.
        """
        if vmax == vmin:
            raise ValueError("Maximum and minimum must be different.")
        if self.data.max() == self.data.min():
            raise ValueError("Spectrogram needs to contain distinct values.")
        data = self.data.astype(dtype)  # pylint: disable=E1101
        # Affine map of [data.min(), data.max()] onto [vmin, vmax].
        return self._with_data(
            vmin + (vmax - vmin) * (data - self.data.min()) /  # pylint: disable=E1101
            (self.data.max() - self.data.min())  # pylint: disable=E1101
        )

    def interpolate(self, frequency):
        """
        Linearly interpolate intensity at unknown frequency using linear
        interpolation of its two neighbours.

        Parameters
        ----------
        frequency : float or int
            Unknown frequency for which to linearly interpolate the
            intensities. freq_axis[0] >= frequency >= self_freq_axis[-1]
        """
        # Walk down the (descending) frequency axis until we pass the
        # requested frequency; lfreq/lvalue then hold the neighbour above.
        lfreq, lvalue = None, None
        for freq, value in zip(self.freq_axis, self.data[:, :]):
            if freq < frequency:
                break
            lfreq, lvalue = freq, value
        else:
            raise ValueError("Frequency not in interpolation range")
        if lfreq is None:
            raise ValueError("Frequency not in interpolation range")
        diff = frequency - freq  # pylint: disable=W0631
        ldiff = lfreq - frequency
        # Weighted average of the two neighbouring rows.
        return (ldiff * value + diff * lvalue) / (diff + ldiff)  # pylint: disable=W0631

    def linearize_freqs(self, delta_freq=None):
        """Rebin frequencies so that the frequency axis is linear.

        Parameters
        ----------
        delta_freq : float
            Difference between consecutive values on the new frequency axis.
            Defaults to half of smallest delta in current frequency axis.
            Compare Nyquist-Shannon sampling theorem.
        """
        if delta_freq is None:
            # Nyquist–Shannon sampling theorem
            delta_freq = _min_delt(self.freq_axis) / 2.
        nsize = (self.freq_axis.max() - self.freq_axis.min()) / delta_freq + 1
        new = np.zeros((int(nsize), self.shape[1]), dtype=self.data.dtype)

        # Express frequencies as (negative) pixel offsets from the maximum.
        freqs = self.freq_axis - self.freq_axis.max()
        freqs = freqs / delta_freq

        # Each source row fills the output rows up to the midpoint towards
        # its neighbour (nearest-neighbour assignment).
        midpoints = np.round((freqs[:-1] + freqs[1:]) / 2)
        fillto = np.concatenate(
            [midpoints - 1, np.round([freqs[-1]]) - 1]
        )
        fillfrom = np.concatenate(
            [np.round([freqs[0]]), midpoints - 1]
        )

        fillto = np.abs(fillto)
        fillfrom = np.abs(fillfrom)

        for row, from_, to_ in zip(self, fillfrom, fillto):
            new[int(from_): int(to_)] = row

        vrs = self._get_params()
        vrs.update({
            'freq_axis': np.linspace(
                self.freq_axis.max(), self.freq_axis.min(), nsize
            )
        })

        return self.__class__(new, **vrs)

    def freq_overlap(self, other):
        """Get frequency range present in both spectrograms. Returns
        (min, max) tuple.

        Parameters
        ----------
        other : Spectrogram
            other spectrogram with which to look for frequency overlap
        """
        lower = max(self.freq_axis[-1], other.freq_axis[-1])
        upper = min(self.freq_axis[0], other.freq_axis[0])
        if lower > upper:
            raise ValueError("No overlap.")
        return lower, upper

    def time_to_x(self, time):
        """Return x-coordinate in spectrogram that corresponds to the
        passed `~datetime.datetime` value.

        Parameters
        ----------
        time : `~sunpy.time.parse_time` compatible str
            `~datetime.datetime` to find the x coordinate for.
        """
        diff = time - self.start
        diff_s = SECONDS_PER_DAY * diff.days + diff.seconds
        # NOTE(review): this chained comparison can only be true if
        # time_axis[-1] < 0; the bounds check was likely meant to be
        # `diff_s < 0 or diff_s > self.time_axis[-1]` — confirm.
        if self.time_axis[-1] < diff_s < 0:
            raise ValueError("Out of bounds")
        for n, elem in enumerate(self.time_axis):
            if diff_s < elem:
                return n - 1
        # The last element is the searched one.
        return n

    def at_freq(self, freq):
        # Rows whose frequency exactly equals `freq`.
        return self[np.nonzero(self.freq_axis == freq)[0], :]

    @staticmethod
    def _mk_format_coord(spec, fmt_coord):
        """Wrap a matplotlib format_coord callback so it also shows the
        pixel intensity (z) under the cursor."""
        def format_coord(x, y):
            shape = list(map(int, spec.shape))

            xint, yint = int(x), int(y)
            if 0 <= xint < shape[1] and 0 <= yint < shape[0]:
                pixel = spec[yint][xint]
            else:
                pixel = ""

            return '{0!s} z={1!s}'.format(fmt_coord(x, y), pixel)

        return format_coord
class CallistoSpectrogram(LinearTimeSpectrogram):
    """ Class used for dynamic spectra coming from the Callisto network.

    Attributes
    ----------
    header : fits.Header
        main header of the FITS file
    axes_header : fits.Header
        header for the axes table
    swapped : boolean
        flag that specifies whether originally in the file the x-axis was
        frequency
    """
    # XXX: Determine those from the data.
    SIGMA_SUM = 75
    SIGMA_DELTA_SUM = 20

    _create = ConditionalDispatch.from_existing(LinearTimeSpectrogram._create)
    create = classmethod(_create.wrapper())
    # Contrary to what pylint may think, this is not an old-style class.
    # pylint: disable=E1002,W0142,R0902

    # This needs to list all attributes that need to be
    # copied to maintain the object and how to handle them.
    COPY_PROPERTIES = LinearTimeSpectrogram.COPY_PROPERTIES + [
        ('header', REFERENCE),
        ('swapped', REFERENCE),
        ('axes_header', REFERENCE)
    ]

    # List of instruments retrieved in July 2012 from
    # http://soleil.i4ds.ch/solarradio/data/2002-20yy_Callisto/
    INSTRUMENTS = set([
        'ALASKA', 'ALMATY', 'BIR', 'DARO', 'HB9SCT', 'HUMAIN',
        'HURBANOVO', 'KASI', 'KENYA', 'KRIM', 'MALAYSIA', 'MRT1',
        'MRT2', 'OOTY', 'OSRA', 'SWMC', 'TRIEST', 'UNAM'
    ])

    def save(self, filepath):
        """ Save modified spectrogram back to filepath.

        Parameters
        ----------
        filepath : str
            path to save the spectrogram to
        """
        main_header = self.get_header()
        # Pass the raw array: this class stores its pixels in self.data,
        # it is not itself an ndarray.
        data = fits.PrimaryHDU(self.data, header=main_header)
        ## XXX: Update axes header.

        freq_col = fits.Column(
            name="frequency", format="D8.3", array=self.freq_axis
        )
        time_col = fits.Column(
            name="time", format="D8.3", array=self.time_axis
        )
        cols = fits.ColDefs([freq_col, time_col])
        # NOTE(review): fits.new_table was removed in recent astropy in
        # favour of fits.BinTableHDU.from_columns — confirm the pinned
        # astropy version before upgrading.
        table = fits.new_table(cols, header=self.axes_header)

        hdulist = fits.HDUList([data, table])
        hdulist.writeto(filepath)

    def get_header(self):
        """ Return updated header reflecting the current data shape
        (axes are swapped back if the file stored frequency on x).
        """
        header = self.header.copy()

        if self.swapped:
            header['NAXIS2'] = self.shape[1]  # pylint: disable=E1101
            header['NAXIS1'] = self.shape[0]  # pylint: disable=E1101
        else:
            header['NAXIS1'] = self.shape[1]  # pylint: disable=E1101
            header['NAXIS2'] = self.shape[0]  # pylint: disable=E1101
        return header

    @classmethod
    def read(cls, filename, **kwargs):
        """ Read in FITS file and return a new CallistoSpectrogram.
        Any unknown (i.e. any except filename) keyword arguments get
        passed to fits.open.

        Parameters
        ----------
        filename : str
            path of the file to read
        """
        fl = fits.open(filename, **kwargs)
        data = fl[0].data
        axes = fl[1]
        header = fl[0].header

        start = _parse_header_time(
            header['DATE-OBS'], header.get('TIME-OBS', header.get('TIME$_OBS'))
        )
        end = _parse_header_time(
            header['DATE-END'], header.get('TIME-END', header.get('TIME$_END'))
        )

        swapped = "time" not in header["CTYPE1"].lower()

        # Swap dimensions so x-axis is always time.
        if swapped:
            t_delt = header["CDELT2"]
            t_init = header["CRVAL2"] - t_delt * header["CRPIX2"]
            t_label = header["CTYPE2"]

            f_delt = header["CDELT1"]
            # Use the frequency step here, not the time step.
            f_init = header["CRVAL1"] - f_delt * header["CRPIX1"]
            f_label = header["CTYPE1"]
            data = data.transpose()
        else:
            t_delt = header["CDELT1"]
            t_init = header["CRVAL1"] - t_delt * header["CRPIX1"]
            t_label = header["CTYPE1"]

            f_delt = header["CDELT2"]
            # Use the frequency step here, not the time step.
            f_init = header["CRVAL2"] - f_delt * header["CRPIX2"]
            f_label = header["CTYPE2"]

        # Table may contain the axes data. If it does, the other way of
        # doing it might be very wrong. Default both to None so the
        # fallbacks below also apply when there is no axes table at all.
        tm = fq = None
        if axes is not None:
            try:
                # It's not my fault. Neither supports __contains__ nor .get
                tm = axes.data['time']
            except KeyError:
                tm = None
            try:
                fq = axes.data['frequency']
            except KeyError:
                fq = None

        if tm is not None:
            # Fix dimensions (whyever they are (1, x) in the first place)
            time_axis = np.squeeze(tm)
        else:
            # Otherwise, assume it's linear. One sample per data column
            # (np.linspace without an explicit count would yield 50).
            time_axis = np.arange(data.shape[1]) * t_delt + t_init  # pylint: disable=E1101

        if fq is not None:
            freq_axis = np.squeeze(fq)
        else:
            # One sample per data row, analogous to the time axis above.
            freq_axis = np.arange(data.shape[0]) * f_delt + f_init  # pylint: disable=E1101

        content = header["CONTENT"]
        instruments = set([header["INSTRUME"]])

        return cls(
            data, time_axis, freq_axis, start, end, t_init, t_delt,
            t_label, f_label, content, instruments,
            header, axes.header, swapped
        )

    def __init__(self, data, time_axis, freq_axis, start, end,
                 t_init=None, t_delt=None, t_label="Time", f_label="Frequency",
                 content="", instruments=None, header=None, axes_header=None,
                 swapped=False):
        """Store FITS metadata and delegate the rest to the parent."""
        # Because of how object creation works, there is no avoiding
        # unused arguments in this case.
        # pylint: disable=W0613
        super(CallistoSpectrogram, self).__init__(
            data, time_axis, freq_axis, start, end,
            t_init, t_delt, t_label, f_label,
            content, instruments
        )

        self.header = header
        self.axes_header = axes_header
        self.swapped = swapped

    @classmethod
    def is_datasource_for(cls, header):
        """ Check if class supports data from the given FITS file.

        Parameters
        ----------
        header : fits.Header
            main header of the FITS file
        """
        return header.get('instrume', '').strip() in cls.INSTRUMENTS

    def remove_border(self):
        """ Remove duplicate entries on the borders.

        One copy of each duplicated border frequency is kept
        (hence the ``left - 1`` / ``right + 2`` slice bounds).
        """
        left = 0
        while self.freq_axis[left] == self.freq_axis[0]:
            left += 1
        right = self.shape[0] - 1
        while self.freq_axis[right] == self.freq_axis[-1]:
            right -= 1
        return self[left-1:right+2, :]

    @classmethod
    def read_many(cls, filenames, sort_by=None):
        """ Return list of CallistoSpectrogram objects read from filenames.

        Parameters
        ----------
        filenames : list of str
            list of paths to read from
        sort_by : str
            optional attribute of the resulting objects to sort from, e.g.
            start to sort by starting time.
        """
        # Materialize: in Python 3 map() returns an iterator, which has
        # no sort() and would also be exhausted on first use.
        objs = list(map(cls.read, filenames))
        if sort_by is not None:
            objs.sort(key=lambda x: getattr(x, sort_by))
        return objs

    @classmethod
    def from_range(cls, instrument, start, end, **kwargs):
        """ Automatically download data from instrument between start and
        end and join it together.

        Parameters
        ----------
        instrument : str
            instrument to retrieve the data from
        start : parse_time compatible
            start of the measurement
        end : parse_time compatible
            end of the measurement
        """
        kw = {
            'maxgap': None,
            'fill': cls.JOIN_REPEAT,
        }

        kw.update(kwargs)
        start = parse_time(start)
        end = parse_time(end)
        urls = query(start, end, [instrument])
        data = map(cls.from_url, urls)
        freq_buckets = defaultdict(list)
        for elem in data:
            freq_buckets[tuple(elem.freq_axis)].append(elem)
        try:
            # .values() — dict.itervalues() no longer exists in Python 3.
            return cls.combine_frequencies(
                [cls.join_many(elem, **kw) for elem in freq_buckets.values()]
            )
        except ValueError:
            raise ValueError("No data found.")

    def _overlap(self, other):
        """ Find frequency and time overlap of two spectrograms. """
        one, two = self.intersect_time([self, other])
        ovl = one.freq_overlap(two)
        return one.clip_freq(*ovl), two.clip_freq(*ovl)

    @staticmethod
    def _to_minimize(a, b):
        """
        Function to be minimized for matching to frequency channels.
        """
        def _fun(p):
            if p[0] <= 0.2 or abs(p[1]) >= a.max():
                return float("inf")
            return a - (p[0] * b + p[1])
        return _fun

    def _homogenize_params(self, other, maxdiff=1):
        """
        Return triple with a tuple of indices (in self and other,
        respectively), factors and constants at these frequencies.

        Parameters
        ----------
        other : CallistoSpectrogram
            Spectrogram to be homogenized with the current one.
        maxdiff : float
            Threshold for which frequencies are considered equal.
        """
        pairs_indices = [
            (x, y) for x, y, d in minimal_pairs(self.freq_axis,
                                                other.freq_axis)
            if d <= maxdiff
        ]

        pairs_data = [
            (self[n_one, :], other[n_two, :]) for n_one, n_two in pairs_indices
        ]

        # XXX: Maybe unnecessary.
        pairs_data_gaussian = [
            (gaussian_filter1d(a, 15), gaussian_filter1d(b, 15))
            for a, b in pairs_data
        ]

        # If we used integer arithmetic, we would accept more invalid
        # values.
        pairs_data_gaussian64 = np.float64(pairs_data_gaussian)
        least = [
            leastsq(self._to_minimize(a, b), [1, 0])[0]
            for a, b in pairs_data_gaussian64
        ]

        factors = [x for x, y in least]
        constants = [y for x, y in least]

        return pairs_indices, factors, constants

    def homogenize(self, other, maxdiff=1):
        """ Return overlapping part of self and other as (self, other)
        tuple. Homogenize intensities so that the images can be used with
        combine_frequencies. Note that this works best when most of the
        picture is signal, so use :py:meth:`in_interval` to select the
        subset of your image before applying this method.

        Parameters
        ----------
        other : CallistoSpectrogram
            Spectrogram to be homogenized with the current one.
        maxdiff : float
            Threshold for which frequencies are considered equal.
        """
        one, two = self._overlap(other)
        pairs_indices, factors, constants = one._homogenize_params(
            two, maxdiff
        )
        # XXX: Maybe (xd.freq_axis[x] + yd.freq_axis[y]) / 2.
        pairs_freqs = [one.freq_axis[x] for x, y in pairs_indices]

        # XXX: Extrapolation does not work this way.
        # XXX: Improve.
        f1 = np.polyfit(pairs_freqs, factors, 3)
        f2 = np.polyfit(pairs_freqs, constants, 3)

        return (
            one,
            two * polyfun_at(f1, two.freq_axis)[:, np.newaxis] +
            polyfun_at(f2, two.freq_axis)[:, np.newaxis]
        )

    def extend(self, minutes=15, **kwargs):
        """ Request subsequent files from the server. If minutes is
        negative, retrieve preceding files. """
        if len(self.instruments) != 1:
            raise ValueError

        # next(iter(...)) — the Python 2 .next() method no longer exists.
        instrument = next(iter(self.instruments))
        if minutes > 0:
            data = CallistoSpectrogram.from_range(
                instrument,
                self.end, self.end + datetime.timedelta(minutes=minutes)
            )
        else:
            data = CallistoSpectrogram.from_range(
                instrument,
                self.start - datetime.timedelta(minutes=-minutes), self.start
            )

        data = data.clip_freq(self.freq_axis[-1], self.freq_axis[0])
        return CallistoSpectrogram.join_many([self, data], **kwargs)

    @classmethod
    def from_url(cls, url):
        """ Return CallistoSpectrogram read from URL.

        Parameters
        ----------
        url : str
            URL to retrieve the data from
        """
        return cls.read(url)
class Map(np.ndarray, Parent):
    """
    Map(data, header)

    A spatially-aware data array based on the SolarSoft Map object

    Parameters
    ----------
    data : numpy.ndarray, list
        A 2d list or ndarray containing the map data
    header : dict
        A dictionary of the original image header tags

    Attributes
    ----------
    carrington_longitude : str
        Carrington longitude (crln_obs)
    center : dict
        X and Y coordinate of the center of the map in units.
        Usually represents the offset between the center of the Sun and the
        center of the map.
    cmap : matplotlib.colors.Colormap
        A Matplotlib colormap to be applied to the data
    coordinate_system : dict
        Coordinate system used for x and y axes (ctype1/2)
    date : datetime
        Image observation time
    detector : str
        Detector name
    dsun : float
        The observer distance from the Sun.
    exptime : float
        Exposure time of the image in seconds.
    heliographic_latitude : float
        Heliographic latitude in degrees
    heliographic_longitude : float
        Heliographic longitude in degrees
    instrument : str
        Instrument name
    measurement : str, int
        Measurement name. In some instances this is the wavelength of image.
    name : str
        Human-readable description of map-type
    nickname : str
        An abbreviated human-readable description of the map-type; part of
        the Helioviewer data model
    observatory : str
        Observatory name
    reference_coordinate : float
        Reference point WCS axes in data units (crval1/2)
    reference_pixel : float
        Reference point axes in pixels (crpix1/2)
    rsun_arcseconds : float
        Radius of the sun in arcseconds
    rsun_meters : float
        Radius of the sun in meters
    scale : dict
        Image scale along the x and y axes in units/pixel (cdelt1/2).
    units : dict
        Image coordinate units along the x and y axes (cunit1/2).

    Methods
    -------
    std()
        Return the standard deviation of the map data
    mean()
        Return the mean of the map data
    min()
        Return the minimum value of the map data
    max()
        Return the maximum value of the map data
    resample(dimension, method)
        Returns a new map that has been resampled up or down
    superpixel(dimension, method)
        Returns a new map consisting of superpixels formed from the
        original data.
    save()
        Save the map to a fits file.
    submap(range_a, range_b, units)
        Returns a submap of the map with the specified range
    plot()
        Return a matplotlib plot figure object
    show()
        Display a matplotlib plot to the screen
    get_header()
        Returns the original header from when the map was first created.

    Examples
    --------
    >>> aia = sunpy.Map(sunpy.AIA_171_IMAGE)
    >>> aia.T
    AIAMap([[ 0.3125,  1.    , -1.1875, ..., -0.625 ,  0.5625,  0.5   ],
    [-0.0625,  0.1875,  0.375 , ...,  0.0625,  0.0625, -0.125 ],
    [-0.125 , -0.8125, -0.5   , ..., -0.3125,  0.5625,  0.4375],
    ...,
    [ 0.625 ,  0.625 , -0.125 , ...,  0.125 , -0.0625,  0.6875],
    [-0.625 , -0.625 , -0.625 , ...,  0.125 , -0.0625,  0.6875],
    [ 0.    ,  0.    , -1.1875, ...,  0.125 ,  0.    ,  0.6875]])
    >>> aia.units['x']
    'arcsec'
    >>> aia.show()
    >>> import matplotlib.cm as cm
    >>> import matplotlib.colors as colors
    >>> aia.show(cmap=cm.hot, norm=colors.Normalize(1, 2048))

    See Also
    --------
    numpy.ndarray Parent class for the Map object

    References
    ----------
    | http://docs.scipy.org/doc/numpy/reference/arrays.classes.html
    | http://docs.scipy.org/doc/numpy/user/basics.subclassing.html
    | http://docs.scipy.org/doc/numpy/reference/ufuncs.html
    | http://www.scipy.org/Subclasses

    """
    _create = ConditionalDispatch.from_existing(Parent._create)
    create = classmethod(_create.wrapper())

    def __new__(cls, data, header):
        """Creates a new Map instance"""
        if isinstance(data, np.ndarray):
            obj = data.view(cls)
        elif isinstance(data, list):
            obj = np.asarray(data).view(cls)
        else:
            raise TypeError('Invalid input')

        return obj

    def __init__(self, data, header):
        self._original_header = header

        # Set naxis1 and naxis2 if not specified
        if header.get('naxis1') is None:
            header['naxis1'] = self.shape[1]
        if header.get('naxis2') is None:
            header['naxis2'] = self.shape[0]

        # Parse header and set map attributes
        for attr, value in list(self.get_properties(header).items()):
            setattr(self, attr, value)

        # Validate properties
        self._validate()

    @classmethod
    def get_properties(cls, header):
        """Parses a map header and determines default properties."""
        return {
            "cmap": cm.gray,  # @UndefinedVariable
            "date": parse_time(header.get('date-obs', None)),
            "detector": header.get('detector', ''),
            "dsun": header.get('dsun_obs', constants.au),
            "exposure_time": header.get('exptime', 0.),
            "instrument": header.get('instrume', ''),
            "measurement": header.get('wavelnth', ''),
            "observatory": header.get('telescop', ''),
            "name": header.get('telescop', '') + " " +
                    str(header.get('wavelnth', '')),
            "nickname": header.get('detector', ''),
            "rsun_meters": header.get('rsun_ref', constants.radius),
            # rsun_obs is preferred; solar_r and radius are legacy keywords.
            "rsun_arcseconds": header.get(
                'rsun_obs',
                header.get(
                    'solar_r',
                    header.get('radius', constants.average_angular_size))),
            "coordinate_system": {
                'x': header.get('ctype1', 'HPLN-TAN'),
                'y': header.get('ctype2', 'HPLT-TAN')
            },
            "carrington_longitude": header.get('crln_obs', 0.),
            "heliographic_latitude": header.get(
                'hglt_obs',
                header.get('crlt_obs', header.get('solar_b0', 0.))),
            "heliographic_longitude": header.get('hgln_obs', 0.),
            "reference_coordinate": {
                'x': header.get('crval1', 0.),
                'y': header.get('crval2', 0.),
            },
            # Default reference pixel is the image center (FITS 1-based).
            "reference_pixel": {
                'x': header.get('crpix1', (header.get('naxis1') + 1) / 2.),
                'y': header.get('crpix2', (header.get('naxis2') + 1) / 2.)
            },
            "scale": {
                'x': header.get('cdelt1', 1.),
                'y': header.get('cdelt2', 1.),
            },
            "units": {
                'x': header.get('cunit1', 'arcsec'),
                'y': header.get('cunit2', 'arcsec')
            },
            "rotation_angle": {
                'x': header.get('crota1', 0.),
                'y': header.get('crota2', 0.)
            }
        }

    def __array_finalize__(self, obj):
        """Finishes instantiation of the new map object"""
        if obj is None:
            return

        # Propagate map metadata through views/slices created by numpy.
        if hasattr(obj, '_original_header'):
            properties = [
                '_original_header', 'cmap', 'date', 'detector', 'dsun',
                'exposure_time', 'instrument', 'measurement', 'name',
                'observatory', 'rsun_arcseconds', 'rsun_meters', 'scale',
                'units', 'reference_coordinate', 'reference_pixel',
                'coordinate_system', 'heliographic_latitude',
                'heliographic_longitude', 'carrington_longitude',
                'rotation_angle'
            ]
            for attr in properties:
                setattr(self, attr, getattr(obj, attr))

    def __array_wrap__(self, out_arr, context=None):
        """Returns a wrapped instance of a Map object"""
        return np.ndarray.__array_wrap__(self, out_arr, context)

    def __getitem__(self, key):
        """Overriding indexing operation to ensure that header is updated"""
        if isinstance(key, tuple) and type(key[0]) is slice:
            # ndarray indexing is [row (y), column (x)].
            x_range = [key[1].start, key[1].stop]
            y_range = [key[0].start, key[0].stop]

            # BUG FIX: submap(range_a, range_b) expects the x range first;
            # the arguments were previously passed as (y_range, x_range).
            return self.submap(x_range, y_range, units="pixels")
        else:
            return np.ndarray.__getitem__(self, key)

    def __add__(self, other):
        """Add two maps. Currently does not take into account the
        alignment between the two maps."""
        result = np.ndarray.__add__(self, other)

        return result

    def __repr__(self):
        if not hasattr(self, 'observatory'):
            return np.ndarray.__repr__(self)

        return (
"""SunPy Map
---------
Observatory:\t %s
Instrument:\t %s
Detector:\t %s
Measurement:\t %s
Obs Date:\t %s
dt:\t\t %f
Dimension:\t [%d, %d]
[dx, dy] =\t [%f, %f]

""" % (self.observatory, self.instrument, self.detector, self.measurement,
       self.date.strftime("%Y-%m-%d %H:%M:%S"), self.exposure_time,
       self.shape[1], self.shape[0], self.scale['x'], self.scale['y'])
        + np.ndarray.__repr__(self))

    def __sub__(self, other):
        """Subtract two maps. Currently does not take into account the
        alignment between the two maps.

        numpy dtype nums:
            1    int8
            2    uint8
            3    int16
            4    uint16
        """
        # if data is stored as unsigned, cast up (e.g. uint8 => int16)
        if self.dtype.kind == "u":
            self = self.astype(to_signed(self.dtype))
        if other.dtype.kind == "u":
            other = other.astype(to_signed(other.dtype))

        result = np.ndarray.__sub__(self, other)

        def norm():
            # Clip the normalization to six standard deviations around the
            # mean so outliers do not wash out the difference image.
            mean = result.mean()
            std = result.std()
            vmin = max(result.min(), mean - 6 * std)
            vmax = min(result.max(), mean + 6 * std)

            return colors.Normalize(vmin, vmax)

        result.norm = norm
        result.cmap = cm.gray  # @UndefinedVariable

        return result

    @property
    def xrange(self):
        """Return the X range of the image in arcsec from edge to edge."""
        # Use true division (/ 2.) so odd dimensions are not truncated
        # under Python 2 integer arithmetic.
        xmin = self.center['x'] - self.shape[1] / 2. * self.scale['x']
        xmax = self.center['x'] + self.shape[1] / 2. * self.scale['x']
        return [xmin, xmax]

    @property
    def yrange(self):
        """Return the Y range of the image in arcsec from edge to edge."""
        ymin = self.center['y'] - self.shape[0] / 2. * self.scale['y']
        ymax = self.center['y'] + self.shape[0] / 2. * self.scale['y']
        return [ymin, ymax]

    @property
    def center(self):
        """Returns the offset between the center of the Sun and the center of
        the map."""
        return {
            'x': wcs.get_center(self.shape[1], self.scale['x'],
                                self.reference_pixel['x'],
                                self.reference_coordinate['x']),
            'y': wcs.get_center(self.shape[0], self.scale['y'],
                                self.reference_pixel['y'],
                                self.reference_coordinate['y'])
        }

    def _draw_limb(self, fig, axes):
        """Draws a circle representing the solar limb"""
        circ = patches.Circle([0, 0], radius=self.rsun_arcseconds,
                              fill=False, color='white')
        axes.add_artist(circ)
        return fig, axes

    def _draw_grid(self, fig, axes, grid_spacing=20):
        """Draws a grid over the surface of the Sun"""
        # define the number of points for each latitude or longitude line
        num_points = 20
        hg_longitude_deg = np.linspace(-90, 90, num=num_points)
        hg_latitude_deg = np.arange(-90, 90, grid_spacing)

        # draw the latitude lines
        for lat in hg_latitude_deg:
            hg_latitude_deg_mesh, hg_longitude_deg_mesh = np.meshgrid(
                lat * np.ones(num_points), hg_longitude_deg)
            x, y = wcs.convert_hg_hpc(self.rsun_meters, self.dsun,
                                      self.heliographic_latitude,
                                      self.heliographic_longitude,
                                      hg_longitude_deg_mesh,
                                      hg_latitude_deg_mesh, units='arcsec')
            axes.plot(x, y, color='white', linestyle='dotted')

        hg_longitude_deg = np.arange(-90, 90, grid_spacing)
        hg_latitude_deg = np.linspace(-90, 90, num=num_points)

        # draw the longitude lines
        for lon in hg_longitude_deg:
            hg_longitude_deg_mesh, hg_latitude_deg_mesh = np.meshgrid(
                lon * np.ones(num_points), hg_latitude_deg)
            x, y = wcs.convert_hg_hpc(self.rsun_meters, self.dsun,
                                      self.heliographic_latitude,
                                      self.heliographic_longitude,
                                      hg_longitude_deg_mesh,
                                      hg_latitude_deg_mesh, units='arcsec')
            axes.plot(x, y, color='white', linestyle='dotted')

        return fig, axes

    def _validate(self):
        """Validates the meta-information associated with a Map.

        This function includes very basic validation checks which apply to
        all of the kinds of files that SunPy can read. Datasource-specific
        validation should be handled in the relevant file in the
        sunpy.map.sources package."""
        if (self.dsun <= 0 or self.dsun >= 40 * constants.au):
            raise InvalidHeaderInformation("Invalid value for DSUN")

    def std(self, *args, **kwargs):
        """overide np.ndarray.std()"""
        return np.array(self, copy=False, subok=False).std(*args, **kwargs)

    def mean(self, *args, **kwargs):
        """overide np.ndarray.mean()"""
        return np.array(self, copy=False, subok=False).mean(*args, **kwargs)

    def min(self, *args, **kwargs):
        """overide np.ndarray.min()"""
        return np.array(self, copy=False, subok=False).min(*args, **kwargs)

    def max(self, *args, **kwargs):
        """overide np.ndarray.max()"""
        return np.array(self, copy=False, subok=False).max(*args, **kwargs)

    def data_to_pixel(self, value, dim):
        """Convert pixel-center data coordinates to pixel values"""
        if dim not in ['x', 'y']:
            raise ValueError("Invalid dimension. Must be one of 'x' or 'y'.")

        size = self.shape[dim == 'x']  # 1 if dim == 'x', 0 if dim == 'y'.

        return (value - self.center[dim]) / self.scale[dim] + ((size - 1) / 2.)
def get_header(self, original=False): """Returns an updated MapHeader instance""" header = self._original_header.copy() # If requested, return original header as-is if original: return header # Bit-depth # # 8 Character or unsigned binary integer # 16 16-bit twos-complement binary integer # 32 32-bit twos-complement binary integer # -32 IEEE single precision floating point # -64 IEEE double precision floating point # if not header.has_key('bitpix'): bitdepth = 8 * self.dtype.itemsize if self.dtype.kind == "f": bitdepth = -bitdepth header['bitpix'] = bitdepth # naxis header['naxis'] = self.ndim header['naxis1'] = self.shape[1] header['naxis2'] = self.shape[0] # dsun if header.has_key('dsun_obs'): header['dsun_obs'] = self.dsun # rsun_obs if header.has_key('rsun_obs'): header['rsun_obs'] = self.rsun_arcseconds elif header.has_key('solar_r'): header['solar_r'] = self.rsun_arcseconds elif header.has_key('radius'): header['radius'] = self.rsun_arcseconds # cdelt header['cdelt1'] = self.scale['x'] header['cdelt2'] = self.scale['y'] # crpix header['crval1'] = self.reference_coordinate['x'] header['crval2'] = self.reference_coordinate['y'] # crval header['crpix1'] = self.reference_pixel['x'] header['crpix2'] = self.reference_pixel['y'] return header def resample(self, dimensions, method='linear'): """Returns a new Map that has been resampled up or down Arbitrary resampling of the Map to new dimension sizes. Uses the same parameters and creates the same co-ordinate lookup points as IDL''s congrid routine, which apparently originally came from a VAX/VMS routine of the same name. Parameters ---------- dimensions : tuple Dimensions that new Map should have. Note: the first argument corresponds to the 'x' axis and the second argument corresponds to the 'y' axis. method : {'neighbor' | 'nearest' | 'linear' | 'spline'} Method to use for resampling interpolation. 
* neighbor - Closest value from original data * nearest and linear - Uses n x 1-D interpolations using scipy.interpolate.interp1d * spline - Uses ndimage.map_coordinates Returns ------- out : Map A new Map which has been resampled to the desired dimensions. References ---------- | http://www.scipy.org/Cookbook/Rebinning (Original source, 2011/11/19) """ # Note: because the underlying ndarray is transposed in sense when # compared to the Map, the ndarray is transposed, resampled, then # transposed back # Note: "center" defaults to True in this function because data # coordinates in a Map are at pixel centers # Make a copy of the original data and perform resample data = resample(np.asarray(self).copy().T, dimensions, method, center=True) # Update image scale and number of pixels header = self._original_header.copy() # Note that 'x' and 'y' correspond to 1 and 0 in self.shape, # respectively scale_factor_x = (float(self.shape[1]) / dimensions[0]) scale_factor_y = (float(self.shape[0]) / dimensions[1]) # Create new map instance new_map = self.__class__(data.T, header) # Update metadata new_map.scale['x'] *= scale_factor_x new_map.scale['y'] *= scale_factor_y new_map.reference_pixel['x'] = (dimensions[0] + 1) / 2. new_map.reference_pixel['y'] = (dimensions[1] + 1) / 2. new_map.reference_coordinate['x'] = self.center['x'] new_map.reference_coordinate['y'] = self.center['y'] return new_map def superpixel(self, dimensions, method='sum'): """Returns a new map consisting of superpixels formed from the original data. Useful for increasing signal to noise ratio in images. Parameters ---------- dimensions : tuple One superpixel in the new map is equal to (dimension[0], dimension[1]) pixels of the original map Note: the first argument corresponds to the 'x' axis and the second argument corresponds to the 'y' axis. 
method : {'sum' | 'average'} What each superpixel represents compared to the original data * sum - add up the original data * average - average the sum over the number of original pixels Returns ------- out : Map A new Map which has superpixels of the required size. References ---------- | http://mail.scipy.org/pipermail/numpy-discussion/2010-July/051760.html """ # Note: because the underlying ndarray is transposed in sense when # compared to the Map, the ndarray is transposed, resampled, then # transposed back # Note: "center" defaults to True in this function because data # coordinates in a Map are at pixel centers # Make a copy of the original data and perform reshaping reshaped = reshape_image_to_4d_superpixel( np.asarray(self).copy().T, dimensions) if method == 'sum': data = reshaped.sum(axis=3).sum(axis=1) elif method == 'average': data = ((reshaped.sum(axis=3).sum(axis=1)) / np.float32(dimensions[0] * dimensions[1])) #data = resample(np.asarray(self).copy().T, dimensions, # method, center=True) # Update image scale and number of pixels header = self._original_header.copy() # Note that 'x' and 'y' correspond to 1 and 0 in self.shape, # respectively new_nx = self.shape[1] / dimensions[0] new_ny = self.shape[0] / dimensions[1] # Create new map instance new_map = self.__class__(data.T, header) # Update metadata new_map.scale['x'] = dimensions[0] * self.scale['x'] new_map.scale['y'] = dimensions[1] * self.scale['y'] new_map.reference_pixel['x'] = (new_nx + 1) / 2. new_map.reference_pixel['y'] = (new_ny + 1) / 2. new_map.reference_coordinate['x'] = self.center['x'] new_map.reference_coordinate['y'] = self.center['y'] return new_map def save(self, filepath): """Saves the SunPy Map object to a file. Currently SunPy can only save files in the FITS format. In the future support will be added for saving to other formats. Parameters ---------- filepath : string Location to save file to. 
""" pyfits_header = self.get_header().as_pyfits_header() hdu = pyfits.PrimaryHDU(self, header=pyfits_header) hdulist = pyfits.HDUList([hdu]) hdulist.writeto(os.path.expanduser(filepath)) def submap(self, range_a, range_b, units="data"): """Returns a submap of the map with the specified range Parameters ---------- range_a : list The range of the Map to select across either the x axis. range_b : list The range of the Map to select across either the y axis. units : {'data' | 'pixels'}, optional The units for the supplied ranges. Returns ------- out : Map A new map instance is returned representing to specified sub-region Examples -------- >>> aia.submap([-5,5],[-5,5]) AIAMap([[ 341.3125, 266.5 , 329.375 , 330.5625, 298.875 ], [ 347.1875, 273.4375, 247.4375, 303.5 , 305.3125], [ 322.8125, 302.3125, 298.125 , 299. , 261.5 ], [ 334.875 , 289.75 , 269.25 , 256.375 , 242.3125], [ 273.125 , 241.75 , 248.8125, 263.0625, 249.0625]]) >>> aia.submap([0,5],[0,5], units='pixels') AIAMap([[ 0.3125, -0.0625, -0.125 , 0. , -0.375 ], [ 1. , 0.1875, -0.8125, 0.125 , 0.3125], [-1.1875, 0.375 , -0.5 , 0.25 , -0.4375], [-0.6875, -0.3125, 0.8125, 0.0625, 0.1875], [-0.875 , 0.25 , 0.1875, 0. , -0.6875]]) """ if units is "data": # Check edges (e.g. [:512,..] 
or [:,...]) if range_a[0] is None: range_a[0] = self.xrange[0] if range_a[1] is None: range_a[1] = self.xrange[1] if range_b[0] is None: range_b[0] = self.yrange[0] if range_b[1] is None: range_b[1] = self.yrange[1] #x_pixels = [self.data_to_pixel(elem, 'x') for elem in range_a] x_pixels = [ np.ceil(self.data_to_pixel(range_a[0], 'x')), np.floor(self.data_to_pixel(range_a[1], 'x')) + 1 ] #y_pixels = [self.data_to_pixel(elem, 'y') for elem in range_b] y_pixels = [ np.ceil(self.data_to_pixel(range_b[0], 'y')), np.floor(self.data_to_pixel(range_b[1], 'y')) + 1 ] elif units is "pixels": # Check edges if range_a[0] is None: range_a[0] = 0 if range_a[1] is None: range_a[1] = self.shape[0] if range_b[0] is None: range_b[0] = 0 if range_b[1] is None: range_b[1] = self.shape[0] x_pixels = range_a y_pixels = range_b else: raise ValueError("Invalid unit. Must be one of 'data' or 'pixels'") # Make a copy of the header with updated centering information header = self._original_header.copy() # Get ndarray representation of submap data = np.asarray(self)[y_pixels[0]:y_pixels[1], x_pixels[0]:x_pixels[1]] # Instantiate new instance and update metadata new_map = self.__class__(data.copy(), header) new_map.reference_pixel['x'] = self.reference_pixel['x'] - x_pixels[0] new_map.reference_pixel['y'] = self.reference_pixel['y'] - y_pixels[0] return new_map @toggle_pylab def plot(self, figure=None, overlays=None, draw_limb=True, gamma=None, draw_grid=False, colorbar=True, basic_plot=False, **matplot_args): """Plots the map object using matplotlib Parameters ---------- overlays : list List of overlays to include in the plot draw_limb : bool Whether the solar limb should be plotted. 
draw_grid : bool Whether solar meridians and parallels grid_spacing : float Set the spacing between meridians and parallels for the grid gamma : float Gamma value to use for the color map colorbar : bool Whether to display a colorbar next to the plot basic_plot : bool If true, the data is plotted by itself at it's natural scale; no title, labels, or axes are shown. **matplot_args : dict Matplotlib Any additional imshow arguments that should be used when plotting the image. """ if overlays is None: overlays = [] if draw_limb: overlays = overlays + [self._draw_limb] # TODO: need to be able to pass the grid spacing to _draw_grid from the # plot command. if draw_grid: overlays = overlays + [self._draw_grid] # Create a figure and add title and axes if figure is None: figure = plt.figure(frameon=not basic_plot) # Basic plot if basic_plot: axes = plt.Axes(figure, [0., 0., 1., 1.]) axes.set_axis_off() figure.add_axes(axes) # Normal plot else: axes = figure.add_subplot(111) axes.set_title("%s %s" % (self.name, self.date)) # x-axis label if self.coordinate_system['x'] == 'HG': xlabel = 'Longitude [%s]' % self.units['x'] else: xlabel = 'X-position [%s]' % self.units['x'] # y-axis label if self.coordinate_system['y'] == 'HG': ylabel = 'Latitude [%s]' % self.units['y'] else: ylabel = 'Y-position [%s]' % self.units['y'] axes.set_xlabel(xlabel) axes.set_ylabel(ylabel) # Determine extent extent = self.xrange + self.yrange # Matplotlib arguments params = {"cmap": self.cmap, "norm": self.norm()} params.update(matplot_args) if gamma is not None: params['cmap'] = copy(params['cmap']) params['cmap'].set_gamma(gamma) im = axes.imshow(self, origin='lower', extent=extent, **params) if colorbar and not basic_plot: figure.colorbar(im) for overlay in overlays: figure, axes = overlay(figure, axes) return figure def show(self, figure=None, overlays=None, draw_limb=False, gamma=1.0, draw_grid=False, colorbar=True, basic_plot=False, **matplot_args): """Displays map on screen. 
Arguments are same as plot().""" self.plot(figure, overlays, draw_limb, gamma, draw_grid, colorbar, basic_plot, **matplot_args).show() def norm(self): """Default normalization method""" return None @classmethod def parse_file(cls, filepath): """Reads in a map file and returns a header and data array""" return read_file(filepath) @classmethod def read(cls, filepath): """Map class factory Attempts to determine the type of data associated with input and returns an instance of either the generic Map class or a subclass of Map such as AIAMap, EUVIMap, etc. Parameters ---------- filepath : string Path to a valid FITS or JPEG 2000 file of a type supported by SunPy Returns ------- out : Map Returns a Map instance for the particular type of data loaded. """ data, header = cls.parse_file(filepath) if cls.__name__ is not "Map": return cls(data, header) for cls in Map.__subclasses__(): if cls.is_datasource_for(header): return cls(data, header) return Map(data, header) @classmethod def read_header(cls, filepath): """Attempts to detect the datasource type and returns meta-information for that particular datasource.""" header = read_file_header(filepath) for cls in Map.__subclasses__(): if cls.is_datasource_for(header): properties = cls.get_properties(header) properties['header'] = header return properties
class Map(np.ndarray, Parent): """ Map(data, header) A spatially-aware data array based on the SolarSoft Map object Parameters ---------- data : numpy.ndarray, list A 2d list or ndarray containing the map data header : dict A dictionary of the original image header tags Attributes ---------- original_header : dict Dictionary representation of the original FITS header carrington_longitude : str Carrington longitude (crln_obs) center : dict X and Y coordinate of the center of the map in units. Usually represents the offset between the center of the Sun and the center of the map. cmap : matplotlib.colors.Colormap A Matplotlib colormap to be applied to the data coordinate_system : dict Coordinate system used for x and y axes (ctype1/2) date : datetime Image observation time detector : str Detector name dsun : float The observer distance from the Sun. exptime : float Exposure time of the image in seconds. heliographic_latitude : float Heliographic latitude in degrees heliographic_longitude : float Heliographic longitude in degrees instrument : str Instrument name measurement : str, int Measurement name. In some instances this is the wavelength of image. name: str Human-readable description of map-type nickname : str An abbreviated human-readable description of the map-type; part of the Helioviewer data model observatory : str Observatory name reference_coordinate : float Reference point WCS axes in data units (crval1/2) reference_pixel : float Reference point axes in pixels (crpix1/2) rsun_arcseconds : float Radius of the sun in arcseconds rsun_meters : float Radius of the sun in meters scale : dict Image scale along the x and y axes in units/pixel (cdelt1/2). units : dict Image coordinate units along the x and y axes (cunit1/2). 
Methods ------- std() Return the standard deviation of the map data mean() Return the mean of the map data min() Return the minimum value of the map data max() Return the maximum value of the map data resample(dimension, method) Returns a new map that has been resampled up or down superpixel(dimension, method) Returns a new map consisting of superpixels formed from the original data. save() Save the map to a fits file. submap(range_a, range_b, units) Returns a submap of the map with the specified range plot() Return a matplotlib imageaxes instance, like plt.imshow() peek() Display a matplotlib plot to the screen draw_limb() Draw a line on the image where the solar limb is. draw_grid() Draw a lon/lat grid on a map plot. get_header() Returns the original header from when the map was first created. Examples -------- >>> aia = sunpy.make_map(sunpy.AIA_171_IMAGE) >>> aia.T AIAMap([[ 0.3125, 1. , -1.1875, ..., -0.625 , 0.5625, 0.5 ], [-0.0625, 0.1875, 0.375 , ..., 0.0625, 0.0625, -0.125 ], [-0.125 , -0.8125, -0.5 , ..., -0.3125, 0.5625, 0.4375], ..., [ 0.625 , 0.625 , -0.125 , ..., 0.125 , -0.0625, 0.6875], [-0.625 , -0.625 , -0.625 , ..., 0.125 , -0.0625, 0.6875], [ 0. , 0. , -1.1875, ..., 0.125 , 0. 
, 0.6875]]) >>> aia.units['x'] 'arcsec' >>> aia.peek() See Also -------- numpy.ndarray Parent class for the Map object References ---------- | http://docs.scipy.org/doc/numpy/reference/arrays.classes.html | http://docs.scipy.org/doc/numpy/user/basics.subclassing.html | http://docs.scipy.org/doc/numpy/reference/ufuncs.html | http://www.scipy.org/Subclasses """ _create = ConditionalDispatch.from_existing(Parent._create) create = classmethod(_create.wrapper()) def __new__(cls, data, header): """Creates a new Map instance""" if isinstance(data, np.ndarray): obj = data.view(cls) elif isinstance(data, list): obj = np.asarray(data).view(cls) else: raise TypeError('Invalid input') return obj def __init__(self, data, header): self._original_header = header # Set naxis1 and naxis2 if not specified if header.get('naxis1') is None: header['naxis1'] = self.shape[1] if header.get('naxis2') is None: header['naxis2'] = self.shape[0] # Parse header and set map attributes for attr, value in list(self.get_properties(header).items()): setattr(self, attr, value) # Validate properties self._validate() @classmethod def get_properties(cls, header): """Parses a map header and determines default properties.""" if is_time(header.get('date-obs', [])): # Hack! 
check FITS standard is a time date = header.get('date-obs') # Check commonly used but non-standard FITS keyword for observation time is a time elif is_time(header.get('date_obs', [])): # Horrible [] hack date = header.get('date_obs') else: date = None return { "cmap": cm.gray, # @UndefinedVariable "date": parse_time(date) if date is not None else 'N/A', "detector": header.get('detector', ''), "dsun": header.get('dsun_obs', constants.au), "exposure_time": header.get('exptime', 0.), "instrument": header.get('instrume', ''), "measurement": header.get('wavelnth', ''), "observatory": header.get('telescop', ''), "name": header.get('telescop', '') + " " + str(header.get('wavelnth', '')), "nickname": header.get('detector', ''), "rsun_meters": header.get('rsun_ref', constants.radius), "rsun_arcseconds": header.get( 'rsun_obs', header.get( 'solar_r', header.get('radius', constants.average_angular_size))), "coordinate_system": { 'x': header.get('ctype1', 'HPLN-TAN'), 'y': header.get('ctype2', 'HPLT-TAN') }, "carrington_longitude": header.get('crln_obs', 0.), "heliographic_latitude": header.get('hglt_obs', header.get('crlt_obs', header.get('solar_b0', 0.))), "heliographic_longitude": header.get('hgln_obs', 0.), "reference_coordinate": { 'x': header.get('crval1', 0.), 'y': header.get('crval2', 0.), }, "reference_pixel": { 'x': header.get('crpix1', (header.get('naxis1') + 1) / 2.), 'y': header.get('crpix2', (header.get('naxis2') + 1) / 2.) }, "scale": { 'x': header.get('cdelt1', 1.), 'y': header.get('cdelt2', 1.), }, "units": { 'x': header.get('cunit1', 'arcsec'), 'y': header.get('cunit2', 'arcsec') }, "rotation_angle": { 'x': header.get('crota1', 0.), 'y': header.get('crota2', 0.) 
} } def __array_finalize__(self, obj): """Finishes instantiation of the new map object""" if obj is None: return if hasattr(obj, '_original_header'): properties = [ '_original_header', 'cmap', 'date', 'detector', 'dsun', 'exposure_time', 'instrument', 'measurement', 'name', 'observatory', 'rsun_arcseconds', 'rsun_meters', 'scale', 'units', 'reference_coordinate', 'reference_pixel', 'coordinate_system', 'heliographic_latitude', 'heliographic_longitude', 'carrington_longitude', 'rotation_angle' ] for attr in properties: setattr(self, attr, getattr(obj, attr)) def __array_wrap__(self, out_arr, context=None): """Returns a wrapped instance of a Map object""" return np.ndarray.__array_wrap__(self, out_arr, context) def __getitem__(self, key): """Overriding indexing operation to ensure that header is updated. Note that the indexing follows the ndarray row-column order, which is reversed from calling Map.submap()""" if isinstance(key, tuple): # Used when asking for a 2D sub-array # The header will be updated if type(key[1]) is slice: x_range = [key[1].start, key[1].stop] else: x_range = [key[1], key[1] + 1] if type(key[0]) is slice: y_range = [key[0].start, key[0].stop] else: y_range = [key[0], key[0] + 1] return self.submap(x_range, y_range, units="pixels") else: # Typically used by np.ndarray.__repr__() due to indexing with [-1] # The header will not be updated properly! return np.ndarray.__getitem__(self, key) def __add__(self, other): """Add two maps. 
Currently does not take into account the alignment between the two maps.""" result = np.ndarray.__add__(self, other) return result def __repr__(self): if not hasattr(self, 'observatory'): return np.ndarray.__repr__(self) return ("""SunPy Map --------- Observatory:\t %s Instrument:\t %s Detector:\t %s Measurement:\t %s Obs Date:\t %s dt:\t\t %f Dimension:\t [%d, %d] [dx, dy] =\t [%f, %f] """ % (self.observatory, self.instrument, self.detector, self.measurement, self.date, self.exposure_time, self.shape[1], self.shape[0], self.scale['x'], self.scale['y']) + np.ndarray.__repr__(self)) def __sub__(self, other): """Subtract two maps. Currently does not take into account the alignment between the two maps. numpy dtype nums: 1 int8 2 uint8 3 int16 4 uint16 """ # if data is stored as unsigned, cast up (e.g. uint8 => int16) if self.dtype.kind == "u": self = self.astype(to_signed(self.dtype)) if other.dtype.kind == "u": other = other.astype(to_signed(other.dtype)) result = np.ndarray.__sub__(self, other) def norm(): mean = result.mean() std = result.std() vmin = max(result.min(), mean - 6 * std) vmax = min(result.max(), mean + 6 * std) return colors.Normalize(vmin, vmax) result.norm = norm result.cmap = cm.gray # @UndefinedVariable return result @property def xrange(self): """Return the X range of the image in arcsec from edge to edge.""" xmin = self.center['x'] - self.shape[1] / 2. * self.scale['x'] xmax = self.center['x'] + self.shape[1] / 2. * self.scale['x'] return [xmin, xmax] @property def yrange(self): """Return the Y range of the image in arcsec from edge to edge.""" ymin = self.center['y'] - self.shape[0] / 2. * self.scale['y'] ymax = self.center['y'] + self.shape[0] / 2. 
* self.scale['y']
        return [ymin, ymax]

    @property
    def center(self):
        """Returns the offset between the center of the Sun and the center of
        the map."""
        return {
            'x': wcs.get_center(self.shape[1], self.scale['x'],
                                self.reference_pixel['x'],
                                self.reference_coordinate['x']),
            'y': wcs.get_center(self.shape[0], self.scale['y'],
                                self.reference_pixel['y'],
                                self.reference_coordinate['y'])
        }

    def draw_limb(self, axes=None):
        """Draws a circle representing the solar limb

        Parameters
        ----------
        axes: matplotlib.axes object or None
            Axes to plot limb on or None to use current axes.

        Returns
        -------
        matplotlib.axes object
        """
        if not axes:
            axes = plt.gca()

        # NOTE(review): 'center' is a property on this class, so hasattr()
        # should always be True unless the property raises — confirm whether
        # the fallback branch is reachable.
        if hasattr(self, 'center'):
            circ = patches.Circle([self.center['x'], self.center['y']],
                                  radius=self.rsun_arcseconds, fill=False,
                                  color='white', zorder=100)
        else:
            print("Assuming center of Sun is center of image")
            circ = patches.Circle([0, 0], radius=self.rsun_arcseconds,
                                  fill=False, color='white', zorder=100)
        axes.add_artist(circ)

        return axes

    def draw_grid(self, axes=None, grid_spacing=20):
        """Draws a grid over the surface of the Sun

        Parameters
        ----------
        axes: matplotlib.axes object or None
            Axes to plot limb on or None to use current axes.
        grid_spacing: float
            Spacing (in degrees) for longitude and latitude grid.

        Returns
        -------
        matplotlib.axes object
        """
        if not axes:
            axes = plt.gca()

        x, y = self.pixel_to_data()
        rsun = self.rsun_meters
        dsun = self.dsun

        b0 = self.heliographic_latitude
        l0 = self.heliographic_longitude
        units = [self.units.get('x'), self.units.get('y')]

        #TODO: This function could be optimized. Does not need to convert the entire image
        # coordinates
        lon_self, lat_self = wcs.convert_hpc_hg(rsun, dsun, units[0],
                                                units[1], b0, l0, x, y)
        # define the number of points for each latitude or longitude line
        num_points = 20

        #TODO: The following code is ugly. Fix it.
lon_range = [lon_self.min(), lon_self.max()] lat_range = [lat_self.min(), lat_self.max()] if np.isfinite(lon_range[0]) == False: lon_range[0] = -90 + self.heliographic_longitude if np.isfinite(lon_range[1]) == False: lon_range[1] = 90 + self.heliographic_longitude if np.isfinite(lat_range[0]) == False: lat_range[0] = -90 + self.heliographic_latitude if np.isfinite(lat_range[1]) == False: lat_range[1] = 90 + self.heliographic_latitude hg_longitude_deg = np.linspace(lon_range[0], lon_range[1], num=num_points) hg_latitude_deg = np.arange(lat_range[0], lat_range[1] + grid_spacing, grid_spacing) # draw the latitude lines for lat in hg_latitude_deg: hg_latitude_deg_mesh, hg_longitude_deg_mesh = np.meshgrid( lat * np.ones(num_points), hg_longitude_deg) x, y = wcs.convert_hg_hpc(self.rsun_meters, self.dsun, self.heliographic_latitude, self.heliographic_longitude, hg_longitude_deg_mesh, hg_latitude_deg_mesh, units='arcsec') axes.plot(x, y, color='white', linestyle='dotted', zorder=100) hg_longitude_deg = np.arange(lon_range[0], lon_range[1] + grid_spacing, grid_spacing) hg_latitude_deg = np.linspace(lat_range[0], lat_range[1], num=num_points) # draw the longitude lines for lon in hg_longitude_deg: hg_longitude_deg_mesh, hg_latitude_deg_mesh = np.meshgrid( lon * np.ones(num_points), hg_latitude_deg) x, y = wcs.convert_hg_hpc(self.rsun_meters, self.dsun, self.heliographic_latitude, self.heliographic_longitude, hg_longitude_deg_mesh, hg_latitude_deg_mesh, units='arcsec') axes.plot(x, y, color='white', linestyle='dotted', zorder=100) axes.set_ylim(self.yrange) axes.set_xlim(self.xrange) return axes def _validate(self): """Validates the meta-information associated with a Map. This function includes very basic validation checks which apply to all of the kinds of files that SunPy can read. 
Datasource-specific validation should be handled in the relevant file in the sunpy.map.sources package.""" if (self.dsun <= 0 or self.dsun >= 40 * constants.au): raise InvalidHeaderInformation("Invalid value for DSUN") def std(self, *args, **kwargs): """overide np.ndarray.std()""" return np.array(self, copy=False, subok=False).std(*args, **kwargs) def mean(self, *args, **kwargs): """overide np.ndarray.mean()""" return np.array(self, copy=False, subok=False).mean(*args, **kwargs) def min(self, *args, **kwargs): """overide np.ndarray.min()""" return np.array(self, copy=False, subok=False).min(*args, **kwargs) def max(self, *args, **kwargs): """overide np.ndarray.max()""" return np.array(self, copy=False, subok=False).max(*args, **kwargs) def data_to_pixel(self, value, dim): """Convert pixel-center data coordinates to pixel values""" #TODO: This function should be renamed. It is confusing as data # coordinates are in something like arcsec but this function just changes how you # count pixels if dim not in ['x', 'y']: raise ValueError("Invalid dimension. Must be one of 'x' or 'y'.") size = self.shape[dim == 'x'] # 1 if dim == 'x', 0 if dim == 'y'. return (value - self.center[dim]) / self.scale[dim] + ((size - 1) / 2.) def pixel_to_data(self, x=None, y=None): """Convert from pixel coordinates to data coordinates (e.g. arcsec)""" width = self.shape[1] height = self.shape[0] if (x is not None) & (x > width - 1): raise ValueError("X pixel value larger than image width (%s)." % width) if (x is not None) & (y > height - 1): raise ValueError("Y pixel value larger than image height (%s)." 
% height) if (x is not None) & (x < 0): raise ValueError("X pixel value cannot be less than 0.") if (x is not None) & (y < 0): raise ValueError("Y pixel value cannot be less than 0.") scale = np.array([self.scale.get('x'), self.scale.get('y')]) crpix = np.array( [self.reference_pixel.get('x'), self.reference_pixel.get('y')]) crval = np.array([ self.reference_coordinate.get('x'), self.reference_coordinate.get('y') ]) coordinate_system = [ self.coordinate_system.get('x'), self.coordinate_system.get('y') ] x, y = wcs.convert_pixel_to_data(width, height, scale[0], scale[1], crpix[0], crpix[1], crval[0], crval[1], coordinate_system[0], x=x, y=y) return x, y def get_header(self, original=False): """Returns an updated MapHeader instance""" header = self._original_header.copy() # If requested, return original header as-is if original: return header # Bit-depth # # 8 Character or unsigned binary integer # 16 16-bit twos-complement binary integer # 32 32-bit twos-complement binary integer # -32 IEEE single precision floating point # -64 IEEE double precision floating point # if not header.has_key('bitpix'): bitdepth = 8 * self.dtype.itemsize if self.dtype.kind == "f": bitdepth = -bitdepth header['bitpix'] = bitdepth # naxis header['naxis'] = self.ndim header['naxis1'] = self.shape[1] header['naxis2'] = self.shape[0] # dsun if header.has_key('dsun_obs'): header['dsun_obs'] = self.dsun # rsun_obs if header.has_key('rsun_obs'): header['rsun_obs'] = self.rsun_arcseconds elif header.has_key('solar_r'): header['solar_r'] = self.rsun_arcseconds elif header.has_key('radius'): header['radius'] = self.rsun_arcseconds # cdelt header['cdelt1'] = self.scale['x'] header['cdelt2'] = self.scale['y'] # crpix header['crval1'] = self.reference_coordinate['x'] header['crval2'] = self.reference_coordinate['y'] # crval header['crpix1'] = self.reference_pixel['x'] header['crpix2'] = self.reference_pixel['y'] return header def resample(self, dimensions, method='linear'): """Returns a new Map that 
has been resampled up or down

        Arbitrary resampling of the Map to new dimension sizes.

        Uses the same parameters and creates the same co-ordinate lookup points
        as IDL''s congrid routine, which apparently originally came from a
        VAX/VMS routine of the same name.

        Parameters
        ----------
        dimensions : tuple
            Dimensions that new Map should have.
            Note: the first argument corresponds to the 'x' axis and the second
            argument corresponds to the 'y' axis.
        method : {'neighbor' | 'nearest' | 'linear' | 'spline'}
            Method to use for resampling interpolation.
                * neighbor - Closest value from original data
                * nearest and linear - Uses n x 1-D interpolations using
                  scipy.interpolate.interp1d
                * spline - Uses ndimage.map_coordinates

        Returns
        -------
        out : Map
            A new Map which has been resampled to the desired dimensions.

        References
        ----------
        | http://www.scipy.org/Cookbook/Rebinning (Original source, 2011/11/19)
        """

        # Note: because the underlying ndarray is transposed in sense when
        #   compared to the Map, the ndarray is transposed, resampled, then
        #   transposed back
        # Note: "center" defaults to True in this function because data
        #   coordinates in a Map are at pixel centers

        # Make a copy of the original data and perform resample
        data = sunpy_image_resample(np.asarray(self).copy().T, dimensions,
                                    method, center=True)

        # Update image scale and number of pixels
        header = self._original_header.copy()

        # Note that 'x' and 'y' correspond to 1 and 0 in self.shape,
        # respectively
        scale_factor_x = (float(self.shape[1]) / dimensions[0])
        scale_factor_y = (float(self.shape[0]) / dimensions[1])

        # Create new map instance
        new_map = self.__class__(data.T, header)

        # Update metadata
        new_map.scale['x'] *= scale_factor_x
        new_map.scale['y'] *= scale_factor_y
        # (dimensions + 1) / 2. places the reference pixel at the image
        # centre — presumably following the FITS 1-based pixel convention;
        # TODO confirm.
        new_map.reference_pixel['x'] = (dimensions[0] + 1) / 2.
        new_map.reference_pixel['y'] = (dimensions[1] + 1) / 2. 
new_map.reference_coordinate['x'] = self.center['x']
        new_map.reference_coordinate['y'] = self.center['y']

        return new_map

    def superpixel(self, dimensions, method='sum'):
        """Returns a new map consisting of superpixels formed from the
        original data.  Useful for increasing signal to noise ratio in images.

        Parameters
        ----------
        dimensions : tuple
            One superpixel in the new map is equal to (dimension[0],
            dimension[1]) pixels of the original map
            Note: the first argument corresponds to the 'x' axis and the second
            argument corresponds to the 'y' axis.
        method : {'sum' | 'average'}
            What each superpixel represents compared to the original data
                * sum - add up the original data
                * average - average the sum over the number of original pixels

        Returns
        -------
        out : Map
            A new Map which has superpixels of the required size.

        References
        ----------
        | http://mail.scipy.org/pipermail/numpy-discussion/2010-July/051760.html
        """

        # Note: because the underlying ndarray is transposed in sense when
        #   compared to the Map, the ndarray is transposed, resampled, then
        #   transposed back
        # Note: "center" defaults to True in this function because data
        #   coordinates in a Map are at pixel centers

        # Make a copy of the original data and perform reshaping
        reshaped = reshape_image_to_4d_superpixel(np.asarray(self).copy().T,
                                                  dimensions)
        # Collapse each (dimensions[0] x dimensions[1]) cell of the 4-D view
        # into a single superpixel value.
        if method == 'sum':
            data = reshaped.sum(axis=3).sum(axis=1)
        elif method == 'average':
            data = ((reshaped.sum(axis=3).sum(axis=1)) /
                    np.float32(dimensions[0] * dimensions[1]))

        # Update image scale and number of pixels
        header = self._original_header.copy()

        # Note that 'x' and 'y' correspond to 1 and 0 in self.shape,
        # respectively
        # NOTE(review): this relies on Python 2 integer division; under
        # Python 3 these become floats — confirm if the file is ported.
        new_nx = self.shape[1] / dimensions[0]
        new_ny = self.shape[0] / dimensions[1]

        # Create new map instance
        new_map = self.__class__(data.T, header)

        # Update metadata
        new_map.scale['x'] = dimensions[0] * self.scale['x']
        new_map.scale['y'] = dimensions[1] * self.scale['y'] 
new_map.reference_pixel['x'] = (new_nx + 1) / 2. new_map.reference_pixel['y'] = (new_ny + 1) / 2. new_map.reference_coordinate['x'] = self.center['x'] new_map.reference_coordinate['y'] = self.center['y'] return new_map def rotate(self, angle, scale=1.0, rotation_centre=None, recentre=True, missing=0.0, interpolation='bicubic', interp_param=-0.5): """Returns a new rotated, rescaled and shifted map. Parameters --------- angle: float The angle to rotate the image by (radians) scale: float A scale factor for the image, default is no scaling rotation_centre: tuple The point in the image to rotate around (Axis of rotation). Default: Centre of the array recentre: bool, or array-like Move the centroid (axis of rotation) to the centre of the array or recentre coords. Default: True, recentre to the centre of the array. missing: float The numerical value to fill any missing points after rotation. Default: 0.0 interpolation: {'nearest' | 'bilinear' | 'spline' | 'bicubic'} Interpolation method to use in the transform. Spline uses the scipy.ndimage.interpolation.affline_transform routine. nearest, bilinear and bicubic all replicate the IDL rot() function. Default: 'bicubic' interp_par: Int or Float Optional parameter for controlling the interpolation. Spline interpolation requires an integer value between 1 and 5 for the degree of the spline fit. Default: 3 BiCubic interpolation requires a flaot value between -1 and 0. Default: 0.5 Other interpolation options ingore the argument. Returns ------- New rotated, rescaled, translated map Notes ----- Apart from interpolation='spline' all other options use a compiled C-API extension. If for some reason this is not compiled correctly this routine will fall back upon the scipy implementation of order = 3. 
For more infomation see: http://sunpy.readthedocs.org/en/latest/guide/troubleshooting.html#crotate-warning """ #Interpolation parameter Sanity assert interpolation in ['nearest', 'spline', 'bilinear', 'bicubic'] #Set defaults based on interpolation if interp_param is None: if interpolation is 'spline': interp_param = 3 elif interpolation is 'bicubic': interp_param = 0.5 else: interp_param = 0 #Default value for nearest or bilinear #Make sure recenter is a vector with shape (2,1) if not isinstance(recentre, bool): recentre = np.array(recentre).reshape(2, 1) #Define Size and centre of array centre = (np.array(self.shape) - 1) / 2.0 #If rotation_centre is not set (None or False), #set rotation_centre to the centre of the image. if rotation_centre is None: rotation_centre = centre else: #Else check rotation_centre is a vector with shape (2,1) rotation_centre = np.array(rotation_centre).reshape(2, 1) #Recentre to the rotation_centre if recentre is True if isinstance(recentre, bool): #if rentre is False then this will be (0,0) shift = np.array(rotation_centre) - np.array(centre) else: #Recentre to recentre vector otherwise shift = np.array(recentre) - np.array(centre) image = np.asarray(self).copy() #Calulate the parameters for the affline_transform c = np.cos(angle) s = np.sin(angle) mati = np.array([[c, s], [-s, c]]) / scale # res->orig centre = np.array([centre]).transpose() # the centre of rotn shift = np.array([shift]).transpose() # the shift kpos = centre - np.dot(mati, (centre + shift)) # kpos and mati are the two transform constants, kpos is a 2x2 array rsmat, offs = mati, np.squeeze((kpos[0, 0], kpos[1, 0])) if interpolation == 'spline': # This is the scipy call data = scipy.ndimage.interpolation.affine_transform( image, rsmat, offset=offs, order=interp_param, mode='constant', cval=missing) else: #Use C extension Package if not 'Crotate' in globals(): warnings.warn( """"The C extension sunpy.image.Crotate is not installed, falling back to the 
interpolation='spline' of order=3""", Warning) data = scipy.ndimage.interpolation.affine_transform( image, rsmat, offset=offs, order=3, mode='constant', cval=missing) #Set up call parameters depending on interp type. if interpolation == 'nearest': interp_type = Crotate.NEAREST elif interpolation == 'bilinear': interp_type = Crotate.BILINEAR elif interpolation == 'bicubic': interp_type = Crotate.BICUBIC #Make call to extension data = Crotate.affine_transform(image, rsmat, offset=offs, kernel=interp_type, cubic=interp_param, mode='constant', cval=missing) #Return a new map #Copy Header header = self._original_header.copy() # Create new map instance new_map = self.__class__(data, header) return new_map def save(self, filepath): """Saves the SunPy Map object to a file. Currently SunPy can only save files in the FITS format. In the future support will be added for saving to other formats. Parameters ---------- filepath : string Location to save file to. """ pyfits_header = self.get_header().as_pyfits_header() hdu = pyfits.PrimaryHDU(self, header=pyfits_header) hdulist = pyfits.HDUList([hdu]) hdulist.writeto(os.path.expanduser(filepath)) def submap(self, range_a, range_b, units="data"): """Returns a submap of the map with the specified range Parameters ---------- range_a : list The range of the Map to select across either the x axis. range_b : list The range of the Map to select across either the y axis. units : {'data' | 'pixels'}, optional The units for the supplied ranges. Returns ------- out : Map A new map instance is returned representing to specified sub-region Examples -------- >>> aia.submap([-5,5],[-5,5]) AIAMap([[ 341.3125, 266.5 , 329.375 , 330.5625, 298.875 ], [ 347.1875, 273.4375, 247.4375, 303.5 , 305.3125], [ 322.8125, 302.3125, 298.125 , 299. , 261.5 ], [ 334.875 , 289.75 , 269.25 , 256.375 , 242.3125], [ 273.125 , 241.75 , 248.8125, 263.0625, 249.0625]]) >>> aia.submap([0,5],[0,5], units='pixels') AIAMap([[ 0.3125, -0.0625, -0.125 , 0. , -0.375 ], [ 1. 
, 0.1875, -0.8125, 0.125 , 0.3125], [-1.1875, 0.375 , -0.5 , 0.25 , -0.4375], [-0.6875, -0.3125, 0.8125, 0.0625, 0.1875], [-0.875 , 0.25 , 0.1875, 0. , -0.6875]]) """ if units is "data": # Check edges (e.g. [:512,..] or [:,...]) if range_a[0] is None: range_a[0] = self.xrange[0] if range_a[1] is None: range_a[1] = self.xrange[1] if range_b[0] is None: range_b[0] = self.yrange[0] if range_b[1] is None: range_b[1] = self.yrange[1] #x_pixels = [self.data_to_pixel(elem, 'x') for elem in range_a] x_pixels = [ np.ceil(self.data_to_pixel(range_a[0], 'x')), np.floor(self.data_to_pixel(range_a[1], 'x')) + 1 ] #y_pixels = [self.data_to_pixel(elem, 'y') for elem in range_b] y_pixels = [ np.ceil(self.data_to_pixel(range_b[0], 'y')), np.floor(self.data_to_pixel(range_b[1], 'y')) + 1 ] elif units is "pixels": # Check edges if range_a[0] is None: range_a[0] = 0 if range_a[1] is None: range_a[1] = self.shape[1] if range_b[0] is None: range_b[0] = 0 if range_b[1] is None: range_b[1] = self.shape[0] x_pixels = range_a y_pixels = range_b else: raise ValueError("Invalid unit. Must be one of 'data' or 'pixels'") # Make a copy of the header with updated centering information header = self._original_header.copy() # Get ndarray representation of submap data = np.asarray(self)[y_pixels[0]:y_pixels[1], x_pixels[0]:x_pixels[1]] # Instantiate new instance and update metadata new_map = self.__class__(data.copy(), header) new_map.reference_pixel['x'] = self.reference_pixel['x'] - x_pixels[0] new_map.reference_pixel['y'] = self.reference_pixel['y'] - y_pixels[0] return new_map @toggle_pylab def plot(self, gamma=None, annotate=True, axes=None, **imshow_args): """ Plots the map object using matplotlib, in a method equivalent to plt.imshow() using nearest neighbour interpolation. Parameters ---------- gamma : float Gamma value to use for the color map annotate : bool If true, the data is plotted at it's natural scale; with title and axis labels. 
axes: matplotlib.axes object or None
            If provided the image will be plotted on the given axes. Else the
            current matplotlib axes will be used.

        **imshow_args : dict
            Any additional imshow arguments that should be used
            when plotting the image.

        Examples
        --------
        #Simple Plot with color bar
        plt.figure()
        aiamap.plot()
        plt.colorbar()

        #Add a limb line and grid
        aia.plot()
        aia.draw_limb()
        aia.draw_grid()
        """

        #Get current axes
        if not axes:
            axes = plt.gca()

        # Normal plot
        if annotate:
            axes.set_title("%s %s" % (self.name, self.date))

            # x-axis label
            if self.coordinate_system['x'] == 'HG':
                xlabel = 'Longitude [%s]' % self.units['x']
            else:
                xlabel = 'X-position [%s]' % self.units['x']

            # y-axis label
            if self.coordinate_system['y'] == 'HG':
                ylabel = 'Latitude [%s]' % self.units['y']
            else:
                ylabel = 'Y-position [%s]' % self.units['y']

            axes.set_xlabel(xlabel)
            axes.set_ylabel(ylabel)

        # Determine extent: maps the pixel grid onto data (e.g. arcsec)
        # coordinates so the axis ticks are physically meaningful.
        extent = self.xrange + self.yrange

        # Copy the colormap before mutating it so the shared class-level
        # cmap is not altered by a per-plot gamma.
        cmap = copy(self.cmap)
        if gamma is not None:
            cmap.set_gamma(gamma)

        #make imshow kwargs a dict
        kwargs = {'origin': 'lower',
                  'cmap': cmap,
                  'norm': self.norm(),
                  'extent': extent,
                  'interpolation': 'nearest'}
        kwargs.update(imshow_args)

        ret = axes.imshow(self, **kwargs)

        #Set current image (makes colorbar work)
        plt.sci(ret)

        return ret

    @toggle_pylab
    def peek(self, draw_limb=True, draw_grid=False, gamma=None,
             colorbar=True, basic_plot=False, **matplot_args):
        """Displays the map in a new figure

        Parameters
        ----------
        draw_limb : bool
            Whether the solar limb should be plotted.
        draw_grid : bool or number
            Whether solar meridians and parallels are plotted.
            If float then sets degree difference between parallels and
            meridians.
        gamma : float
            Gamma value to use for the color map
        colorbar : bool
            Whether to display a colorbar next to the plot
        basic_plot : bool
            If true, the data is plotted by itself at it's natural scale; no
            title, labels, or axes are shown.
        **matplot_args : dict
            Matplotlib Any additional imshow arguments that should be used
            when plotting the image.
""" # Create a figure and add title and axes figure = plt.figure(frameon=not basic_plot) # Basic plot if basic_plot: axes = plt.Axes(figure, [0., 0., 1., 1.]) axes.set_axis_off() figure.add_axes(axes) matplot_args.update({'annotate': False}) # Normal plot else: axes = figure.gca() im = self.plot(axes=axes, **matplot_args) if colorbar and not basic_plot: figure.colorbar(im) if draw_limb: self.draw_limb(axes=axes) if isinstance(draw_grid, bool): if draw_grid: self.draw_grid(axes=axes) elif isinstance(draw_grid, (int, long, float)): self.draw_grid(axes=axes, grid_spacing=draw_grid) else: raise TypeError("draw_grid should be bool, int, long or float") figure.show() return figure def norm(self): """Default normalization method. Not yet implemented.""" return None @classmethod def parse_file(cls, filepath): """Reads in a map file and returns a header and data array""" return read_file(filepath) @classmethod def read(cls, filepath): """Map class factory Attempts to determine the type of data associated with input and returns an instance of either the generic Map class or a subclass of Map such as AIAMap, EUVIMap, etc. Parameters ---------- filepath : string Path to a valid FITS or JPEG 2000 file of a type supported by SunPy Returns ------- out : Map Returns a Map instance for the particular type of data loaded. """ data, header = cls.parse_file(filepath) if cls.__name__ is not "Map": return cls(data, header) for cls in Map.__subclasses__(): if cls.is_datasource_for(header): return cls(data, header) return Map(data, header) @classmethod def read_header(cls, filepath): """Attempts to detect the datasource type and returns meta-information for that particular datasource.""" header = read_file_header(filepath) for cls in Map.__subclasses__(): if cls.is_datasource_for(header): properties = cls.get_properties(header) properties['header'] = header return properties