Example 1
    def test_mlt_convert_change(self):
        """Test that MLT changes with UT"""
        self.mlt_out = aacgmv2.convert_mlt(self.mlon_list, self.dtime)
        self.mlt_diff = self.mlt_out - aacgmv2.convert_mlt(self.mlon_list,
                                                           self.dtime2)

        np.testing.assert_allclose(self.mlt_diff, self.diff_comp, rtol=1.0e-4)
Example 2
    def to_AACGM(self, append_mlt=False, **kwargs):
        import aacgmv2 as aacgm
        from geospacelab.cs._aacgm import AACGM
        method_code = 'G2A'

        ut_type = type(self.ut)
        if ut_type is list:
            uts = np.array(self.ut)
        elif ut_type is np.ndarray:
            uts = self.ut
        lat_shape = self.coords.lat.shape
        lon_shape = self.coords.lon.shape
        if issubclass(self.ut.__class__, datetime.datetime):

            lat, lon, r = aacgm.convert_latlon_arr(
                in_lat=self.coords.lat.flatten(),
                in_lon=self.coords.lon.flatten(),
                height=self.coords.height.flatten(),
                dtime=self.ut,
                method_code=method_code)
        else:
            if uts.shape[0] != self.coords.lat.shape[0]:
                mylog.StreamLogger.error(
                    "Datetimes must have the same length as cs!")
                return
            lat = np.empty_like(self.coords.lat)
            lon = np.empty_like(self.coords.lon)
            r = np.empty_like(self.coords.lat)
            for ind_dt, dt in enumerate(uts.flatten()):
                # print(ind_dt, dt, cs.lat[ind_dt, 0])
                lat[ind_dt], lon[ind_dt], r[ind_dt] = aacgm.convert_latlon_arr(
                    in_lat=self.coords.lat[ind_dt],
                    in_lon=self.coords.lon[ind_dt],
                    height=self.coords.height[ind_dt],
                    dtime=dt,
                    method_code=method_code)
        cs_new = AACGM(coords={
            'lat': lat.reshape(lat_shape),
            'lon': lon.reshape(lon_shape),
            'r': r.reshape(lat_shape),
            'r_unit': 'R_E'
        },
                       ut=self.ut)
        if append_mlt:
            lon = lon.flatten()
            if issubclass(self.ut.__class__, datetime.datetime):
                mlt = aacgm.convert_mlt(lon, self.ut)
            else:
                lon = cs_new['lon']
                mlt = np.empty_like(lon)
                for ind_dt, dt in enumerate(self.ut.flatten()):
                    mlt[ind_dt] = aacgm.convert_mlt(lon[ind_dt], dt)
            cs_new['mlt'] = mlt.reshape(lon_shape)
        return cs_new
Example 3
def f4():
    plt.figure(figsize=(6.88,6.74))
    #geographic coordinates
    ax1, projection1 = gcc.create_map(
        2, 2, 1, 'pol', 90, 50, 0, coastlines=False,useLT=True,
        dlat=10, lonticklabel=(1, 1, 1, 1))
    ax1.plot([45,45],[50,90], 'k--',transform=ccrs.PlateCarree())
    ax1.plot([225,225],[50,90], 'k--',transform=ccrs.PlateCarree())
    ax1.plot([105,105],[50,90], 'k--',transform=ccrs.PlateCarree())
    ax1.plot([285,285],[50,90], 'k--',transform=ccrs.PlateCarree())
    ax1.scatter(
        0, 90, color='r', transform=ccrs.PlateCarree(), zorder=10,
        label='North Pole')
    ax1.text(0,1,'(a)', transform = plt.gca().transAxes)
    plt.legend(loc=[0.5,1.1])

    ax2, projection2 = gcc.create_map(
        2, 2, 2, 'pol', -50, -90, 0, coastlines=False,useLT=True,
        dlat=10, lonticklabel=(1, 1, 1, 1))
    ax2.scatter(
        0, -90, color='b', transform=ccrs.PlateCarree(),label='South Pole')
    ax2.text(0,1,'(b)', transform = plt.gca().transAxes)
    plt.legend(loc=[-0.1,1.1])

    #geomagnetic coordinates
    ax3, projection3 = gcc.create_map(
        2, 2, 3, 'pol', 90, 50, 0, coastlines=False,useLT=True,
        dlat=10, lonticklabel=(1, 1, 1, 1))
    mlatn,mlonn = convert(90,0,0,date=dt.date(2002,3,21))
    for k in range(24):
        mltn = convert_mlt(mlonn[0],dtime=dt.datetime(2003,3,21,k))
        ax3.scatter(mltn*15,mlatn[0],color='r',transform=ccrs.PlateCarree())
    ax3.scatter(180,75,s=50,c='k',marker='x',transform=ccrs.PlateCarree())
    ax3.text(0,1,'(c)', transform = plt.gca().transAxes)

    ax4, projection4 = gcc.create_map(
        2, 2, 4, 'pol', -50, -90, 0, coastlines=False,useLT=True,
        dlat=10, lonticklabel=(1, 1, 1, 1))
    mlats,mlons = convert(-90,0,0,date=dt.date(2002,3,21))
    for k in range(24):
        mlts = convert_mlt(mlons[0],dtime=dt.datetime(2003,3,21,k))
        ax4.scatter(mlts*15,mlats[0],color='b',transform=ccrs.PlateCarree())
    ax4.scatter(180,-75,s=50,c='k',marker='x',transform=ccrs.PlateCarree())
    ax4.text(0,1,'(d)', transform = plt.gca().transAxes)
    plt.savefig(
        '/Users/guod/Documents/Pole_Density_MLT_Change/Figures/'
        '000_Pole_Feature.pdf')
    return
Example 4
    def plot_heppner_maynard_boundary(cls,
                                      mlats: list,
                                      mlons: list,
                                      date: object,
                                      line_color: str = 'black',
                                      **kwargs):
        # TODO: No evaluation of the coordinate system is made! May need an if
        # in the plotting to plot in radians/geo etc.
        """
        Plots the position of the Heppner-Maynard Boundary

        Parameters
        ----------
            ax: object
                matplotlib axis object
            mlats: List[float]
                Magnetic Latitude in degrees
            mlons: List[float]
                Magnetic Longitude in radians
            date: datetime object
                Date from record
            line_color: str
                Color of the Heppner-Maynard boundary
                Default: black

        """
        # Shift mlon to MLT
        shifted_mlts = mlons[0] - \
            (aacgmv2.convert_mlt(mlons[0], date) * 15)
        shifted_lons = mlons - shifted_mlts
        mlon = np.radians(shifted_lons)

        plt.plot(mlon, mlats, c=line_color, zorder=4.0, **kwargs)
Example 5
    def test_mlt_convert_list_w_times(self):
        """Test MLT calculation for data and time arrays"""
        self.dtime = [self.dtime for dd in self.mlon_list]
        self.mlt_out = aacgmv2.convert_mlt(self.mlon_list,
                                           self.dtime,
                                           m2a=False)
        np.testing.assert_allclose(self.mlt_out, self.mlt_comp, rtol=1.0e-4)
Example 6
    def test_mlt_convert_single(self):
        """Test MLT calculation for a single value"""
        for i, mlon in enumerate(self.mlon_list):
            self.mlt_out = aacgmv2.convert_mlt(mlon, self.dtime, m2a=False)
            np.testing.assert_almost_equal(self.mlt_out,
                                           self.mlt_comp[i],
                                           decimal=4)
Example 7
    def test_inv_convert_mlt_arr(self):
        """Test MLT inversion for an array"""
        self.mlon_out = aacgmv2.convert_mlt(np.array(self.mlt_list),
                                            self.dtime,
                                            m2a=True)

        np.testing.assert_allclose(self.mlon_out, self.mlon_comp, rtol=1.0e-4)
Example 8
    def test_inv_convert_mlt_single(self):
        """Test MLT inversion for a single value"""
        for i, mlt in enumerate(self.mlt_list):
            self.mlon_out = aacgmv2.convert_mlt(mlt, self.dtime, m2a=True)
            np.testing.assert_almost_equal(self.mlon_out,
                                           self.mlon_comp[i],
                                           decimal=4)
Example 9
def get_flux_geo(dt):
    atypes = ['diff', 'mono', 'wave']
    jtype = 'electron energy flux'
    fluxgrid = None
    # Iterate over types
    for atype in atypes:
        print("Generating fluxes for {} type aurora".format(atype), end="\r")
        # create estimator
        estimator = ovation_prime.FluxEstimator(atype, jtype)
        mlatgrid, mltgrid, fg = estimator.get_flux_for_time(dt)
        # Create fluxgrid in the first iteration
        if fluxgrid is None:
            fluxgrid = np.zeros(fg.shape)
        # Add to flux
        fluxgrid += fg
    print("Converting to GEO coordinates", end="\r")
    # Convert mag local time to magnetic longitude
    mlongrid = aacgmv2.convert_mlt(mltgrid, dt, m2a=True)
    # Convert CGM to GEO
    # altitude of aurora ~ 300 km
    aur_z = 300
    lat_grid, lon_grid, r_grid = aacgmv2.convert_latlon_arr(
        mlatgrid,
        mlongrid,  #inputs
        aur_z,
        dt,
        u'A2G')  # aurora height, datetime, and conversion mode
    return lat_grid, lon_grid, fluxgrid
Example 10
def convert2MLT(lons: float, date: object, **kwargs):
    fan_shape = lons.shape
    # Work out shift due in MLT
    beam_corners_mlts = np.zeros((fan_shape[0], fan_shape[1]))
    mltshift = lons[0, 0] - (aacgmv2.convert_mlt(lons[0, 0], date) * 15)
    beam_corners_mlts = lons - mltshift
    return beam_corners_mlts
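
The convert2MLT function above works out a single shift from one reference corner and applies it to the whole AACGM longitude array. A minimal standalone sketch of that shift, assuming only that numpy and aacgmv2 are available and using made-up values:

import datetime as dt
import numpy as np
import aacgmv2

date = dt.datetime(2020, 1, 1, 12, 0)
aacgm_lons = np.array([[10.0, 20.0], [30.0, 40.0]])   # made-up AACGM longitudes (degrees)
# MLT of one reference corner, expressed back in degrees (1 h of MLT = 15 deg)
mltshift = aacgm_lons[0, 0] - aacgmv2.convert_mlt(aacgm_lons[0, 0], date) * 15
mlt_deg = aacgm_lons - mltshift       # whole grid shifted into MLT, still in degrees
mlt_hr = np.mod(mlt_deg / 15.0, 24)   # hours, wrapped into [0, 24)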
Example 11
def add_mlt_to_df(cell_corners_aacgm_lons, cell_corners_aacgm_lats, df):
    """

    Add magnetic local time ('mlt'), aacgm longitude ('lon'), and aacgm latitude ('lat') columns to a dataframe

    The first date is used to compute the MLT shift, which is then applied to the whole dataframe.
    While less accurate, this approach is much faster than computing MLT for each df row independently
    (see the short standalone sketch after this function).

    :param cell_corners_aacgm_lons: 2d numpy.ndarray: Longitudes of the cell corners in aacgm units
    :param cell_corners_aacgm_lats: 2d  numpy.ndarray: Latitudes of the cell corners in aacgm units
    :param df: pandas.DataFrame: The dataframe to which you want to add mlt
    :return: pandas.DataFrame: The input dataframe, except now with 'mlt', 'lon', and 'lat' columns
    """

    if len(df) <= 0:
        df['lon'], df['lat'], df['mlt'] = [], [], []
        return df

    fan_shape = cell_corners_aacgm_lons.shape

    # Compute cell centroids
    cell_centers_aacgm_lons = np.zeros(shape=(fan_shape[0], fan_shape[1]))
    cell_centers_aacgm_lats = np.zeros(shape=(fan_shape[0], fan_shape[1]))

    for gate_corner in range(fan_shape[0] - 1):
        for beam_corner in range(fan_shape[1] - 1):
            cent_lon, cent_lat = centroid([
                (cell_corners_aacgm_lons[gate_corner, beam_corner],
                 cell_corners_aacgm_lats[gate_corner, beam_corner]),
                (cell_corners_aacgm_lons[gate_corner + 1, beam_corner],
                 cell_corners_aacgm_lats[gate_corner + 1, beam_corner]),
                (cell_corners_aacgm_lons[gate_corner, beam_corner + 1],
                 cell_corners_aacgm_lats[gate_corner, beam_corner + 1]),
                (cell_corners_aacgm_lons[gate_corner + 1, beam_corner + 1],
                 cell_corners_aacgm_lats[gate_corner + 1, beam_corner + 1])
            ])
            cell_centers_aacgm_lons[gate_corner, beam_corner] = cent_lon
            cell_centers_aacgm_lats[gate_corner, beam_corner] = cent_lat

    aacgm_lons = []
    aacgm_lats = []
    dates = []

    #  Loop through the dataframe, and build up aacgm_lons and dates
    for i in range(len(df)):

        gate = df['slist'][i]
        beam = df['bmnum'][i]
        date = df['datetime'][i]

        aacgm_lons.append(cell_centers_aacgm_lons[gate, beam])
        aacgm_lats.append(cell_centers_aacgm_lats[gate, beam])
        dates.append(date)

    df['lon'] = aacgm_lons
    df['lat'] = aacgm_lats
    df['mlt'] = aacgmv2.convert_mlt(arr=aacgm_lons, dtime=dates, m2a=False)

    return df
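
The docstring of add_mlt_to_df above mentions a single-shift shortcut: compute the longitude-to-MLT offset once, at the first timestamp, and apply it to every row. A short hedged sketch of that idea, with hypothetical column names 'lon' and 'datetime' and made-up values; it is not part of the original module:

import datetime as dt
import aacgmv2
import numpy as np
import pandas as pd

def add_mlt_single_shift(df):
    # Hypothetical helper (not from the module above): compute the
    # AACGM-longitude-to-MLT offset once, at the first timestamp, then apply
    # it to every row instead of calling convert_mlt per row.
    shift = (df['lon'].iloc[0]
             - aacgmv2.convert_mlt(df['lon'].iloc[0], df['datetime'].iloc[0])[0] * 15)
    df['mlt'] = np.mod((df['lon'] - shift) / 15.0, 24.0)
    return df

# toy usage with made-up values
toy = pd.DataFrame({'lon': [10.0, 25.0, 40.0],
                    'datetime': [dt.datetime(2020, 1, 1, 0, 5)] * 3})
toy = add_mlt_single_shift(toy)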
Example 12
def calc_mlon_slow(df):
    # given the est bnd df, time get MLT from MLON
    #df["mlon"] = df.apply(lambda x: np.round( aacgmv2.convert_mlt(x["mag_gltc"],
    #                      x["datetime"], m2a=True), 1), axis=1)
    mlon = df.apply(lambda x: np.round(
        aacgmv2.convert_mlt(x["mag_gltc"], x["datetime"], m2a=True), 1),
                    axis=1)
    return mlon
Example 13
    def plot_radar_label(cls,
                         stid: int,
                         date: dt.datetime,
                         coords: Coords = Coords.AACGM_MLT,
                         projs: Projs = Projs.POLAR,
                         line_color: str = 'black',
                         transform: object = None,
                         **kwargs):
        """
        plots only a string at the position of a given radar station ID (stid)

        Parameters
        -----------
            stid: int
                Radar station ID
            date: datetime datetime object
                sets the datetime used to find the coordinates of the
                FOV
            line_color: str
                color of the text
                default: black

        Returns
        -------
            No variables returned
        """
        # Label text
        label_str = ' ' + SuperDARNRadars.radars[stid]\
                    .hardware_info.abbrev.upper()
        # Get location of radar
        lat = SuperDARNRadars.radars[stid].hardware_info.geographic.lat
        lon = SuperDARNRadars.radars[stid].hardware_info.geographic.lon

        # Convert to geomag coords
        if coords == Coords.AACGM_MLT or coords == Coords.AACGM:
            geomag_radar = aacgmv2.get_aacgm_coord(lat, lon, 250, date)
            lat = geomag_radar[0]
            lon = geomag_radar[1]
            if coords == Coords.AACGM_MLT:
                mltshift = geomag_radar[1] -\
                        (aacgmv2.convert_mlt(geomag_radar[1], date) * 15)
                lon = geomag_radar[1] - mltshift
        if projs == Projs.POLAR:
            lon = np.radians(lon)

        theta_text = lon
        # Shift in latitude (dependent on hemisphere)
        if SuperDARNRadars.radars[stid].hemisphere == Hemisphere.North:
            r_text = lat - 5
        else:
            r_text = lat + 5
        plt.text(theta_text,
                 r_text,
                 label_str,
                 ha='center',
                 transform=transform,
                 c=line_color)
        return
Example 14
    def test_mlt_convert_mlon_wrapping(self):
        """Test mlon wrapping"""
        self.mlt_out = aacgmv2.convert_mlt(np.array([270, -90, 1, 361]),
                                           self.dtime, m2a=False)

        np.testing.assert_almost_equal(self.mlt_out[0], self.mlt_out[1],
                                       decimal=6)
        np.testing.assert_almost_equal(self.mlt_out[2], self.mlt_out[3],
                                       decimal=6)
Example 15
    def test_inv_convert_mlt_wrapping(self):
        """Test MLT wrapping"""
        self.mlon_out = aacgmv2.convert_mlt(np.array([1, 25, -1, 23]),
                                            self.dtime, m2a=True)

        np.testing.assert_almost_equal(self.mlon_out[0], self.mlon_out[1],
                                       decimal=6)
        np.testing.assert_almost_equal(self.mlon_out[2], self.mlon_out[3],
                                       decimal=6)
Example 16
def convert_cdmag_gsm(row, frm="CDMAG", to="GSM"):
    """
    All the data in CDMAG from the web database
    are converted GSM coordinates
    """
    a = [row["CDMAG_R"], row["CDMAG_MLAT"], row["CDMAG_MLON"]]
    x = magcoords.coordTrans(a, row["epoch"], frm, to)
    row["R"], row["MLAT"], row["MLON"] = x[0], x[1], x[2]
    row["MLT"] = aacgmv2.convert_mlt([x[2]], row["epoch"], m2a=False)[0]
    return row
Example 17
    def solar_conductance(self, dt, mlats, mlts, return_f107=False):
        """
        Estimate the solar conductance using methods from:
            Cousins, E. D. P., T. Matsuo, and A. D. Richmond (2015), Mapping
            high-latitude ionospheric electrodynamics with SuperDARN and AMPERE

            --which cites--

            Asgeir Brekke, Joran Moen, Observations of high latitude ionospheric conductances

            This may not be good for the SZA in the southern hemisphere (not
            certain). Going to use the absolute value of latitude because
            that's what's done in the Cousins IDL code.
        """
        # Find the closest hourly f107 value
        # to the current time to specify the conductance
        f107 = ovation_utilities.get_daily_f107(dt)
        if hasattr(self, '_f107'):
            log.warning(
                ('Warning: Overriding real F107 {0} '.format(f107) +
                 'with secret instance property _f107 {0}; '.format(self._f107) +
                 'this is for debugging and will not ' +
                 'produce accurate results for a particular date.'))
            f107 = self._f107

        # print "F10.7 = %f" % (f107)

        # Convert from magnetic to geocentric using the AACGMv2 python library
        flatmlats, flatmlts = mlats.flatten(), mlts.flatten()
        flatmlons = aacgmv2.convert_mlt(flatmlts, dt, m2a=True)
        try:
            glats, glons = aacgmv2.convert(flatmlats,
                                           flatmlons,
                                           110. * np.ones_like(flatmlats),
                                           date=dt,
                                           a2g=True,
                                           geocentric=False)
        except AttributeError:
            # convert method was deprecated
            glats, glons, r = aacgmv2.convert_latlon_arr(flatmlats,
                                                         flatmlons,
                                                         110.,
                                                         dt,
                                                         method_code='A2G')

        sigp, sigh = brekke_moen_solar_conductance(dt, glats, glons, f107)

        sigp_unflat = sigp.reshape(mlats.shape)
        sigh_unflat = sigh.reshape(mlats.shape)

        if return_f107:
            return sigp_unflat, sigh_unflat, f107
        else:
            return sigp_unflat, sigh_unflat
Example 18
def calc_mlon(df):
    # given the est bnd df, time get MLT from MLON
    groups = df.groupby("datetime")
    mlon = np.zeros(df.shape[0])
    mlon.fill(np.nan)
    sidx = 0
    for name, g in groups:
        eidx = sidx + g.shape[0]
        mlon[sidx:eidx] = np.round(
            aacgmv2.convert_mlt(g["mag_gltc"].values,
                                pd.to_datetime(name),
                                m2a=True), 1)
        sidx = eidx
    return mlon
Example 19
def fitted_vecs(coeffs, mlat, mlon, dtime, minlat=50):
    ut = (dtime - dtime.replace(hour=0, minute=0, second=0,
                                microsecond=0)).total_seconds()
    rotated_coeffs = sdarn_rotate_coeffs(coeffs, ut)
    mlts = aacgmv2.convert_mlt(mlon, dtime)

    azimuths = []
    magnitudes = []

    for i in range(len(mlon)):
        azi, mag = sdarn_get_fitted(rotated_coeffs, minlat, mlat[i], mlts[i])
        azimuths.append(azi)
        magnitudes.append(mag)

    return azimuths, magnitudes
Example 20
    def convert_geo_to_aacgm(self):
        #    aalat,aalon, aar =      \
        #            aacgm.wrapper.convert_latlon_arr(lat, lon, alt, dt, code='G2A')
        lat_in = self.variables['SC_GEO_LAT']
        lon_in = self.variables['SC_GEO_LON']
        alt_in = self.variables['SC_GEO_ALT']
        date0 = self.dates[0]
        dts = self.variables['SC_DATETIME']
        aalat, aalon, aar = \
            aacgmv2.convert_latlon_arr(lat_in.flatten(), lon_in.flatten(), alt_in.flatten(), date0, code='G2A')
        mlt = []
        arr = aalon.flatten()
        for ind, dt in enumerate(dts.flatten()):
            mlt.append(aacgmv2.convert_mlt(arr[ind], dt, m2a=False))
        datashape = dts.shape
        self.variables['SC_AACGM_LAT'] = aalat.reshape(datashape)
        self.variables['SC_AACGM_LON'] = aalon.reshape(datashape)
        self.variables['SC_AACGM_R'] = aar.reshape(datashape)
        self.variables['SC_AACGM_MLT'] = np.array(mlt).reshape(datashape)
Example 21
def sdarn_get_fitted_AACGM(coeffs, hmb_lat, mag_lat, mag_lon, ut, dtime):
    """
    Calculate fitted vectors azimuth and magnitude using AACGMv2 MLT values
    """

    # get MLT
    mag_LT = aacgmv2.convert_mlt(mag_lon, dtime)[0]

    # convert coeffs to MLT
    new_coeffs = sdarn_rotate_coeffs(coeffs, ut)

    # get meridional and zonal plasma drift velocity components
    vmeri, vzone = sdarn_get_vel(new_coeffs, hmb_lat, mag_lat, mag_LT)

    # Fitted azimuth and magnitude
    fitv_azi = np.degrees(np.arctan2(vzone, vmeri))
    fitv_mag = np.sqrt(vzone**2 + vmeri**2)

    return fitv_azi, fitv_mag
Example 22
    def plot_radar_position(cls,
                            stid: int,
                            date: dt.datetime,
                            transform: object = None,
                            coords: Coords = Coords.AACGM_MLT,
                            projs: Projs = Projs.POLAR,
                            line_color: str = 'black',
                            **kwargs):
        """
        plots only a dot at the position of a given radar station ID (stid)

        Parameters
        -----------
            stid: int
                Radar station ID
            date: datetime datetime object
                sets the datetime used to find the coordinates of the
                FOV
            line_color: str
                color of the dot
                default: black

        Returns
        -------
            No variables returned
        """
        # Get location of radar
        lat = SuperDARNRadars.radars[stid].hardware_info.geographic.lat
        lon = SuperDARNRadars.radars[stid].hardware_info.geographic.lon
        # Convert to geomag coords
        if coords == Coords.AACGM_MLT or coords == Coords.AACGM:
            geomag_radar = aacgmv2.get_aacgm_coord(lat, lon, 250, date)
            lat = geomag_radar[0]
            lon = geomag_radar[1]
            if coords == Coords.AACGM_MLT:
                mltshift = geomag_radar[1] -\
                        (aacgmv2.convert_mlt(geomag_radar[1], date) * 15)
                lon = geomag_radar[1] - mltshift
        if projs == Projs.POLAR:
            lon = np.radians(lon)
        # Plot a dot at the radar site
        plt.scatter(lon, lat, c=line_color, s=5, transform=transform)
        return
Example 23
def make_aurora_cube_multi(ts,ec,k):
    '''
    make the aurora flux image cubes
    multiprocessing version
    - ts is a single datetime object passed into this function
    - ec is the averaged Newell coupling input
    - k is the counter for the frames
    for debugging, to make one frame use: >> make_aurora_cube_multi(tsm[0],ecm[0],0)
    '''
    
    print('Frame number and time:', k, '  ',ts)
    
    #################  (2a) get fluxes
        
    mlatN, mltN, fluxNd=de.get_flux_for_time(ts,ec)
    mlatN, mltN, fluxNm=me.get_flux_for_time(ts,ec)
    fluxN=fluxNd+fluxNm #+fluxNw
    #print(ts), print(ec), print(k), print('....')
    
    ################  (2b) coordinate conversion magnetic to geographic 
    #Coordinate conversion MLT to AACGM mlon/lat to geographic coordinates
    mlonN_1D_small=aacgmv2.convert_mlt(mltN[0],ts,m2a=True)
    mlonN_1D=np.tile(mlonN_1D_small,mlatN.shape[0])
    mlatN_1D=np.squeeze(mlatN.reshape(np.size(mltN),1))
    (glatN_1D, glonN_1D, galtN) = aacgmv2.convert_latlon_arr(mlatN_1D,mlonN_1D, 100,ts, method_code="A2G") #**check 100 km

    ##############  (2c) interpolate to world map 
    geo_2D=np.vstack((glatN_1D,glonN_1D)).T      #stack 2 (7680,) arrays to a single 7680,2 arrays, .T is needed
    fluxN_1D=fluxN.reshape(7680,1)   #also change flux values to 1D array

    #make a world map grid in latitude 512 pixels, longitude 1024 pixel like NOAA
    wx,wy= np.mgrid[-90:90:180/512,-180:180:360/1024]
    aimg=  np.squeeze(scipy.interpolate.griddata(geo_2D, fluxN_1D, (wx, wy), method='linear',fill_value=0))
    aimg = scipy.ndimage.gaussian_filter(aimg,sigma=(5,7),mode='wrap') #wrap means wrapping at the 180 degree edge
      
    #Array variable to be used by all processes
    ovation_img_multi[512*1024*k:512*1024*(k+1)]=aimg.reshape(512*1024)
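
The chain in step (2b) above (MLT grid -> AACGM magnetic longitude -> geographic coordinates) is easier to see on a small made-up grid; a hedged sketch assuming only numpy and aacgmv2 are available:

import datetime as dt
import numpy as np
import aacgmv2

ts = dt.datetime(2020, 3, 1, 6, 30)
mlat_grid, mlt_grid = np.meshgrid(np.arange(60.0, 80.0, 5.0),
                                  np.arange(0.0, 24.0, 6.0), indexing='ij')
# MLT (hours) -> AACGM magnetic longitude (degrees) at this instant
mlon_flat = aacgmv2.convert_mlt(mlt_grid.flatten(), ts, m2a=True)
# AACGM magnetic -> geographic at an assumed 100 km height
glat, glon, _ = aacgmv2.convert_latlon_arr(mlat_grid.flatten(), mlon_flat,
                                           100, ts, method_code='A2G')
glat = np.asarray(glat).reshape(mlat_grid.shape)
glon = np.asarray(glon).reshape(mlat_grid.shape)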
Example 24
def main():
    """Entry point for the script"""

    desc = 'Converts between geographical coordinates, AACGM-v2, and MLT'
    parser = argparse.ArgumentParser(description=desc)

    desc = 'for help, run %(prog)s SUBCOMMAND -h'
    subparsers = parser.add_subparsers(title='Subcommands',
                                       prog='aacgmv2',
                                       dest='subcommand',
                                       description=desc)
    subparsers.required = True

    desc = 'convert to/from geomagnetic coordinates. Input file must have lines '
    desc += 'of the form "LAT LON ALT".'
    parser_convert = subparsers.add_parser('convert', help=(desc))

    desc = 'convert between magnetic local time (MLT) and AACGM-v2 longitude. '
    desc += 'Input file must have a single number on each line.'
    parser_convert_mlt = subparsers.add_parser('convert_mlt', help=(desc))

    desc = 'input file (stdin if none specified)'
    for pp in [parser_convert, parser_convert_mlt]:
        pp.add_argument('-i',
                        '--input',
                        dest='file_in',
                        metavar='FILE_IN',
                        type=argparse.FileType('r'),
                        default=STDIN,
                        help=desc)
        pp.add_argument('-o',
                        '--output',
                        dest='file_out',
                        metavar='FILE_OUT',
                        type=argparse.FileType('wb'),
                        default=STDOUT,
                        help='output file (stdout if none specified)')

    desc = 'date for magnetic field model (1900-2020, default: today)'
    parser_convert.add_argument('-d',
                                '--date',
                                dest='date',
                                metavar='YYYYMMDD',
                                help=desc)

    desc = 'invert - convert AACGM to geographic instead of geographic to AACGM'
    parser_convert.add_argument('-v',
                                '--a2g',
                                dest='a2g',
                                action='store_true',
                                default=False,
                                help=desc)

    desc = 'use field-line tracing instead of coefficients'
    parser_convert.add_argument('-t',
                                '--trace',
                                dest='trace',
                                action='store_true',
                                default=False,
                                help=desc)

    desc = 'automatically use field-line tracing above 2000 km'
    parser_convert.add_argument('-a',
                                '--allowtrace',
                                dest='allowtrace',
                                action='store_true',
                                default=False,
                                help=desc)

    desc = 'allow use of coefficients above 2000 km (bad idea!)'
    parser_convert.add_argument('-b',
                                '--badidea',
                                dest='badidea',
                                action='store_true',
                                default=False,
                                help=desc)

    desc = 'assume inputs are geocentric with Earth radius 6371.2 km'
    parser_convert.add_argument('-g',
                                '--geocentric',
                                dest='geocentric',
                                action='store_true',
                                default=False,
                                help=desc)

    parser_convert_mlt.add_argument('datetime',
                                    metavar='YYYYMMDDHHMMSS',
                                    help='date and time for conversion')

    desc = 'invert - convert MLT to AACGM longitude instead of AACGM longitude'
    desc += ' to MLT'
    parser_convert_mlt.add_argument('-v',
                                    '--m2a',
                                    dest='m2a',
                                    action='store_true',
                                    default=False,
                                    help=desc)

    args = parser.parse_args()
    array = np.loadtxt(args.file_in, ndmin=2)

    if args.subcommand == 'convert':
        date = dt.date.today() if args.date is None else \
               dt.datetime.strptime(args.date, '%Y%m%d')
        code = aacgmv2.convert_bool_to_bit(a2g=args.a2g,
                                           trace=args.trace,
                                           allowtrace=args.allowtrace,
                                           badidea=args.badidea,
                                           geocentric=args.geocentric)
        lats, lons, alts = aacgmv2.convert_latlon_arr(array[:, 0],
                                                      array[:, 1],
                                                      array[:, 2],
                                                      dtime=date,
                                                      method_code=code)

        np.savetxt(args.file_out,
                   np.column_stack((lats, lons, alts)),
                   fmt='%.8f')
    elif args.subcommand == 'convert_mlt':
        dtime = dt.datetime.strptime(args.datetime, '%Y%m%d%H%M%S')
        out = np.array(aacgmv2.convert_mlt(array[:, 0], dtime, m2a=args.m2a))

        if len(out.shape) == 0:
            out = np.array([out])

        np.savetxt(args.file_out, out, fmt='%.8f')
Example 25
def test_MLT_a2m():
    mlt = aacgmv2.convert_mlt([1, 12, 23], dt.datetime(2015, 2, 24, 14, 0, 15))
    np.testing.assert_allclose(mlt, [9.057565, 9.790899, 10.524232], rtol=1e-6)
Example 26
    def plot_grid(cls,
                  dmap_data: List[dict],
                  record: int = 0,
                  start_time: dt.datetime = None,
                  time_delta: int = 1,
                  ax=None,
                  parameter: str = 'vel',
                  cmap: str = None,
                  zmin: int = None,
                  zmax: int = None,
                  colorbar: bool = True,
                  colorbar_label: str = '',
                  title: str = '',
                  len_factor: float = 150.0,
                  ref_vector: int = 300,
                  **kwargs):
        """
        Plots a radar's gridded vectors from a GRID file

        Parameters
        -----------
            dmap_data: List[dict]
                Named list of dictionaries obtained from SDarn_read
            record: int
                record number to plot
                default: 0
            start_time: datetime.datetime
                datetime object as the start time of the record to plot
                if none then record will be used
                default: none
            time_delta: int
                How close the start_time has to be to the start time of the record
                in minutes
                default: 1
            ax: matplotlib.pyplot axis
                Pre-defined axis object to pass in, must currently
                be polar projection
                Default: Generates a polar projection for the user
                with MLT/latitude labels
            parameter: str
                Key name indicating which parameter to plot.
                Default: vel (Velocity). Alternatives: 'pwr', 'wdt'
            cmap: matplotlib.cm
                matplotlib colour map
                https://matplotlib.org/tutorials/colors/colormaps.html
                Default: Official pyDARN colour map for given parameter
            zmin: int
                The minimum parameter value for coloring
                Default: {'pwr': [0], 'vel': [0], 'wdt': [0]}
            zmax: int
                The maximum parameter value for coloring
                Default: {'pwr': [50], 'vel': [1000], 'wdt': [250]}
            colorbar: bool
                Draw a colourbar if True
                Default: True
            colorbar_label: str
                The label that appears next to the colour bar.
                Requires colorbar to be true
                Default: ''
            title: str
                Adds a title to the plot. If no title is specified,
                one will be provided
                Default: ''
            len_factor: float
                Normalisation factor for the vectors, to control size on plot
                Larger number means smaller vectors on plot
                Default: 150.0
            ref_vector: int
                Velocity value to be used for the reference vector, in m/s
                Default: 300
            kwargs: key=value
                uses the parameters for plot_fov and projections.axis
        See Also
        --------
        plot_fov - plots the field of view found in fan.py

        Returns
        -----------
        If parameter is 'vel':
        thetas - List of gridded data point magnetic local times (degrees)
        end_thetas - List of magnetic local time end points used for vector
                     plotting (degrees)
        rs - List of gridded data point radii (AACGM latitude)
        end_rs - List of radius end points for vector plotting (AACGM latitude)
        data - List of magnitudes of line-of-sight velocity
        azm_v -  List of azimuths of line-of-sight velocity
        else:
        thetas - List of gridded data point magnetic local times (degrees)
        rs - List of gridded data point radii (AACGM latitude)
        data - List of data magnitudes plotted, for parameter chosen
        """
        # Short hand for the parameters in GRID files
        if parameter == 'vel' or parameter == 'pwr' or parameter == 'wdt':
            parameter = "vector.{param}.median".format(param=parameter)

        # Find the record corresponding to the start time
        if start_time is not None:
            for record in range(len(dmap_data)):
                date = dt.datetime(dmap_data[record]['start.year'],
                                   dmap_data[record]['start.month'],
                                   dmap_data[record]['start.day'],
                                   dmap_data[record]['start.hour'],
                                   dmap_data[record]['start.minute'])
                time_diff = date - start_time
                if time_diff.seconds / 60 <= time_delta:
                    break
            if time_diff.seconds / 60 > time_delta:
                raise plot_exceptions.NoDataFoundError(parameter,
                                                       start_time=start_time)
        else:
            record = 0
            date = dt.datetime(dmap_data[record]['start.year'],
                               dmap_data[record]['start.month'],
                               dmap_data[record]['start.day'],
                               dmap_data[record]['start.hour'],
                               dmap_data[record]['start.minute'])

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for stid in dmap_data[record]['stid']:
                _, aacgm_lons, ax, _ =\
                        Fan.plot_fov(stid, date,
                                     ax=ax, **kwargs)
                try:
                    data_lons = dmap_data[record]['vector.mlon']
                    data_lats = dmap_data[record]['vector.mlat']
                except KeyError:
                    raise plot_exceptions.PartialRecordsError('vector.mlon')

                # Hold the beam positions
                shifted_mlts = aacgm_lons[0, 0] - \
                    (aacgmv2.convert_mlt(aacgm_lons[0, 0], date) * 15)
                shifted_lons = data_lons - shifted_mlts
                thetas = np.radians(shifted_lons)
                rs = data_lats

                # Colour table and max value selection depending on
                # parameter plotted Load defaults if none given
                if cmap is None:
                    cmap = {
                        'vector.pwr.median': 'plasma',
                        'vector.vel.median': 'plasma_r',
                        'vector.wdt.median': PyDARNColormaps.PYDARN_VIRIDIS
                    }
                    cmap = plt.cm.get_cmap(cmap[parameter])

                # Setting zmin and zmax
                defaultzminmax = {
                    'vector.pwr.median': [0, 50],
                    'vector.vel.median': [0, 1000],
                    'vector.wdt.median': [0, 250]
                }
                if zmin is None:
                    zmin = defaultzminmax[parameter][0]
                if zmax is None:
                    zmax = defaultzminmax[parameter][1]

                norm = colors.Normalize
                norm = norm(zmin, zmax)

                # check to make sure the parameter is present in the file
                # this may not be the case for wdt and pwr as you need -xtd
                # option in make_grid
                try:
                    data = dmap_data[record][parameter]
                except KeyError:
                    raise plot_exceptions.UnknownParameterError(parameter,
                                                                grid=True)
                # Plot the magnitude of the parameter
                ax.scatter(thetas,
                           rs,
                           c=data,
                           s=2.0,
                           vmin=zmin,
                           vmax=zmax,
                           zorder=5,
                           cmap=cmap)

                # If the parameter is velocity then plot the LOS vectors
                if parameter == "vector.vel.median":

                    # Get the azimuths from the data
                    azm_v = dmap_data[record]['vector.kvect']

                    # Number of data points
                    num_pts = range(len(data))

                    # Angle to "rotate" each vector by to get into same
                    # reference frame. Controlled by longitude, or "mltitude"
                    alpha = thetas

                    # Convert initial positions to Cartesian
                    start_pos_x = (90 - rs) * np.cos(thetas)
                    start_pos_y = (90 - rs) * np.sin(thetas)

                    # Resolve LOS vector in x and y directions,
                    # with respect to mag pole
                    # Gives zonal and meridional components of LOS vector
                    los_x = -data * np.cos(np.radians(-azm_v))
                    los_y = -data * np.sin(np.radians(-azm_v))

                    # Rotate each vector into same reference frame
                    # following vector rotation matrix
                    # https://en.wikipedia.org/wiki/Rotation_matrix
                    vec_x = (los_x * np.cos(alpha)) - (los_y * np.sin(alpha))
                    vec_y = (los_x * np.sin(alpha)) + (los_y * np.cos(alpha))

                    # New vector end points, in Cartesian
                    end_pos_x = start_pos_x + (vec_x / len_factor)
                    end_pos_y = start_pos_y + (vec_y / len_factor)

                    # Convert back to polar for plotting
                    end_rs = 90 - (np.sqrt(end_pos_x**2 + end_pos_y**2))
                    end_thetas = np.arctan2(end_pos_y, end_pos_x)

                    # Plot the vectors
                    for i in num_pts:
                        plt.plot([thetas[i], end_thetas[i]],
                                 [rs[i], end_rs[i]],
                                 c=cmap(norm(data[i])),
                                 linewidth=0.5)

                # TODO: Add a velocity reference vector

        if colorbar is True:
            mappable = cm.ScalarMappable(norm=norm, cmap=cmap)
            locator = ticker.MaxNLocator(symmetric=True,
                                         min_n_ticks=3,
                                         integer=True,
                                         nbins='auto')
            ticks = locator.tick_values(vmin=zmin, vmax=zmax)

            cb = ax.figure.colorbar(mappable,
                                    ax=ax,
                                    extend='both',
                                    ticks=ticks)

            if colorbar_label != '':
                cb.set_label(colorbar_label)

        if title == '':
            title = "{year}-{month}-{day} {start_hour}:{start_minute} -"\
                " {end_hour}:{end_minute}"\
                    "".format(year=date.year,
                              month=str(date.month).zfill(2),
                              day=str(date.day).zfill(2),
                              start_hour=str(date.hour).zfill(2),
                              start_minute=str(date.minute).zfill(2),
                              end_hour=str(dmap_data[record]['end.hour']).
                              zfill(2),
                              end_minute=str(dmap_data[record]['end.minute']).
                              zfill(2))
        plt.title(title)
        if parameter == 'vector.vel.median':
            return thetas, end_thetas, rs, end_rs, data, azm_v
        return thetas, rs, data
Example 27
def test_MLT_forward_backward():
    mlon = aacgmv2.convert_mlt(12, dtObj, m2a=True)
    mlt = aacgmv2.convert_mlt(mlon, dtObj)
    np.testing.assert_allclose(mlt, 12)
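
The forward/backward round trip above also holds element-wise for arrays. A minimal sketch with arbitrary values, assuming only numpy and aacgmv2:

import datetime as dt
import numpy as np
import aacgmv2

dtime = dt.datetime(2015, 2, 24, 14, 0, 15)
mlt_in = np.array([3.0, 9.0, 15.0, 21.0])
mlon = aacgmv2.convert_mlt(mlt_in, dtime, m2a=True)      # MLT (hours) -> AACGM longitude (deg)
mlt_back = aacgmv2.convert_mlt(mlon, dtime, m2a=False)   # AACGM longitude -> MLT (hours)
np.testing.assert_allclose(np.mod(mlt_back, 24.0), mlt_in, atol=1e-6)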
Example 28
 #     print cDtStr
 # dateStrArr.append(cDtStr)
 fName = fitDir + "bnd-coeffs" + yr + "-" +\
             mt + "-" + dt + ".txt"
 coeffDF = pandas.read_csv(fName, delim_whitespace=True,\
                                 header=None, names=coeffCols,\
                             infer_datetime_format=True,\
                             parse_dates=["trghPredTime"])
 # need to estimate location of trough between the MLT range where
 # we could calculate trough bnds. Get that range first!!!
 currBndDF = finBndDF[finBndDF["date"] == gd]
 #     print currBndDF["mlon"].values
 #     nMlonStart = numpy.min( currBndDF["mlon"].values )
 #     nMlonEnd = numpy.max( currBndDF["mlon"].values )
 for cMlon in currBndDF["mlon"].values:
     cpMlt = round( convert_mlt( cMlon,\
                                 ts , m2a=False ) )
     if cpMlt >= 12:
         nMlt = cpMlt - 24.
     else:
         nMlt = cpMlt
     selCoeffDF = coeffDF[coeffDF["trghPredTime"] == ts]
     selBndDF = currBndDF[currBndDF["mlon"] == cMlon]
     minTrghParams = selCoeffDF[ [ "a0MinTrgh", "c1MinTrgh",\
                              "s1MinTrgh", "phiC1MinTrgh",\
                              "phiS1MinTrgh" ] ].values[0]
     eqBndParams = selCoeffDF[ [ "a0EquBnd", "c1EquBnd",\
                            "s1EquBnd", "phiC1EquBnd",\
                            "phiS1EquBnd" ] ].values[0]
     polBndParams = selCoeffDF[ [ "a0PolBnd", "c1PolBnd",\
                             "s1PolBnd", "phiC1PolBnd",\
                             "phiS1PolBnd" ] ].values[0]
Example 29
    def save_to_nc(self, file_path):
        if not file_path.is_file():
            raise FileNotFoundError
        with open(file_path, 'r') as f:
            text = f.read()

            # results = re.findall(
            #     r'^\s*(\d+)\s*\[(\d+),(\d+)]\s*([-\d.]+)\s*' +
            #     r'([-\d.]+)\s*([-\d.]+)\s*([-\d.]+)\s*([-\d.]+)\s*([-\d.]+)\s*([-\d.]+)\s*' +
            #     r'([\S]+)',
            #     text,
            #     re.M
            # )
            results = re.findall(
                r'^\s*(\d+)\s*\[(\d+),(\d+)]\s*([\S]+)\s*([\S]+)\s*([\S]+)\s*([\S]+)\s*([\S]+)\s*([\S]+)\s*([\S]+)\s*([\S]+)',
                text,
                re.M
            )
            results = list(zip(*results))
            nlat = 40
            nlon = 180
            ntime = len(results[0]) / nlon / nlat
            if ntime != int(ntime):
                raise ValueError
            ntime = int(ntime)
            mlat_arr = np.array(results[3]).reshape([ntime, nlat, nlon], order='C').transpose((0, 2, 1)).astype(np.float32)
            mlon_arr = np.array(results[4]).reshape([ntime, nlat, nlon], order='C').transpose((0, 2, 1)).astype(np.float32)
            EF_N_arr = np.array(results[5]).reshape([ntime, nlat, nlon], order='C').transpose((0, 2, 1)).astype(np.float32)
            EF_E_arr = np.array(results[6]).reshape([ntime, nlat, nlon], order='C').transpose((0, 2, 1)).astype(np.float32)
            v_N_arr = np.array(results[7]).reshape([ntime, nlat, nlon], order='C').transpose((0, 2, 1)).astype(np.float32)
            v_E_arr = np.array(results[8]).reshape([ntime, nlat, nlon], order='C').transpose((0, 2, 1)).astype(np.float32)
            phi_arr = np.array(results[9]).reshape([ntime, nlat, nlon], order='C').transpose((0, 2, 1)).astype(np.float32)

            dts = np.array(results[10])[::nlon * nlat]
            dts = [datetime.datetime.strptime(dtstr, "%Y-%m-%d/%H:%M:%S") for dtstr in dts]
            time_array = np.array(cftime.date2num(dts, units='seconds since 1970-01-01 00:00:00.0'))

            import aacgmv2
            mlt_arr = np.empty_like(mlat_arr)
            for i in range(ntime):
                mlt1 = aacgmv2.convert_mlt(mlon_arr[i].flatten(), dts[i]).reshape((nlon, nlat))
                mlt_arr[i, ::] = mlt1[::]

            fp = pathlib.Path(file_path.with_suffix('.nc'))
            fp.parent.resolve().mkdir(parents=True, exist_ok=True)
            fnc = nc.Dataset(fp, 'w')
            fnc.createDimension('UNIX_TIME', ntime)
            fnc.createDimension('MLAT', nlat)
            fnc.createDimension('MLON', nlon)

            fnc.title = "SuperDARN Potential maps"

            time = fnc.createVariable('UNIX_TIME', np.float64, ('UNIX_TIME',))
            time.units = 'seconds since 1970-01-01 00:00:00.0'
            time[::] = time_array[::]

            mlat = fnc.createVariable('MLAT', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            mlat[::] = mlat_arr[::]
            mlon = fnc.createVariable('MLON', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            mlon[::] = mlon_arr[::]
            mlt = fnc.createVariable('MLT', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            mlt[::] = mlt_arr[::]
            EF_N = fnc.createVariable('E_N', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            EF_N[::] = EF_N_arr[::]
            EF_E = fnc.createVariable('E_E', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            EF_E[::] = EF_E_arr[::]
            v_N = fnc.createVariable('v_i_N', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            v_N[::] = v_N_arr[::]
            v_E = fnc.createVariable('v_i_E', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            v_E[::] = v_E_arr[::]
            phi = fnc.createVariable('phi', np.float32, ('UNIX_TIME', 'MLON', 'MLAT'))
            phi[::] = phi_arr[::]

            # Support data such as IMF, VCNUM, potential drop
            results = re.findall(
                r'^>[A-Za-z ]*\(VCNUM\):\s*([\d]+)',
                text,
                re.M
            )
            vcnum_arr = np.array(results, dtype=np.int32).reshape([ntime, 1], order='C')
            vcnum = fnc.createVariable('VCNUM', 'i4', ('UNIX_TIME',))
            vcnum[::] = vcnum_arr[::]

            results = re.findall(
                r'^>\s*IMF Model: ([\S]+) Bang\s*([\S]+) deg\., '
                + r'Esw\s*([\S]+) mV/m, tilt\s*([\S]+) deg\.,\s*([A-Za-z0-9]+), Fit Order:\s*([\d]+)',
                text,
                re.M
            )
            results = list(zip(*results))
            imf_model_arr = np.array(results[0], dtype=object).reshape([ntime, 1], order='C')
            imf_model = fnc.createVariable('IMF_MODEL', 'S8', ('UNIX_TIME',))
            imf_model[::] = imf_model_arr[::]
            B_angle_arr = np.array(results[1], dtype=np.float32).reshape([ntime, 1], order='C')
            B_angle = fnc.createVariable('CLOCK_ANGLE', np.float32, ('UNIX_TIME',))
            B_angle[::] = B_angle_arr[::]
            E_sw_arr = np.array(results[2], dtype=np.float32).reshape([ntime, 1], order='C')
            E_sw = fnc.createVariable('E_SW', np.float32, ('UNIX_TIME',))
            E_sw[::] = E_sw_arr[::]
            dipole_tilt_arr = np.array(results[3], dtype=np.float32).reshape([ntime, 1], order='C')
            dipole_tilt = fnc.createVariable('DIP_TILT', np.float32, ('UNIX_TIME',))
            dipole_tilt[::] = dipole_tilt_arr[::]
            SD_model_arr = np.array(results[4], dtype=object).reshape([ntime, 1], order='C')
            SD_model = fnc.createVariable('SD_MODEL', 'S8', ('UNIX_TIME',))
            SD_model[::] = SD_model_arr[::]
            fit_order_arr = np.array(results[5], dtype=np.int32).reshape([ntime, 1], order='C')
            fit_order = fnc.createVariable('FIT_ORDER', 'i4', ('UNIX_TIME',))
            fit_order[::] = fit_order_arr[::]

            results = re.findall(
                r'^> OMNI IMF:\s*Bx=([\S]+) nT,\s*By=([\S]+) nT,\s*Bz=([\S]+) nT',
                text,
                re.M
            )
            results = list(zip(*results))
            B_x_OMNI_arr = np.array(results[0], dtype=np.float32).reshape([ntime, 1], order='C')
            B_x_OMNI = fnc.createVariable('B_x_OMNI', np.float32, ('UNIX_TIME',))
            B_x_OMNI[::] = B_x_OMNI_arr[::]
            B_y_OMNI_arr = np.array(results[1], dtype=np.float32).reshape([ntime, 1], order='C')
            B_y_OMNI = fnc.createVariable('B_y_OMNI', np.float32, ('UNIX_TIME',))
            B_y_OMNI[::] = B_y_OMNI_arr[::]
            B_z_OMNI_arr = np.array(results[2], dtype=np.float32).reshape([ntime, 1], order='C')
            B_z_OMNI = fnc.createVariable('B_z_OMNI', np.float32, ('UNIX_TIME',))
            B_z_OMNI[::] = B_z_OMNI_arr[::]

            # > Potential: Drop = 33 kV, Min = -19 kV, Max = 14 kV
            results = re.findall(
                r'^> Potential:\s*Drop=([\S]+) kV,\s*Min=([\S]+) kV,\s*Max=([\S]+) kV',
                text,
                re.M
            )
            results = list(zip(*results))
            phi_CPCP_arr = np.array(results[0], dtype=np.float32).reshape([ntime, 1], order='C')
            phi_CPCP = fnc.createVariable('phi_CPCP', np.float32, ('UNIX_TIME',))
            phi_CPCP[::] = phi_CPCP_arr[::]
            phi_max_arr = np.array(results[1], dtype=np.float32).reshape([ntime, 1], order='C')
            phi_max = fnc.createVariable('phi_MAX', np.float32, ('UNIX_TIME',))
            phi_max[::] = phi_max_arr[::]
            phi_min_arr = np.array(results[2], dtype=np.float32).reshape([ntime, 1], order='C')
            phi_min = fnc.createVariable('phi_MIN', np.float32, ('UNIX_TIME',))
            phi_min[::] = phi_min_arr[::]

            print('From {} to {}.'.format(
                datetime.datetime.utcfromtimestamp(time_array[0]),
                datetime.datetime.utcfromtimestamp(time_array[-1]))
            )
            mylog.StreamLogger.info(
                "The requested SuperDARN map potential data has been saved in the file {}.".format(fp))
            fnc.close()
Example 30
File: thesis.py Project: guodj/work
ax1.plot([45,45],[50,90], 'k--',transform=ccrs.PlateCarree())
ax1.plot([225,225],[50,90], 'k--',transform=ccrs.PlateCarree())
ax1.plot([105,105],[50,90], 'k--',transform=ccrs.PlateCarree())
ax1.plot([285,285],[50,90], 'k--',transform=ccrs.PlateCarree())
ax1.scatter(0, 90, color='r', transform=ccrs.PlateCarree(), zorder=10)
ax2, projection2 = gcc.create_map(
        2, 2, 2, 'pol', -50, -90, 0, coastlines=False,useLT=True,
        dlat=10, lonticklabel=(1, 1, 1, 1))
ax2.scatter(0, -90, color='b', transform=ccrs.PlateCarree())

#geomagnetic coordinates
ax3, projection3 = gcc.create_map(
        2, 2, 3, 'pol', 90, 50, 0, coastlines=False,useLT=True,
        dlat=10, lonticklabel=(1, 1, 1, 1))
mlatn,mlonn = convert(90,0,0,date=dt.date(2002,3,21))
for k in range(24):
    mltn = convert_mlt(mlonn[0],dtime=dt.datetime(2003,3,21,k))
    ax3.scatter(mltn*15,mlatn[0],color='r',transform=ccrs.PlateCarree())
ax3.scatter(180,75,s=50,c='k',marker='x',transform=ccrs.PlateCarree())

ax4, projection4 = gcc.create_map(
        2, 2, 4, 'pol', -50, -90, 0, coastlines=False,useLT=True,
        dlat=10, lonticklabel=(1, 1, 1, 1))
mlats,mlons = convert(-90,0,0,date=dt.date(2002,3,21))
for k in range(24):
    mlts = convert_mlt(mlons[0],dtime=dt.datetime(2003,3,21,k))
    ax4.scatter(mlts*15,mlats[0],color='b',transform=ccrs.PlateCarree())
ax4.scatter(180,-75,s=50,c='k',marker='x',transform=ccrs.PlateCarree())
plt.savefig('/Users/guod/Documents/Thesis/006SciFig/work3_001.eps')
#plt.show()
Example 31
def f4():
    plt.figure(figsize=(6.88, 6.74))
    #geographic coordinates
    ax1, projection1 = gcc.create_map(2,
                                      2,
                                      1,
                                      'pol',
                                      90,
                                      50,
                                      0,
                                      coastlines=False,
                                      useLT=True,
                                      dlat=10,
                                      lonticklabel=(1, 1, 1, 1))
    ax1.plot([45, 45], [50, 90], 'k--', transform=ccrs.PlateCarree())
    ax1.plot([225, 225], [50, 90], 'k--', transform=ccrs.PlateCarree())
    ax1.plot([105, 105], [50, 90], 'k--', transform=ccrs.PlateCarree())
    ax1.plot([285, 285], [50, 90], 'k--', transform=ccrs.PlateCarree())
    ax1.scatter(0,
                90,
                color='r',
                transform=ccrs.PlateCarree(),
                zorder=10,
                label='North Pole')
    ax1.text(0, 1, '(a)', transform=plt.gca().transAxes)
    plt.legend(loc=[0.5, 1.1])

    ax2, projection2 = gcc.create_map(2,
                                      2,
                                      2,
                                      'pol',
                                      -50,
                                      -90,
                                      0,
                                      coastlines=False,
                                      useLT=True,
                                      dlat=10,
                                      lonticklabel=(1, 1, 1, 1))
    ax2.scatter(0,
                -90,
                color='b',
                transform=ccrs.PlateCarree(),
                label='South Pole')
    ax2.text(0, 1, '(b)', transform=plt.gca().transAxes)
    plt.legend(loc=[-0.1, 1.1])

    #geomagnetic coordinates
    ax3, projection3 = gcc.create_map(2,
                                      2,
                                      3,
                                      'pol',
                                      90,
                                      50,
                                      0,
                                      coastlines=False,
                                      useLT=True,
                                      dlat=10,
                                      lonticklabel=(1, 1, 1, 1))
    mlatn, mlonn = convert(90, 0, 0, date=dt.date(2002, 3, 21))
    for k in range(24):
        mltn = convert_mlt(mlonn[0], dtime=dt.datetime(2003, 3, 21, k))
        ax3.scatter(mltn * 15,
                    mlatn[0],
                    color='r',
                    transform=ccrs.PlateCarree())
    ax3.scatter(180, 75, s=50, c='k', marker='x', transform=ccrs.PlateCarree())
    ax3.text(0, 1, '(c)', transform=plt.gca().transAxes)

    ax4, projection4 = gcc.create_map(2,
                                      2,
                                      4,
                                      'pol',
                                      -50,
                                      -90,
                                      0,
                                      coastlines=False,
                                      useLT=True,
                                      dlat=10,
                                      lonticklabel=(1, 1, 1, 1))
    mlats, mlons = convert(-90, 0, 0, date=dt.date(2002, 3, 21))
    for k in range(24):
        mlts = convert_mlt(mlons[0], dtime=dt.datetime(2003, 3, 21, k))
        ax4.scatter(mlts * 15,
                    mlats[0],
                    color='b',
                    transform=ccrs.PlateCarree())
    ax4.scatter(180,
                -75,
                s=50,
                c='k',
                marker='x',
                transform=ccrs.PlateCarree())
    ax4.text(0, 1, '(d)', transform=plt.gca().transAxes)
    plt.savefig('/Users/guod/Documents/Pole_Density_MLT_Change/Figures/'
                '000_Pole_Feature.pdf')
    return
Example 32
def f3():
    def percentile(n):
        def percentile_(x):
            return np.percentile(x, n)

        percentile_.__name__ = 'percentile_%s' % n
        return percentile_

    date_polarity = get_date_polarity()  # date_polarity is sorted
    date_polarity = date_polarity['2002-1-1':'2010-12-31']

    # IMF Bx, By, Bz and AE
    if False:
        print('Reading IMF data from 2002 to 2010...')
        baea = omni.get_omni('2002-1-1',
                             '2011-1-1',
                             variables=['Bx', 'Bym', 'Bzm', 'AE'],
                             res='5m')
        print('Reading finished')
        bae = [pd.DataFrame(), pd.DataFrame()]
        for k00, k0 in enumerate(['away', 'toward']):
            sbt = date_polarity[(date_polarity.polarity == k0)]
            for k11, k1 in enumerate(sbt.index):
                baet = baea[k1:(k1 + pd.Timedelta('1D') - pd.Timedelta('1s'))]
                if baet.empty:
                    print('No IMF and AE data on ', k1)
                    continue
                bae[k00] = bae[k00].append(baet)
    # end of IMF data preparation

    # Grace density data.
        nsbu = np.zeros([2, 2])
        rho = [[pd.DataFrame(), pd.DataFrame()],
               [pd.DataFrame(), pd.DataFrame()]]
        for k00, k0 in enumerate(['away', 'toward']):
            sbt = date_polarity[(date_polarity.polarity == k0)]
            for k2 in sbt.index:
                rhot = cg.ChampDensity(k2,
                                       k2 + pd.Timedelta('1D') -
                                       pd.Timedelta('1s'),
                                       satellite='grace',
                                       variables=['rho400', 'lat3'])
                if rhot.empty:
                    print('No GRACE data on ', k2)
                    continue
                for k33, k3 in enumerate([-90, 90]):  # south and north poles
                    rhott = rhot[rhot.lat3 == k3].copy()
                    print([k0, k3])
                    if rhott.shape[0] < 25:
                        print(
                            'There are only {:d} '
                            'data points on '.format(rhott.shape[0]), k2)
                        continue
                    rhott['rrho400'] = 100 * (
                        rhott['rho400'] -
                        rhott['rho400'].mean()) / rhott['rho400'].mean()
                    nsbu[k00, k33] += 1
                    rho[k00][k33] = rho[k00][k33].append(rhott)
        pd.to_pickle((bae, rho, nsbu),
                     os.environ.get('DATAPATH') + 'tmp/w2_f4_02.dat')
    # End of data preparation

    print('Begin figure 1')
    bdate = pd.Timestamp('2002-10-09')
    edate = pd.Timestamp('2002-10-14')
    print('Date range: ', bdate, '-->', edate)
    imf = omni.get_omni(bdate, edate, variables=['Bx', 'Bym', 'Bzm'], res='1h')
    rho = cg.ChampDensity(bdate,
                          edate,
                          variables=['rho400', 'lat3', 'Mlat', 'MLT'],
                          satellite='grace')
    fig, ax = plt.subplots(2, 1, sharex=True, figsize=(7.3, 6.8))
    # IMF Bx, By, Bz
    plt.sca(ax[0])
    plt.plot(imf.index, imf.Bx, 'b')
    plt.plot(imf.index, imf.Bym, 'r')
    plt.plot(imf.index, imf.Bzm, 'k')
    plt.ylim(-10, 10)
    plt.yticks(np.arange(-10, 11, 5))
    plt.gca().yaxis.set_minor_locator(AutoMinorLocator(5))
    plt.grid()
    plt.ylabel('IMF (nT)')
    plt.legend([r'$B_x$', r'$B_y$', r'$B_z$'], loc=1, ncol=3)
    plt.text(0.03, 0.87, '(a)', transform=plt.gca().transAxes)

    plt.sca(ax[1])
    rho['rho400'] /= 1e-11
    plt.plot(rho.index, rho.rho400, 'gray', lw=1)
    rhot = rho[rho.lat3 == -90]
    rhott = rho[((rho.Mlat <= -70) & (rho.Mlat >= -80) & (rho.MLT >= 11) &
                 (rho.MLT <= 13))]
    plt.plot(rhot.index, rhot.rho400, 'k')
    plt.plot(rhott.index, rhott.rho400, 'bo', ms=5)
    plt.gca().xaxis.set_major_locator(mdates.DayLocator())
    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%j'))
    plt.gca().xaxis.set_minor_locator(mdates.HourLocator(interval=2))
    plt.gca().yaxis.set_minor_locator(AutoMinorLocator(2))
    plt.xlim(bdate, edate)
    plt.ylim(0.2, 1.4)
    plt.yticks(np.arange(0.2, 1.5, 0.2))
    dates = pd.date_range(
        bdate, edate, freq='1D') + pd.Timedelta('15h') + pd.Timedelta('37m')
    plt.vlines(dates, ymin=0.2, ymax=1.4, color='k', linestyle='--')
    plt.grid()
    plt.xlabel('Day of 2002')
    plt.ylabel(r'$\rho$ ($10^{-11}$ kg/m$^3$)')
    plt.text(0.03, 0.87, '(b)', transform=plt.gca().transAxes)
    plt.savefig(
        '/Users/guod/Documents/Pole_Density_MLT_Change/Figures/001_01_Case.pdf'
    )
    print('End of figure 1\n\n')

    print('Begin figure 2')
    bae, rho, nsbu = pd.read_pickle(
        os.environ.get('DATAPATH') + 'tmp/w2_f4_02.dat')
    print('Total away and toward days [[AS, AN], [TS, TN]]: \n', nsbu)
    fig = plt.figure(figsize=(6, 8))
    grid1 = ImageGrid(fig, [0.1, 0.4, 0.8, 0.66], [3, 2],
                      label_mode='L',
                      axes_pad=0.2,
                      cbar_mode='edge',
                      cbar_location='right')
    grid2 = ImageGrid(fig, [0.1, 0.07, 0.8, 0.44], [2, 2],
                      label_mode='L',
                      axes_pad=[0.2, 0.4],
                      cbar_mode='edge',
                      cbar_location='right')
    ctt = [r'$B_x$ (nT)', r'$B_y$ (nT)', r'$B_z$ (nT)']
    plb = np.array(list('abcdefghij')).reshape(2, 5).T
    for k00, k0 in enumerate(['Away', 'Toward']):
        baet = bae[k00]
        baet['month'] = baet.index.month - 0.5
        baet['uthour'] = baet.index.hour + 0.5
        baett = baet.groupby(['month', 'uthour']).agg(np.median)
        baett = baett.reset_index()
        print('For %s polarities: ' % k0)
        for k11, k1 in enumerate(['Bx', 'Bym', 'Bzm']):
            ll = np.linspace(-3.2, 3.2, 11)
            cl = np.arange(-3, 4, 1)
            if k1 == 'AE':
                ll = np.linspace(0, 300, 11)
                cl = np.arange(0, 301, 100)
            #plt.sca(ax[k11, k00])
            plt.sca(grid1[k00 + k11 * 2])
            baettt = baett.pivot('month', 'uthour', k1)
            # Extend month and ut
            baettt.loc[:,
                       baettt.columns[0] - 1] = baettt.loc[:,
                                                           baettt.columns[-1]]
            baettt.loc[:,
                       baettt.columns[-2] + 1] = baettt.loc[:,
                                                            baettt.columns[0]]
            baettt = baettt.sort_index(axis=1)
            baettt.loc[baettt.index[0] -
                       1, :] = baettt.loc[baettt.index[-1], :]
            baettt.loc[baettt.index[-2] +
                       1, :] = baettt.loc[baettt.index[0], :]
            baettt = baettt.sort_index(axis=0)
            x = baettt.columns  # uthour
            y = baettt.index  # month
            hc = plt.contourf(x,
                              y,
                              baettt,
                              levels=ll,
                              extend='neither',
                              cmap='seismic')
            print('  Average {:s} is: {:5.1f}'.format(k1,
                                                      baettt.mean().mean()))
            if k1 == 'Bzm':
                print('  Bz max: {:5.1f}'.format(baettt.max().max()))
                print('  Bz min: {:5.1f}'.format(baettt.min().min()))
            plt.xlim(0, 24)
            plt.xticks(np.arange(0, 25, 6), [])
            plt.yticks(np.arange(0.5, 12.5, 1),
                       ['', '', 3, '', '', 6, '', '', 9, '', '', 12])
            plt.gca().xaxis.set_minor_locator(AutoMinorLocator(6))
            plt.gca().yaxis.set_minor_locator(AutoMinorLocator(2))
            plt.tick_params(axis='x', which='major', direction='out', length=4)
            plt.tick_params(axis='y',
                            which='major',
                            direction='out',
                            length=0,
                            pad=6)
            plt.tick_params(axis='x', which='minor', direction='out', length=2)
            plt.tick_params(axis='y',
                            which='minor',
                            direction='out',
                            length=3,
                            width=1.2)
            plt.text(0.1,
                     0.82,
                     '(' + plb[k11, k00] + ')',
                     bbox=dict(facecolor='grey', alpha=0.5),
                     transform=plt.gca().transAxes)
            if k11 == 0:
                plt.title(k0)
            if k00 == 0:
                plt.ylabel('Month')
            plt.ylim(-0.000001, 12.0000001)
            grid1.cbar_axes[k11].colorbar(hc, ticks=cl)
            grid1.cbar_axes[k11].set_ylabel(ctt[k11])
        for k11, k1 in enumerate(['south', 'north']):
            rhot = rho[k00][k11]
            rhot['month'] = rhot.index.month
            rhot['uthour'] = rhot.index.hour + 0.5
            rhott = rhot.groupby(['month', 'uthour']).agg(np.median)
            rhott = rhott.reset_index()
            plt.sca(grid2[k00 + k11 * 2])
            rhottt = rhott.pivot('month', 'uthour', 'rrho400')
            # extend month and ut
            rhottt.loc[:,
                       rhottt.columns[0] - 1] = rhottt.loc[:,
                                                           rhottt.columns[-1]]
            rhottt.loc[:,
                       rhottt.columns[-2] + 1] = rhottt.loc[:,
                                                            rhottt.columns[0]]
            rhottt = rhottt.sort_index(axis=1)
            rhottt.loc[rhottt.index[0] -
                       1, :] = rhottt.loc[rhottt.index[-1], :]
            rhottt.loc[rhottt.index[-2] +
                       1, :] = rhottt.loc[rhottt.index[0], :]
            rhottt = rhottt.sort_index(axis=0)
            hc = plt.contourf(x,
                              y,
                              rhottt,
                              levels=np.linspace(-22, 22, 11),
                              cmap='seismic')
            print('  ', k1, ' density max (%): ', rhottt.max().max())
            print('  ', k1, ' density min (%): ', rhottt.min().min())
            if k1 == 'south':
                #plt.axvline(15+37/60, 0, 1, c='k', ls='--')
                utx = np.arange(0, 25, 6)
                uts = [
                    convert_mlt(19, dtime=dt.datetime(2003, 2, 23, k))
                    for k in utx % 24
                ]
                [plt.text(k1, 13, '%.0f'%k2, horizontalalignment='center')\
                    for k1, k2 in zip(utx, uts)]
                if k00 == 0:
                    plt.text(-0.2, 1.08, 'MLT', transform=plt.gca().transAxes)
            if k1 == 'north':
                #plt.axvline(5+25/60, 0, 1, c='k', ls='--')
                utx = np.arange(0, 25, 6)
                utn = [
                    convert_mlt(170, dtime=dt.datetime(2003, 2, 23, k))
                    for k in utx % 24
                ]
                [plt.text(k1, 13, '%.0f'%k2, horizontalalignment='center')\
                    for k1, k2 in zip(utx, utn)]
                if k00 == 0:
                    plt.text(-0.2, 1.08, 'MLT', transform=plt.gca().transAxes)
            plt.xlim(0, 24)
            plt.xticks(np.arange(0, 25, 6))
            plt.ylim(-0.001, 12.001)
            plt.yticks(np.arange(0.5, 12.5),
                       ['', '', 3, '', '', 6, '', '', 9, '', '', 12])
            plt.gca().xaxis.set_minor_locator(AutoMinorLocator(6))
            plt.gca().yaxis.set_minor_locator(AutoMinorLocator(2))
            plt.tick_params(axis='x', which='major', direction='out', length=4)
            plt.tick_params(axis='y',
                            which='major',
                            direction='out',
                            length=0,
                            pad=6)
            plt.tick_params(axis='x', which='minor', direction='out', length=2)
            plt.tick_params(axis='y',
                            which='minor',
                            direction='out',
                            length=3,
                            width=1.2)
            plt.text(0.1,
                     0.82,
                     '(' + plb[k11 + 3, k00] + ')',
                     bbox=dict(facecolor='grey', alpha=0.5),
                     transform=plt.gca().transAxes)
            if k00 == 0:
                plt.ylabel('Month')
            if k11 == 1:
                plt.xlabel('UT (hour)')
            grid2.cbar_axes[k11].colorbar(hc, ticks=np.arange(-20, 21, 10))
            grid2.cbar_axes[k11].set_ylabel((r'S' if k11 == 0 else 'N') +
                                            r', $\delta\rho$ (%)')
    plt.savefig('/Users/guod/Documents/Pole_Density_MLT_Change/'
                'Figures/002_Statistical_Results.pdf')
    print('End of figure 2\n\n')

    print('Begin figure 3')
    fig, ax = plt.subplots(1, 2, sharex=True, sharey=True, figsize=(8, 4))
    plt.subplots_adjust(left=0.10,
                        right=0.95,
                        top=0.90,
                        bottom=0.15,
                        wspace=0.23,
                        hspace=0.16)
    plb = ['(a)', '(b)']
    for k00, k0 in enumerate(['Solar Maximum', 'Solar Minimum']):
        print(k0, ':')
        rhot = rho[0][0]  #  away sectors, south pole
        if 'max' in k0.lower():
            rhott = rhot['2002-1-1':'2003-12-31'].copy()
            print('  {:d} days'.format(len(np.unique(rhott.index.date))))
            tit = 'Year: 2002-2003'
        else:
            rhott = rhot['2009-1-1':'2010-12-31'].copy()
            print('  {:d} days'.format(len(np.unique(rhott.index.date))))
            tit = 'Year: 2009-2010'
        rhott = rhott[(rhott.index.month >= 9)
                      & (rhott.index.month <= 10)].copy()
        rhott['uthour'] = rhott.index.hour + 0.5
        rhottt = rhott.groupby(['uthour'])['rrho400'].agg(
            [np.median, percentile(25),
             percentile(75)])
        rhottt.columns = ['median', 'p25', 'p75']
        plt.sca(ax[k00])
        # Extend ut
        rhottt.loc[rhottt.index[0] - 1, :] = rhottt.loc[rhottt.index[-1], :]
        rhottt.loc[rhottt.index[-2] + 1, :] = rhottt.loc[rhottt.index[0], :]
        rhottt = rhottt.sort_index(axis=0)
        hp = plt.plot(rhottt.index, rhottt['median'], 'b')
        print('  Density max (%): ', rhottt['median'].max())
        print('  Density min (%): ', rhottt['median'].min())
        plt.plot(rhottt.index,
                 rhottt.p25,
                 'gray',
                 rhottt.index,
                 rhottt.p75,
                 'gray',
                 linestyle='--')
        plt.grid()
        plt.xlim(0, 24)
        plt.xticks(np.arange(0, 25, 6), [])
        utx = np.arange(0, 25, 6)
        uts = [
            convert_mlt(19, dtime=dt.datetime(2003, 2, 23, k))
            for k in utx % 24
        ]
        plt.text(-3.5, -34, 'UT')
        [
            plt.text(k1, -34, '%.0f' % k1, horizontalalignment='center')
            for k1 in utx
        ]
        plt.text(-3.5, -38, 'MLT')
        [
            plt.text(k1, -38, '%.0f' % k2, horizontalalignment='center')
            for k1, k2 in zip(utx, uts)
        ]

        plt.ylim(-30, 30)
        plt.yticks(np.arange(-30, 31, 10))
        plt.gca().xaxis.set_minor_locator(AutoMinorLocator(6))
        plt.gca().yaxis.set_minor_locator(AutoMinorLocator(5))
        plt.tick_params(axis='both', which='major')
        plt.tick_params(axis='both', which='minor')
        if k00 == 0:
            plt.ylabel(r'South, $\delta\rho$ (%)')
        plt.title(tit)
        plt.text(0.03, 0.91, plb[k00], transform=plt.gca().transAxes)
    print('End of figure 3\n\n')
    plt.savefig('/Users/guod/Documents/Pole_Density_MLT_Change/'
                'Figures/003_Solar_Activity_Dependence.pdf')

    print('Begin figure 4')
    bdate = pd.Timestamp('2002-10-09')
    edate = pd.Timestamp('2002-10-14')
    print('Date range: ', bdate, '-->', edate)
    rho = cg.ChampDensity(bdate,
                          edate,
                          variables=['rho400', 'lat3', 'Mlat', 'MLT'],
                          satellite='grace')
    rho['Dist_Cusp'] = 6371 * np.arccos(
        np.sin(rho['Mlat'] / 180 * np.pi) * np.sin(-75 / 180 * np.pi) +
        np.cos(rho['Mlat'] / 180 * np.pi) * np.cos(-75 / 180 * np.pi) * np.cos(
            (rho['MLT'] - 12) / 12 * np.pi))
    fig, ax = plt.subplots(1, 1, figsize=(7.3, 4.8))
    rho['rho400'] /= 1e-11
    #plt.plot(rho.index, rho.rho400, 'gray', lw=1)
    rhot = rho[rho.lat3 == -90]
    plt.plot(rhot['Dist_Cusp'], rhot['rho400'], '.k')
    plt.xlim(0, 3500)
    plt.ylim(0.2, 1.4)
    plt.yticks(np.arange(0.2, 1.5, 0.2))
    plt.xticks(np.arange(0, 4000, 500))
    ax.xaxis.set_minor_locator(AutoMinorLocator(5))
    ax.yaxis.set_minor_locator(AutoMinorLocator(2))
    plt.grid()
    plt.xlabel('Pole-Cusp Distance (km)')
    plt.ylabel(r'$\rho$ ($10^{-11}$ kg/m$^3$)')
    print('End of figure 4\n\n')
    plt.savefig('/Users/guod/Documents/Pole_Density_MLT_Change/'
                'Figures/001_02_Dist_Cusp.pdf')
    return
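
The month/UT extension steps in f3 (copying the first and last rows/columns and re-sorting the index) are there so that lines and contours close smoothly across the 0/24 h and December/January boundaries. Below is a minimal standalone sketch of the same wrap-around trick on a toy hourly series; the data are made up and only pandas and numpy are assumed.

import numpy as np
import pandas as pd

ut = np.arange(24) + 0.5                     # hourly bin centres, 0.5 ... 23.5
df = pd.DataFrame({'val': np.sin(2 * np.pi * ut / 24.0)}, index=ut)

# Copy the last bin to index -0.5 and the first bin to index 24.5, then sort,
# so a line or contour drawn over 0-24 h wraps around the day boundary.
df.loc[df.index[0] - 1, :] = df.loc[df.index[-1], :]
df.loc[df.index[-2] + 1, :] = df.loc[df.index[0], :]
df = df.sort_index()
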
Example 33
    def plot_fan(cls, dmap_data: List[dict], ax=None, scan_index: int = 1,
                       ranges: List = [0,75], boundary: bool = True,
                       parameter: str = 'v', lowlat: int = 30, cmap: str = None,
                       groundscatter: bool = False,
                       zmin: int = None, zmax: int = None,
                       colorbar: bool = True,
                       colorbar_label: str = ''):
        """
        Plots a radar's Field Of View (FOV) fan plot for the given data and scan number

        Parameters
        -----------
            dmap_data: List[dict]
                Named list of dictionaries obtained from SDarn_read
            ax: matplotlib.pyplot axis
                Pre-defined axis object to pass in, must currently be polar projection
                Default: Generates a polar projection for the user with MLT/latitude labels
            scan_index: int
                Scan number from beginning of first record in file
                Default: 1
            parameter: str
                Key name indicating which parameter to plot.
                Default: v (Velocity). Alternatives: 'p_l', 'w_l', 'elv'
            lowlat: int
                Lower AACGM latitude boundary for the polar plot
                Default: 30
            ranges: list
                Set to a two element list of the lower and upper ranges to plot
                Default: [0,75]
            boundary: bool
                Set to false to not plot the outline of the FOV
                Default: True
            cmap: matplotlib.cm
                matplotlib colour map
                https://matplotlib.org/tutorials/colors/colormaps.html
                Default: Official pyDARN colour map for given parameter
            groundscatter : bool
                Set true to indicate if groundscatter should be plotted in grey
                Default: False
            zmin: int
                The minimum parameter value for coloring
                Default: {'p_l': [0], 'v': [-200], 'w_l': [0], 'elv': [0]}
            zmax: int
                The maximum parameter value for coloring
                Default: {'p_l': [50], 'v': [200], 'w_l': [250], 'elv': [50]}
            colorbar: bool
                Draw a colourbar if True
                Default: True
            colorbar_label: str
                The label that appears next to the colour bar; requires colorbar to be True
                Default: ''
        Returns
        -----------
        beam_corners_aacgm_lats
            n_beams x n_gates numpy array of AACGMv2 latitudes
        beam_corners_aacgm_lons
            n_beams x n_gates numpy array of AACGMv2 longitudes
        scan
            n_beams x n_gates numpy array of the scan data (for the selected parameter)
        grndsct
            n_beams x n_gates numpy array of groundscatter flags for the scan
        dtime
            datetime object for the scan plotted

        """

        my_path = os.path.abspath(os.path.dirname(__file__))
        base_path = os.path.join(my_path, '..')

        # Get scan numbers for each record
        beam_scan=build_scan(dmap_data)

        # Locate scan in loaded data
        plot_beams = np.where(beam_scan == scan_index)

        # Time for coordinate conversion
        dtime = dt.datetime(dmap_data[plot_beams[0][0]]['time.yr'],
                            dmap_data[plot_beams[0][0]]['time.mo'], dmap_data[plot_beams[0][0]]['time.dy'],
                            dmap_data[plot_beams[0][0]]['time.hr'], dmap_data[plot_beams[0][0]]['time.mt'],
                            dmap_data[plot_beams[0][0]]['time.sc'])

        # Get radar beam/gate locations
        beam_corners_aacgm_lats, beam_corners_aacgm_lons=radar_fov(dmap_data[0]['stid'],
            coords='aacgm', date=dtime)
        fan_shape = beam_corners_aacgm_lons.shape

        # Work out the shift from AACGM longitude to MLT (expressed in degrees)
        mltshift = beam_corners_aacgm_lons[0, 0] - \
            (aacgmv2.convert_mlt(beam_corners_aacgm_lons[0, 0], dtime) * 15)
        beam_corners_mlts = beam_corners_aacgm_lons - mltshift

        # Hold the beam positions
        thetas = np.radians(beam_corners_mlts)
        rs = beam_corners_aacgm_lats

        # Get range-gate data and groundscatter array for given scan
        scan = np.zeros((fan_shape[0] - 1, fan_shape[1]-1))
        grndsct = np.zeros((fan_shape[0] - 1, fan_shape[1]-1)) #initialise arrays
        for i in np.nditer(plot_beams):
            try:
                slist = dmap_data[i.astype(int)]['slist'] #get a list of gates where there is data
                beam = dmap_data[i.astype(int)]['bmnum'] #get the beam number for the record
                scan[slist, beam] = dmap_data[i.astype(int)][parameter]
                grndsct[slist, beam] = dmap_data[i.astype(int)]['gflg']
            # a missing 'slist' field indicates a partial record
            except KeyError:
                continue

        # Colour table and max value selection depending on parameter plotted
        # Load defaults if none given
        # TODO: use cmaps, as overwriting cmap is bad practice...
        # did I do that in my code ... hmm
        if cmap is None:
            cmap = {'p_l': 'plasma', 'v': PyDARNColormaps.PYDARN_VELOCITY,
                    'w_l': PyDARNColormaps.PYDARN_VIRIDIS,
                    'elv': PyDARNColormaps.PYDARN}
            cmap = plt.cm.get_cmap(cmap[parameter])

        # Setting zmin and zmax
        defaultzminmax = {'p_l': [0, 50], 'v': [-200, 200],
                          'w_l': [0, 250], 'elv': [0, 50]}
        if zmin is None:
            zmin = defaultzminmax[parameter][0]
        if zmax is None:
            zmax = defaultzminmax[parameter][1]

        # Setup plot
        # This may screw up references
        if ax is None:
            ax = plt.axes(polar=True)
            if beam_corners_aacgm_lats[0,0] > 0:
                ax.set_ylim(90, lowlat)
                ax.set_yticks(np.arange(lowlat, 90, 10))
            else:
                ax.set_ylim(-90, -abs(lowlat))
                ax.set_yticks(np.arange(-abs(lowlat), -90, -10))
            ax.set_xticklabels(['00', '', '06', '', '12', '', '18', ''])
            ax.set_theta_zero_location("S")

        # Begin plotting by iterating over ranges and beams
        for gates in range(ranges[0],ranges[1]-1):
            for beams in range(thetas.shape[1] - 2):
                # Index colour table correctly
                cmapindex = (scan[gates, beams] + abs(zmin)) /\
                        (abs(zmin) + abs(zmax))
                if cmapindex < 0:
                    cmapindex = 0

                if cmapindex > 1:
                    cmapindex = 1
                colour_rgba = cmap(cmapindex)

                # Check for zero values (white) and groundscatter (gray)
                if scan[gates, beams] == 0:
                    colour_rgba = 'w'

                if groundscatter and grndsct[gates, beams] == 1:
                    colour_rgba = 'gray'

                #Angle for polar plotting
                theta = [thetas[gates, beams], thetas[gates + 1, beams],
                         thetas[gates + 1, beams + 1],
                         thetas[gates, beams + 1]]
                #Radius for polar plotting
                r = [rs[gates, beams], rs[gates + 1, beams],
                     rs[gates + 1, beams + 1], rs[gates, beams + 1]]
                im = ax.fill(theta, r, color=colour_rgba)

        # Plot FOV outline
        if boundary is True:
            plt.polar(thetas[0:ranges[1], 0], rs[0:ranges[1], 0], color='black',
                      linewidth=0.5)
            plt.polar(thetas[ranges[1] - 1, 0:thetas.shape[1] - 1],
                      rs[ranges[1] - 1, 0:thetas.shape[1] - 1], color='black',
                      linewidth=0.5)
            plt.polar(thetas[0:ranges[1], thetas.shape[1] - 2],
                      rs[0:ranges[1], thetas.shape[1] - 2],
                      color='black', linewidth=0.5)
            plt.polar(thetas[0, 0:thetas.shape[1] - 2],
                      rs[0, 0:thetas.shape[1] - 2], color='black',
                      linewidth=0.5)

        norm = colors.Normalize(zmin, zmax)
        # Create color bar if True
        if colorbar is True:
            mappable = cm.ScalarMappable(norm=norm, cmap=cmap)
            locator = ticker.MaxNLocator(symmetric=True, min_n_ticks=3,
                                         integer=True, nbins='auto')
            ticks = locator.tick_values(vmin=zmin, vmax=zmax)

            cb = ax.figure.colorbar(mappable, ax=ax, extend='both', ticks=ticks)

            if colorbar_label != '':
                cb.set_label(colorbar_label)

        return beam_corners_aacgm_lats, beam_corners_aacgm_lons, scan, grndsct, dtime
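
The MLT shift above re-expresses AACGM longitudes as MLT-aligned angles by subtracting a single offset computed from one reference corner. A small standalone illustration of that arithmetic with made-up longitudes (not taken from any radar field of view), assuming only numpy and aacgmv2:

import datetime as dt
import numpy as np
import aacgmv2

mlon = np.array([-120.0, -60.0, 0.0, 60.0, 120.0])   # hypothetical AACGM longitudes
dtime = dt.datetime(2015, 2, 24, 14, 0, 15)

# The offset is computed from the first point only, so the relative geometry
# of the remaining points is preserved when re-expressed in MLT degrees.
mltshift = mlon[0] - aacgmv2.convert_mlt(mlon[0], dtime) * 15.0
mlon_mlt_deg = mlon - mltshift        # longitudes expressed as MLT * 15 degrees
theta = np.radians(mlon_mlt_deg)      # angles for a matplotlib polar axis
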
Example 34
    def plot_potential_contours(cls,
                                fit_coefficient: list,
                                lat_min: list,
                                date: object,
                                lat_shift: int = 0,
                                lon_shift: int = 0,
                                fit_order: int = 6,
                                hemisphere: Enum = Hemisphere.North,
                                contour_levels: list = [],
                                contour_color: str = 'dimgrey',
                                contour_linewidths: float = 0.8,
                                contour_fill: bool = False,
                                contour_colorbar: bool = True,
                                contour_fill_cmap: str = 'RdBu',
                                contour_colorbar_label: str = 'Potential (kV)',
                                pot_minmax_color: str = 'k',
                                **kwargs):
        # TODO: no evaluation of the coordinate system is made; a check may be
        # needed when plotting in radians/geographic coordinates, etc.
        '''
        Takes the grid of potentials and plots a contour plot along with the
        minimum and maximum potential positions

        Parameters
        ----------
            fit_coefficient: List[float]
                Value of the coefficient
            lat_min: List[float]
                Minimum latitude that will be evaluated
                Not to be confused with 'lowlat'
            date: datetime object
                Date from record
            lat_shift: int
                Generic shift in latitude from map file
                default: 0
            lon_shift: int
                Generic shift in longitude from map file
                default: 0
            fit_order: int
                order of the fit
                default: 6
            contour_levels: np.ndarray
                Array of values at which the contours
                are plotted
                Default: [] (the full default list is defined
                inside the function because of its length);
                values higher or lower than the given maximum
                and minimum are colored with the max and min
                colors if contour_fill=True
            contour_color: str
                Colour of the contour lines plotted
                Default: dimgrey
            contour_label: bool - NOT CURRENTLY IMPLEMENTED
                If contour_fill is True, contour labels will
                be plotted on the contour lines
                Default: True
            contour_linewidths: float
                Thickness of contour lines
                Default: 0.8
            contour_fill: bool
                Option to use filled contours rather than
                an outline. If True, contour_color and
                contour_linewidths are ignored
                Default: False
            contour_colorbar: bool
                Option to show the colorbar for the contours
                if contour_fill = True
                Default: True
            contour_fill_cmap: matplotlib.cm
                Colormap used to fill the contours if
                contour_fill is True
                Default: 'RdBu'
            contour_colorbar_label: str
                Label for the colorbar describing the
                contours if contour_fill is True
                Default: 'Potential (kV)'
            pot_minmax_color: str
                Colour of the plus and minus symbols marking
                the maximum and minimum potentials
                Default: 'k' - black
            **kwargs
                including lowlat and hemisphere for calculating
                potentials
        '''
        mlat, mlon_u, pot_arr = cls.calculate_potentials(fit_coefficient,
                                                         lat_min,
                                                         lat_shift=lat_shift,
                                                         lon_shift=lon_shift,
                                                         fit_order=fit_order,
                                                         hemisphere=hemisphere,
                                                         **kwargs)

        # Shift mlon to MLT
        shifted_mlts = mlon_u[0, 0] - \
            (aacgmv2.convert_mlt(mlon_u[0, 0], date) * 15)
        shifted_lons = mlon_u - shifted_mlts
        mlon = shifted_lons

        # Defined inside the function because the list is too long for the signature
        if contour_levels == []:
            contour_levels = [
                -100, -95, -90, -85, -80, -75, -70, -65, -60, -55, -50, -45,
                -40, -35, -30, -25, -20, -15, -10, -5, -1, 1, 5, 10, 15, 20,
                25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95, 100
            ]

        if contour_fill:
            # Filled contours
            plt.contourf(np.radians(mlon),
                         mlat,
                         pot_arr,
                         2,
                         vmax=abs(pot_arr).max(),
                         vmin=-abs(pot_arr).max(),
                         locator=ticker.FixedLocator(contour_levels),
                         cmap=contour_fill_cmap,
                         alpha=0.6,
                         extend='both',
                         zorder=3.0)
            if contour_colorbar is True:
                norm = colors.Normalize(-abs(pot_arr).max(), abs(pot_arr).max())
                mappable = cm.ScalarMappable(norm=norm, cmap=contour_fill_cmap)
                locator = ticker.MaxNLocator(symmetric=True,
                                             min_n_ticks=3,
                                             integer=True,
                                             nbins='auto')
                ticks = locator.tick_values(vmin=-abs(pot_arr).max(),
                                            vmax=abs(pot_arr).max())
                cb = plt.colorbar(mappable, extend='both', ticks=ticks)
                if contour_colorbar_label != '':
                    cb.set_label(contour_colorbar_label)
        else:
            # Contour lines only
            cs = plt.contour(np.radians(mlon),
                             mlat,
                             pot_arr,
                             2,
                             vmax=abs(pot_arr).max(),
                             vmin=-abs(pot_arr).max(),
                             locator=ticker.FixedLocator(contour_levels),
                             colors=contour_color,
                             alpha=0.8,
                             linewidths=contour_linewidths,
                             zorder=3.0)
            # TODO: Add in contour labels
            # if contour_label:
            #    plt.clabel(cs, cs.levels, inline=True, fmt='%d', fontsize=5)

        # Locate the maximum and minimum potential positions
        ind_max = np.where(pot_arr == pot_arr.max())
        ind_min = np.where(pot_arr == pot_arr.min())
        max_mlon = mlon[ind_max]
        max_mlat = mlat[ind_max]
        min_mlon = mlon[ind_min]
        min_mlat = mlat[ind_min]

        plt.scatter(np.radians(max_mlon),
                    max_mlat,
                    marker='+',
                    s=70,
                    color=pot_minmax_color,
                    zorder=5.0)
        plt.scatter(np.radians(min_mlon),
                    min_mlat,
                    marker='_',
                    s=70,
                    color=pot_minmax_color,
                    zorder=5.0)
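
The filled-contour branch above builds its colourbar from a symmetric Normalize and a MaxNLocator rather than from the contour set itself. The sketch below shows that pattern in isolation; the potential array is a random placeholder, not model output.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm, colors, ticker

pot_arr = np.random.uniform(-63.0, 55.0, (30, 24))   # placeholder potentials (kV)
vlim = abs(pot_arr).max()

fig, ax = plt.subplots()
# Normalisation symmetric about zero, so the diverging colormap is centred.
norm = colors.Normalize(-vlim, vlim)
mappable = cm.ScalarMappable(norm=norm, cmap='RdBu')
locator = ticker.MaxNLocator(symmetric=True, min_n_ticks=3,
                             integer=True, nbins='auto')
ticks = locator.tick_values(vmin=-vlim, vmax=vlim)
cb = fig.colorbar(mappable, ax=ax, extend='both', ticks=ticks)
cb.set_label('Potential (kV)')
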
Example 35
def test_MLT_a2m():
    mlt = aacgmv2.convert_mlt([1, 12, 23], dt.datetime(2015, 2, 24, 14, 0, 15))
    np.testing.assert_allclose(mlt, [9.056476, 9.78981, 10.523143], rtol=1e-6)
Example 36
def test_MLT_m2a():
    mlon = aacgmv2.convert_mlt([1, 12, 23], dt.datetime(2015, 2, 24, 14, 0, 15), m2a=True)
    np.testing.assert_allclose(mlon, [240.152854, 45.152854, 210.152854], rtol=1e-6)
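
The two tests above exercise both directions of convert_mlt. As a rough self-consistency sketch (assuming only that the mlon -> MLT -> mlon round trip recovers the input longitudes modulo 360 degrees):

import datetime as dt
import numpy as np
import aacgmv2

dtime = dt.datetime(2015, 2, 24, 14, 0, 15)
mlon = np.array([240.152854, 45.152854, 210.152854])

# mlon -> MLT, then MLT -> mlon with m2a=True, should return the inputs
# up to a multiple of 360 degrees.
mlt = aacgmv2.convert_mlt(mlon, dtime)
mlon_back = aacgmv2.convert_mlt(mlt, dtime, m2a=True)
np.testing.assert_allclose(np.mod(mlon_back, 360.0), np.mod(mlon, 360.0),
                           rtol=1e-6)
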