Example 1
def row_calc_aei(row,srf,asf):
    if row["track_type"] == "aero":
        row["ei"] = 90 if ".Vd." in row["comp_name"] else 0
        row["aei"] = 180 - row['phase_shift'] - row["ei"] + asf(srf(row['sz_name'],(float(row['age_max'])+float(row['age_min']))/2))
    else:
        decimal_year = get_shipmag_decimal_year(row)
        if decimal_year is None: raise ValueError("Intersection point could not be found in the data file, so IGRF and aei could not be calculated for %s; please check your data"%row['comp_name'])
        igrf = ipmag.igrf([decimal_year,0,float(row['inter_lat']),float(row['inter_lon'])])
        alpha = float(row['strike']) - igrf[0]
        e = np.rad2deg(np.arctan2(np.tan(np.deg2rad(igrf[1])),np.sin(np.deg2rad(alpha))))
        aei = 180 - e - float(row['phase_shift']) + asf(srf(row['sz_name'],(float(row['age_max'])+float(row['age_min']))/2))
        row['ei'] = e
        row['aei'] = aei
    return row
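
A minimal usage sketch (not part of the original example): applying row_calc_aei across a deskew table with DataFrame.apply. The srf and asf callables and all column values below are hypothetical placeholders for the real spreading-rate and anomalous-skewness functions the function expects.

import pandas as pd

def srf(sz_name, mean_age):
    # hypothetical spreading-rate function: constant rate for the sketch
    return 40.0

def asf(spreading_rate):
    # hypothetical anomalous-skewness correction (degrees): zero for the sketch
    return 0.0

deskew_df = pd.DataFrame([{
    "track_type": "aero", "comp_name": "track01.Vd.lp", "phase_shift": 180.0,
    "sz_name": "example_zone", "age_max": "21.0", "age_min": "20.0",
}])

# apply the row-wise calculation; the returned rows gain 'ei' and 'aei' columns
deskew_df = deskew_df.apply(lambda r: row_calc_aei(r, srf, asf), axis=1)
print(deskew_df[["ei", "aei"]])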
Example 2
def calc_aei(deskew_df,srf,asf):

    deskew_df["ei"] = [(90. if ".Vd." in comp else 0.) for ps,comp in zip(deskew_df["phase_shift"],deskew_df["comp_name"])]

    deskew_df["aei"] = [180. - row['phase_shift']- row["ei"] + asf(srf(row['sz_name'],(float(row['age_max'])+float(row['age_min']))/2)) for i,row in deskew_df.iterrows()]

    for i,row in deskew_df[deskew_df['track_type']=='ship'].iterrows():
        decimal_year = get_shipmag_decimal_year(row)
        if decimal_year is None: print("Intersection point could not be found in the data file, so IGRF and aei could not be calculated; please check your data. Skipping %s"%row['comp_name']); continue
        igrf = ipmag.igrf([decimal_year,0,float(row['inter_lat']),float(row['inter_lon'])])
        alpha = float(row['strike']) - igrf[0]
        e = np.rad2deg(np.arctan2(np.tan(np.deg2rad(igrf[1])),np.sin(np.deg2rad(alpha))))
        aei = 180 - e - float(row['phase_shift']) + asf(srf(row['sz_name'],(float(row['age_max'])+float(row['age_min']))/2))
        deskew_df.at[i,'ei'] = e
        deskew_df.at[i,'aei'] = aei
    return deskew_df
Example 3
    def test_igrf_output(self):
        result = ipmag.igrf([1999.1, 30, 20, 50])
        reference = [1.20288657e+00, 2.82331112e+01, 3.9782338913649881e+04]
        for num, item in enumerate(result):
            self.assertAlmostEqual(item, reference[num])
Example 4
    def plot_hst_loc_cartopy(self,
                             i=5,
                             df=None,
                             title='',
                             thresh=5,
                             fout='',
                             min_exptime=800,
                             key='start',
                             save=False,
                             orbital_path1=None,
                             orbital_path2=None,
                             projection=ccrs.PlateCarree()):
        fig, ax = plt.subplots(nrows=1,
                               ncols=1,
                               figsize=(8, 7),
                               tight_layout=True,
                               subplot_kw={'projection': projection})
        crs = projection
        transform = crs._as_mpl_transform(ax)
        df = df[df.integration_time.gt(min_exptime)]
        df = df.sort_values(by='incident_cr_rate')

        # Plot configuration
        ax.coastlines()
        gl = ax.gridlines(crs=crs,
                          draw_labels=True,
                          linewidth=1,
                          color='k',
                          alpha=0.4,
                          linestyle='--')
        fname = '/ifs/missions/projects/plcosmic/hst_cosmic_rays/APJ_plots/HYP_50M_SR_W.tif'
        ax.imshow(plt.imread(fname),
                  origin='upper',
                  transform=crs,
                  extent=[-180, 180, -90, 90])
        gl.xlabels_top = False
        gl.ylabels_left = True
        gl.ylabels_right = False
        gl.xlines = True
        # gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER
        gl.xlocator = MultipleLocator(60)
        gl.ylocator = MultipleLocator(15)
        gl.xlabel_style = {'size': 10, 'color': 'black'}
        gl.ylabel_style = {'color': 'black'}

        date = 2005
        altitude = 565

        # Calculate the B field grid
        # Evenly space grid with 1 degree resolution in both Latitude and Longitude
        lat = np.linspace(-90, 90, 1 * 180 + 1)
        lon = np.linspace(0, 360, 1 * 360 + 1)
        lat_grid, lon_grid = np.meshgrid(lat, lon)
        coordinates = list(zip(lat_grid.ravel(), lon_grid.ravel()))
        B_strength = []
        for coords in coordinates:
            b_field = ipmag.igrf([date, altitude, coords[0], coords[1]])
            B_strength.append(b_field[-1])
        B_strength_grid = np.array(B_strength).reshape(lat_grid.shape)

        # Get the CR rate information
        lat, lon, rate = df['latitude_{}'.format(key)], \
                             df['longitude_{}'.format(key)], \
                             df['incident_cr_rate']
        LOG.info('{} {} {}'.format(len(lat), len(lon), len(rate)))

        # Get average statistics to generate contour
        mean, median, std = sigma_clipped_stats(rate,
                                                sigma_lower=3,
                                                sigma_upper=3)
        LOG.info('{} +/- {}'.format(mean, std))
        norm = ImageNormalize(rate,
                              stretch=LinearStretch(),
                              vmin=mean - thresh * std,
                              vmax=mean + thresh * std)
        cbar_below_mean = [mean - (i + 1) * std for i in range(thresh)]
        cbar_above_mean = [mean + (i + 1) * std for i in range(thresh)]

        cbar_bounds = cbar_below_mean + [mean] + cbar_above_mean
        print(cbar_bounds)
        cbar_bounds.sort()
        sci_cmap = plt.cm.viridis
        custom_norm = colors.BoundaryNorm(boundaries=cbar_bounds,
                                          ncolors=sci_cmap.N)

        scat = ax.scatter(lon.values,
                          lat.values,
                          marker='o',
                          s=3.5,
                          c=rate,
                          alpha=0.2,
                          norm=custom_norm,
                          cmap='viridis',
                          transform=ccrs.PlateCarree())

        cbar_ticks = cbar_bounds
        cax = fig.add_axes([0.1, 0.2, 0.8, 0.05])
        cbar = fig.colorbar(scat,
                            cax=cax,
                            ticks=cbar_ticks,
                            orientation='horizontal')
        cbar.set_alpha(1)
        cbar.draw_all()
        cbar_tick_labels = [rf'<x>-{i}$\sigma$' for i in range(thresh, 0, -1)] + [
            '<x>'
        ] + [rf'<x>+{i}$\sigma$' for i in range(1, thresh + 1)]
        cbar.ax.set_xticklabels(cbar_tick_labels,
                                horizontalalignment='right',
                                rotation=30)

        cbar.set_label('CR Flux [CR/s/$cm^2$]', fontsize=10)

        cntr = ax.contour(lon_grid,
                          lat_grid,
                          B_strength_grid,
                          cmap='plasma',
                          levels=10,
                          alpha=1,
                          linewidths=2,
                          transform=ccrs.PlateCarree())

        h1, l1 = cntr.legend_elements("B_strength_grid")
        l1_custom = [
            f"{val.split('=')[-1].strip('$').strip()} nT" for val in l1
        ]

        leg1 = Legend(ax,
                      h1,
                      l1_custom,
                      loc='upper left',
                      edgecolor='k',
                      fontsize=8,
                      framealpha=0.45,
                      facecolor='tab:gray',
                      bbox_to_anchor=(1.05, 1.03),
                      title='Total Magnetic Intensity')
        ax.add_artist(leg1)

        if orbital_path1 is not None:
            ax.scatter(orbital_path1.metadata['longitude'][::4][1:],
                       orbital_path1.metadata['latitude'][::4][1:],
                       c='k',
                       s=20,
                       label='285 second interval')

        if orbital_path2 is not None:
            ax.plot(orbital_path2.metadata['longitude'],
                    orbital_path2.metadata['latitude'],
                    label='Orbital Path Over 2000 seconds',
                    color='k',
                    ls='--',
                    lw=1.25)
        plt.show()
        return fig
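
The magnetic-field contours in the figure above come from evaluating IGRF on a regular grid; a stand-alone sketch of just that step is shown below, assuming pmagpy is installed. The epoch and altitude mirror the hard-coded values in the method, and the full one-degree grid means roughly 65,000 igrf calls, so it is slow.

import numpy as np
from pmagpy import ipmag

date, altitude = 2005, 565                  # decimal year, altitude in km
lat = np.linspace(-90, 90, 181)             # 1 degree resolution in latitude
lon = np.linspace(0, 360, 361)              # 1 degree resolution in longitude
lat_grid, lon_grid = np.meshgrid(lat, lon)

# keep the last element of each igrf result, the total intensity in nT
B_strength = [ipmag.igrf([date, altitude, la, lo])[-1]
              for la, lo in zip(lat_grid.ravel(), lon_grid.ravel())]
B_strength_grid = np.array(B_strength).reshape(lat_grid.shape)
print(B_strength_grid.shape)                # ready for ax.contour as above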
Example 5
    def test_igrf_output(self):
        result = ipmag.igrf([1999.1, 30, 20, 50])
        reference = [1.20288657e+00, 2.82331112e+01, 3.9782338913649881e+04]
        for num, item in enumerate(result):
            self.assertAlmostEqual(item, reference[num])
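
For reference, a minimal stand-alone call matching the test above, assuming pmagpy is installed. Reading the returned triple as declination, inclination, and total intensity in nT follows the reference values asserted in the test.

from pmagpy import ipmag

# input list: [decimal year, altitude (km), latitude, longitude]
dec, inc, intensity = ipmag.igrf([1999.1, 30, 20, 50])
print("D = %.4f deg, I = %.4f deg, B = %.1f nT" % (dec, inc, intensity))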
Example 6
def preprocess_m77t(m77tf, data_directory="shipmag_data"):

    #read in data and initialize empty columns and placeholder variables
    m77t_df = pd.read_csv(m77tf, sep='\t', dtype=str)
    m77t_df['DECIMAL_YEAR'] = np.nan
    m77t_df['DIS'] = np.nan
    m77t_df['MAG_COR'] = np.nan
    current_dis, prev_lat_lon = 0, []

    if "SURVEY_ID" not in m77t_df.columns:
        m77t_df = pd.read_csv(
            m77tf,
            sep='\t',
            dtype=str,
            names=[
                "SURVEY_ID", "TIMEZONE", "DATE", "TIME", "LAT", "LON",
                "POS_TYPE", "NAV_QUALCO", "BAT_TTIME", "CORR_DEPTH",
                "BAT_CPCO", "BAT_TYPCO", "BAT_QUALCO", "MAG_TOT", "MAG_TOT2",
                "MAG_RES", "MAG_RESSEN", "MAG_DICORR", "MAG_SDEPTH",
                "MAG_QUALCO", "GRA_OBS", "EOTVOS", "FREEAIR", "GRA_QUALCO",
                "LINEID", "POINTID"
            ])

    #check for .lp file existence and skip if it has already been made
    fout_name = os.path.join(data_directory, m77t_df['SURVEY_ID'].iloc[0],
                             m77t_df['SURVEY_ID'].iloc[0] + '.lp')
    if os.path.isfile(fout_name):
        print(
            ".lp file found for %s, skipping to save time. If you would like to regenerate these files, please remove them and rerun the script."
            % str(m77t_df['SURVEY_ID'].iloc[0]))
        return

    for i, row in m77t_df.iterrows():

        #create decimal year from datetime
        date = str(row['DATE'])
        if date == str(np.nan):
            print(
                "no date info for record %d of survey %s, skipping this record"
                % (i, row['SURVEY_ID']))
            continue
        dt_row = datetime(int(date[0:4]), int(date[4:6]), int(date[6:8]))
        dec_year = dt_to_dec(dt_row)
        m77t_df.at[i, 'DECIMAL_YEAR'] = round(dec_year, 5)

        #calculate distance from last point and add to total distance
        if prev_lat_lon != []:
            #/1000 to convert m to km
            current_dis += Geodesic.WGS84.Inverse(
                float(row['LAT']), float(row['LON']), prev_lat_lon[0],
                prev_lat_lon[1])['s12'] / 1000
        prev_lat_lon = [float(row['LAT']), float(row['LON'])]
        m77t_df.at[i, 'DIS'] = round(current_dis, 5)

        #determine IGRF and remove from uncorrected intensity
        igrf_cor = ipmag.igrf(
            [dec_year, 0, float(row['LAT']),
             float(row['LON'])])[2]
        mag_cor = float(row['MAG_TOT']) - igrf_cor
        if -3000 < mag_cor < 3000:
            m77t_df.at[i, 'MAG_COR'] = round(mag_cor, 5)

    round3_func = lambda x: round(x, 3)
    dis_array = list(
        map(
            round3_func,
            np.arange(float(m77t_df['DIS'].tolist()[0]),
                      float(m77t_df['DIS'].tolist()[-1]),
                      1)))  # 1 km spacing
    decimal_year_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      list(map(float, m77t_df['DECIMAL_YEAR'].tolist())))))
    mag_cor_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      list(map(float, m77t_df['MAG_COR'])))))
    lat_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      list(map(float, m77t_df['LAT'])))))
    lon_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      convert_to_0_360(m77t_df['LON']))))

    interp_df = pd.DataFrame({
        'dis': dis_array,
        'decimal_year': decimal_year_array,
        'mag_cor': mag_cor_array,
        'lat': lat_array,
        'lon': lon_array
    })

    #    #check distance
    #    interp_df['dis_check'] = np.nan
    #    current_dis,prev_lat_lon = 0,[]
    #    for i,row in interp_df.iterrows():
    #        #calculate distance from last point and add to total distance
    #        if prev_lat_lon!=[]:
    #            #/1000 to convert m to km
    #            current_dis += Geodesic.WGS84.Inverse(float(row['lat']),float(row['lon']),prev_lat_lon[0],prev_lat_lon[1])['s12']/1000
    #        prev_lat_lon = [float(row['lat']),float(row['lon'])]
    #        interp_df.at[i,'dis_check'] = round(current_dis,5))

    #write to .lp file
    print("saving %s" % fout_name)
    interp_df[['dis', 'decimal_year', 'mag_cor', 'lat',
               'lon']].to_csv(fout_name, sep='\t', index=False, header=False)
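
A possible driver loop for the function above. The directory names and glob pattern are placeholders; note that the function writes <SURVEY_ID>.lp into <data_directory>/<SURVEY_ID>/, so that subdirectory is assumed to exist.

import glob
import os

# hypothetical layout: raw MGD77T files under raw_data/, output under shipmag_data/
for m77t_path in sorted(glob.glob(os.path.join("raw_data", "*.m77t"))):
    preprocess_m77t(m77t_path, data_directory="shipmag_data")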
Example 7
def aeromag_preprocess(aeromag_files,
                       date_file=os.path.join('..', 'raw_data',
                                              'dates.aeromag'),
                       geoid=Geodesic.WGS84):
    for aeromag_file in aeromag_files:  #iterate over all aeromag files

        track, extension = os.path.basename(aeromag_file).split(
            '.')  #segment the name into parts
        #read data and make an empty dataframe for output data
        adf = utl.open_mag_file(aeromag_file).dropna()
        ddf = pd.read_csv(date_file, sep='\t', index_col=0)
        idf = pd.DataFrame(columns=[
            'dis', 'lat', 'lon', 'alt', 'v_comp', 'e_comp', 'n_comp', 'h_comp',
            't_comp'
        ])

        dis = 0
        decimal_year = float(ddf.loc[track]['decimal_year'])
        prev_lat, prev_lon = None, None
        for i, row in adf.iterrows():  #iterate over rows

            row["lon"] = utl.convert_to_0_360(row["lon"])
            adf.at[i, "lon"] = row["lon"]

            try:
                #check for data gaps
                if np.isnan(row['lat']) or np.isnan(row['lon']) or np.isnan(
                        row['alt']) or np.isnan(row['mag']) or np.isnan(
                            row['v_comp']) or np.isnan(
                                row['e_comp']) or np.isnan(
                                    row['n_comp']) or np.isnan(row['h_comp']):
                    continue
                    #check None
                elif (row['lat'] == None) or (row['lon'] == None) or (
                        row['alt'] == None) or (row['mag'] == None) or (
                            row['v_comp'] == None) or (row['e_comp'] == None):
                    continue
                    #check for absurd values outside the domain of the variable (this will capture null values of -99999)
                elif (abs(float(row['lat'])) > 90) or (abs(
                        utl.convert_to_180_180(row['lon'])) > 180) or (float(
                            row['alt']) < 0) or (abs(float(
                                row['mag'])) == 99999) or (abs(
                                    float(row['v_comp'])) == 99999) or (abs(
                                        float(row['e_comp'])) == 99999):
                    continue
            except ValueError as e:
                continue  # a value could not be converted to float; all of these fields should be floats, so skip this datum

            if prev_lat is not None and prev_lon is not None:  #calculate distance
                dis += geoid.Inverse(float(row['lat']), float(row['lon']),
                                     prev_lat, prev_lon)['s12'] / 1000
            adf.at[i, 'dis'] = dis

            #calculate and remove IGRF
            dec, inc, mag = ipmag.igrf([
                decimal_year,
                float(row['alt']) * 0.3048e-3,
                float(row['lat']),
                float(row['lon'])
            ])
            res_v_comp = mag * np.sin(np.deg2rad(inc))
            res_e_comp = mag * np.cos(np.deg2rad(inc)) * np.sin(
                np.deg2rad(dec))
            res_n_comp = mag * np.cos(np.deg2rad(inc)) * np.cos(
                np.deg2rad(dec))
            res_h_comp = mag * np.cos(np.deg2rad(inc))
            res_t_comp = mag

            adf.at[i, 'res_v_comp'] = float(row['v_comp']) - res_v_comp
            adf.at[i, 'res_e_comp'] = float(row['e_comp']) - res_e_comp
            adf.at[i, 'res_n_comp'] = float(row['n_comp']) - res_n_comp
            adf.at[i, 'res_h_comp'] = float(row['h_comp']) - res_h_comp
            adf.at[i, 'res_t_comp'] = float(row['mag']) - res_t_comp

            prev_lat, prev_lon = float(row['lat']), float(row['lon'])


#        adf = adf[(adf['res_e_comp']<3000) & (adf['res_n_comp']<3000) & (adf['res_v_comp']<3000) & (adf['res_h_comp']<3000) & (adf['res_t_comp']<3000)]

# remove a low-order polynomial trend from the magnetic data (the polynomial fit below is currently commented out, so the residual components pass through unchanged)
        for col in [
                'res_e_comp', 'res_n_comp', 'res_h_comp', 'res_v_comp',
                'res_t_comp'
        ]:
            #            pols = np.polyfit(adf['dis'].tolist(),adf[col].tolist(),3)
            #            mag_fit = np.polyval(pols,adf['dis'].tolist())
            #            adf['cor'+col.lstrip('res')] = adf[col].to_numpy() - mag_fit
            adf['cor' + col.lstrip('res')] = adf[col].to_numpy()

        #interpolate and round data
        adf = adf.dropna()
        idf['dis'] = np.arange(adf['dis'].iloc[0], adf['dis'].iloc[-1] + .1,
                               .1)  # 0.1 km spacing
        idf['lat'] = np.interp(idf['dis'], adf['dis'], adf['lat'])
        idf['lon'] = np.interp(idf['dis'], adf['dis'], adf['lon'])
        idf['alt'] = np.interp(idf['dis'], adf['dis'], .3048 * adf['alt'])
        idf['v_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_v_comp'])
        idf['e_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_e_comp'])
        idf['n_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_n_comp'])
        idf['h_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_h_comp'])
        idf['t_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_t_comp'])

        adf[['dis', 'alt', 'cor_v_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Vd',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_e_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Ed',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_n_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Nd',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_h_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Hd',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_t_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Td',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'v_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Vd.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'e_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Ed.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'n_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Nd.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'h_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Hd.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 't_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Td.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")

        if extension.startswith('c'):
            shutil.copyfile(aeromag_file, aeromag_file + '.lp')

        latlon_df = adf[['lat', 'lon']]
        latlon_file = aeromag_file + ".latlon"
        latlon_df.to_csv(latlon_file, sep=' ', index=False, header=False)
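
A possible driver for the function above. The glob pattern and date-file path are placeholders; the date file is assumed to be a tab-separated table indexed by track name with a decimal_year column, as read near the top of aeromag_preprocess.

import glob
import os

# hypothetical layout: one file per aeromagnetic track under raw_data/aeromag/
aeromag_files = sorted(glob.glob(os.path.join("raw_data", "aeromag", "*")))
aeromag_preprocess(aeromag_files,
                   date_file=os.path.join("raw_data", "dates.aeromag"))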