Example 1
def read_and_fit_ridge_data(ridge_loc_path):
    with open(ridge_loc_path,'r') as ridge_loc_file:
        lines = ridge_loc_file.readlines()

    ridge_loc_dict,current_head = {},''
    for line in lines:
        if line.startswith('>'):
            current_head = line.strip('> \n')
            ridge_loc_dict[current_head] = []
        else:
            ridge_loc_dict[current_head].append(list(map(float,line.strip('\n').split('\t'))))

    for sz in list(ridge_loc_dict.keys()):
        if ridge_loc_dict[sz]==[]: ridge_loc_dict.pop(sz); continue
        ar = np.array(ridge_loc_dict[sz])
        dists = [Geodesic.WGS84.Inverse(ar[0,1],utl.convert_to_0_360(ar[0,0]),lat,utl.convert_to_0_360(lon))['s12']/1000 for lat,lon in zip(ar[:,1],utl.convert_to_0_360(ar[:,0]))]
        new_dists = np.arange(0,dists[-1],10)
        ridge_lons = np.interp(new_dists,dists,utl.convert_to_0_360(ar[:,0]))
        ridge_lats = np.interp(new_dists,dists,ar[:,1])
        ridge_lon_lats = [[lon,lat] for lon,lat in zip(ridge_lons,ridge_lats)]
        ridge_loc_dict[sz] = ridge_lon_lats

    def get_ridge_loc(sz,track_lon_lats):
        if sz not in ridge_loc_dict.keys(): print("sz not found when looking for ridge location, was given spreading zone %s but only had options %s"%(str(sz),str(ridge_loc_dict.keys()))); return None,None
        idx = gp.intersect_bf(track_lon_lats, ridge_loc_dict[sz],e=.5)
        if idx == [None,None]: print("could not calculate intersect"); return None,None
        else: return idx[0][0]

    return get_ridge_loc
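
A minimal usage sketch (hypothetical inputs; assumes numpy, geographiclib, and the project's utl/gp modules are importable, and that the ridge file uses '>' headers with tab-separated lon/lat pairs):

# 'ridge_locations.txt' and the spreading-zone key 'sz1' are hypothetical.
get_ridge_loc = read_and_fit_ridge_data('../raw_data/ridge_locations.txt')
track_lon_lats = [[210.0, -15.0], [210.5, -14.5], [211.0, -14.0]]  # [lon, lat] in 0-360 longitude
ridge_point = get_ridge_loc('sz1', track_lon_lats)  # intersection point, or (None, None) on failure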
Example 2
def seperate_chron_into_spreading_zones(chron_to_analyse):
    #separate chrons into the different spreading zones
    spreading_zone_files = []
    chron, chron_color = chron_to_analyse
    fchron = open("../raw_data/chrons/cande/cande.%s" % str(chron))
    string = fchron.read()
    fchron.close()
    spreading_zones = string.split('>')
    utl.check_dir('spreading_zones')
    for i, spreading_zone in enumerate(spreading_zones):
        if spreading_zone == '': continue
        headerless_spreading_zone = spreading_zone.split('\n')[1:]
        headerless_spreading_zone_string = '\n'.join(headerless_spreading_zone)
        fchron_out_path = os.path.join('spreading_zones',
                                       'chron%s_sz%d.txt' % (chron, i))
        fchron_out = open(fchron_out_path, 'w+')
        fchron_out.write(headerless_spreading_zone_string)
        fchron_out.close()
        spreading_zone_files.append(fchron_out_path)
    ccz, gcz = utl.get_barckhausen_2013_chrons()
    if str(chron) in ccz.keys():
        i += 1
        ccz_data = np.array([(utl.convert_to_0_360(lonlat[0]),
                              float(lonlat[1])) for lonlat in ccz[str(chron)]])
        if len(ccz_data) > 1:
            nlons = np.arange(min(ccz_data[:, 0]), max(ccz_data[:, 0]), .025)
            nlats = np.interp(nlons, ccz_data[:, 0], ccz_data[:, 1])
            out_str = '\n'.join([
                str(nlon) + ' ' + str(nlat)
                for nlon, nlat in zip(nlons, nlats)
            ])
            fchron_out_path = os.path.join('spreading_zones',
                                           'chron%s_sz%d.txt' % (chron, i))
            print("Barckhausen data for CCZ included in %s" % fchron_out_path)
            fchron_out = open(fchron_out_path, 'w+')
            fchron_out.write(out_str)
            fchron_out.close()
            spreading_zone_files.append(fchron_out_path)
    if str(chron) in gcz.keys():
        i += 1
        gcz_data = np.array([(utl.convert_to_0_360(lonlat[0]),
                              float(lonlat[1])) for lonlat in gcz[str(chron)]])
        if len(gcz_data) > 1:
            nlats = np.arange(min(gcz_data[:, 1]), max(gcz_data[:, 1]), .05)
            nlons = np.interp(nlats, gcz_data[:, 1], gcz_data[:, 0])
            out_str = '\n'.join([
                str(nlon) + ' ' + str(nlat)
                for nlon, nlat in zip(nlons, nlats)
            ])
            fchron_out_path = os.path.join('spreading_zones',
                                           'chron%s_sz%d.txt' % (chron, i))
            print("Barckhausen data for GCZ included in %s" % fchron_out_path)
            fchron_out = open(fchron_out_path, 'w+')
            fchron_out.write(out_str)
            fchron_out.close()
            spreading_zone_files.append(fchron_out_path)
    return spreading_zone_files
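
A minimal usage sketch (the chron tuple is hypothetical; the cande file ../raw_data/chrons/cande/cande.<chron> must exist):

# Only the first element of the (chron, chron_color) tuple is used here.
spreading_zone_files = seperate_chron_into_spreading_zones(('20r', 'tab:red'))
print(spreading_zone_files)  # e.g. ['spreading_zones/chron20r_sz1.txt', ...]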
Example 3
def get_shipmag_decimal_year(row,deg_e=.01):
    """
    Takes a row (pandas Series) of a deskew file which is of type ship and returns the decimal year for the
    intersection point. Returns None if not found.
    """
    if row['track_type']!='ship':
        raise ValueError("get_shipmag_decimal_year can only run on shipmag data recieved data of type %s instead"%str(row['track_type']))
    data_file_path = os.path.join(row["data_dir"],row["comp_name"])
    data_df = utl.open_mag_file(data_file_path)
#    data_df = pd.read_csv(data_file_path,names=["dist","decimal_year","mag","lat","lon"],delim_whitespace=True)
    decimal_year=None
    for j,datarow in data_df.iterrows(): #iterate to find the distance associated with the current lat lon
        if (float(datarow['lat'])>=float(row['inter_lat'])-deg_e and \
          float(datarow['lat'])<=float(row['inter_lat'])+deg_e) and \
          (utl.convert_to_0_360(datarow['lon'])>=utl.convert_to_0_360(row['inter_lon'])-deg_e and \
          utl.convert_to_0_360(datarow['lon'])<=utl.convert_to_0_360(row['inter_lon'])+deg_e):
            decimal_year=float(datarow['dec_year']); break
    return decimal_year
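
A minimal usage sketch (the row below is a hypothetical deskew entry carrying only the fields the function reads; the referenced data file must be readable by utl.open_mag_file):

import pandas as pd

row = pd.Series({'track_type': 'ship', 'data_dir': 'shipmag_data/c20',
                 'comp_name': 'example.lp', 'inter_lat': -14.5, 'inter_lon': 210.3})
decimal_year = get_shipmag_decimal_year(row, deg_e=.01)  # None if no point lies within deg_e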
Example 4
def read_and_fit_fz_data(fz_directory=os.path.join('..','raw_data','fracture_zones')):
    fzs = glob.glob(os.path.join(fz_directory,'*'))
    lfz = []
    for fz in fzs:
        fzdf = pd.read_csv(fz,sep='\t')
        dists = [Geodesic.WGS84.Inverse(fzdf['Latitude'][0],utl.convert_to_0_360(fzdf['Longitude'][0]),lat,utl.convert_to_0_360(lon))['s12']/1000 for lat,lon in zip(fzdf['Latitude'],fzdf['Longitude'])]
        new_dists = np.arange(0,dists[-1],10)
        fz_lons = np.interp(new_dists,dists,utl.convert_to_0_360(fzdf['Longitude']))
        fz_lats = np.interp(new_dists,dists,fzdf['Latitude'])
        fz_lon_lats = [[lon,lat] for lon,lat in zip(fz_lons,fz_lats)]
        lfz.append(fz_lon_lats)

    def get_fz_loc(track_lon_lats,e=.5):
        inters = []
        for fz in lfz:
            idx = intersect_bf(track_lon_lats, fz,e=e)
            if idx != [None,None]: inters.append(idx[0][0])
        return inters

    return get_fz_loc
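
A minimal usage sketch (assumes tab-separated fracture-zone files with Latitude/Longitude columns in the default directory; the track is hypothetical):

get_fz_loc = read_and_fit_fz_data()  # defaults to ../raw_data/fracture_zones
track_lon_lats = [[210.0, -15.0], [211.0, -14.0], [212.0, -13.0]]  # [lon, lat] in 0-360 longitude
intersections = get_fz_loc(track_lon_lats, e=.5)  # one entry per fracture zone crossed, possibly empty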
Example 5
def get_track_intersects(chron_to_analyse,
                         tracks_or_cuts,
                         spreading_zone_files,
                         data_directory='.',
                         bounding_lats=(-90, 90),
                         bounding_lons=(0, 360),
                         e=1):
    """ This function works in 0-360 longitude because otherwise there would be a discontinuty in the Pacific the region of interest """
    chron, chron_color = chron_to_analyse
    chron_name = "chron%s" % (str(chron))
    bound_check_func = lambda x: (bounding_lats[0] < float(x[1]) < bounding_lats[1] and
                                  bounding_lons[0] < float(x[0]) < bounding_lons[1])
    intersecting_tracks, out_string = [], ""
    for track in tqdm(tracks_or_cuts):
        print(track)
        dft = utl.open_mag_file(track)
        if dft.empty: continue
        lt = [[utl.convert_to_0_360(lon),
               float(lat)] for lon, lat in zip(dft['lon'], dft['lat'])]
        if not list(filter(bound_check_func, lt)):
            print("track out of bounds, skipping track")
            continue

        for spreading_zone_file in spreading_zone_files:
            lsz = [[line.split()[0], line.split()[1]]
                   for line in open(spreading_zone_file).readlines()
                   if len(line.split()) > 1]
            if not list(filter(bound_check_func, lsz)): continue
            idx = intersect_bf(lt, lsz, e=e)

            if not any(idx): continue
            else:
                print("-----------intersected in bounds-------------")
                intersecting_tracks.append(track)
                out_string += "%s\t%s\t%s\n" % (track, spreading_zone_file,
                                                str(idx))
                break

    print("found %d intersecting tracks" % len(intersecting_tracks))
    utl.check_dir(data_directory)
    fout_name = os.path.join(
        data_directory,
        "usable_tracks_and_intersects_for_%s.txt" % str(chron_name))
    if os.path.isfile(fout_name):
        print("backing up %s to %s" % (fout_name, fout_name + '.bak'))
        shutil.copyfile(fout_name, fout_name + '.bak')
    fout = open(fout_name, 'w+')
    print("writing to %s" % fout_name)
    fout.write(out_string)
    fout.close()

    return intersecting_tracks, fout_name
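
A minimal usage sketch (the track and spreading-zone paths are hypothetical and must exist on disk for utl.open_mag_file to read):

tracks = ['shipmag_data/example1.lp', 'shipmag_data/example2.lp']  # hypothetical
sz_files = ['spreading_zones/chron20r_sz1.txt']  # hypothetical
intersecting, fout = get_track_intersects(('20r', 'tab:red'), tracks, sz_files,
                                          data_directory='.',
                                          bounding_lats=(-40, 40),
                                          bounding_lons=(180, 280), e=1)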
Example 6
    def cmp_grav_files(
        x, y
    ):  #sorting function utilizing file names to order concatenation of Sandwell data; 'window' is a free variable from the enclosing scope
        xfile = os.path.basename(x)
        yfile = os.path.basename(y)
        xlat, xlon = list(map(float, re.findall(r'\d+', xfile)))
        ylat, ylon = list(map(float, re.findall(r'\d+', yfile)))

        if "S" in xfile: xlat = -xlat
        if "W" in xfile: xlon = 360 - xlon
        if "S" in yfile: ylat = -ylat
        if "W" in yfile: ylon = 360 - ylon

        if xlat - ylat == 0:
            return (utl.convert_to_0_360(ylon) - utl.convert_to_0_360(
                window[1])) % 360 - (utl.convert_to_0_360(xlon) -
                                     utl.convert_to_0_360(window[1])) % 360
            #            if (ylon-window[1])==0: return -1
            #            elif (xlon-window[1])==0: return 1
            #            else: return (ylon-window[1])%360 - (xlon-window[1])%360
        else:
            return ylat - xlat
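
Because cmp_grav_files is an old-style comparison function, it has to be wrapped with functools.cmp_to_key before it can drive sorted(). A sketch, assuming the def is lifted to a scope where the free variable window resolves (file names are hypothetical but follow the lat-then-lon digit pattern the regex expects):

from functools import cmp_to_key

window = [190., 250., -40., 40.]  # hypothetical plotting window captured by cmp_grav_files
files = ['gravN30W120.tiff', 'gravS30W120.tiff', 'gravN30W060.tiff']  # hypothetical
ordered = sorted(files, key=cmp_to_key(cmp_grav_files))  # north-to-south, then by longitude relative to window[1]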
Example 7
def find_fz_crossings(deskew_path,fz_directory=os.path.join('..','raw_data','fracture_zones')):
    deskew_df = utl.open_deskew_file(deskew_path)
    get_fz_loc = read_and_fit_fz_data(fz_directory)

    fz_inter_dict = {}
    for i,row in deskew_df.iterrows():
        data_path = os.path.join(row['data_dir'],row['comp_name'])
        data_df = utl.open_mag_file(data_path)
        track_lon_lats = [[utl.convert_to_0_360(lon),lat] for lon,lat in zip(data_df['lon'],data_df['lat'])]
        inters = get_fz_loc(track_lon_lats)
        if inters != []: fz_inter_dict[row['comp_name']] = inters

    fz_inter_df = pd.DataFrame({'inters':fz_inter_dict})
    fz_inter_df.to_csv('fz_intercepts.txt',sep='\t')
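
A minimal usage sketch (the deskew path is hypothetical; crossings are written to fz_intercepts.txt as a tab-separated table):

find_fz_crossings('example.deskew')  # hypothetical deskew file readable by utl.open_deskew_file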
Example 8
def create_maxtab_file(deskew_path,anomoly_name,outfile=None):
    deskew_df = utl.open_deskew_file(deskew_path)
    dates_data = pd.read_csv("../raw_data/dates.aeromag",sep='\t',header=0,index_col=0)
    out_str = ""
    for i,row in deskew_df.iterrows():
        if '#' in row['comp_name']: continue
        track_name = row['comp_name'].split('.')[0]
        if row['track_type']=='ship': date = "%.2f"%get_shipmag_decimal_year(row) #note: raises TypeError if get_shipmag_decimal_year returns None
        else: date = "%.2f"%float(dates_data.loc[track_name]["decimal_year"])
        phase_shift = "%.2f"%utl.convert_to_0_360(float(row['phase_shift']))
        out_str += ' '*(17-len(row['comp_name']))+ row['comp_name'] + ' '
        out_str += 'V' if 'Vd' in row['comp_name'] else 'E'
        out_str += ' '*(6-len(anomoly_name)) + str(anomoly_name)
        out_str += ' '*(9-len(date)) + date
        out_str += ' '*(8-len("%.2f"%float(row['inter_lat']))) + "%.2f"%float(row['inter_lat'])
        out_str += ' '*(8-len("%.2f"%float(row['inter_lon']))) + "%.2f"%float(row['inter_lon'])
        out_str += ' '*(8-len("%.2f"%(float(row['strike'])))) + "%.2f"%(float(row['strike']))
        out_str += ' '*(7-len(phase_shift)) + phase_shift
        out_str += ' '*(11-len('10.000')) + '10.000' + '\n'
    if outfile is None: outfile = "maxtab.%s"%anomoly_name
    print("saving to %s"%outfile)
    out_file = open(outfile,'w+')
    out_file.write(out_str)
    out_file.close()
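
A minimal usage sketch (deskew path and anomaly name are hypothetical; ../raw_data/dates.aeromag must exist for the aeromag branch):

create_maxtab_file('example.deskew', '20r')  # writes maxtab.20r by default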
Example 9
def old_get_lon_lat_from_plot_pick(deskew_row,plot_pick,dist_e=.01,verbose=False):
    drow,correction=deskew_row,plot_pick

    data_file_path = os.path.join(drow["data_dir"],drow["comp_name"])
    data_df = pd.read_csv(data_file_path,names=["dist","idk","mag","lat","lon"],delim_whitespace=True)

    projected_distances = utl.calc_projected_distance(drow['inter_lon'],drow['inter_lat'],data_df['lon'].tolist(),data_df['lat'].tolist(),drow['strike'])

    found_dist=False
    for j,row in projected_distances.iterrows():
        if row['dist']>=correction-dist_e and row['dist']<=correction+dist_e:
            picked_lat = round(row['lat'],3) #change lat for the new deskew file
            picked_lon = round(utl.convert_to_0_360(row['lon']),3) #change lon for the new deskew file
            picked_distance = row['dist']
            found_dist=True
            break

    if found_dist:
        if verbose: print("found lat lon of %s at a distance %.3f"%(drow["comp_name"],picked_distance))
    else:
        if verbose: print("couldn't find picked distance in datafile to calculate lat and lon for %s"%drow["comp_name"])
        return (drow['inter_lon'],drow['inter_lat'],0)

    return picked_lon,picked_lat,picked_distance
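
A minimal usage sketch (the row carries only the fields the function reads; all values and paths are hypothetical):

import pandas as pd

deskew_row = pd.Series({'data_dir': 'shipmag_data/c20', 'comp_name': 'example.lp',
                        'inter_lon': 210.3, 'inter_lat': -14.5, 'strike': 345.0})
lon, lat, dist = old_get_lon_lat_from_plot_pick(deskew_row, 12.5, verbose=True)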
Example 10
def calc_strikes_and_add_err(
        dsk_path,
        mlat=90,
        mlon=0,
        ma=1,
        mb=1,
        mphi=0,
        geoid=Geodesic(6371, 0.0),
        outfile=None,
        filter_by_quality=False,
        visualize=False,
        visual_padding=3.,
        down_sample_factor=5.,
        sandwell_files_path="../raw_data/gravity/Sandwell",
        convergence_level=0.01,
        euler_pole=None):
    """
    Function that does the heavy lifting calculating the great circles and associated strikes
    for anomaly crossings. Will also add average strike uncertainty to a paleomagnetic pole
    if provided. Function prints quite a lot for user diagnostics; pipe to /dev/null to silence.

    Parameters
    ----------
    dsk_path : str
            Path to deskew file containing the profiles to invert strikes on
    mlat : float, optional
    mlon : float, optional
    ma : float, optional
    mb : float, optional
    mphi : float, optional
    geoid : geographiclib.geodesic.Geodesic, optional
            geodesic to use for projection; default is a sphere of radius 6371 with flattening of 0
    outfile : str, optional
            output deskew file with the corrected strikes
    filter_by_quality : bool, optional
            decides whether "bad" data is filtered out of the strike fit (Default : False)
    visualize : bool, optional
            whether or not to render images showing the fit to sites (Default : False)
    visual_padding : float, optional
            how much to pad out the plotting window in degrees (Default : 3.)
    down_sample_factor : float, optional
            how much to downsample the gravity data; operates as a divisor, so 2 means half of the gravity data will plot
            (Default : 5.)
    sandwell_files_path : str, optional
            path to the files containing the Sandwell gravity grid to render, in .tiff format
    convergence_level : float, optional
            the convergence criteria to pass to the function finding the maximum likelihood pole
    euler_pole : iterable, optional
            changes operation to predict strikes solely from the input Euler pole rather than from the data; will
            also report the degree to which the data-predicted strikes agree with the input Euler pole. Multiple
            Euler poles can be passed and estimates based on all will be reported; however, only the last Euler
            pole will be saved in the output deskew file

    Returns
    ----------
    full_unc : tuple
            the combined uncertainty ellipse from cov_to_ellipse, merging the input pole covariance with the
            mean strike covariance

    Raises
    ----------
    RuntimeError
    ValueError
    """
    (mx, my, mz), mcov = latlon2cart(mlat, mlon,
                                     ellipse_to_cov(mlat, mlon, ma, mb, mphi))

    dsk_df = utl.open_deskew_file(dsk_path)
    dsk_df.sort_values("inter_lat", inplace=True, ascending=False)
    if filter_by_quality:
        bad_dsk_data = dsk_df[dsk_df["quality"] != "g"]
        dsk_df = dsk_df[dsk_df["quality"] == "g"]
    tcov, strike_diffs = np.zeros([3, 3]), []
    szs_to_calc = dsk_df["sz_name"].drop_duplicates() #.drop(24) #removes MahiMahi

    if isinstance(euler_pole, type(None)) or len(euler_pole) == 0:
        euler_poles = [None]
    elif len(euler_pole) == 2 and (isinstance(euler_pole[0], float)
                                   or isinstance(euler_pole[0], int)):
        euler_poles = [euler_pole]
    elif len(euler_pole) > 0 and (isinstance(euler_pole[0], list)
                                  or isinstance(euler_pole[0], tuple)):
        euler_poles = euler_pole
    else:
        raise ValueError(
            "Euler pole must be None or either a list of euler poles which are length=2 or a single euler pole with lat and lon entries. (i.e. [90,0] or [[90,0],[0,0]])"
        )

    n = 0
    for sz in szs_to_calc:
        sz_df = dsk_df[dsk_df["sz_name"] == sz]
        print(sz, ":", len(sz_df.index))

        if visualize:
            window = [
                utl.convert_to_0_360(sz_df["inter_lon"].min() -
                                     visual_padding),
                utl.convert_to_0_360(sz_df["inter_lon"].max() +
                                     visual_padding),
                sz_df["inter_lat"].min() - visual_padding,
                sz_df["inter_lat"].max() + visual_padding
            ]
            fig = plt.figure(dpi=100)
            proj = ccrs.Mercator(central_longitude=sz_df["inter_lon"].mean())
            ax = fig.add_subplot(111, projection=proj)
            ax.set_xticks(np.arange(0, 370, 10.), crs=ccrs.PlateCarree())
            ax.set_yticks(np.arange(-80, 90, 10.), crs=ccrs.PlateCarree())
            ax.tick_params(grid_linewidth=.5,
                           grid_linestyle=":",
                           color="k",
                           labelsize=8)
            lon_formatter = LongitudeFormatter(zero_direction_label=True)
            lat_formatter = LatitudeFormatter()
            ax.xaxis.set_major_formatter(lon_formatter)
            ax.yaxis.set_major_formatter(lat_formatter)
            land = cfeature.NaturalEarthFeature('physical',
                                                'land',
                                                "50m",
                                                edgecolor="black",
                                                facecolor="grey",
                                                linewidth=2)
            ax.add_feature(land)

        num_sites = (sz_df["track_type"] == "aero").sum() / 2 + (
            sz_df["track_type"] == "ship").sum()

        if num_sites > 2:  #overdetermined case
            data = {
                "dec": [],
                "inc": [],
                "phs": [],
                "ell": [],
                "ccl": [],
                "azi": [],
                "amp": []
            }
            for i, row in sz_df.iterrows():
                if row["track_type"] == "aero":
                    if "Ed" in row["comp_name"]: continue
                    elif "Vd" in row["comp_name"]:
                        other_comp = sz_df[sz_df["comp_name"] ==
                                           row["comp_name"].replace(
                                               "Vd", "Ed")].iloc[0]
                        row["inter_lat"] = (row["inter_lat"] +
                                            other_comp["inter_lat"]) / 2
                        row["inter_lon"] = (row["inter_lon"] +
                                            other_comp["inter_lon"]) / 2
                    else:
                        raise RuntimeError(
                            "You really shouldn't have gotten here, you have aeromag that can't find its second component"
                        )
                if visualize:
                    if row["quality"] != "g": marker = "X"
                    else:
                        if row["track_type"] == "ship": marker = "o"
                        else: marker = "s"
                    ax.scatter(row["inter_lon"],
                               row["inter_lat"],
                               facecolors=(row["r"], row["g"], row["b"]),
                               edgecolors="k",
                               transform=ccrs.PlateCarree(),
                               marker=marker,
                               zorder=100)
                data["ccl"].append([
                    row["comp_name"],
                    [90.0, 0.10, row["inter_lat"], row["inter_lon"]]
                ])
            (plat, plon, _, maj_se, min_se,
             phi), chisq, dof = pymax.max_likelihood_pole(
                 data, convergence_level=convergence_level)
            for i in range(len(data["ccl"])):
                data["ccl"][i][1][1] *= np.sqrt(chisq)
            (plat, plon, _, maj_se, min_se,
             phi), chisq, dof = pymax.max_likelihood_pole(
                 data, convergence_level=convergence_level)
            print("\t", (plat, plon, maj_se, min_se, phi), chisq, dof)
            (_, _, _), scov = latlon2cart(
                plat, plon, ellipse_to_cov(plat, plon, maj_se, min_se, phi))
            tcov += scov
            n += 1
            for ep_idx, euler_pole in enumerate(euler_poles):
                if not isinstance(euler_pole, type(None)):
                    print(
                        "--------------------------------------------------------------------------------"
                    )
                    print("Euler Pole: %.1f, %.1f" %
                          (euler_pole[0], euler_pole[1]))
                estrikes, dists = [], []
                for i, row in sz_df.iterrows():
                    if not isinstance(euler_pole, type(None)):
                        geodict = geoid.Inverse(*euler_pole, row["inter_lat"],
                                                row["inter_lon"])
                        pgeodict = geoid.Inverse(plat, plon, row["inter_lat"],
                                                 row["inter_lon"])
                        strike = geodict["azi2"]
                        pstrike = pgeodict["azi2"] + 90
                        if pstrike < 0: pstrike += 180
                        strike_diff = abs(strike - pstrike)
                        if strike_diff > 90:
                            strike_diff = abs(180 - strike_diff)
                        if len(strike_diffs) < ep_idx + 1:
                            strike_diffs.append([])
                        strike_diffs[ep_idx].append(strike_diff)
                        estrikes.append(geodict["azi1"] + 180)
                    else:
                        pgeodict = geoid.Inverse(plat, plon, row["inter_lat"],
                                                 row["inter_lon"])
                        strike = pgeodict["azi2"] + 90
                    dists.append(pgeodict["a12"])
                    if strike < 0: strike += 360
                    if strike < 180: strike += 180
                    dsk_df.at[i, "strike"] = strike
                    if not isinstance(euler_pole, type(None)):
                        print("\t\t", row["comp_name"], "\n",
                              "\t\t\tEuler Pole Strike: ", strike,
                              "\n\t\t\tPredicted Strike: ", pstrike)
                    else:
                        print("\t\t", row["comp_name"], strike)

                if visualize:
                    pdis = np.mean(dists)
                    print("Average Distance to GC Pole: ", pdis)
                    ax = psk.plot_small_circle(plon,
                                               plat,
                                               pdis,
                                               color="k",
                                               m=ax,
                                               geoid=Geodesic(6371., 0.),
                                               transform=ccrs.PlateCarree(),
                                               alpha=.7,
                                               linewidth=5,
                                               zorder=1)
                    if not isinstance(euler_pole, type(None)):
                        estrike = np.mean(estrikes)
                        print("Average Azimuth of Sites Relative to EP: ",
                              estrike)
                        ep_color = plt.rcParams['axes.prop_cycle'].by_key(
                        )['color'][(ep_idx % 9) + 1]
                        ax = psk.plot_great_circle(
                            euler_pole[1],
                            euler_pole[0],
                            estrike,
                            m=ax,
                            color=ep_color,
                            geoid=Geodesic(6371., 0.),
                            transform=ccrs.PlateCarree(),
                            alpha=.7,
                            linewidth=3,
                            zorder=2)
            if visualize:
                all_lons, all_lats, all_grav = pg.get_sandwell(
                    window,
                    down_sample_factor,
                    resample_method=Resampling.average,
                    sandwell_files_path=os.path.join(sandwell_files_path,
                                                     "*.tiff"))
                print("Plotting Gravity")
                start_time = time()
                print("Grid Sizes: ", all_lons.shape, all_lats.shape,
                      all_grav.shape)
                fcm = ax.contourf(all_lons,
                                  all_lats,
                                  all_grav,
                                  60,
                                  cmap="Blues_r",
                                  alpha=.75,
                                  transform=ccrs.PlateCarree(),
                                  zorder=0,
                                  vmin=0,
                                  vmax=255)
                print("Runtime: ", time() - start_time)
                ax.set_extent(window, ccrs.PlateCarree())
                vis_outpath = os.path.join(os.path.dirname(dsk_path),
                                           "strike_fit_%s" % sz)
                print("Saving: %s" % vis_outpath)
                fig.savefig(vis_outpath)

        elif num_sites == 2:  #equal determined case
            strike = geoid.Inverse(sz_df.iloc[0]["inter_lat"],
                                   sz_df.iloc[0]["inter_lon"],
                                   sz_df.iloc[1]["inter_lat"],
                                   sz_df.iloc[1]["inter_lon"])["azi1"]
            if strike < 0: strike += 360
            if strike < 180: strike += 180
            for i, row in sz_df.iterrows():
                dsk_df.at[i, "strike"] = strike
                print("\t", row["comp_name"], strike)
        else:  #under determined case; just ignore
            pass

    if filter_by_quality:
        dsk_df = pd.concat([dsk_df, bad_dsk_data]) #DataFrame.append was removed in pandas 2.0
        dsk_df.sort_values("inter_lat", inplace=True, ascending=False)

    print("--------------------------------------")
    (mlat, mlon), totcov = cart2latlon(mx, my, mz, mcov + (tcov / n))
    full_unc = cov_to_ellipse(mlat, mlon, totcov)
    print("Strike Covariance Matrix:\n", tcov)
    print("Full Uncertainty: ", full_unc)
    if not isinstance(euler_pole, type(None)):
        if visualize:
            all_strike_diffs = []
            fig_all = plt.figure(dpi=100)
            ax_all = fig_all.add_subplot(111)
            for ep_idx in range(len(strike_diffs)):
                ep_color = plt.rcParams['axes.prop_cycle'].by_key()['color'][
                    (ep_idx % 9) + 1]
                #Do histogram for each individual euler pole
                print(
                    "For EP %d -> Mean, Median, Min, Max Strike Differences: "
                    % ep_idx,
                    sum(strike_diffs[ep_idx]) / len(strike_diffs[ep_idx]),
                    np.median(strike_diffs[ep_idx]), min(strike_diffs[ep_idx]),
                    max(strike_diffs[ep_idx]))
                fig = plt.figure(dpi=100)
                ax = fig.add_subplot(111)
                ax.hist(strike_diffs[ep_idx],
                        bins=np.arange(0., 4.2, 0.2),
                        color=ep_color)
                ax.axvline(sum(strike_diffs[ep_idx]) /
                           len(strike_diffs[ep_idx]),
                           color="tab:blue",
                           linestyle="--")
                ax.axvline(np.median(strike_diffs[ep_idx]), color="cyan")
                vis_outpath = os.path.join(
                    os.path.dirname(dsk_path),
                    "strike_fit_epstats_%d.png" % ep_idx)
                print("Saving: %s" % vis_outpath)
                fig.savefig(vis_outpath)
                #Do stacked histogram for all euler poles
                all_strike_diffs += list(strike_diffs[ep_idx])
                ax_all.hist(all_strike_diffs,
                            bins=np.arange(0., 4.2, 0.2),
                            color=ep_color,
                            zorder=len(strike_diffs) - ep_idx)


#            all_strike_diffs = reduce(lambda x,y=[]: x+y, strike_diffs)
            print("For All EP -> Mean, Median, Min, Max Strike Differences: ",
                  sum(all_strike_diffs) / len(all_strike_diffs),
                  np.median(all_strike_diffs), min(all_strike_diffs),
                  max(all_strike_diffs))
            ax_all.axvline(sum(all_strike_diffs) / len(all_strike_diffs),
                           color="tab:red",
                           linestyle="--")
            ax_all.axvline(np.median(all_strike_diffs), color="tab:orange")
            vis_outpath = os.path.join(os.path.dirname(dsk_path),
                                       "strike_fit_all_epstats.png")
            print("Saving: %s" % vis_outpath)
            fig_all.savefig(vis_outpath)
            all_strike_diffs = reduce(lambda x, y=[]: x + y, strike_diffs)
            print(
                "For All EP (Check) -> Mean, Median, Min, Max Strike Differences: ",
                sum(all_strike_diffs) / len(all_strike_diffs),
                np.median(all_strike_diffs), min(all_strike_diffs),
                max(all_strike_diffs))

    if isinstance(outfile, type(None)):
        outfile = os.path.join(os.path.dirname(dsk_path),
                               "strike_cor_" + os.path.basename(dsk_path))
    print("Writing to %s" % str(outfile))
    utl.write_deskew_file(outfile, dsk_df)

    return full_unc
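
A minimal usage sketch (the deskew path, pole ellipse, and Euler pole are hypothetical; visualize=False avoids needing the Sandwell grids):

full_unc = calc_strikes_and_add_err('example.deskew',
                                    mlat=62., mlon=185., ma=2.5, mb=1.5, mphi=30.,
                                    filter_by_quality=True, visualize=False,
                                    euler_pole=[65., 300.])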
Example 11
def plot_az_strike(track, spreading_zone_file, idx, az, strike, chron_color,
                   chron_name, results_directory, fout_name):

    #Create Figure
    fig = plt.figure(figsize=(9, 9), dpi=80)

    #Create Chron markers
    dft = utl.open_mag_file(track)
    lt = [[utl.convert_to_0_360(lon), float(lat)]
          for lon, lat in zip(dft['lon'], dft['lat'])]
    at = np.array(lt)
    lsz = [
        list(map(float, line.split()))
        for line in open(spreading_zone_file).readlines()
    ]
    asz = np.array(lsz)

    #Create Map
    #            gcm = create_basic_map() #uses defaults, hit shift-tab in parens to see what they are

    llcrnrlon = min(at[:, 0]) - 20 if min(at[:, 0]) - 20 > 0 else 0
    llcrnrlat = min(at[:, 1]) - 20 if min(at[:, 1]) - 20 > -89 else -89
    urcrnrlon = max(at[:, 0]) + 20 if max(at[:, 0]) + 20 < 360 else 360
    urcrnrlat = max(at[:, 1]) + 20 if max(at[:, 1]) + 20 < 89 else 89

    gcm = create_basic_map(projection='merc',
                           llcrnrlat=llcrnrlat,
                           urcrnrlat=urcrnrlat,
                           llcrnrlon=llcrnrlon,
                           urcrnrlon=urcrnrlon,
                           fig=fig)

    sz_handle, = gcm.plot(asz[:, 0],
                          asz[:, 1],
                          color=chron_color,
                          zorder=1,
                          label=chron_name,
                          transform=ccrs.PlateCarree())

    gcm_handle, = gcm.plot(at[:, 0],
                           at[:, 1],
                           color='k',
                           zorder=2,
                           label=os.path.basename(track),
                           transform=ccrs.PlateCarree())

    gcm.scatter(at[idx[1][0]][0],
                at[idx[1][0]][1],
                color='g',
                marker='o',
                s=10,
                zorder=3,
                label='nearest intercept',
                transform=ccrs.PlateCarree())

    geodict = Geodesic(6371000., 0.).Direct(float(at[idx[1][0]][1]),
                                            float(at[idx[1][0]][0]), float(az),
                                            1000000)
    b_lon, b_lat = (360 + geodict["lon2"]) % 360, geodict["lat2"]
    gcm.arrow(b_lon,
              b_lat,
              b_lon - at[idx[1][0]][0],
              b_lat - at[idx[1][0]][1],
              fc="white",
              ec="r",
              linewidth=1,
              head_width=1,
              head_length=1,
              label='azimuth',
              transform=ccrs.PlateCarree())

    geodict = Geodesic(6371000., 0.).Direct(float(at[idx[1][0]][1]),
                                            float(at[idx[1][0]][0]),
                                            float(strike), 1000000)
    b_lon, b_lat = (360 + geodict["lon2"]) % 360, geodict["lat2"]
    gcm.arrow(b_lon,
              b_lat,
              b_lon - at[idx[1][0]][0],
              b_lat - at[idx[1][0]][1],
              fc="white",
              ec="pink",
              linewidth=1,
              head_width=1,
              head_length=1,
              label='strike',
              transform=ccrs.PlateCarree())

    #plot title and labels
    plt.title(os.path.basename(track))
    plt.legend(loc='best')

    az_plots_dir = os.path.join(results_directory, "azimuth_strike_plots")
    utl.check_dir(az_plots_dir)

    fig.savefig(
        os.path.join(az_plots_dir,
                     os.path.basename(fout_name)[:-5] + "png"))
    plt.close(fig)
Example 12
def cut_tracks_and_flip(track_cuts, data_directory, heading="east"):

    cut_tracks, flipped_data = [], []
    for track, cuts in track_cuts.items():
        print("Starting Track: %s" % track)
        directory, path = os.path.split(track)
        dfin = utl.open_mag_file(track)
        #        fin = open(track,'r')
        #        lines = fin.readlines()
        #        fin.close()
        #        lines = [line.split() for line in lines]
        #        dfin = pd.DataFrame(lines,columns=["time","lat","lon","n_comp","s_comp","h_comp","v_comp","mag","dec","inc","None","alt"])
        lats = list(map(float, dfin['lat'].tolist()))
        lons = list(map(utl.convert_to_0_360, dfin['lon'].tolist()))
        df_segments = []
        for cut in cuts:
            try:
                cut_index = [[lon, lat] for lon, lat in zip(lons, lats)
                             ].index([utl.convert_to_0_360(cut[0]), cut[1]])
            except ValueError as e:
                import pdb
                pdb.set_trace()
            print("cutting track: %s along index: %d" % (track, cut_index))
            df_segments.append(dfin.loc[:cut_index])
            dfin = dfin.loc[cut_index:]
        df_segments.append(dfin)
        i = 1
        for df_segment in df_segments:
            if len(df_segment) == 0: continue
            if heading == 'east':
                flip_bool = (utl.convert_to_0_360(df_segment['lon'].iloc[0]) >
                             utl.convert_to_0_360(df_segment['lon'].iloc[-1])
                             )  #is heading easterly
            elif heading == 'west':
                flip_bool = (utl.convert_to_0_360(df_segment['lon'].iloc[0]) <
                             utl.convert_to_0_360(df_segment['lon'].iloc[-1])
                             )  #is heading westerly
            elif heading == 'north':
                flip_bool = (
                    df_segment['lat'].iloc[0] > df_segment['lat'].iloc[-1]
                )  #is heading northerly
            elif heading == 'south':
                flip_bool = (
                    df_segment['lat'].iloc[0] < df_segment['lat'].iloc[-1]
                )  #is heading southerly
            else:
                print(
                    "the heading provided is not a cardinal direction please rerun with this corrected"
                )
                return
            if flip_bool:
                print(
                    "flipping data for cut: %d track: %s such that path is %serly and thus (hopefully) oldest data first"
                    % (i, path, heading))
                df_segment = df_segment.iloc[::-1]
                flipped_data.append(track.split('.')[0] + '.c%d' % i)
            if not os.path.isdir(os.path.join(directory, 'c%d' % i)):
                print("making directory %s" %
                      os.path.join(directory, 'c%d' % i))
                utl.check_dir(os.path.join(directory, 'c%d' % i))
            segment_path = os.path.join(directory, 'c%d' % i,
                                        path.split('.')[0] + '.c%d' % i)
            i += 1
            print("writing: %s" % segment_path)
            df_segment.to_csv(segment_path,
                              sep='\t',
                              header=False,
                              index=False)
            cut_tracks.append(segment_path)
    f_flipped = open(os.path.join(data_directory, "flipped_data.txt"), 'w+')
    f_flipped.write('\n'.join(flipped_data)) #join tolerates an empty list, unlike the old reduce
    f_flipped.close()
    return cut_tracks, flipped_data
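
A minimal usage sketch (the track path and cut point are hypothetical; each cut must exactly match a (lon, lat) sample in the track, otherwise the pdb trap above fires):

track_cuts = {'shipmag_data/example.lp': [(210.5, -14.5)]}
cut_tracks, flipped = cut_tracks_and_flip(track_cuts, '.', heading='east')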
Example 13
    def plot_tracks(self):
        try:
            dsk_row = self.parent.dsk_row
            dsk_data = self.parent.deskew_df[self.parent.deskew_df["sz_name"]
                                             == dsk_row["sz_name"]]
        except AttributeError:
            return
        # Restored from the commented-out lines (without them geodict1/geodict2 were undefined);
        # mag_data is assumed to be the profile belonging to dsk_row.
        mag_data = utl.open_mag_file(os.path.join(dsk_row['data_dir'], dsk_row['comp_name']))
        projected_distances = utl.calc_projected_distance(dsk_row['inter_lon'],dsk_row['inter_lat'],mag_data['lon'].tolist(),mag_data['lat'].tolist(),dsk_row['strike'])
        dis = max(abs(projected_distances["dist"]))*1000
        geodict1 = Geodesic.WGS84.Direct(dsk_row['inter_lat'],dsk_row['inter_lon'],dsk_row['strike']-90,dis)
        geodict2 = Geodesic.WGS84.Direct(dsk_row['inter_lat'],dsk_row['inter_lon'],dsk_row['strike']+90,dis)
        self.ax.plot([geodict1["lon2"], geodict2["lon2"]],
                     [geodict1["lat2"], geodict2["lat2"]],
                     transform=ccrs.Geodetic(),
                     color="black",
                     linewidth=1,
                     linestyle='--')

        deskew_tracks, deskew_fill = [], []  #collect plot artists; these lists were never initialized
        for j, (i, row) in enumerate(dsk_data.iterrows()):
            # Read in deskewed profile
            # This is hard-coded now. It will be updated to take a list of profiles in the future
            infile = os.path.join(row['data_dir'], row["comp_name"])  #'infile' was referenced but never assigned
            if not os.path.isfile(infile):
                self.parent.user_warning("Data file %s could not be found" %
                                         infile)
                continue
            dskd = utl.open_mag_file(infile)

            # Define the angle along which to project
            perp = row["strike"] - 180
            lon = dskd["lon"]
            lat = dskd["lat"]
            mag = sk.phase_shift_data(dskd["mag"].tolist(), row["phase_shift"])

            # Find distance to project
            if row["track_type"] == 'ship':
                pcol = '#000000'
                scle = 0.2 * 1e3
            if row["track_type"] == 'aero':
                if 'Ed' in row["comp_name"]:
                    pcol = 'purple'
                else:
                    pcol = 'darkorchid'
                scle = 0.5 * 1e3

            # Project amplitude onto map
            mlats, mlons = [], []
            for k in range(len(mag)):  #k avoids clobbering the row index i from the outer loop
                gdsc = self.geoid.Direct(lat[k], lon[k], perp, mag[k] * scle)
                mlons.append(gdsc['lon2'])
                mlats.append(gdsc['lat2'])

            # Plot map elements
            deskew_tracks.append(
                self.ax.plot(utl.convert_to_0_360(lon),
                             lat,
                             '--',
                             linewidth=1.0,
                             transform=ccrs.PlateCarree(),
                             color=pcol,
                             zorder=990))
            deskew_tracks.append(
                self.ax.plot(utl.convert_to_0_360(mlons),
                             mlats,
                             '-',
                             linewidth=1.0,
                             transform=ccrs.PlateCarree(),
                             color=pcol,
                             zorder=1000))
            deskew_fill.append(
                self.ax.fill_between(utl.convert_to_0_360(
                    np.array(mlons)[mag > 0]),
                                     np.array(mlats)[mag > 0],
                                     lat[mag > 0],
                                     transform=ccrs.PlateCarree(),
                                     alpha=0.5,
                                     color=pcol))
Example 14
def preprocess_m77t(m77tf, data_directory="shipmag_data"):

    #read in data and initialize empty columns and place holder variables
    m77t_df = pd.read_csv(m77tf, sep='\t', dtype=str)
    m77t_df['DECIMAL_YEAR'] = np.nan
    m77t_df['DIS'] = np.nan
    m77t_df['MAG_COR'] = np.nan
    current_dis, prev_lat_lon = 0, []

    if "SURVEY_ID" not in m77t_df.columns:
        m77t_df = pd.read_csv(
            m77tf,
            sep='\t',
            dtype=str,
            names=[
                "SURVEY_ID", "TIMEZONE", "DATE", "TIME", "LAT", "LON",
                "POS_TYPE", "NAV_QUALCO", "BAT_TTIME", "CORR_DEPTH",
                "BAT_CPCO", "BAT_TYPCO", "BAT_QUALCO", "MAG_TOT", "MAG_TOT2",
                "MAG_RES", "MAG_RESSEN", "MAG_DICORR", "MAG_SDEPTH",
                "MAG_QUALCO", "GRA_OBS", "EOTVOS", "FREEAIR", "GRA_QUALCO",
                "LINEID", "POINTID"
            ])

    #check for the .lp file's existence and skip if it has already been made
    fout_name = os.path.join(data_directory, m77t_df['SURVEY_ID'].iloc[0],
                             m77t_df['SURVEY_ID'].iloc[0] + '.lp')
    if os.path.isfile(fout_name):
        print(
            ".lp file found for %s, skipping to save time. If you would like to regenerate these files please remove them then rerun the script"
            % str(m77t_df['SURVEY_ID'].iloc[0]))
        return

    for i, row in m77t_df.iterrows():

        #create decimal year from datetime
        date = str(row['DATE'])
        if date == str(np.nan):
            print(
                "no date info for record %d of survey %s, skipping this record"
                % (i, row['SURVEY_ID']))
            continue
        dt_row = datetime(int(date[0:4]), int(date[4:6]), int(date[6:8]))
        dec_year = dt_to_dec(dt_row)
        m77t_df.at[i, 'DECIMAL_YEAR'] = round(dec_year, 5)

        #calculate distance from last point and add to total distance
        if prev_lat_lon != []:
            #/1000 to convert m to km
            current_dis += Geodesic.WGS84.Inverse(
                float(row['LAT']), float(row['LON']), prev_lat_lon[0],
                prev_lat_lon[1])['s12'] / 1000
        prev_lat_lon = [float(row['LAT']), float(row['LON'])]
        m77t_df.at[i, 'DIS'] = round(current_dis, 5)

        #determine IGRF and remove from uncorrected intensity
        igrf_cor = ipmag.igrf(
            [dec_year, 0, float(row['LAT']),
             float(row['LON'])])[2]
        mag_cor = float(row['MAG_TOT']) - igrf_cor
        if -3000 < mag_cor < 3000:  #the original 'or' test was always true; 'and' keeps only plausible residuals
            m77t_df.at[i, 'MAG_COR'] = round(mag_cor, 5)

    round3_func = lambda x: round(x, 3)
    dis_array = list(
        map(
            round3_func,
            np.arange(float(m77t_df['DIS'].tolist()[0]),
                      float(m77t_df['DIS'].tolist()[-1]),
                      1)))  #spacing of 1 km, because I can
    decimal_year_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      list(map(float, m77t_df['DECIMAL_YEAR'].tolist())))))
    mag_cor_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      list(map(float, m77t_df['MAG_COR'])))))
    lat_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      list(map(float, m77t_df['LAT'])))))
    lon_array = list(
        map(
            round3_func,
            np.interp(dis_array, list(map(float, m77t_df['DIS'])),
                      convert_to_0_360(m77t_df['LON']))))

    interp_df = pd.DataFrame({
        'dis': dis_array,
        'decimal_year': decimal_year_array,
        'mag_cor': mag_cor_array,
        'lat': lat_array,
        'lon': lon_array
    })

    #    #check distance
    #    interp_df['dis_check'] = np.nan
    #    current_dis,prev_lat_lon = 0,[]
    #    for i,row in interp_df.iterrows():
    #        #calculate distance from last point and add to total distance
    #        if prev_lat_lon!=[]:
    #            #/1000 to convert m to km
    #            current_dis += Geodesic.WGS84.Inverse(float(row['lat']),float(row['lon']),prev_lat_lon[0],prev_lat_lon[1])['s12']/1000
    #        prev_lat_lon = [float(row['lat']),float(row['lon'])]
    #        interp_df.at[i,'dis_check'] = round(current_dis,5))

    #write to .lp file
    print("saving %s" % fout_name)
    interp_df[['dis', 'decimal_year', 'mag_cor', 'lat',
               'lon']].to_csv(fout_name, sep='\t', index=False, header=False)
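
A minimal usage sketch (the .m77t path is hypothetical; output is an interpolated .lp file under data_directory/<SURVEY_ID>/):

preprocess_m77t('example.m77t', data_directory='shipmag_data')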
Example 15
    def plot_skewnesses_by_lat(self, clip_on=True):

        try:
            sz_name = self.sz_names_box.GetValue()
            rows = self.parent.deskew_df[self.parent.deskew_df["sz_name"] ==
                                         sz_name]
            rows = rows.sort_values("inter_lat", ascending=False)  #avoid an in-place sort on a DataFrame slice
        except (AttributeError, KeyError) as e:
            print("Spreading Zone %s not found in deskew file" %
                  str(self.sz_names_box.GetValue()))
            return
        try:
            xlims = self.parent.ax.get_xlim()
            ylims = self.parent.ax.get_ylim()
        except AttributeError:
            xlims, ylims = (-300, 300), (-150, 150)
        axs = self.fig.subplots(self.maximum_profiles,
                                1,
                                sharex=True,
                                sharey=True)
        #        for ax in axs:
        #            ax.set_facecolor("grey")
        axs = axs[:len(rows)]

        for j, (ax, (i, row)) in enumerate(zip(axs, rows.iterrows())):
            print(j, row["comp_name"], xlims, ylims)
            ax.set_anchor('W')

            psk.remove_axis_lines_and_ticks(ax)

            min_proj_dis, max_proj_dis = psk.plot_skewness_data(
                row,
                float(row['phase_shift']),
                ax,
                picker=True,
                clip_on=clip_on,
                xlims=xlims,
                flip=True)

            ax.annotate(
                r"%s" % row['comp_name'] + "\n" +
                r"%.1f$^\circ$N,%.1f$^\circ$E" % (float(
                    row['inter_lat']), utl.convert_to_0_360(row['inter_lon'])),
                xy=(-.215, .5),
                xycoords="axes fraction",
                fontsize=self.fontsize,
                va="center",
                ha="left")
            ax.annotate(r"$\theta$=%.1f" % float(row['phase_shift']) + "\n" +
                        r"$e_a$=%.1f" % float(row['aei']),
                        xy=(1.15, .5),
                        xycoords="axes fraction",
                        fontsize=self.fontsize,
                        va="center",
                        ha="right")
            #            ax.set_ylabel(r"$\theta$=%.1f"%float(row['phase_shift'])+"\n"+r"$e_a$=%.1f"%float(row['aei']),rotation=0,fontsize=self.fontsize)
            ax.yaxis.set_label_coords(1.05, .45)
            ax.patch.set_alpha(0.0)
            #            ax.format_coord = format_coord

            if self.show_synth_button.GetValue():
                try:
                    ax.plot(self.parent.dis_synth,
                            self.parent.synth,
                            'r-',
                            alpha=.4,
                            zorder=1)
                except (AttributeError, IndexError):
                    print(
                        "No synthetic found to render in skewness by latitude window"
                    )

        scale = np.sqrt(sum(np.array(xlims)**2))
        if not (scale < 20 or scale > 3000):  #parenthesized so limits are only reused when scale is reasonable
            ax.set_xlim(xlims)
            ax.set_ylim(ylims)

        if self.parent.spreading_rate_path is not None:
            psk.plot_chron_span_on_axes(
                sz_name,
                self.fig.get_axes(),
                rows[['age_min', 'age_max']].iloc[0],
                spreading_rate_path=self.parent.spreading_rate_path)

#        self.fig.subplots_adjust(hspace=.0) #remove space between subplot axes

        self.canvas.draw()
Example 16
def plot_track_cuts(x, y, sx, sy, idx, chrons_info, track_name,
                    results_directory):
    fig = plt.figure(figsize=(9, 9), dpi=80)

    llcrnrlon = min(utl.convert_to_0_360(y)) - 5 if min(
        utl.convert_to_0_360(y)) - 5 >= 0 else 0
    llcrnrlat = min(x) - 5 if min(x) - 5 >= -89 else -89
    urcrnrlon = max(utl.convert_to_0_360(y)) + 5 if max(
        utl.convert_to_0_360(y)) + 5 <= 359 else 359
    urcrnrlat = max(x) + 5 if max(x) + 5 <= 90 else 89

    #Make a map
    turning_points_map = create_basic_map(projection='merc',
                                          llcrnrlat=llcrnrlat,
                                          urcrnrlat=urcrnrlat,
                                          llcrnrlon=llcrnrlon,
                                          urcrnrlon=urcrnrlon,
                                          center_lon=180.,
                                          fig=fig)

    #Create Chron markers
    plot_chron_info(chrons_info, turning_points_map, coord_0_360=True)

    #Plot the data
    if len(x) != len(y) or len(sx) != len(sy):
        print(
            "error plotting track on map it probably crosses a pole, opening debugger"
        )
        import pdb
        pdb.set_trace()
    handle1, = turning_points_map.plot(y,
                                       x,
                                       color="slategrey",
                                       linestyle="-",
                                       label='original path',
                                       zorder=1,
                                       transform=ccrs.PlateCarree())
    handle2, = turning_points_map.plot(sy,
                                       sx,
                                       color="black",
                                       linestyle=':',
                                       label='simplified path',
                                       zorder=2,
                                       transform=ccrs.PlateCarree())
    handle3, = turning_points_map.plot(sy[idx],
                                       sx[idx],
                                       'ro',
                                       markersize=7,
                                       label='turning points',
                                       zorder=3,
                                       transform=ccrs.PlateCarree())

    #Name stuff
    plt.title(track_name)
    handles = [handle1, handle2, handle3]
    add_chron_info_to_legend(chrons_info, handles)
    plt.legend(handles=handles, loc='best')

    #Save the plot
    fig.savefig(
        os.path.join(results_directory, "turning_points", track_name + ".png"))
    plt.close(fig)
Example 17
def get_sandwell(window,
                 down_sample_factor,
                 sandwell_files_path="../raw_data/gravity/Sandwell/*.tiff",
                 resample_method=Resampling.average):
    sandwell_files = glob.glob(sandwell_files_path)

    def cmp_grav_files(
        x, y
    ):  #sorting function utilizing file names to order concatenation of Sandwell data
        xfile = os.path.basename(x)
        yfile = os.path.basename(y)
        xlat, xlon = list(map(float, re.findall(r'\d+', xfile)))
        ylat, ylon = list(map(float, re.findall(r'\d+', yfile)))

        if "S" in xfile: xlat = -xlat
        if "W" in xfile: xlon = 360 - xlon
        if "S" in yfile: ylat = -ylat
        if "W" in yfile: ylon = 360 - ylon

        if xlat - ylat == 0:
            return (utl.convert_to_0_360(ylon) - utl.convert_to_0_360(
                window[1])) % 360 - (utl.convert_to_0_360(xlon) -
                                     utl.convert_to_0_360(window[1])) % 360
            #            if (ylon-window[1])==0: return -1
            #            elif (xlon-window[1])==0: return 1
            #            else: return (ylon-window[1])%360 - (xlon-window[1])%360
        else:
            return ylat - xlat

    idx, all_gravs, prev_file = 0, [], None
    all_lats, all_lons = np.array([]), np.array([])
    for filepath in sorted(sandwell_files, key=cmp_to_key(cmp_grav_files)):
        with rasterio.open(filepath) as dataset:
            lats = np.arange(dataset.bounds[3], dataset.bounds[1],
                             -dataset.res[1] * down_sample_factor)
            lons = np.arange(dataset.bounds[0], dataset.bounds[2],
                             dataset.res[0] * down_sample_factor)
            if utl.convert_to_0_360(round(window[0],
                                          3)) > utl.convert_to_0_360(
                                              round(dataset.bounds[0], 3)):
                lon_lb = int((utl.convert_to_0_360(window[0]) -
                              utl.convert_to_0_360(dataset.bounds[0])) /
                             (dataset.res[0] * down_sample_factor) + .5)
            else:
                lon_lb = 0
            if utl.convert_to_0_360(round(window[1],
                                          3)) < utl.convert_to_0_360(
                                              round(dataset.bounds[2], 3)):
                lon_ub = int((utl.convert_to_0_360(window[1]) -
                              utl.convert_to_0_360(dataset.bounds[2])) /
                             (dataset.res[0] * down_sample_factor) + .5)
            else:
                lon_ub = -1
            if round(window[2], 3) > round(dataset.bounds[1], 3):
                lat_lb = int((window[2] - dataset.bounds[1]) /
                             (dataset.res[1] * down_sample_factor) + .5)
            else:
                lat_lb = 0
            if round(window[3], 3) < round(dataset.bounds[3], 3):
                lat_ub = int((window[3] - dataset.bounds[3]) /
                             (dataset.res[1] * down_sample_factor) + .5)
            else:
                lat_ub = -1
            lats = lats[-(lat_ub):-lat_lb - 1]
            lons = lons[abs(lon_lb):lon_ub - 1]
            if len(lons) == 0 or len(lats) == 0: continue
            print("Loading: %s" % str(filepath))
            grav = dataset.read(
                1,
                #            window=gwindow,
                out_shape=(int(dataset.height / down_sample_factor + .5),
                           int(dataset.width / down_sample_factor + .5)),
                #            resampling=Resampling.gauss
                #            resampling=Resampling.nearest
                resampling=resample_method)
            grav = grav[-(lat_ub):-lat_lb - 1, abs(lon_lb):lon_ub - 1]

        if len(lats) > grav.shape[0]: lats = lats[:grav.shape[0]]
        if len(lons) > grav.shape[1]: lons = lons[:grav.shape[1]]

        if prev_file is not None:
            xfile = os.path.basename(prev_file)
            yfile = os.path.basename(filepath)
            xlat, xlon = list(map(float, re.findall(r'\d+', xfile)))
            ylat, ylon = list(map(float, re.findall(r'\d+', yfile)))

            if "S" in xfile: xlat = -xlat
            if "W" in xfile: xlon = 360 - xlon
            if "S" in yfile: ylat = -ylat
            if "W" in yfile: ylon = 360 - ylon

            if abs(round(xlat - ylat)) != 0:
                #new latitude band: start a new row of tiles
                idx += 1
                all_lats = np.hstack([all_lats, lats])
                all_gravs.append(grav)
            elif abs(round(xlon - ylon)) == 60:
                #same band, adjacent 60-degree tile: concatenate on the left
                if idx == 0: all_lons = np.hstack([lons, all_lons])
                all_gravs[idx] = np.hstack([grav, all_gravs[idx]])
            else:
                raise RuntimeError(
                    "Couldn't determine how to concatenate %s to gravity array"
                    % str(filepath))
        else:
            all_lats = np.hstack([all_lats, lats])
            all_lons = np.hstack([lons, all_lons])
            all_gravs.append(grav)

        prev_file = filepath

    try:
        #stack the per-latitude-band rows into a single grid
        all_grav = all_gravs[0]
        for next_grav in all_gravs[1:]:
            all_grav = np.vstack([all_grav, next_grav])
    except IndexError:
        #no tiles intersected the window
        return np.array([]), np.array([]), np.array([])

    return all_lons, all_lats, np.array(all_grav)
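
A minimal usage sketch for the loader above. The name load_sandwell_tiles is a stand-in for the (unshown) function signature, and the tile glob, window, and resampling choice are illustrative assumptions rather than values fixed by the source:

import glob

import matplotlib.pyplot as plt
from rasterio.enums import Resampling

#hypothetical call; argument names mirror those used inside the function
sandwell_files = glob.glob("../raw_data/gravity/Sandwell/*.tiff")
window = [160., 240., -40., 40.]  #lon_min, lon_max, lat_min, lat_max
all_lons, all_lats, all_grav = load_sandwell_tiles(
    sandwell_files, window, down_sample_factor=10,
    resample_method=Resampling.average)

if all_grav.size:
    plt.pcolormesh(all_lons, all_lats, all_grav, cmap="Greys_r",
                   shading="auto")
    plt.colorbar(label="free-air gravity anomaly")
    plt.show()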
Example #18
def aeromag_preprocess(aeromag_files,
                       date_file=os.path.join('..', 'raw_data',
                                              'dates.aeromag'),
                       geoid=Geodesic.WGS84):
    for aeromag_file in aeromag_files:  #iterate over all aeromag files

        track, extension = os.path.basename(aeromag_file).split(
            '.')  #split the filename into track name and extension
        #read data and make an empty dataframe for the interpolated output
        adf = utl.open_mag_file(aeromag_file).dropna()
        ddf = pd.read_csv(date_file, sep='\t', index_col=0)
        idf = pd.DataFrame(columns=[
            'dis', 'lat', 'lon', 'alt', 'v_comp', 'e_comp', 'n_comp', 'h_comp',
            't_comp'
        ])

        dis = 0
        decimal_year = float(ddf.loc[track]['decimal_year'])
        prev_lat, prev_lon = None, None
        for i, row in adf.iterrows():  #iterate over rows

            row["lon"] = utl.convert_to_0_360(row["lon"])
            adf.at[i, "lon"] = row["lon"]

            try:
                #check for data gaps (NaNs)
                if np.isnan(row['lat']) or np.isnan(row['lon']) or np.isnan(
                        row['alt']) or np.isnan(row['mag']) or np.isnan(
                            row['v_comp']) or np.isnan(
                                row['e_comp']) or np.isnan(
                                    row['n_comp']) or np.isnan(row['h_comp']):
                    continue
                #check for None entries
                elif (row['lat'] is None) or (row['lon'] is None) or (
                        row['alt'] is None) or (row['mag'] is None) or (
                            row['v_comp'] is None) or (row['e_comp'] is None):
                    continue
                #check for absurd values outside the domain of each variable
                #(this also captures null values encoded as -99999)
                elif (abs(float(row['lat'])) > 90) or (abs(
                        utl.convert_to_180_180(row['lon'])) > 180) or (float(
                            row['alt']) < 0) or (abs(float(
                                row['mag'])) == 99999) or (abs(
                                    float(row['v_comp'])) == 99999) or (abs(
                                        float(row['e_comp'])) == 99999):
                    continue
            except ValueError:
                continue  #value not convertible to float; skip this datum

            if prev_lat is not None and prev_lon is not None:  #accumulate along-track distance (km)
                dis += geoid.Inverse(float(row['lat']), float(row['lon']),
                                     prev_lat, prev_lon)['s12'] / 1000
            adf.at[i, 'dis'] = dis

            #calculate and remove the IGRF reference field
            #(altitude converted from feet to km for ipmag.igrf)
            dec, inc, mag = ipmag.igrf([
                decimal_year,
                float(row['alt']) * 0.3048e-3,
                float(row['lat']),
                float(row['lon'])
            ])
            #decompose the total field F into components:
            #v = F*sin(I), e = F*cos(I)*sin(D), n = F*cos(I)*cos(D), h = F*cos(I)
            res_v_comp = mag * np.sin(np.deg2rad(inc))
            res_e_comp = mag * np.cos(np.deg2rad(inc)) * np.sin(
                np.deg2rad(dec))
            res_n_comp = mag * np.cos(np.deg2rad(inc)) * np.cos(
                np.deg2rad(dec))
            res_h_comp = mag * np.cos(np.deg2rad(inc))
            res_t_comp = mag

            adf.at[i, 'res_v_comp'] = float(row['v_comp']) - res_v_comp
            adf.at[i, 'res_e_comp'] = float(row['e_comp']) - res_e_comp
            adf.at[i, 'res_n_comp'] = float(row['n_comp']) - res_n_comp
            adf.at[i, 'res_h_comp'] = float(row['h_comp']) - res_h_comp
            adf.at[i, 'res_t_comp'] = float(row['mag']) - res_t_comp

            prev_lat, prev_lon = float(row['lat']), float(row['lon'])


        #a second-order polynomial trend was previously removed from the
        #residuals here; the detrending is currently disabled, so the
        #residuals pass through unchanged
        for col in [
                'res_e_comp', 'res_n_comp', 'res_h_comp', 'res_v_comp',
                'res_t_comp'
        ]:
            #col[3:] drops the 'res' prefix, e.g. 'res_v_comp' -> 'cor_v_comp'
            adf['cor' + col[3:]] = adf[col].to_numpy()

        #interpolate onto an evenly spaced distance grid and round data
        adf = adf.dropna()
        idf['dis'] = np.arange(adf['dis'].iloc[0], adf['dis'].iloc[-1] + .1,
                               .1)  #0.1 km spacing
        idf['lat'] = np.interp(idf['dis'], adf['dis'], adf['lat'])
        idf['lon'] = np.interp(idf['dis'], adf['dis'], adf['lon'])
        idf['alt'] = np.interp(idf['dis'], adf['dis'],
                               .3048 * adf['alt'])  #altitude feet -> meters
        idf['v_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_v_comp'])
        idf['e_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_e_comp'])
        idf['n_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_n_comp'])
        idf['h_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_h_comp'])
        idf['t_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_t_comp'])

        #write residual (adf) and interpolated (idf) profiles per component
        comp_exts = [('v_comp', '.Vd'), ('e_comp', '.Ed'), ('n_comp', '.Nd'),
                     ('h_comp', '.Hd'), ('t_comp', '.Td')]
        for comp, ext in comp_exts:
            adf[['dis', 'alt', 'cor_' + comp, 'lat',
                 'lon']].to_csv(aeromag_file + ext,
                                index=False,
                                header=False,
                                sep='\t',
                                float_format="%.3f")
        for comp, ext in comp_exts:
            idf[['dis', 'alt', comp, 'lat',
                 'lon']].to_csv(aeromag_file + ext + '.lp',
                                index=False,
                                header=False,
                                sep='\t',
                                float_format="%.3f")

        if extension.startswith('c'):
            shutil.copyfile(aeromag_file, aeromag_file + '.lp')

        latlon_df = adf[['lat', 'lon']]
        latlon_file = aeromag_file + ".latlon"
        latlon_df.to_csv(latlon_file, sep=' ', index=False, header=False)
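
A short usage sketch for aeromag_preprocess; the directory and glob pattern are illustrative assumptions. Each input file gains .Vd/.Ed/.Nd/.Hd/.Td residual profiles, matching .lp interpolated profiles, and a .latlon file alongside it:

import glob
import os

#hypothetical input listing; any iterable of aeromag file paths works
aeromag_files = glob.glob(os.path.join('..', 'raw_data', 'hi_alt', '*'))
aeromag_preprocess(aeromag_files)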
Example #19
def generate_az_strike_files(track_sz_and_inters,
                             chron_to_analyse,
                             heading,
                             results_directory,
                             plot=False):
    """results_directory is just used to find a good place to output plots az and strike data are saved to the data_directory along with normal data files"""

    chron, chron_color = chron_to_analyse
    chron_name = "chron%s" % (str(chron))
    tracks, az_files = [], []
    for track, spreading_zone_file, inter in track_sz_and_inters:
        idx = utl.read_idx_from_string(inter)

        #GMT's fitcircle output varies, so some of these files have the north
        #eigenvalue pole on this line and some the south; may need adapting
        gcp_lon, gcp_lat = open(spreading_zone_file[:-3] +
                                'gcp').readlines()[5].split()[0:2]

        az = utl.convert_to_0_360(
            Geodesic.WGS84.Inverse(float(idx[0][1][1]), float(idx[0][1][0]),
                                   float(gcp_lat), float(gcp_lon))['azi1'])

        if heading == 'east' and az >= 180: az -= 180
        elif heading == 'west' and az <= 180: az += 180
        elif heading == 'north' and (az <= 270 and az >= 90):
            az = utl.wrap_0_360(180 + az)
        elif heading == 'south' and (az >= 270 or az <= 90):
            az = utl.wrap_0_360(180 + az)
        elif heading not in ['east', 'west', 'north', 'south']:
            print(
                "heading is not a cardinal direction (got %s), so az cannot be corrected; proceeding with %.3f as the azimuth of younging for track %s; correct manually in *.azszs if this is wrong"
                % (heading, float(az), str(track)))

        strike = utl.wrap_0_360(az + 90)

        print("track", os.path.basename(track))
        print("spreading zone", os.path.basename(spreading_zone_file))
        print("azimuth", az, "strike", strike)

        fout_name = track[:-3] + '_' + track[-2:] + '_' + os.path.basename(
            spreading_zone_file)[:-4] + '.azszs'
        fout = open(fout_name, 'w+')
        out_str = """track chron spreadingzone intercept_track intercept_chron azimuth strike\n"""
        out_str += """%s %s %s %s %s %s %s""" % (
            track, chron_name, spreading_zone_file, str(
                idx[1][0]), str(idx[1][1]), az, strike)
        fout.write(out_str)
        fout.close()

        az_files.append(fout_name)
        tracks.append(track)

        # Deletes the Matlab/Octave file; not needed if Python is used to find strike
        #if os.path.isfile('.tmp_az.txt'): os.remove('.tmp_az.txt')

        if plot:
            pg.plot_az_strike(track, spreading_zone_file, idx, az, strike,
                              chron_color, chron_name, results_directory,
                              fout_name)

    return tracks, az_files
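
A usage sketch for generate_az_strike_files; track_sz_and_inters is assumed to come from an earlier intersection step (triples of track path, spreading-zone file, and the intersection string that utl.read_idx_from_string parses), and the chron tuple and heading below are illustrative:

#hypothetical inputs; see the intersection step earlier in the pipeline
chron_to_analyse = ('24', 'darkorange')
tracks, az_files = generate_az_strike_files(track_sz_and_inters,
                                            chron_to_analyse,
                                            'east',
                                            'results',
                                            plot=True)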
Example #20
import os
import sys

import matplotlib as mpl
import matplotlib.pyplot as plt
import pyskew.utilities as utl
import pyskew.skewness as sk
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
import pyskew.plot_gravity as pg
from time import time
from rasterio.enums import Resampling

#deskew1 = pd.read_csv('/home/dtw6/Code/PySkew/24r/data/C24r.deskew', sep='\t')
if os.path.isfile(sys.argv[1]): dsk_path = sys.argv[1]
else: raise IOError("File not found: %s" % str(sys.argv[1]))
if "-w" in sys.argv:
    idx = sys.argv.index("-w")
    window = [
        utl.convert_to_0_360(sys.argv[idx + 1]),
        utl.convert_to_0_360(sys.argv[idx + 2]),
        float(sys.argv[idx + 3]),
        float(sys.argv[idx + 4])
    ]  #lon_min,lon_max,lat_min,lat_max
else:
    window = [160., 240., -40., 40.]
if "-d" in sys.argv:
    down_sample_factor = float(sys.argv[sys.argv.index("-d") + 1])
else:
    down_sample_factor = 10
if "-swf" in sys.argv:
    sandwell_files_path = sys.argv[sys.argv.index("-swf") + 1]
else:
    sandwell_files_path = "../raw_data/gravity/Sandwell"
if "-rb" in sys.argv: remove_bad_data = True
Example #21
def plot_chron_info(chrons_info,
                    m,
                    coord_0_360=False,
                    chron_dir=os.path.join("..", "raw_data", "chrons",
                                           "cande"),
                    barckhausen_path=os.path.join(
                        "..", 'raw_data', 'chrons', 'Barckhausen2013',
                        'GSFML.Barckhausen++_2013_MGR.picks.gmt'),
                    **kwargs):
    #Create Chron markers
    for chron_info in chrons_info:
        chron, chron_color = chron_info
        chron_path = os.path.join(chron_dir, "cande.%s" % str(chron))
        if not os.path.isfile(chron_path):
            print(
                "no file %s, so there is probably no chron %s in cande's model."
                % (chron_path, str(chron)))
            continue
        fchron = open(chron_path, 'r')
        lines = fchron.readlines()
        fchron.close()
        entries = [[], []]
        for line in lines[1:]:
            entry = line.split()
            if entry[0] == '>':
                if len(entries[0]) < 2 or len(entries[1]) < 2:
                    entries = [[], []]
                    continue
                lats = entries[1]
                lons = entries[0]
                m.plot(lons,
                       lats,
                       color=chron_color,
                       transform=ccrs.PlateCarree(),
                       **kwargs)
                entries = [[], []]
            else:
                if coord_0_360:
                    entries[0].append(float(entry[0]))
                    entries[1].append(float(entry[1]))
                else:
                    entries[0].append(utl.convert_to_180_180(entry[0]))
                    entries[1].append(float(entry[1]))
        #add the Barckhausen (2013) picks for this chron where available
        ccz, gcz = utl.get_barckhausen_2013_chrons(
            barckhausen_path=barckhausen_path)
        if str(chron) in ccz.keys():
            if ccz[str(chron)]:
                lonlats = np.array(ccz[str(chron)])
                if coord_0_360:
                    lonlats[:, 0] = utl.convert_to_0_360(lonlats[:, 0])
                m.plot(lonlats[:, 0],
                       lonlats[:, 1],
                       color=chron_color,
                       transform=ccrs.PlateCarree(),
                       **kwargs)
        if str(chron) in gcz.keys():
            if gcz[str(chron)]:
                lonlats = np.array(gcz[str(chron)])
                if coord_0_360:
                    lonlats[:, 0] = utl.convert_to_0_360(lonlats[:, 0])
                m.plot(lonlats[:, 0],
                       lonlats[:, 1],
                       color=chron_color,
                       transform=ccrs.PlateCarree(),
                       **kwargs)
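
A usage sketch for plot_chron_info; the chron/color pair and the map setup are illustrative, with m being any cartopy GeoAxes (the function only calls m.plot with a PlateCarree transform):

import cartopy.crs as ccrs
import matplotlib.pyplot as plt

fig = plt.figure(figsize=(9, 6))
m = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
m.coastlines()
plot_chron_info([('24', 'darkorange')], m, linewidth=1)
plt.show()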