Example 1
def filter_deskew_and_calc_aei(deskew_path,spreading_rate_path=None,anomalous_skewness_model_path=None):
    """Creates Datatable"""

    deskew_df = utl.open_deskew_file(deskew_path)
    asf,srf,sz_list = get_asf_srf(spreading_rate_path,anomalous_skewness_model_path)

    return calc_aei(deskew_df,srf,asf)
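
A minimal usage sketch (the file names below are hypothetical placeholders; assumes this module and its helpers are in scope):

aei_df = filter_deskew_and_calc_aei("chrons.deskew",
                                    spreading_rate_path="spreading_rates.txt",
                                    anomalous_skewness_model_path="asf_model.txt")
print(aei_df.head())
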
Example 2
def update_useable_tracks_from_deskew(deskew_path,useable_track_path):
    useable_df = pd.read_csv(useable_track_path, sep='\t', header=None)
    useable_df['tracks'] = list(map(os.path.basename, useable_df[0].tolist()))
    deskew_df = utl.open_deskew_file(deskew_path)
    # strip the component suffixes (.Ed, .Vd, .lp); str.rstrip removes a
    # trailing character set rather than a suffix, so match the suffixes exactly
    useable_tracks = [name[:-3] if name.endswith(('.Ed','.Vd','.lp')) else name
                      for name in deskew_df['comp_name'].tolist()]
    new_useable_df = useable_df[useable_df['tracks'].isin(useable_tracks)][[0,1,2]]
    directory = os.path.dirname(useable_track_path)
    new_useable_track_filename = 'new_' + os.path.basename(useable_track_path)
    out_path = os.path.join(directory,new_useable_track_filename)
    new_useable_df.to_csv(out_path, sep='\t', index=False, header=False)
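
A minimal usage sketch (hypothetical file names; the track list is tab-separated with no header, as the function expects):

# Writes 'new_useable_tracks.txt' alongside the input track list.
update_useable_tracks_from_deskew("chrons.deskew", "useable_tracks.txt")
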
Example 3
def create_deskewed_data_file(deskew_path):
    #read deskew file
    deskew_df = utl.open_deskew_file(deskew_path)

    #iterate mag files
    for i,row in deskew_df.iterrows():
        #read mag files
        data_path = os.path.join(row['data_dir'],row['comp_name'])
        data_df = utl.open_mag_file(data_path)
        #deskew mag data
        data_df['deskewed_mag'] = phase_shift_data(data_df['mag'],float(row['phase_shift']))
        #save deskewed mag data as $DATAFILE.deskewed
        print("writing %s"%(data_path+'.deskewed'))
        data_df[['lon','lat','deskewed_mag']].to_csv(data_path+'.deskewed',sep=',',header=False,index=False)
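
A minimal usage sketch (hypothetical deskew file; each profile listed in it must have a readable mag file under its data_dir):

# Writes one <data file>.deskewed CSV of lon,lat,deskewed_mag per profile.
create_deskewed_data_file("chrons.deskew")
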
Example 4
def find_fz_crossings(deskew_path,fz_directory=os.path.join('..','raw_data','fracture_zones')):
    deskew_df = utl.open_deskew_file(deskew_path)
    get_fz_loc = read_and_fit_fz_data(fz_directory)

    fz_inter_dict = {}
    for i,row in deskew_df.iterrows():
        data_path = os.path.join(row['data_dir'],row['comp_name'])
        data_df = utl.open_mag_file(data_path)
        track_lon_lats = [[utl.convert_to_0_360(lon),lat] for lon,lat in zip(data_df['lon'],data_df['lat'])]
        inters = get_fz_loc(track_lon_lats)
        if inters != []: fz_inter_dict[row['comp_name']] = inters

    fz_inter_df = pd.DataFrame({'inters':fz_inter_dict})
    fz_inter_df.to_csv('fz_intercepts.txt',sep='\t')
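
A minimal usage sketch (hypothetical deskew path; keeps the default fracture-zone directory):

# Writes fz_intercepts.txt in the current working directory.
find_fz_crossings("chrons.deskew")
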
Example 5
def get_lon_lat_from_plot_picks_and_deskew_file(deskew_path,spreading_rate_picks_path):
    deskew_df = utl.open_deskew_file(deskew_path)
    spreading_rate_picks = pd.read_csv(spreading_rate_picks_path,sep='\t',header=0,index_col=0)

    iso_dict = {}
    lats,lons = [],[]
    for track in spreading_rate_picks.columns:
        iso_dict[track] = {}
        track_picks = spreading_rate_picks[track]
        track_picks_tups = tuple(track_picks.groupby((track_picks.isnull()!=track_picks.isnull().shift()).cumsum()))
        for i,track_picks in track_picks_tups:
            if track_picks[track_picks.notnull()].empty: continue
            prev_anom = track_picks.index[0]
            for anom,pick in track_picks.iloc[1:].items(): #iteritems was removed in pandas>=2.0
                iso_dict[track][anom] = {}
                iso_dist = (pick+track_picks[prev_anom])/2
                drow = deskew_df[deskew_df['comp_name'].str.replace(r'\.(Ed|Vd|lp)$','',regex=True) == track]
                if drow.empty:
                    print('problem getting deskew data from spreading rate picks for %s, check track names; aborting'%track)
                    return pd.DataFrame(),0,0
                drow = drow.iloc[0] #make sure it is the first value and that it is a series
                iso_lon,iso_lat,iso_dist_on_track = get_lon_lat_from_plot_pick(drow,iso_dist)
                iso_dict[track][anom]['lon'] = iso_lon
                iso_dict[track][anom]['lat'] = iso_lat
                lons.append(iso_lon)
                lats.append(iso_lat)
                prev_anom = anom
    iso_df = pd.DataFrame(iso_dict)
    average_lat = sum(lats)/len(lats)
    average_lon = sum(lons)/len(lons)

    # Initialize empty dataframe to hold isochron picks
    pick_df = pd.DataFrame(index=iso_df.columns, columns=['lon','lat'])
    for anom in iso_df.index:
        for track in iso_df.columns:
            pick_df.loc[track] = iso_df[track][anom]
        # Flip columns so latitude comes before longitude, because we aren't savages, Kevin
        pick_df = pick_df.reindex(columns=['lat','lon'])
        # Drop rows without data
        pick_df = pick_df.dropna()
        # Define path and filename
        picks_path = os.path.join(os.path.dirname(spreading_rate_picks_path),os.path.basename(spreading_rate_picks_path).split('.')[0] + '_' + anom + '.txt')
        #import pdb; pdb.set_trace()
        pick_df.to_csv(picks_path,sep='\t')

    return iso_df,average_lon,average_lat
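
A minimal usage sketch (hypothetical file names):

iso_df, avg_lon, avg_lat = get_lon_lat_from_plot_picks_and_deskew_file(
    "chrons.deskew", "spreading_rate_picks.txt")
print("mean isochron location: %.2f, %.2f" % (avg_lat, avg_lon))
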
Example 6
def correct_site(site_cor_path,deskew_path,dist_e=.5):
    #backup the .deskew file
    if not os.path.isfile(deskew_path+'.ccbak'):
        print("backing up %s to %s"%(deskew_path,deskew_path+'.ccbak'))
        shutil.copyfile(deskew_path,deskew_path+'.ccbak')

    #read in the deskew and site_cor file
    deskew_df = utl.open_deskew_file(deskew_path)
    site_cor_df = pd.read_csv(site_cor_path,sep="\t")

    #copy the deskew df so I can change it and save the corrected latitudes and longitudes
    new_deskew_df = deskew_df.copy()
    for i,drow in deskew_df.iterrows():
        crow = site_cor_df[site_cor_df["comp_name"]==drow["comp_name"]]

        if drow['comp_name'].startswith('#'): continue #commented lines check
        if crow.empty: print("no correction found for component %s"%drow["comp_name"]); continue #no correction for this component check

        if drow['track_type'] == 'aero':
            #Find other component direction so we can average the shift between components
            if 'E' in drow["comp_name"]:
                other_crow = site_cor_df[site_cor_df["comp_name"]==drow["comp_name"].replace('E','V')]
            elif 'V' in drow["comp_name"]:
                other_crow = site_cor_df[site_cor_df["comp_name"]==drow["comp_name"].replace('V','E')]
            else: print("Problem determining component direction for %s"%drow["comp_name"]); continue
            #check that the intercept distance correction between E and V is not more than 3 km different
            if abs(float(crow['correction']) - float(other_crow['correction']))>3:
                print("correction for %s is >3 km different from the other component's correction, and the average may be off"%(drow['comp_name']))
            correction = (float(crow['correction'])+float(other_crow['correction']))/2
        elif drow['track_type'] == 'ship':
            correction = float(crow['correction'])
        else:
            print("could not determine the track type for %s please check your deskew file, skipping"%drow["comp_name"]); continue

        corrected_lon,corrected_lat,corrected_dist = get_lon_lat_from_plot_pick(drow,correction,dist_e=dist_e)

        new_deskew_df.at[i, 'inter_lat'] =  corrected_lat
        new_deskew_df.at[i, 'inter_lon'] =  corrected_lon

    new_deskew_df.to_csv(deskew_path,sep="\t",index=False)
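
A minimal usage sketch (hypothetical file names; the correction file is tab-separated with comp_name and correction columns, as the function expects):

# Backs chrons.deskew up to chrons.deskew.ccbak, then rewrites it with
# corrected intercept latitudes and longitudes.
correct_site("site_corrections.txt", "chrons.deskew", dist_e=.5)
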
Example 7
def create_maxtab_file(deskew_path,anomoly_name,outfile=None):
    deskew_df = utl.open_deskew_file(deskew_path)
    dates_data = pd.read_csv("../raw_data/dates.aeromag",sep='\t',header=0,index_col=0)
    out_str = ""
    for i,row in deskew_df.iterrows():
        if '#' in row['comp_name']: continue
        track_name = row['comp_name'].split('.')[0]
        if row['track_type']=='ship': date = "%.2f"%get_shipmag_decimal_year(row)
        else: date = "%.2f"%float(dates_data.loc[track_name]["decimal_year"])
        phase_shift = "%.2f"%utl.convert_to_0_360(float(row['phase_shift']))
        out_str += ' '*(17-len(row['comp_name']))+ row['comp_name'] + ' '
        out_str += 'V' if 'Vd' in row['comp_name'] else 'E'
        out_str += ' '*(6-len(anomoly_name)) + str(anomoly_name)
        out_str += ' '*(9-len(date)) + date
        out_str += ' '*(8-len("%.2f"%float(row['inter_lat']))) + "%.2f"%float(row['inter_lat'])
        out_str += ' '*(8-len("%.2f"%float(row['inter_lon']))) + "%.2f"%float(row['inter_lon'])
        out_str += ' '*(8-len("%.2f"%(float(row['strike'])))) + "%.2f"%(float(row['strike']))
        out_str += ' '*(7-len(phase_shift)) + phase_shift
        out_str += ' '*(11-len('10.000')) + '10.000' + '\n'
    if outfile is None: outfile = "maxtab.%s"%anomoly_name
    print("saving to %s"%outfile)
    with open(outfile,'w') as out_file:
        out_file.write(out_str)
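
A minimal usage sketch (hypothetical deskew path and anomaly name; assumes ../raw_data/dates.aeromag exists, as the function requires):

# Writes maxtab.C20 in the current working directory.
create_maxtab_file("chrons.deskew", "C20")
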
Example 8
def flip_spreading_zone(deskew_path,spreading_zone):
    dskf = utl.open_deskew_file(deskew_path)
    for i,row in dskf.iterrows():
        if row['sz_name']==spreading_zone:
            flip_data_file(os.path.join(row['data_dir'],row['comp_name']))
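
A minimal usage sketch (both the deskew path and the spreading-zone name are hypothetical placeholders):

# Flips the data file of every profile whose sz_name matches.
flip_spreading_zone("chrons.deskew", "Molokai")
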
Example 9
#img_extent = (dataset.bounds[0], dataset.bounds[2], dataset.bounds[1], dataset.bounds[3])
#band1 = dataset.read(1)
#ax.imshow(band1, origin='upper', extent=img_extent, transform=proj, zorder=0, alpha=0.75)

running, inp = True, "r"
plt.ion()
plt.show()
while running:

    if "rl" in inp or inp.split()[0] == "r":
        try:
            for sp in site_points:
                sp.remove()
        except NameError:
            pass
        deskew = utl.open_deskew_file(dsk_path)
        print("Plotting Sites")
        site_points = []
        for i, sz_name in enumerate(deskew["sz_name"].drop_duplicates()):
            sz_dsk = deskew[deskew["sz_name"] == sz_name]
            bad_data = sz_dsk[sz_dsk["quality"] != "g"]
            bad_color = bad_data[["r", "g", "b"]].to_numpy()
            good_data = sz_dsk[sz_dsk["quality"] == "g"]
            aero_df = good_data[good_data["track_type"] == "aero"]
            ship_df = good_data[good_data["track_type"] == "ship"]
            aero_color = aero_df[["r", "g", "b"]].to_numpy()
            ship_color = ship_df[["r", "g", "b"]].to_numpy()
            #            color = (sz_dsk.iloc[0]["r"],sz_dsk.iloc[0]["g"],sz_dsk.iloc[0]["b"])
            site_points.append(
                ax.scatter(utl.convert_to_0_360(bad_data["inter_lon"]),
                           bad_data["inter_lat"],
                           # assumed completion: the source snippet breaks off
                           # mid-call; styling follows the bad-data convention
                           # used elsewhere (X markers, per-row RGB colors)
                           facecolors=bad_color,
                           edgecolors="k",
                           marker="X",
                           transform=ccrs.PlateCarree(),
                           zorder=100))
Example 10
def calc_strikes_and_add_err(
        dsk_path,
        mlat=90,
        mlon=0,
        ma=1,
        mb=1,
        mphi=0,
        geoid=Geodesic(6371, 0.0),
        outfile=None,
        filter_by_quality=False,
        visualize=False,
        visual_padding=3.,
        down_sample_factor=5.,
        sandwell_files_path="../raw_data/gravity/Sandwell",
        convergence_level=0.01,
        euler_pole=None):
    """
    Calculates the great circles and associated strikes for anomaly crossings.
    Will also add the average strike uncertainty to a paleomagnetic pole if one
    is provided. Prints quite a lot for user diagnostics; pipe to /dev/null to
    silence.

    Parameters
    ----------
    dsk_path : str
            Path to deskew file containing the profiles to invert strikes on
    mlat : float, optional
            latitude of the paleomagnetic pole to add the strike uncertainty to
    mlon : float, optional
            longitude of the paleomagnetic pole to add the strike uncertainty to
    ma : float, optional
            semi-major axis of the pole's error ellipse
    mb : float, optional
            semi-minor axis of the pole's error ellipse
    mphi : float, optional
            azimuth of the major axis of the pole's error ellipse
    geoid : geographiclib.geodesic.Geodesic, optional
            geodesic to use for projection; default is a sphere of radius 6371
            with flattening of 0
    outfile : str, optional
            path of the output deskew file with the corrected strikes
    filter_by_quality : bool, optional
            decides if "bad" data is filtered out of the strike fit
            (Default : False)
    visualize : bool, optional
            whether or not to render images showing the fit to sites
            (Default : False)
    visual_padding : float, optional
            how much to pad out the plotting window in degrees (Default : 3.)
    down_sample_factor : float, optional
            how much to downsample the gravity data; operates as a divisor, so
            2 means half of the gravity data will plot (Default : 5.)
    sandwell_files_path : str, optional
            path to the files containing the Sandwell gravity grid to render,
            in .tiff format
    convergence_level : float, optional
            the convergence criterion to pass to the function finding the
            maximum likelihood pole
    euler_pole : iterable, optional
            changes operation to predict strikes solely from the input Euler
            pole rather than from the data, and reports the degree to which
            the data-predicted strikes agree with the input Euler pole.
            Multiple Euler poles can be passed and estimates based on all of
            them will be reported; however, only the last Euler pole is saved
            in the output deskew file

    Returns
    ----------
    full_unc
            the pole position and combined error ellipse produced by
            cov_to_ellipse after the average strike uncertainty is added

    Raises
    ----------
    RuntimeError
            if an aeromag profile cannot find its second component
    ValueError
            if euler_pole is not None, a single (lat, lon) pair, or a list of
            such pairs
    """
    (mx, my, mz), mcov = latlon2cart(mlat, mlon,
                                     ellipse_to_cov(mlat, mlon, ma, mb, mphi))

    dsk_df = utl.open_deskew_file(dsk_path)
    dsk_df.sort_values("inter_lat", inplace=True, ascending=False)
    if filter_by_quality:
        bad_dsk_data = dsk_df[dsk_df["quality"] != "g"]
        dsk_df = dsk_df[dsk_df["quality"] == "g"]
    tcov, strike_diffs = np.zeros([3, 3]), []
    szs_to_calc = dsk_df["sz_name"].drop_duplicates()  #.drop(24) #removes MahiMahi

    if euler_pole is None or len(euler_pole) == 0:
        euler_poles = [None]
    elif len(euler_pole) == 2 and isinstance(euler_pole[0], (float, int)):
        euler_poles = [euler_pole]
    elif len(euler_pole) > 0 and isinstance(euler_pole[0], (list, tuple)):
        euler_poles = euler_pole
    else:
        raise ValueError(
            "euler_pole must be None, a single [lat, lon] pair, or a list of such pairs (e.g. [90,0] or [[90,0],[0,0]])"
        )

    n = 0
    for sz in szs_to_calc:
        sz_df = dsk_df[dsk_df["sz_name"] == sz]
        print(sz, ":", len(sz_df.index))

        if visualize:
            window = [
                utl.convert_to_0_360(sz_df["inter_lon"].min() -
                                     visual_padding),
                utl.convert_to_0_360(sz_df["inter_lon"].max() +
                                     visual_padding),
                sz_df["inter_lat"].min() - visual_padding,
                sz_df["inter_lat"].max() + visual_padding
            ]
            fig = plt.figure(dpi=100)
            proj = ccrs.Mercator(central_longitude=sz_df["inter_lon"].mean())
            ax = fig.add_subplot(111, projection=proj)
            ax.set_xticks(np.arange(0, 370, 10.), crs=ccrs.PlateCarree())
            ax.set_yticks(np.arange(-80, 90, 10.), crs=ccrs.PlateCarree())
            ax.tick_params(grid_linewidth=.5,
                           grid_linestyle=":",
                           color="k",
                           labelsize=8)
            lon_formatter = LongitudeFormatter(zero_direction_label=True)
            lat_formatter = LatitudeFormatter()
            ax.xaxis.set_major_formatter(lon_formatter)
            ax.yaxis.set_major_formatter(lat_formatter)
            land = cfeature.NaturalEarthFeature('physical',
                                                'land',
                                                "50m",
                                                edgecolor="black",
                                                facecolor="grey",
                                                linewidth=2)
            ax.add_feature(land)

        num_sites = (sz_df["track_type"] == "aero").sum() / 2 + (
            sz_df["track_type"] == "ship").sum()

        if num_sites > 2:  #overdetermined case
            data = {
                "dec": [],
                "inc": [],
                "phs": [],
                "ell": [],
                "ccl": [],
                "azi": [],
                "amp": []
            }
            for i, row in sz_df.iterrows():
                if row["track_type"] == "aero":
                    if "Ed" in row["comp_name"]: continue
                    elif "Vd" in row["comp_name"]:
                        other_comp = sz_df[sz_df["comp_name"] ==
                                           row["comp_name"].replace(
                                               "Vd", "Ed")].iloc[0]
                        row["inter_lat"] = (row["inter_lat"] +
                                            other_comp["inter_lat"]) / 2
                        row["inter_lon"] = (row["inter_lon"] +
                                            other_comp["inter_lon"]) / 2
                    else:
                        raise RuntimeError(
                            "You really shouldn't have gotten here, you have aeromag that can't find its second component"
                        )
                if visualize:
                    if row["quality"] != "g": marker = "X"
                    else:
                        if row["track_type"] == "ship": marker = "o"
                        else: marker = "s"
                    ax.scatter(row["inter_lon"],
                               row["inter_lat"],
                               facecolors=(row["r"], row["g"], row["b"]),
                               edgecolors="k",
                               transform=ccrs.PlateCarree(),
                               marker=marker,
                               zorder=100)
                data["ccl"].append([
                    row["comp_name"],
                    [90.0, 0.10, row["inter_lat"], row["inter_lon"]]
                ])
            (plat, plon, _, maj_se, min_se,
             phi), chisq, dof = pymax.max_likelihood_pole(
                 data, convergence_level=convergence_level)
            for i in range(len(data["ccl"])):
                data["ccl"][i][1][1] *= np.sqrt(chisq)
            (plat, plon, _, maj_se, min_se,
             phi), chisq, dof = pymax.max_likelihood_pole(
                 data, convergence_level=convergence_level)
            print("\t", (plat, plon, maj_se, min_se, phi), chisq, dof)
            (_, _, _), scov = latlon2cart(
                plat, plon, ellipse_to_cov(plat, plon, maj_se, min_se, phi))
            tcov += scov
            n += 1
            for ep_idx, euler_pole in enumerate(euler_poles):
                if euler_pole is not None:
                    print(
                        "--------------------------------------------------------------------------------"
                    )
                    print("Euler Pole: %.1f, %.1f" %
                          (euler_pole[0], euler_pole[1]))
                estrikes, dists = [], []
                for i, row in sz_df.iterrows():
                    if euler_pole is not None:
                        geodict = geoid.Inverse(*euler_pole, row["inter_lat"],
                                                row["inter_lon"])
                        pgeodict = geoid.Inverse(plat, plon, row["inter_lat"],
                                                 row["inter_lon"])
                        strike = geodict["azi2"]
                        pstrike = pgeodict["azi2"] + 90
                        if pstrike < 0: pstrike += 180
                        strike_diff = abs(strike - pstrike)
                        if strike_diff > 90:
                            strike_diff = abs(180 - strike_diff)
                        if len(strike_diffs) < ep_idx + 1:
                            strike_diffs.append([])
                        strike_diffs[ep_idx].append(strike_diff)
                        estrikes.append(geodict["azi1"] + 180)
                    else:
                        pgeodict = geoid.Inverse(plat, plon, row["inter_lat"],
                                                 row["inter_lon"])
                        strike = pgeodict["azi2"] + 90
                    dists.append(pgeodict["a12"])
                    if strike < 0: strike += 360
                    if strike < 180: strike += 180
                    dsk_df.at[i, "strike"] = strike
                    if euler_pole is not None:
                        print("\t\t", row["comp_name"], "\n",
                              "\t\t\tEuler Pole Strike: ", strike,
                              "\n\t\t\tPredicted Strike: ", pstrike)
                    else:
                        print("\t\t", row["comp_name"], strike)

                if visualize:
                    pdis = np.mean(dists)
                    print("Average Distance to GC Pole: ", pdis)
                    ax = psk.plot_small_circle(plon,
                                               plat,
                                               pdis,
                                               color="k",
                                               m=ax,
                                               geoid=Geodesic(6371., 0.),
                                               transform=ccrs.PlateCarree(),
                                               alpha=.7,
                                               linewidth=5,
                                               zorder=1)
                    if euler_pole is not None:
                        estrike = np.mean(estrikes)
                        print("Average Azimuth of Sites Relative to EP: ",
                              estrike)
                        ep_color = plt.rcParams['axes.prop_cycle'].by_key(
                        )['color'][(ep_idx % 9) + 1]
                        ax = psk.plot_great_circle(
                            euler_pole[1],
                            euler_pole[0],
                            estrike,
                            m=ax,
                            color=ep_color,
                            geoid=Geodesic(6371., 0.),
                            transform=ccrs.PlateCarree(),
                            alpha=.7,
                            linewidth=3,
                            zorder=2)
            if visualize:
                all_lons, all_lats, all_grav = pg.get_sandwell(
                    window,
                    down_sample_factor,
                    resample_method=Resampling.average,
                    sandwell_files_path=os.path.join(sandwell_files_path,
                                                     "*.tiff"))
                print("Plotting Gravity")
                start_time = time()
                print("Grid Sizes: ", all_lons.shape, all_lats.shape,
                      all_grav.shape)
                fcm = ax.contourf(all_lons,
                                  all_lats,
                                  all_grav,
                                  60,
                                  cmap="Blues_r",
                                  alpha=.75,
                                  transform=ccrs.PlateCarree(),
                                  zorder=0,
                                  vmin=0,
                                  vmax=255)
                print("Runtime: ", time() - start_time)
                ax.set_extent(window, ccrs.PlateCarree())
                vis_outpath = os.path.join(os.path.dirname(dsk_path),
                                           "strike_fit_%s" % sz)
                print("Saving: %s" % vis_outpath)
                fig.savefig(vis_outpath)

        elif num_sites == 2:  #equal determined case
            strike = geoid.Inverse(sz_df.iloc[0]["inter_lat"],
                                   sz_df.iloc[0]["inter_lon"],
                                   sz_df.iloc[1]["inter_lat"],
                                   sz_df.iloc[1]["inter_lon"])["azi1"]
            if strike < 0: strike += 360
            if strike < 180: strike += 180
            for i, row in sz_df.iterrows():
                dsk_df.at[i, "strike"] = strike
                print("\t", row["comp_name"], strike)
        else:  #under determined case; just ignore
            pass

    if filter_by_quality:
        dsk_df = pd.concat([dsk_df, bad_dsk_data])  #DataFrame.append was removed in pandas>=2.0
        dsk_df.sort_values("inter_lat", inplace=True, ascending=False)

    print("--------------------------------------")
    (mlat, mlon), totcov = cart2latlon(mx, my, mz, mcov + (tcov / n))
    full_unc = cov_to_ellipse(mlat, mlon, totcov)
    print("Strike Covariance Matrix:\n", tcov)
    print("Full Uncertainty: ", full_unc)
    if euler_pole is not None:
        if visualize:
            all_strike_diffs = []
            fig_all = plt.figure(dpi=100)
            ax_all = fig_all.add_subplot(111)
            for ep_idx in range(len(strike_diffs)):
                ep_color = plt.rcParams['axes.prop_cycle'].by_key()['color'][
                    (ep_idx % 9) + 1]
                #Do histogram for each individual euler pole
                print(
                    "For EP %d -> Mean, Median, Min, Max Strike Differences: "
                    % ep_idx,
                    sum(strike_diffs[ep_idx]) / len(strike_diffs[ep_idx]),
                    np.median(strike_diffs[ep_idx]), min(strike_diffs[ep_idx]),
                    max(strike_diffs[ep_idx]))
                fig = plt.figure(dpi=100)
                ax = fig.add_subplot(111)
                ax.hist(strike_diffs[ep_idx],
                        bins=np.arange(0., 4.2, 0.2),
                        color=ep_color)
                ax.axvline(sum(strike_diffs[ep_idx]) /
                           len(strike_diffs[ep_idx]),
                           color="tab:blue",
                           linestyle="--")
                ax.axvline(np.median(strike_diffs[ep_idx]), color="cyan")
                vis_outpath = os.path.join(
                    os.path.dirname(dsk_path),
                    "strike_fit_epstats_%d.png" % ep_idx)
                print("Saving: %s" % vis_outpath)
                fig.savefig(vis_outpath)
                #Do stacked histogram for all euler poles
                all_strike_diffs += list(strike_diffs[ep_idx])
                ax_all.hist(all_strike_diffs,
                            bins=np.arange(0., 4.2, 0.2),
                            color=ep_color,
                            zorder=len(strike_diffs) - ep_idx)


#            all_strike_diffs = reduce(lambda x,y=[]: x+y, strike_diffs)
            print("For All EP -> Mean, Median, Min, Max Strike Differences: ",
                  sum(all_strike_diffs) / len(all_strike_diffs),
                  np.median(all_strike_diffs), min(all_strike_diffs),
                  max(all_strike_diffs))
            ax_all.axvline(sum(all_strike_diffs) / len(all_strike_diffs),
                           color="tab:red",
                           linestyle="--")
            ax_all.axvline(np.median(all_strike_diffs), color="tab:orange")
            vis_outpath = os.path.join(os.path.dirname(dsk_path),
                                       "strike_fit_all_epstats.png")
            print("Saving: %s" % vis_outpath)
            fig_all.savefig(vis_outpath)
            all_strike_diffs = reduce(lambda x, y=[]: x + y, strike_diffs)
            print(
                "For All EP (Check) -> Mean, Median, Min, Max Strike Differences: ",
                sum(all_strike_diffs) / len(all_strike_diffs),
                np.median(all_strike_diffs), min(all_strike_diffs),
                max(all_strike_diffs))

    if outfile is None:
        outfile = os.path.join(os.path.dirname(dsk_path),
                               "strike_cor_" + os.path.basename(dsk_path))
    print("Writing to %s" % str(outfile))
    utl.write_deskew_file(outfile, dsk_df)

    return full_unc
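
A minimal usage sketch (hypothetical paths; the pole position and error ellipse are placeholder values, not ones from the source):

# Fit great-circle strikes for every spreading zone in the deskew file and fold
# the average strike uncertainty into a pole at 85N, 30E whose error ellipse is
# 2 x 1 degrees with its major axis at azimuth 45.
full_unc = calc_strikes_and_add_err("chrons.deskew",
                                    mlat=85, mlon=30, ma=2, mb=1, mphi=45,
                                    filter_by_quality=True,
                                    outfile="strike_cor_chrons.deskew")
print(full_unc)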