Example #1
 def on_sub_btn(self, event):
     if self.parent.user_warning(
             "WARNING: This is a non-reversable function please be sure this is what you want to do. Also if you are not looking at the 0 phase profile this may have unexpected results, please view the 0 phase profile before hitting OK."
     ):
         try:
             self.deg = int(self.deg_box.GetValue())
         except (ValueError, TypeError) as e:
             self.parent.user_warning(
                 "Degree of polynomial must be a natural number")
         try:
             mag_path = os.path.join(self.parent.dsk_row["data_dir"],
                                     self.parent.dsk_row["comp_name"])
             mag_df = utl.open_mag_file(mag_path)
             self.projected_distances = utl.calc_projected_distance(
                 self.parent.dsk_row["inter_lon"],
                 self.parent.dsk_row["inter_lat"], mag_df["lon"],
                 mag_df["lat"], self.parent.dsk_row["strike"])["dist"]
             shifted_mag = mag_df['mag'].tolist()
             self.pols = np.polyfit(self.projected_distances, shifted_mag,
                                    self.deg)
             self.poly = np.polyval(self.pols, self.projected_distances)
             mag_df["mag"] = mag_df["mag"] - self.poly
             utl.write_mag_file_df(mag_df, mag_path)
         except AttributeError:
             return
     self.on_plot_btn(event)
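The core operation of the handler above is a plain polynomial detrend: fit a degree-deg polynomial to the magnetic anomaly as a function of projected distance and subtract it before rewriting the file. A minimal standalone sketch of that step with NumPy only, using synthetic stand-in arrays rather than a real profile:

import numpy as np

# Stand-in profile: projected distances (km) and magnetic anomaly values (nT)
projected_distances = np.linspace(-100.0, 100.0, 201)
mag = 50.0 * np.sin(projected_distances / 10.0) + 0.02 * projected_distances**2

deg = 2                                              # degree of the trend polynomial
pols = np.polyfit(projected_distances, mag, deg)     # fit the long-wavelength trend
poly = np.polyval(pols, projected_distances)         # evaluate the trend at every sample
detrended_mag = mag - poly                           # residual anomaly, analogous to the column written back above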
Example #2
def shipmag_preprocess(shipmag_files):
    for shipmag_file in shipmag_files:
        if os.path.basename(shipmag_file).split('.')[-1].startswith('c'):
            shutil.copyfile(shipmag_file, shipmag_file + '.lp')
        ship_df = utl.open_mag_file(shipmag_file)
        latlon_df = ship_df[['lat', 'lon']]
        latlon_file = shipmag_file + ".latlon"
        latlon_df.to_csv(latlon_file, sep=' ', index=False, header=False)
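A usage sketch for the preprocessor above; the glob pattern mirrors the one used by plot_tracks in the next example, but the directory layout is an assumption about the local project.

import glob

shipmag_files = glob.glob('../raw_data/ship/**/*.lp')   # hypothetical raw shipboard tracks
shipmag_preprocess(shipmag_files)                        # writes a .latlon file next to each input (and a .lp copy for cut tracks)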
Example #3
def plot_tracks(chrons_info,
                results_directory,
                tracks=[],
                track_dir="all_tracks",
                lon_0=180,
                lat_0=0,
                cuts=False):
    """
    Plots the track files in tracks, together with chron info, on a default orthographic map centered on
    the Pacific for rough estimation of intercepts, and places these plots in the results directory. If no
    tracks are provided, it plots them all.
    """

    if tracks == []:
        tracks = glob.glob('../raw_data/hi_alt/**/*.DAT') + glob.glob(
            '../raw_data/ship/**/*.lp')

    all_tracks_dir = os.path.join(results_directory, track_dir)
    utl.check_dir(all_tracks_dir)

    #Start loop making plots
    for track in tracks:

        #Get Track Name
        track_name = os.path.basename(track)
        if not cuts: track_name = track_name.split('.')[0]
        else: track_name = track_name.replace('.', '-')

        #Create Figure
        fig = plt.figure(figsize=(9, 9), dpi=80)

        #Create Map
        aero_track_map = create_basic_map(projection='ortho',
                                          center_lon=lon_0,
                                          center_lat=lat_0,
                                          fig=fig)

        #Create Chron markers
        plot_chron_info(chrons_info, aero_track_map)

        dfin = utl.open_mag_file(track)

        lats = list(map(float, dfin['lat'].tolist()))
        lons = list(map(float, dfin['lon'].tolist()))

        aero_track_handle, = aero_track_map.plot(lons,
                                                 lats,
                                                 color='k',
                                                 zorder=3,
                                                 label=track_name,
                                                 transform=ccrs.PlateCarree())

        #plot title and labels
        plt.title(track_name)
        handles = [aero_track_handle]
        add_chron_info_to_legend(chrons_info, handles)
        plt.legend(handles=handles, loc='best')

        fig.savefig(os.path.join(all_tracks_dir, track_name + ".png"))
        plt.close(fig)
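A usage sketch; the chron/color pairs and output directory are hypothetical, and chrons_info is assumed to follow the same (chron, color) pairing that get_track_intersects unpacks from chron_to_analyse further below.

chrons_info = [(33, 'tab:blue'), (34, 'tab:red')]   # hypothetical chron numbers and plot colors
results_directory = '../results'                    # hypothetical output directory

# With no tracks given, every track under the default raw_data patterns is plotted
# on an orthographic map centered on the Pacific (lon_0=180, lat_0=0).
plot_tracks(chrons_info, results_directory, lon_0=180, lat_0=0)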
Example #4
def get_idx_from_plot_pick(deskew_row,plot_pick,flip=False,dist_e=None):
    data_file_path = os.path.join(deskew_row["data_dir"],deskew_row["comp_name"])
    data_df = utl.open_mag_file(data_file_path)

    if flip: projected_distances = utl.calc_projected_distance(deskew_row['inter_lon'],deskew_row['inter_lat'],data_df['lon'].tolist(),data_df['lat'].tolist(),(180+deskew_row['strike'])%360)
    else: projected_distances = utl.calc_projected_distance(deskew_row['inter_lon'],deskew_row['inter_lat'],data_df['lon'].tolist(),data_df['lat'].tolist(),deskew_row['strike'])

    min_idx = (projected_distances["dist"]-plot_pick).abs().idxmin()

    return min_idx,projected_distances["dist"][min_idx]
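A usage sketch, assuming utl is the same helper module imported by these examples (e.g. import pyskew.utilities as utl); the deskew file path and the picked distance are hypothetical.

import pyskew.utilities as utl   # assumed import, matching the utl alias used above

deskew_df = utl.open_deskew_file('hypothetical_project.deskew')
deskew_row = deskew_df.iloc[0]
plot_pick = -25.0                # hypothetical distance (km) picked off a profile plot

idx, dist = get_idx_from_plot_pick(deskew_row, plot_pick, flip=False)
print("nearest sample index %d at projected distance %.2f" % (idx, dist))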
Example #5
def get_track_intersects(chron_to_analyse,
                         tracks_or_cuts,
                         spreading_zone_files,
                         data_directory='.',
                         bounding_lats=(-90, 90),
                         bounding_lons=(0, 360),
                         e=1):
    """ This function works in 0-360 longitude because otherwise there would be a discontinuty in the Pacific the region of interest """
    chron, chron_color = chron_to_analyse
    chron_name = "chron%s" % (str(chron))
    bound_check_func = lambda x: (bounding_lats[0] < float(x[1]) < bounding_lats[1]
                                  and bounding_lons[0] < float(x[0]) < bounding_lons[1])
    intersecting_tracks, out_string = [], ""
    for track in tqdm(tracks_or_cuts):
        print(track)
        dft = utl.open_mag_file(track)
        if dft.empty: continue
        lt = [[utl.convert_to_0_360(lon),
               float(lat)] for lon, lat in zip(dft['lon'], dft['lat'])]
        if not list(filter(bound_check_func, lt)):
            print("track out of bounds, skipping track")
            continue

        for spreading_zone_file in spreading_zone_files:
            lsz = [[line.split()[0], line.split()[1]]
                   for line in open(spreading_zone_file).readlines()
                   if len(line.split()) > 1]
            if not list(filter(bound_check_func, lsz)): continue
            idx = intersect_bf(lt, lsz, e=e)

            if not any(idx): continue
            else:
                print("-----------intersected in bounds-------------")
                intersecting_tracks.append(track)
                out_string += "%s\t%s\t%s\n" % (track, spreading_zone_file,
                                                str(idx))
                break

    print("found %d intersecting tracks" % len(intersecting_tracks))
    utl.check_dir(data_directory)
    fout_name = os.path.join(
        data_directory,
        "usable_tracks_and_intersects_for_%s.txt" % str(chron_name))
    if os.path.isfile(fout_name):
        print("backing up %s to %s" % (fout_name, fout_name + '.bak'))
        shutil.copyfile(fout_name, fout_name + '.bak')
    fout = open(fout_name, 'w+')
    print("writing to %s" % fout_name)
    fout.write(out_string)
    fout.close()

    return intersecting_tracks, fout_name
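A usage sketch; the chron number, color, glob patterns, and bounding box are placeholders, and each spreading zone file is assumed to hold one whitespace-separated lon/lat pair per line as the parser above expects.

import glob

chron_to_analyse = (33, 'tab:blue')                          # (chron, color) pair
tracks = glob.glob('../raw_data/ship/**/*.lp')               # candidate track files
spreading_zone_files = glob.glob('../raw_data/szs/*.txt')    # hypothetical isochron/spreading zone picks

intersecting_tracks, fout_name = get_track_intersects(
    chron_to_analyse, tracks, spreading_zone_files,
    data_directory='../data', bounding_lats=(-60, 60), bounding_lons=(120, 300))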
Example #6
def create_deskewed_data_file(deskew_path):
    #read deskew file
    deskew_df = utl.open_deskew_file(deskew_path)

    #iterate mag files
    for i,row in deskew_df.iterrows():
        #read mag files
        data_path = os.path.join(row['data_dir'],row['comp_name'])
        data_df = utl.open_mag_file(data_path)
        #deskew mag data
        data_df['deskewed_mag'] = phase_shift_data(data_df['mag'],float(row['phase_shift']))
        #save deskewed mag data as $DATAFILE.deskew
        print("writing %s"%(data_path+'.deskewed'))
        data_df[['lon','lat','deskewed_mag']].to_csv(data_path+'.deskewed',sep=',',header=False,index=False)
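Usage is a single call on a deskew file; the path here is hypothetical, and each data file listed in it gains a .deskewed companion containing lon, lat, and phase-shifted magnetization.

# Writes lon,lat,deskewed_mag next to every data file referenced by the deskew file
create_deskewed_data_file('hypothetical_project.deskew')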
Example #7
def find_fz_crossings(deskew_path,fz_directory=os.path.join('..','raw_data','fracture_zones')):
    deskew_df = utl.open_deskew_file(deskew_path)
    get_fz_loc = read_and_fit_fz_data(fz_directory)

    fz_inter_dict = {}
    for i,row in deskew_df.iterrows():
        data_path = os.path.join(row['data_dir'],row['comp_name'])
        data_df = utl.open_mag_file(data_path)
        track_lon_lats = [[utl.convert_to_0_360(lon),lat] for lon,lat in zip(data_df['lon'],data_df['lat'])]
        inters = get_fz_loc(track_lon_lats)
        if inters != []: fz_inter_dict[row['comp_name']] = inters

    fz_inter_df = pd.DataFrame({'inters':fz_inter_dict})
    fz_inter_df.to_csv('fz_intercepts.txt',sep='\t')
Example #8
def make_tabs(directory):
    ship_cut_tracks = glob.glob(os.path.join(directory, '**', '**', '*.lp'))
    aero_cut_tracks = glob.glob(
        os.path.join(directory, '**', '**', '**', '*.DAT'))
    print(
        "------NOTE: all _tab files will be skipped to prevent recursion-------"
    )
    print(ship_cut_tracks + aero_cut_tracks)

    for cut_track in (ship_cut_tracks + aero_cut_tracks):
        if '_tab.' in cut_track: continue
        df = open_mag_file(cut_track)
        df.to_csv(cut_track.split('.')[0] + '_tab.' + cut_track.split('.')[1],
                  sep='\t',
                  index=False)
Example #9
def flip_data_file(data_path):

    #backup old data file
    data_back = data_path+'.bak'
    print('backing up %s to %s'%(data_path,data_back))
    shutil.copyfile(data_path,data_back)

    #flip data file
    data_df = open_mag_file(data_path)
    print("flipping data in %s"%data_path)
#    import pdb; pdb.set_trace()
    data_df = data_df.iloc[::-1]

    #write out data file
    print("overwriting %s"%data_path)
    write_mag_file_df(data_df,data_path)
Example #10
def get_shipmag_decimal_year(row,deg_e=.01):
    """
    Takes a row (pandas.Series) of a deskew file which is of type ship and returns the decimal year for the
    intersection point. Returns None if not found.
    """
    if row['track_type']!='ship':
        raise ValueError("get_shipmag_decimal_year can only run on shipmag data recieved data of type %s instead"%str(row['track_type']))
    data_file_path = os.path.join(row["data_dir"],row["comp_name"])
    data_df = utl.open_mag_file(data_file_path)
#    data_df = pd.read_csv(data_file_path,names=["dist","decimal_year","mag","lat","lon"],delim_whitespace=True)
    decimal_year=None
    for j,datarow in data_df.iterrows(): #iterate to find the distance associated with the current lat lon
        if (float(datarow['lat'])>=float(row['inter_lat'])-deg_e and \
          float(datarow['lat'])<=float(row['inter_lat'])+deg_e) and \
          (utl.convert_to_0_360(datarow['lon'])>=utl.convert_to_0_360(row['inter_lon'])-deg_e and \
          utl.convert_to_0_360(datarow['lon'])<=utl.convert_to_0_360(row['inter_lon'])+deg_e):
            decimal_year=float(datarow['dec_year']); break
    return decimal_year
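A usage sketch that applies the lookup to every shipboard row of a deskew file; the file path is hypothetical and utl is assumed to be the same helper module imported above.

import pyskew.utilities as utl   # assumed import, matching the utl alias used above

deskew_df = utl.open_deskew_file('hypothetical_project.deskew')
for i, row in deskew_df[deskew_df['track_type'] == 'ship'].iterrows():
    decimal_year = get_shipmag_decimal_year(row, deg_e=.01)
    if decimal_year is None:
        print("no sample within tolerance of the intersection for %s" % row['comp_name'])
    else:
        print("%s crosses the anomaly at decimal year %.3f" % (row['comp_name'], decimal_year))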
Example #11
 def fit_poly(self):
     try:
         self.deg = int(self.deg_box.GetValue())
     except (ValueError, TypeError) as e:
         self.parent.user_warning(
             "Degree of polynomial must be a natural number")
     try:
         mag_path = os.path.join(self.parent.dsk_row["data_dir"],
                                 self.parent.dsk_row["comp_name"])
         mag_df = utl.open_mag_file(mag_path)
         self.projected_distances = utl.calc_projected_distance(
             self.parent.dsk_row["inter_lon"],
             self.parent.dsk_row["inter_lat"], mag_df["lon"], mag_df["lat"],
             (180 + self.parent.dsk_row["strike"]) % 360)["dist"]
         shifted_mag = sk.phase_shift_data(
             mag_df['mag'].tolist(), self.parent.dsk_row["phase_shift"])
         self.pols = np.polyfit(self.projected_distances, shifted_mag,
                                self.deg)
         self.poly = np.polyval(self.pols, self.projected_distances)
     except AttributeError:
         return
Example #12
    def draw_figures(self):

        ####################################################Get Values
        dsk_row = self.parent.dsk_row
        track = self.parent.track
        ddis = float(self.parent.samp_dis_box.GetValue())
        if ddis==0: self.parent.user_warning("Synthetic is required for comparison of phase, start by initializing a synthetic"); return
        synth_dis = self.parent.dis_synth
        synth_mag = self.parent.synth

        filter_type = self.filter_type_box.GetValue()
        lowcut = float(self.lowcut_box.GetValue())
        highcut = float(self.highcut_box.GetValue())
        order = int(self.order_box.GetValue())

        left_bound = float(self.low_bound_box.GetValue())
        right_bound = float(self.high_bound_box.GetValue())
        aero_diff = float(self.aero_diff_box.GetValue())

        left_idx = np.argmin(np.abs(synth_dis-left_bound))
        right_idx = np.argmin(np.abs(synth_dis-right_bound))
        left_idx,right_idx = min([left_idx,right_idx]),max([left_idx,right_idx])

        bin_range,bin_num = (-180,180),120

        ###################################################Filter Data

        data_path = os.path.join(dsk_row["data_dir"],dsk_row["comp_name"])
        data_df = utl.open_mag_file(data_path)
        projected_distances = utl.calc_projected_distance(dsk_row['inter_lon'],dsk_row['inter_lat'],data_df['lon'].tolist(),data_df['lat'].tolist(),(180+dsk_row['strike'])%360)
        shifted_mag = sk.phase_shift_data(data_df["mag"],dsk_row["phase_shift"])
        if np.any(np.diff(projected_distances["dist"])<0): itshifted_mag = np.interp(-synth_dis,-projected_distances["dist"],shifted_mag)
        else: itshifted_mag = np.interp(synth_dis,projected_distances["dist"],shifted_mag)
        fitshifted_mag = self.filters[filter_type](itshifted_mag,lowcut,highcut,fs=1/ddis,order=order)

        ###################################################Actual Plotting

        outer = gridspec.GridSpec(4, 1)

        ###################################################Axis 0: Magnetic profiles
        self.ax0 = self.fig.add_subplot(outer[0])

        if self.parent.show_other_comp: #Handle Other Aeromag Component

            if dsk_row["track_type"]=="aero":
                if "Ed.lp" in track:
                    other_track = track.replace("Ed.lp","Vd.lp")
                    total_track = track.replace("Ed.lp","Td.lp")
                    other_phase = dsk_row["phase_shift"]-90
                elif "Hd.lp" in track:
                    other_track = track.replace("Hd.lp","Vd.lp")
                    total_track = track.replace("Hd.lp","Td.lp")
                    other_phase = dsk_row["phase_shift"]-90
                elif "Vd.lp" in track:
                    other_track = track.replace("Vd.lp","Ed.lp")
                    total_track = track.replace("Vd.lp","Td.lp")
                    if other_track not in self.parent.deskew_df["comp_name"].tolist(): other_track = track.replace("Vd.lp","Hd.lp")
                    other_phase = dsk_row["phase_shift"]+90
                else: self.parent.user_warning("Improperly named component files should have either Ed.lp, Hd.lp, or Vd.lp got: %s"%track); return
                oth_row = self.parent.deskew_df[self.parent.deskew_df["comp_name"]==other_track].iloc[0]

                oth_data_path = os.path.join(oth_row["data_dir"],oth_row["comp_name"])
                tot_data_path = os.path.join(oth_row["data_dir"],total_track) #Should be in same place

                oth_data_df = utl.open_mag_file(oth_data_path)
                oth_shifted_mag = sk.phase_shift_data(oth_data_df["mag"],other_phase)
                if np.any(np.diff(projected_distances["dist"])<0): oth_itshifted_mag = np.interp(-synth_dis,-projected_distances["dist"],oth_shifted_mag)
                else: oth_itshifted_mag = np.interp(synth_dis,projected_distances["dist"],oth_shifted_mag)
                oth_fitshifted_mag = self.filters[filter_type](oth_itshifted_mag,lowcut,highcut,fs=1/ddis,order=order)
                if filter_type=="None": psk.plot_skewness_data(oth_row,other_phase,self.ax0,xlims=[None,None],color='darkgreen',zorder=2,picker=True,alpha=.7,return_objects=True,flip=True)
                else: self.ax0.plot(synth_dis,oth_fitshifted_mag,color="#299C29",zorder=3,alpha=.6)

                tot_data_df = utl.open_mag_file(tot_data_path)
                if np.any(np.diff(projected_distances["dist"])<0): tot_imag = np.interp(-synth_dis,-projected_distances["dist"],tot_data_df["mag"])
                else: tot_imag = np.interp(synth_dis,projected_distances["dist"],tot_data_df["mag"])
                tot_fimag = self.filters[filter_type](tot_imag,lowcut,highcut,fs=1/ddis,order=order)

        if filter_type=="None": psk.plot_skewness_data(dsk_row,dsk_row["phase_shift"],self.ax0,xlims=[None,None],zorder=3,picker=True,return_objects=True,flip=True)
        else: self.ax0.plot(synth_dis,fitshifted_mag,color="#7F7D7D",zorder=3,alpha=.6)
        self.ax0.plot(self.parent.dis_synth,self.parent.synth,'r-',alpha=.4,zorder=1)
        self.ax0.set_ylabel("Magnetic Profiles")
#        self.ax0.get_xaxis().set_ticklabels([])

        ###################################################Axis 1/2: Phase Angles and Differences

        self.ax1 = self.fig.add_subplot(outer[1], sharex=self.ax0)
        self.ax2 = self.fig.add_subplot(outer[2], sharex=self.ax0)

        ###################################################Calculate: Phase Differences
        trimmed_dis = synth_dis[left_idx:right_idx]
        trimmed_synth = synth_mag[left_idx:right_idx]
        trimmed_fitshifted_mag = fitshifted_mag[left_idx:right_idx]

        al_data = np.angle(hilbert(fitshifted_mag),deg=False)[left_idx:right_idx]
        al_synth = np.angle(hilbert(np.real(synth_mag)),deg=False)[left_idx:right_idx]

        data_synth_diff = phase_diff_func(al_synth,al_data)

        if self.parent.show_other_comp and dsk_row["track_type"]=="aero":
            trimmed_oth_fitshifted_mag = oth_fitshifted_mag[left_idx:right_idx]
            al_oth = np.angle(hilbert(oth_fitshifted_mag),deg=False)[left_idx:right_idx]

            oth_synth_diff = phase_diff_func(al_synth,al_oth)
            oth_data_diff = phase_diff_func(al_oth,al_data)

            if abs(aero_diff) > 0:
                idx = ma.array(np.abs(oth_data_diff)<aero_diff)

                self.ax1.plot((trimmed_dis[~idx]),(np.rad2deg(al_oth[~idx])),color="darkgreen",linestyle=":")

                self.ax2.plot((trimmed_dis[~idx]),(oth_synth_diff[~idx]),color="tab:pink",alpha=.8,linestyle=":")
                self.ax2.plot((trimmed_dis[~idx]),(oth_data_diff[~idx]),color="tab:grey",alpha=.8,linestyle=":")

                self.ax1.plot((trimmed_dis[~idx]),(np.rad2deg(al_data[~idx])),color="k",linestyle=":")
                self.ax1.plot((trimmed_dis[~idx]),(np.rad2deg(al_synth[~idx])),color="r",linestyle=":")

                self.ax2.plot((trimmed_dis[~idx]),(data_synth_diff[~idx]),color="tab:red",alpha=.8,linestyle=":")

#                import pdb; pdb.set_trace()
#                not_trimmed_dis = (trimmed_dis[~idx])
#                not_trimmed_dis[np.diff(~idx,prepend=[0])] = ma.masked
#                not_al_data = (al_data[~idx])
#                not_al_data[np.diff(~idx)] = ma.masked
#                not_al_synth = (al_synth[~idx])
#                not_al_synth[np.diff(~idx)] = ma.masked
#                not_al_oth = (al_oth[~idx])
#                not_al_oth[np.diff(~idx)] = ma.masked
#                not_data_synth_diff = (data_synth_diff[~idx])
#                not_data_synth_diff[np.diff(~idx)] = ma.masked
#                not_oth_synth_diff = (oth_synth_diff[~idx])
#                not_oth_synth_diff[np.diff(~idx)] = ma.masked
#                not_oth_data_diff = (oth_data_diff[~idx])
#                not_oth_data_diff[np.diff(~idx)] = ma.masked
                trimmed_dis = (trimmed_dis[idx])
                al_data = (al_data[idx])
                al_synth = (al_synth[idx])
                al_oth = (al_oth[idx])
                data_synth_diff = (data_synth_diff[idx])
                oth_synth_diff = (oth_synth_diff[idx])
                oth_data_diff = (oth_data_diff[idx])

            self.ax1.plot(trimmed_dis,np.rad2deg(al_oth),color="darkgreen")

            self.ax2.plot(trimmed_dis,oth_synth_diff,color="tab:pink",alpha=.8)
            self.ax2.plot(trimmed_dis,oth_data_diff,color="tab:grey",alpha=.8)

        self.ax1.plot(trimmed_dis,np.rad2deg(al_data),color="k")
        self.ax1.plot(trimmed_dis,np.rad2deg(al_synth),color="r")

        self.ax2.plot(trimmed_dis,data_synth_diff,color="tab:red",alpha=.8)
#        self.ax2.get_xaxis.set_ticklabels
        self.ax0.set_xlim(*self.parent.ax.get_xlim())
        self.ax0.set_ylim(*self.parent.ax.get_ylim())

        self.ax1.set_ylabel("Phase Angles")
        self.ax2.set_ylabel("Phase Differences")


        ###################################################Axis 2.1: Power Spectrum
#        inner = gridspec.GridSpecFromSubplotSpec(1, 2, subplot_spec=outer[2])#, hspace=0.)
#        self.ax2 = self.fig.add_subplot(inner[0])


        ###################################################Axis 2.2: Phase Statistics
        self.ax3 = self.fig.add_subplot(outer[3])

        if self.parent.show_other_comp and dsk_row["track_type"]=="aero":
            self.ax3.hist(oth_synth_diff,range=bin_range,bins=bin_num,color="tab:pink",alpha=.5,zorder=2)
            self.ax3.hist(oth_data_diff,range=bin_range,bins=bin_num,color="tab:grey",alpha=.5,zorder=1)

        self.ax3.hist(data_synth_diff,range=bin_range,bins=bin_num,color="tab:red",alpha=.5,zorder=3)
        self.ax3.axvline(np.median(data_synth_diff),color="k",alpha=.5,zorder=5,linestyle=":")
        self.ax3.axvline(np.mean(data_synth_diff),color="k",alpha=.5,zorder=5)
        self.ax3.axvspan(np.mean(data_synth_diff)-np.std(data_synth_diff),np.mean(data_synth_diff)+np.std(data_synth_diff),color="tab:grey",alpha=.3,zorder=0)

        self.ax3.annotate(r"$\theta_{mean}$ = $%.1f^\circ \pm %.1f^\circ$"%(np.mean(data_synth_diff),np.std(data_synth_diff)) + "\n" + r"$\theta_{median}$ = %.1f$^\circ$"%np.median(data_synth_diff),xy=(0.02,1-0.02),xycoords="axes fraction",bbox=dict(boxstyle="round", fc="w",alpha=.5),fontsize=self.fontsize,va='top',ha='left')

        self.ax3.set_ylabel(r"$\Delta \theta$ Count")

        self.fig.suptitle("%s\n%s\n"%(dsk_row["sz_name"],track))

        ###################################################Power Figure

        N = (right_idx-left_idx) #Length of signal in distance domain
        NW = 3 #following Parker and O'Brien '97 and HJ-Gordon '03 we use a time-bandwidth product of 6 (NW is half)
        Ns = 5 #Number of points to use in running average smoothing

        #Handle Distance Domain
#        import pdb; pdb.set_trace()
        Sk_complex, weights, eigenvalues=pmtm(itshifted_mag[left_idx:right_idx], NW=NW, NFFT=N, show=False)
        Sk = np.abs(Sk_complex)**2
        smoothed_tshifted_freq = (np.mean(Sk * np.transpose(weights), axis=0) * ddis)[N//2:][::-1]
#        smoothed_tshifted_freq = np.convolve(smoothed_tshifted_freq, np.ones((Ns,))/Ns, mode='same') #10 point running average smoothing
        tdata_freqs = np.linspace(0.0, 1.0/(2.0*ddis), N-N//2) #0 to Nyquist

        self.power_ax = self.power_fig.add_subplot(111)

        if self.parent.show_other_comp and dsk_row["track_type"]=="aero":
            Sk_complex, weights, eigenvalues=pmtm(oth_itshifted_mag[left_idx:right_idx], NW=NW, NFFT=N, show=False)
            Sk = np.abs(Sk_complex)**2
            oth_smoothed_tshifted_freq = (np.mean(Sk * np.transpose(weights), axis=0) * ddis)[N//2:][::-1]
#            oth_smoothed_tshifted_freq = np.convolve(oth_smoothed_tshifted_freq, np.ones((Ns,))/Ns, mode='same') #10 point running average smoothing
            self.power_ax.semilogy(tdata_freqs, oth_smoothed_tshifted_freq, color="darkgreen")
#            self.power_ax.semilogy(tdata_freqs, oth_smoothed_tshifted_freq+smoothed_tshifted_freq, color="grey")


            Sk_complex, weights, eigenvalues=pmtm(tot_imag[left_idx:right_idx], NW=NW, NFFT=N, show=False)
            Sk = np.abs(Sk_complex)**2
            tot_smoothed_tshifted_freq = (np.mean(Sk * np.transpose(weights), axis=0) * ddis)[N//2:][::-1]
#            tot_smoothed_tshifted_freq = np.convolve(tot_smoothed_tshifted_freq, np.ones((Ns,))/Ns, mode='same') #10 point running average smoothing
            self.power_ax.semilogy(tdata_freqs, tot_smoothed_tshifted_freq, color="tab:orange")

        #Old Numpy Method
#        synth_freqs = np.fft.fftfreq(len(synth_dis[left_idx:right_idx]),ddis)
#        tdata_freqs = np.fft.fftfreq(len(shifted_mag[left_idx:right_idx]),ddis)
#        tshifted_freq = np.fft.fft(shifted_mag[left_idx:right_idx])
#        fitshifted_freq = np.fft.fft(fitshifted_mag[left_idx:right_idx])
#        tsynth_freq = np.fft.fft(synth_mag[left_idx:right_idx])

        self.power_ax.semilogy(tdata_freqs, smoothed_tshifted_freq, color="k",zorder=100)

#        self.power_ax.semilogy(tdata_freqs, np.abs(tshifted_freq), color="k")
#        self.power_ax.plot(synth_freqs, np.abs(fitshifted_freq), color="#7F7D7D")
#        self.power_ax.plot(synth_freqs, np.abs(tsynth_freq), color="r")

        self.power_ax.set_xlim(0.0,0.4)
        self.power_ax.set_ylim(1e-1,1e6)
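The phase comparison in draw_figures boils down to taking the instantaneous phase of each profile with the analytic signal (Hilbert transform) and differencing the angles. Below is a minimal standalone sketch of that idea on synthetic signals, independent of the GUI state; the wrapped difference stands in for the phase_diff_func used above, which is not shown in this snippet.

import numpy as np
from scipy.signal import hilbert

dis = np.linspace(-100.0, 100.0, 2001)                   # regularly sampled distance axis (km)
synth = np.cos(2 * np.pi * dis / 40.0)                   # "synthetic" anomaly
data = np.cos(2 * np.pi * dis / 40.0 + np.deg2rad(30.))  # "data" anomaly, 30 degrees out of phase

al_data = np.angle(hilbert(data), deg=False)             # instantaneous phase of the data
al_synth = np.angle(hilbert(np.real(synth)), deg=False)  # instantaneous phase of the synthetic

# Wrapped phase difference in degrees; it clusters near -30 for this construction
data_synth_diff = np.rad2deg(np.angle(np.exp(1j * (al_synth - al_data))))
print(np.median(data_synth_diff))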
Example #13
def cut_tracks_and_flip(track_cuts, data_directory, heading="east"):

    cut_tracks, flipped_data = [], []
    for track, cuts in track_cuts.items():
        print("Starting Track: %s" % track)
        directory, path = os.path.split(track)
        dfin = utl.open_mag_file(track)
        #        fin = open(track,'r')
        #        lines = fin.readlines()
        #        fin.close()
        #        lines = [line.split() for line in lines]
        #        dfin = pd.DataFrame(lines,columns=["time","lat","lon","n_comp","s_comp","h_comp","v_comp","mag","dec","inc","None","alt"])
        lats = list(map(float, dfin['lat'].tolist()))
        lons = list(map(utl.convert_to_0_360, dfin['lon'].tolist()))
        df_segments = []
        for cut in cuts:
            try:
                cut_index = [[lon, lat] for lon, lat in zip(lons, lats)
                             ].index([utl.convert_to_0_360(cut[0]), cut[1]])
            except ValueError as e:
                import pdb
                pdb.set_trace()
            print("cutting track: %s along index: %d" % (track, cut_index))
            df_segments.append(dfin.loc[:cut_index])
            dfin = dfin.loc[cut_index:]
        df_segments.append(dfin)
        i = 1
        for df_segment in df_segments:
            if len(df_segment) == 0: continue
            if heading == 'east':
                flip_bool = (utl.convert_to_0_360(df_segment['lon'].iloc[0]) >
                             utl.convert_to_0_360(df_segment['lon'].iloc[-1])
                             )  #is heading easterly
            elif heading == 'west':
                flip_bool = (utl.convert_to_0_360(df_segment['lon'].iloc[0]) <
                             utl.convert_to_0_360(df_segment['lon'].iloc[-1])
                             )  #is heading westerly
            elif heading == 'north':
                flip_bool = (
                    df_segment['lat'].iloc[0] > df_segment['lat'].iloc[-1]
                )  #is heading northerly
            elif heading == 'south':
                flip_bool = (
                    df_segment['lat'].iloc[0] < df_segment['lat'].iloc[-1]
                )  #is heading southerly
            else:
                print(
                    "the heading provided is not a cardinal direction please rerun with this corrected"
                )
                return
            if flip_bool:
                print(
                    "flipping data for cut: %d track: %s such that path is %serly and thus (hopefully) oldest data first"
                    % (i, path, heading))
                df_segment = df_segment.iloc[::-1]
                flipped_data.append(track.split('.')[0] + '.c%d' % i)
            if not os.path.isdir(os.path.join(directory, 'c%d' % i)):
                print("making directory %s" %
                      os.path.join(directory, 'c%d' % i))
                utl.check_dir(os.path.join(directory, 'c%d' % i))
            segment_path = os.path.join(directory, 'c%d' % i,
                                        path.split('.')[0] + '.c%d' % i)
            i += 1
            print("writing: %s" % segment_path)
            df_segment.to_csv(segment_path,
                              sep='\t',
                              header=False,
                              index=False)
            cut_tracks.append(segment_path)
    f_flipped = open(os.path.join(data_directory, "flipped_data.txt"), 'w+')
    f_flipped.write(reduce(lambda x, y: x + '\n' + y, flipped_data))
    f_flipped.close()
    return cut_tracks, flipped_data
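A usage sketch; the track_cuts mapping is inferred from how the function indexes cut[0] and cut[1] above (longitude, latitude pairs that must exactly match existing points on the track), and all paths are hypothetical.

# Hypothetical cut points: each track maps to a list of (lon, lat) vertices at which to split it
track_cuts = {
    '../raw_data/ship/leg1/survey1.lp': [(182.5, -14.2), (190.1, -10.7)],
    '../raw_data/ship/leg2/survey2.lp': [(200.3, 5.4)],
}

cut_tracks, flipped_data = cut_tracks_and_flip(track_cuts, '../data', heading='east')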
Example #14
def aeromag_preprocess(aeromag_files,
                       date_file=os.path.join('..', 'raw_data',
                                              'dates.aeromag'),
                       geoid=Geodesic.WGS84):
    for aeromag_file in aeromag_files:  #iterate over all aeromag files

        track, extension = os.path.basename(aeromag_file).split(
            '.')  #segment the name into parts
        #read data and make an empty dataframe for output data
        adf = utl.open_mag_file(aeromag_file).dropna()
        ddf = pd.read_csv(date_file, sep='\t', index_col=0)
        idf = pd.DataFrame(columns=[
            'dis', 'lat', 'lon', 'alt', 'v_comp', 'e_comp', 'n_comp', 'h_comp',
            't_comp'
        ])

        dis = 0
        decimal_year = float(ddf.loc[track]['decimal_year'])
        prev_lat, prev_lon = None, None
        for i, row in adf.iterrows():  #iterate over rows

            row["lon"] = utl.convert_to_0_360(row["lon"])
            adf.at[i, "lon"] = row["lon"]

            try:
                #check for data gaps
                if np.isnan(row['lat']) or np.isnan(row['lon']) or np.isnan(
                        row['alt']) or np.isnan(row['mag']) or np.isnan(
                            row['v_comp']) or np.isnan(
                                row['e_comp']) or np.isnan(
                                    row['n_comp']) or np.isnan(row['h_comp']):
                    continue
                    #check None
                elif (row['lat'] == None) or (row['lon'] == None) or (
                        row['alt'] == None) or (row['mag'] == None) or (
                            row['v_comp'] == None) or (row['e_comp'] == None):
                    continue
                    #check for absurd values outside of the domain of the variable (this will capture null values of -99999)
                elif (abs(float(row['lat'])) > 90) or (abs(
                        utl.convert_to_180_180(row['lon'])) > 180) or (float(
                            row['alt']) < 0) or (abs(float(
                                row['mag'])) == 99999) or (abs(
                                    float(row['v_comp'])) == 99999) or (abs(
                                        float(row['e_comp'])) == 99999):
                    continue
            except ValueError as e:
                continue  #This implies a value which is not convertible to a float; as all of these should be floats, this datum must be skipped

            if prev_lat != None and prev_lon != None:  #calculate distance
                dis += geoid.Inverse(float(row['lat']), float(row['lon']),
                                     prev_lat, prev_lon)['s12'] / 1000
            adf.at[i, 'dis'] = dis

            #calculate and remove IGRF
            dec, inc, mag = ipmag.igrf([
                decimal_year,
                float(row['alt']) * 0.3048e-3,
                float(row['lat']),
                float(row['lon'])
            ])
            res_v_comp = mag * np.sin(np.deg2rad(inc))
            res_e_comp = mag * np.cos(np.deg2rad(inc)) * np.sin(
                np.deg2rad(dec))
            res_n_comp = mag * np.cos(np.deg2rad(inc)) * np.cos(
                np.deg2rad(dec))
            res_h_comp = mag * np.cos(np.deg2rad(inc))
            res_t_comp = mag

            adf.at[i, 'res_v_comp'] = float(row['v_comp']) - res_v_comp
            adf.at[i, 'res_e_comp'] = float(row['e_comp']) - res_e_comp
            adf.at[i, 'res_n_comp'] = float(row['n_comp']) - res_n_comp
            adf.at[i, 'res_h_comp'] = float(row['h_comp']) - res_h_comp
            adf.at[i, 'res_t_comp'] = float(row['mag']) - res_t_comp

            prev_lat, prev_lon = float(row['lat']), float(row['lon'])


#        adf = adf[(adf['res_e_comp']<3000) & (adf['res_n_comp']<3000) & (adf['res_v_comp']<3000) & (adf['res_h_comp']<3000) & (adf['res_t_comp']<3000)]

#remove a second order polynomial from the magnetic data; I don't know why, but this is something that is done (currently disabled: the residual columns are passed through unchanged)
        for col in [
                'res_e_comp', 'res_n_comp', 'res_h_comp', 'res_v_comp',
                'res_t_comp'
        ]:
            #            pols = np.polyfit(adf['dis'].tolist(),adf[col].tolist(),3)
            #            mag_fit = np.polyval(pols,adf['dis'].tolist())
            #            adf['cor'+col.lstrip('res')] = adf[col].to_numpy() - mag_fit
            adf['cor' + col.lstrip('res')] = adf[col].to_numpy()

        #interpolate and round data
        adf = adf.dropna()
        idf['dis'] = np.arange(adf['dis'].iloc[0], adf['dis'].iloc[-1] + .1,
                               .1)  #spacing of 0.1 km, because I can
        idf['lat'] = np.interp(idf['dis'], adf['dis'], adf['lat'])
        idf['lon'] = np.interp(idf['dis'], adf['dis'], adf['lon'])
        idf['alt'] = np.interp(idf['dis'], adf['dis'], .3048 * adf['alt'])
        idf['v_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_v_comp'])
        idf['e_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_e_comp'])
        idf['n_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_n_comp'])
        idf['h_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_h_comp'])
        idf['t_comp'] = np.interp(idf['dis'], adf['dis'], adf['cor_t_comp'])

        adf[['dis', 'alt', 'cor_v_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Vd',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_e_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Ed',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_n_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Nd',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_h_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Hd',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        adf[['dis', 'alt', 'cor_t_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Td',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'v_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Vd.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'e_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Ed.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'n_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Nd.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 'h_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Hd.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")
        idf[['dis', 'alt', 't_comp', 'lat',
             'lon']].to_csv(aeromag_file + '.Td.lp',
                            index=False,
                            header=False,
                            sep='\t',
                            float_format="%.3f")

        if extension.startswith('c'):
            shutil.copyfile(aeromag_file, aeromag_file + '.lp')

        latlon_df = adf[['lat', 'lon']]
        latlon_file = aeromag_file + ".latlon"
        latlon_df.to_csv(latlon_file, sep=' ', index=False, header=False)
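A usage sketch; the glob pattern matches the aeromag pattern used in plot_tracks above, and the date file location mirrors the default argument, but both are assumptions about the local project layout.

import glob, os

aeromag_files = glob.glob('../raw_data/hi_alt/**/*.DAT')   # hypothetical raw aeromagnetic tracks
aeromag_preprocess(aeromag_files,
                   date_file=os.path.join('..', 'raw_data', 'dates.aeromag'))
# Each input gains .Vd/.Ed/.Nd/.Hd/.Td residual component files, interpolated
# .lp counterparts of each, and a .latlon file written alongside it.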
Example #15
            for dt in deskew_tracks:
                dt[0].remove()
            for df in deskew_fill:
                df[0].remove()
        except NameError:
            pass
        except TypeError:
            pass
        print("Plotting Tracks: ")
        deskew, deskew_tracks, deskew_fill = utl.open_deskew_file(
            dsk_path), [], []
        for j, (i, row) in enumerate(deskew.iterrows()):
            if remove_bad_data and row["quality"] != "g": continue
            # Read in deskewed profile
            # This is hard-coded now. It will be updated to take a list of profiles in the future
            dskd = utl.open_mag_file(
                os.path.join(row['data_dir'], row["comp_name"]))
            print("\tPlotting: ", row["comp_name"])

            # Define the angle along which to project
            perp = row["strike"] - 180
            lon = dskd["lon"].tolist()
            lat = dskd["lat"].tolist()
            mag = sk.phase_shift_data(dskd["mag"].tolist(), row["phase_shift"])
            #            print("\t\t",perp)
            #            print("\t\t",np.array(lon).shape,np.array(lat).shape,np.array(mag).shape)

            # Find distance to project
            if row["track_type"] == 'ship':
                pcol = '#000000'
                scle = 0.2 * 1e3
            elif row["track_type"] == 'aero':
Example #16
def auto_dsk(dsk_row,synth,bounds,conv_limit=0,conv_bounds=[None,None],phase_args=(0.,360.,1.),highcut=0.,order=3):
    """
    Returns the maximum likelihood phase shift to deskew the data to match a provided synthetic given a bounds
    on a window to match.

    Parameters
    ----------

    dsk_row : Pandas.Series
        Single row of a deskew file with valid path to data file
    synth : list
        This should be the output of the make synthetic function it needs to contain three elements
            0) an array of the synthetic magnetic anomalies
            1) an array of the distance coordinates of the points in 0, should be of equal length to 0, MUST be in
               the same coordinate system as the profile provided in dsk_row!!! Which it may not by default.
            2) the distance resolution of the synthetic in 0 and 1
    bounds : list of floats
        Has two elements which correspond to the left and right bounds of the window
    conv_limit : float, optional
        Whether or not to realign the anomaly at each phase shift using a time lagged convolution method, which
        increases runtime significantly but can also increase accuracy. This argument should be a positive
        float which corresponds to the amount of +- shift the anomaly is allowed to move; otherwise it should
        be 0 to not use the shift method (Default: 0, which implies not to use the method).
    conv_bounds : list of 2 floats, optional
        The left and right boundary in the distance domain to use to time lag convolve the synthetic and the filtered
        data signal. Thus 300 km of signal can be convolved but only the 10 km of motion allowed to pin down the 
        crossing location. (Default: [None,None], which implies conv_bounds=bounds)
    phase_args : tuple or other unpackable sequence, optional
        Arguments to np.arange which define the phases searched in the minimization. (Default: (0.,360.,1.) which
        implies a search of the entire parameter space of phases at 1 degree resolution)
    highcut : float, optional
        The upper cutoff frequency to filter the data by in order to remove any topographic anomalies in the data.
        This value should be between 0 and the Nyquist frequency of the synthetic, which MUST be regularly sampled like those
        returned by make_synthetic. The data is up or down sampled to the synthetic before filtering. (Default:
        0 which implies not to filter the data)
    order : int, optional
        The order of the lowpass butterworth filter to apply to the data.

    Returns
    ----------

    best_phase : float
        The maximum likelihood phase shift to match the data to the synthetic
    best_shift : float
        the maximum likelihood shift for the best_phase which aligned the two anomalies
    phase_func : Numpy.NdArray
        The summed phase asynchrony between the data and the synthetic as a function of phase shift (best_phase is
        the global minimum of this function)
    best_shifts : Numpy.NdArray
        the maximum likelihood shift as a function of the phase shift
    """

    #Unpack Arguments
    dage = dsk_row["age_max"]-dsk_row["age_min"]
    phases = np.arange(*phase_args)
    left_bound,right_bound = bounds
    synth_mag = np.array(synth[0])
    synth_dis = np.array(synth[1])
    ddis = synth[2]

    data_path = os.path.join(dsk_row["data_dir"],dsk_row["comp_name"])
    data_df = utl.open_mag_file(data_path)
    projected_distances = utl.calc_projected_distance(dsk_row['inter_lon'],dsk_row['inter_lat'],data_df['lon'].tolist(),data_df['lat'].tolist(),(180+dsk_row['strike'])%360)
    if conv_limit: #create the fully interpolated profile for convolution

        #create the shortened synthetic for the time lagged convolution
        if isinstance(conv_bounds[0],type(None)): conv_bounds[0] = bounds[0]
        if isinstance(conv_bounds[1],type(None)): conv_bounds[1] = bounds[1]
        left_idx = np.argmin(np.abs(synth_dis - conv_bounds[0]))
        right_idx = np.argmin(np.abs(synth_dis - conv_bounds[1]))
        right_idx,left_idx = max([right_idx,left_idx]),min([right_idx,left_idx])
        conv_synth,conv_synth_dis = synth_mag[left_idx:right_idx],synth_dis[left_idx:right_idx]

        if np.any(np.diff(projected_distances["dist"])<0): #np.interp requires a monotonically increasing x-axis, so reverse decreasing profiles
            mag = data_df["mag"].to_numpy()[::-1]
            mag_dis = projected_distances["dist"].to_numpy()[::-1]
        else:
            mag = data_df["mag"].to_numpy()
            mag_dis = projected_distances["dist"].to_numpy()
        full_imag = np.interp(conv_synth_dis,mag_dis,mag)
        if highcut: full_fimag = butter_lowpass_filter(full_imag,highcut=highcut,fs=1/ddis,order=order)
        else: full_fimag = full_imag

    #trim to only the window of relevance
    left_idx = np.argmin(np.abs(synth_dis - left_bound))
    right_idx = np.argmin(np.abs(synth_dis - right_bound))
    right_idx,left_idx = max([right_idx,left_idx]),min([right_idx,left_idx])
    tsynth_mag = synth_mag[left_idx:right_idx]
    tsynth_dis = synth_dis[left_idx:right_idx]
    N = len(tsynth_mag) #because this is easier and regularly sampled plus the user can set it simply
    al2 = np.angle(hilbert(np.real(tsynth_mag),N),deg=False)

    best_shifts = [] #record best shifts as function of phase shift
    phase_async_func = [] #record summed phase asynchrony as a function of phase shift
    for i,phase in enumerate(phases):
        shifted_mag = phase_shift_data(data_df["mag"],phase)

        if conv_limit: #DON'T YOU KNOW WE'RE GONNAAAA DOOOOOOO THE COOONVOLUTIOOOON!!!
            shifted_full_fimag = phase_shift_data(full_fimag,phase)
            correlation_func = np.abs(np.convolve(shifted_full_fimag,conv_synth,"full"))
            correlation_func = correlation_func[int(len(conv_synth)-conv_limit/ddis+.5):int(len(conv_synth)+conv_limit/ddis+.5)]

            best_shift = ddis*(len(correlation_func)/2-np.argmax(correlation_func))/2

        else: best_shift = 0.

        #trim the data to the right segments
        left_idx = np.argmin(np.abs(projected_distances["dist"] - left_bound + best_shift))
        right_idx = np.argmin(np.abs(projected_distances["dist"]- right_bound + best_shift))
        right_idx,left_idx = max([right_idx,left_idx]),min([right_idx,left_idx])
        tproj_dist = projected_distances["dist"][left_idx:right_idx] + best_shift
        tshifted_mag = shifted_mag[left_idx:right_idx]

        #numpy.interp only works for monotonic increasing independent variable data
        if np.any(np.diff(tproj_dist)<0): itshifted_mag = np.interp(-tsynth_dis,-tproj_dist,tshifted_mag)
        else: itshifted_mag = np.interp(tsynth_dis,tproj_dist,tshifted_mag)
        if highcut: fitshifted_mag = butter_lowpass_filter(itshifted_mag,highcut=highcut,fs=1/ddis,order=order)
        else: fitshifted_mag = itshifted_mag

        al1 = np.angle(hilbert(fitshifted_mag,N),deg=False)
        phase_asynchrony = np.sin((al1-al2)/2) #shouldn't go negative but...just in case
        best_shifts.append(best_shift)
        phase_async_func.append(phase_asynchrony.sum())

    best_idx = np.argmin(phase_async_func)

    return phases[best_idx],best_shifts[best_idx],phase_async_func,best_shifts
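A usage sketch; the deskew path is hypothetical, utl is assumed to be the helper module imported above, and synth is hand-built here as the (anomaly, distance, spacing) triple the docstring describes rather than produced by the package's synthetic generator.

import numpy as np
import pyskew.utilities as utl   # assumed import, matching the utl alias used above

deskew_df = utl.open_deskew_file('hypothetical_project.deskew')
dsk_row = deskew_df.iloc[0]

# Hand-built stand-in synthetic: anomaly values, matching distance axis (km), and spacing
ddis = 1.0
synth_dis = np.arange(-150.0, 150.0 + ddis, ddis)
synth_mag = 100.0 * np.cos(2 * np.pi * synth_dis / 40.0)
synth = [synth_mag, synth_dis, ddis]

best_phase, best_shift, phase_func, best_shifts = auto_dsk(
    dsk_row, synth, bounds=[-50.0, 50.0],
    conv_limit=10.0, phase_args=(0., 360., 1.), highcut=0.05, order=3)
print("maximum likelihood phase shift: %.1f (anomaly shift %.1f km)" % (best_phase, best_shift))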
Example #17
    def plot_tracks(self):
        try:
            dsk_row = self.parent.dsk_row
            dsk_data = self.parent.deskew_df[self.parent.deskew_df["sz_name"]
                                             == dsk_row["sz_name"]]
        except AttributeError:
            return
        #        projected_distances = utl.calc_projected_distance(dsk_row['inter_lon'],dsk_row['inter_lat'],mag_data['lon'].tolist(),mag_data['lat'].tolist(),dsk_row['strike'])
        #        dis = max(abs(projected_distances["dist"]))*1000
        #        geodict1 = Geodesic.WGS84.Direct(dsk_row['inter_lat'],dsk_row['inter_lon'],dsk_row['strike']-90,dis)
        #        geodict2 = Geodesic.WGS84.Direct(dsk_row['inter_lat'],dsk_row['inter_lon'],dsk_row['strike']+90,dis)
        self.ax.plot([geodict1["lon2"], geodict2["lon2"]],
                     [geodict1["lat2"], geodict2["lat2"]],
                     transform=ccrs.Geodetic(),
                     color="black",
                     linewidth=1,
                     linestyle='--')

        for j, (i, row) in enumerate(dsk_data.iterrows()):
            # Read in deskewed profile
            # This is hard-coded now. It will be updated to take a list of profiles in the future
            infile = os.path.join(row['data_dir'], row["comp_name"])
            if not os.path.isfile(infile):
                self.parent.user_warning("Data file %s could not be found" %
                                         infile)
            dskd = utl.open_mag_file(infile)

            # Define the angle along which to project
            perp = row["strike"] - 180
            lon = dskd["lon"]
            lat = dskd["lat"]
            mag = sk.phase_shift_data(dskd["mag"].tolist(), row["phase_shift"])

            # Find distance to project
            if row["track_type"] == 'ship':
                pcol = '#000000'
                scle = 0.2 * 1e3
            if row["track_type"] == 'aero':
                if 'Ed' in row["comp_name"]:
                    pcol = 'purple'
                else:
                    pcol = 'darkorchid'
                scle = 0.5 * 1e3

            # Project amplitude onto map
            mlats, mlons = [], []
            for i in range(len(mag)):
                gdsc = self.geoid.Direct(lat[i], lon[i], perp, mag[i] * scle)
                mlons.append(gdsc['lon2'])
                mlats.append(gdsc['lat2'])

            # Plot map elements
            deskew_tracks.append(
                self.ax.plot(utl.convert_to_0_360(lon),
                             lat,
                             '--',
                             linewidth=1.0,
                             transform=ccrs.PlateCarree(),
                             color=pcol,
                             zorder=990))
            deskew_tracks.append(
                self.ax.plot(utl.convert_to_0_360(mlons),
                             mlats,
                             '-',
                             linewidth=1.0,
                             transform=ccrs.PlateCarree(),
                             color=pcol,
                             zorder=1000))
            deskew_fill.append(
                self.ax.fill_between(utl.convert_to_0_360(
                    np.array(mlons)[mag > 0]),
                                     np.array(mlats)[mag > 0],
                                     lat[mag > 0],
                                     transform=ccrs.PlateCarree(),
                                     alpha=0.5,
                                     color=pcol))
Example #18
def plot_az_strike(track, spreading_zone_file, idx, az, strike, chron_color,
                   chron_name, results_directory, fout_name):

    #Create Figure
    fig = plt.figure(figsize=(9, 9), dpi=80)

    #Create Chron markers
    dft = utl.open_mag_file(track)
    lt = [[utl.convert_to_0_360(lon), float(lat)]
          for lon, lat in zip(dft['lon'], dft['lat'])]
    at = np.array(lt)
    lsz = [
        list(map(float, line.split()))
        for line in open(spreading_zone_file).readlines()
    ]
    asz = np.array(lsz)

    #Create Map
    #            gcm = create_basic_map() #uses defaults, hit shift-tab in parens to see what they are

    llcrnrlon = min(at[:, 0]) - 20 if min(at[:, 0]) - 20 > 0 else 0
    llcrnrlat = min(at[:, 1]) - 20 if min(at[:, 1]) - 20 > -89 else -89
    urcrnrlon = max(at[:, 0]) + 20 if max(at[:, 0]) + 20 < 360 else 360
    urcrnrlat = max(at[:, 1]) + 20 if max(at[:, 1]) + 20 < 89 else 89

    gcm = create_basic_map(projection='merc',
                           llcrnrlat=llcrnrlat,
                           urcrnrlat=urcrnrlat,
                           llcrnrlon=llcrnrlon,
                           urcrnrlon=urcrnrlon,
                           fig=fig)

    sz_handle, = gcm.plot(asz[:, 0],
                          asz[:, 1],
                          color=chron_color,
                          zorder=1,
                          label=chron_name,
                          transform=ccrs.PlateCarree())

    gcm_handle, = gcm.plot(at[:, 0],
                           at[:, 1],
                           color='k',
                           zorder=2,
                           label=os.path.basename(track),
                           transform=ccrs.PlateCarree())

    gcm.scatter(at[idx[1][0]][0],
                at[idx[1][0]][1],
                color='g',
                marker='o',
                s=10,
                zorder=3,
                label='nearest intercept',
                transform=ccrs.PlateCarree())

    geodict = Geodesic(6371000., 0.).Direct(float(at[idx[1][0]][1]),
                                            float(at[idx[1][0]][0]), float(az),
                                            1000000)
    b_lon, b_lat = (360 + geodict["lon2"]) % 360, geodict["lat2"]
    gcm.arrow(b_lon,
              b_lat,
              b_lon - at[idx[1][0]][0],
              b_lat - at[idx[1][0]][1],
              fc="white",
              ec="r",
              linewidth=1,
              head_width=1,
              head_length=1,
              label='azimuth',
              transform=ccrs.PlateCarree())

    geodict = Geodesic(6371000., 0.).Direct(float(at[idx[1][0]][1]),
                                            float(at[idx[1][0]][0]),
                                            float(strike), 1000000)
    b_lon, b_lat = (360 + geodict["lon2"]) % 360, geodict["lat2"]
    gcm.arrow(b_lon,
              b_lat,
              b_lon - at[idx[1][0]][0],
              b_lat - at[idx[1][0]][1],
              fc="white",
              ec="pink",
              linewidth=1,
              head_width=1,
              head_length=1,
              label='strike',
              transform=ccrs.PlateCarree())

    #plot title and labels
    plt.title(os.path.basename(track))
    plt.legend(loc='best')

    az_plots_dir = os.path.join(results_directory, "azimuth_strike_plots")
    utl.check_dir(az_plots_dir)

    fig.savefig(
        os.path.join(az_plots_dir,
                     os.path.basename(fout_name)[:-5] + "png"))
    plt.close(fig)