def auto_saliency(self, data):
     
     """Preview the saved saliency maps.
     
     Args:
         data (xarray.Dataset): The opened saliency-map netCDF dataset.
     
     """
     sm_data = data.saliency_maps
     fig, axes = plt.subplots(4, 8, figsize=(16, 8), sharex=True, sharey=True)
     plt.subplots_adjust(left=0.02, bottom=0.02, right=0.96, top=0.94, wspace=0, hspace=0)
     levels = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80]
     cmap_dbz = colortables.get_colortable('NWSReflectivity')
     for conv_filter, ax in enumerate(axes.ravel()):
         ax.contourf(data.dbz,
                     cmap=cmap_dbz, levels=levels, alpha=0.2)
         ax.contour(sm_data[conv_filter, :, :],
                    [-3, -2, -1, 1, 2, 3], vmin=-3, vmax=3, cmap="seismic", linewidths=3.0)
         ax.set_xticks([])
         ax.set_yticks([])
         ax.xaxis.set_ticklabels([])
         ax.yaxis.set_ticklabels([])
         ax.text(16, 16, conv_filter, fontsize=14)
     plt.suptitle("Final Convolution Filter Saliency Maps", fontsize=14, y=0.98)
     plt.show()
def preview_dbz(self, composite_group, input_index, test_data):
     
     """Preview the testing data ``DBZ`` values to help choose the example for ``saliency_preview``.
     
     Args:
         composite_group (str): The subset of the test data based on prediction outcome. Choices include true positive ``tp``, 
                                true positive > 99% probability ``tp_99``, false positive ``fp``, false positive > 99% probability 
                                ``fp_99``, false negative ``fn``, false negative < 1% probability ``fn_01``, true negative ``tn``, 
                                true negative < 1% probability ``tn_01``.
         input_index (int): The example's index to preview.
         test_data (dict of numpy arrays): The test data, keyed by composite group, to use for saliency map generation.
     
     """
     levels=[0,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80]
     cmap = colortables.get_colortable('NWSReflectivity')
     return xr.plot.contourf(test_data[composite_group][input_index, :, :, self.extract_dbz_index(test_data)] * self.dbz_std + self.dbz_mean,
                             cmap=cmap, levels=levels)
def preview_saliency(self, composite_group, input_index, dl_model, test_data):
        
        """Preview the deep learning model input using saliency maps.
        
        Args:
            composite_group (str): The subset of the test data based on prediction outcome. Choices include true positive ``tp``, 
                                   true positive > 99% probability ``tp_99``, false positive ``fp``, false positive > 99% probability 
                                   ``fp_99``, false negative ``fn``, false negative < 1% probability ``fn_01``, true negative ``tn``, 
                                   true negative < 1% probability ``tn_01``.
            input_index (int): The example's index to preview.
            dl_model (Keras saved model): The DL model to preview. Layers and activations will be extracted from the loaded model.
            test_data (dict of numpy arrays): The test data, keyed by composite group, to use for saliency map generation.
        
        """
        testdata = test_data[composite_group]

        fig, axes = plt.subplots(4, 8, figsize=(16, 8), sharex=True, sharey=True)
        plt.subplots_adjust(left=0.02, bottom=0.02, right=0.96, top=0.94, wspace=0, hspace=0)
        
        for conv_filter, ax in enumerate(axes.ravel()):
            print(conv_filter)
            # Gradient of |filter activation - 1| with respect to the model input
            out_diff = K.abs(dl_model.layers[-4].output[0, conv_filter] - 1)     # dense layer that was added
            grad = K.gradients(out_diff, [dl_model.input])[0]
            grad /= K.maximum(K.std(grad), K.epsilon())                          # normalize by the gradient's std
            iterate = K.function([dl_model.input, K.learning_phase()], [out_diff, grad])
            input_img_data_neuron_grad = np.zeros((1, 32, 32, 20))               # pre-allocated but not used below
            input_img_data_neuron = np.copy(testdata[input_index:input_index + 1, :, :, :-6])
            out_loss, out_grad = iterate([input_img_data_neuron, 1])             # evaluate on the selected example

            #DBZ
            levels=[0,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80]
            cmap_dbz = colortables.get_colortable('NWSReflectivity')
            ax.contourf(test_data[composite_group][input_index, :, :, self.extract_dbz_index(testdata)] * self.dbz_std + self.dbz_mean, 
                        cmap=cmap_dbz, levels=levels, alpha=0.2)

            ax.contour(gaussian_filter(-out_grad[0, :, :, self.extract_variable_index(testdata)], 1), 
                       [-3, -2, -1, 1, 2, 3], vmin=-3, vmax=3, cmap="seismic", linewidths=3.0)
            ax.set_xticks([])
            ax.set_yticks([])
            ax.xaxis.set_ticklabels([])
            ax.yaxis.set_ticklabels([])
            ax.text(16, 16, conv_filter, fontsize=14)
        plt.suptitle("Final Convolution Filter Saliency Maps", fontsize=14, y=0.98)
        plt.show()
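# A minimal driver sketch for the three methods above, assuming the class that
# defines them is importable. ``SaliencyViewer``, the file paths, and the contents
# of ``test_data`` are hypothetical placeholders; ``test_data`` is a dict of numpy
# arrays keyed by composite group, matching how the methods index it. The gradient
# code above relies on the TF1-era Keras backend (``K.gradients``), so standalone
# Keras is used here.
import numpy as np
import xarray as xr
from keras.models import load_model

viewer = SaliencyViewer()                               # hypothetical class providing the methods above
dl_model = load_model('conv_model.h5')                  # hypothetical saved Keras model
test_data = {'tp_99': np.load('tp_99_examples.npy')}    # hypothetical test examples for one composite group

viewer.preview_dbz('tp_99', 0, test_data)                     # inspect example 0's DBZ field
viewer.preview_saliency('tp_99', 0, dl_model, test_data)      # saliency maps for that example
viewer.auto_saliency(xr.open_dataset('saliency_maps.nc'))     # previously saved maps (hypothetical file)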
Example #4
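# The snippet below references several names that are defined elsewhere. A hedged
# setup sketch (the file name is hypothetical, and ``trans`` is whatever map
# projection the data are plotted in; PlateCarree is only an illustrative choice):
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import cartopy.crs as ccrs
import pyart
from netCDF4 import Dataset
from metpy.plots import colortables

nc = Dataset('satellite_ir.nc')    # hypothetical file holding IR brightness temperatures
trans = ccrs.PlateCarree()         # illustrative projection; the original ``trans`` is not shown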
#x = _x
#y = _y
#c = _c

time = nc['t']

fig = plt.figure(figsize=(15, 15))
ax = fig.add_subplot(1, 1, 1, projection=trans)
#ax.set_xlim(west,east)
#ax.set_ylim(south,north)

vmin = 198
vmax = 320
lcl = 269.5

colormap = colortables.get_colortable('ir_rgbv')

#colormap = pyart.graph.cm.NWSRef_r
colormap = pyart.graph.cm_colorblind.HomeyerRainbow_r
#colors1 = plt.cm.Purples(np.linspace(.7, 1, int(((205-vmin)/(vmax-vmin))*1000)))
#colors2 = colormap(np.linspace(.2, 1, int(((253-205)/(vmax-vmin))*1000)))
colors2 = colormap(
    np.linspace(0, 1, int(((253 - vmin) / (vmax - vmin)) * 1000)))
colors3 = plt.cm.Greys(
    np.linspace(.5, .7, int(((lcl - 253) / (vmax - vmin)) * 1000)))
colors4 = plt.cm.Greys(
    np.linspace(.8, .95, int(((vmax - lcl) / (vmax - vmin)) * 1000)))
#colors4 = plt.cm.Greys_r(np.linspace(.2, .4, int(((vmax-lcl)/(vmax-vmin))*1000)))
#colors = np.vstack((colors1, colors2, colors3, colors4))
colors = np.vstack((colors2, colors3, colors4))
mymap = mcolors.LinearSegmentedColormap.from_list('my_colormap', colors)
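# A short sketch of applying the stitched colormap, assuming a hypothetical 2-D
# brightness-temperature array ``tb`` in kelvin; vmin/vmax match the segment
# ranges above so the transitions fall (approximately) at 253 K and at ``lcl``.
tb = np.random.uniform(vmin, vmax, size=(480, 640))   # hypothetical IR brightness temperatures
fig2, ax2 = plt.subplots(figsize=(8, 6))
im2 = ax2.imshow(tb, cmap=mymap, vmin=vmin, vmax=vmax)
fig2.colorbar(im2, ax=ax2, label='Brightness temperature (K)')
plt.show()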
Example #5
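# The code below assumes ``f``, ``sweep``, and ``az`` already exist. A hedged setup
# sketch following MetPy's Level2File interface (the archive name is the sample
# file used in MetPy's documentation):
import numpy as np
import matplotlib.pyplot as plt
from metpy.cbook import get_test_data
from metpy.io import Level2File
from metpy.plots import add_metpy_logo, add_timestamp, colortables

f = Level2File(get_test_data('KTLX20130520_201643_V06.gz', as_file_obj=False))
sweep = 0
# First item in each ray is the header, which carries the azimuth angle
az = np.array([ray[0].az_angle for ray in f.sweeps[sweep]])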
# The 5th item in each ray is a dict mapping a variable name (byte string) to a tuple
# of (header, data array)
ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
ref_range = np.arange(ref_hdr.num_gates) * ref_hdr.gate_width + ref_hdr.first_gate
ref = np.array([ray[4][b'REF'][1] for ray in f.sweeps[sweep]])

rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
rho_range = (np.arange(rho_hdr.num_gates + 1) - 0.5) * rho_hdr.gate_width + rho_hdr.first_gate
rho = np.array([ray[4][b'RHO'][1] for ray in f.sweeps[sweep]])

###########################################
fig, axes = plt.subplots(1, 2, figsize=(15, 8))
add_metpy_logo(fig, 190, 85, size='large')
for var_data, var_range, ax in zip((ref, rho), (ref_range, rho_range), axes):
    # Turn into an array, then mask
    data = np.ma.array(var_data)
    data[np.isnan(data)] = np.ma.masked

    # Convert az,range to x,y
    xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
    ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))

    # Plot the data
    # 'viridis' is Matplotlib's built-in colormap rather than a MetPy color table
    ax.pcolormesh(xlocs, ylocs, data, cmap='viridis')
    ax.set_aspect('equal', 'datalim')
    ax.set_xlim(-40, 20)
    ax.set_ylim(-30, 30)
    add_timestamp(ax, f.dt, y=0.02, high_contrast=True)

plt.show()
Example #6
#Import for colortables
from metpy.plots import colortables

# Import for the bonus exercise
from metpy.plots import add_timestamp
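# The plotting code below assumes ``ds``, ``dat``, ``x``, ``y``, ``proj``, and ``channel``
# are already defined. A hedged setup sketch using xarray plus MetPy's CF helpers
# (the file name is hypothetical; the variable name matches NOAA sectorized CMI files):
from datetime import datetime

import cartopy.feature as cfeature
import matplotlib.pyplot as plt
import xarray as xr

ds = xr.open_dataset('goes16_mid_level_wv.nc')   # hypothetical GOES-16 water vapor netCDF
dat = ds.metpy.parse_cf('Sectorized_CMI')        # CF-aware DataArray (variable name may differ per file)
proj = dat.metpy.cartopy_crs                     # cartopy projection built from the file's metadata
x, y = dat['x'], dat['y']                        # projection x/y coordinates
channel = 9                                      # hypothetical band number, used in the label below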

fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1, projection=proj)
ax.add_feature(cfeature.COASTLINE.with_scale('50m'), linewidth=2)
ax.add_feature(cfeature.STATES.with_scale('50m'), linestyle=':', edgecolor='black')
ax.add_feature(cfeature.BORDERS.with_scale('50m'), linewidth=2, edgecolor='black')

im = ax.imshow(dat, extent=(x.min(), x.max(), y.min(), y.max()), origin='upper')

wv_cmap = colortables.get_colortable('WVCIMSS_r')
im.set_cmap(wv_cmap)

#Bonus
start_time = datetime.strptime(ds.start_date_time, '%Y%j%H%M%S')
add_timestamp(ax, time=start_time, pretext=f'GOES-16 Ch. {channel} ',
              high_contrast=True, fontsize=16, y=0.01)

plt.show()