def _loop_over_fields(
        json_file, pickle_file, inpdir=None, outdir=None, verbose=False):
    """
    Compute texture histograms for the standard suite of radar fields
    listed in a JSON file and pickle the results.

    Parameters
    ----------
    json_file : str
        JSON file listing the radar files to process.
    pickle_file : str
        Output pickle file name for the texture histograms.
    inpdir : str, optional
        Directory containing the radar files named in `json_file`.
    outdir : str, optional
        Directory where `pickle_file` is written.
    verbose : bool, optional
        If True, print progress information.

    NOTE(review): this function is defined twice in this file; the later
    definition shadows this one at import time -- confirm which copy is
    the intended implementation.
    """
    # Keyword arguments shared by every histogram_from_json call below.
    common = dict(
        inpdir=inpdir, min_ncp=MIN_NCP, vcp_sweeps=VCP_SWEEPS,
        vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
        exclude_fields=EXCLUDE_FIELDS, fill_value=None,
        ncp_field=NCP_FIELD, verbose=verbose)

    # Reflectivity texture uses the texture-window API (texture_window /
    # texture_sample / num_bins) rather than the ray/gate-window API the
    # remaining fields use.
    if verbose:
        print('Processing reflectivity')
    refl = texture_fields.histogram_from_json(
        json_file, REFL_FIELD, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, num_bins=BINS_REFL,
        limits=LIMITS_REFL, **common)

    # The remaining six fields share the same window parameters and
    # differ only in field name, bin count, and bin limits.
    window_kwargs = dict(common)
    window_kwargs.update(
        ray_window=RAY_WINDOW, gate_window=GATE_WINDOW,
        min_sample=MIN_SAMPLE)
    specs = [
        ('Doppler velocity', VDOP_FIELD, BINS_VDOP, LIMITS_VDOP),
        ('spectrum width', SW_FIELD, BINS_SW, LIMITS_SW),
        ('differential phase', PHIDP_FIELD, BINS_PHIDP, LIMITS_PHIDP),
        ('differential reflectivity', ZDR_FIELD, BINS_ZDR, LIMITS_ZDR),
        ('copolar correlation', RHOHV_FIELD, BINS_RHOHV, LIMITS_RHOHV),
        ('normalized coherent power', NCP_FIELD, BINS_NCP, LIMITS_NCP),
        ]

    # Pack histograms together in the canonical order:
    # refl, vdop, sw, phidp, zdr, rhohv, ncp.
    histograms = [refl]
    for label, field, bins, limits in specs:
        if verbose:
            print('Processing %s' % label)
        histograms.append(texture_fields.histogram_from_json(
            json_file, field, bins=bins, limits=limits, **window_kwargs))

    # Pickle texture histograms
    texture_fields._pickle_histograms(histograms, pickle_file, outdir=outdir)

    return
def _loop_over_fields(json_file, pickle_file, inpdir=None, outdir=None,
                      verbose=False):
    """
    Build texture histograms for each radar field from the files listed
    in `json_file` and save them all to a single pickle file.

    Parameters
    ----------
    json_file : str
        JSON file listing the radar files to process.
    pickle_file : str
        Output pickle file name.
    inpdir : str, optional
        Directory containing the radar files.
    outdir : str, optional
        Directory where the pickle file is written.
    verbose : bool, optional
        If True, print progress information.
    """
    # Reflectivity texture
    if verbose:
        print('Processing reflectivity')
    hist_refl = texture_fields.histogram_from_json(
        json_file, REFL_FIELD, inpdir=inpdir,
        texture_window=TEXTURE_WINDOW, texture_sample=TEXTURE_SAMPLE,
        num_bins=BINS_REFL, limits=LIMITS_REFL, min_ncp=MIN_NCP,
        vcp_sweeps=VCP_SWEEPS, vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP,
        max_sweep=MAX_SWEEP, exclude_fields=EXCLUDE_FIELDS,
        fill_value=None, ncp_field=NCP_FIELD, verbose=verbose)

    # Doppler velocity texture
    if verbose:
        print('Processing Doppler velocity')
    hist_vdop = texture_fields.histogram_from_json(
        json_file, VDOP_FIELD, inpdir=inpdir, ray_window=RAY_WINDOW,
        gate_window=GATE_WINDOW, min_sample=MIN_SAMPLE, bins=BINS_VDOP,
        limits=LIMITS_VDOP, min_ncp=MIN_NCP, vcp_sweeps=VCP_SWEEPS,
        vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
        exclude_fields=EXCLUDE_FIELDS, fill_value=None,
        ncp_field=NCP_FIELD, verbose=verbose)

    # Spectrum width texture
    if verbose:
        print('Processing spectrum width')
    hist_sw = texture_fields.histogram_from_json(
        json_file, SW_FIELD, inpdir=inpdir, ray_window=RAY_WINDOW,
        gate_window=GATE_WINDOW, min_sample=MIN_SAMPLE, bins=BINS_SW,
        limits=LIMITS_SW, min_ncp=MIN_NCP, vcp_sweeps=VCP_SWEEPS,
        vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
        exclude_fields=EXCLUDE_FIELDS, fill_value=None,
        ncp_field=NCP_FIELD, verbose=verbose)

    # Differential phase texture
    if verbose:
        print('Processing differential phase')
    hist_phidp = texture_fields.histogram_from_json(
        json_file, PHIDP_FIELD, inpdir=inpdir, ray_window=RAY_WINDOW,
        gate_window=GATE_WINDOW, min_sample=MIN_SAMPLE, bins=BINS_PHIDP,
        limits=LIMITS_PHIDP, min_ncp=MIN_NCP, vcp_sweeps=VCP_SWEEPS,
        vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
        exclude_fields=EXCLUDE_FIELDS, fill_value=None,
        ncp_field=NCP_FIELD, verbose=verbose)

    # Differential reflectivity texture
    if verbose:
        print('Processing differential reflectivity')
    hist_zdr = texture_fields.histogram_from_json(
        json_file, ZDR_FIELD, inpdir=inpdir, ray_window=RAY_WINDOW,
        gate_window=GATE_WINDOW, min_sample=MIN_SAMPLE, bins=BINS_ZDR,
        limits=LIMITS_ZDR, min_ncp=MIN_NCP, vcp_sweeps=VCP_SWEEPS,
        vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
        exclude_fields=EXCLUDE_FIELDS, fill_value=None,
        ncp_field=NCP_FIELD, verbose=verbose)

    # Copolar correlation texture
    if verbose:
        print('Processing copolar correlation')
    hist_rhohv = texture_fields.histogram_from_json(
        json_file, RHOHV_FIELD, inpdir=inpdir, ray_window=RAY_WINDOW,
        gate_window=GATE_WINDOW, min_sample=MIN_SAMPLE, bins=BINS_RHOHV,
        limits=LIMITS_RHOHV, min_ncp=MIN_NCP, vcp_sweeps=VCP_SWEEPS,
        vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
        exclude_fields=EXCLUDE_FIELDS, fill_value=None,
        ncp_field=NCP_FIELD, verbose=verbose)

    # Normalized coherent power texture
    if verbose:
        print('Processing normalized coherent power')
    hist_ncp = texture_fields.histogram_from_json(
        json_file, NCP_FIELD, inpdir=inpdir, ray_window=RAY_WINDOW,
        gate_window=GATE_WINDOW, min_sample=MIN_SAMPLE, bins=BINS_NCP,
        limits=LIMITS_NCP, min_ncp=MIN_NCP, vcp_sweeps=VCP_SWEEPS,
        vcp_rays=VCP_RAYS, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
        exclude_fields=EXCLUDE_FIELDS, fill_value=None,
        ncp_field=NCP_FIELD, verbose=verbose)

    # Pack the histograms together and pickle them.
    histograms = [hist_refl, hist_vdop, hist_sw, hist_phidp, hist_zdr,
                  hist_rhohv, hist_ncp]
    texture_fields._pickle_histograms(histograms, pickle_file, outdir=outdir)

    return
def _loop_over_dict(
        json_file, pickle_file, inpdir=None, outdir=None, verbose=False):
    """
    Accumulate texture histogram counts over every radar volume listed in
    a JSON file, normalize the histograms, and pickle the results.

    Counts are accumulated into the module-level HIST_DICT, whose keys are
    then renamed with a ``_texture`` suffix before pickling.

    Parameters
    ----------
    json_file : str
        JSON file listing the radar files to process.
    pickle_file : str
        Output pickle file name for the histogram dictionary.
    inpdir : str, optional
        Directory containing the radar files named in `json_file`.
    outdir : str, optional
        Directory where `pickle_file` is written.
    verbose : bool, optional
        If True, print progress information.
    """
    # Parse radar file names from the JSON file.
    with open(json_file, 'r') as fid:
        files = json.load(fid)
    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    # Loop over all files, accumulating histogram counts into HIST_DICT.
    for f in files:

        # Read radar data, skipping volumes that do not match the
        # expected VCP geometry.
        radar = read(f, exclude_fields=EXCLUDE_FIELDS)
        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Determine significant detection of the radar: Doppler velocity
        # coherency, then spectrum width coherency, then the overall
        # significant-detection pass, each refining the gate filter.
        gatefilter = noise.velocity_coherency(
            radar, gatefilter=None, num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None,
            nyquist=None, rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE,
            fill_value=None, verbose=verbose)
        gatefilter = noise.spectrum_width_coherency(
            radar, gatefilter=gatefilter, num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None,
            rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE,
            fill_value=None, verbose=verbose)
        gatefilter = noise.significant_detection(
            radar, gatefilter=gatefilter, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE,
            min_ncp=MIN_NCP, detect_field=None, verbose=verbose)

        # Compute histogram counts for each texture field.
        texture_fields.histograms_from_radar(
            radar, HIST_DICT, gatefilter=gatefilter,
            texture_window=TEXTURE_WINDOW, texture_sample=TEXTURE_SAMPLE,
            min_ncp=MIN_NCP, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
            min_range=MIN_RANGE, max_range=MAX_RANGE,
            rays_wrap_around=False, fill_value=None, ncp_field=NCP_FIELD,
            verbose=verbose)

    # Normalize histograms for each field and compute probability densities.
    for field in HIST_DICT:

        # Parse bin edges and histogram counts.
        bin_edges = HIST_DICT[field]['bin edges']
        counts = HIST_DICT[field]['histogram counts']

        # Compute normalized histogram and probability density.
        # NOTE(review): if no valid gates were found, counts.max() is 0
        # and this divides by zero -- confirm inputs always populate the
        # histograms.
        counts_norm = counts.astype(np.float64) / counts.max()
        pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))
        HIST_DICT[field]['normalized histogram'] = counts_norm
        HIST_DICT[field]['probability density'] = pdf

        # Include the processing parameters in the histogram dictionary.
        HIST_DICT[field]['radar files'] = files
        HIST_DICT[field]['min sweep'] = MIN_SWEEP
        HIST_DICT[field]['max sweep'] = MAX_SWEEP
        HIST_DICT[field]['min range'] = MIN_RANGE
        HIST_DICT[field]['max range'] = MAX_RANGE
        HIST_DICT[field]['sweeps in VCP'] = VCP_SWEEPS
        HIST_DICT[field]['rays in VCP'] = VCP_RAYS
        HIST_DICT[field]['minimum normalized coherent power'] = MIN_NCP

    # Change dictionary field names to include texture. Snapshot the keys
    # first: popping and inserting while iterating a live key view is an
    # error on modern Python (the original relied on Python 2's list copy).
    for field in list(HIST_DICT.keys()):
        HIST_DICT['{}_texture'.format(field)] = HIST_DICT.pop(field)

    # Pickle histogram data.
    texture_fields._pickle_histograms(
        HIST_DICT, pickle_file, outdir=outdir)

    return
def _loop_over_dict(json_file, pickle_file, inpdir=None, outdir=None,
                    verbose=False):
    """
    Process every radar volume named in `json_file`, accumulating texture
    histogram counts into the module-level HIST_DICT, then normalize the
    histograms and pickle the dictionary.

    Parameters
    ----------
    json_file : str
        JSON file listing the radar files to process.
    pickle_file : str
        Output pickle file name.
    inpdir : str, optional
        Directory containing the radar files.
    outdir : str, optional
        Directory where the pickle file is written.
    verbose : bool, optional
        If True, print progress information.
    """

    def _build_gatefilter(radar):
        # Significant-detection gate filter: velocity coherency, then
        # spectrum width coherency, then the overall detection pass.
        gf = noise.velocity_coherency(
            radar, gatefilter=None, num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None,
            nyquist=None, rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE,
            fill_value=None, verbose=verbose)
        gf = noise.spectrum_width_coherency(
            radar, gatefilter=gf, num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None,
            rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE,
            fill_value=None, verbose=verbose)
        return noise.significant_detection(
            radar, gatefilter=gf, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE,
            min_ncp=MIN_NCP, detect_field=None, verbose=verbose)

    # Parse the radar file list from the JSON file.
    with open(json_file, 'r') as fid:
        names = json.load(fid)
    if inpdir is not None:
        names = [os.path.join(inpdir, name) for name in names]

    # Accumulate histogram counts over all files.
    for name in names:

        # Read radar data; skip volumes whose scan geometry does not
        # match the expected VCP.
        radar = read(name, exclude_fields=EXCLUDE_FIELDS)
        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(name))

        # Determine significant detection of the radar.
        gatefilter = _build_gatefilter(radar)

        # Compute histogram counts for each texture field.
        texture_fields.histograms_from_radar(
            radar, HIST_DICT, gatefilter=gatefilter,
            texture_window=TEXTURE_WINDOW, texture_sample=TEXTURE_SAMPLE,
            min_ncp=MIN_NCP, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
            min_range=MIN_RANGE, max_range=MAX_RANGE,
            rays_wrap_around=False, fill_value=None, ncp_field=NCP_FIELD,
            verbose=verbose)

    # Normalize each field's histogram, derive its probability density,
    # and attach the processing parameters.
    for field in HIST_DICT:
        entry = HIST_DICT[field]
        edges = entry['bin edges']
        counts = entry['histogram counts']

        scaled = counts.astype(np.float64) / counts.max()
        density = scaled / np.sum(scaled * np.diff(edges))

        entry.update({
            'normalized histogram': scaled,
            'probability density': density,
            'radar files': names,
            'min sweep': MIN_SWEEP,
            'max sweep': MAX_SWEEP,
            'min range': MIN_RANGE,
            'max range': MAX_RANGE,
            'sweeps in VCP': VCP_SWEEPS,
            'rays in VCP': VCP_RAYS,
            'minimum normalized coherent power': MIN_NCP,
            })

    # Rename the dictionary keys to carry a texture suffix.
    for field in HIST_DICT.keys():
        HIST_DICT['{}_texture'.format(field)] = HIST_DICT.pop(field)

    # Pickle histogram data.
    texture_fields._pickle_histograms(HIST_DICT, pickle_file, outdir=outdir)

    return