def Read_LUT(prjType, filepath, dataRes):
    """Read the lat/lon lookup table according to the data's projection type and resolution.

    :param prjType: projection type ('GEO', 'NUL' or 'NOM')
    :param filepath: data file carrying its own lat/lon variables (GEO/NUL case)
    :param dataRes: data resolution ('4000M', '1000M' or '012KM')
    :return: (lat, lon) masked arrays
    """
    if prjType in ('GEO', 'NUL'):
        lutpath = filepath
        with Dataset(lutpath, 'r') as fid:
            lat = fid.variables['lat'][:]
            lon = fid.variables['lon'][:]
    elif prjType == 'NOM':
        if dataRes == '4000M':
            lutpath = r'/FY4APGSQCS/QCS/pyFY4AL2src/LUT/FullMask_Grid_4000_1047E_BSQ.nc'
        elif dataRes == '1000M':
            lutpath = r'/FY4APGSQCS/QCS/pyFY4AL2src/LUT/FullMask_Grid_1000_1047E_BSQ.nc'
        elif dataRes == '012KM':
            lutpath = r'/FY4APGSQCS/QCS/pyFY4AL2src/LUT/FullMask_Grid_12000_1047E_BSQ.nc'
        with Dataset(lutpath, 'r') as fid:
            lat = fid.variables['LAT'][:]
            lon = fid.variables['LON'][:]
    lat = ma.masked_values(lat, -999.)
    lon = ma.masked_values(lon, -999.)
    return lat, lon
def calculateMeans(self, synMean, synMin, synMed, synMax, synMinCP):
    """
    Calculate mean, median, minimum, maximum and percentiles of pressure
    values from synthetic events.

    :param synMean: `numpy.ndarray`
    :param synMin: `numpy.ndarray`
    :param synMed: `numpy.ndarray`
    :param synMax: `numpy.ndarray`
    :param synMinCP: `numpy.ndarray`
    """
    synMean = ma.masked_values(synMean, -9999.)
    synMin = ma.masked_values(synMin, -9999.)
    synMed = ma.masked_values(synMed, -9999.)
    synMax = ma.masked_values(synMax, -9999.)

    self.synMean = ma.mean(synMean, axis=0)
    self.synMed = ma.mean(synMed, axis=0)
    self.synMin = ma.mean(synMin, axis=0)
    self.synMax = ma.mean(synMax, axis=0)

    self.synMeanUpper = percentile(ma.compressed(synMean), per=95, axis=0)
    self.synMeanLower = percentile(ma.compressed(synMean), per=5, axis=0)
    self.synMinUpper = percentile(ma.compressed(synMin), per=95, axis=0)
    self.synMinLower = percentile(ma.compressed(synMin), per=5, axis=0)

    self.synMinCPDist = np.mean(synMinCP, axis=0)
    self.synMinCPLower = percentile(synMinCP, per=5, axis=0)
    self.synMinCPUpper = percentile(synMinCP, per=95, axis=0)

    r = list(np.random.uniform(high=synMean.shape[0], size=3).astype(int))
    self.synRandomMinima = synMean[r, :, :]
def test_unsorted_input(self):
    "Test tsfromtxt when the dates of the input are not sorted."
    datatxt = """dates,a,b
2007-04-02 01:00,,0.
2007-04-02 02:00,2.,20
2007-04-02 03:00,,
2007-04-02 00:00,0.,10.
2007-04-02 03:00,3.,30
2007-04-02 01:00,1.,10
2007-04-02 02:00,,
"""
    data = StringIO.StringIO(datatxt)
    dates = [Date('H', '2007-04-02 0%i:00' % hour)
             for hour in (1, 2, 3, 0, 1, 2)]
    controla = ma.masked_values([0, -1, 1, 2, -1, -1, 3], -1)
    controlb = ma.masked_values([10, 0, 10, 20, -1, -1, 30], -1)
    # data = StringIO.StringIO(datatxt)
    test = tsfromtxt(data, delimiter=',', names=True, freq='H')
    assert_equal(test.dtype.names, ['a', 'b'])
    assert_equal(test['a'], controla)
    assert_equal(test['a'].mask, controla.mask)
    assert_equal(test['b'], controlb)
    assert_equal(test['b'].mask, controlb.mask)
def get_mean_percentile():
    # Mask zero pixels in each channel, then take the 98th percentile.
    # (The blue channel originally used 100, which looks like a typo given
    # the p98_* names; aligned to 98 here.)
    p98_red = calculate_percentile(ma.masked_values(controllers.get_red(), 0), 98)
    p98_green = calculate_percentile(ma.masked_values(controllers.get_green(), 0), 98)
    p98_blue = calculate_percentile(ma.masked_values(controllers.get_blue(), 0), 98)
    p98_mean = (p98_red + p98_green + p98_blue) / 3
    print(p98_red, p98_green, p98_blue, p98_mean)
    return p98_red, p98_green, p98_blue, p98_mean
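# A minimal hedged sketch (plain NumPy; the `controllers` and
# `calculate_percentile` helpers above are project-specific) of taking a
# percentile over a masked channel by compressing the mask away first.
# The pixel values are illustrative.
import numpy as np
import numpy.ma as ma

channel = np.array([0.0, 12.0, 240.0, 255.0, 0.0, 30.0])
masked_channel = ma.masked_values(channel, 0)
p98 = np.percentile(masked_channel.compressed(), 98)  # percentile over unmasked values only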
def viz_ress(n, vname):
    N = pyart.io.read(n)
    display = pyart.graph.RadarMapDisplay(N)
    x = N.fields[vname]['data']
    # Mask everything beyond gate 120, then the unwanted class values.
    m = np.zeros_like(x)
    m[:, 120:] = 1
    y = ma.masked_array(x, m)
    for value in (0.0, 10.0, 20.0, 140.0, 150.0):
        y = ma.masked_values(y, value)
    N.fields[vname]['data'] = y
    fig = plt.figure(figsize=(6, 5))
    ax = fig.add_subplot(111)
    display.plot(vname, 0, title=vname, colorbar_label='', ax=ax, vmin=0, vmax=100)
    display.set_limits(xlim=(-40, 40), ylim=(-40, 40), ax=ax)
    plt.show()
    fig.savefig("./tmp_test/" + vname + ".png", bbox_inches='tight')
def _order1_cartesian_curl():
    """Calculate curl using Cartesian first-order differencing.

    An ma array is returned.

    Algorithm: First-order differencing (interior points use centered
    differencing, and end points use forward or backward differencing,
    as applicable) in Cartesian coordinates (see Glickman [2000], p. 194).
    """
    dFy_dx_N = np.zeros((len(y), len(x)), dtype=float)
    dFx_dy_N = np.zeros((len(y), len(x)), dtype=float)
    for iy in range(len(y)):
        dFy_dx_N[iy, :] = deriv(x, np.ravel(Fy[iy, :]),
                                missing=missing, algorithm='order1')
    for ix in range(len(x)):
        dFx_dy_N[:, ix] = deriv(y, np.ravel(Fx[:, ix]),
                                missing=missing, algorithm='order1')
    dFy_dx = ma.masked_values(dFy_dx_N, missing, copy=0)
    dFx_dy = ma.masked_values(dFx_dy_N, missing, copy=0)
    return dFy_dx - dFx_dy
def woa_profile_from_dap(var, d, lat, lon, depth, cfg):
    """
    Monthly Climatologic Mean and Standard Deviation from WOA,
    used either for temperature or salinity.

    INPUTS
        time: [day of the year]
        lat: [-90<lat<90]
        lon: [-180<lon<180]
        depth: [meters]

    Reads the WOA Monthly Climatology NetCDF file and returns the
    corresponding WOA values of salinity or temperature mean and
    standard deviation for the given time, lat, lon, depth.
    """
    if lon < 0:
        lon = lon + 360
    url = cfg['url']
    doy = int(d.strftime('%j'))
    dataset = open_url(url)
    dn = (np.abs(doy - dataset['time'][:])).argmin()
    xn = (np.abs(lon - dataset['lon'][:])).argmin()
    yn = (np.abs(lat - dataset['lat'][:])).argmin()
    if re.match(r'temperature\d?$', var):
        mn = ma.masked_values(dataset.t_mn.t_mn[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.t_mn.attributes['_FillValue'])
        sd = ma.masked_values(dataset.t_sd.t_sd[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.t_sd.attributes['_FillValue'])
        # se = ma.masked_values(dataset.t_se.t_se[dn, :, yn, xn].reshape(
        #     dataset['depth'].shape[0]), dataset.t_se.attributes['_FillValue'])
        # Use this in the future. A minimum number of samples:
        # dd = ma.masked_values(dataset.t_dd.t_dd[dn, :, yn, xn].reshape(
        #     dataset['depth'].shape[0]), dataset.t_dd.attributes['_FillValue'])
    elif re.match(r'salinity\d?$', var):
        mn = ma.masked_values(dataset.s_mn.s_mn[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.s_mn.attributes['_FillValue'])
        sd = ma.masked_values(dataset.s_sd.s_sd[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.s_sd.attributes['_FillValue'])
        # dd = ma.masked_values(dataset.s_dd.s_dd[dn, :, yn, xn].reshape(
        #     dataset['depth'].shape[0]), dataset.s_dd.attributes['_FillValue'])
    zwoa = ma.array(dataset.depth[:])
    ind = (depth <= zwoa.max()) & (depth >= zwoa.min())
    # Mean value profile
    f = interp1d(zwoa[~ma.getmaskarray(mn)].compressed(), mn.compressed())
    mn_interp = ma.masked_all(depth.shape)
    mn_interp[ind] = f(depth[ind])
    # The stdev profile
    f = interp1d(zwoa[~ma.getmaskarray(sd)].compressed(), sd.compressed())
    sd_interp = ma.masked_all(depth.shape)
    sd_interp[ind] = f(depth[ind])
    output = {'woa_an': mn_interp, 'woa_sd': sd_interp}
    return output
def test_ma():
    x = ma.array([1.0, 2, 3])
    assert K(x) == k('1.0 2 3')
    x = ma.masked_values([1.0, 0, 2], 0)
    assert K(x) == k('1 0n 2')
    s = ma.masked_values(0.0, 0)
    assert K(s) == k('0n')
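# For reference, a small sketch of the ma.masked_values semantics exercised by
# test_ma above: matching is by approximate floating-point equality, with
# atol/rtol controlling the tolerance. The values here are illustrative.
import numpy.ma as ma

x = ma.masked_values([1.0, 0.0, 2.0], 0)
assert list(x.mask) == [False, True, False]

y = ma.masked_values([1.0, 1e-9, 2.0], 0.0, atol=1e-8)
assert list(y.mask) == [False, True, False]  # 1e-9 is within atol of 0.0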
def AddBarbsMap(self, m, data):
    """Add wind barbs for continuous data to the base map.

    :param m: base map
    :param data: continuous data (wind speed, wind direction, pressure)
    :return:
    """
    wind_speed = data[0]
    wind_direction = data[1]
    pressure = data[2]
    wind_mdirection = ma.masked_values(wind_direction, -999)
    wind_mspeed = ma.masked_values(wind_speed, -999)
    mpressure = ma.masked_values(pressure, -999)
    x, y = m(self.lon, self.lat)
    U = -wind_mspeed * np.sin(np.deg2rad(wind_mdirection)) * 2.5
    V = -wind_mspeed * np.cos(np.deg2rad(wind_mdirection)) * 2.5
    # Draw barbs in three pressure layers, colour-coded by level.
    low = mpressure < 400
    mid = (mpressure >= 400) & (mpressure <= 700)
    high = mpressure > 700
    m.barbs(x[low], y[low], U[low], V[low],
            pivot='middle', flagcolor='red', lw=0.5, length=5)
    m.barbs(x[mid], y[mid], U[mid], V[mid],
            pivot='middle', flagcolor='#00ff00', lw=0.5, length=5)
    m.barbs(x[high], y[high], U[high], V[high],
            pivot='middle', flagcolor='#0009fa', lw=0.5, length=5)
    # Legend swatches and labels.
    colors = ["red", "#00ff00", "#0009fa"]
    names = ["<400hPa", "400~700hPa", ">700hPa"]
    for i in range(len(colors)):
        if i > 5:
            xx = 0.1 + 0.15 * (i - 6)
            yy = 0.1
        else:
            xx = 0.1 + 0.15 * i
            yy = 0.4
        CMap.axes2.add_patch(plt.Rectangle((xx, yy), 0.04, 0.2, color=colors[i]))
        CMap.axes2.text(xx + 0.04, yy + 0.05, names[i], fontsize=58)
    return True
def npTable(target):
    # Load into a NumPy array. If the side of pier is 'West' return 1, else return 0.
    # Don't bother reading in column 4; the data there have a weird, undocumented format.
    t = np.loadtxt(target, skiprows=10,
                   converters={2: lambda s: True if s == 'West' else False},
                   usecols=[i for i in range(0, 36) if i != 4])
    # Mask bad values.
    tt = ma.masked_values(t, -99.0)
    ttt = ma.masked_values(tt, 999)
    return ttt
def viz_ress(n, vname):
    N = pyart.io.read(n)
    display = pyart.graph.RadarMapDisplay(N)
    x = N.fields[vname]['data']
    m = np.zeros_like(x)
    m[:, 120:] = 1
    y = ma.masked_array(x, m)
    # Mask the classes that are not plotted.
    for value in (0.0, 10.0, 20.0, 50.0, 70.0, 90.0, 100.0, 140.0, 150.0):
        y = ma.masked_values(y, value)
    # Remap the remaining classes onto 0..3 for a discrete colour map.
    y = np.where(y == 80, 0, y)
    y = np.where(y == 40, 1, y)
    y = np.where(y == 30, 2, y)
    y = np.where(y == 60, 3, y)
    y = ma.masked_where(y > 3, y)
    N.fields[vname]['data'] = y
    fig = plt.figure(figsize=(6, 5))
    ax = fig.add_subplot(111)
    #display.plot(vname, 0, title=vname, colorbar_label='', ax=ax)
    display.plot(vname, 0, title=vname, colorbar_label='',
                 ticks=range(4),
                 ticklabs=['Big Drops', 'Dry Snow', 'Ice Crystals', 'Rain'],
                 ax=ax, vmin=-0.5, vmax=3.5, cmap=discrete_cmap(4, 'rainbow'))
    display.set_limits(xlim=(-40, 40), ylim=(-40, 40), ax=ax)
    plt.show()
    fig.savefig("./tmp_test/" + vname + ".png", bbox_inches='tight')
def DrawNOMMap(data, lat, lon, dataSour, titlename, filename,
               level=np.arange(-5, 36, 1)):
    if dataSour == 'FY2F':
        lon0 = 112.
    elif dataSour == 'FY2G':
        lon0 = 104.5
    elif dataSour == 'FY2H':
        lon0 = 79.
    fig = plt.figure(figsize=(27.48, 30), dpi=100)  # image size 2748 x 3000 px
    # Two panels: the map area and the legend area. The four parameters are
    # the left/bottom margins (as figure fractions) and the axes width/height.
    axes1 = fig.add_axes([0., 0.084, 1., 0.916])
    axes2 = fig.add_axes([0., 0., 1., 0.084], facecolor='#e8e8e8')
    cax = fig.add_axes([0.1, 0.03, 0.65, 0.02])
    mlat = ma.masked_values(lat, 99999.)
    mlon = ma.masked_values(lon, 99999.)
    # resolution: c (crude), l (low), i (intermediate), h (high), f (full)
    m = Basemap(projection='nsper', lat_0=0, lon_0=lon0, resolution='l', ax=axes1)
    x, y = m(mlon, mlat)
    m.drawcoastlines()
    m.drawcountries()
    # m.drawlsmask(land_color='#bebebe', ocean_color='#01008a')  # land/ocean colours
    m.drawparallels(range(-90, 90, 10))
    m.drawmeridians(range(0, 360, 10))
    # Contour levels, e.g. levels=np.arange(250, 355, 5)
    cs = m.contourf(x, y, data, cmap=plt.cm.jet, levels=level)
    cb = plt.colorbar(cs, cax=cax, orientation='horizontal')
    cb.ax.tick_params(labelsize=28)
    axes2.spines['left'].set_visible(False)
    axes2.spines['top'].set_visible(False)
    axes2.text(0.76, 0.4, "Unit:(" + u'\u2103' + ")", fontsize=32)
    cax.set_title(titlename, family='Times New Roman', fontsize=42,
                  fontweight='bold', pad=20)
    # Add the logo.
    axicon = fig.add_axes([0.85, 0.01, 0.15, 0.05])
    axicon.imshow(plt.imread('/FY4APGSQCS/QCS/pyFY4AL2src/Logo/logo.jpg'),
                  origin='upper')
    axicon.axis('off')
    fig.savefig(filename)
    plt.close()
    return 0
def test_interval_missing_observations():
    with Model() as model:
        obs1 = ma.masked_values([1, 2, -1, 4, -1], value=-1)
        obs2 = ma.masked_values([-1, -1, 6, -1, 8], value=-1)

        rng = aesara.shared(np.random.RandomState(2323), borrow=True)

        with pytest.warns(ImputationWarning):
            theta1 = Uniform("theta1", 0, 5, observed=obs1, rng=rng)
        with pytest.warns(ImputationWarning):
            theta2 = Normal("theta2", mu=theta1, observed=obs2, rng=rng)

        assert "theta1_observed" in model.named_vars
        assert "theta1_missing_interval__" in model.named_vars
        assert not hasattr(
            model.rvs_to_values[model.named_vars["theta1_observed"]].tag, "transform"
        )

        prior_trace = sample_prior_predictive(return_inferencedata=False)

        # Make sure the observed + missing combined deterministics have the
        # same shape as the original observations vectors
        assert prior_trace["theta1"].shape[-1] == obs1.shape[0]
        assert prior_trace["theta2"].shape[-1] == obs2.shape[0]

        # Make sure that the observed values are newly generated samples
        assert np.all(np.var(prior_trace["theta1_observed"], 0) > 0.0)
        assert np.all(np.var(prior_trace["theta2_observed"], 0) > 0.0)

        # Make sure the missing parts of the combined deterministic match the
        # sampled missing and observed variable values
        assert np.mean(prior_trace["theta1"][:, obs1.mask] - prior_trace["theta1_missing"]) == 0.0
        assert np.mean(prior_trace["theta1"][:, ~obs1.mask] - prior_trace["theta1_observed"]) == 0.0
        assert np.mean(prior_trace["theta2"][:, obs2.mask] - prior_trace["theta2_missing"]) == 0.0
        assert np.mean(prior_trace["theta2"][:, ~obs2.mask] - prior_trace["theta2_observed"]) == 0.0

        assert {"theta1", "theta2"} <= set(prior_trace.keys())

        trace = sample(
            chains=1, draws=50, compute_convergence_checks=False, return_inferencedata=False
        )

        assert np.all(0 < trace["theta1_missing"].mean(0))
        assert np.all(0 < trace["theta2_missing"].mean(0))
        assert "theta1" not in trace.varnames
        assert "theta2" not in trace.varnames

        # Make sure that the observed values are newly generated samples and
        # that the observed and deterministic values match
        pp_trace = sample_posterior_predictive(trace, return_inferencedata=False, keep_size=False)
        assert np.all(np.var(pp_trace["theta1"], 0) > 0.0)
        assert np.all(np.var(pp_trace["theta2"], 0) > 0.0)
        assert np.mean(pp_trace["theta1"][:, ~obs1.mask] - pp_trace["theta1_observed"]) == 0.0
        assert np.mean(pp_trace["theta2"][:, ~obs2.mask] - pp_trace["theta2_observed"]) == 0.0
def fine_fuel(rs_fname):
    """Opens a named restart file, sums the fine fuel variables over all
    PFTs. This results in a map of fuel loading."""
    d = nc.Dataset(rs_fname)
    met = d.variables["fuel_1hr_met"]
    struct = d.variables["fuel_1hr_str"]
    met_raw = ma.masked_values(met[:], met.missing_value)
    met_map = np.sum(met_raw, axis=(0, 1))
    struct_raw = ma.masked_values(struct[:], struct.missing_value)
    struct_map = np.sum(struct_raw, axis=(0, 1))
    d.close()
    return met_map + struct_map
def use_netcdf_files():
    nc = netcdf.netcdf_file('/home/nicholas/data/netcdf_files/CFLX_2000_2009.nc', 'r')
    all_data = nc.variables['Cflx'][:, :45, :180]
    nc.close()
    all_data = all_data * 1000 * 24 * 60 * 60
    all_data = ma.masked_values(all_data, 1e20)
    nc = netcdf.netcdf_file('/home/nicholas/data/netcdf_files/ORCA2.0_grid.nc', 'r')
    mask = nc.variables['mask'][0, :45, :180]
    nc.close()
    mask = ma.masked_values(mask, -1e34)
    return all_data, mask
def test_internal_missing_observations():
    with Model() as model:
        obs1 = ma.masked_values([1, 2, -1, 4, -1], value=-1)
        obs2 = ma.masked_values([-1, -1, 6, -1, 8], value=-1)
        with pytest.warns(ImputationWarning):
            theta1 = Normal('theta1', mu=2, observed=obs1)
        with pytest.warns(ImputationWarning):
            theta2 = Normal('theta2', mu=theta1, observed=obs2)

        prior_trace = sample_prior_predictive()
        assert {'theta1', 'theta2'} <= set(prior_trace.keys())
        sample()
def plot_res(n0h, n0c, n0k, n0r, n0x, results, labels):
    viz_res(n0c, 'cross_correlation_ratio')
    viz_res(n0k, 'specific_differential_phase')
    viz_res(n0x, 'differential_reflectivity')
    viz_resr(n0r, 'reflectivity')
    viz_ress(n0h, 'radar_echo_classification', labels)
    N0H = pyart.io.read(n0h)
    display_h = pyart.graph.RadarMapDisplay(N0H)
    data_n0h = N0H.fields['radar_echo_classification']['data']
    m = np.zeros_like(data_n0h)
    m[:, 180:] = 1
    y = ma.masked_array(data_n0h, m)
    # Mask the classes that are not plotted.
    for value in (0.0, 10.0, 20.0, 50.0, 70.0, 90.0, 100.0, 120.0, 140.0, 150.0):
        y = ma.masked_values(y, value)
    results = ma.masked_where(ma.getmask(y[:, :180]), results)
    # Paste the classified 30x30 tiles back into the field.
    for j in range(len(idx)):
        for k in range(len(idy)):
            r1 = idx[j]
            c1 = idy[k]
            y[r1:r1 + 30, c1:c1 + 30] = results[r1:r1 + 30, c1:c1 + 30]
    N0H.fields['radar_echo_classification']['data'] = y
    fig = plt.figure(figsize=(6, 5))
    ax = fig.add_subplot(111)
    display_h.plot('radar_echo_classification', 0, title='classification results',
                   colorbar_label='', ticks=range(4),
                   ticklabs=['Big Drops', 'Dry Snow', 'Ice Crystals', 'Rain'],
                   ax=ax, vmin=-0.5, vmax=3.5, cmap=discrete_cmap(4, 'rainbow'))
    #display_h.plot('radar_echo_classification', 0, title='classification results', colorbar_label='', ax=ax, cmap=cMap)
    display_h.set_limits(xlim=(-50, 50), ylim=(-50, 50), ax=ax)
    plt.show()
    fig.savefig("res.png", bbox_inches='tight')
def eke(cutperiod=360, dt=7, verbose=False):
    """
    Include the possibility to run with a different dataset, like an
    anomaly or reference field.

    ATTENTION: need to move the user and password out of here.
    """
    from maud import window_1Dmean
    l = cutperiod * 24  # From days to hours. Aviso time is in hours.
    url_uv = "http://*****:*****@opendap.aviso.oceanobs.com/thredds/dodsC/dataset-duacs-dt-upd-global-merged-madt-uv-daily"
    dataset = open_url(url_uv)
    T, I, J = dataset.Grid_0001.shape
    eke = ma.masked_all((I, J))
    I, J = numpy.nonzero(
        ma.masked_values(dataset.Grid_0001.Grid_0001[-300::60, :, :],
                         dataset.Grid_0001.attributes['_FillValue']).max(axis=0))
    t = ma.array(dataset.time[::dt])
    if verbose:
        from progressbar import ProgressBar
        pbar = ProgressBar(maxval=I.shape[0]).start()
        n = -1
    for i, j in zip(I, J):
        if verbose:
            n += 1
            pbar.update(n)
        doit = True
        while doit:
            try:
                u = ma.masked_values(dataset.Grid_0001.Grid_0001[::dt, i, j],
                                     dataset.Grid_0001.attributes['_FillValue']) * 1e-2
                v = ma.masked_values(dataset.Grid_0002.Grid_0002[::dt, i, j],
                                     dataset.Grid_0002.attributes['_FillValue']) * 1e-2
                u_prime = u - window_1Dmean(u, l=l, t=t, axis=0)
                v_prime = v - window_1Dmean(v, l=l, t=t, axis=0)
                eke[i, j] = (u_prime**2 + v_prime**2).mean() / 2.
                doit = False
            except Exception:
                print("I had some trouble. I'll wait a little bit and try again")
                time.sleep(10)
    if verbose:
        pbar.finish()
    return eke
def unityOfMatch(m1, m2):
    """
    @param m1 Nx1 list of index locations (or -1) for second image that match to first
    @param m2 Mx1 list of index locations (or -1) for first image that match to second
    @return (indexes of rel feats for im1, indexes of rel feats for im2)
    """
    m1m = ma.masked_values(m1.T[0], -1).astype(int)
    m2m = ma.masked_values(m2.T[0], -1).astype(int)
    # A match is mutual when following m2 back through m1 lands on itself.
    whereMatch = np.logical_and(np.equal(m1m[m2m], np.arange(m2m.size)), m2m)
    locOfGoods = np.where(np.logical_and(whereMatch, np.not_equal(m2m, -1)))[0].astype(int)
    return m2.T[0].astype(int)[locOfGoods], locOfGoods
def test_missing_dual_observations():
    with Model() as model:
        obs1 = ma.masked_values([1, 2, -1, 4, -1], value=-1)
        obs2 = ma.masked_values([-1, -1, 6, -1, 8], value=-1)
        beta1 = Normal('beta1', 1, 1)
        beta2 = Normal('beta2', 2, 1)
        latent = Normal('theta', shape=5)
        with pytest.warns(ImputationWarning):
            ovar1 = Normal('o1', mu=beta1 * latent, observed=obs1)
        with pytest.warns(ImputationWarning):
            ovar2 = Normal('o2', mu=beta2 * latent, observed=obs2)

        prior_trace = sample_prior_predictive()
        assert {'beta1', 'beta2', 'theta', 'o1', 'o2'} <= set(prior_trace.keys())
        sample()
def get_trac(self):
    """Return traction control"""
    if not hasattr(self, "trac"):
        self.trac = None
        if "TCSB" in self.data_dict:
            self.trac = np.array(self.data_dict["TCSB"], dtype=np.short)
            self.trac = ma.masked_values(self.trac, UmtriImo2.In_missing)
            self.trac = self.trac + 1
            self.trac = self.trac.filled(UmtriImo2.Out_missing)
        elif "TCS" in self.data_dict:
            self.trac = np.array(self.data_dict["TCS"], dtype=np.short)
            self.trac = ma.masked_values(self.trac, UmtriImo2.In_missing)
            self.trac = self.trac + 1
            self.trac = self.trac.filled(UmtriImo2.Out_missing)
    return self.trac
def get_brake(self):
    """Return brake status"""
    if not hasattr(self, "brake"):
        self.brake = None
        if "BR" in self.data_dict:
            self.brake = np.array(self.data_dict["BR"], dtype=np.short)
            self.brake = ma.masked_values(self.brake, UmtriImo2.In_missing)
            self.brake = self.brake + 1
            self.brake = self.brake.filled(UmtriImo2.Out_missing)
        elif "BRK" in self.data_dict:
            self.brake = np.array(self.data_dict["BRK"], dtype=np.short)
            self.brake = ma.masked_values(self.brake, UmtriImo2.In_missing)
            self.brake = self.brake + 1
            self.brake = self.brake.filled(UmtriImo2.Out_missing)
    return self.brake
def _prepare_masked(data, masked, nodata, dtype):
    # The original branched on data.shape == data.mask.shape, but both
    # branches were identical, so the check is dropped here.
    if masked:
        return ma.masked_values(data.astype(dtype, copy=False), nodata, copy=False)
    else:
        return ma.filled(data.astype(dtype, copy=False), nodata)
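# A hedged illustration (plain NumPy, hypothetical values) of the
# masked/filled round-trip that _prepare_masked switches between: masking a
# nodata sentinel and filling it back are lossless inverses.
import numpy as np
import numpy.ma as ma

nodata = -9999.0
raw = np.array([1.0, nodata, 3.0])
masked = ma.masked_values(raw, nodata)  # nodata cells become masked
filled = ma.filled(masked, nodata)      # filling restores the sentinel
assert np.array_equal(filled, raw)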
def get_stab(self):
    """Return stability"""
    if not hasattr(self, "stab"):
        self.stab = None
        if "ESP" in self.data_dict:
            self.stab = np.array(self.data_dict["ESP"], dtype=np.short)
            self.stab = ma.masked_values(self.stab, UmtriImo2.In_missing)
            self.stab = self.stab + 1
            self.stab = self.stab.filled(UmtriImo2.Out_missing)
        elif "ESC" in self.data_dict:
            # The original assigned to self.brake in this branch, which looks
            # like a copy-paste slip; the ESC channel should populate self.stab.
            self.stab = np.array(self.data_dict["ESC"], dtype=np.short)
            self.stab = ma.masked_values(self.stab, UmtriImo2.In_missing)
            self.stab = self.stab + 1
            self.stab = self.stab.filled(UmtriImo2.Out_missing)
    return self.stab
def plot_map_binary(truth, prediction):
    if truth.shape != prediction.shape:
        print("Error: Expecting truth and prediction arrays to have same size "
              "but received shapes {} and {}".format(str(truth.shape),
                                                     str(prediction.shape)))
        return
    h, w = truth.shape[0], truth.shape[1]
    tp = np.zeros(truth.shape)
    fp = np.zeros(truth.shape)
    fn = np.zeros(truth.shape)
    tn = np.zeros(truth.shape)
    for i in range(0, h):
        for j in range(0, w):
            if truth[i, j] == 255 and prediction[i, j] == 255:
                tp[i, j] = 1
            elif truth[i, j] == 255 and prediction[i, j] == 0:
                fn[i, j] = 1
            elif truth[i, j] == 0 and prediction[i, j] == 255:
                fp[i, j] = 1
            else:
                tn[i, j] = 1
    iou = np.sum(tp) / (np.sum(tp) + np.sum(fp) + np.sum(fn))
    dice = (2 * np.sum(tp)) / (np.sum(tp) + np.sum(fp) + np.sum(tp) + np.sum(fn))
    # Scale each component so the masked overlays get distinct colours.
    plot_tp = ma.masked_values(tp * 100, 0)
    plot_fp = ma.masked_values(fp * 50, 0)
    plot_fn = ma.masked_values(fn, 0)
    plt.imshow(plot_tp, cmap="brg", vmin=0.1)
    plt.imshow(plot_fp, cmap="brg", vmin=0.1, vmax=100)
    plt.imshow(plot_fn, cmap="brg", vmin=0.1, vmax=90)
    plt.title("IoU: {}, Dice: {}".format(str(iou), str(dice)))
    plt.show()
def load_data(self):
    """
    There is probably a better way to do this. Should nvalues be used as
    the expected number of rows, or should loading be free-form, with the
    checks validating it afterwards? For an incomplete file, load it
    anyway and let the check alert that data are missing.

    The atol below is a temporary workaround, not the proper way to
    handle the bad_flag comparison.
    """
    data_rows = re.sub(r'(\n\s*)+\n', '\n',
                       re.sub(r'\r\n', '\n', self.raw_data()['data'])
                       ).split('\n')[:-1]
    data = ma.masked_values(
        np.array([CNV.__split_row(d) for d in data_rows], dtype=float),
        float(self.attributes['bad_flag']),
        atol=1e-30)
    # Maybe use np.fromstring(data, sep=" ") instead.
    for i in self.ids:
        attributes = self.data[i].attributes
        self.data[i] = data[:, i]
        self.data[i].attributes = attributes
def measure_psf(vignet, pixscale=1., show=False, mask_value=None):
    y, x = np.mgrid[-vignet.shape[0] / 2:vignet.shape[0] / 2,
                    -vignet.shape[1] / 2:vignet.shape[1] / 2] * pixscale
    if mask_value:
        vignet = ma.masked_values(vignet, mask_value).filled(0)
    # Fit the data using astropy.modeling
    p_init = models.Gaussian2D(amplitude=vignet.max(), x_mean=0., y_mean=0.,
                               x_stddev=2 * pixscale, y_stddev=2 * pixscale,
                               theta=0, cov_matrix=None)
    fit_p = fitting.LevMarLSQFitter()
    p = fit_p(p_init, x, y, vignet)
    barycenter = measure_barycenter(vignet, pixscale=pixscale)
    # Plot the data with the best-fit model
    P.figure(figsize=(8, 2.5))
    P.subplot(1, 3, 1)
    P.imshow(vignet, origin='lower', interpolation='nearest',
             vmin=vignet.min(), vmax=vignet.max())
    P.title("Data")
    P.subplot(1, 3, 2)
    P.imshow(p(x, y), origin='lower', interpolation='nearest',
             vmin=vignet.min(), vmax=vignet.max())
    P.scatter(vignet.shape[0] / 2, vignet.shape[1] / 2, marker="+")
    P.annotate("({:.3f},{:.3f})".format(*barycenter),
               (vignet.shape[0] / 3, vignet.shape[1] / 3))
    P.title("Model - psf = {:.2f}".format(
        2.3548 * np.mean([p.x_stddev.value, p.y_stddev.value])))
    P.subplot(1, 3, 3)
    P.imshow(vignet - p(x, y), origin='lower', interpolation='nearest',
             vmin=-vignet.max() / 10, vmax=vignet.max() / 10)
    P.title("Residual")
    P.tight_layout()
    if show:
        P.show()
    return p
def test_missing_dual_observations():
    with Model() as model:
        obs1 = ma.masked_values([1, 2, -1, 4, -1], value=-1)
        obs2 = ma.masked_values([-1, -1, 6, -1, 8], value=-1)
        beta1 = Normal("beta1", 1, 1)
        beta2 = Normal("beta2", 2, 1)
        latent = Normal("theta", size=5)
        with pytest.warns(ImputationWarning):
            ovar1 = Normal("o1", mu=beta1 * latent, observed=obs1)
        with pytest.warns(ImputationWarning):
            ovar2 = Normal("o2", mu=beta2 * latent, observed=obs2)

        prior_trace = sample_prior_predictive(return_inferencedata=False)
        assert {"beta1", "beta2", "theta", "o1", "o2"} <= set(prior_trace.keys())
        # TODO: Assert something
        trace = sample(chains=1, draws=50)
def _get_band_from_image(self, band_num):
    """Extract a band from an image and apply masking."""
    if isinstance(self.__image, rasterio.DatasetReader):
        raster = self.__image
    elif isinstance(self.__image, rasterio.io.MemoryFile):
        raster = self.__image.open()
    else:
        raster = rasterio.open(os.path.normpath(self.__image))

    mask_band = 1
    if self.band_map['mask'] > 0:
        mask_band = self.band_map['mask']

    if raster.nodata is not None:
        mask = raster.read(mask_band, masked=True).mask
    elif self.__src_nodata is not None:
        mask = ma.masked_values(raster.read(mask_band, masked=True),
                                self.__src_nodata).mask
    else:
        mask = np.zeros(shape=(raster.height, raster.width), dtype=bool)

    band = np.where(~mask, raster.read(band_num), np.nan).astype('float32')
    if not isinstance(self.__image, rasterio.DatasetReader):
        raster.close()
    return band
def test_mask(self):
    NOx = marray([4.818, 2.849, 3.275, 4.691, 4.255, 5.064, 2.118, 4.602,
                  2.286, 0.970, 3.965, 5.344, 3.834, 1.990, 5.199, 5.283,
                  -9999, -9999, 3.752, 0.537, 1.640, 5.055, 4.937, 1.561])
    NOx = maskedarray.masked_values(NOx, -9999)
    E = marray([0.831, 1.045, 1.021, 0.970, 0.825, 0.891, 0.71, 0.801,
                1.074, 1.148, 1.000, 0.928, 0.767, 0.701, 0.807, 0.902,
                -9999, -9999, 0.997, 1.224, 1.089, 0.973, 0.980, 0.665])
    gas_fit_E = numpy.array([0.665, 0.949, 1.224])
    newdata = numpy.array([0.6650000, 0.7581667, 0.8513333, 0.9445000,
                           1.0376667, 1.1308333, 1.2240000])
    coverage = 0.99
    rfile = open(os.path.join('tests', 'gas_result'), 'r')
    results = []
    for i in range(8):
        rfile.readline()
        z = fromiter((float(v) for v in rfile.readline().rstrip().split()),
                     float_)
        results.append(z)
    #
    gas = loess(E, NOx)
    gas.model.span = 2. / 3.
    gas.fit()
    assert_almost_equal(gas.outputs.fitted_values.compressed(), results[0], 6)
    assert_almost_equal(gas.outputs.enp, 5.5, 1)
    assert_almost_equal(gas.outputs.s, 0.3404, 4)
def geometric_mean(array, axis=0):
    """Return the geometric mean of an array, removing all zero-values
    but retaining total length.
    """
    non_zero = ma.masked_values(array, 0)
    log_a = ma.log(non_zero)
    return ma.exp(log_a.mean(axis=axis))
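# A quick usage sketch for geometric_mean: zeros drop out of the mean-of-logs
# while the other axis keeps its full length. The counts are illustrative.
import numpy as np
import numpy.ma as ma

counts = np.array([[2.0, 0.0, 8.0],
                   [4.0, 16.0, 8.0]])
gm = ma.exp(ma.log(ma.masked_values(counts, 0)).mean(axis=0))
# gm -> [2.828..., 16.0, 8.0]; the zero contributes nothing to column 1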
def test_convert_to_annual(self):
    "Test convert_to_annual"
    base = dict(D=1, H=24, T=24 * 60, S=24 * 3600)
    # Don't test for minuTe and Second frequency, too time consuming.
    for fq in ('D', 'H'):
        dates = date_array(start_date=Date(fq, '2001-01-01 00:00:00'),
                           end_date=Date(fq, '2004-12-31 23:59:59'))
        bq = base[fq]
        series = time_series(list(range(365 * bq)) * 3 + list(range(366 * bq)),
                             dates=dates)
        control = ma.masked_all((4, 366 * bq), dtype=series.dtype)
        control[0, :58 * bq] = list(range(58 * bq))
        control[0, 59 * bq:] = list(range(58 * bq, 365 * bq))
        control[[1, 2]] = control[0]
        control[3] = list(range(366 * bq))
        test = convert_to_annual(series)
        assert_equal(test, control)
    #
    series = time_series(list(range(59, 365)) + list(range(366)) + list(range(365)),
                         start_date=Date('D', '2003-03-01'))
    test = convert_to_annual(series)
    assert_equal(test[:, 59:62],
                 ma.masked_values([[-1, 59, 60], [59, 60, 61], [-1, 59, 60]], -1))
def reproject2(src, data, resolution, resampling):
    meta = src.meta.copy()
    if not src.crs.is_valid:
        crs = src.crs.from_string(u'epsg:4326')
    else:
        crs = src.crs
    newaff, width, height = rwarp.calculate_default_transform(
        crs, crs, src.width, src.height, *src.bounds, resolution=resolution)
    out = ma.empty((src.count, int(height), int(width)), dtype=meta['dtype'])
    newarr = np.empty((int(height), int(width)), dtype=meta['dtype'])
    meta.update({'transform': newaff,
                 'width': int(width),
                 'height': int(height),
                 'nodata': src.nodata})
    for idx in range(data.shape[0]):
        rwarp.reproject(source=data[idx],
                        destination=newarr,
                        src_transform=src.transform,
                        dst_transform=newaff,
                        src_crs=src.crs,
                        dst_crs=crs,
                        src_nodata=src.nodatavals[idx],
                        dst_nodata=src.nodatavals[idx],
                        resampling=resampling)
        out[idx] = ma.masked_values(newarr, src.nodatavals[idx])
    return meta, out
def __call__(self, sess, epoch, iteration, model, loss):
    if iteration == 0 and epoch % self.at_every_epoch == 0:
        total = 0
        total_old = 0
        correct_old = 0
        correct = 0
        for values in self.batcher:
            total_old += len(values[-1])
            feed_dict = {}
            for i in range(0, len(self.placeholders)):
                feed_dict[self.placeholders[i]] = values[i]
            truth = np.argmax(values[-1], 1)
            # Mask out the neutral class (label 0) from the truth.
            truth_noneutral = ma.masked_values(truth, 0)
            truth_noneutral_compr = truth_noneutral.compressed()
            predicted = sess.run(tf.arg_max(tf.nn.softmax(model), 1),
                                 feed_dict=feed_dict)
            pred_nonneutral = ma.array(predicted, mask=truth_noneutral.mask)
            pred_nonneutral_compr = pred_nonneutral.compressed()
            correct_old += sum(truth == predicted)
            correct += sum(truth_noneutral_compr == pred_nonneutral_compr)
            total += len(truth_noneutral_compr)
        acc = float(correct) / total
        self.update_summary(sess, iteration, "AccurayNonNeut", acc)
        print("Epoch " + str(epoch) + "\tAccNonNeut " + str(acc) +
              "\tCorrect " + str(correct) + "\tTotal " + str(total))
        return acc
    return 0.0
def around_profile(self, t, lon, lat, var, rmax):
    """
    ATTENTION: there are a lot of problems with this approach over large
    distances, including near the borders and the poles. Improve it to
    actually calculate the distances.
    """
    nt = numpy.absolute(
        ma.array([d.toordinal() for d in self['datetime']]) - t.toordinal()
    ).argmin()
    #from fluid.common.distance import distance
    #Lon, Lat = numpy.meshgrid(model_ref['xt_ocean'], model_ref['yt_ocean'])
    ##L = distance(lon=Lon, lat=Lat, lon_c=lon, lat_c=lat)
    #fac = numpy.cos((lat+Lat)/2.*numpy.pi/180)
    #L = ((Lat-lat)**2+((Lon-lon)*fac)**2)**.5
    #L = L*60*1852
    nX = numpy.arange(self['xt_ocean'].shape[0])[
        abs(self['xt_ocean'] - lon) < (rmax / 1856. / 60.)]
    nY = numpy.arange(self['yt_ocean'].shape[0])[
        abs(self['yt_ocean'] - lat) < (rmax / 1856. / 60.)]
    profile = {'depth': self['st_ocean']}
    profile['lon'] = self['xt_ocean'][nX[0]:nX[-1] + 1]
    profile['lat'] = self['yt_ocean'][nY[0]:nY[-1] + 1]
    profile[var] = ma.masked_values(
        self.dataset.variables[var][nt, :, nY[0]:nY[-1] + 1, nX[0]:nX[-1] + 1],
        value=self.dataset.variables[var].missing_value)
    #profile = ma.masked_values(self.dataset[var][nt,:,nY:nY+1,nX:nX+1], value=self.dataset[var].missing_value)
    return profile
def run(self):
    # Get inputs
    outname = str(self.ui.lineOutfile.text())
    eqstring = str(self.ui.textEqEdit.toPlainText())
    # Basic user validation
    if len(eqstring) < 1:
        sys.stderr.write('Error: No equation to process.\n')
    elif self.ui.listWidget_Layers.count() < 1:
        sys.stderr.write('Error: No input files.\n')
    # Process to new file
    else:
        try:
            # Test if the output box is checked
            if not self.ui.checkBoxGenerateOutput.isChecked():
                if len(outname) < 1:
                    sys.stderr.write('Error: No output filename specified.\n')
                else:
                    newband = eval(eqstring)
                    newband = ma.masked_values(newband, 9999.0)
                    epsg = rasterIO.wkt2epsg(proj)
                    # set up dictionaries of gdal formats and drivers
                    formats = {'GeoTiff (.tif)': '.tif', 'Erdas Imagine (.img)': '.img'}
                    drivers = {'GeoTiff (.tif)': 'GTiff', 'Erdas Imagine (.img)': 'HFA'}
                    out_ext = formats[str(self.ui.comboFormats.currentText())]
                    driver = drivers[str(self.ui.comboFormats.currentText())]
                    outfile = outname + out_ext
                    rasterIO.writerasterband(newband, outfile, driver,
                                             XSize, YSize, geotrans, epsg)
                    sys.stdout.write('Process complete, created newfile ')
                    sys.stdout.write(str(outfile))
                    sys.stdout.write('\n')
                    if self.ui.checkBoxQGIS.isEnabled():
                        qgis.utils.iface.addRasterLayer(outfile)
                    self.ui.textPyout.insertPlainText('# create a new matrix from equation\n')
                    self.ui.textPyout.insertPlainText('newband = %s\n' % (eqstring))
                    self.ui.textPyout.insertPlainText('# get the epsg code from the projection\n')
                    self.ui.textPyout.insertPlainText('epsg = rasterIO.wkt2epsg(proj)\n')
                    self.ui.textPyout.insertPlainText('# set the gdal driver / output file type\n')
                    self.ui.textPyout.insertPlainText('driver = "%s"\n' % (driver))
                    self.ui.textPyout.insertPlainText('# specify the new output file\n')
                    self.ui.textPyout.insertPlainText('outfile = "%s"\n' % (outfile))
                    self.ui.textPyout.insertPlainText('# write the new matrix to the new file\n')
                    self.ui.textPyout.insertPlainText('rasterIO.writerasterband(newband, outfile, driver, XSize, YSize, geotrans, epsg)\n\n')
                    self.ui.textPyout.insertPlainText('# add the new file to qgis\n')
                    self.ui.textPyout.insertPlainText('qgis.utils.iface.addRasterLayer(outfile)\n\n')
            else:
                outputstring = str(eval(str(self.ui.textEqEdit.toPlainText()))) + '\n'
                self.ui.textInformation.setTextColor(QtGui.QColor(0, 0, 255))
                self.ui.textInformation.insertPlainText(outputstring)
                self.ui.textInformation.moveCursor(QtGui.QTextCursor.End)
                self.ui.textPyout.insertPlainText('# run without output file\n')
                self.ui.textPyout.insertPlainText('print %s\n\n' % (eqstring))
        except ValueError:
            sys.stderr.write('Error: Could not perform calculation. Are input rasters same shape and size? Is the output a matrix?\n')
        except TypeError:
            sys.stderr.write('Error: Could not perform calculation. Are input rasters loaded?\n')
        except SyntaxError:
            sys.stderr.write('Error: Could not perform calculation. Is the equation correct?\n')
        except AttributeError:
            sys.stderr.write('Error: Could not perform calculation. Is the output raster correct?\n')
def readrasterband(dataset, aband):
    '''Accepts GDAL raster dataset and band number, returns Numpy 2D-array.'''
    if dataset.RasterCount >= aband:
        # Get one band
        band = dataset.GetRasterBand(aband)
        if band.GetNoDataValue() is not None:
            NoDataVal = band.GetNoDataValue()
        else:
            NoDataVal = 0
            band.SetNoDataValue(NoDataVal)
            #print "Warning NoDataValue not found, assuming 0!".format(dataset, aband)
        # create blank array (full of 0's) to hold extracted data [note Y,X format]
        dt = np.dtype(np.float32)
        datarray = np.zeros((band.YSize, band.XSize), dtype=dt)
        # loop based on YAxis (i.e. number of rows)
        for i in range(band.YSize):
            # read one line of the band
            scanline = band.ReadRaster(0, i, band.XSize, 1, band.XSize, 1,
                                       GDT_Float32)
            # unpack from binary representation
            tuple_of_floats = struct.unpack('f' * band.XSize, scanline)
            # add tuple to image array line by line
            datarray[i, :] = tuple_of_floats
        # apply mask for NoDataVal
        dataraster = ma.masked_values(datarray, NoDataVal)
        # apply mask for NaN values
        datarasterNaN = ma.masked_invalid(dataraster)
        # return array (raster)
        return datarasterNaN
    else:
        raise TypeError
def create_vsm(tediff, esp, sigmamm=0.0):
    """Create the voxel shift map (VSM) in the mag/phase space.

    Use mag as input to ensure that the VSM has the same dimensions as
    mag. The input only affects the output dimensions; its content has
    no effect on the VSM. The de-warped mag volume is meaningless and
    will be thrown away.
    """
    mask_name = op.join(gconf.get_epiunwarp(), 'mask.nii.gz')
    mag_name = op.join(gconf.get_epiunwarp(), 'magnitude.nii.gz')
    magdw_name = op.join(gconf.get_epiunwarp(), 'magdw.nii.gz')
    ph_name = op.join(gconf.get_epiunwarp(), 'phase_unwrapped.nii.gz')
    vsmmag_name = op.join(gconf.get_epiunwarp(), 'vsmmag.nii.gz')
    vsm_cmd = 'fugue -i %s -u %s -p %s --dwell=%s --asym=%s --mask=%s --saveshift=%s' % (
        mag_name, magdw_name, ph_name, esp, tediff, mask_name, vsmmag_name)
    if sigmamm > 0.0:
        vsm_cmd = '%s --smooth2=%s' % (vsm_cmd, sigmamm)
    runCmd(vsm_cmd, log)
    vsmmag_nii = ni.load(vsmmag_name)
    vsmmag_data = vsmmag_nii.get_data()
    mask_data = ni.load(mask_name).get_data()
    vsmmag_data[mask_data == 0] = 0
    vsmmag_masked = ma.masked_values(vsmmag_nii.get_data().reshape(-1), 0.0)
    print(vsmmag_masked.mean())
    # Remove the mean in-brain shift from the VSM.
    vsmmag_masked = vsmmag_masked - vsmmag_masked.mean()
    # save
    vsmmag_nii._data = vsmmag_masked.reshape(vsmmag_nii.get_shape())
    ni.save(vsmmag_nii, op.join(gconf.get_epiunwarp(), 'vsmmag_meancorrected.nii.gz'))
def calculateMeans(self):
    self.synHist = ma.masked_values(self.synHist, -9999.0)
    self.synHistMean = ma.mean(self.synHist, axis=0)
    self.medSynHist = ma.median(self.synHist, axis=0)
    self.synHistUpper = percentile(self.synHist, per=95, axis=0)
    self.synHistLower = percentile(self.synHist, per=5, axis=0)
def read_sensor_count(pgm_name):
    """Read the sensor count data from a PGM file into an array.

    The PGM is converted with gdal_translate to an intermediate raw
    binary file, which is read and then deleted. Regions with a zero
    count are masked. (The original docstring described a wgrib2/MPE
    workflow that does not match this body.)
    """
    bin_name = pgm_name[0:-4] + '.bin'
    hdr_name = pgm_name[0:-4] + '.hdr'
    # the spaces in the command string are significant
    exec_str = 'gdal_translate -ot UInt16 -of EHdr ' + pgm_name + ' ' + bin_name
    execute(exec_str)
    data = np.fromfile(bin_name, dtype=np.uint16)
    os.remove(bin_name)
    os.remove(hdr_name)
    # read the pgm header and parse for relevant parameters
    f = open(pgm_name)
    pgm_hdr = f.read(120)
    f.close()
    hdr_elems = pgm_hdr.split()
    cols = int(hdr_elems[1])  # fixed if the pgm format spec is followed
    rows = int(hdr_elems[2])  # fixed if the pgm format spec is followed
    data = np.reshape(data, (rows, cols))
    data = ma.masked_values(data, 0)
    return data
def load_data(self):
    """
    There is probably a better way to do this. Should nvalues be used as
    the expected number of rows, or should loading be free-form, with the
    checks validating it afterwards? For an incomplete file, load it
    anyway and let the check alert that data are missing.

    The atol below is a temporary workaround, not the proper way to
    handle the bad_flag comparison.
    """
    data_rows = re.sub(r'(\n\s*)+\n', '\n',
                       re.sub(r'\r\n', '\n', self.raw_data()['data'])
                       ).split('\n')[:-1]
    data = ma.masked_values(
        np.array([d.split() for d in data_rows], dtype=float),
        float(self.attributes['bad_flag']),
        atol=1e-30)
    # Maybe use np.fromstring(data, sep=" ") instead.
    for i in self.ids:
        attributes = self.data[i].attributes
        self.data[i] = data[:, i]
        self.data[i].attributes = attributes
def nearest_profile(self, t, lon, lat, var):
    """ """
    # --- Model ----
    # Define the closest time instant
    #dt = self['datetime'] - t
    #nt = numpy.arange(dt.shape[0])[dt == min(dt)][0]
    nt = numpy.absolute(
        ma.array([d.toordinal() for d in self['datetime']]) - t.toordinal()
    ).argmin()
    # Define the nearest point
    nx = numpy.absolute(self['xt_ocean'] - lon).argmin()
    ny = numpy.absolute(self['yt_ocean'] - lat).argmin()
    # Extract the temperature profile from the model
    profile = ma.masked_values(
        self.dataset.variables[var][nt, :, ny, nx],
        value=self.dataset.variables[var].missing_value)
    # Restrict the profile to deeper than 10m, and only good data.
    # find_z20 can't handle masked arrays.
    #ind = (temp_model.mask == False) & (numpy.absolute(self['depth']) > 10)
    #z = model_ref['depth'][ind]
    #t = temp_model[ind]
    return {
        'depth': self['st_ocean'],
        var: profile,
        'lon': self['xt_ocean'][nx],
        'lat': self['yt_ocean'][ny],
    }
def __getitem__(self, key):
    """ """
    # The original compared type() against the string
    # 'numpy.ma.core.MaskedArray', which is never true; use isinstance.
    if isinstance(self.data[key], ma.MaskedArray):
        return self.data[key]
    elif hasattr(self.data[key], 'missing_value'):
        return ma.masked_values(self.data[key][:],
                                getattr(self.data[key], 'missing_value'))
    return ma.array(self.data[key])
def band(self, layer):
    bnd = self.raster.GetRasterBand(layer)
    band = bnd.ReadAsArray()
    nan_value = bnd.GetNoDataValue()
    if nan_value is None:
        nan_value = np.amin(band)
    return ma.masked_values(band, nan_value)
def list_to_dates(items, date_format='%Y-%m-%d', missing=1e-10):
    """Convert a list into a list of masked date values, with each date
    in the specified date format.
    """
    if not items:
        return None
    x_data = [to_matplotlib_date(x, date_format) for x in items]
    return ma.masked_values(x_data, missing)  # ignore missing data
def _apply_mask(self, array, mask):
    if isinstance(mask, (np.ndarray, NDArrayType)):
        return ma.array(array, mask=mask)
    elif np.isscalar(mask):
        if np.isnan(mask):
            return ma.array(array, mask=np.isnan(array))
        else:
            return ma.masked_values(array, mask)
    return array
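# A brief sketch of the two masking paths in _apply_mask above (a NaN sentinel
# versus a finite scalar fill value); the arrays are hypothetical.
import numpy as np
import numpy.ma as ma

data = np.array([1.0, np.nan, 3.0])
nan_masked = ma.array(data, mask=np.isnan(data))  # NaN sentinel -> [1.0 -- 3.0]

data2 = np.array([1.0, -999.0, 3.0])
val_masked = ma.masked_values(data2, -999.0)      # finite sentinel -> [1.0 -- 3.0]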
def test_estimate_anomaly():
    f1 = estimate_anomaly(dummy_features,
                          {'spike': dummy_params['spike']})
    f2 = estimate_anomaly(pd.DataFrame(dummy_features),
                          {'spike': dummy_params['spike']})
    assert ma.allclose(f1, f2)
    assert ma.allclose(f1,
                       ma.masked_values([-999, 0.0, -5.797359001920061,
                                         -57.564627324851145, -999,
                                         -9.626760611162082], -999))
def load_file(file_name=file_name, time_start=time_start, time_end=time_end,
              lat_start=lat_start, lat_end=lat_end,
              lon_start=lon_start, lon_end=lon_end,
              masked_value=masked_value):
    nc = netcdf_file(file_name, 'r')
    new_array = nc.variables['Cflx'][time_start:time_end,
                                     lat_start:lat_end,
                                     lon_start:lon_end]
    nc.close()
    new_array = ma.masked_values(new_array, masked_value)
    new_array = ma.array(new_array, dtype=np.float32)
    new_array = new_array * unit_changer
    return new_array
def test_missing():
    data = ma.masked_values([1, 2, -1, 4, -1], value=-1)
    with Model() as model:
        x = Normal('x', 1, 1)
        y = Normal('y', x, 1, observed=data)

    y_missing, = model.missing_values
    assert y_missing.tag.test_value.shape == (2,)
    model.logp(model.test_point)
def plotswath(map, lats, lons, data, MISSING=-999.0, vmin=0.0, vmax=300.0):
    x, y = map(lons, lats)  # compute map-projection coordinates for pixel centers
    xc, yc = extrapcorners(x, y)  # interpolate/extrapolate corner boundaries of pixels
    # plot data as an overlaid color mesh
    # datam = ma.masked_where(np.isnan(data), data)
    datam = ma.masked_values(data, MISSING)
    cs = plt.pcolormesh(xc, yc, datam, rasterized=False, vmin=vmin, vmax=vmax)
def get_mask(self):
    self.array_mean = ma.mean(self.array)
    self.array_stdev = ma.std(self.array)
    self.array_range = ma.max(self.array) - ma.min(self.array)
    print("The mean is %f, the stdev is %f, the range is %f."
          % (self.array_mean, self.array_stdev, self.array_range))
    from scipy.io.netcdf import netcdf_file as NetCDFFile
    # get the landmask
    nc = NetCDFFile(os.getcwd() + '/../data/netcdf_files/ORCA2_landmask.nc', 'r')
    self.mask = ma.masked_values(
        nc.variables['MASK'][:, :self.time_len, :self.lat_len, :180],
        -9.99999979e+33)
    nc.close()
    self.xxx, self.yyy, self.zzz = np.lib.index_tricks.mgrid[
        0:self.time_len, 0:self.lat_len, 0:180]
def load_file(file_name=file_name, time_start=time_start, time_end=time_end,
              lat_start=lat_start, lat_end=lat_end,
              lon_start=lon_start, lon_end=lon_end,
              masked_value=masked_value):
    nc = NetCDFFile(file_name, 'r')
    new_array = nc.variables['Cflx'][time_start:time_end,
                                     lat_start:lat_end,
                                     lon_start:lon_end]
    nc.close()
    new_array = ma.masked_values(new_array, masked_value)
    new_array = new_array * 1e08
    return new_array
def getStatVal(imageFile, longitude, latitude, winsize, statistic, site):
    """Calculates the statistics on the pixels in the window array."""
    band1, band2, band3, band4, band5, band6, count = ('None',) * 7
    if imageFile != 'None' and imageFile is not None:
        imageFile = qvf.changestage(imageFile, 'tmp')
        temp = '%s_%s_%spix.tif' % (imageFile.split('.')[0], site.strip(), winsize)
        if not os.path.exists(temp):
            subsetRaster = getWindow(imageFile, longitude, latitude, winsize, site)
        else:
            subsetRaster = temp
        try:
            imgInfo = gdalcommon.info(subsetRaster)
            handle = gdal.Open(subsetRaster)
            for band in [1, 2, 3, 4, 5, 6]:
                if handle is not None:
                    bandHandle = handle.GetRasterBand(band)
                    bandArray = bandHandle.ReadAsArray()
                    maskedBand = ma.masked_values(bandArray, 0)
                    count = ma.count(maskedBand)
                    if statistic == 'mean':
                        statVal = maskedBand.mean()
                    elif statistic == 'std':
                        statVal = maskedBand.std()
                    else:
                        statVal = None
                    if band == 1:
                        band1 = statVal
                    elif band == 2:
                        band2 = statVal
                    elif band == 3:
                        band3 = statVal
                    elif band == 4:
                        band4 = statVal
                    elif band == 5:
                        band5 = statVal
                    elif band == 6:
                        band6 = statVal
        except Exception:
            pass
    return band1, band2, band3, band4, band5, band6, count