def test_perturb():
    """Test perturbation operator."""
    # Given: a random composition and fixed perturbation values
    samples = np.random.random([2, 3])
    pert = np.array([1., 2., 3.])
    # Without re-closure, perturbation reduces to an element-wise product
    reference = samples * pert
    # When
    result = coda.perturb(samples, pert, reclose=False)
    # Then
    assert np.all(result == reference)
# Impute zeros (log-ratio operations are undefined for zero components)
comp[comp == 0] = 1.
# Closure
comp = tet.closure(comp)
# Plot related operations
p_mask = mask.reshape(dims[0] * dims[1] * dims[2])
p_comp = comp[p_mask > 0]
# Centering
center = tet.sample_center(p_comp)
# center = np.array([[ 0.06521165, 0.66942364, 0.26536471]])  # 0 noise center
# BUGFIX: was a Python-2 print statement (`print "..."`); the rest of the
# file uses the print() function, which is valid in both Python 2 and 3.
print("Sample center: " + str(center))
c_temp = np.ones(p_comp.shape) * center
p_comp = tet.perturb(p_comp, c_temp**-1)
# Standardize
totvar = tet.sample_total_variance(p_comp, center)
p_comp = tet.power(p_comp, np.power(totvar, -1. / 2.))
# Isometric logratio transformation for plotting
ilr = tet.ilr_transformation(p_comp)
# Plot 2D histogram of ilr transformed data
plt.hist2d(ilr[:, 0], ilr[:, 1], bins=2000, norm=LogNorm(),
           # vmax=100,
           cmap='inferno')
plt.xlabel('$v_1$')
def simplex_color_balance(bary, center=True, standardize=False,
                          trunc_max=False):
    """Compositional data based method for color balance.

    Highly experimental!

    Parameters
    ----------
    bary: numpy.ndarray
        Barycentric coordinates. Sum of all channels should add up to 1
        (closed).
    center: bool
        Center barycentric coordinates (similar to de-meaning). Single-hue
        dominated images will be balanced to cover all hues.
    standardize: bool
        Standardize barycentric coordinates. Standardized compositions make
        better use of the simplex space dynamic range.
    trunc_max: bool
        Truncate maximum barycentric coordinates to eliminate extreme hues
        that are not prevalent in the image.

    Returns
    -------
    bary: numpy.ndarray
        Processed composition.

    """
    dims = bary.shape
    # Flatten all leading (spatial) axes; keep the channel axis last
    bary = bary.reshape([np.prod(dims[:-1]), dims[-1]])
    bary = np.nan_to_num(bary)

    # Do not consider values <= 0 (any non-positive channel zeroes the product)
    mask = np.prod(bary, axis=-1)
    mask = mask > 0
    temp = bary[mask]

    # Interpretation of centering: Compositions cover the simplex space more
    # balanced across components. Similar to de-mean data.
    sample_center = None
    if center:
        sample_center = coda.sample_center(temp)
        temp2 = np.full(temp.shape, sample_center)
        temp = coda.perturb(temp, temp2**-1.)
        temp2 = None  # release the broadcast copy

    # Interpretation of standardization: Centered compositions cover the
    # dynamic range of simplex space more.
    if standardize:
        # BUGFIX: sample_center was previously only bound inside the
        # `if center:` branch, so standardize=True with center=False raised
        # a NameError. Compute it here when centering was skipped.
        if sample_center is None:
            sample_center = coda.sample_center(temp)
        totvar = coda.sample_total_variance(temp, sample_center)
        temp = coda.power(temp, np.power(totvar, -1./2.))

    # Interpretation of max truncation: Pull the extreme compositions to
    # threshold distance by using scaling. Scaling factor is determined for
    # each outlier composition to pull more extreme compositions more
    # strongly.
    if trunc_max:
        # Use Aitchison norm and powering to truncate extreme compositions
        anorm = coda.aitchison_norm(temp)
        anorm_thr_min, anorm_thr_max = np.percentile(anorm, [1., 99.])
        # Max truncate
        idx_trunc = anorm > anorm_thr_max
        truncation_power = anorm[idx_trunc] / anorm_thr_max
        correction = np.ones(anorm.shape)
        correction[idx_trunc] = truncation_power
        temp = coda.power(temp, correction[:, None])
        # Min truncate
        idx_trunc = anorm < anorm_thr_min
        truncation_power = anorm[idx_trunc] / anorm_thr_min
        correction = np.ones(anorm.shape)
        correction[idx_trunc] = truncation_power
        temp = coda.power(temp, correction[:, None])

    # TODO: Implement this similar to truncate and scale function but for
    # simplex space. Proportion of dynamic range to the distance of between
    # aitchison norm percentiles gives the global scaling factor. This should
    # be done after truncation though.

    # Put back processed composition
    bary[mask] = temp
    return bary.reshape(dims)
# comp[:, i] = temp # Impute comp[comp == 0] = 1. # Closure comp = coda.closure(comp) # Isometric logratio transformation before any centering ilr_orig = coda.ilr_transformation(np.copy(comp)) # Centering center = coda.sample_center(comp) print("Sample center: " + str(center)) c_temp = np.ones(comp.shape) * center p_comp = coda.perturb(comp, c_temp**-1) # Standardize totvar = coda.sample_total_variance(comp, center) comp = coda.power(comp, np.power(totvar, -1. / 2.)) # Isometric logratio transformation for plotting ilr = coda.ilr_transformation(comp) # Plots fig = plt.figure() limits = [-2.5, 2.5] ax_1 = plt.subplot(121) # Plot 2D histogram of ilr transformed data _, _, _, h_1 = ax_1.hist2d(ilr_orig[:, 0], ilr_orig[:, 1], bins=1000,
comp = np.copy(orig) # Lightness light = (np.max(comp, axis=1) + np.min(comp, axis=1)) / 2. # Closure comp = coda.closure(comp) # Do not consider masked values p_mask = mask.reshape(dims[0] * dims[1] * dims[2]) p_comp = comp[p_mask > 0] # Centering center = coda.sample_center(p_comp) temp = np.ones(p_comp.shape) * center p_comp = coda.perturb(p_comp, temp**-1.) # Standardize totvar = coda.sample_total_variance(p_comp, center) p_comp = coda.power(p_comp, np.power(totvar, -1. / 2.)) # Use Aitchison norm and powerinf for truncation of extreme compositions anorm_thr = 3 anorm = coda.aitchison_norm(p_comp) idx_trunc = anorm > anorm_thr truncation_power = anorm[idx_trunc] / anorm_thr correction = np.ones(anorm.shape) correction[idx_trunc] = truncation_power comp_bal = coda.power(p_comp, correction[:, None]) # go to hexcone lattice for exports comp[p_mask > 0] = comp_bal
# Closure comp = tet.closure(comp) # Plot related operations p_mask = mask.reshape(dims[0] * dims[1] * dims[2]) p_comp = comp[p_mask > 0] # Isometric logratio transformation before any centering ilr_orig = tet.ilr_transformation(np.copy(p_comp)) # Centering center = tet.sample_center(p_comp) print("Sample center: " + str(center)) c_temp = np.ones(p_comp.shape) * center p_comp = tet.perturb(p_comp, c_temp**-1) # Standardize totvar = tet.sample_total_variance(p_comp, center) p_comp = tet.power(p_comp, np.power(totvar, -1. / 2.)) # Isometric logratio transformation for plotting ilr = tet.ilr_transformation(p_comp) # Plots fig = plt.figure() limits = [-2.5, 2.5] ax_1 = plt.subplot(121) # Plot 2D histogram of ilr transformed data _, _, _, h_1 = ax_1.hist2d(ilr_orig[:, 0], ilr_orig[:, 1], bins=2000,