def auto_bright_nonlin(img, epochs, transform_factor=0.5, sigma=0.8, mean_thresh=2, mean_reduction=0.9):
    """
    Auto-brighten an image by iterated nonlinear transforms, keeping the
    maximum-entropy candidate.

    TODO: Transform multiple images simultaneously (e.g. Before and After) per Roy's request
    Try sliding window approach and maximize entropy for each window.
    Windows can't be too small, or too large.

    :param img: numpy array/obj of the image you want to transform
    :param epochs: hyperparameter for number of transformations
    :param transform_factor: hyperparameter for rate of exponential transformation
    :param sigma: gaussian filter hyperparameter
    :param mean_thresh: hyperparameter controlling sensitivity of intensity cutoff
    :param mean_reduction: hyperparameter for reducing the lowest intensity pixels
    :return best_img: maximum entropy image
    :return entropy_loss: list of per-epoch histogram entropies (index 0 = input image)
    """

    def _hist_entropy(arr):
        # Entropy of the default (10-bin) intensity histogram, via a dit
        # distribution over the bin labels '0'..'9'.
        counts, _ = np.histogram(arr)
        total = np.sum(counts)
        count_frac = [count / total for count in counts]
        d = dit.Distribution(list(map(str, range(len(counts)))), count_frac)
        return entropy(d)

    # normalize pixels between 0 and 1
    # NOTE: np.float was removed in NumPy 1.24 — use the builtin float.
    img = np.array(img).astype(float)
    img *= 1 / np.max(img)

    # calculate initial entropy of the image
    entropy_loss = [_hist_entropy(img)]
    d_entropy = 1  # arbitrary
    imgs = [img]  # holds all images so that we can choose the one with the best entropy

    for i in range(epochs):
        # Copy before the in-place dimming below: without this, the update
        # mutates the array already stored in `imgs` (aliasing bug), corrupting
        # the saved candidates we later pick the best image from.
        img = img.copy()
        # remove low intensity pixels
        img[img <= mean_thresh * np.mean(img)] *= mean_reduction
        img = gf(img, sigma=sigma)
        img = img ** (1 - (transform_factor * d_entropy))
        img[img == np.inf] = 1  # clip infinities at 1
        imgs.append(img)

        entropy_loss.append(_hist_entropy(img))
        d_entropy = entropy_loss[-1] - entropy_loss[-2]
        if i % 10 == 0:
            print('Finished: ', 100 * i / epochs, '%')

    print('Best entropy: ', max(entropy_loss), 'at ix ', entropy_loss.index(max(entropy_loss)))
    best_img = imgs[entropy_loss.index(max(entropy_loss))]
    best_img = gf(best_img, sigma=sigma)
    return best_img, entropy_loss
def test_cross_entropy_2():
    """
    Test that xH(d, d) = H(d).
    """
    # Nose-style generative test: yield one check per distribution.
    for dist in get_dists():
        yield assert_almost_equal, cross_entropy(dist, dist), entropy(dist)
def test_cross_entropy_2():
    """
    Test that xH(d, d) = H(d).
    """
    distributions = get_dists()
    for d in distributions:
        # Cross-entropy of a distribution with itself equals its entropy.
        yield assert_almost_equal, cross_entropy(d, d), entropy(d)
def test_no_constraints_self_disclosure():
    """Test that unconstrained self-disclosure is joint entropy."""
    for _ in range(10):  # repeat over several random distributions
        dist = dit.random_distribution(3, 2)
        joint_entropy = entropy(dist)
        unconstrained = self_disclosure(dist, cons=[])
        assert np.isclose(joint_entropy, unconstrained)
def test_dist():
    """
    Test that the construct dist is accurate.
    """
    optimizer = MinimalIntrinsicTotalCorrelation(intrinsic_1, [[0], [1]], [2], bound=3)
    optimizer.optimize()
    constructed = optimizer.construct_distribution()
    # Entropy of the constructed auxiliary variable should be ~1.5 bits.
    assert entropy(constructed, [3]) == pytest.approx(1.5, abs=1e-2)
def test_nat():
    """
    Test known bit values.
    """
    fair_coin = Distribution(['0', '1'], [1 / 2, 1 / 2])
    ditParams['units'] = True
    measured = entropy(fair_coin)
    reset_params()
    # One fair coin flip = ln(2) nats; compare magnitudes in base units.
    expected = ureg.Quantity(np.log(2), ureg.nat).to_base_units()
    assert measured.m == pytest.approx(expected.m)
def test_bit():
    """
    Test known bit values.
    """
    fair_coin = Distribution(['0', '1'], [1 / 2, 1 / 2])
    ditParams['units'] = True
    measured = entropy(fair_coin)
    reset_params()
    # One fair coin flip carries exactly one bit of entropy.
    expected = ureg.Quantity(1, ureg.bit)
    assert measured.m == pytest.approx(expected.m)
def test_nat():
    """
    Test known bit values.
    """
    d = Distribution(['0', '1'], [1 / 2, 1 / 2])
    ditParams['units'] = True
    h = entropy(d)
    reset_params()
    # Exact equality of float-valued pint Quantities (the original
    # `assert h == true`) is brittle; compare magnitudes in base units with
    # pytest.approx, matching the sibling unit tests in this suite.
    true = ureg.Quantity(np.log(2), ureg.nat).to_base_units()
    assert h.m == pytest.approx(true.m)
def test_dit():
    """
    Test known bit values.
    """
    outcomes = [str(i) for i in range(10)]
    uniform10 = Distribution(outcomes, [1 / 10] * 10)
    ditParams['units'] = True
    measured = entropy(uniform10)
    reset_params()
    # A uniform 10-outcome distribution has exactly 1 dit of entropy.
    expected = ureg.Quantity(1, ureg.dit).to_base_units()
    assert measured.m == pytest.approx(expected.m)
def test_nat():
    """
    Test known bit values.
    """
    dist = Distribution(['0', '1'], [1 / 2, 1 / 2])
    ditParams['units'] = True
    h_measured = entropy(dist)
    reset_params()
    h_expected = ureg.Quantity(np.log(2), ureg.nat).to_base_units()
    # Magnitudes compared in base units: ln(2) nats per fair coin flip.
    assert h_measured.m == pytest.approx(h_expected.m)
def test_bit():
    """
    Test known bit values.
    """
    dist = Distribution(['0', '1'], [1 / 2, 1 / 2])
    ditParams['units'] = True
    h_measured = entropy(dist)
    reset_params()
    h_expected = ureg.Quantity(1, ureg.bit)
    # A fair coin flip is exactly one bit.
    assert h_measured.m == pytest.approx(h_expected.m)
def test_dit():
    """
    Test known bit values.
    """
    uniform10 = Distribution([str(i) for i in range(10)], [1 / 10] * 10)
    ditParams['units'] = True
    h_measured = entropy(uniform10)
    reset_params()
    # Ten equiprobable outcomes = 1 dit; compare in base units.
    h_expected = ureg.Quantity(1, ureg.dit).to_base_units()
    assert h_measured.m == pytest.approx(h_expected.m)
def test_distributions3(dist):
    """
    A test for the distributions strategy.
    """
    # Entropy is bounded above by log2 of the support size.
    max_entropy = np.log2(len(dist.pmf))
    assert entropy(dist) <= max_entropy
# Compute and display three partial-information decompositions of dist1,
# with sources [0] and [1] and target [2].
rcyDEP = PID_dep(dist1, [[0], [1]], [2])
print(rcyDEP)
rcyWB = PID_WB(dist1, [[0], [1]], [2])
print(rcyWB)
rcyPROJ = PID_Proj(dist1, [[0], [1]], [2])
print(rcyPROJ)
print(" ")
# Computation of the entropies
# Index convention (from the marginals below): 0 -> B, 1 -> A, 2 -> O.
H_OBA = entropy(dist1)  # joint entropy H(O,B,A) over all variables
H_B = entropy(dist1, [0])
H_A = entropy(dist1, [1])
H_O = entropy(dist1, [2])
H_OB = entropy(dist1, [[0], [2]])  # joint entropy H(O,B)
H_OA = entropy(dist1, [[1], [2]])  # joint entropy H(O,A)
H_BA = entropy(dist1, [[0], [1]])  # joint entropy H(B,A)
# Computation of the mutual informations and interaction information
I_OB = H_B + H_O - H_OB  # I(O;B)
I_OA = H_A + H_O - H_OA  # I(O;A)
I_BA = H_B + H_A - H_BA  # I(B;A)
# Conditional mutual informations:
# I(O;B|A) = H(O,A) + H(B,A) - H(A) - H(O,B,A)
I_OBgA = H_OA - H_A - H_OBA + H_BA
# I(O;A|B) = H(O,B) + H(B,A) - H(B) - H(O,B,A)
I_OAgB = H_OB - H_B - H_OBA + H_BA
def test_cross_entropy_2(d):
    """
    Test that xH(d, d) = H(d).
    """
    # Self cross-entropy collapses to plain entropy.
    expected = entropy(d)
    assert cross_entropy(d, d) == pytest.approx(expected)
def test_cross_entropy_2(d):
    """
    Test that xH(d, d) = H(d).
    """
    observed = cross_entropy(d, d)
    # xH(p, p) must agree with H(p) up to floating-point tolerance.
    assert observed == pytest.approx(entropy(d))