def test_normalize_and_transpose_returns_the_right_shape_and_colors(self):
    # Given: a 2x5 image where every pixel holds the RGB triple (5, 200, 110).
    source = np.ones((2, 5, 3)) * [5, 200, 110]  # rgb
    # When
    result = normalize_and_transpose(source)
    # Then: channels-first shape, values scaled into [0, 1], channel order
    # reversed relative to the RGB input (checked per channel below).
    wanted_shape = np.shape(np.ones((3, 2, 5)))
    wanted_colors = [110 / 255., 200 / 255., 5 / 255.]
    self.assertEqual(wanted_shape, np.shape(result))
    self.assertEqual(wanted_colors[0], result[0, 0, 0])  # b
    self.assertEqual(wanted_colors[1], result[1, 0, 0])  # g
    self.assertEqual(wanted_colors[2], result[2, 0, 0])  # r
def create_BITMAP(self):
    """Rasterize self.sample onto a square bitmap: 1 = background, 0 = dot."""
    bitmap = np.ones([self.bitmap_dim, self.bitmap_dim], dtype=int)
    for stroke in self.sample:
        for point in stroke:
            # Axis swap on purpose: point[1] selects the first bitmap index,
            # point[0] the second (matches the original dot[1]/dot[0] order).
            first = project_coordinate(point[1], self.bitmap_dim, self.image_dim)
            second = project_coordinate(point[0], self.bitmap_dim, self.image_dim)
            bitmap[first, second] = 0
    return bitmap
def run_the_app():
    """Render each stage of an edge-detection pipeline in the Streamlit app:
    original image, grayscale conversion, Canny edge map, and an eroded
    edge map.
    """
    # NOTE(review): a DATA_DIR = 'data' constant was defined here but never
    # used — the image is read from the working directory. Confirm whether
    # the path was meant to be joined with a data directory; removed the
    # dead local for now without changing the path actually read.
    path_input_image = '18499925491_e3af00ff02_o.jpg'
    original_image = cv2.imread(path_input_image)  # BGR; None if file missing
    st.image(original_image, caption='')
    gray = cv2.cvtColor(original_image, cv2.COLOR_BGR2GRAY)
    st.image(gray, caption='')
    edges = cv2.Canny(gray, 50, 150)
    st.image(edges, caption='')
    # Erosion shrinks the white (edge) pixels; 10 iterations with a 2x2
    # kernel removes most thin edge fragments.
    kernel = np.ones((2, 2), np.uint8)
    erosion = cv2.erode(edges, kernel, iterations=10)
    st.image(erosion, caption='')
def smooth(x, window_len=11, window='hanning'): """Smooth the data using a window with requested size""" if x.ndim != 1: raise ValueError('Function only accepts 1 dimension arrays.') if x.size < window_len: raise ValueError('Input vector needs to be bigger than window size.') if window_len < 3: return x valid = ['flat', 'hanning', 'hamming', 'bartlett', 'blackman'] if window not in valid: raise ValueError('Invalid value for parameter window. Valid values: ' + ','.join(valid)) s = np.r_[x[window_len - 1:0:-1], x, x[-1:-window_len:-1]] if window == 'flat': # moving average w = np.ones(window_len, 'd') else: w = eval('np.' + window + '(window_len)') y = np.convolve(w / w.sum(), s, mode='valid') return y
def predict(self, X):
    """Apply the linear layer W1 to X augmented with bias columns.

    Builds a design matrix whose first column is X's first column and
    whose remaining self.n_inputs columns stay at 1, then multiplies by
    W1.
    """
    n_samples = len(X)
    design = np.ones((n_samples, self.n_inputs + 1))
    design[:, 0] = X[:, 0]
    # (W1 @ design.T).T — one output row per sample.
    return self.W1.dot(design.T).T
def rolling_distance(df, window, freq=1):
    """Distances between lat/lon rows `freq` apart, stepped every `freq`
    rows starting at index window-1; the first window-1 entries are NaN
    padding so the result aligns with df's rows."""
    distances = [np.nan] * (window - 1)
    for idx in range(window - 1, len(df), freq):
        previous = df[['lat', 'lon']].iloc[idx - freq]
        current = df[['lat', 'lon']].iloc[idx]
        distances.append(distance_calc(previous, current))
    return distances
def Rect(self):
    """Run cv.morphologyEx on self.image with cv.MORPH_RECT as the
    operation argument, then refresh the displayed image.

    NOTE(review): cv.MORPH_RECT is a structuring-element *shape* constant,
    not a morphology *operation*; its integer value coincides with
    cv.MORPH_ERODE, so this call appears to perform an erosion. Confirm
    whether the intent was cv.getStructuringElement(cv.MORPH_RECT, ...)
    combined with an explicit operation.
    """
    if self.image is not None:
        kernel = np.ones((5, 5), np.uint8)
        self.image = cv.morphologyEx(self.image, cv.MORPH_RECT, kernel)
        self.processedImg()
def BlackHat(self):
    """Apply the morphological black-hat transform to self.image with a
    5x5 kernel of ones, then refresh the displayed image."""
    if self.image is None:
        return
    structuring = np.ones((5, 5), np.uint8)
    self.image = cv.morphologyEx(self.image, cv.MORPH_BLACKHAT, structuring)
    self.processedImg()
def Gradient(self):
    """Apply the morphological gradient to self.image with a 5x5 kernel
    of ones, then refresh the displayed image."""
    if self.image is None:
        return
    structuring = np.ones((5, 5), np.uint8)
    self.image = cv.morphologyEx(self.image, cv.MORPH_GRADIENT, structuring)
    self.processedImg()
def Elipse(self):
    """Run cv.morphologyEx on self.image with cv.MORPH_ELLIPSE as the
    operation argument, then refresh the displayed image.

    NOTE(review): cv.MORPH_ELLIPSE is a structuring-element *shape*
    constant, not a morphology *operation*; its integer value coincides
    with cv.MORPH_OPEN, so this call appears to perform an opening.
    Confirm whether the intent was
    cv.getStructuringElement(cv.MORPH_ELLIPSE, ...) with an explicit
    operation.
    """
    if self.image is not None:
        kernel = np.ones((5, 5), np.uint8)
        self.image = cv.morphologyEx(self.image, cv.MORPH_ELLIPSE, kernel)
        self.processedImg()
def Cross(self):
    """Run cv.morphologyEx on self.image with cv.MORPH_CROSS as the
    operation argument, then refresh the displayed image.

    NOTE(review): cv.MORPH_CROSS is a structuring-element *shape*
    constant, not a morphology *operation*; its integer value coincides
    with cv.MORPH_DILATE, so this call appears to perform a dilation.
    Confirm whether the intent was
    cv.getStructuringElement(cv.MORPH_CROSS, ...) with an explicit
    operation.
    """
    if self.image is not None:
        kernel = np.ones((5, 5), np.uint8)
        self.image = cv.morphologyEx(self.image, cv.MORPH_CROSS, kernel)
        self.processedImg()
def Closing(self):
    """Apply a morphological closing to self.image with a 5x5 kernel of
    ones, then refresh the displayed image."""
    if self.image is None:
        return
    structuring = np.ones((5, 5), np.uint8)
    self.image = cv.morphologyEx(self.image, cv.MORPH_CLOSE, structuring)
    self.processedImg()
def Opening(self):
    """Apply a morphological opening to self.image with a 15x15 kernel
    of ones, then refresh the displayed image."""
    if self.image is None:
        return
    # Larger 15x15 kernel than the other 5x5 operations in this class.
    structuring = np.ones((15, 15), np.uint8)
    self.image = cv.morphologyEx(self.image, cv.MORPH_OPEN, structuring)
    self.processedImg()
def Dilation(self):
    """Dilate self.image once with a 15x15 kernel of ones, then refresh
    the displayed image."""
    if self.image is None:
        return
    structuring = np.ones((15, 15), np.uint8)
    self.image = cv.dilate(self.image, structuring, iterations=1)
    self.processedImg()
def Filter2D(self):
    """Blur self.image with a normalized 5x5 averaging filter, keeping
    the source depth (ddepth=-1), then refresh the displayed image."""
    if self.image is None:
        return
    averaging = np.ones((5, 5), np.float32) / 25
    self.image = cv.filter2D(self.image, -1, averaging)
    self.processedImg()
def homogenize(self, X):
    """Return X as a design matrix with a leading bias column of ones."""
    # Accept either an array or a pandas object (via .values), reshaped
    # into a single column.
    column = getattr(X, 'values', X).reshape(len(X), 1)
    design = np.ones((len(column), self.n_inputs + 1))
    design[:, 1:] = column
    return design
def Erosion(self):
    """Erode self.image once with a 5x5 kernel of ones, then refresh the
    displayed image."""
    if self.image is None:
        return
    structuring = np.ones((5, 5), np.uint8)
    self.image = cv.erode(self.image, structuring, iterations=1)
    self.processedImg()
# Adversarial training loop: alternately trains the discriminator on a
# mixed real/fake batch, then trains the generator through the combined
# `gan` model.
gan.compile(optimizer=gan_optimizer, loss='binary_crossentropy')

iterations = 10000
batch_size = 20
save_dir = 'output'  # assumes this directory already exists — TODO confirm
start = 0  # cursor into x_train for the next batch of real images
for step in range(iterations):
    # Sample latent points and generate a batch of fake images.
    random_latent_vectors = np.random.normal(size=(batch_size, latent_dim))
    generated_images = generator.predict(random_latent_vectors)
    # Next slice of real training images.
    stop = start + batch_size
    real_images = x_train[start:stop]
    combined_images = np.concatenate([generated_images, real_images])
    # Label convention here: 1 = generated (first half), 0 = real.
    labels = np.concatenate(
        [np.ones((batch_size, 1)), np.zeros((batch_size, 1))])
    # Label smoothing: random noise keeps the discriminator from becoming
    # overconfident.
    labels += 0.05 * np.random.random(labels.shape)
    d_loss = discriminator.train_on_batch(combined_images, labels)
    # Train the generator: ask the gan to drive fresh fakes toward label 0,
    # i.e. "real" under the convention above.
    random_latent_vectors = np.random.normal(size=(batch_size, latent_dim))
    misleading_targets = np.zeros((batch_size, 1))
    a_loss = gan.train_on_batch(random_latent_vectors, misleading_targets)
    # Advance the real-image cursor, wrapping before it would overrun.
    start += batch_size
    if start > len(x_train) - batch_size:
        start = 0
    # Every 100 steps: checkpoint weights and save one fake/real sample.
    if step % 100 == 0:
        gan.save_weights('gan.h5')
        img = image.array_to_img(generated_images[0] * 255., scale=False)
        img.save(os.path.join(save_dir, 'generated_frog' + str(step) + '.png'))
        img = image.array_to_img(real_images[0] * 255., scale=False)
        img.save(os.path.join(save_dir, 'real_frog' + str(step) + '.png'))