def run_coarse():
    """Two-stage inference/training loop: run the coarse detector over each
    full image, then feed only the flagged regions to the fine model.

    For every image in the CSV, the coarse model produces a grid of
    detections; ``islandProblem.get_boxes`` turns that grid into bounding
    boxes, and each box is cropped (scaled by INPUT_SHAPE cells) out of the
    image and its mask ``v`` and run through the fine model's train op.

    Side effects: restores and periodically saves the checkpoint at
    MODEL_FILEPATH, and writes TensorBoard summaries to SUMMARY_FILEPATH.
    """
    files_dict_gen = loadCSV.load_csv(CSV_FILENAME, 1)
    file_dict = next(files_dict_gen, None)
    sess = tf.Session()  # start the session

    # ------------------------- build the graph -------------------------
    test_coarse_input = tf.placeholder(
        tf.float32,
        shape=(None, BASE_INPUT_SHAPE[0], BASE_INPUT_SHAPE[1],
               BASE_INPUT_SHAPE[2]))
    coarse_guess = coarseModel.build_model(test_coarse_input)
    # Fine model accepts variable-sized crops (H/W left as None).
    test_fine_input = tf.placeholder(tf.float32, shape=(None, None, None, 3))
    test_fine_label = tf.placeholder(tf.float32, shape=(None, None, None, 1))
    real_summary, _, adam_variable = build_model(
        test_fine_input, test_fine_label, images=None)

    sess.run(tf.global_variables_initializer())
    # BUG FIX: the Saver construction was commented out in the original,
    # leaving `saver_perm` undefined at the restore/save calls (NameError).
    saver_perm = tf.train.Saver()
    saver_perm.restore(sess, MODEL_FILEPATH)
    train_writer = tf.summary.FileWriter(SUMMARY_FILEPATH, sess.graph)

    count = 0
    while file_dict:
        for k, v in file_dict.items():
            image = Image.open(DATA_FILEPATH + k)
            # Normalize to [0, 1] floats for the network input.
            np_image = np.array(image, dtype=np.float32) / 255.0
            feed_dict = {test_coarse_input: np.expand_dims(np_image, 0)}
            coarse_guess_ex = sess.run(coarse_guess, feed_dict=feed_dict)
            box_list = islandProblem.get_boxes(np.squeeze(coarse_guess_ex))
            if len(box_list):
                for mins, maxes in box_list:
                    x_min, y_min = mins[0], mins[1]
                    # +1 so the max grid cell is included in the crop.
                    x_max, y_max = maxes[0] + 1, maxes[1] + 1
                    fine_ex = np_image[
                        x_min * INPUT_SHAPE[0]:x_max * INPUT_SHAPE[0],
                        y_min * INPUT_SHAPE[1]:y_max * INPUT_SHAPE[1]]
                    label_ex = v[
                        x_min * INPUT_SHAPE[0]:x_max * INPUT_SHAPE[0],
                        y_min * INPUT_SHAPE[1]:y_max * INPUT_SHAPE[1]]
                    feed_dict = {
                        test_fine_input: np.expand_dims(fine_ex, 0),
                        # Label needs batch and channel dims: (1, H, W, 1).
                        test_fine_label: np.expand_dims(
                            np.expand_dims(label_ex, 0), -1)
                    }
                    real_summary_ex, _ = sess.run(
                        [real_summary, adam_variable], feed_dict=feed_dict)
                    train_writer.add_summary(real_summary_ex, count)
            # NOTE(review): reconstructed from collapsed source — count
            # appears to advance once per image; confirm against history.
            count += 1
            if not count % WHEN_SAVE:
                saver_perm.save(sess, MODEL_FILEPATH)
        file_dict = next(files_dict_gen, None)
def save_whole():
    """Serialize full-resolution images, their per-pixel masks, and a
    DIVIDEND x DIVIDEND coarse "has boat" grid to flat binary files.

    For each CSV batch, fills pre-allocated uint8 arrays and appends their
    raw bytes to the TEST_* files while ``count < number_hold`` (test
    split), otherwise to the TRAIN_* files. The coarse label for a grid
    cell is the max mask value inside that cell.
    """
    files_dict_gen = loadCSV.load_csv(CSV_FILENAME, batch_size)
    file_dict = next(files_dict_gen, None)
    count = 0
    while file_dict:
        input_group = np.zeros((len(file_dict), WIDTH, HEIGHT, 3),
                               dtype=np.uint8)
        coarse_label = np.zeros((len(file_dict), DIVIDEND, DIVIDEND),
                                dtype=np.uint8)
        fine_label = np.zeros((len(file_dict), WIDTH, HEIGHT), dtype=np.uint8)
        for i, (k, v) in enumerate(file_dict.items()):
            try:
                image = Image.open(DATA_FILEPATH + k)
                np_image = np.array(image, dtype=np.uint8)
                input_group[i] = np_image
                fine_label[i] = v
                # Coarse label: nonzero iff any mask pixel in the cell is set.
                for j in range(DIVIDEND * DIVIDEND):
                    x_a = j // DIVIDEND
                    y_a = j % DIVIDEND
                    has_boat = np.amax(
                        v[x_a * NEW_WIDTH:x_a * NEW_WIDTH + NEW_WIDTH,
                          y_a * NEW_HEIGHT:y_a * NEW_HEIGHT + NEW_HEIGHT])
                    coarse_label[i, x_a, y_a] = has_boat
            except Exception as e:
                # BUG FIX: was a bare `except:` that also swallowed
                # KeyboardInterrupt/SystemExit and hid the actual error.
                print('ERR-------------------')
                print(e)
                continue
            count += 1
            if not count % 500:
                print('Count: ' + str(count))
        if len(input_group):
            # Arrays are already uint8 ndarrays; the original re-wrapped them
            # with np.array(..., dtype=np.uint8), which only added a copy.
            if count < number_hold:
                print('TEST SAVE')
                append_binary_file(TEST_INPUT_SAVE, input_group.tobytes())
                append_binary_file(TEST_LABEL_SAVE, fine_label.tobytes())
                append_binary_file(TEST_INPUT_SAVE_COARSE,
                                   coarse_label.tobytes())
            else:
                print('TRAIN SAVE')
                print(len(input_group))
                append_binary_file(TRAIN_INPUT_SAVE, input_group.tobytes())
                append_binary_file(TRAIN_LABEL_SAVE, fine_label.tobytes())
                append_binary_file(TRAIN_INPUT_SAVE_COARSE,
                                   coarse_label.tobytes())
        else:
            print('EMPTY')
        file_dict = next(files_dict_gen, None)
    print('DOOOOOOOOONE')
def save_data():
    """Run the restored coarse model over every image and save only the
    tiles it flags as interesting.

    Each image is split into DIVIDEND x DIVIDEND tiles of
    NEW_WIDTH x NEW_HEIGHT pixels; a tile (and its mask crop from ``v``)
    is kept when the coarse model's output for that grid cell is truthy.
    Kept tiles are appended as raw uint8 bytes to the TEST_* files while
    ``count < number_hold``, otherwise to the TRAIN_* files.
    """
    files_dict_gen = loadCSV.load_csv(CSV_FILENAME, batch_size)
    file_dict = next(files_dict_gen, None)
    sess = tf.Session()  # start the session

    # ------------------------- build the graph -------------------------
    test_coarse_input = tf.placeholder(
        tf.float32,
        shape=(None, BASE_INPUT_SHAPE[0], BASE_INPUT_SHAPE[1],
               BASE_INPUT_SHAPE[2]))
    coarse_guess = build_coarse_model(test_coarse_input)
    sess.run(tf.global_variables_initializer())
    saver_perm = tf.train.Saver()
    saver_perm.restore(sess, MODEL_FILEPATH)

    count = 0
    while file_dict:
        input_group = []
        fine_label = []
        for k, v in file_dict.items():
            try:
                image = Image.open(DATA_FILEPATH + k)
                np_image = np.array(image, dtype=np.uint8)
                feed_dict = {test_coarse_input: np.expand_dims(np_image, 0)}
                coarse_guess_ex = np.squeeze(
                    sess.run(coarse_guess, feed_dict=feed_dict))
                for j in range(DIVIDEND * DIVIDEND):
                    x_a = j // DIVIDEND
                    y_a = j % DIVIDEND
                    # Keep the tile when the coarse model fires on its cell.
                    if coarse_guess_ex[x_a, y_a]:
                        input_group.append(
                            np_image[
                                x_a * NEW_WIDTH:x_a * NEW_WIDTH + NEW_WIDTH,
                                y_a * NEW_HEIGHT:y_a * NEW_HEIGHT + NEW_HEIGHT])
                        fine_label.append(
                            v[x_a * NEW_WIDTH:x_a * NEW_WIDTH + NEW_WIDTH,
                              y_a * NEW_HEIGHT:y_a * NEW_HEIGHT + NEW_HEIGHT])
                count += 1
                if not count % 500:
                    print('Count: ' + str(count))
            except Exception as e:
                # BUG FIX: was a bare `except:` that also swallowed
                # KeyboardInterrupt/SystemExit and hid the actual error.
                print('Error')
                print(e)
                continue
        if len(input_group):
            if count < number_hold:
                print('TEST SAVE')
                input_group = np.array(input_group, dtype=np.uint8)
                fine_label = np.array(fine_label, dtype=np.uint8)
                append_binary_file(TEST_INPUT_SAVE, input_group.tobytes())
                append_binary_file(TEST_LABEL_SAVE, fine_label.tobytes())
            else:
                print('TRAIN SAVE')
                print(len(input_group))
                input_group = np.array(input_group, dtype=np.uint8)
                fine_label = np.array(fine_label, dtype=np.uint8)
                append_binary_file(TRAIN_INPUT_SAVE, input_group.tobytes())
                append_binary_file(TRAIN_LABEL_SAVE, fine_label.tobytes())
        else:
            print('EMPTY')
        file_dict = next(files_dict_gen, None)
    print('DOOOOOOOOONE')
DIVIDEND = 24        # images are tiled into a DIVIDEND x DIVIDEND grid
WIDTH = 768          # full image width in pixels
HEIGHT = 768         # full image height in pixels
# BUG FIX: use floor division. Under Python 3 `/` yields floats, and these
# values are used as array slice indices throughout the file (TypeError).
NEW_HEIGHT = HEIGHT // DIVIDEND
NEW_WIDTH = WIDTH // DIVIDEND
batch_size = 2000
total_images = 104000
percent_hold = .2
# Leading `number_hold` images are held out as the validation/test split.
number_hold = total_images * percent_hold
print('Number Validate' + str(number_hold))


def append_binary_file(file_name, bytes_):
    """Append raw `bytes_` to `file_name`, creating the file if absent."""
    with open(file_name, "ab") as f:
        f.write(bytes_)


# NOTE(review): the rest of this module-level script is truncated in the
# source — the tile-loop body, the `except` handler, and the save logic are
# missing. save_part() implements what appears to be the same flow in
# function form. The fragment is preserved with marked placeholders so the
# module parses; restore the missing pieces (or delete the fragment in
# favor of save_part) before running this file as a script.
files_dict_gen = loadCSV.load_csv(CSV_FILENAME, batch_size)
file_dict = next(files_dict_gen, None)
count = 0
match_num = 0
while file_dict:
    input_group = []
    fine_label = []
    for i, temp in enumerate(file_dict.items()):
        k, v = temp
        try:
            image = Image.open(DATA_FILEPATH + k)
            np_image = np.array(image, dtype=np.uint8)
            for j in range(DIVIDEND * DIVIDEND):
                pass  # placeholder: loop body missing from source
        except Exception:
            raise  # placeholder: original handler missing from source
    file_dict = next(files_dict_gen, None)  # advance to avoid looping forever
def save_part():
    """Tile every image into DIVIDEND x DIVIDEND patches and save all of
    them (no coarse filtering) with max-pooled mask labels.

    Each NEW_WIDTH x NEW_HEIGHT mask patch is downsampled with a
    num_pool x num_pool max-pool (skimage block_reduce) before saving.
    Patches are appended as raw uint8 bytes to files whose names carry a
    `_<DIVIDEND>_<num_pool>` suffix; batches go to the TEST_* files while
    ``count < number_hold``, otherwise to the TRAIN_* files.
    """
    files_dict_gen = loadCSV.load_csv(CSV_FILENAME, batch_size)
    file_dict = next(files_dict_gen, None)
    count = 0
    while file_dict:
        input_group = []
        fine_label = []
        for k, v in file_dict.items():
            try:
                image = Image.open(DATA_FILEPATH + k)
                np_image = np.array(image, dtype=np.uint8)
                for j in range(DIVIDEND * DIVIDEND):
                    x_a = j // DIVIDEND
                    y_a = j % DIVIDEND
                    label_patch = v[
                        x_a * NEW_WIDTH:x_a * NEW_WIDTH + NEW_WIDTH,
                        y_a * NEW_HEIGHT:y_a * NEW_HEIGHT + NEW_HEIGHT]
                    # Downsample the mask: max over num_pool x num_pool cells.
                    label_patch = skimage.measure.block_reduce(
                        label_patch, (num_pool, num_pool), np.max)
                    input_group.append(
                        np_image[
                            x_a * NEW_WIDTH:x_a * NEW_WIDTH + NEW_WIDTH,
                            y_a * NEW_HEIGHT:y_a * NEW_HEIGHT + NEW_HEIGHT])
                    fine_label.append(label_patch)
                count += 1
                if not count % 100:
                    print('Count: ' + str(count))
            except Exception as e:
                # BUG FIX: was a bare `except:` that also swallowed
                # KeyboardInterrupt/SystemExit and hid the actual error.
                print('ERR------------------')
                print(e)
                continue
        if len(input_group):
            # Output filenames are parameterized by grid size and pooling.
            suffix = '_' + str(DIVIDEND) + '_' + str(num_pool)
            input_group = np.array(input_group, dtype=np.uint8)
            fine_label = np.array(fine_label, dtype=np.uint8)
            if count < number_hold:
                print('TEST SAVE')
                append_binary_file(TEST_INPUT_SAVE + suffix,
                                   input_group.tobytes())
                append_binary_file(TEST_LABEL_SAVE + suffix,
                                   fine_label.tobytes())
            else:
                print('TRAIN SAVE')
                print(len(input_group))
                append_binary_file(TRAIN_INPUT_SAVE + suffix,
                                   input_group.tobytes())
                append_binary_file(TRAIN_LABEL_SAVE + suffix,
                                   fine_label.tobytes())
        else:
            print('EMPTY')
        file_dict = next(files_dict_gen, None)
    print('DOOOOOOOOONE')