def test_balance(unbalanced_data):
    """Balanced stream alternates labels 1/0 pairwise, then is exhausted."""
    stream = utils.balance(unbalanced_data)
    expected = [
        ("data1", 1),
        ("data3", 0),
        ("data2", 1),
        ("data4", 0),
        ("data6", 1),
        ("data5", 0),
    ]
    for couple in expected:
        assert next(stream) == couple
    # After the six balanced couples the generator must be exhausted.
    with pytest.raises(StopIteration):
        next(stream)
def test_balance_unbalanced_data():
    """With 3 positives and 2 negatives, balance() interleaves pairs and
    emits the leftover positive last, then stops."""
    unbalanced_tuples = [
        ("data1", 1),
        ("data2", 1),
        ("data3", 1),
        ("data4", 0),
        ("data5", 0),
    ]
    stream = utils.balance(unbalanced_tuples)
    assert next(stream) == ("data1", 1)
    assert next(stream) == ("data4", 0)
    assert next(stream) == ("data2", 1)
    assert next(stream) == ("data5", 0)
    assert next(stream) == ("data3", 1)
    with pytest.raises(StopIteration):
        next(stream)
def gen_func(images):
    """Return a balanced generator of couples built from *images*."""
    couples = couples_generator(images)
    return utils.balance(couples)
# Build infinite, batched iterators of balanced image couples for train/test.
train_iterator = utils.CouplesIterator(
    utils.make_infinite(gen_func, images_train),
    input_shape, data_gen, BATCH_SIZE)
test_iterator = utils.CouplesIterator(
    utils.make_infinite(gen_func, images_test),
    input_shape, data_gen, BATCH_SIZE)

model = network.create(input_shape)
network.compile(model)
# Checkpoint the freshly-compiled model; overwritten with trained weights below.
model.save('pretrain.h5')
# FIX: use integer floor division — Keras expects integer step counts,
# and `/` would pass a float.
model.fit_generator(train_iterator,
                    steps_per_epoch=train_couples_len // BATCH_SIZE,
                    epochs=EPOCHS)
model.save('pretrain.h5')

score = model.evaluate_generator(test_iterator,
                                 steps=test_couples_len // BATCH_SIZE)
print(score)

# Prediction sanity check on a small slice of the test set.
predict_iterator = utils.CouplesIterator(
    utils.make_infinite(gen_func, images_test[:100]),
    input_shape, data_gen)
# FIX: count couples over the SAME slice the iterator consumes
# (was counted over the full images_test, giving a wrong step count).
predict_couples_len = sum(
    1 for _ in utils.balance(couples_generator(images_test[:100])))
predictions = model.predict_generator(predict_iterator,
                                      steps=predict_couples_len // BATCH_SIZE)
print(predictions)
# Print the ground-truth labels for the same slice for manual comparison.
print([a[1] for a in utils.balance(couples_generator(images_test[:100]))])
def inf_couples_generator(images):
    """Endlessly yield balanced couples, reshuffling *images* in place
    before each full pass."""
    while True:
        random.shuffle(images)
        yield from utils.balance(couples_generator(images))