def main():
    """Run a cross-validated experiment with multiple classifiers.

    Fetches the dataset via ``get_data('hasy')``, fits every classifier in
    ``classifiers`` on each fold (training data capped at 50 samples per
    class by ``max_k_samples``), writes per-classifier analysis to
    ``classifier-comp.md`` and finally prints a summary of all collected
    metrics.
    """
    # Classifiers to compare; extend this list to benchmark more models.
    classifiers = [
        ('k nn', KNeighborsClassifier(3)),
    ]
    print("Start getting data.")
    data = get_data('hasy')
    print("Got data. Start.")
    # Per-classifier list of per-fold metric dicts.
    classifier_data = {}
    with open('classifier-comp.md', 'w') as f:
        for clf_name, clf in classifiers:
            print(clf_name)
            classifier_data[clf_name] = []
            f.write("#" * 80)
            f.write("\n")
            f.write("Start fitting '%s' classifier.\n" % clf_name)
            for fold in data:
                # Cap the training set (presumably 50 samples per class —
                # confirm against max_k_samples) to keep fitting tractable.
                tmp = max_k_samples(fold['train']['X'],
                                    fold['train']['y'],
                                    50)
                fold['train']['X'], fold['train']['y'] = tmp
                print("Got %i training samples and %i test samples." %
                      (len(fold['train']['X']), len(fold['test']['X'])))
                t0 = time.time()
                # BUGFIX: removed the dead `examples = 10**9` slicing hack —
                # slicing with a bound far larger than any dataset was a
                # no-op, so fit on the full (already capped) training fold.
                clf.fit(fold['train']['X'], fold['train']['y'])
                t1 = time.time()
                an_data = analyze(clf, fold, t1 - t0,
                                  clf_name=clf_name, handle=f)
                classifier_data[clf_name].append({
                    'training_time': t1 - t0,
                    'testing_time': an_data['testing_time'],
                    'accuracy': an_data['accuracy']
                })
    # BUGFIX: pretty_print was called twice back-to-back, printing the
    # identical summary twice; one call suffices.
    pretty_print(classifier_data)
# NOTE(review): this fragment is the tail of a larger routine — it reads
# names bound earlier in the file (loops, batch_size, hasy, y_conv, x, cm,
# t0, results, MODEL_NAME, classifier_data, plus tf/np/time and the helpers
# write_analyzation_results / pretty_print).
# It evaluates the trained CNN on the test set in mini-batches, fills a
# confusion matrix, and records timing/accuracy results.

# Add one extra iteration to cover the remainder batch when the test-set
# size is not an exact multiple of batch_size.
if loops * batch_size < len(hasy.test.images):
    loops += 1
for i in range(loops):
    # Slice one mini-batch of test images and flatten to 32*32 vectors
    # (images are presumably 32x32 grayscale — confirm upstream).
    data = hasy.test.images[i * batch_size:(i + 1) * batch_size]
    data = data.reshape((-1, 32 * 32))
    # Predicted class index per sample from the network output y_conv.
    predicted = tf.argmax(y_conv, 1).eval(feed_dict={x: data})
    # Ground-truth class index from the matching one-hot label slice.
    actual = np.argmax(
        hasy.test.labels[i * batch_size:(i + 1) * batch_size], 1)
    # cm[actual][predicted]: rows are true classes, columns predictions.
    for pred, act in zip(predicted, actual):
        cm[act][pred] += 1
t1 = time.time()
# t0 is assumed to have been taken just before evaluation started —
# TODO confirm in the code above this fragment.
results['testing_time'] = t1 - t0
# Accuracy = trace of the confusion matrix / number of test samples.
# The hard-coded 369 is presumably the number of classes (HASYv2 has
# 369 symbol classes) — verify it matches cm's dimensions.
results['accuracy'] = (float(sum([cm[i][i] for i in range(369)])) /
                       len(hasy.test.images))
# Record this model's metrics alongside the other classifiers for the
# final comparison table. results['fit_time'] is set elsewhere.
classifier_data[MODEL_NAME].append({
    'training_time': results['fit_time'],
    'testing_time': results['testing_time'],
    'accuracy': results['accuracy']
})
# Append (not overwrite) the detailed analysis for this run.
with open("cnn-comp.md", "a") as handle:
    write_analyzation_results(handle, 'CNN %s' % MODEL_NAME, results, cm)
pretty_print(classifier_data)
# Save the variables to disk.
# save_path = saver.save(sess, model_checkpoint_path)
# print("Model saved in file: %s" % save_path)