Example #1
def main(filename, frames, batch_size, num_classes, input_length):
    # Get our data.
    X_train, X_test, y_train, y_test = get_data(
        filename, frames, num_classes, input_length, True)

    # Get sizes.
    print ("y_train[0] : ", y_train)
    num_classes = len(y_train[0])
    print ("Num classes : - ", num_classes)
    # print "Y train : ", y_train[0]

    # Get our network.
    # net = get_network_deep(frames, input_length, num_classes)
    net = get_network_wide(frames, input_length, num_classes)

    # Train the model.
    checkpoint_dir = os.path.join(os.getcwd(), "checkpoints")
    if not os.path.exists(checkpoint_dir):
        os.makedirs(checkpoint_dir)

    model = tflearn.DNN(net, tensorboard_verbose=0)
    if os.path.exists('checkpoints/rnn.tflearn'):
        print("Model already exists! Loading it")
        model.load('checkpoints/rnn.tflearn', weights_only=True)
        print("Model Loaded")
    
    model.fit(X_train, y_train, validation_set=(X_test, y_test),
              show_metric=True, batch_size=batch_size, snapshot_step=100,
              n_epoch=10)

    # Save it.
    print ("Do you wanna save the model and overwrite? y or n")
    x = input()
    if(x == "y"):
        model.save('checkpoints/rnn.tflearn')
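Every example in this listing relies on helper functions such as get_data, get_network_wide, and get_network_deep that are defined elsewhere in the repository. For orientation only, here is a minimal sketch of what a "wide" tflearn LSTM network with this calling convention could look like; the layer width, dropout, optimizer, and learning rate are assumed values, not the repository's actual ones.

import tflearn

def get_network_wide(frames, input_size, num_classes):
    """Hypothetical sketch of the wide LSTM network assumed by these examples."""
    # Input: a sequence of `frames` feature vectors, each of length `input_size`.
    net = tflearn.input_data(shape=[None, frames, input_size])
    # One wide LSTM layer; 256 units and 0.2 dropout are assumptions.
    net = tflearn.lstm(net, 256, dropout=0.2)
    # Softmax classifier over the classes.
    net = tflearn.fully_connected(net, num_classes, activation='softmax')
    # Categorical cross-entropy with Adam; the learning rate is an assumption.
    net = tflearn.regression(net, optimizer='adam',
                             loss='categorical_crossentropy',
                             learning_rate=0.001)
    return net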
Example #2
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, _, y_train, _ = get_data(filename, frames, num_classes,
                                      input_length)

    # Get sizes.
    num_classes = len(y_train[0])

    # Get our network.
    net = get_network_wide(frames, input_length, num_classes)

    # Get our model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.load(RNN_MODEL)

    print(f"Y: {y_train}")

    # Evaluate.
    # res = model.evaluate(X_train, y_train)  # Evaluates model accuracy.
    res = model.predict(X_train)  # Predicts the probability of each class.
    print(f"RESULT: {res}")
    print(f"RESULT type: {type(res)}")
    print(f"RESULT shape: {res.shape}")

    res = model.predict_label(
        X_train
    )  # Predicts bitmask of classes (1 if it is that class, 0 if not)
    print(f"RESULT LABEL: {res}")
Example #3
def main(input_data_dump, num_frames_per_video, batch_size, labels, model_file):
    # Get our data.
    X_train, X_test, y_train, y_test = get_data(input_data_dump, num_frames_per_video, labels, True)

    num_classes = len(labels)
    size_of_each_frame = X_train.shape[2]

    # Get our network.
    net = get_network_wide(num_frames_per_video, size_of_each_frame, num_classes)

    # Train the model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    try:
        model.load('checkpoints/' + model_file)
        print("\nModel already exists! Loading it")
        print("Model Loaded")
    except Exception:
        print("\nNo previous checkpoints of %s exist" % model_file)

    model.fit(X_train, y_train, validation_set=(X_test, y_test),
              show_metric=True, batch_size=batch_size, snapshot_step=100,
              n_epoch=100)

    # Save it.
    x = input("Do you want to save the model and overwrite? (y/n): ")
    if x.strip().lower() == "y":
        model.save('checkpoints/' + model_file)
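The main functions in this listing are presumably called from a command-line entry point that is not shown. A hedged sketch of what such an entry point could look like for this example's signature; the argument names, defaults, and label mapping below are assumptions for illustration, not the repository's actual CLI.

import argparse

if __name__ == '__main__':
    # Hypothetical CLI wrapper around main(); every default here is an assumption.
    parser = argparse.ArgumentParser()
    parser.add_argument('--input_data_dump', default='data/input_data.pkl')
    parser.add_argument('--num_frames_per_video', type=int, default=40)
    parser.add_argument('--batch_size', type=int, default=32)
    parser.add_argument('--model_file', default='rnn.tflearn')
    args = parser.parse_args()

    # `labels` maps class names to indices; the real mapping lives elsewhere in the repo.
    labels = {'class_a': 0, 'class_b': 1}

    main(args.input_data_dump, args.num_frames_per_video, args.batch_size,
         labels, args.model_file)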
Example #4
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, X_test, y_train, y_test = get_data(filename, frames, num_classes,
                                                input_length, True)

    # Get sizes.
    print "y_train[0] : ", y_train
    num_classes = len(y_train[0])
    print "Num classes : - ", num_classes
    # print "Y train : ", y_train[0]

    # Get our network.
    # net = get_network_deep(frames, input_length, num_classes)
    net = get_network_wide(frames, input_length, num_classes)

    # Train the model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.load('checkpoints/rnn.tflearn')
    print "Model Loaded"
    model.fit(X_train,
              y_train,
              validation_set=(X_test, y_test),
              show_metric=True,
              batch_size=batch_size,
              snapshot_step=100,
              n_epoch=10)

    # Save it.
    print "Do you wanna save the model and overwrite? y or n"
    x = raw_input()
    if (x == "y"):
        model.save('checkpoints/rnn.tflearn')
Example #5
def main(input_data_dump, num_frames_per_video, batch_size, labels, model_file):
    # Get our data.
    X, Y = get_data(input_data_dump, num_frames_per_video, labels, False)

    num_classes = len(labels)
    size_of_each_frame = X.shape[2]

    # Get our network.
    net = get_network_wide(num_frames_per_video, size_of_each_frame, num_classes)

    # Train the model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    try:
        model.load('checkpoints/' + model_file)
        print("\nModel Exists! Loading it")
        print("Model Loaded")
    except Exception:
        print("\nNo previous checkpoints of %s exist" % (model_file))
        print("Exiting..")
        sys.exit()

    predictions = model.predict(X)
    predictions = np.array([np.argmax(pred) for pred in predictions])
    Y = np.array([np.argmax(each) for each in Y])

    # Writing predictions and gold labels to file
    rev_labels = dict(zip(list(labels.values()), list(labels.keys())))
    print(rev_labels)
    with open("result.txt", "w") as f:
        f.write("gold, pred\n")
        for a, b in zip(Y, predictions):
            f.write("%s %s\n" % (rev_labels[a], rev_labels[b]))

    acc = 100 * np.sum(predictions == Y) / len(Y)
    print("Accuracy: ", acc)
Example #6
def main(filename, frames, batch_size, num_classes, input_length,
         output_model):
    """From the blog post linked above."""
    # Get our data.
    X_train, X_test, y_train, y_test = get_data(filename, frames, num_classes,
                                                input_length)

    # Get sizes.
    num_classes = len(y_train[0])

    # Get our network.
    net = get_network_wide(frames, input_length, num_classes)

    # Train the model.
    print(f"Y_TEST[0]: {y_test[0]}")
    print(f"Y_TEST[0] tipo: {type(y_test[0])}, shape: {y_test[0].shape}")

    input()
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.fit(X_train,
              y_train,
              validation_set=(X_test, y_test),
              show_metric=True,
              batch_size=batch_size,
              snapshot_step=100,
              n_epoch=4)

    # Save it.
    model.save(output_model)
Example #7
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, X_test, y_train, y_test = get_data(
        filename, frames, num_classes, input_length, True)

    # Get sizes.
    print ("y_train[0] : ", y_train)
    num_classes = len(y_train[0])
    print ("Num classes : - ", num_classes)
    # print "Y train : ", y_train[0]

    # Get our network.
    # net = get_network_deep(frames, input_length, num_classes)
    net = get_network_deep(frames, input_length, num_classes)

    # Train the model.
    model = tflearn.DNN(net, max_checkpoints=1, tensorboard_verbose=0)
    if os.path.exists('checkpoints/rnn.tflearn'):
        print("Model already exists! Loading it")
        model.load('checkpoints/rnn.tflearn')
        print("Model Loaded")

    model.fit(X_train, y_train, validation_set=(X_test, y_test),
              show_metric=True, batch_size=batch_size, snapshot_step=100,
              n_epoch=10, run_id='name_model')


    # Save it.
    x = input("Do you wanna save the model and overwrite? y or n")
    if(x == "y"):
        model.save('checkpoints/rnn.tflearn')
Example #8
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, X_test, y_train, y_test = get_data(filename, frames, num_classes,
                                                input_length)

    # Get sizes.
    num_classes = len(y_train[0])

    # Get our network.
    net = get_network_wide(frames, input_length, num_classes)

    # Train the model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.fit(X_train,
              y_train,
              validation_set=(X_test, y_test),
              show_metric=True,
              batch_size=batch_size,
              snapshot_step=100,
              n_epoch=4)

    # Save it.
    model.save('checkpoints/rnn.tflearn')
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.load('checkpoints/rnn.tflearn')

    # Evaluate. Note that although we're passing in "train" data,
    # this is actually our holdout dataset, so we never actually
    # used it to train the model. Bad variable semantics.
    print(model.evaluate(X_train, y_train))
Example #9
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, y_train = get_data(
        filename, frames, num_classes, input_length, False)
    # print X_train
    # print y_train

    # Get sizes.
    print ("Y train :- ", y_train[0])
    num_classes = len(y_train[0])

    # Get our network.
    net = get_network_wide(frames, input_length, num_classes)

    checkpoint_dir = os.path.join(os.getcwd(), "checkpoints")
    if not os.path.exists(checkpoint_dir):
        os.makedirs(checkpoint_dir)
    print("HELLO1")
    # Get our model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.load('checkpoints/rnn.tflearn', weights_only=True)
    
    print ("HELLO2")

    # Evaluate.
    hc = model.predict(X_train)
    print ("HELLO3")
    hc = [np.argmax(every) for every in hc]
    print ("HELLO4")
    aadi = [np.argmax(every) for every in y_train]
    print ("HELLO5")
    print ("l1 :", len(aadi))
    print ("HELLO6")
    print ("l2 ", len(hc))
    print ("HELLO7")
    answer = []

    for i in range(0, len(hc)):
        answer.append([aadi[i], hc[i]])
    
    print ("HELLO8")

    answer.sort()
    print ("HELLO9")
    f = open("results.txt", "wb")
    print ("HELLO10")
    for x in answer:
        print (x[0], x[1])
        f.write(str(x[0])+" "+str(x[1])+"\n")
    
    print ("HELLO11")

    print(model.evaluate(X_train, y_train))
Example #10
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, _, y_train, _ = get_data(filename, frames, num_classes, input_length)

    # Get sizes.
    num_classes = len(y_train[0])

    # Get our network.
    net = get_network_wide(frames, input_length, num_classes)

    # Get our model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.load('checkpoints/rnn.tflearn')

    # Evaluate.
    print(model.evaluate(X_train, y_train))
Example #11
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, X_test, y_train, y_test = get_data(filename, frames, num_classes, input_length)

    # Get sizes.
    num_classes = len(y_train[0])

    # Get our network.
    net = get_network_wide(frames, input_length, num_classes)

    # Train the model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.fit(X_train, y_train, validation_set=(X_test, y_test),
              show_metric=True, batch_size=batch_size, snapshot_step=100,
              n_epoch=4)

    # Save it.
    model.save('checkpoints/rnn.tflearn')
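All of these examples implicitly assume that get_data returns X shaped (num_samples, frames, input_length) and one-hot labels y shaped (num_samples, num_classes); that is inferred from the tflearn.input_data shape and len(y_train[0]), not stated in the listing. A quick smoke test of the network under that assumption, with random stand-in data and an assumed feature length:

import numpy as np
import tflearn

# Assumed dimensions for illustration only.
frames, input_length, num_classes = 40, 2048, 2

# Random stand-in data with the assumed shapes.
X = np.random.rand(8, frames, input_length)
y = np.eye(num_classes)[np.random.randint(num_classes, size=8)]  # one-hot labels

# Uses the get_network_wide helper assumed throughout this listing.
net = get_network_wide(frames, input_length, num_classes)
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(X, y, n_epoch=1, batch_size=4, show_metric=True)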
Example #12
def main(filename, frames, batch_size, num_classes, input_length):
    """From the blog post linked above."""
    # Get our data.
    X_train, y_train = get_data(filename, frames, num_classes, input_length,
                                False)
    # print X_train
    # print y_train

    # Get sizes.
    # print ("Y train :- ", y_train)
    print("X_train:-", X_train[0])
    num_classes = len(y_train[0])

    # Get our network.
    net = get_network_wide(frames, input_length, num_classes)

    # Get our model.
    model = tflearn.DNN(net, tensorboard_verbose=0)
    model.load('checkpoints/rnn.tflearn')
    # model.load('checkpoints_backup/rnn.tflearn')

    # Evaluate.
    hc = model.predict(X_train)
    hc = [np.argmax(every) for every in hc]
    print(hc)
    aadi = [np.argmax(every) for every in y_train]
    print("l1 :", len(aadi))
    print("l2 ", len(hc))
    answer = []

    for i in range(0, len(hc)):
        answer.append([aadi[i], hc[i]])

    answer.sort()
    print(answer)
    f = open("results.txt", "w")
    for x in answer:
        print(x[0], x[1])
        f.write(str(x[0]) + " " + str(x[1]) + "\n")

    print(model.evaluate(X_train, y_train))