Example #1
def get_args(filename, sd, bn, im):
    # Convert an ARFF file into the args dictionary used below, toggling
    # standardization (sd), binarization (bn), and imputation (im).
    x = ArffToArgs()
    x.set_input(filename)
    x.set_class_index("last")

    x.set_impute(im)
    x.set_binarize(bn)
    x.set_standardize(sd)

    args = x.get_args()
    x.close()
    return args
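A minimal usage sketch for get_args (not part of the original snippet): the wekapyscript import path is an assumption, and X_train is one of the keys the other fragments on this page read from the returned dict.

from wekapyscript import ArffToArgs  # assumed import path, not shown in the snippets
import numpy as np

# Hypothetical call: impute, binarize, and standardize data/cpu_act.arff,
# then inspect what get_args returned.
args = get_args("data/cpu_act.arff", sd=True, bn=True, im=True)
print(sorted(args.keys()))
print(np.asarray(args["X_train"], dtype="float32").shape)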
    if "batch_size" in args:
        bs = args["batch_size"]
    else:
        bs = 128

    X_test = args["X_test"]

    preds = iter_test(X_test).tolist()

    new = []
    for pred in preds:
        new.append(np.eye(args["num_classes"])[pred].tolist())
    return new
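The np.eye indexing above converts a predicted class index into a one-hot row; a standalone illustration with made-up values:

import numpy as np

num_classes = 3          # made-up value for illustration
preds = [0, 2, 1]        # made-up predicted class indices
one_hot = [np.eye(num_classes)[p].tolist() for p in preds]
print(one_hot)  # [[1.0, 0.0, 0.0], [0.0, 0.0, 1.0], [0.0, 1.0, 0.0]]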


if __name__ == '__main__':
    x = ArffToArgs()
    x.set_input("data/cpu_act.arff")
    x.set_class_index("last")
    x.set_impute(True)
    x.set_binarize(True)
    x.set_standardize(True)
    x.set_arguments(
        "adaptive=True;alpha=0.01;lambda=0;epochs=500;rmsprop=True")
    args = x.get_args()
    #args["debug"] = True

    # No separate test set here: reuse the training matrix as X_test.
    args["X_test"] = np.asarray(args["X_train"], dtype="float32")

    model = train(args)

    test(args, model)
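set_arguments above takes a semicolon-separated key=value string; the sketch below (parse_arguments is a hypothetical helper, not the library's own parser) only shows what that string encodes.

def parse_arguments(s):
    # Split "k1=v1;k2=v2;..." into a plain dict of strings.
    return dict(pair.split("=", 1) for pair in s.split(";"))

print(parse_arguments("adaptive=True;alpha=0.01;lambda=0;epochs=500;rmsprop=True"))
# {'adaptive': 'True', 'alpha': '0.01', 'lambda': '0', 'epochs': '500', 'rmsprop': 'True'}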
            if b*bs >= len(filenames):
                break
            X_train_batch = get_batch(filenames, b*bs, (b+1)*bs)
            #print (X_train_batch.shape)

            #sys.stderr.write("  Batch #%i (%i-%i)\n" % ((b+1), (b*bs), ((b+1)*bs) ))
            loss, loss_flat = iter_train(X_train_batch)
            batch_train_losses.append(loss)
            print (loss, loss_flat, sum(loss_flat > 0))
        # Visualise the conv layer's activations for one training example
        # at the end of each epoch.
        helper.plot_conv_activity(symbols.conv_layer, X_train_batch[1:2])

        sys.stderr.write("  train_loss = %f\n" % np.mean(batch_train_losses))

    # Snapshot the learned parameter values so the caller can keep them.
    current_weights = lasagne.layers.get_all_param_values(symbols.output_layer)

    return (print_network(symbols.output_layer), current_weights)
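get_batch is called in the loop above but not shown in this fragment; a minimal sketch of what it might do, assuming a load_image helper like the one used in the MNIST example further down:

import numpy as np

def get_batch(filenames, start, end):
    # Load the images for one mini-batch slice into a float32 array.
    return np.asarray([load_image(f) for f in filenames[start:end]],
                      dtype="float32")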

if __name__ == '__main__':

    f = ArffToArgs()
    f.set_input("../mnist/mnist.meta.arff")
    args = f.get_args()
    f.close()
    args["lambda"] = 0
    args["alpha"] = 0.1
    args["epochs"] = 10
    args["dir"] = "../mnist/data"

    weights = train(args)
Example #6
    # Load every listed image into a single float32 array for prediction.
    X_test = np.asarray([load_image(x) for x in filenames], dtype="float32")

    if "batch_size" in args:
        bs = args["batch_size"]
    else:
        bs = 128

    preds = iter_test(X_test).tolist()

    return preds
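load_image itself is not defined in these fragments; a plausible stand-in using Pillow (the grayscale conversion and [0, 1] scaling are assumptions):

import numpy as np
from PIL import Image

def load_image(filename):
    # Read one image as grayscale and scale pixel values to [0, 1].
    img = Image.open(filename).convert("L")
    return np.asarray(img, dtype="float32") / 255.0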


if __name__ == '__main__':

    f = ArffToArgs()
    f.set_input("mnist.meta.arff")
    args = f.get_args()
    f.close()
    args["lambda"] = 0
    args["alpha"] = 0.01
    args["epochs"] = 10
    args["dir"] = "data"

    weights = train(args)

    # Again predict on the training data rather than a held-out test set.
    args["X_test"] = args["X_train"]

    preds = test(args, weights)

    print("done!")