def get_args(filename, sd, bn, im):
    """Convert an ARFF file into an args dict via ArffToArgs.

    The last attribute is used as the class index.

    Parameters:
        filename: path to the .arff input file.
        sd: enable standardization.
        bn: enable binarization.
        im: enable imputation.

    Returns:
        The dict produced by ArffToArgs.get_args().
    """
    x = ArffToArgs()
    try:
        x.set_input(filename)
        x.set_class_index("last")
        x.set_impute(im)
        x.set_binarize(bn)
        x.set_standardize(sd)
        return x.get_args()
    finally:
        # Bug fix: previously x.close() was skipped when any call above
        # raised, leaking the converter's underlying resource
        # (presumably a JVM-side handle — confirm against ArffToArgs).
        x.close()
# Example #2
# 0
def get_args(filename, sd, bn, im):
    """Convert an ARFF file into an args dict.

    The last attribute is taken as the class index; ``sd``, ``bn`` and
    ``im`` toggle standardization, binarization and imputation.
    """
    converter = ArffToArgs()
    converter.set_input(filename)
    converter.set_class_index("last")
    converter.set_impute(im)
    converter.set_binarize(bn)
    converter.set_standardize(sd)
    extracted = converter.get_args()
    converter.close()
    return extracted
        # NOTE(review): orphaned fragment — the `if "batch_size" in args:`
        # header and the enclosing function definition are missing from
        # this scrape; `return new` below has no visible function.
        bs = args["batch_size"]
    else:
        # Default batch size when the args dict omits one.
        bs = 128

    X_test = args["X_test"]

    # `iter_test` presumably returns an ndarray of predicted labels — confirm.
    preds = iter_test(X_test).tolist()

    new = []
    for pred in preds:
        # One-hot encode each predicted label via an identity-matrix row.
        new.append(np.eye(args["num_classes"])[pred].tolist())
    return new


if __name__ == '__main__':
    # Build training args from an ARFF dataset with imputation,
    # binarization and standardization all enabled.
    x = ArffToArgs()
    x.set_input("data/cpu_act.arff")
    x.set_class_index("last")
    x.set_impute(True)
    x.set_binarize(True)
    x.set_standardize(True)
    x.set_arguments(
        "adaptive=True;alpha=0.01;lambda=0;epochs=500;rmsprop=True")
    args = x.get_args()
    #args["debug"] = True

    # Evaluate on the training data itself — no held-out test split here.
    args["X_test"] = np.asarray(args["X_train"], dtype="float32")

    model = train(args)

    test(args, model)
    # NOTE(review): everything below is an orphaned fragment of a
    # different function (a scrape/concatenation artifact) — the
    # module-level `return new` is a SyntaxError as written.
    if "batch_size" in args:
        bs = args["batch_size"]
    else:
        bs = 128

    X_test = args["X_test"]

    preds = iter_test(X_test).tolist()

    new = []
    for pred in preds:
        # One-hot encode each predicted label.
        new.append( np.eye(args["num_classes"])[pred].tolist() )
    return new

if __name__ == '__main__':
    # Same pipeline as the previous example: ARFF in, preprocess, train, test.
    x = ArffToArgs()
    x.set_input("data/cpu_act.arff")
    x.set_class_index("last")
    x.set_impute(True)
    x.set_binarize(True)
    x.set_standardize(True)
    x.set_arguments("adaptive=True;alpha=0.01;lambda=0;epochs=500;rmsprop=True")
    args = x.get_args()
    #args["debug"] = True

    # Score on the training inputs themselves.
    args["X_test"] = np.asarray(args["X_train"], dtype="float32")

    model = train(args)

    test(args, model)
    # NOTE(review): the over-indented lines below are an orphaned fragment
    # of a test() function (scrape artifact) — as written this is an
    # IndentationError; `preds` and `expectation` are not defined here.
        if "expectation" in args:
            new = []
            for pred in preds:
                # Round the distribution's expected value to a class label.
                label = int( round(expectation(pred)) )
                new.append( np.eye(args["num_classes"])[label].tolist() )
            return new
        else:
            return preds

if __name__ == '__main__':
    x = ArffToArgs()
    #x.set_input("data/auto_price.arff")
    # Supply default CLI arguments (dataset path and mode) when not given.
    if len(sys.argv) != 3:
        sys.argv.append("data/2dplanes.arff")
        sys.argv.append("kappa")
    x.set_input( sys.argv[1] )
    print "Training on: %s" % sys.argv[1]
    x.set_class_index("last")
    x.set_impute(True)
    x.set_binarize(True)
    x.set_standardize(True)
    # Two modes: "kappa" (ordinal/expectation-based) vs plain "regression".
    if sys.argv[2] == "kappa":
        #x.set_arguments("expectation=True;a=1;b=0;logistic=True;alpha=0.1;rmsprop=True;epochs=5000")
        x.set_arguments("expectation=True;a=1;b=0;logistic=True;alpha=0.1;schedule=500;epochs=5000")
    elif sys.argv[2] == "regression":
        #x.set_arguments("regression=True;alpha=0.1;rmsprop=True;epochs=5000")
        x.set_arguments("regression=True;alpha=0.1;schedule=500;epochs=5000")
    else:
        # Unknown mode: report and fall through (no exit).
        print "error!"
    args = x.get_args()
    args["debug"] = False
            # NOTE(review): orphaned tail of a train() epoch loop — the
            # enclosing function, loop headers, `b`, `bs`, `filenames`,
            # `iter_train` and `symbols` are missing from this scrape.
            if b*bs >= len(filenames):
                break
            X_train_batch = get_batch(filenames, b*bs, (b+1)*bs)
            #print (X_train_batch.shape)

            #sys.stderr.write("  Batch #%i (%i-%i)\n" % ((b+1), (b*bs), ((b+1)*bs) ))
            loss, loss_flat = iter_train( X_train_batch )
            batch_train_losses.append(loss)
            print (loss, loss_flat, sum(loss_flat > 0))
        # Visualize conv activations for a single sample each epoch.
        helper.plot_conv_activity( symbols.conv_layer, X_train_batch[1:2] )

        sys.stderr.write( "  train_loss = %f\n" % \
            (np.mean(batch_train_losses)) )

    current_weights = lasagne.layers.get_all_param_values(symbols.output_layer)

    # Return a printable network description plus the learned weights.
    return (print_network(symbols.output_layer), current_weights)

if __name__ == '__main__':
    # Pull the MNIST metadata, then layer training hyper-parameters on top.
    meta = ArffToArgs()
    meta.set_input("../mnist/mnist.meta.arff")
    args = meta.get_args()
    meta.close()

    args.update({
        "lambda": 0,
        "alpha": 0.1,
        "epochs": 10,
        "dir": "../mnist/data",
    })

    weights = train(args)
# Example #7
# 0
    # NOTE(review): orphaned tail of a test() function — the def header,
    # `filenames`, `load_image` and `iter_test` are missing from this scrape.
    X_test = np.asarray([load_image(x) for x in filenames], dtype="float32")

    if "batch_size" in args:
        bs = args["batch_size"]
    else:
        bs = 128

    # NOTE(review): `bs` is computed but never used below — dead code,
    # or the batching loop was lost in truncation.
    preds = iter_test(X_test).tolist()

    return preds


if __name__ == '__main__':
    # Read the metadata, configure hyper-parameters, then train and
    # re-score the model on its own training inputs.
    meta = ArffToArgs()
    meta.set_input("mnist.meta.arff")
    args = meta.get_args()
    meta.close()

    args.update({
        "lambda": 0,
        "alpha": 0.01,
        "epochs": 10,
        "dir": "data",
    })

    weights = train(args)

    args["X_test"] = args["X_train"]
    preds = test(args, weights)

    print("done!")
# Example #8
# 0
                # NOTE(review): orphaned tail of a train() epoch loop — the
                # enclosing function, loop headers, `b`, `bs`, `filenames`
                # and `symbols` are missing from this scrape.
                break
            X_train_batch = get_batch(filenames, b * bs, (b + 1) * bs)
            #print (X_train_batch.shape)

            #sys.stderr.write("  Batch #%i (%i-%i)\n" % ((b+1), (b*bs), ((b+1)*bs) ))
            loss, loss_flat = iter_train(X_train_batch)
            batch_train_losses.append(loss)
            print(loss, loss_flat, sum(loss_flat > 0))
        # Visualize conv activations for a single sample each epoch.
        helper.plot_conv_activity(symbols.conv_layer, X_train_batch[1:2])

        sys.stderr.write( "  train_loss = %f\n" % \
            (np.mean(batch_train_losses)) )

    current_weights = lasagne.layers.get_all_param_values(symbols.output_layer)

    # Return a printable network description plus the learned weights.
    return (print_network(symbols.output_layer), current_weights)


if __name__ == '__main__':
    # Read MNIST metadata, then attach the training hyper-parameters.
    meta = ArffToArgs()
    meta.set_input("../mnist/mnist.meta.arff")
    args = meta.get_args()
    meta.close()

    args.update({
        "lambda": 0,
        "alpha": 0.1,
        "epochs": 10,
        "dir": "../mnist/data",
    })

    weights = train(args)
# Example #9
# 0
from pyscript.pyscript import ArffToArgs

# test saving the pkl to an output file

f = ArffToArgs()
f.set_input("../datasets/iris.arff")
args = f.get_args()
f.save("iris.pkl.gz")
f.close()

# test normal

f = ArffToArgs()
f.set_input("../datasets/iris.arff")
args = f.get_args()
print f.output
f.close()
@uses(["dir"])
def train(args):
    """Fit the conv net on args["X_train"] / args["y_train"].

    Returns a dict holding the captured, de-coloured fit log under
    "results" and the trained parameter values under "params".
    """
    image_paths = [
        args["dir"] + os.path.sep + name
        for name in args["attr_values"]["filename"]
    ]
    labels = np.asarray(args["y_train"].flatten(), dtype="int32")
    network = get_conv_net(image_paths)
    # Capture everything the fit loop prints so it can be returned.
    with Capturing() as captured:
        network.fit(args["X_train"], labels)
    return {
        "results": remove_colour("\n".join(captured)),
        "params": network.get_all_params_values(),
    }

def describe(args, model):
    """Return the captured training log stored under model["results"]."""
    results = model["results"]
    return results

def test(args, model):
    """Predict class probabilities for args["X_test"] with trained params."""
    image_paths = [
        args["dir"] + os.path.sep + name
        for name in args["attr_values"]["filename"]
    ]
    network = get_conv_net(image_paths)
    network.initialize()
    network.load_params_from(model["params"])
    probabilities = network.predict_proba(args["X_test"])
    return probabilities.tolist()

if __name__ == "__main__":
    # Load metadata, train, and echo the captured fit log.
    meta = ArffToArgs()
    meta.set_input("mnist.meta.arff")
    args = meta.get_args()
    meta.close()

    args["dir"] = "data"
    outcome = train(args)
    print(outcome["results"])
# Example #11
# 0
            # NOTE(review): orphaned fragment of a test() function — the
            # enclosing def, the `if "expectation" in args:` header, and
            # `preds`/`expectation` are all missing from this scrape.
            new = []
            for pred in preds:
                # Round the distribution's expected value to a class label.
                label = int(round(expectation(pred)))
                new.append(np.eye(args["num_classes"])[label].tolist())
            return new
        else:
            return preds


if __name__ == '__main__':
    # NOTE(review): this script is truncated at the end of the visible
    # chunk — a fuller variant continues with x.get_args() etc.
    x = ArffToArgs()
    #x.set_input("data/auto_price.arff")
    # Supply default CLI arguments (dataset path and mode) when not given.
    if len(sys.argv) != 3:
        sys.argv.append("data/2dplanes.arff")
        sys.argv.append("kappa")
    x.set_input(sys.argv[1])
    print "Training on: %s" % sys.argv[1]
    x.set_class_index("last")
    x.set_impute(True)
    x.set_binarize(True)
    x.set_standardize(True)
    # Two modes: "kappa" (ordinal/expectation-based) vs plain "regression".
    if sys.argv[2] == "kappa":
        #x.set_arguments("expectation=True;a=1;b=0;logistic=True;alpha=0.1;rmsprop=True;epochs=5000")
        x.set_arguments(
            "expectation=True;a=1;b=0;logistic=True;alpha=0.1;schedule=500;epochs=5000"
        )
    elif sys.argv[2] == "regression":
        #x.set_arguments("regression=True;alpha=0.1;rmsprop=True;epochs=5000")
        x.set_arguments("regression=True;alpha=0.1;schedule=500;epochs=5000")
    else:
        # Unknown mode: report and fall through (no exit).
        print "error!"