# NOTE(review): fragment — the enclosing `def` line is outside this view, and
# the indent of the first line does not match the body below (broken paste).
if "batch_size" in args:
        bs = args["batch_size"]
    else:
        bs = 128

    # NOTE(review): `bs` is never used in the visible lines — batching
    # presumably happens elsewhere in the full function; verify upstream.
    X_test = args["X_test"]

    # iter_test is defined outside this view — presumably a compiled
    # Theano/Lasagne prediction function returning an array; TODO confirm
    preds = iter_test(X_test).tolist()

    # Convert each integer class prediction into a one-hot list by
    # selecting the corresponding row of an identity matrix.
    new = []
    for pred in preds:
        new.append( np.eye(args["num_classes"])[pred].tolist() )
    return new

if __name__ == '__main__':
    # Build the argument dict from the cpu_act ARFF dataset, with the class
    # attribute last and all preprocessing steps enabled.
    loader = ArffToArgs()
    loader.set_input("data/cpu_act.arff")
    loader.set_class_index("last")
    loader.set_impute(True)
    loader.set_binarize(True)
    loader.set_standardize(True)
    loader.set_arguments("adaptive=True;alpha=0.01;lambda=0;epochs=500;rmsprop=True")
    args = loader.get_args()
    # optionally: args["debug"] = True

    # Evaluate on the training data itself — no held-out split here.
    args["X_test"] = np.asarray(args["X_train"], dtype="float32")

    trained_model = train(args)

    test(args, trained_model)
        # NOTE(review): orphan fragment — the enclosing function and the `if`
        # that matches the `else` below are outside this view.
        # One-hot encode each integer prediction via identity-matrix rows.
        new = []
        for pred in preds:
            new.append( np.eye(args["num_classes"])[pred].tolist() )
        return new
    else:
        if "expectation" in args:
            # Treat each prediction as a distribution: take the rounded
            # expected label (expectation() is defined elsewhere), then
            # one-hot encode that label.
            new = []
            for pred in preds:
                label = int( round(expectation(pred)) )
                new.append( np.eye(args["num_classes"])[label].tolist() )
            return new
        else:    
            # No post-processing requested — return raw predictions.
            return preds

# NOTE(review): this __main__ block is truncated — the final `elif` branch has
# no body in this view; the file continues past the visible lines.
if __name__ == '__main__':
    x = ArffToArgs()
    #x.set_input("data/auto_price.arff")
    # Default to the 2dplanes dataset with the "kappa" objective when no
    # CLI arguments were supplied.
    if len(sys.argv) != 3:
        sys.argv.append("data/2dplanes.arff")
        sys.argv.append("kappa")
    x.set_input( sys.argv[1] )
    print "Training on: %s" % sys.argv[1]
    x.set_class_index("last")
    x.set_impute(True)
    x.set_binarize(True)
    x.set_standardize(True)
    # Objective chosen by the second CLI argument: "kappa" enables the
    # expectation trick; the "regression" branch body is cut off below.
    if sys.argv[2] == "kappa":
        #x.set_arguments("expectation=True;a=1;b=0;logistic=True;alpha=0.1;rmsprop=True;epochs=5000")
        x.set_arguments("expectation=True;a=1;b=0;logistic=True;alpha=0.1;schedule=500;epochs=5000")
    elif sys.argv[2] == "regression":
        #x.set_arguments("regression=True;alpha=0.1;rmsprop=True;epochs=5000")
from pyscript.pyscript import ArffToArgs

# test saving the pkl to an output file

f = ArffToArgs()
f.set_input("../datasets/iris.arff")
args = f.get_args()
f.save("iris.pkl.gz")
f.close()

# test normal

f = ArffToArgs()
f.set_input("../datasets/iris.arff")
args = f.get_args()
print f.output
f.close()
            # NOTE(review): fragment — the enclosing `def` and the epoch/batch
            # loops are outside this view; `b`, `bs`, `filenames`, `iter_train`,
            # `helper`, and `symbols` are all defined there.
            if b*bs >= len(filenames):
                break
            # Mini-batch slice [b*bs, (b+1)*bs) of the training filenames.
            X_train_batch = get_batch(filenames, b*bs, (b+1)*bs)
            #print (X_train_batch.shape)

            #sys.stderr.write("  Batch #%i (%i-%i)\n" % ((b+1), (b*bs), ((b+1)*bs) ))
            # iter_train returns a scalar loss plus a per-element loss array.
            loss, loss_flat = iter_train( X_train_batch )
            batch_train_losses.append(loss)
            print (loss, loss_flat, sum(loss_flat > 0))
        # Visualize the conv layer's activations on a single example each epoch.
        helper.plot_conv_activity( symbols.conv_layer, X_train_batch[1:2] )

        sys.stderr.write( "  train_loss = %f\n" % \
            (np.mean(batch_train_losses)) )

    # Snapshot the learned weights of the whole Lasagne network.
    current_weights = lasagne.layers.get_all_param_values(symbols.output_layer)

    return (print_network(symbols.output_layer), current_weights)

if __name__ == '__main__':

    # Load the MNIST meta ARFF and release the converter immediately.
    loader = ArffToArgs()
    loader.set_input("../mnist/mnist.meta.arff")
    args = loader.get_args()
    loader.close()

    # Training hyper-parameters: no weight decay, learning rate 0.1, 10 epochs.
    args["alpha"] = 0.1
    args["lambda"] = 0
    args["epochs"] = 10
    args["dir"] = "../mnist/data"

    weights = train(args)
def get_args(filename, sd, bn, im):
    """Load an ARFF dataset and return its argument dict.

    :param filename: path to the .arff file to convert
    :param sd: whether to standardize numeric attributes
    :param bn: whether to binarize nominal attributes
    :param im: whether to impute missing values
    :return: the args dict produced by ArffToArgs.get_args()
    """
    x = ArffToArgs()
    # Fix: previously x.close() was skipped if any setup/conversion call
    # raised, leaking the converter's underlying resources. The try/finally
    # guarantees cleanup on every path.
    try:
        x.set_input(filename)
        x.set_class_index("last")

        x.set_impute(im)
        x.set_binarize(bn)
        x.set_standardize(sd)

        return x.get_args()
    finally:
        x.close()