Example #1
            # Scan every evolution curve and keep the running max/min at index i
            while a < len(list_Evo):
                if list_Evo[a][i] > b:
                    b = list_Evo[a][i]
                if list_Evo[a][i] < c:
                    c = list_Evo[a][i]
                a = a + 1
            list_max_env[i] = b
            list_min_env[i] = c
            i = i + 1

    # Build the output path where the average will be stored:
    averagepath = (testpath + "Model_Ver_" + paul[:2] + "_average/plots_Model_Ver_" +
                   paul[:2] + "_average/")

    assure_path_exists(testpath + "Model_Ver_" + paul[:2] + "_average/")
    assure_path_exists(testpath + "Model_Ver_" + paul[:2] +
                       "_average/plots_Model_Ver_" + paul[:2] + "_average/")

    f = open(averagepath + "FOM_evo_data.txt", "w+")
    f.write("\n".join(map(str, list_ave_Evo)))
    f.close()

    copyfile(testpath + "Model_Ver_10/plots_Model_Ver_10/FOM_cut_data.txt",
             averagepath + "FOM_cut_data.txt")

    print("Model_Ver_" + paul[:2] + "-average saved! :-)")

    if args.env:
        f = open(averagepath + "FOM_max_data.txt", "w+")
        f.write("\n".join(map(str, list_max_env)))
Example #2
    }
    learning_rate = args.learningRate
    my_decay = args.decay
    myAdam = Adam(lr=learning_rate, decay=my_decay)
    compileArgs['optimizer'] = myAdam

    if args.verbose:
        print "Opening file"

    from commonFunctions import StopDataLoader, FullFOM, getYields, getDefinedClassifier, assure_path_exists
    filepath = args.outputDir
    baseName = filepath.replace(cfg.lgbk + "Searches/", "")
    baseName = baseName.replace("/", "")
    fileToPlot = "ROC_" + baseName

    assure_path_exists(filepath + "/accuracy/" + "dummy.txt")
    assure_path_exists(filepath + "/loss/" + "dummy.txt")
    os.chdir(filepath)

    #fileToPlot = "mGS:outputs_run_"+test_point+"_"+str(learning_rate)+"_"+str(my_decay)

    f = open(fileToPlot + '.txt', 'w')

    for y in [1, 2, 3]:  # LAYERS
        for x in range(2, 101):  # NEURONS
            if args.verbose:
                print "  ==> #LAYERS:", y, "   #NEURONS:", x, " <=="
                print("Starting the training")

            model = getDefinedClassifier(len(trainFeatures), 1, compileArgs, x,
                                         y, args.dropoutRate)
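# For context, a hypothetical sketch of what a builder like getDefinedClassifier
# could look like, inferred only from its call signature above (the actual
# implementation lives in commonFunctions.py and may differ):
from keras.models import Sequential
from keras.layers import Dense, Dropout

def get_defined_classifier_sketch(n_inputs, n_outputs, compile_args,
                                  neurons, layers, dropout_rate=0.0):
    # A fully connected network with `layers` hidden layers of `neurons` units,
    # matching the (nFeatures, 1, compileArgs, x, y, dropoutRate) call above.
    model = Sequential()
    model.add(Dense(neurons, input_dim=n_inputs, activation='relu'))
    for _ in range(layers - 1):
        model.add(Dense(neurons, activation='relu'))
        if dropout_rate > 0:
            model.add(Dropout(dropout_rate))
    model.add(Dense(n_outputs, activation='sigmoid'))
    model.compile(**compile_args)  # compile_args is assumed to carry loss/optimizer
    return model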
Example #3
    args = parser.parse_args()

    n_layers = args.layers
    n_neurons = args.neurons
    n_epochs = args.epochs
    batch_size = args.batchSize  #len(XDev)/100
    learning_rate = args.learningRate
    my_decay = args.decay
    dropout_rate = args.dropoutRate
    regularizer = args.regularizer
    iteration = args.iteration

    dateSubmission = datetime.datetime.now().strftime("%Y-%m-%d_%H:%M")
    baseName = cfg.lgbk + "SingleNN/" + dateSubmission + "/"
    assure_path_exists(baseName + "dummy.txt")

    for i in range(1, iteration + 1):
        shPath = baseName + 'trainNN_Ver' + str(i) + '.sh'
        with open(shPath, 'w') as f:
            f.write("#!/bin/bash\n")
            f.write("#$ -cwd\n")
            f.write("#$ -pe mcore 3\n")
            f.write("#$ -l container=True\n")
            f.write("#$ -v CONTAINER=CENTOS7\n")
            f.write("#...$ -v CONTAINER=UBUNTU16\n")
            f.write("#$ -l gpu,release=el7\n")
            f.write("cd /exper-sw/cmst3/cmssw/users/dbastos/StopNN/\n")
            f.write("module load root-6.10.02\n")
            f.write("python trainNN.py -z -l " + str(n_layers) + " -n " +
                    str(n_neurons) + " -e " + str(n_epochs) + " -a " +
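# assure_path_exists is imported from commonFunctions in these snippets; a minimal
# stand-in with the same calling convention (create the directory part of a path
# if it is missing) could look like this -- an assumption, not the repository's code:
import os

def assure_path_exists(path):
    # e.g. assure_path_exists(baseName + "dummy.txt") creates baseName if needed
    directory = os.path.dirname(path)
    if directory and not os.path.exists(directory):
        os.makedirs(directory)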
Example #4
        #model_name = model_name.replace("Lr5_","Lr5.0_")
        model_name = model_name + "_TP" + test_point + "_DT" + suffix
    elif args.runNum is not None:
        filepath = cfg.lgbk + "Searches/run" + str(args.runNum)
        model_name = ("L" + str(args.layers) + "_N" + str(args.neurons) + "_" +
                      test_point + "_run" + str(args.runNum))
    elif args.local:
        filepath = "/home/diogo/PhD/SingleNN/" + model_name

    os.chdir(filepath + "/")
    if args.bk:
        plots_path = filepath + "/plots_" + model_name + "_2bk/"
    else:
        plots_path = filepath + "/plots_" + model_name + "/"

    assure_path_exists(plots_path)

    if args.verbose:
        print "Loading Model ..."

    # Load the trained model
    with open(model_name + '.json', 'r') as json_file:
        loaded_model_json = json_file.read()
    model = model_from_json(loaded_model_json)
    model.load_weights(model_name + ".h5")
    model.compile(loss='binary_crossentropy', optimizer='adam')

    if args.verbose:
        print("Getting predictions")

    devPredict = model.predict(XDev)
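# The development-set predictions are typically turned into a ROC curve downstream
# (fileToPlot is named "ROC_..." in Example #2); a small self-contained sketch with
# dummy stand-ins for YDev and devPredict, using scikit-learn:
import numpy as np
from sklearn.metrics import roc_curve, auc

YDev = np.array([0, 0, 1, 1, 0, 1])                    # true 0/1 labels for XDev
devPredict = np.array([0.1, 0.4, 0.8, 0.9, 0.3, 0.6])  # scores from model.predict(XDev)

fpr, tpr, _ = roc_curve(YDev, devPredict)
print("ROC AUC on the development set:", auc(fpr, tpr))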
Example #5
                        **trainParams)

    # Time of the training
    training_time = time.time() - start
    if args.verbose:
        print "Training took ", training_time, " seconds"

    lr_list = lrm.lrates

    acc = history.history['acc']
    val_acc = history.history['val_acc']
    loss = history.history['loss']
    val_loss = history.history['val_loss']

    # assure_path_exists() is defined in commonFunctions.py
    assure_path_exists(filepath + "accuracy/")
    assure_path_exists(filepath + "loss/")

    # Saving accuracy and loss values in a pickle file for later plotting
    pickle.dump(acc, open(filepath + "accuracy/acc_" + name + ".pickle", "wb"))
    pickle.dump(loss, open(filepath + "loss/loss_" + name + ".pickle", "wb"))
    pickle.dump(val_acc,
                open(filepath + "accuracy/val_acc_" + name + ".pickle", "wb"))
    pickle.dump(val_loss,
                open(filepath + "loss/val_loss_" + name + ".pickle", "wb"))

    # Saving the trained model
    model_json = model.to_json()  # model structure
    with open(filepath + name + ".json", "w") as json_file:
        json_file.write(model_json)
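# The pickled histories above are meant to be re-read later for plotting; a sketch
# of that step, reusing `filepath` and `name` from the snippet (the matplotlib
# part is an assumption about how the curves are eventually drawn):
import pickle
import matplotlib.pyplot as plt

with open(filepath + "accuracy/acc_" + name + ".pickle", "rb") as pf:
    acc = pickle.load(pf)
with open(filepath + "accuracy/val_acc_" + name + ".pickle", "rb") as pf:
    val_acc = pickle.load(pf)

plt.plot(acc, label="training accuracy")
plt.plot(val_acc, label="validation accuracy")
plt.xlabel("epoch")
plt.ylabel("accuracy")
plt.legend()
plt.savefig(filepath + "accuracy/acc_" + name + ".png")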