Beispiel #1
0
                outFile.write('process {}\n'.format(' '.join(
                    [str(x) for x in range(len(config['backgrounds']) + 1)])))
                outFile.write('rate {} {}\n'.format(
                    globalMatrix[signal].values[binNum - 1], ' '.join([
                        str(x) for x in globalMatrix[
                            config['backgrounds']].iloc[binNum - 1].values
                    ])))

                outFile.write(uncertFile.read())
                outFile.close()

                uncertFile.close()


if __name__ == "__main__":
    # Parse CLI options and load the analysis configuration they point at.
    cli_args = getArgs()
    cfg = readConfig(cli_args.config)

    print('Creating output folders...')
    out_dir = createOutputFolders(cfg)

    # Gather per-histogram information first; the global matrix below is
    # assembled from what this step writes into the output folder.
    print('Retrieving individual histogram data...')
    getIndHistogramsInfo(cfg, out_dir)

    print("Creating global matrix...")
    createGlobalMatrix(cfg, out_dir)

    # Yields are derived from the global matrix produced in the previous step.
    print("Creating yields...")
    createYields(cfg, out_dir)
Beispiel #2
0
from utils import getArgs, log

import torch
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
import numpy as np 

import os
import sys
import logging
from pathlib import Path

# get settings as input arguments
# train=True selects the training-mode argument set; dl_args carries the
# dataloader-specific options (exact contents defined in utils.getArgs).
train = True
args, dl_args = getArgs(train=train)

# save/load directories
# load_init: first *.pth checkpoint found under the named init-run folder.
# NOTE(review): next() raises StopIteration if no checkpoint exists — TODO
# confirm whether a missing init run should be a hard failure.
load_init = next(Path(f"./results/train/{args.load_init}").glob("*.pth"))
save_path = f"./results/train/{args.fname}/"
PATH = f"{save_path}/{args.fname}.pth"      # model weights (double slash is harmless on POSIX)
LogPath = f"{save_path}/{args.fname}.txt"   # text log written by logging below
LossPath = f"{save_path}/{args.fname}.npz"  # per-epoch loss arrays
os.makedirs(Path(save_path).absolute(), exist_ok=True)

# set up logging file
# filemode='w' truncates any previous run's log for the same fname.
# NOTE(review): level=logging.ERROR suppresses info/debug records — confirm
# that is intended for a training log.
logging.basicConfig(filename=LogPath, filemode='w', format="%(asctime)s;%(message)s", level=logging.ERROR)
logger = logging.getLogger()

# Precomputed train/val split indices exist only for the LIRIS dataset.
split_file = f"./Data/VidSequences/LIRIS/LIRIS_Data_{args.frame_num}_indices.npz" if args.dataset_name == "LIRIS" else None
Beispiel #3
0
import torch
from dataloaders import PersonDataloader
import segmentation_models_pytorch as smp
from matplotlib import pyplot as plt
from tqdm import tqdm_notebook as tqdm
import numpy as np
from preprocessing import list_files
import time
import cv2 as cv
from torchvision import transforms
from PIL import Image
from utils import getArgs

# load variables from file
# args is a dict parsed from the YAML config (see utils.getArgs in this project).
args = getArgs('segmentation/args.yaml')
img_dir = args['img_dir']    # directory of input images
mask_dir = args['mask_dir']  # directory of ground-truth masks
mean = args['mean']          # normalization mean — presumably per-channel; verify against config
std = args['std']            # normalization std
ckpt_path = args['ckpt_path']
df = list_files(img_dir)
# 'val' split, batch size 1, 4 worker processes (argument meaning inferred
# from call order — confirm against PersonDataloader's signature).
dataloader = PersonDataloader(df, img_dir, mask_dir, mean, std, 'val', 1, 4)

device = torch.device("cuda")
# U-Net with a ResNet-101 encoder; classes=1 and activation=None give raw
# single-channel logits (binary person segmentation).
model = smp.Unet("resnet101",
                 encoder_weights="imagenet",
                 classes=1,
                 activation=None)
model.to(device)
model.eval()
# map_location lambda forces the checkpoint tensors onto CPU regardless of
# the device they were saved from; the state dict is applied further below.
state = torch.load(ckpt_path, map_location=lambda storage, loc: storage)
Beispiel #4
0
import keras
import modelCreator
import parseText
import sys
import numpy

from keras.callbacks import ModelCheckpoint

import utils

if __name__ == "__main__":
    args = utils.getArgs(sys.argv[1:])
    # Parenthesized print works under both Python 2 and Python 3; the
    # original `print args` statement is a syntax error on Python 3.
    print(args)

    path = args.path
    seq_length = args.seq_length

    # Build the vocabulary / index maps, then the (input, target) dataset.
    vocab, char_to_num, num_to_char = parseText.getInfo(path, args)
    x, y = parseText.createDataset(path, char_to_num, seq_length, args)

    # Model builders selected by args.model_type:
    # 0 = plain model, 1 = CNN variant, 2 = bidirectional variant.
    models = [
        modelCreator.buildModel, modelCreator.buildModelCNN,
        modelCreator.buildModelBiDir
    ]

    model = models[args.model_type](vocab, x)

    # Persist the architecture as JSON alongside the checkpoints.
    with open(args.save_path + "/model.json", "w") as f:
        f.write(model.to_json())

    c = [
Beispiel #5
0
from svm import SVM
import sys
import utils

if __name__ == "__main__":
    # Dataset name comes from the '-i' command-line option.
    dataset = utils.getArgs(sys.argv, '-i')
    classifier = SVM(dataset)
    # Three (C, gamma) hyper-parameter pairs to evaluate with the RBF kernel.
    for penalty, width in [(1, 1), (1, 0.05), (0.05, 1)]:
        classifier.rbf_training(c=penalty, gamma=width)
        metrics = classifier.evalMetrics()
Beispiel #6
0
        choice = True
    else:
        choice = False

    phases = _separatePhaseFiles(phaseMain)

    if phases:
        Data.update(phases)

    return Data    


if __name__ == '__main__':
    # Input the location where the data files are present
    if len(sys.argv) > 1:
        mainpath = getArgs()
    else:
        mainpath = input("[!] Enter path to Working Directory (Press ENTER for default): ")
        if mainpath == '':
            mainpath = '../'

    # Check if path is valid
    if not os.path.isdir(mainpath):
        print("[-] Working Directory doesn't exist; Exiting")
        sys.exit(2)

    # Timestamped output folder for this run. The trailing "" component keeps
    # a trailing separator so later code can append file names directly.
    outputpath = os.path.join(
        mainpath, "alphameltsData", "output",
        dt.now().strftime('%Y-%m-%d_%H-%M'), ""
    )

    # os.path.join fixes the original bug where `mainpath + "Phase_main_tbl.txt"`
    # produced a wrong path whenever the user entered mainpath without a
    # trailing slash (while outputpath above hard-coded one — inconsistent).
    phase_main = os.path.join(mainpath, "Phase_main_tbl.txt")