def __init__(self, header):
    # header = [SampleNames, Column1, Column2, ...]
    self.header = header
    self.table = []
    self.nspaces = [len(h) for h in header]
    self.num_temp = '%.6g'
    parser.parse_args()
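# Usage sketch (hedged): only __init__ is shown above, so the class name and any
# row/print helpers below are assumptions for illustration, not part of the source.
# tbl = OutputTable(['SampleNames', 'Yield', 'Stat. err'])
# tbl.table.append(['WJets', 1.23456789, 0.012])  # rows padded to the self.nspaces widths
# print(tbl.num_temp % 1.23456789)                # -> '1.23457' (6 significant digits)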
def GetOutDir(path, year, region, sub):
    parser.parse_args()
    if path is None:
        path = parser.args.basepath
    if year is not None:
        path = os.path.join(path, 'Plots' + year)
    if region is not None:
        path = os.path.join(path, region + 'Plots_EWK')
    if sub is not None:
        path = os.path.join(path, sub)
    if parser.args.sub is not None:
        path = os.path.join(path, parser.args.sub)
    return path
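# Usage sketch (hedged): assumes parser.args.basepath == '/store/plots' and
# parser.args.sub is None; the layout built above is basepath/Plots<year>/<region>Plots_EWK/<sub>.
# GetOutDir(None, '2017', 'SignalRegion', 'stacks')
#   -> '/store/plots/Plots2017/SignalRegionPlots_EWK/stacks'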
def __init__(self):
    self.opt = parser.parse_args()
    self.transform = transforms.Compose([
        transforms.RandomCrop(self.opt.imageSize * self.opt.upSampling),
        transforms.ToTensor()
    ])
    data_route = r'E:\DataSet\DOTA\part1\images'
    self.data_list = self.listdir(data_route)
    print('Read ' + str(len(self.data_list)) + ' images')
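# Usage sketch (hedged): the dataset class name is an assumption; the class is
# expected to also define the listdir helper used above plus __len__/__getitem__.
# dataset = DOTADataSet()
# loader = torch.utils.data.DataLoader(dataset, batch_size=dataset.opt.batchSize, shuffle=True)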
from data.CCF import CCFDataSet
from torch.utils.data import DataLoader
import torchvision.transforms as transforms
from Parser import parser
from model.models import Generator
import torch
from PIL import Image
import cv2
import numpy as np

device = torch.device("cuda:0")
opt = parser.parse_args()
transform = transforms.Compose([
    transforms.RandomCrop(opt.imageSize * opt.upSampling),
    transforms.ToTensor()
])
transform2 = transforms.Compose([transforms.ToTensor()])
normalize = transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
scale = transforms.Compose([
    transforms.ToPILImage(),
    transforms.Resize(opt.imageSize),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
])
unnormalize = transforms.Normalize(mean=[-1, -1, -1], std=[2, 2, 2])
generator = Generator(16, opt.upSampling)
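# Usage sketch (hedged): opt.generatorWeights is an assumed option name; the idea is to
# load trained weights and super-resolve one low-resolution tensor on the GPU.
# generator.load_state_dict(torch.load(opt.generatorWeights, map_location=device))
# generator.to(device).eval()
# with torch.no_grad():
#     hr = torch.rand(3, opt.imageSize * opt.upSampling, opt.imageSize * opt.upSampling)
#     lr = scale(hr)                                   # downscale + normalize to [-1, 1]
#     sr = generator(lr.unsqueeze(0).to(device))       # super-resolved output batch
#     sr_img = unnormalize(sr.squeeze(0).cpu())        # back to [0, 1] for saving/display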
from Parser import parser
import pickle
from os import path
import sys

# Checking for dictionary
if not path.exists('./VocabDictionary.pkl'):
    with open('VocabDictionary.pkl', 'wb') as f:
        pickle.dump({}, f, protocol=pickle.HIGHEST_PROTOCOL)

if __name__ == '__main__':
    parser.parse_args(sys.argv[1:])
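# Usage sketch (hedged): later runs can load the (possibly empty) dictionary back, e.g.:
# with open('VocabDictionary.pkl', 'rb') as f:
#     vocab = pickle.load(f)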
def __init__(self, year=None, region=None, lumi=None, path=None, config=None,
             autovar=None, useMaxLumi=False, show=True, blinded=None):
    parser.parse_args()
    self.year = year
    self.region = region
    self.show = show
    self.setPath(path)
    self.setConfig(config)
    if self.region is None:
        self.region = GetRegion()
    self.setLumi(lumi, useMaxLumi)
    self.autovar = autovar
    if parser.args.autovar is not None:
        self.autovar = parser.args.autovar
    if self.autovar is True:
        self.autovar = 0
    self.isBlinded = blinded
    if self.isBlinded is None:
        self.isBlinded = parser.args.blinded
    self.MCList = []
    for mc in self.config.mclist:
        self.MCList.append(mc)
    self.SampleList = ["Data"] + self.MCList
    self.processes = {}

    datafile = DataFileMap[self.region]
    if 'Ele' in self.region and self.year == '2017':
        datalist = []
        for type in ("SE", "SP"):
            for era in sorted(self.lumimap.keys()):
                datalist.append('%s_%s_%s' % (datafile, type, era))
    else:
        datalist = ['%s_%s' % (datafile, era) for era in sorted(self.lumimap.keys())]
    self.processes["Data"] = Process("Data", datalist, None, 'data',
                                     year=self.year, region=self.region)
    for mc in self.MCList:
        if self.region == "GammaCR" and mc == "QCD" and not parser.args.use_ga_qcd:
            fakefiles = [datafile.replace("Gamma", "QCDFake") for datafile in datalist]
            self.processes[mc] = Process("QCDFake", fakefiles, None, 'bkg',
                                         leg=self.config.legmap[mc],
                                         color=self.config.colmap[mc],
                                         year=self.year, region=self.region)
            continue
        filelist = list(self.config.filemap[mc])
        if mc in self.config.nlomap and not parser.args.no_nlo:
            filelist += list(self.config.nlomap[mc])
        self.processes[mc] = Process(mc, filelist,
                                     GetMCxsec(filelist, self.config.xsec), 'bkg',
                                     leg=self.config.legmap[mc],
                                     color=self.config.colmap[mc],
                                     year=self.year, region=self.region)
    if self.region == "SignalRegion" and any(parser.args.signal):
        self.setSignalInfo()
    self.haddFiles()
    if os.getcwd() != self.cwd:
        os.chdir(self.cwd)
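# Usage sketch (hedged): the enclosing class name is an assumption; keyword arguments
# left as None fall back to the shared command-line parser (path, autovar, blinded, signal).
# plots = PlotInfo(year='2017', region='SignalRegion', useMaxLumi=True)
# plots.processes['Data']  # Process built from DataFileMap[region], one file per era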
def __init__(self, year=None, region=None, lumi=None, path=None, config=None,
             autovar=False, useMaxLumi=False, show=True):
    parser.parse_args()
    self.year = year
    self.region = region
    self.show = show
    self.setPath(path)
    self.setConfig(config)
    if self.region is None:
        self.region = GetRegion()
    self.setLumi(lumi, useMaxLumi)
    self.autovar = autovar
    if parser.args.autovar:
        self.autovar = True
    self.isBlinded = False
    self.MCList = []
    for mc in self.config.mclist:
        if parser.args.nlo and mc in self.config.nlomap:
            self.MCList.append(self.config.nlomap[mc])
        else:
            self.MCList.append(mc)
    self.SampleList = ["Data"] + self.MCList
    self.processes = {}

    datafile = DataFileMap[self.region]
    if 'Ele' in self.region and self.year == '2017':
        datalist = []
        for type in ("SE", "SP"):
            for era in sorted(self.lumimap.keys()):
                datalist.append('%s_%s_%s' % (datafile, type, era))
    else:
        datalist = ['%s_%s' % (datafile, era) for era in sorted(self.lumimap.keys())]
    self.processes["Data"] = Process("Data", datalist, None, 'data',
                                     year=self.year, region=self.region)
    for mc in self.MCList:
        self.processes[mc] = Process(mc, self.config.filemap[mc],
                                     GetMCxsec(self.config.filemap[mc], self.config.xsec),
                                     'bkg',
                                     leg=self.config.legmap[mc],
                                     color=self.config.colmap[mc],
                                     year=self.year, region=self.region)
    if self.region == "SignalRegion" and any(parser.args.signal):
        self.setSignalInfo()
    self.haddFiles()
    if os.getcwd() != self.cwd:
        os.chdir(self.cwd)