def main():
    from sys import argv as __ARGV__
    __ARGV__.append('-b')  # always pass '-b' so batch mode is enabled
    from tools import loadJson
    from argparse import ArgumentParser
    parser = ArgumentParser(description='List mean and RMS of histograms '+ \
                            'in data files')
    parser.add_argument('-b', action='store_true', help='enable batch mode')
    parser.add_argument('json', nargs=1, help='specify JSON file containing '+ \
                        'config information')
    parser.add_argument('-p', '--preliminary', action='store_true', \
                        help='use data files with only 1/80 of data')
    args = parser.parse_args()
    json = loadJson(args.json[0])
    bcids = [str(bx) for bx in json['bunchCrossings']]
    scaling = float(json['scaling'])
    offsetx = float(json['offsetx'])
    offsety = float(json['offsety'])
    filepath = str(json['datapath'])
    filename = str(json['prefix']) + '_' + str(json['suffix'])
    preliminary = args.preliminary
    if preliminary:
        filename += '_prel'
    evaluateScaling(bcids, scaling, offsetx, offsety, filepath, filename, \
                    preliminary)
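# Example invocation of the script above (the script and file names are
# placeholders; the JSON file must provide the keys read in main():
# 'bunchCrossings', 'scaling', 'offsetx', 'offsety', 'datapath', 'prefix',
# 'suffix'):
#
#   python evaluateScaling.py myconfig.json --preliminary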
def renderFabricExplorer(yamlContent):
    clusterName = yamlContent["clusterName"]
    PORTSTARTFROM = yamlContent["fabricPortStartFrom"]
    nfsExportDir = yamlContent["nfsServer"]["exportDir"]
    nfsIp = yamlContent["nfsServer"]["hostname"]
    mountPoint = yamlContent["nfsServer"]["mountPoint"]
    templatesDir = dataPath("templates")
    env = jinjaEnv(templatesDir)
    template = env.get_template("fabric_1_0_explorer.yaml")
    hostNodePort = PORTSTARTFROM + 2067
    content = template.render(clusterName=clusterName,
                              nfsExportDir=nfsExportDir,
                              nfsIp=nfsIp,
                              hostNodePort=hostNodePort)
    explorerYaml = mountPoint + "/" + clusterName + \
        "/resources/explorer-artifacts/fabric_1_0_explorer.yaml"
    with open(explorerYaml, "w") as f:
        f.write(content)
    # render config.json for the Explorer from the cluster's crypto material
    channelName = yamlContent["channelName"]
    cryptoConfig = mountPoint + "/" + clusterName + "/resources/crypto-config.yaml"
    cryptoYaml = readYaml(cryptoConfig)
    networkConfig = mountPoint + "/" + clusterName + \
        "/resources/explorer-artifacts/config.json"
    networkTemplatePath = dataPath("resources/explorer-artifacts/config.json")
    networkTemplate = loadJson(networkTemplatePath)
    networkTemplate["channel"] = channelName
    renderNetwork(cryptoYaml, networkConfig, networkTemplate)
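# Illustrative yamlContent for renderFabricExplorer(). The key layout mirrors
# the lookups in the function; every value is a made-up placeholder.
exampleYamlContent = {
    "clusterName": "mycluster",
    "fabricPortStartFrom": 30000,
    "channelName": "mychannel",
    "nfsServer": {
        "hostname": "10.0.0.5",
        "exportDir": "/opt/share",
        "mountPoint": "/mnt/fabric",
    },
}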
def json2csv(fName):
    tmp = tools.loadJson(fName)
    if type(tmp) == types.DictType:
        Dicts = buildListOfDictionaries(tmp, "key")
    elif type(tmp) == types.ListType:
        Dicts = tmp
    else:
        print "type unknown"
        return
    dicts2csv(Dicts, fName+".csv")
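# json2csv() accepts either of two top-level JSON layouts (illustrative):
#   {"word1": {...}, "word2": {...}}   -> converted via buildListOfDictionaries
#   [{...}, {...}, ...]                -> used as-is
# For a placeholder file name, json2csv("statistics.json") would write
# "statistics.json.csv".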
def parseJson(fin, constraint, feature):
    jText = tools.loadJson(fin)
    if jText == -1:
        return -1
    statistics = {}
    lines = filter(constraint, jText)
    for line in lines:
        if not statistics.has_key(line["word"]):
            statistics[line["word"]] = {}
        if not statistics[line["word"]].has_key(line[feature]):
            statistics[line["word"]][line[feature]] = 0
        statistics[line["word"]][line[feature]] += 1
    return statistics
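# Illustrative call to parseJson() (the field name 'tag' and the file name are
# assumptions; only the 'word' field is fixed by the code above): count how
# often each word occurs with each tag, skipping tokens tagged 'O'.
#
#   stats = parseJson("tokens.json", lambda line: line["tag"] != "O", "tag")
#   # -> e.g. {"Berlin": {"B-LOC": 12, "I-LOC": 1}, ...}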
def localGroup(fName):
    lines = tools.loadJson(fName)
    if lines == -1:
        return -1
    i = 0
    while i < len(lines):
        j = i
        i = group(lines, i, "PER")
        i = group(lines, i, "LOC")
        i = group(lines, i, "ORG")
        if i == j:
            # nothing was grouped at this position, move on to avoid looping
            i += 1
    tools.dumpJson(lines, fName)
def main():
    from sys import argv as __ARGV__
    __ARGV__.append('-b')
    from tools import loadJson
    from argparse import ArgumentParser
    parser = ArgumentParser(description='Plot overlap integral as function '+ \
                            'of vertex resolution')
    parser.add_argument('-b', action='store_true', help='enable batch mode')
    parser.add_argument('json', nargs='+', help='specify one or more JSON '+ \
                        'files containing config information')
    parser.add_argument('-m', '--model', required=True, choices=['SingleGauss', \
                        'SingleGaussUncorrelated', 'DoubleGauss', 'SuperGauss', \
                        'TripleGauss', 'SuperDoubleGauss'], help='specify '+ \
                        'fit model')
    parser.add_argument('-c', '--bcid', required=True, nargs='+', \
                        help='list one or more bunch crossings')
    parser.add_argument('-r', '--vtxres', nargs='+', choices=['default', 'low', \
                        'high', 'half', 'onehalf', 'double'], \
                        default=['default'], help='specify one or more '+ \
                        'options to modify vertex resolution')
    args = parser.parse_args()
    from shapes.SingleGauss import SingleGauss, SingleGaussUncorrelated
    from shapes.DoubleGauss import DoubleGauss, SuperGauss
    from shapes.TripleGauss import TripleGauss, SuperDoubleGauss
    jsons = [loadJson(filename) for filename in args.json]
    prefix = [str(json['prefix']) for json in jsons]
    suffix = [str(json['suffix']) for json in jsons]
    scaling = [float(json['scaling']) for json in jsons]
    # look up the fit model class imported above by name and use its Shortname
    model = locals()[args.model].Shortname
    vtxres = [str(v) for v in args.vtxres]
    legend = [str(json['legend']) for json in jsons]
    for bcid in args.bcid:
        evaluateResolutionVariation(model, bcid, prefix, suffix, vtxres, \
                                    scaling, legend)
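# Example invocation (script name, JSON files and BCID values are
# placeholders; the options follow the argparse definition above):
#
#   python overlapVsVtxres.py fill1.json fill2.json -m DoubleGauss \
#          -c 41 281 -r default double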
def main():
    from sys import argv as __ARGV__
    __ARGV__.append('-b')
    from tools import loadJson
    from argparse import ArgumentParser
    from re import match
    parser = ArgumentParser(description='Make list of ROOT files')
    parser.add_argument('-b', action='store_true', help='enable batch mode')
    parser.add_argument('json', nargs=1, help='specify JSON file containing '+ \
                        'config information')
    args = parser.parse_args()
    json = loadJson(args.json[0])
    name = str(json['prefix'])
    directories = [ json['sourcepath'] + '/' + d for d in \
                    json['sourcedirectories'] ]
    # keys like 'scan1XMoveBegin' are reduced to their scan label ('1X', ...);
    # the first Begin and the last End value delimit each scan
    times = {name[4:6]: (json[name][0], json[name[:-5]+'End'][-1]) \
             for name in json if match('^scan[12][XY]MoveBegin$', name)}
    prepareFileList(directories, name, times)
def main():
    from sys import argv as __ARGV__
    __ARGV__.append('-b')
    from tools import loadJson
    from argparse import ArgumentParser
    from re import match
    parser = ArgumentParser(description='Extract Beam Imaging data from ROOT '+ \
                            'files')
    parser.add_argument('-b', action='store_true', help='enable batch mode')
    parser.add_argument('json', nargs=1, help='specify JSON file containing '+ \
                        'config information')
    parser.add_argument('-p', '--preliminary', action='store_true', help='run '+ \
                        'only on 1/80 of data')
    args = parser.parse_args()
    json = loadJson(args.json[0])
    listfile = {name: 'filelist/'+str(json['prefix'])+'_'+name+'.txt' for \
                name in ['1X', '1Y', '2X', '2Y']}
    times = {name[4:6]: [(bg, ed) for bg, ed in zip(json[name], \
             json[name[:-5]+'End'])] for name in json if \
             match('^scan[12][XY]MoveBegin$', name)}
    minTrk = int(json['minTrk'])
    nbins = int(json['nbins'])
    bcids = [str(bx) for bx in json['bunchCrossings']]
    scaling = float(json['scaling'])
    offsetx = float(json['offsetx'])
    offsety = float(json['offsety'])
    outputpath = str(json['datapath'])
    outputname = str(json['prefix']) + '_' + str(json['suffix'])
    preliminary = args.preliminary
    if preliminary:
        outputname += '_prel'
    prepareDataFile(listfile, times, minTrk, nbins, bcids, scaling, offsetx, \
                    offsety, outputpath, outputname, preliminary)
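# Illustrative JSON config for the extraction main() above. The key names are
# the ones looked up in the code; every value is a made-up placeholder (the
# Begin/End lists hold matching pairs of scan boundaries).
exampleConfig = {
    "prefix": "Fill4954",
    "suffix": "central",
    "datapath": "/data/beamimaging",
    "minTrk": 14,
    "nbins": 95,
    "bunchCrossings": [41, 281, 872],
    "scaling": 1.0,
    "offsetx": 0.0,
    "offsety": 0.0,
    "scan1XMoveBegin": [1438579200, 1438579500],
    "scan1XMoveEnd": [1438579400, 1438579700],
    # ... plus the analogous scan1Y/scan2X/scan2Y Begin/End lists
}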
def tagFile(f, dictionary):
    jText = tools.loadJson(f)
    if jText == -1:
        return -1
    jNewText = tag(jText, dictionary)
    tools.dumpJson(jNewText, f+".t")
import os
import sys

import tools  # project-local helpers (loadJson, writeText, ...)


# NOTE: the original header of this function was lost; the name below is a
# guess, the body is unchanged. It collects every tag that occurs anywhere in
# a {word: {tag: count}} store.
def collectTags(store):
    b = set()
    for k in store.keys():
        for v in store[k].keys():
            b.add(v)
    return list(b)


class record():

    def __init__(self, dictionary, word):
        self.tags = {'I-FAC': 0, 'I-LOC': 1, 'B-ORG': 2, 'O': 3, 'B-PER': 4,
                     'I-PER': 5, 'B-FAC': 6, 'I-ORG': 7, 'B-LOC': 8,
                     'PER': 9, 'LOC': 10, 'ORG': 11}
        self.freqs = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        for k in dictionary.keys():
            self.freqs[self.tags[k]] = dictionary[k]
        self.word = word

    def __str__(self):
        return self.word + ',' + ','.join([str(i) for i in self.freqs])


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print "usage: stat2csv.py fileName\n the file should be in json format"
        sys.exit()  # no input file given
    f = os.path.abspath(sys.argv[1])
    store = tools.loadJson(f)
    if store == -1:
        sys.exit()
    records = []
    for k in store.keys():
        records.append(record(store[k], k))
    text = [str(rec) for rec in records]
    tools.writeText(f + '.csv', '\n'.join(text))
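# Illustrative behaviour of the record class (word and counts are made up):
#
#   record({"B-LOC": 3, "I-LOC": 1}, "Berlin")
#   # str(...) -> "Berlin,0,1,0,0,0,0,0,0,3,0,0,0"
#
# i.e. one CSV row per word with one count column per tag, ordered by the
# indices in self.tags.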