Example #1
0
def verifica_uso_palavras( frequencia_palavra, mapa_palavra_id, mapa_id_palavra ):
    """Drop unused words and re-number the id mappings compactly.

    Words whose count in ``frequencia_palavra`` is 0 are written (one per
    line) to the file "palavras_retiradas", removed from both mappings,
    and the surviving words are reassigned consecutive ids that preserve
    their original id order.  Both dicts are mutated in place.

    Returns the (mutated) pair ``(mapa_palavra_id, mapa_id_palavra)``.
    """
    retirar = set()
    # Log removed words; log.write(...) replaces the Python-2-only
    # ``print >> log`` and the context manager guarantees the close.
    with open( "palavras_retiradas", 'w' ) as log:
        for palavra, freq in frequencia_palavra.items():
            if freq == 0:
                log.write( palavra + "\n" )
                retirar.add( palavra )

    # Rebuild both maps from scratch instead of shifting ids while
    # iterating: the old in-place shifting left stale trailing entries in
    # mapa_id_palavra (ids beyond the new size still mapped to words).
    ordenadas = sorted( mapa_palavra_id.items(), key=operator.itemgetter(1) )
    mapa_palavra_id.clear()
    mapa_id_palavra.clear()
    novo_id = 0
    for palavra, _ in ordenadas:
        if palavra in retirar:
            continue
        mapa_palavra_id[ palavra ] = novo_id
        mapa_id_palavra[ novo_id ] = palavra
        novo_id += 1

    return mapa_palavra_id, mapa_id_palavra
Example #2
0
def savehist(logmsg=""):
    """Snapshot the current setup files into ./history/.

    Copies comp.ind, dica.dat and spct.dat to ./history/<name>.hist.NNNN,
    where NNNN is one past the highest existing entry number, and appends
    a "NNNN log:<logmsg>" line to ./history/history.txt.
    """
    import shutil, os, glob
    print("Saving current setup to hist")
    if not os.path.exists('./history/'):
        os.mkdir('./history')
        cur_entry = 1
    else:
        filelist = glob.glob('./history/*.hist.????')
        # "or [0]" guards the empty-directory case, where the old bare
        # max() call raised ValueError on an empty sequence.
        cur_entry = max([int(item[-4:]) for item in filelist] or [0]) + 1
    print("Creating new history item with number %04d" % cur_entry)
    suffix = ".hist.%04d" % cur_entry
    bup_files = ['comp.ind', 'dica.dat', 'spct.dat']
    for ifile in bup_files:
        shutil.copy(ifile, './history/' + ifile + suffix)
    # Mode 'a' both appends and creates the file, so the separate 'w'
    # branches (and the Python-3-incompatible file() builtin) are gone.
    with open('./history/history.txt', 'a') as log:
        log.write("%04d log:" % (cur_entry,) + logmsg + "\n")
Example #3
0
def datalog(nom, lastevent, mission_start):
    """Append one telemetry sample to /sd/log<nom>.csv.

    Polls the barometer, IMU and GPS helpers, then writes a single CSV
    row (same column order as the header written by mesurar): elapsed
    mission time, stage, GPS fix, pressure, altitude, AoA, speeds and
    accelerations.
    """
    # Refresh all sensor state before sampling the module globals below.
    senBMP()
    senMPU()
    senGPS(2)
    orientacio.calc()
    elapsed = pyb.elapsed_millis(mission_start)
    fields = (
        elapsed,
        lastevent,
        str(my_gps.latitude).replace(",", "").replace("'", ""),
        str(my_gps.longitude).replace(",", "").replace("'", ""),
        p,
        altitud,
        90 + orientacio.calc()[1],
        VelVert,
        my_gps.speed[2] / 3.6,  # presumably km/h -> m/s; verify GPS units
        acX,
        acY,
        acZ,
    )
    row = '{},{},{},{},{},{},{},{},{},{},{},{}\n'.format(*fields)
    print(row)
    with open('/sd/log{}.csv'.format(nom), 'a') as out:
        out.write(row)
Example #4
0
    # NOTE(review): Python 2 only (print statements, file() builtin).
    # Sweep over feature block sizes; currently pinned to a single size.
    #this line controls how many block sizes are tried...
    for i in [18]:  #range(1,18):
        # Geometric progression of block sizes: 1.5**i, truncated to int.
        blocksize = int(1.5**i)
        print "Blocksize", blocksize
        # One model directory per block size (basePath appears to be a
        # path-like object supporting '/': defined outside this chunk).
        modeldir = basePath / str(blocksize)
        modeldir.mkdir()

        # Training split: write keys (log) and features for this blocksize.
        log = file(modeldir / "keys", 'w')
        feats = file(modeldir / "feats", 'w')

        conditionalFeatures.timeSpanFeats(train,
                                          logfile=log,
                                          blocksize=blocksize,
                                          output=feats)

        log.close()
        feats.close()

        # Dev split: same extraction, separate key/feature files.
        devlog = file(modeldir / "devkeys", 'w')
        devfeats = file(modeldir / "devfeats", 'w')

        conditionalFeatures.timeSpanFeats(dev,
                                          logfile=devlog,
                                          blocksize=blocksize,
                                          output=devfeats)

        devlog.close()
        devfeats.close()

        # In-memory buffer; its consumer is outside this chunk.
        trainErr = StringIO()
    # NOTE(review): Python 2 only (.next() on the walk generator, print
    # statements). Dispatch one async folding job per fasta group file.
    for group in walk(otufolder + "fasta_groups").next()[2]:
        # Group number is the trailing "_<num>.<ext>" piece of the filename.
        groupnum = group.split("_")[-1].split(".")[0]
        pool.apply_async(func=run_fold_for_infernal, args=(groupnum, otufolder+"fasta_groups/" + group, otufolder, args.minseqs))
    # Wait for all folding jobs to finish before reading their logs.
    pool.close()
    pool.join()

    #get sequence counts for each group
    infernalorder = []
    count = 0
    for group in walk(otufolder).next()[1]:
        if group == "fasta_groups":
            continue
        count += 1
        # Each group's log.txt carries its sequence counts: line 1 total
        # seqs, line 2 unique seqs (first whitespace-separated token).
        log = open(otufolder + group + "/log.txt")
        loginfo = log.readlines()
        log.close()
        infernalorder.append((group, int(loginfo[1].split()[0]), int(loginfo[2].split()[0])))
    #write out file of sequence counts
    # Largest groups (by total seqs) first.
    infernalorder.sort(reverse=True, key=lambda x: x[1])
    groupsizefile = open(otufolder + "/group_sizes.txt", 'w')
    groupsizefile.write("Group\tTotal Seqs\tUnique Seqs\n")
    for info in infernalorder:
        groupsizefile.write("%s\t%s\t%s\n" % info)
    groupsizefile.close()

    print count, "final groups"
    print "Runtime: " + str((time() - secs) / 3600) + " hrs"

    print "==Running Infernal for all groups=="
    print "Infernal score cutoff: " + str(infernalscore)
    #create the csv file for holding all the hit counts
Example #6
0
def fitOneEfficiency(name, hpass, hfail, sigModel, bkgModel, refpass, reffail, options):
    """Fit one tag-and-probe efficiency from pass/fail mass histograms.

    Builds a simultaneous RooFit model (signal + background in both the
    "passing" and "failing" categories), fits it to hpass/hfail, and
    estimates the efficiency uncertainty with a manual Minos scan (or a
    Clopper-Pearson interval when the fail histogram is empty).  Writes
    plots, a text report and optionally the workspace ROOT file under
    options.printDirBins.

    Returns (efficiency, err_lo, err_hi), or None when no signal or
    background model could be built.
    """
    w = ROOT.RooWorkspace("w")
    w.factory("mass[%g,%g]" % (hpass.GetXaxis().GetXmin(), hpass.GetXaxis().GetXmax()))
    w.var("mass").setBins(hpass.GetNbinsX())
    w.factory("passing[yes=1,no=0]")
    # make combined dataset, relying on combine
    dpass = ROOT.RooDataHist("dpass","dpass", ROOT.RooArgList(w.var("mass")), hpass)
    dfail = ROOT.RooDataHist("dfail","dfail", ROOT.RooArgList(w.var("mass")), hfail)
    dfactory = ROOT.CombDataSetFactory(ROOT.RooArgSet(w.var("mass")), w.cat("passing"))
    dfactory.addSetBin("yes", dpass)
    dfactory.addSetBin("no",  dfail)
    data = dfactory.done("data","data")
    nsig = hpass.Integral(); nbkg = hfail.Integral(); nall = nsig+nbkg
    # make PDF
    signals = makeSignalModel(w, sigModel, refpass, reffail, options)
    if not signals: return None
    backgrounds = makeBackgroundModel(w, bkgModel, options)
    if not backgrounds: return None
    # Yields: Nsig/Nbkg are split between pass and fail by the signal
    # efficiency and a background "efficiency" nuisance parameter.
    w.factory('expr::sigPass("@0*  @1",   Nsig[%g,0,%g], efficiency[0.95,0,1])' % (nsig,nall))
    w.factory('expr::sigFail("@0*(1-@1)", Nsig         , efficiency         )')
    w.factory('expr::bkgPass("@0*  @1",   Nbkg[%g,0,%g], effbkg[0.5,0,1])' % (nbkg,nall))
    w.factory('expr::bkgFail("@0*(1-@1)", Nbkg         , effbkg         )')
    w.factory('SUM::pdfPass(sigPass*{sigPdfPass}, bkgPass*{bkgPdfPass})'.format(sigPdfPass = signals[0], bkgPdfPass = backgrounds[0]))
    w.factory('SUM::pdfFail(sigFail*{sigPdfFail}, bkgFail*{bkgPdfFail})'.format(sigPdfFail = signals[1], bkgPdfFail = backgrounds[1]))
    w.factory('SIMUL::pdf(passing, yes=pdfPass, no=pdfFail)')
    #pdf = ROOT.RooSimultaneousOpt(w.pdf("pdf0"), "pdf")
    #getattr(w,'import')(pdf, ROOT.RooFit.RecycleConflictNodes(True), ROOT.RooFit.Silence())

    getattr(w,'import')(dpass, ROOT.RooCmdArg())
    getattr(w,'import')(dfail, ROOT.RooCmdArg())
    # make FIT

    # Degenerate case: fail histogram is empty, so the efficiency is
    # pinned to 1 and both efficiency parameters are frozen in the fit.
    if hfail.Integral() == 0:
        w.var("efficiency").setVal(1)
        w.var("effbkg").setVal(1)
        w.var("efficiency").setConstant(True)
        w.var("effbkg").setConstant(True)
    #result = w.pdf("pdf").fitTo(data, ROOT.RooFit.Save(True), ROOT.RooFit.Minos(ROOT.RooArgSet(w.var("efficiency"))))
    # Coarse fit first (strategy 0), then refit with the default strategy.
    nll, minim, retval = minimize(w.pdf("pdf"), data, options, strategy=0)
    if options.doScan: minim.minimize("Minuit2","scan");
    #minim.minimize("Minuit2","migrad")
    minim.hesse()
    nll, minim, retval = minimize(w.pdf("pdf"), data, options, strategy=None)
    # High-efficiency fits: tighten the lower bound and refit once more to
    # help the minimizer near the boundary.
    if hfail.Integral() > 0 and w.var("efficiency").getVal() > 0.9:
        w.var("efficiency").setMin(0.85)
        retval = minim.minimize("Minuit2","migrad");
    go = True; niter = 0; minoslog = []; efferr = None
    # Retry loop: refit and re-run the error estimate until the manual
    # Minos scan succeeds, switching minimizer algorithms at iterations
    # 5 and 7 and giving up after 10 attempts.
    while go:
        niter += 1
        if niter == 5: 
            minim.minimize("Minuit2","scan");
        if niter == 7: 
            minim.minimize("Minuit","minimize");
        if niter >= 10:
            minoslog += [ "STOP AFTER %d ITERATIONS" % niter ]
            go = False
        minim.hesse()
        nll, minim, retval = minimize(w.pdf("pdf"), data, options, strategy=None)
        result = minim.save()
        effval = w.var("efficiency").getVal()
        if hfail.Integral() == 0:
            # Empty fail histogram: take the lower error from a
            # Clopper-Pearson interval evaluated at Nsig - 1 sigma.
            cpcalc = ROOT.TEfficiency.ClopperPearson
            nsig = int(floor(w.var("Nsig").getVal() - w.var("Nsig").getError()))
            emin = cpcalc(nsig,nsig,0.6827,False)
            efferr = [ emin-1, 0 ]
            minoslog += [ "No passing probes: using clopper pearson for the fitted number of signal events minus -1 sigma from the fit."]
            minoslog += [ "Nsig fit: %.1f +- %.1f " % (w.var("Nsig").getVal(), w.var("Nsig").getError()) ]
            minoslog += [ "CP lower bound for %d events, all passing: %.4f" % (nsig,emin) ]
        else:
            try:
                efferr, minoslog2 = manualMinos(w.pdf("pdf"), data, w.var("efficiency"), options)
                go = False; minoslog += minoslog2
            except MinosException:
                # Unfreeze the efficiency and let the loop refit from scratch.
                print         "Negative value in Minos, restarting"
                minoslog += [ "Negative value in Minos, restarting everything", '------' ]
                w.var("efficiency").setConstant(False)
                
    #print "MANUAL MINOS: ",efferr,"\n\t","\n\t".join(minoslog)
    # plot
    c1 = ROOT.TCanvas("c1","c1"); c1.SetCanvasSize(900,500);
    c1.Divide(2,1)
    c1.cd(1)
    fpass = w.var("mass").frame()
    dpass.plotOn(fpass)
    w.pdf("pdfPass").plotOn(fpass, ROOT.RooFit.Components(backgrounds[0]),ROOT.RooFit.LineColor(ROOT.kGreen+3))
    w.pdf("pdfPass").plotOn(fpass, ROOT.RooFit.LineColor(ROOT.kGreen+1))
    fpass.Draw()
    c1.cd(2)
    ffail = w.var("mass").frame()
    dfail.plotOn(ffail)
    w.pdf("pdfFail").plotOn(ffail, ROOT.RooFit.Components(backgrounds[1]),ROOT.RooFit.LineColor(ROOT.kRed+3))
    w.pdf("pdfFail").plotOn(ffail, ROOT.RooFit.LineColor(ROOT.kRed+0))
    ffail.Draw()
    for ext in "pdf","png":
        if ext == "pdf" and not options.fullOut: continue
        c1.Print("%s/%s.%s" % (options.printDirBins, name, ext))
    # Text report: fitted parameters with errors, flagging values that
    # ended up close to their allowed limits.
    log = open("%s/%s.txt" % (options.printDirBins, name), "w")
    fpf = result.floatParsFinal()
    for i in range(fpf.getSize()):
        par = fpf.at(i)
        log.write("%-20s : " % par.GetName())
        if par.hasAsymError():
            log.write("%8.4f  %+8.4f / %+8.4f" % (par.getVal(), par.getAsymErrorLo(), par.getAsymErrorHi()))
            plo, phi = par.getVal() - par.getAsymErrorLo(), par.getVal() + par.getAsymErrorHi()
        else:
            if par.GetName() in ("Nsig", "Nbkg"):
                log.write("%8.1f  %+8.1f           " % (par.getVal(), par.getError()))
            else:
                log.write("%8.4f  %+8.4f           " % (par.getVal(), par.getError()))
            plo, phi = par.getVal() - par.getError(), par.getVal() + par.getError()
        pmax, pmin = par.getMax(), par.getMin()
        log.write("   [ %8.4f , %8.4f ]" % (pmin, pmax))
        if phi > 0.95*pmax + 0.05*pmin: log.write("   <-- close to max")
        if plo < 0.05*pmax + 0.95*pmin: log.write("   <-- close to min")
        log.write("\n")

    log.write("MANUAL MINOS: %s\n\t%s\n"  % (efferr,"\n\t".join(minoslog)) )

    # goodness of fit
    chi2pass = chi2(hpass, w.var("mass"), w.pdf("pdfPass"), w.function("sigPass").getVal() + w.function("bkgPass").getVal())
    chi2fail = chi2(hfail, w.var("mass"), w.pdf("pdfFail"), w.function("sigFail").getVal() + w.function("bkgFail").getVal())
    log.write("\nChi2: pass %.2f, fail %.2f, total %.2f, ndof: %d - %d = %d \n" % (
        chi2pass, chi2fail, chi2pass + chi2fail, 
        hpass.GetNbinsX()*2, fpf.getSize(),  hpass.GetNbinsX()*2 - fpf.getSize()+1))

    log.write("\n"+options.textBlob+"\n")

    log.close()

    if options.fullOut:
        tfout = ROOT.TFile.Open("%s/%s.%s" % (options.printDirBins, name, "root"), "RECREATE")
        w.SetName("w_"+name)
        tfout.WriteTObject(w, "w_"+name)
        tfout.Close()

    # report result
    if efferr: 
        return effval, efferr[0], efferr[1]
    else:
        return None
Example #7
0
def mesurar(nom):
    """Create /sd/log<nom>.csv and write the telemetry column header."""
    header = ('temps (ms), etapa, latitud, longitud, pressio (Pa), '
              'altitud (m), AoA(º), Velocitat(m/s), Velocitat_GPS(m/s), '
              'acX(m/s²), acY(m/s²), acZ(m/s²)\n')
    with open('/sd/log{}.csv'.format(nom), 'w') as out:
        out.write(header)
Example #8
0
def main():
  """Train or evaluate an audio classifier (AlexNet) end to end.

  Reads configuration from the module-level ``args``: sets up the run
  log, builds dataset/loaders for the chosen dataset, the model, loss
  and SGD optimizer, optionally resumes from a checkpoint, then either
  runs a single validation pass (``args.evaluate``) or the full training
  loop with per-epoch checkpointing and accuracy curves.
  """
  # Init logger
  if not os.path.isdir(args.save_path):
    os.makedirs(args.save_path)
  log = open(os.path.join(args.save_path, 'log_seed_{}.txt'.format(args.manualSeed)), 'w')
  print_log('save path : {}'.format(args.save_path), log)
  state = {k: v for k, v in args._get_kwargs()}
  print_log(state, log)
  print_log("Random Seed: {}".format(args.manualSeed), log)
  print_log("python version : {}".format(sys.version.replace('\n', ' ')), log)
  print_log("torch  version : {}".format(torch.__version__), log)
  print_log("cudnn  version : {}".format(torch.backends.cudnn.version()), log)

  # Data loading code
  # Any other preprocessings? http://pytorch.org/audio/transforms.html
  sample_length = 10000
  scale = transforms.Scale()
  padtrim = transforms.PadTrim(sample_length)
  downmix = transforms.DownmixMono()
  transforms_audio = transforms.Compose([
    scale, padtrim, downmix
  ])

  if not os.path.isdir(args.data_path):
    os.makedirs(args.data_path)
  train_dir = os.path.join(args.data_path, 'train')
  val_dir = os.path.join(args.data_path, 'val')

  # Choose dataset to use
  if args.dataset == 'arctic':
    # TODO No ImageFolder equivalent for audio. Need to create a Dataset manually
    train_dataset = Arctic(train_dir, transform=transforms_audio, download=True)
    val_dataset = Arctic(val_dir, transform=transforms_audio, download=True)
    num_classes = 4
  elif args.dataset == 'vctk':
    train_dataset = dset.VCTK(train_dir, transform=transforms_audio, download=True)
    val_dataset = dset.VCTK(val_dir, transform=transforms_audio, download=True)
    num_classes = 10
  elif args.dataset == 'yesno':
    train_dataset = dset.YESNO(train_dir, transform=transforms_audio, download=True)
    val_dataset = dset.YESNO(val_dir, transform=transforms_audio, download=True)
    num_classes = 2
  else:
    # raise instead of the old "assert False": asserts are stripped under -O.
    raise ValueError('Dataset is incorrect')

  train_loader = torch.utils.data.DataLoader(
    train_dataset,
    batch_size=args.batch_size,
    shuffle=True,
    num_workers=args.workers,
    # pin_memory=True, # What is this?
    # sampler=None     # What is this?
  )
  val_loader = torch.utils.data.DataLoader(
    val_dataset,
    batch_size=args.batch_size, shuffle=False,
    num_workers=args.workers, pin_memory=True)


  # Feed in respective model file to pass into model (alexnet.py)
  print_log("=> creating model '{}'".format(args.arch), log)
  # Init model, criterion, and optimizer
  # net = models.__dict__[args.arch](num_classes)
  net = AlexNet(num_classes)
  #
  print_log("=> network :\n {}".format(net), log)

  # net = torch.nn.DataParallel(net, device_ids=list(range(args.ngpu)))

  # define loss function (criterion) and optimizer
  criterion = torch.nn.CrossEntropyLoss()

  # Define stochastic gradient descent as optimizer (run backprop on random small batch)
  optimizer = torch.optim.SGD(net.parameters(), state['learning_rate'], momentum=state['momentum'],
                weight_decay=state['decay'], nesterov=True)

  # Sets use for GPU if available
  if args.use_cuda:
    net.cuda()
    criterion.cuda()

  recorder = RecorderMeter(args.epochs)
  # optionally resume from a checkpoint
  # Need same python version that the resume was in
  if args.resume:
    if os.path.isfile(args.resume):
      print_log("=> loading checkpoint '{}'".format(args.resume), log)
      if args.ngpu == 0:
        # CPU-only run: remap CUDA tensors in the checkpoint onto the CPU.
        checkpoint = torch.load(args.resume, map_location=lambda storage, loc: storage)
      else:
        checkpoint = torch.load(args.resume)

      recorder = checkpoint['recorder']
      args.start_epoch = checkpoint['epoch']
      net.load_state_dict(checkpoint['state_dict'])
      optimizer.load_state_dict(checkpoint['optimizer'])
      print_log("=> loaded checkpoint '{}' (epoch {})" .format(args.resume, checkpoint['epoch']), log)
    else:
      print_log("=> no checkpoint found at '{}'".format(args.resume), log)
  else:
    print_log("=> do not use any checkpoint for {} model".format(args.arch), log)

  if args.evaluate:
    validate(val_loader, net, criterion, 0, log, val_dataset)
    # BUG FIX: the early return used to leak the open log file handle.
    log.close()
    return

  # Main loop
  start_time = time.time()
  epoch_time = AverageMeter()

  # Training occurs here
  for epoch in range(args.start_epoch, args.epochs):
    current_learning_rate = adjust_learning_rate(optimizer, epoch, args.gammas, args.schedule)

    need_hour, need_mins, need_secs = convert_secs2time(epoch_time.avg * (args.epochs-epoch))
    need_time = '[Need: {:02d}:{:02d}:{:02d}]'.format(need_hour, need_mins, need_secs)

    print_log('\n==>>{:s} [Epoch={:03d}/{:03d}] {:s} [learning_rate={:6.4f}]'.format(time_string(), epoch, args.epochs, need_time, current_learning_rate) \
                + ' [Best : Accuracy={:.2f}, Error={:.2f}]'.format(recorder.max_accuracy(False), 100-recorder.max_accuracy(False)), log)

    print("One epoch")
    # train for one epoch
    # Call to train (note that our previous net is passed into the model argument)
    train_acc, train_los = train(train_loader, net, criterion, optimizer, epoch, log, train_dataset)

    # evaluate on validation set
    #val_acc,   val_los   = extract_features(test_loader, net, criterion, log)
    val_acc,   val_los   = validate(val_loader, net, criterion, epoch, log, val_dataset)
    is_best = recorder.update(epoch, train_los, train_acc, val_los, val_acc)

    save_checkpoint({
      'epoch': epoch + 1,
      'arch': args.arch,
      'state_dict': net.state_dict(),
      'recorder': recorder,
      'optimizer' : optimizer.state_dict(),
    }, is_best, args.save_path, 'checkpoint.pth.tar')

    # measure elapsed time
    epoch_time.update(time.time() - start_time)
    start_time = time.time()
    recorder.plot_curve( os.path.join(args.save_path, 'curve.png') )

  log.close()
Example #9
0
def fitOneEfficiency(name, hpass, hfail, sigModel, bkgModel, refpass, reffail,
                     options):
    """Fit one tag-and-probe efficiency from pass/fail mass histograms.

    Simplified variant: builds a simultaneous RooFit model (signal +
    background in the "passing" and "failing" categories), fits it to
    hpass/hfail, estimates the efficiency error with a single manual
    Minos scan (no retry loop, no empty-fail special case), then writes
    plots, a text report and the workspace under options.printDirBins.

    Returns (efficiency, err_lo, err_hi), or None when no signal or
    background model could be built.
    """
    w = ROOT.RooWorkspace("w")
    w.factory("mass[%g,%g]" %
              (hpass.GetXaxis().GetXmin(), hpass.GetXaxis().GetXmax()))
    w.var("mass").setBins(hpass.GetNbinsX())
    w.factory("passing[yes=1,no=0]")
    # make combined dataset, relying on combine
    dpass = ROOT.RooDataHist("dpass", "dpass", ROOT.RooArgList(w.var("mass")),
                             hpass)
    dfail = ROOT.RooDataHist("dfail", "dfail", ROOT.RooArgList(w.var("mass")),
                             hfail)
    dfactory = ROOT.CombDataSetFactory(ROOT.RooArgSet(w.var("mass")),
                                       w.cat("passing"))
    dfactory.addSetBin("yes", dpass)
    dfactory.addSetBin("no", dfail)
    data = dfactory.done("data", "data")
    nsig = hpass.Integral()
    nbkg = hfail.Integral()
    nall = nsig + nbkg
    # make PDF
    signals = makeSignalModel(w, sigModel, refpass, reffail, options)
    if not signals: return None
    backgrounds = makeBackgroundModel(w, bkgModel, options)
    if not backgrounds: return None
    # Yields: Nsig/Nbkg are split between pass and fail by the signal
    # efficiency and a background "efficiency" nuisance parameter.
    w.factory(
        'expr::sigPass("@0*  @1",   Nsig[%g,0,%g], efficiency[0.9,0,1])' %
        (nsig, nall))
    w.factory('expr::sigFail("@0*(1-@1)", Nsig         , efficiency         )')
    w.factory('expr::bkgPass("@0*  @1",   Nbkg[%g,0,%g], effbkg[0.5,0,1])' %
              (nbkg, nall))
    w.factory('expr::bkgFail("@0*(1-@1)", Nbkg         , effbkg         )')
    w.factory(
        'SUM::pdfPass(sigPass*{sigPdfPass}, bkgPass*{bkgPdfPass})'.format(
            sigPdfPass=signals[0], bkgPdfPass=backgrounds[0]))
    w.factory(
        'SUM::pdfFail(sigFail*{sigPdfFail}, bkgFail*{bkgPdfFail})'.format(
            sigPdfFail=signals[1], bkgPdfFail=backgrounds[1]))
    w.factory('SIMUL::pdf(passing, yes=pdfPass, no=pdfFail)')

    getattr(w, 'import')(dpass, ROOT.RooCmdArg())
    getattr(w, 'import')(dfail, ROOT.RooCmdArg())
    # make FIT

    #result = w.pdf("pdf").fitTo(data, ROOT.RooFit.Save(True), ROOT.RooFit.Minos(ROOT.RooArgSet(w.var("efficiency"))))
    # Coarse fit first (strategy 0), then refit with the default strategy.
    nll, minim, retval = minimize(w.pdf("pdf"), data, options, strategy=0)
    nll, minim, retval = minimize(w.pdf("pdf"), data, options, strategy=None)
    # High-efficiency fits: tighten the lower bound and refit once more
    # to help the minimizer near the boundary.
    if w.var("efficiency").getVal() > 0.9:
        w.var("efficiency").setMin(0.85)
        retval = minim.minimize("Minuit2", "migrad")
    result = minim.save()
    effval = w.var("efficiency").getVal()
    efferr, minoslog = manualMinos(w.pdf("pdf"), data, w.var("efficiency"),
                                   options)
    #poi = ROOT.RooArgSet(w.var("efficiency"))
    #ret = minim.minos(poi)
    print "MANUAL MINOS: ", efferr, "\n\t", "\n\t".join(minoslog)

    # plot
    c1 = ROOT.TCanvas("c1", "c1")
    c1.SetCanvasSize(900, 500)
    c1.Divide(2, 1)
    c1.cd(1)
    fpass = w.var("mass").frame()
    dpass.plotOn(fpass)
    w.pdf("pdfPass").plotOn(fpass, ROOT.RooFit.Components(backgrounds[0]),
                            ROOT.RooFit.LineColor(ROOT.kGreen + 3))
    w.pdf("pdfPass").plotOn(fpass, ROOT.RooFit.LineColor(ROOT.kGreen + 1))
    fpass.Draw()
    c1.cd(2)
    ffail = w.var("mass").frame()
    dfail.plotOn(ffail)
    w.pdf("pdfFail").plotOn(ffail, ROOT.RooFit.Components(backgrounds[1]),
                            ROOT.RooFit.LineColor(ROOT.kRed + 3))
    w.pdf("pdfFail").plotOn(ffail, ROOT.RooFit.LineColor(ROOT.kRed + 0))
    ffail.Draw()
    for ext in "pdf", "png":
        c1.Print("%s/%s.%s" % (options.printDirBins, name, ext))
    # Text report: fitted parameters with errors, flagging values that
    # ended up close to their allowed limits.
    log = open("%s/%s.txt" % (options.printDirBins, name), "w")
    fpf = result.floatParsFinal()
    for i in range(fpf.getSize()):
        par = fpf.at(i)
        log.write("%-20s : " % par.GetName())
        if par.hasAsymError():
            log.write(
                "%8.4f  %+8.4f / %+8.4f" %
                (par.getVal(), par.getAsymErrorLo(), par.getAsymErrorHi()))
            plo, phi = par.getVal() - par.getAsymErrorLo(), par.getVal(
            ) + par.getAsymErrorHi()
        else:
            if par.GetName() in ("Nsig", "Nbkg"):
                log.write("%8.1f  %+8.1f           " %
                          (par.getVal(), par.getError()))
            else:
                log.write("%8.4f  %+8.4f           " %
                          (par.getVal(), par.getError()))
            plo, phi = par.getVal() - par.getError(), par.getVal(
            ) + par.getError()
        pmax, pmin = par.getMax(), par.getMin()
        log.write("   [ %8.4f , %8.4f ]" % (pmin, pmax))
        if phi > 0.95 * pmax + 0.05 * pmin: log.write("   <-- close to max")
        if plo < 0.05 * pmax + 0.95 * pmin: log.write("   <-- close to min")
        log.write("\n")

    log.write("MANUAL MINOS: %s\n\t%s\n" % (efferr, "\n\t".join(minoslog)))

    # goodness of fit
    chi2pass = chi2(
        hpass, w.var("mass"), w.pdf("pdfPass"),
        w.function("sigPass").getVal() + w.function("bkgPass").getVal())
    chi2fail = chi2(
        hfail, w.var("mass"), w.pdf("pdfFail"),
        w.function("sigFail").getVal() + w.function("bkgFail").getVal())
    log.write(
        "\nChi2: pass %.2f, fail %.2f, total %.2f, ndof: %d - %d = %d \n" %
        (chi2pass, chi2fail, chi2pass + chi2fail, hpass.GetNbinsX() * 2,
         fpf.getSize(), hpass.GetNbinsX() * 2 - fpf.getSize() + 1))
    log.close()

    # Always persist the workspace (no options.fullOut gate in this variant).
    tfout = ROOT.TFile.Open("%s/%s.%s" % (options.printDirBins, name, "root"),
                            "RECREATE")
    w.SetName("w_" + name)
    tfout.WriteTObject(w, "w_" + name)
    tfout.Close()

    # report result
    if efferr:
        return effval, efferr[0], efferr[1]
    else:
        return None