Example #1
def doindex(gve, x, y, z):
    global NUL
    global tmp
    global NPKS
    ss = sys.stdout  # turns off printing
    #sys.stdout = NUL
    myindexer = indexing.indexer()
    myindexer.readgvfile(gve)
    for ring1 in [1, 0, 2]:
        for ring2 in [1, 0]:
            myindexer.parameterobj.set_parameters({
                'ds_tol': 0.004,
                'minpks': NPKS,
                'max_grains': 1000,
                'hkl_tol': 0.02,
                'ring_1': ring1,
                'ring_2': ring2
            })
            myindexer.loadpars()
            myindexer.assigntorings()
            myindexer.find()
            myindexer.scorethem()
    grains = [grain.grain(ubi, [x, y, z]) for ubi in myindexer.ubis]
    grain.write_grain_file("%s.ubi" % (tmp), grains)
    sys.stdout = ss
    return len(grains)
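A hedged invocation sketch (not part of the original source), assuming the module-level NUL, tmp and NPKS globals that doindex reads; the names and values are illustrative:

import os
NUL = open(os.devnull, "w")     # sink for the suppressed indexer output
tmp = "point_x0_y0_z0"          # results land in point_x0_y0_z0.ubi
NPKS = 20                       # minimum peaks to keep a grain
ngrains = doindex("peaks.gve", 0.0, 0.0, 0.0)
print(ngrains, "grains written to %s.ubi" % tmp)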
Example #2
def grid_index_parallel(fltfile, parfile, tmp, gridpars, translations):
    """
    fltfile containing peaks
    parfile containing instrument geometry and unit cell
    tmp - base name for scratch files and results
    gridpars : dictionary of control parameters (rings to use, etc)
    translations : list of translation positions to try

    Runs a grid index algorithm using Python's multiprocessing module
    splits workload over processes (blocks of translations to each process)
    This thread should catch results via a queue
    """
    gridpars = initgrid(fltfile, parfile, tmp, gridpars)
    print("Done init")
    if 'NPROC' not in gridpars or gridpars['NPROC'] is None:
        NPR = multiprocessing.cpu_count() - 1
        cImageD11.cimaged11_omp_set_num_threads(2)  # assume hyperthreading is useful?
    else:
        NPR = int(gridpars['NPROC'])
    if 'NTHREAD' in gridpars:
        cImageD11.cimaged11_omp_set_num_threads(int(gridpars['NTHREAD']))
    elif NPR > 1:
        cImageD11.cimaged11_omp_set_num_threads(1)
    tsplit = [translations[i::NPR] for i in range(NPR)]
    args = [("%s.flt" % (tmp), parfile, t, gridpars)
            for i, t in enumerate(tsplit)]
    q = PQueue()
    p = Pool(processes=NPR,
             initializer=wrap_test_many_points_init,
             initargs=[q])
    print("Using a pool of", NPR, "processes")
    pa = p.map_async(wrap_test_many_points, args)
    ul = uniq_grain_list(gridpars['SYMMETRY'], gridpars['toldist'],
                         gridpars['tolangle'])
    lastsave = 0

    while True:
        # Wait up to 10 seconds for a result; on timeout, check if work remains
        try:
            grs = q.get(True, 10)
            gb4 = len(ul.uniqgrains)
            ul.add(grs)
            gnow = len(ul.uniqgrains)
            print("Got % 5d new %d from %d" % (gnow, gnow - gb4, len(grs)))
            if len(ul.uniqgrains) > lastsave:
                lastsave = len(ul.uniqgrains)
                grain.write_grain_file("all" + tmp + ".map", ul.uniqgrains)
            if pa._number_left == 0:
                break
        except Queue.Empty:
            sys.stderr.write(" Caught queue empty exception\n")
            if pa._number_left == 0:
                break
        except KeyboardInterrupt:
            break
    # write here to be on the safe side ....
    grain.write_grain_file("all" + tmp + ".map", ul.uniqgrains)
    p.close()
    p.join()
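A minimal driver sketch for grid_index_parallel, assuming it is importable from ImageD11 and that initgrid reads the same gridpars keys used above and in Example #3 (DSTOL, NPKS); all file names and grid spacings are illustrative:

from ImageD11.grid_index_parallel import grid_index_parallel

if __name__ == "__main__":
    gridpars = {
        'DSTOL': 0.004,       # d-spacing tolerance for ring assignment
        'NPKS': 25,           # minimum peaks per grain
        'SYMMETRY': "cubic",  # used by uniq_grain_list
        'toldist': 100.,      # translation tolerance for duplicate grains
        'tolangle': 0.25,     # misorientation tolerance in degrees
        'NPROC': None,        # None -> cpu_count() - 1
    }
    # grid of candidate translations to try
    translations = [(x, y, 0)
                    for x in range(-500, 501, 100)
                    for y in range(-500, 501, 100)]
    grid_index_parallel("peaks.flt", "geometry.par", "tmp", gridpars, translations)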
Example #3
def doindex(gve, x, y, z, w):
    global NUL
    global tmp
    global NPKS
    global UC
    global TOLSEQ
    ss = sys.stdout  # turns off printing
    sys.stdout = NUL
    myindexer = indexing.indexer(wavelength=w, unitcell=UC, gv=gve.T)
    myindexer.ds = np.sqrt((gve * gve).sum(axis=0))
    myindexer.ga = np.zeros(len(myindexer.ds), int) - 1  # Grain assignments
    #   myindexer.readgvfile( gve )

    for ring1 in RING1:
        for ring2 in RING2:
            myindexer.parameterobj.set_parameters({
                'ds_tol': DSTOL,
                'minpks': NPKS,
                'max_grains': 1000,
                'hkl_tol': TOLSEQ[0],
                'ring_1': ring1,
                'ring_2': ring2
            })
            myindexer.loadpars()
            myindexer.assigntorings()
            myindexer.find()
            myindexer.scorethem()
    grains = [grain.grain(ubi, [x, y, z]) for ubi in myindexer.ubis]
    grain.write_grain_file("%s.ubi" % (tmp), grains)
    sys.stdout = ss
    return len(grains)
Example #4
def setup():
    try:
        c = columnfile( sys.argv[1] )
        g = read_grain_file( sys.argv[2] )
        parfile = sys.argv[3]
        cmds = []
    except Exception:
        print( "Usage: %s colfile.flt.new grains.map parameters.par  --omega_slop=1 etc"%(sys.argv[0]))
        sys.exit()
    
    if platform.system() != "Windows":
        fmt = "%s %s"
    else:
        fmt = '%s "%s"'

    cmd0 = fmt%( sys.executable,
                     os.path.join( os.path.split(__file__)[0],
                                   "fitgrain.py" ) )

    for i in range(len(g)):
        #g[i].translation[2] = 0.0
        write_grain_file("%d.ubi"%(i),[g[i]])
        d = c.copy()
        d.filter( d.labels == i )
        d.writefile("%d.flt"%(i))
        cmd = cmd0 + " -p %s -u %d.ubi -U %d.ubi -P %d.par -f %d.flt -x t_z"%(
            parfile,i,i,i,i)
        for extra_arg in sys.argv[4:]:
            cmd += " "+extra_arg
        cmds.append( cmd )
    return cmds
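A minimal consumer sketch for the returned command list (an assumption: the original presumably dispatched these to a job runner, but serial execution also works):

import os
for cmd in setup():
    os.system(cmd)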
Example #5
def main():
    import sys, time
    c = columnfile(sys.argv[1])
    p = read_par_file(sys.argv[2])
    u = unitcell_from_parameters(p)
    gl = read_grain_file(sys.argv[3])
    if gl[0].translation is None:
        gl[0].translation = np.array((0., 0., 0.))
    start = time.time()
    # Setup and assign hkls
    w = p.get("wavelength")
    peaks_Cxyz, beam_Cxyz = getCxyz(c, p)
    t = gl[0].translation.copy()
    ub = gl[0].ub.copy()
    ubi = gl[0].ubi.copy()
    gve = compute_Cgve(t, peaks_Cxyz, beam_Cxyz, w)
    hi = np.round(np.dot(ubi, gve))
    lastgof = 1e9
    ubn, tn = fit_ub_t(ub, t, hi, peaks_Cxyz, beam_Cxyz, w)
    print("Before\nt=", gl[0].translation)
    print("UB=", gl[0].ub)
    gl[0].set_ubi(np.linalg.inv(ubn))
    gl[0].translation = tn
    dt = time.time() - start
    print("time calculating", dt, "gps", 1 / dt)
    print("After\nt=", gl[0].translation)
    print("UB=", gl[0].ub)
    write_grain_file(sys.argv[4], gl)
Example #6
def main2():
    import sys
    c = columnfile(sys.argv[1])
    p = read_par_file(sys.argv[2])
    gl = read_grain_file(sys.argv[3])
    for i, g in enumerate(gl):
        mask = c.labels == i
        g.sc = np.compress(mask, c.sc)
        g.fc = np.compress(mask, c.fc)
        g.omega = np.compress(mask, c.omega)
        ubnew, tnew = fitagrain(g, p)
        g.set_ubi(np.linalg.inv(ubnew))
        g.translation[:] = tnew
        print(i, len(g.sc), tnew)
    write_grain_file(sys.argv[4], gl)
Example #7
def domap(OmFloat,
          OmSlop,
          pars,
          colfile,
          grainsfile,
          tolseq=[0.03, 0.02, 0.01],
          symmetry="triclinic"):
    """mapping function - does what makemap.py does"""
    global NPKS
    ss = sys.stdout  # turns off printing

    def fit(tol):
        o = refinegrains.refinegrains(
            OmFloat=OmFloat,
            OmSlop=OmSlop,
            tolerance=tol,
            intensity_tth_range=(0, 180),
        )
        o.parameterobj = pars
        # o.loadfiltered ...
        o.scannames = ["internal"]
        o.scantitles = colfile.titles
        o.scandata["internal"] = colfile
        o.tolerance = tol
        o.readubis(grainsfile)
        if symmetry != "triclinic":
            o.makeuniq(symmetry)
        o.generate_grains()
        o.refinepositions()
        return o

    for tol in tolseq:
        sys.stdout = NUL
        o = fit(tol)
        gl = [g for g in o.grains.values() if g.npks > NPKS]
        sys.stdout = ss
        if len(gl) == 0:
            print("I killed all your grains!")
            break
        else:
            print("Keeping", len(gl), "from", len(o.grains.values()),
                  "grains with at least", NPKS, "peaks", tol)
            grain.write_grain_file(grainsfile, gl)
    # re-assign after last filter
    fit(tol)
    return len(gl)
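A hedged call sketch for domap, assuming the module-level NUL and NPKS globals the function relies on; file names are illustrative:

import os
from ImageD11 import columnfile, parameters

NUL = open(os.devnull, "w")    # swallows the refinement chatter
NPKS = 25                      # keep grains with more than NPKS peaks
pars = parameters.read_par_file("geometry.par")
colfile = columnfile.columnfile("peaks.flt")
nkept = domap(True, 0.05, pars, colfile, "grains.map",
              tolseq=[0.03, 0.02, 0.01], symmetry="cubic")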
Example #8
def main():
    flt = columnfile.columnfile(sys.argv[1])
    grains = grain.read_grain_file(sys.argv[2])
    pars = parameters.read_par_file(sys.argv[3])
    newgrainfile = sys.argv[4]

    hkltol = 0.05  #  for first peak assignments
    nmedian = 5  #  for removing peak fit outliers
    omegastep = 1.0  #  for omega images
    ymin = -18  #  dty start (so -18 -> +18 in 0.25 steps)
    ystep = 0.25  #  step in dty from scan
    rcut = 0.2  #  cutoff for segmentation of reconstruction

    OMSLOP = omegastep / 2.0

    tth, eta, gve = update_cols(flt, pars, OMSLOP)
    assign_peaks(grains, gve, flt, pars, nmedian, hkltol)
    pl.ioff()
    print("\n\n")
    for i, g in enumerate(grains):
        print("# Grain:", i)
        fit_one_grain(g, flt, pars)
        y0, x, y = map_out_cell(g, flt)
        sinoangles, sino, recon = map_grain(g, flt, ymin, ystep, omegastep)
        if 0:
            pl.subplot(211)
            pl.imshow(sino)
            pl.subplot(212)
            pl.imshow(recon)
            pl.show()
        active = recon > recon.max() * rcut
        ii, jj = np.mgrid[0:recon.shape[0],
                          0:recon.shape[0]] - recon.shape[0] // 2
        for ix, iy in zip(ii[active], jj[active]):
            gf = fit_one_point(g, flt, pars, ix, iy, ystep)
            print(("%-4d  " * 4) % (i, ix, iy, gf.mask.astype(int).sum()),
                  end=" ")
            print(("%.6f  " * 6) % (indexing.ubitocellpars(gf.ubi)), end=" ")
            print(("%.6f  " * 3) % tuple(gf.Rod))
        g.translation = (x, y, 0)

    grain.write_grain_file(newgrainfile, grains)
Example #9
    def savegrains(self, filename, sort_npks=True):
        """
        Save the refined grains

        """
        ks = list(self.grains.keys())
        # sort by number of peaks indexed to write out
        if sort_npks:
            #        npks in x array
            order = numpy.argsort([self.grains[k].npks for k in ks])
            ks = [ks[i] for i in order[::-1]]
        else:
            ks.sort()
        gl = [(self.grains[k], k) for k in ks]

        # Update the datafile and grain names to reflect indices in the grain list
        for g, k in gl:
            name, fltname = g.name.split(":")
            assert fltname in self.scandata, "Sorry - logical flaw"
            assert len(list(self.scandata.keys())
                       ) == 1, "Sorry - need to fix for multi data"
            self.set_translation(k[0], fltname)
            self.compute_gv(g, update_columns=True)
            numpy.put(self.scandata[fltname].gx, g.ind, self.gv[:, 0])
            numpy.put(self.scandata[fltname].gy, g.ind, self.gv[:, 1])
            numpy.put(self.scandata[fltname].gz, g.ind, self.gv[:, 2])
            hkl_real = numpy.dot(g.ubi, self.gv.T)
            numpy.put(self.scandata[fltname].hr, g.ind, hkl_real[0, :])
            numpy.put(self.scandata[fltname].kr, g.ind, hkl_real[1, :])
            numpy.put(self.scandata[fltname].lr, g.ind, hkl_real[2, :])
            hkl = numpy.floor(hkl_real + 0.5)
            numpy.put(self.scandata[fltname].h, g.ind, hkl[0, :])
            numpy.put(self.scandata[fltname].k, g.ind, hkl[1, :])
            numpy.put(self.scandata[fltname].l, g.ind, hkl[2, :])
            # Count "uniq" reflections...
            sign_eta = numpy.sign(self.scandata[fltname].eta_per_grain[g.ind])
            uniq_list = [(int(h), int(k), int(l), int(s))
                         for (h, k, l), s in zip(hkl.T, sign_eta)]
            g.nuniq = len(set(uniq_list))
        grain.write_grain_file(filename, [g[0] for g in gl])
Example #10
    def savegrains(self, filename, sort_npks=True):
        """
        Save the refined grains

        """
        ks = self.grains.keys()
        # sort by number of peaks indexed to write out
        if sort_npks:
            #        npks in x array
            gl = [(self.grains[k].npks, self.grains[k], k) for k in ks]
            gl.sort()
            gl = [(g[1], g[2]) for g in gl[::-1]]
        else:
            ks.sort()
            gl = [(self.grains[k], k) for k in ks]
        grain.write_grain_file(filename, [g[0] for g in gl])

        # Update the datafile and grain names to reflect indices in the grain list
        for g, k in gl:
            name, fltname = g.name.split(":")
            assert self.scandata.has_key(fltname), "Sorry - logical flaw"
            assert len(self.scandata.keys()
                       ) == 1, "Sorry - need to fix for multi data"
            self.set_translation(k[0], fltname)
            self.compute_gv(g, update_columns=True)
            numpy.put(self.scandata[fltname].gx, g.ind, self.gv[:, 0])
            numpy.put(self.scandata[fltname].gy, g.ind, self.gv[:, 1])
            numpy.put(self.scandata[fltname].gz, g.ind, self.gv[:, 2])
            hkl_real = numpy.dot(g.ubi, self.gv.T)
            numpy.put(self.scandata[fltname].hr, g.ind, hkl_real[0, :])
            numpy.put(self.scandata[fltname].kr, g.ind, hkl_real[1, :])
            numpy.put(self.scandata[fltname].lr, g.ind, hkl_real[2, :])
            hkl = numpy.floor(hkl_real + 0.5)
            numpy.put(self.scandata[fltname].h, g.ind, hkl[0, :])
            numpy.put(self.scandata[fltname].k, g.ind, hkl[1, :])
            numpy.put(self.scandata[fltname].l, g.ind, hkl[2, :])
Example #11
from ImageD11.grain import read_grain_file, write_grain_file
import sys, numpy as np
gl = read_grain_file(sys.argv[1])

s = 10.
for g in gl:
    g.translation += (np.random.random(3) - 0.5) * s
    g.ubi += (np.random.random((3, 3)) - 0.5) * 0.002

write_grain_file(sys.argv[2], gl)
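Usage sketch (the script name is hypothetical): python shake_grains.py ideal.map shaken.map adds up to +/-5 units of translation noise and +/-0.001 per ubi component; Example #14 reads the resulting shaken.map back for comparison against the ideal grains.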
Example #12
        ] * 3
    else:
        tols = [0.05, 0.01, 0.0075]
    for gref in gl:
        print("BEFORE:            ", 3 * "% 7.3f " % tuple(gref.translation),
              6 * "%.6f " % (ubitocellpars(gref.ubi)))
        for ii, tol in enumerate(tols):
            #print gref.translation
            gref = o.refine(gref, tol=tol)
            #print i,gref.npks
#        gref.pks = None
# re-assign after convergence
#        gref = o.refine( gref, tol=0.0075)
        gfl.append(gref)
        ng += 1
        print(ng,
              ng * 100.0 / len(gl),
              "%.2f   " % (time.time() - start),
              gref.npks,
              end=' ')
        print((3 * "%.4f ") % tuple(gref.translation), end=' ')
        print((6 * "%.6f ") % ubitocellpars(gref.ubi))
    print(time.time() - start)
    grain.write_grain_file(sys.argv[4], gfl)
#    1/0
#    for i in range(2):
#        o.find_triplets( i*17 )
#    pylab.show()

# ~/ImageD11/test/simul_1000_grains % python ../../sandbox/fittrans.py Al1000/Al1000.flt Al1000/Al1000.par allgrid.map allgridfittrans.map
Example #13
#!/usr/bin/env python
from __future__ import print_function


"""
Removes grains from a grain file if they have less than
npks peaks

Usage : cutgrains.py   ubi_in  ubi_out  npks
"""

from ImageD11.grain import read_grain_file, write_grain_file
import sys
try:
    GRAINS = read_grain_file(sys.argv[1])
    NPKS = int(sys.argv[3])
    KEEP = []
    for g in GRAINS:
        if int(g.npks) >= NPKS:
            KEEP.append(g)
    write_grain_file( sys.argv[2], KEEP)
except:
    print(__doc__)
    raise
            
Example #14
def check(gl1, gl2):
    dTs = 0
    dVs = 0
    mxT = 0
    mxTi = 0
    mxV = 0
    mxVi = 0
    for i, (g1, g2) in enumerate(zip( gl1, gl2)):
        dT = g1.translation - g2.translation
        dvol = np.linalg.det(g1.ubi) - np.linalg.det(g2.ubi)
        t = abs(dT).max()
        dTs += t
        dVs += abs(dvol)
        if t > mxT:
            mxTi, mxT = i, t
        if abs(dvol) > mxV:
            mxVi, mxV = i, abs(dvol)
    print(dTs / i, dVs / i, mxT, mxTi, mxV, mxVi)
    


print "fname avg_pos_err avg_vol_err max_pos_err grain_max_pos_err max_vol_err grain_max_vol_err"
for f in fnames:
    print(f, end=" ")
    check(read_grain_file(f), ideal)



gbad = read_grain_file("shaken.map")[159]
write_grain_file("gbad.ubi",[gbad,])
Example #15
#!/usr/bin/python

from ImageD11.columnfile  import columnfile
from ImageD11.grain import read_grain_file, write_grain_file
import sys, os

c = columnfile( sys.argv[1] )
g = read_grain_file( sys.argv[2] )

for i in range(len(g)):
    #g[i].translation[2] = 0.0
    write_grain_file("%d.ubi"%(i),[g[i]])
    d = c.copy()
    d.filter( d.labels == i )
    d.writefile("%d.flt"%(i))

    os.system( "fitgrain.py -p %s -u %d.ubi -U %d.ubi -P %d.par -f %d.flt -x t_z"%(sys.argv[3],i,i,i,i))

#    for j in range(3):
#            os.system( "fitgrain.py -u %d.ubi -U %d.ubi -p %d.par -P %d.par -f %d.flt"%(i,i,i,i,i))
Example #16
def fitallgrains( gfile, pfile, cfile, ngrains = None):
    colfile = loadcolfile( cfile )
    grains = read_grain_file( gfile )
    pars = read_par_file( pfile )

    variables = [ 't_x','t_y', 't_z', 'y_center',  'tilt_y', 'tilt_z',
                       'tilt_x',  'distance', 'wedge']
    pfitted = []
    grs = []
    cfs = []
    if ngrains is None:
        ng = len(grains)
    else:
        ng = ngrains
    
    for i in range(ng):
        print "***",i,
        gr = grains[i]
        cf = assignpeaks( gr, pars, colfile, tol = 0.02 )
        cfs.append(cf)
        grs.append(gr)
    pi = parameters( **pars.parameters )
    refpars = fitmanygrains( cfs, grs, pi, variables )
    for i in range(3):
        refpars = fitmanygrains( cfs, grs, refpars, variables )

    if 0:
        pi = parameters( **pars.parameters )
        pi.set('t_x', gr.translation[0])
        pi.set('t_y', gr.translation[1])
        pi.set('t_z', gr.translation[2])
        diff, Ddiff = fitgrainfunc( cf, gr, pi, variables )
        print "%.5g"%((diff*diff).ravel().sum()),
        gr, pfit = fitgrain( cf, gr, pi, variables, quiet=True )
        grains[i] = gr
        pfitted.append( pfit )
        diff, Ddiff = fitgrainfunc( cf, gr, pfit, variables )
        print "%.5g"%((diff*diff).ravel().sum())

        if 0:
            v = Ddiff.keys()
            for v in ['y_center', 'distance']:
                pylab.figure(1)
                pylab.title("Versus omega")
                pylab.plot( cf.omega, project_diff_on_variable( diff, v, Ddiff) , ",", label=v) 
                pylab.figure(2)
                pylab.title("Versus fc")
                pylab.plot( cf.fc, project_diff_on_variable( diff, v, Ddiff) , ",", label=v) 
                pylab.figure(3)
                pylab.title("Versus sc")
                pylab.plot( cf.sc, project_diff_on_variable( diff, v, Ddiff) , ",", label=v) 
                pylab.figure(4)
                pylab.title("Versus sigo")
                pylab.plot( cf.sigo, project_diff_on_variable( diff, v, Ddiff) , ",", label=v)    
                pylab.legend()
                pylab.show()
                input()



    write_grain_file( gfile+".fit", grains)
Example #17
                break
            if TESTING:
                print("translation", t)
                print("Cell", indexing.ubitocellpars(np.linalg.inv(UB)))
        print("Translation %.5f %.5f %.5f" % tuple(t))
        print("Cell %.7f %.7f %.7f %.8f %.8f %.8f" %
              (indexing.ubitocellpars(np.linalg.inv(UB))))
        g.translation = t
        g.set_ubi(np.linalg.inv(UB))
    return grains


# TODO:
#
# DONE : Least squares as SVD problem
# - Allow a constraint matrix to be used
# - Compute weights from experimental data (sig/cov etc)
# DONE (but not weighted or printed) : Compute error estimates
# - Allow fitting of other geometrical parameters
# - Allow refinement in terms of other parameterisation of UB (e.g. U, B)
# - Documentation

if __name__ == "__main__":
    colfile, parfile, grainsfile, newgrainsfile = sys.argv[1:5]
    c = columnfile.columnfile(colfile)
    p = parameters.read_par_file(parfile)
    g = grain.read_grain_file(grainsfile)
    grain.write_grain_file(newgrainsfile, refit_makemap(c, p, g))

    # python teo.py ../test/simul_1000_grains/g187.flt ../test/simul_1000_grains/Al1000/Al1000.par ../test/simul_1000_grains/g187.map teo187fitted.map
Example #18
    matching = diffs < tol
    ubi = ubis[matching].mean(axis=0)
    remaining = ubis[~matching]
    return ubi, remaining, tol


ubiall = ubis.copy()
tol = None
uniqs = []
while len(ubis) > 0:
    uniq, ubis, tol = removeone(ubis, ubis[0], tol)
    print(uniq, len(ubis))
    uniqs.append(uniq)

# now check if any uniqs have a collision due to symmetry:

uniqs = np.array(uniqs)

for i in range(len(uniqs)):
    for operator in grp.group[1:]:  # skip the identity
        symubi = np.dot(operator, uniqs[i].reshape(3, 3)).ravel()
        scors = abs(uniqs - symubi).sum(axis=1)
        found = scors < tol
        if found.sum() > 0:
            print("Symmetry collision!")
            print(i, operator.ravel(), scors[found],
                  np.arange(len(uniqs))[found])

grain.write_grain_file(
    uniqfile, [grain.grain(ubi.reshape(3, 3), (0, 0, 0)) for ubi in uniqs])
Example #19
def filtergrain(options):
    """
    Filter a peaks file according to which peaks are indexed
    """
    o = refinegrains.refinegrains(tolerance=0.9,
                                  OmFloat=options.omega_float,
                                  OmSlop=options.omega_slop)
    o.loadparameters(options.parfile)
    o.readubis(options.ubifile)

    if options.grain is None:
        if len(o.grainnames) == 1:
            gn = o.grainnames[0]
        else:
            for i, gn in zip(list(range(len(o.grainnames))), o.grainnames):
                logging.info("Choose %d for grain %s" % (i, gn))
            gn = o.grainnames[int(input("select which grain "))]
    else:
        gn = o.grainnames[int(options.grain)]
    o.grainnames = [
        gn,
    ]

    o.loadfiltered(options.fltfile)
    o.generate_grains()
    o.assignlabels()
    o.set_translation(gn, options.fltfile)
    o.compute_gv(o.grains[(gn, options.fltfile)])
    if options.tol is None:
        for tol in [0.01, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5]:
            o.tolerance = tol
            logging.info("tol %f" % (o.tolerance))
            o.refineubis(quiet=False, scoreonly=True)
        o.tolerance = float(input("Enter tolerance "))
        options.tol = o.tolerance
    else:
        o.tolerance = options.tol
    matrix = o.grains[(gn, options.fltfile)].ubi

    o.assignlabels()
    drlv2 = indexing.calc_drlv2(matrix, o.gv)
    logging.info("Total peaks before filtering %d" %
                 o.scandata[options.fltfile].nrows)
    gotpks = o.scandata[options.fltfile].copy()
    gotpks.filter(gotpks.labels == gn)
    gotpks.writefile(options.newfltfile)
    logging.info("Peaks which were indexed %d written to %s" %
                 (gotpks.nrows, options.newfltfile))
    # don't bother to copy here as we can overwrite
    if options.notindexed is not None:
        notpks = o.scandata[options.fltfile].copy()
        notpks.addcolumn(o.tth, "tth")
        notpks.addcolumn(o.eta, "eta")
        notpks.addcolumn(o.gv[:, 0], "gx")
        notpks.addcolumn(o.gv[:, 1], "gy")
        notpks.addcolumn(o.gv[:, 2], "gz")
        notpks.filter(drlv2 > o.tolerance * o.tolerance)
        notpks.writefile(options.notindexed)
        logging.info("Peaks which were not indexed %d written to %s" %
                     (notpks.nrows, options.notindexed))
    if options.newubifile is not None:
        o.scandata[options.fltfile] = gotpks
        #        matrix = o.refine(matrix,quiet=True)
        grain.write_grain_file(options.newubifile,
                               [o.grains[(gn, options.fltfile)]])
        logging.info("Refined ubi in %s " % (options.newubifile))