Example #1
def fitgrain(options):
    """
    Fits grains to a dataset using all peaks within tol
    """
    func = getattr(refinegrains, options.latticesymmetry)
    o = refinegrains.refinegrains(tolerance=options.tol,
                                  latticesymmetry=func,
                                  OmFloat=options.omega_float,
                                  OmSlop=options.omega_slop)
    o.loadparameters(options.parfile)
    o.readubis(options.ubifile)
    o.loadfiltered(options.fltfile)
    o.generate_grains()
    #o.refineubis(quiet = False)
    #print o.grains.keys()
    #print "#pks",o.grains[(0,"0.flt")].x.shape
    o.parameterobj.varylist = options.varylist
    for p in options.fixlist:
        try:
            o.parameterobj.varylist.remove(p)
        except ValueError:
            pass
    logging.info("Varying " + str(o.parameterobj.varylist))
    #print "#pks",o.grains[(0,"0.flt")].x.shape
    o.fit(maxiters=options.steps)
    #print "#pks",o.grains[(0,"0.flt")].x.shape
    o.refineubis(quiet=False)
    o.saveparameters(options.newparfile)
    # ul = [g.ubi for g in o.grains.values()]
    # indexing.write_ubi_file(options.newubifile, ul)
    # Keep the original ordering and add translation information
    o.savegrains(options.newubifile, sort_npks=False)
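
A minimal sketch of how fitgrain() might be driven with an argparse-style options object. The attribute names mirror exactly those read by the function above; the file names, tolerance, step count and parameter lists are placeholders, not values from the original example.

from argparse import Namespace

options = Namespace(
    latticesymmetry="triclinic",   # must name a symmetry function in refinegrains
    tol=0.05,
    omega_float=True,
    omega_slop=0.25,
    parfile="geometry.par",
    ubifile="grains.ubi",
    fltfile="peaks.flt",
    varylist=["y_center", "z_center", "tilt_y", "tilt_z", "wedge"],
    fixlist=["wedge"],
    steps=1000,
    newparfile="geometry_refined.par",
    newubifile="grains_fitted.map",
)
fitgrain(options)
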
Example #2
def makemap(options):
    try:
        if options.tthrange is None:
            tthr = (0., 180.)
        else:
            tthr = options.tthrange
            if len(tthr) == 1:
                tthr = (0, tthr[0])
            print("Using tthrange", tthr)
        func = getattr(ImageD11.refinegrains, options.latticesymmetry)
        o = refinegrains(intensity_tth_range=tthr,
                         latticesymmetry=func,
                         OmFloat=options.omega_float,
                         OmSlop=options.omega_slop)
    except Exception:
        raise
        # o = refinegrains()  # unreachable fallback retained from the original
    o.loadparameters(options.parfile)
    print("got pars")
    o.loadfiltered(options.fltfile)
    print("got filtered")
    o.readubis(options.ubifile)
    if options.symmetry != "triclinic":
        # Grainspotter will have already done this
        print("transform to uniq")
        o.makeuniq(options.symmetry)
    print("got ubis")
    o.tolerance = float(options.tol)
    print("generating")
    o.generate_grains()
    print("Refining posi too")
    # o.refineubis(quiet = False , scoreonly = True)
    print("Refining positions too")
    o.refinepositions()
    print("Done refining positions too")
    # o.refineubis(quiet = False , scoreonly = True)
    o.savegrains(options.newubifile, sort_npks=options.sort_npks)
    o.scandata[options.fltfile].writefile(options.fltfile + ".new")
    if hasattr(options, "newfltfile") and options.newfltfile is not None:
        print("re-assignlabels")
        o.assignlabels()
        col = o.scandata[options.fltfile].copy()
        print("Before filtering", col.nrows)
        col.filter(col.labels < -0.5)
        # print col.labels[:10]
        print("After filtering", col.nrows)
        col.writefile(options.newfltfile)
Example #3
def domap(pars, colfile, grains, gridpars):
    """
    mapping function - does what makemap.py does, but in a function
    """
    if 'FITPOS' not in gridpars:
        gridpars['FITPOS'] = True

    OmSlop = gridpars['OMEGAFLOAT']
    OmFloat = OmSlop > 0
    #
    ss = sys.stdout  # save stdout so printing can be restored later
    if gridpars['NUL']:
        NUL = open(nulfile, "w")
        sys.stdout = NUL
    for tol in gridpars['TOLSEQ']:
        o = refinegrains.refinegrains(
            OmFloat=OmFloat,
            OmSlop=OmSlop,
            tolerance=tol,
            intensity_tth_range=(0, 180),
        )
        o.parameterobj = pars
        # o.loadfiltered ...
        o.scannames = ["internal"]
        o.scantitles = colfile.titles
        o.scandata["internal"] = colfile
        o.tolerance = tol
        # o.readubis( grainsfile )
        for i, g in enumerate(grains):
            name = i
            o.grainnames.append(i)
            o.ubisread[name] = g.ubi
            o.translationsread[name] = g.translation
        if gridpars['SYMMETRY'] != "triclinic":
            o.makeuniq(gridpars['SYMMETRY'])
        o.generate_grains()
        if gridpars['FITPOS']:
            o.refinepositions()
        else:
            o.assignlabels()
            for key in o.grains.keys():
                g = o.grains[key]
                g.set_ubi(o.refine(g.ubi, quiet=False))
        # This fills in the uniq for each grain
        o.savegrains(nulfile, sort_npks=False)
        if 'NUNIQ' in gridpars:
            keep = lambda g: g.nuniq > gridpars['NUNIQ'] and g.npks > gridpars['NPKS']
        else:
            keep = lambda g: g.npks > gridpars['NPKS']
        gl = [g for g in o.grains.values() if keep(g)]
        if len(gl) == 0:
            break
        grains = gl
    if gridpars['NUL']:
        sys.stdout = ss
    return gl
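
An illustrative gridpars dictionary with the keys that this domap() reads; the numeric values below are placeholders, not recommendations from the original.

gridpars = {
    'TOLSEQ': [0.03, 0.02, 0.01],   # successive hkl tolerances to refine at
    'OMEGAFLOAT': 0.25,             # > 0 switches on omega floating with this slop
    'SYMMETRY': 'cubic',            # anything other than 'triclinic' triggers makeuniq
    'NPKS': 10,                     # minimum number of peaks to keep a grain
    'NUNIQ': 5,                     # optional: minimum unique peaks as well
    'NUL': True,                    # silence printing by redirecting stdout to nulfile
    'FITPOS': True,                 # refine positions; False refines ubi matrices only
}
# kept_grains = domap(pars, colfile, grains, gridpars)
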
Example #4
def filtergrain(options):
    """
    Filter a peaks file according to which peaks are indexed
    """
    o = refinegrains.refinegrains(options.tol)
    o.loadparameters(options.parfile)
    o.readubis(options.ubifile)
    o.loadfiltered(options.fltfile)
    o.generate_grains()
    for gn in o.grainnames:
        o.reset_labels(options.fltfile)
        o.set_translation(gn, options.fltfile)
        o.compute_gv(gn, options.fltfile)
        drlv2 = indexing.calc_drlv2(o.grains[(gn, options.fltfile)].ubi, o.gv)
        o.scandata[options.fltfile].filter(drlv2 > options.tol * options.tol)
        print(gn, o.scandata[options.fltfile].nrows)
    o.scandata[options.fltfile].writefile(options.newfltfile)
Example #5
def domap(pars, colfile, grains, gridpars):
    """
    mapping function - does what makemap.py does, but in a function
    """
    OmSlop = gridpars['OMEGAFLOAT']
    OmFloat = OmSlop > 0
    #
    ss = sys.stdout  # save stdout so printing can be restored later
    if gridpars['NUL']:
        NUL = open(nulfile, "w")
        sys.stdout = NUL
    for tol in gridpars['TOLSEQ']:
        o = refinegrains.refinegrains(
            OmFloat=OmFloat,
            OmSlop=OmSlop,
            tolerance=tol,
            intensity_tth_range=(0, 180),
        )
        o.parameterobj = pars
        # o.loadfiltered ...
        o.scannames = ["internal"]
        o.scantitles = colfile.titles
        o.scandata["internal"] = colfile
        o.tolerance = tol
        # o.readubis( grainsfile )
        for i, g in enumerate(grains):
            name = i
            o.grainnames.append(i)
            o.ubisread[name] = g.ubi
            o.translationsread[name] = g.translation
        if gridpars['SYMMETRY'] != "triclinic":
            o.makeuniq(gridpars['SYMMETRY'])
        o.generate_grains()
        o.refinepositions()
        # This fills in the uniq for each grain
        o.savegrains(nulfile, sort_npks=False)
        gl = [g for g in o.grains.values() if g.npks > gridpars['NPKS']]
        if len(gl) == 0:
            break
        grains = gl
    if gridpars['NUL']:
        sys.stdout = ss
    return gl
Example #6
def fit(tol):
    o = refinegrains.refinegrains(
        OmFloat=OmFloat,
        OmSlop=OmSlop,
        tolerance=tol,
        intensity_tth_range=(0, 180),
    )
    o.parameterobj = pars
    # o.loadfiltered ...
    o.scannames = ["internal"]
    o.scantitles = colfile.titles
    o.scandata["internal"] = colfile
    o.tolerance = tol
    o.readubis(grainsfile)
    if symmetry != "triclinic":
        o.makeuniq(symmetry)
    o.generate_grains()
    o.refinepositions()
    return o
Example #7
def domap(OmFloat,
          OmSlop,
          pars,
          colfile,
          grainsfile,
          tolseq=[0.03, 0.02, 0.01],
          symmetry="triclinic"):
    """mapping function - does what makemap.py does"""
    global NPKS
    ss = sys.stdout  # save stdout so printing can be restored later
    for tol in tolseq:
        sys.stdout = NUL
        o = refinegrains.refinegrains(
            OmFloat=OmFloat,
            OmSlop=OmSlop,
            tolerance=tol,
            intensity_tth_range=(0, 180),
        )
        o.parameterobj = pars
        # o.loadfiltered ...
        o.scannames = ["internal"]
        o.scantitles = colfile.titles
        o.scandata["internal"] = colfile
        o.tolerance = tol
        o.readubis(grainsfile)
        if symmetry != "triclinic":
            o.makeuniq(symmetry)
        o.generate_grains()
        o.refinepositions()
        gl = [x for x in list(o.grains.values()) if x.npks > NPKS]
        sys.stdout = ss
        if len(gl) == 0:
            print("I killed all your grains!")
            break
        else:
            print("Keeping", len(gl), "from", len(list(o.grains.values())),
                  "grains with at least", NPKS, "peaks", tol)
            grain.write_grain_file(grainsfile, gl)
    return len(gl)
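
This version of domap() relies on module-level NPKS and NUL objects and on pars/colfile prepared elsewhere. A minimal sketch of that setup; the numbers and file names are placeholders, not values from the original.

import os
from ImageD11 import refinegrains, grain, parameters, columnfile

NPKS = 10                      # minimum peak count for a grain to be kept
NUL = open(os.devnull, "w")    # sink for the silenced per-tolerance output

# pars = parameters.parameters(); pars.loadparameters("geometry.par")
# colfile = columnfile.columnfile("peaks.flt")
# nkept = domap(OmFloat=True, OmSlop=0.25, pars=pars, colfile=colfile,
#               grainsfile="grains.map", tolseq=[0.03, 0.02, 0.01],
#               symmetry="cubic")
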
Example #8
def main(args):

    # set up peak-finding
    print()
    r = refinegrains()
    r.loadparameters(args.parfile)
    r.loadfiltered(args.fltfile)
    r.readubis(args.ubifile)
    r.generate_grains()
    r.tolerance = float(args.tol)
    p = parameters.parameters()
    p.loadparameters(args.parfile)

    #*********************** BEGIN USER INPUT ***********************#

    print()
    param_range = np.array([
        #     MIN          MAX
        #----------------------------
        [
            r.parameterobj.parameters['distance'] * (1 - args.dd),
            r.parameterobj.parameters['distance'] * (1 + args.dd),
        ],  #dd
        [
            r.parameterobj.parameters['y_center'] - args.dy,
            r.parameterobj.parameters['y_center'] + args.dy,
        ],  #dy
        [
            r.parameterobj.parameters['z_center'] - args.dz,
            r.parameterobj.parameters['z_center'] + args.dz,
        ],  #dz
        [
            r.parameterobj.parameters['tilt_x'] - args.dtx,
            r.parameterobj.parameters['tilt_x'] + args.dtx,
        ],  #dtx
        [
            r.parameterobj.parameters['tilt_y'] - args.dty,
            r.parameterobj.parameters['tilt_y'] + args.dty,
        ],  #dty
        [
            r.parameterobj.parameters['tilt_z'] - args.dtz,
            r.parameterobj.parameters['tilt_z'] + args.dtz,
        ],  #dtz
        [
            r.parameterobj.parameters['wedge'] - args.dw,
            r.parameterobj.parameters['wedge'] + args.dw,
        ]
    ])  #dw

    #************************* END USER INPUT *************************#

    # read and record initial optimal parameter values
    init_pars_array = np.genfromtxt(args.parfile, usecols=1)
    distance = init_pars_array[8]
    y_center = init_pars_array[25]
    z_center = init_pars_array[27]
    tilt_x = init_pars_array[20]
    tilt_y = init_pars_array[21]
    tilt_z = init_pars_array[22]
    wedge = init_pars_array[24]

    p_opt_array = [distance, y_center, z_center, tilt_x, tilt_y, tilt_z, wedge]

    # create list of parameter names

    all_param_names = [
        'distance', 'y_center', 'z_center', 'tilt_x', 'tilt_y', 'tilt_z',
        'wedge'
    ]
    param_names = []
    if args.dd != 0:
        param_names.append('distance')
    if args.dy != 0:
        param_names.append('y_center')
    if args.dz != 0:
        param_names.append('z_center')
    if args.dtx != 0:
        param_names.append('tilt_x')
    if args.dty != 0:
        param_names.append('tilt_y')
    if args.dtz != 0:
        param_names.append('tilt_z')
    if args.dw != 0 and (args.experiment == 'FF' or args.experiment == 'ff'):
        param_names.append('wedge')

    print('varying:')
    print(param_names)
    print()

    # find number of peaks using initial optimal parameter values
    for i, name in enumerate(all_param_names):
        r.parameterobj.parameters[name] = p_opt_array[i]
    r.assignlabels()  ## try here...
    n, e = scor(r)
    print(('Peaks found before parameter refinement: %d' % n))
    print()

    # parameter refinement iteration loop
    for current_iteration in range(args.iterations):

        if current_iteration > 0:
            ## Update range on parameters
            param_range[0, 0] = r.parameterobj.parameters['distance'] * (1 - args.dd)
            param_range[0, 1] = r.parameterobj.parameters['distance'] * (1 + args.dd)
            param_range[1, 0] = r.parameterobj.parameters['y_center'] - args.dy
            param_range[1, 1] = r.parameterobj.parameters['y_center'] + args.dy
            param_range[2, 0] = r.parameterobj.parameters['z_center'] - args.dz
            param_range[2, 1] = r.parameterobj.parameters['z_center'] + args.dz
            param_range[3, 0] = r.parameterobj.parameters['tilt_x'] - args.dtx
            param_range[3, 1] = r.parameterobj.parameters['tilt_x'] + args.dtx
            param_range[4, 0] = r.parameterobj.parameters['tilt_y'] - args.dty
            param_range[4, 1] = r.parameterobj.parameters['tilt_y'] + args.dty
            param_range[5, 0] = r.parameterobj.parameters['tilt_z'] - args.dtz
            param_range[5, 1] = r.parameterobj.parameters['tilt_z'] + args.dtz
            param_range[6, 0] = r.parameterobj.parameters['wedge'] - args.dw
            param_range[6, 1] = r.parameterobj.parameters['wedge'] + args.dw
        # skip parameter refinement?
        if args.skip_par_ref:
            print('Option to skip parameter refinement is set to ON. '
                  'GrainSweeper will use parameter values from the nf_X.par '
                  'file, unless opt_par_values_nf_X.txt already exists '
                  '(in which case values from this .txt file will be used).')
            print()
            print('The parameter refinement skipping option can be '
                  'toggled/overridden in the tweakpars.py script.')
            print()
            ## set initial optimal parameters
            for i, name in enumerate(all_param_names):
                r.parameterobj.parameters[name] = p_opt_array[i]
            break

        # create 3D array where:
        #     rows = parameter variation increments
        #     cols = parameter value, peaks found, e (3 cols total)
        #     slices = parameters varied
        param_peaks = np.zeros(shape=(args.steps, 3, len(param_names)))

        ## find peaks for each par variation, fit Gaussian to curve, and find
        ## optimal par values
        for i, name in enumerate(param_names):  # for each parameter
            print()
            print(('Varying parameter: %s; value: %g' %
                   (name, p_opt_array[all_param_names.index(name)])))
            print('   distance   y_center  z_center    tilt_x      tilt_y'
                  '      tilt_z      wedge     peaks found     e')
            var_range = list(
                np.linspace(param_range[all_param_names.index(name), 0],
                            param_range[all_param_names.index(name), 1],
                            args.steps))
            for j, d in enumerate(var_range):  # each parameter variation step
                r.parameterobj.parameters[name] = d
                dd = r.parameterobj.parameters['distance']
                yy = r.parameterobj.parameters['y_center']
                zz = r.parameterobj.parameters['z_center']
                tx = r.parameterobj.parameters['tilt_x']
                ty = r.parameterobj.parameters['tilt_y']
                tz = r.parameterobj.parameters['tilt_z']
                ww = r.parameterobj.parameters['wedge']

                n, e = scor(r)
                param_peaks[j, :, i] = d, n, e  # write values to array
                print(('%11.1f %9.1f %9.1f %11.6f %11.6f %11.6f %11.6f %10d '
                       '%11.5f' % (dd, yy, zz, tx, ty, tz, ww, n, e)))

            # create Gaussian fit
            p_opt = gauss_fit(param_peaks[:, 0, i], param_peaks[:, 1, i])
            if p_opt is None:
                print("can't fit something that doesn't vary")
                r.parameterobj.parameters[name] = p_opt_array[
                    all_param_names.index(name)]
                continue

            # create Gaussian xy values, and plot original and fit curves
            # together
            x_gauss_steps = np.linspace(
                param_range[all_param_names.index(name), 0],
                param_range[all_param_names.index(name), 1], 200)
            y_gauss_steps = gaussian(x_gauss_steps, *p_opt)
            plt.xlabel(str(name))
            plt.ylabel('peaks found')
            plt.plot(param_peaks[:, 0, i], param_peaks[:, 1, i])
            plt.plot(x_gauss_steps, y_gauss_steps)
            if args.experiment:
                figurename = ('%s_param_var_%s_iter%d.pdf' %
                              (args.experiment, name, current_iteration + 1))
            else:
                figurename = ('NEW_param_var_%s_iter%d.pdf' %
                              (name, current_iteration + 1))
            plt.savefig(figurename)
            plt.clf()

            ## print and record newest updated optimal parameter value,
            ## print peaks found
            print(('Optimal parameter value: %.4f' % p_opt[1]))
            p_opt_array[all_param_names.index(name)] = p_opt[1]
            r.parameterobj.parameters[name] = p_opt_array[
                all_param_names.index(name)]
            print()
            n, e = scor(r)  # find number of peaks using all optimal
            #parameter values
            print(('Peaks found after variation: %d' % n))

        # print optimal parameters from current iteration
        print()
        print(('Optimal parameter values for parameter refinement '
               'iteration %d:' % (current_iteration + 1)))
        print((' '.join(map(str, all_param_names))))
        print(('  '.join(map(str, p_opt_array))))
        print()

    # create optimal parameter values file (for importing into shell)
    if args.experiment:
        filename = '%s_opt_par_values.ini' % (args.experiment)
        writeline = args.experiment + '=' + str(n)
    else:
        filename = 'NEW_opt_par_values.ini'
        writeline = 'NEW = ' + str(n)

    text_file = open(filename, 'w')
    for i, name in enumerate(all_param_names):
        text_file.write(str(name) + '=' + str(p_opt_array[i]) + '\n')

    text_file.write(writeline)
    text_file.close()

    # write new parameter file
    r.parameterobj.parameters['t_x'] = 0
    r.parameterobj.parameters['t_y'] = 0
    r.parameterobj.parameters['t_z'] = 0
    r.parameterobj.saveparameters(args.outparfile)
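
The gauss_fit() and gaussian() helpers used above are not shown in the example. A minimal sketch of what they could look like, based only on how they are called here (gauss_fit returns fit parameters or None, and p_opt[1] is taken as the optimal parameter value); this is an assumption, not the original implementation.

import numpy as np
from scipy.optimize import curve_fit

def gaussian(x, amplitude, center, width):
    # bell curve modelling peaks-found versus parameter value
    return amplitude * np.exp(-((x - center) ** 2) / (2.0 * width ** 2))

def gauss_fit(x, y):
    # fit a Gaussian to (x, y); return the parameters, or None if the data do not vary
    if np.ptp(y) == 0:
        return None
    p0 = [float(y.max()), float(x[np.argmax(y)]), (x.max() - x.min()) / 4.0]
    try:
        p_opt, _ = curve_fit(gaussian, x, y, p0=p0)
        return p_opt
    except RuntimeError:
        return None
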
Example #9
def filtergrain(options):
    """
    Filter a peaks file according to which peaks are indexed
    """
    o = refinegrains.refinegrains(tolerance=0.9,
                                  OmFloat=options.omega_float,
                                  OmSlop=options.omega_slop)
    o.loadparameters(options.parfile)
    o.readubis(options.ubifile)

    if options.grain is None:
        if len(o.grainnames) == 1:
            gn = o.grainnames[0]
        else:
            for i, gn in enumerate(o.grainnames):
                logging.info("Choose %d for grain %s" % (i, gn))
            gn = o.grainnames[int(input("select which grain "))]
    else:
        gn = o.grainnames[int(options.grain)]
    o.grainnames = [
        gn,
    ]

    o.loadfiltered(options.fltfile)
    o.generate_grains()
    o.assignlabels()
    o.set_translation(gn, options.fltfile)
    o.compute_gv(o.grains[(gn, options.fltfile)])
    if options.tol is None:
        for tol in [0.01, 0.025, 0.05, 0.1, 0.15, 0.25, 0.5]:
            o.tolerance = tol
            logging.info("tol %f" % (o.tolerance))
            o.refineubis(quiet=False, scoreonly=True)
        o.tolerance = float(input("Enter tolerance "))
        options.tol = o.tolerance
    else:
        o.tolerance = options.tol
    matrix = o.grains[(gn, options.fltfile)].ubi

    o.assignlabels()
    drlv2 = indexing.calc_drlv2(matrix, o.gv)
    logging.info("Total peaks before filtering %d" %
                 o.scandata[options.fltfile].nrows)
    gotpks = o.scandata[options.fltfile].copy()
    gotpks.filter(gotpks.labels == gn)
    gotpks.writefile(options.newfltfile)
    logging.info("Peaks which were indexed %d written to %s" %
                 (gotpks.nrows, options.newfltfile))
    # don't bother to copy here as we can overwrite
    if options.notindexed is not None:
        notpks = o.scandata[options.fltfile].copy()
        notpks.addcolumn(o.tth, "tth")
        notpks.addcolumn(o.eta, "eta")
        notpks.addcolumn(o.gv[:, 0], "gx")
        notpks.addcolumn(o.gv[:, 1], "gy")
        notpks.addcolumn(o.gv[:, 2], "gz")
        notpks.filter(drlv2 > o.tolerance * o.tolerance)
        notpks.writefile(options.notindexed)
        logging.info("Peaks which were not indexed %d written to %s" %
                     (notpks.nrows, options.notindexed))
    if options.newubifile is not None:
        o.scandata[options.fltfile] = gotpks
        #        matrix = o.refine(matrix,quiet=True)
        grain.write_grain_file(options.newubifile,
                               [o.grains[(gn, options.fltfile)]])
        logging.info("Refined ubi in %s " % (options.newubifile))
try:
    flt = sys.argv[1]
    par = sys.argv[2]
    ubi = sys.argv[3]
    tol = float(sys.argv[4])
    if len(sys.argv) > 5:
        nbins = int(sys.argv[5])
    else:
        nbins = 30
except (IndexError, ValueError):
    print("Usage: %s flt par ubi tol [nbins=30] [omega_slop]" % (sys.argv[0]))
    sys.exit()

if len(sys.argv) > 6:
    o = refinegrains(OmFloat=True, OmSlop=float(sys.argv[6]))
else:
    o = refinegrains(OmFloat=False)

o.loadparameters(par)
o.readubis(ubi)
o.loadfiltered(flt)
o.tolerance = tol
o.generate_grains()
o.assignlabels()

import matplotlib.pylab as pl
# indexed peaks only
d = o.scandata[flt]
d.filter(d.labels >= 0)
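
A possible continuation of the script, assuming assignlabels() filled a drlv2 column for each peak (as in makemap .flt.new output) so the nbins argument can histogram the indexed peaks; this plotting step is an assumption, not part of the original.

import numpy as np
pl.hist(np.sqrt(d.drlv2), bins=nbins, histtype="step")
pl.xlabel("drlv for indexed peaks")
pl.ylabel("number of peaks")
pl.show()
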