    def add_layer(self, insertpoint):
        params = self.params
        values = curvefitter.values(params)

        if not reflect.is_proper_abeles_input(values):
            raise ValueError('The size of the parameter array passed'
                             ' to reflectivity should be 4 * coefs[0] + 8')

        oldlayers = int(values[0])

        self.params['nlayers'].value = oldlayers + 1
        self.beginInsertRows(QtCore.QModelIndex(), insertpoint + 1,
                             insertpoint + 1)

        values = curvefitter.values(self.params)
        varys = curvefitter.varys(self.params)
        bounds = curvefitter.bounds(self.params)

        # do the insertion. Each layer contributes 4 parameters and the first
        # 8 parameters are global, hence the offset of 4 * insertpoint + 8.
        startP = 4 * insertpoint + 8

        # the new layer gets four default parameters: value 0, varying,
        # unbounded
        values = np.insert(values, startP, [0] * 4)
        varys[startP:startP] = [True] * 4
        bounds[startP:startP] = [(None, None)] * 4

        bounds = np.array(bounds)
        names = ReflectivityFitFunction.parameter_names(nparams=values.size)

        # clear the parameters. map() is lazy in Python 3, so use an explicit
        # loop rather than map(self.params.pop, ...).
        for name in list(self.params.keys()):
            self.params.pop(name)

        # reinsert parameters
        parlist = zip(names,
                      values,
                      varys,
                      bounds.T[0],
                      bounds.T[1],
                      [None] * values.size)

        for para in parlist:
            self.params.add(*para)

        self.endInsertRows()

    def remove_layer(self, which_layer):
        params = self.params
        values = curvefitter.values(params)
        if int(values[0]) == 0:
            return False

        if not reflect.is_proper_abeles_input(values):
            raise ValueError('The size of the parameter array passed'
                             ' to reflectivity should be 4 * coefs[0] + 8')

        oldlayers = int(values[0])

        self.beginRemoveRows(QtCore.QModelIndex(), which_layer, which_layer)

        self.params['nlayers'].value = oldlayers - 1

        # locate the four parameters belonging to the layer being removed;
        # the first 8 parameters are global
        startP = 4 * (which_layer - 1) + 8

        # get rid of the parameters we don't need anymore
        names_lost = curvefitter.names(self.params)[startP: startP + 4]
        for name in names_lost:
            self.params.pop(name)

        # but now we need to rejig the parameter names. The only way to do
        # this is to pop them all and re-add them.
        values = curvefitter.values(self.params)
        varys = curvefitter.varys(self.params)
        bounds = np.array(curvefitter.bounds(self.params))
        names = ReflectivityFitFunction.parameter_names(values.size)
        for name in list(self.params.keys()):
            self.params.pop(name)

        parlist = zip(names,
                      values,
                      varys,
                      bounds.T[0],
                      bounds.T[1],
                      [None] * values.size)

        for para in parlist:
            self.params.add(*para)

        self.endRemoveRows()
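
# A minimal usage sketch for the two methods above (LayerModel is a
# hypothetical QAbstractTableModel subclass that keeps the lmfit Parameters
# in self.params; the class name and call sites are assumptions, not part of
# the original code):
#
#     model = LayerModel(params)
#     model.add_layer(insertpoint=1)     # insert a new layer
#     model.remove_layer(which_layer=2)  # remove layer 2 again
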
def global_fitter_setup(global_pilot_file, dqvals=5.0):
    # Parse the global fitter setup exported from Igor.
    # TODO: deal with user-generated non-slab models.
    with open(global_pilot_file, 'r') as f:
        data_files = f.readline().split()
        pilot_files = f.readline().split()

    constraints = np.loadtxt(global_pilot_file, skiprows=2, dtype=int)
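    # constraints is the linkage matrix: one row per parameter, one column
    # per dataset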

    # open the datafiles
    datasets = []
    for data_file in data_files:
        dataset = ReflectDataset(data_file)
        datasets.append(dataset)

    # deal with the individual pilot files
    parameters = []
    for pilot_file in pilot_files:
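        # each row of the individual pilot file holds: value, hold flag
        # (0 = varying), lower bound, upper bound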
        pars = np.loadtxt(pilot_file, skiprows=4)

        # let's assume for now that the data has resolution information
        # and that we're doing a slab model.
        pv = pars[:, 0]
        varies = (pars[:, 1].astype(int) == 0)

        # work out the bounds, accounting for the fact that MotofitMPI
        # doesn't set bounds for parameters that are fixed
        bounds = []
        for idx in range(np.size(pv)):
            if not varies[idx]:
                bounds.append((0, 2 * pv[idx]))
            else:
                bounds.append(pars[idx, 2:4])

        P = to_parameters(pv, varies=varies, bounds=bounds)
        parameters.append(P)

    # now create the CurveFitter instances
    T = Transform('logY')
    fitters = []

    for parameter, dataset in zip(parameters, datasets):
        t_data_y, t_data_yerr = T.transform(dataset.x, dataset.y,
                                            dataset.y_err)

        if isinstance(dqvals, numbers.Real):
            _dqvals = float(dqvals)
        else:
            _dqvals = dataset.x_err

        c = CurveFitter(ReflectivityFitFunction(T.transform, workers=True),
                        (dataset.x, t_data_y, t_data_yerr),
                        parameter,
                        fcn_kws={'dqvals': _dqvals})
        fitters.append(c)

    # create the GlobalFitter
    # set up the constraints
    unique, indices = np.unique(constraints, return_index=True)
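    # `indices` are flat indices into `constraints` marking the first
    # occurrence of each linkage number; that occurrence is treated as the
    # master parameter the others are tied to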

    # TODO assertions for checking linkage integrity

    n_datasets = len(datasets)

    def is_unique(row, col):
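        # constraints is ravelled in C (row-major) order, so the flat index
        # of entry (row, col) is row * n_datasets + col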
        ravelled_idx = row * n_datasets + col
        return ravelled_idx in indices

    cons = []
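    # build a 'd<col>:p<row> = d<m_col>:p<m_row>' constraint string for every
    # parameter that is linked to a master parameter elsewhere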
    for col in range(n_datasets):
        for row, val in enumerate(parameters[col]):
            if constraints[row, col] == -1 or is_unique(row, col):
                continue
            # so it's not unique, but which parameter does it depend on?
            # find location of master parameter
            master = np.extract(unique == constraints[row, col], indices)[0]
            m_col = master % n_datasets
            m_row = (master - m_col) // n_datasets
            constraint = 'd%u:p%u = d%u:p%u' % (col, row, m_col, m_row)
            cons.append(constraint)

            # we also have to rejig the bounds, because MotofitMPI doesn't
            # set bounds for parameters that aren't unique. That is bad for
            # lmfit, because it will clip them.
            par = fitters[col].params['p%u' % row]
            m_par = fitters[m_col].params['p%u' % m_row]
            par.min = m_par.min
            par.max = m_par.max

    global_fitter = GlobalFitter(fitters, constraints=cons)

    # # update the constraints
    # global_fitter.params.update_constraints()

    return global_fitter
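
# A minimal usage sketch (the pilot file name and the fit call below are
# assumptions, not confirmed API; check GlobalFitter before relying on them):
#
#     global_fitter = global_fitter_setup('global.pilot', dqvals=5.0)
#     result = global_fitter.fit()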