Example #1
    def fit(self, df, group=None):
        """
        Fits a loose, tight, beta, and p combinations model. If you pass in
        update group it will override the initial parameters with new
        initial parameters based on the df you pass.

        Args:
            df:
            group: (str) passing in the group will update the initialization
                dictionary (not replacing the old one) for this particular fit.

        Returns:

        """
        if group is not None:
            init_dict = self.update_init_model(df=df, group=group)
        else:
            init_dict = deepcopy(self.init_dict)

        for param in ['beta', 'p']:
            for fit_type in ['loose', 'tight']:
                model_arg_dict = deepcopy(
                    getattr(self, f'{param}_model_kwargs'))
                fit_arg_dict = deepcopy(
                    getattr(self, f'{fit_type}_{param}_fit_dict'))
                model = CurveModel(df=df, **model_arg_dict)

                fe_init, re_init = compute_starting_params(
                    init_dict[param][fit_type])

                fit_arg_dict.update(fe_init=fe_init, re_init=re_init)
                model.fit_params(**fit_arg_dict)

                setattr(self, f'{fit_type}_{param}_model', model)
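
For orientation, here is a hedged usage sketch of Example #1. The wrapper object (called `model` below), its constructor arguments, the group name, and the input file are assumptions not shown in the excerpt; only the fit() call and the {fit_type}_{param}_model attributes come from the code above.

import pandas as pd

# Hypothetical: `model` is an instance of the class that defines fit() above,
# already configured with init_dict, beta_model_kwargs / p_model_kwargs and
# the four {loose,tight}_{beta,p}_fit_dict dictionaries.
df = pd.read_csv("observations.csv")   # hypothetical input data

# Fit all four (fit_type, param) combinations; "location_a" is a made-up group.
model.fit(df=df, group="location_a")

# Each fitted combination is stored as an attribute by fit(), e.g.:
tight_beta_model = model.tight_beta_model   # a fitted CurveModel
loose_p_model = model.loose_p_model
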
Example #2
    def fit(self, df, group=None):
        """
        Fits a loose, tight, beta, and p combinations model. If you pass in
        update group it will override the initial parameters with new
        initial parameters based on the df you pass.

        Args:
            df:
            group: (str) passing in the group will update the initialization
                dictionary (not replacing the old one) for this particular fit.

        Returns:

        """
        if group is not None:
            init_dict = self.update_init_model(df=df, group=group)
        else:
            init_dict = deepcopy(self.init_dict)

        fit_dict = deepcopy(self.fit_dict)
        fe_init, re_init = compute_starting_params(init_dict)
        fit_dict.update(fe_init=fe_init, re_init=re_init)

        self.mod = CurveModel(df=df, **self.basic_model_dict)
        self.mod.fit_params(**fit_dict)
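
Both examples feed the result of compute_starting_params into fit_params as fe_init and re_init. The source of that helper is not shown here; a minimal sketch of its assumed behavior, based on the smart_initialize description in Example #3 (average the per-group fixed effects, use each group's deviation from the average as its random effects), might look like:

import numpy as np

def compute_starting_params_sketch(fe_dict):
    """Assumed behavior: fe_init is the mean of the per-group fixed
    effects, re_init is each group's deviation from that mean."""
    fe_values = np.vstack(list(fe_dict.values()))  # shape (num_groups, num_fe)
    fe_init = fe_values.mean(axis=0)               # average fixed effects
    re_init = (fe_values - fe_init).ravel()        # per-group deviations
    return fe_init, re_init
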
Example #3
    def fit_params(self,
                   fe_init,
                   re_init=None,
                   fe_bounds=None,
                   re_bounds=None,
                   fe_gprior=None,
                   re_gprior=None,
                   fixed_params=None,
                   smart_initialize=False,
                   fixed_params_initialize=None,
                   options=None,
                   smart_init_options=None):
        """Fit the parameters.

        Args:
            fe_init (numpy.ndarray):
                Initial value for the fixed effects.
            re_init (numpy.ndarray, optional):
                Initial value for the random effects.
            fe_bounds (list of lists, optional):
                Bounds for fixed effects.
            re_bounds (list of lists, optional):
                Bounds for random effects.
            fe_gprior (list of lists, optional):
                Gaussian prior for fixed effects.
            re_gprior (list of lists, optional):
                Gaussian prior for random effects.
            fixed_params (list{str}, optional):
                A list of parameter names that will be fixed at initial value.
            smart_initialize (bool, optional):
                Whether or not to initialize this model's fixed effects from
                the average of the fixed effects across individual models fit
                with the same settings, and its random effects from each
                individual model's deviation from that average.
            fixed_params_initialize (list{str}, optional):
                A list of parameter names that will be fixed at their initial
                values during smart initialization. Ignored, with a warning,
                if smart_initialize=False.
            options (dict, optional):
                Options for the optimizer.
            smart_init_options (dict, optional):
                Options for the optimizer in the per-group models fit during
                smart initialization.
        """
        assert len(fe_init) == self.num_fe
        if fe_bounds is None:
            fe_bounds = [[-np.inf, np.inf]] * self.num_fe
        if re_bounds is None:
            re_bounds = [[-np.inf, np.inf]] * self.num_fe
        assert len(fe_bounds) == self.num_fe
        assert len(re_bounds) == self.num_fe

        if fe_gprior is not None:
            assert len(fe_gprior) == self.num_fe
            self.fe_gprior = np.array(fe_gprior)
        if re_gprior is not None:
            assert len(re_gprior) == self.num_fe
            self.re_gprior = np.array(re_gprior)
        if re_init is None:
            re_init = np.zeros(self.num_re)

        if fixed_params_initialize is not None:
            if not smart_initialize:
                # Warn and ignore fixed_params_initialize, as documented above
                # (requires `import warnings` at module level).
                warnings.warn(
                    f"You passed in an initialization parameter "
                    f"fixed_params_initialize {fixed_params_initialize} "
                    f"but set smart_initialize=False. Will ignore fixed_params_initialize."
                )

        if smart_init_options is not None:
            if options is None:
                raise RuntimeError(
                    "Need to pass in options if you pass in smart init options."
                )

        if smart_initialize:
            smart_initialize_options = deepcopy(options)
            if smart_init_options is not None:
                smart_initialize_options.update(smart_init_options)
            if self.num_groups == 1:
                raise RuntimeError(
                    "Don't do initialization for models with only one group.")

            fe_dict = get_initial_params(
                groups=self.group_names,
                model=self,
                fit_arg_dict=dict(
                    fe_init=fe_init,
                    fe_bounds=fe_bounds,
                    fe_gprior=fe_gprior,
                    fixed_params=fixed_params_initialize,
                    options=smart_initialize_options,
                ))
            fe_init, re_init = compute_starting_params(fe_dict)
            print(f"Overriding fe_init with {fe_init}.")
            print(f"Overriding re_init with {re_init}.")

        x0 = np.hstack([fe_init, re_init])

        fe_bounds = np.array(fe_bounds)
        re_bounds = np.array(re_bounds)

        if fixed_params is not None:
            for param in fixed_params:
                param_id = self.param_idx[param]
                fe_bounds[param_id] = x0[param_id, None]
                re_bounds[param_id] = 0.0

        re_bounds = np.repeat(re_bounds[None, :, :], self.num_groups, axis=0)
        bounds = np.vstack([fe_bounds, re_bounds.reshape(self.num_re, 2)])

        result = minimize(fun=self.objective,
                          x0=x0,
                          jac=self.gradient,
                          method='L-BFGS-B',
                          bounds=bounds,
                          options=options)

        self.result = result
        self.params = self.compute_params(self.result.x, expand=False)
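
Putting the pieces together, a hedged call to fit_params might look like the following. Only the keyword names come from the signature above; the model construction, parameter names, bound values, and gprior format are assumptions for illustration.

import numpy as np

# Hypothetical: `model` is a CurveModel with three fixed effects
# ('alpha', 'beta', 'p') and more than one group, so smart_initialize
# passes the num_groups check above.
fe_init = np.array([0.1, 20.0, 0.5])

model.fit_params(
    fe_init=fe_init,
    fe_bounds=[[0.0, 1.0], [0.0, 100.0], [0.0, 1.0]],
    fe_gprior=[[0.1, 0.01], [20.0, 5.0], [0.5, 0.1]],  # assumed [mean, std] per effect
    fixed_params=['p'],                 # hold p at its initial value
    smart_initialize=True,              # seed fe_init/re_init from per-group fits
    options={'maxiter': 500},           # forwarded to scipy's L-BFGS-B
    smart_init_options={'maxiter': 100},
)

# After fitting, the optimizer result and parameters are stored on the model:
print(model.result.success)
print(model.params)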