Example #1
    def setUpClass(self):

        ## Non-linear least squares
        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(
            np.random.random((test_size - k_ar, nyields)))
        var_data_test = self.var_data_test = \
            pa.DataFrame(np.random.random((test_size, neqs)))
        self.mats = mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim_nolat = dim = k_ar * neqs
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs_nolat = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma
        }

        guess_params_nolat = np.random.random((neqs**2 + neqs)).tolist()
        affine_obj_nolat = Affine(**self.mod_kwargs_nolat)

        self.results = affine_obj_nolat.solve(guess_params_nolat,
                                              method='nls',
                                              xtol=0.1,
                                              ftol=0.1)
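Note: each setup above builds its parameter matrices with a make_nomask helper that the snippets never define. A minimal sketch consistent with how it is used, assuming it simply returns a zero-filled masked array with nothing masked, would be:

import numpy.ma as ma

def make_nomask(dim):
    # Hypothetical helper (not shown in these examples): a zero-filled masked
    # array of the given shape with no mask set, so callers can later flag
    # individual entries for estimation by assigning ma.masked to them.
    return ma.zeros(dim)

lam_0 = make_nomask([4, 1])
lam_0[:2] = ma.masked  # mark the first two risk-price entries for estimation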
Example #2
    def setUp(self):

        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(
            np.random.random((test_size - k_ar, nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params = np.random.random(
            (neqs**2 + neqs + (2 * latent), )).tolist()
        self.affine_obj = Affine(**self.mod_kwargs)
        self.affineml_obj = AffineML(**self.mod_kwargs)
Example #3
    def setUpClass(self):

        ## Non-linear least squares
        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(np.random.random((test_size - k_ar,
                                                      nyields)))
        var_data_test = self.var_data_test = \
            pa.DataFrame(np.random.random((test_size, neqs)))
        self.mats = mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim_nolat = dim = k_ar * neqs
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs_nolat = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma
        }

        guess_params_nolat = np.random.random((neqs**2 + neqs)).tolist()
        affine_obj_nolat = Affine(**self.mod_kwargs_nolat)

        self.results = affine_obj_nolat.solve(guess_params_nolat, method='nls',
                                              xtol=0.1, ftol=0.1)
Example #4
    def test_create_correct(self):
        """
        Tests whether __init__ successfully initializes an Affine model object.
        If the Affine object does not successfully instantiate, then this test
        fails, otherwise it passes.
        """
        model = Affine(**self.mod_kwargs)
        self.assertIsInstance(model, Affine)
Example #5
    def setUp(self):

        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(np.random.random((test_size - k_ar,
                                                      nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params = np.random.random(
            (neqs**2 + neqs + (2 * latent), )).tolist()
        self.affine_obj = Affine(**self.mod_kwargs)
        self.affineml_obj = AffineML(**self.mod_kwargs)
Example #6
class TestEstimationMethods(TestCase):
    """
    Tests for solution methods
    """
    def setUp(self):

        ## Non-linear least squares
        np.random.seed(101)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(np.random.random((test_size - k_ar,
                                                      nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim_nolat = dim = k_ar * neqs
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs_nolat = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma
        }

        self.guess_params_nolat = np.random.random((neqs**2 + neqs)).tolist()
        self.affine_obj_nolat = Affine(**self.mod_kwargs_nolat)

        ## Maximum likelihood build

        # initialize masked arrays
        self.dim_lat = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params_lat = np.random.random((neqs**2 + neqs +
                                                 (2 * latent),)).tolist()
        self.affine_obj_lat = Affine(**self.mod_kwargs)


    def test_solve_nls(self):
        """
        Tests whether basic estimation is performed for the non-linear least
        squares case without any latent factors. If the numerical approximation
        method converges, this test passes. Otherwise, the test fails.
        """
        guess_params = self.guess_params_nolat
        method = 'nls'
        self.affine_obj_nolat.solve(guess_params, method=method, alg='newton',
                                    xtol=0.1, ftol=0.1)

    def test_solve_ml(self):
        """
        Tests whether model estimation is performed for direct maximum
        likelihood with a single latent factor. If the numerical approximation
        method converges, this test passes. Otherwise, the test fails.
        """
        guess_params = self.guess_params_lat
        method = 'ml'
        self.affine_obj_lat.solve(guess_params, method=method, alg='bfgs',
                                  xtol=0.1, ftol=0.1)
Example #7
class TestEstimationSupportMethodsComplex(TestCase):
    """
    Test cases where instantiation is complex
    """
    def setUp(self):

        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(np.random.random((test_size - k_ar,
                                                      nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1]) + 0j
        lam_1 = make_nomask([dim, dim]) + 0j
        delta_0 = make_nomask([1, 1]) + 0j
        delta_1 = make_nomask([dim, 1]) + 0j
        mu = make_nomask([dim, 1]) + 0j
        phi = make_nomask([dim, dim]) + 0j
        sigma = make_nomask([dim, dim]) + 0j

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params = np.random.random(
            (neqs**2 + neqs + (2 * latent), )).tolist()
        self.affine_obj = Affine(**self.mod_kwargs)

    def test_opt_gen_pred_coef_float(self):
        """
        Tests whether complex-typed arrays are returned when float-valued
        parameters are passed in
        """
        # DOC: Need to make sure that the arrays are also np.complex
        guess_params = self.guess_params
        params = self.affine_obj.params_to_array(guess_params)
        arrays_gpc = self.affine_obj.opt_gen_pred_coef(*params)
        for array in arrays_gpc:
            self.assertEqual(array.dtype, np.complex_)

    def test_opt_gen_pred_coef_complex(self):
        """
        Tests whether complex-typed arrays are returned when complex-valued
        parameters are passed in
        """
        # DOC: Need to make sure that the arrays are also np.complex
        guess_params = [np.complex_(el) for el in self.guess_params]
        params = self.affine_obj.params_to_array(guess_params)
        arrays_gpc = self.affine_obj.opt_gen_pred_coef(*params)
        for array in arrays_gpc:
            self.assertEqual(array.dtype, np.complex_)
Example #8
class TestEstimationSupportMethods(TestCase):
    """
    Tests for support methods related to estimating models
    """
    def setUp(self):

        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(np.random.random((test_size - k_ar,
                                                      nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params = np.random.random(
            (neqs**2 + neqs + (2 * latent), )).tolist()
        self.affine_obj = Affine(**self.mod_kwargs)
        self.affineml_obj = AffineML(**self.mod_kwargs)

    def test_loglike(self):
        """
        Tests if loglikelihood is calculated. If the loglikelihood is
        calculated given a set of parameters, then this test passes.
        Otherwise, it fails.
        """
        self.affineml_obj.loglike(self.guess_params)

    def test_score(self):
        """
        Tests if score of the likelihood is calculated. If the score
        calculation succeeds without error, then the test passes. Otherwise,
        the test fails.
        """
        self.affineml_obj.score(self.guess_params)

    def test_hessian(self):
        """
        Tests if hessian of the likelihood is calculated. If the hessian
        calculation succeeds without error, then the test passes. Otherwise,
        the test fails.
        """
        self.affineml_obj.hessian(self.guess_params)

    def test_std_errs(self):
        """
        Tests if standard errors are calculated. If the standard error
        calculation succeeds, then the test passes. Otherwise, the test
        fails.
        """
        self.affineml_obj.std_errs(self.guess_params)

    def test_params_to_array(self):
        """
        Tests if the params_to_array function works correctly, with and without
        returning masked arrays. In order to pass, the params_to_array function
        must return masked arrays with the masked elements filled in when the
        return_mask argument is set to True and contiguous standard numpy
        arrays when the return_mask argument is False. Otherwise, the test
        fails.
        """
        arrays_no_mask = self.affine_obj.params_to_array(self.guess_params)
        for arr in arrays_no_mask[:-1]:
            self.assertIsInstance(arr, np.ndarray)
            self.assertNotIsInstance(arr, np.ma.core.MaskedArray)
        arrays_w_mask = self.affine_obj.params_to_array(self.guess_params,
                                                        return_mask=True)
        for arr in arrays_w_mask[:-1]:
            self.assertIsInstance(arr, np.ma.core.MaskedArray)

    def test_params_to_array_inconsistent_types(self):
        """
        Tests if an assertion error is raised when parameters of different
        types are passed in
        """
        guess_params_adj = self.guess_params
        guess_params_adj[-1] = np.complex_(guess_params_adj[-1])
        self.assertRaises(AssertionError, self.affine_obj.params_to_array,
                          guess_params_adj)

    def test_params_to_array_zeromask(self):
        """
        Tests if params_to_array_zeromask function works correctly. In order to
        pass, params_to_array_zeromask must return masked arrays with the
        guess_params elements that are zero unmasked and set to zero in the
        appropriate arrays. The new guess_params array is also returned with
        those that were 0 removed. If either of these is not returned correctly,
        the test fails.
        """
        guess_params_arr = np.array(self.guess_params)
        neqs = self.affine_obj.neqs
        guess_params_arr[:neqs] = 0
        guess_params = guess_params_arr.tolist()
        guess_length = self.affine_obj._gen_guess_length()
        params_guesses = self.affine_obj.params_to_array_zeromask(guess_params)
        updated_guesses = params_guesses[-1]
        self.assertEqual(len(updated_guesses), len(guess_params) - neqs)

        # ensure that number of masked has correctly been set
        count_masked_new = ma.count_masked(params_guesses[0])
        count_masked_orig = ma.count_masked(self.affine_obj.lam_0_e)
        self.assertEqual(count_masked_new, count_masked_orig - neqs)

    def test_gen_pred_coef(self):
        """
        Tests if Python-driven gen_pred_coef function runs. If a set of
        parameter arrays are passed into the gen_pred_coef function and the
        A and B arrays are returned, then the test passes. Otherwise, the test
        fails.
        """
        params = self.affine_obj.params_to_array(self.guess_params)
        self.affine_obj.gen_pred_coef(*params)

    def test_opt_gen_pred_coef(self):
        """
        Tests if C-driven gen_pred_coef function runs. If a set of parameter
        arrays are passed into the opt_gen_pred_coef function and the A and
        B arrays are returned, then the test passes. Otherwise, the test fails.
        """
        params = self.affine_obj.params_to_array(self.guess_params)
        self.affine_obj.opt_gen_pred_coef(*params)

    def test_py_C_gen_pred_coef_equal(self):
        """
        Tests if the Python-driven and C-driven gen_pred_coef functions produce
        the same result, up to a precision of 1e-14. If the gen_pred_coef and
        opt_gen_pred_coef functions produce the same result, then the test
        passes. Otherwise, the test fails.
        """
        params = self.affine_obj.params_to_array(self.guess_params)
        py_gpc = self.affine_obj.gen_pred_coef(*params)
        c_gpc = self.affine_obj.opt_gen_pred_coef(*params)
        for aix, array in enumerate(py_gpc):
            np.testing.assert_allclose(array, c_gpc[aix], rtol=1e-14)

    def test__solve_unobs(self):
        """
        Tests if the _solve_unobs function runs. If the _solve_unobs function
        runs and the latent series, likelihood jacobian, and yield errors are
        returned, then the test passes. Otherwise the test fails.
        """
        guess_params = self.guess_params
        param_arrays = self.affine_obj.params_to_array(guess_params)
        a_in, b_in = self.affine_obj.gen_pred_coef(*param_arrays)
        result = self.affineml_obj._solve_unobs(a_in=a_in, b_in=b_in,
                                                dtype=param_arrays[-1])

    def test__affine_pred(self):
        """
        Tests if the _affine_pred function runs. If the _affine_pred function
        produces a list of the yields stacked in order of increasing maturity
        with the expected length, the test passes. Otherwise, the test
        fails.
        """
        lat = self.affine_obj.latent
        yobs = self.affine_obj.yobs
        mats = self.affine_obj.mats
        var_data_vert_tpose = self.affine_obj.var_data_vert.T

        guess_params = self.guess_params
        latent_rows = np.random.random((lat, yobs))
        data = np.append(var_data_vert_tpose, latent_rows, axis=0)
        pred = self.affine_obj._affine_pred(data, *guess_params)
        self.assertEqual(len(pred), len(mats) * yobs)

    def test__gen_mat_list(self):
        """
        Tests if _gen_mat_list generates a length 2 tuple with a list of the
        maturities estimated without error followed by those estimated with
        error. If _gen_mat_list produces a tuple of lists of those yields
        estimated without error and then those with error, this test passes.
        Otherwise, the test fails.
        """
        no_err_mat, err_mat = self.affine_obj._gen_mat_list()
        self.assertEqual(no_err_mat, [2])
        self.assertEqual(err_mat, [1, 3, 4, 5])
Example #9
# assume phi lower triangular
# phi_e[:, :] = ma.masked
phi_e.mask = np.tri(phi_e.shape[0], M=phi_e.shape[1])

sigma_e[:, :] = ma.masked
sigma_e[:, :] = ma.nomask
# sigma_e[:, :] = np.identity(latent)
sigma_e.mask = np.identity(sigma_e.shape[0])
# sigma_e[-latent:, -latent:] = ma.masked

mod_init = Affine(yc_data=mod_yc_data,
                  var_data=None,
                  latent=latent,
                  lam_0_e=lam_0_e,
                  lam_1_e=lam_1_e,
                  delta_0_e=delta_0_e,
                  delta_1_e=delta_1_e,
                  mu_e=mu_e,
                  phi_e=phi_e,
                  sigma_e=sigma_e,
                  mats=mats,
                  use_C_extension=False)

guess_length = mod_init.guess_length

guess_params = [0] * guess_length

np.random.seed(100)

# fill each guess with a small positive random value
for numb in range(len(guess_params)):
    guess_params[numb] = np.abs(0.001 * np.random.random())
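Note: the snippet above stops after filling in the guesses. A hedged continuation, reusing the solve() keywords shown in the other examples (the tolerance and iteration values here are illustrative, not from the original source):

# Hedged continuation (not part of the original snippet): hand the guesses to
# the solver, mirroring the solve() calls seen elsewhere on this page.
out = mod_init.solve(guess_params=guess_params,
                     method='nls',
                     ftol=1e-4,
                     xtol=1e-4,
                     maxfev=10000,
                     full_output=False)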
Example #10
                                                      k_ar=k_ar,
                                                      neqs=neqs,
                                                      delta_0=delta_0_e,
                                                      delta_1=delta_1_e,
                                                      mu=mu_e,
                                                      phi=phi_e,
                                                      sigma=sigma_e,
                                                      rf_rate=rf_rate)

mod_init = Affine(yc_data=yc_data_use,
                  var_data=macro_data_use,
                  latent=latent,
                  lam_0_e=lam_0_e,
                  lam_1_e=lam_1_e,
                  delta_0_e=delta_0_e,
                  delta_1_e=delta_1_e,
                  mu_e=mu_e,
                  phi_e=phi_e,
                  sigma_e=sigma_e,
                  mats=mats,
                  k_ar=k_ar,
                  neqs=neqs,
                  use_C_extension=False)

guess_length = mod_init.guess_length

guess_params = [0.0000] * guess_length

np.random.seed(100)

for numb, element in enumerate(guess_params):
    element = 0.0000000001
Example #11
def robust(mod_data, mod_yc_data, method=None):
    """
    Run the model with small random parameter guesses and return the
    estimated lam_0 and lam_1 along with the solver output (for least
    squares) or the parameter covariance (otherwise).

    mod_data : pandas DataFrame
        model data
    mod_yc_data : pandas DataFrame
        model yield curve data
    method : string
        method to pass to Affine.solve()
    """
    # subset to pre 2005
    mod_data = mod_data[:217]
    mod_yc_data = mod_yc_data[:214]

    k_ar = 4
    neqs = 5
    lat = 0

    lam_0_e = ma.zeros((k_ar * neqs, 1))
    lam_0_e[:neqs] = ma.masked

    lam_1_e = ma.zeros((k_ar * neqs, k_ar * neqs))
    lam_1_e[:neqs, :neqs] = ma.masked

    delta_0_e = ma.zeros([1, 1])
    delta_0_e[:, :] = ma.masked
    delta_0_e[:, :] = ma.nomask

    delta_1_e = ma.zeros([k_ar * neqs, 1])
    delta_1_e[:, :] = ma.masked
    delta_1_e[:, :] = ma.nomask
    delta_1_e[np.argmax(mod_data.columns == 'fed_funds')] = 1

    var_fit = VAR(mod_data, freq="M").fit(maxlags=k_ar)

    coefs = var_fit.params.values
    sigma_u = var_fit.sigma_u
    obs_var = neqs * k_ar

    mu_e = ma.zeros([k_ar * neqs, 1])
    mu_e[:, :] = ma.masked
    mu_e[:, :] = ma.nomask
    mu_e[:neqs] = coefs[0, None].T

    phi_e = ma.zeros([k_ar * neqs, k_ar * neqs])
    phi_e[:, :] = ma.masked
    phi_e[:, :] = ma.nomask
    phi_e[:neqs] = coefs[1:].T
    phi_e[neqs:obs_var, :(k_ar - 1) * neqs] = np.identity((k_ar - 1) * neqs)

    sigma_e = ma.zeros([k_ar * neqs, k_ar * neqs])
    sigma_e[:, :] = ma.masked
    sigma_e[:, :] = ma.nomask
    sigma_e[:neqs, :neqs] = sigma_u
    sigma_e[neqs:obs_var, neqs:obs_var] = np.identity((k_ar - 1) * neqs)

    #anl_mths, mth_only_data = proc_to_mth(mod_yc_data)
    bsr = Affine(yc_data=mod_yc_data,
                 var_data=mod_data,
                 lam_0_e=lam_0_e,
                 lam_1_e=lam_1_e,
                 delta_0_e=delta_0_e,
                 delta_1_e=delta_1_e,
                 mu_e=mu_e,
                 phi_e=phi_e,
                 sigma_e=sigma_e)
    neqs = bsr.neqs

    guess_length = bsr.guess_length

    guess_params = [0.0000] * guess_length

    for numb, element in enumerate(guess_params[:30]):
        element = 0.0001
        guess_params[numb] = element * (np.random.random() - 0.5)

    out_bsr = bsr.solve(guess_params=guess_params,
                        method=method,
                        ftol=1e-950,
                        xtol=1e-950,
                        maxfev=1000000000,
                        full_output=False)

    if method == "ls":
        lam_0, lam_1, delta_1, mu, phi, sig, a_solve, b_solve, output = out_bsr
        return lam_0, lam_1, output

    else:
        lam_0, lam_1, delta_1, mu, phi, sig, a_solve, b_solve, lam_cov = out_bsr
        return lam_0, lam_1, lam_cov
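Note: a hypothetical driver for robust(); mod_data and mod_yc_data are assumed to be pandas DataFrames prepared elsewhere (macro factors and yield-curve levels):

# Hypothetical call; mod_data and mod_yc_data come from the surrounding script.
lam_0, lam_1, output = robust(mod_data, mod_yc_data, method="ls")

# Any method other than "ls" returns the risk-price covariance instead:
# lam_0, lam_1, lam_cov = robust(mod_data, mod_yc_data, method="nls")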
Example #12
class TestEstimationMethods(TestCase):
    """
    Tests for solution methods
    """
    def setUp(self):

        ## Non-linear least squares
        np.random.seed(101)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(
            np.random.random((test_size - k_ar, nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim_nolat = dim = k_ar * neqs
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs_nolat = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma
        }

        self.guess_params_nolat = np.random.random((neqs**2 + neqs)).tolist()
        self.affine_obj_nolat = Affine(**self.mod_kwargs_nolat)

        ## Maximum likelihood build

        # initialize masked arrays
        self.dim_lat = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params_lat = np.random.random(
            (neqs**2 + neqs + (2 * latent), )).tolist()
        self.affine_obj_lat = Affine(**self.mod_kwargs)

    def test_solve_nls(self):
        """
        Tests whether basic estimation is performed for the non-linear least
        squares case without any latent factors. If the numerical approximation
        method converges, this test passes. Otherwise, the test fails.
        """
        guess_params = self.guess_params_nolat
        method = 'nls'
        self.affine_obj_nolat.solve(guess_params,
                                    method=method,
                                    alg='newton',
                                    xtol=0.1,
                                    ftol=0.1)

    def test_solve_ml(self):
        """
        Tests whether model estimation is performed for direct maximum
        likelihood with a single latent factor. If the numerical approximation
        method converges, this test passes. Otherwise, the test fails.
        """
        guess_params = self.guess_params_lat
        method = 'ml'
        self.affine_obj_lat.solve(guess_params,
                                  method=method,
                                  alg='bfgs',
                                  xtol=0.1,
                                  ftol=0.1)
Example #13
class TestEstimationSupportMethodsComplex(TestCase):
    """
    Test cases where instantiation is complex
    """
    def setUp(self):

        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(
            np.random.random((test_size - k_ar, nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1]) + 0j
        lam_1 = make_nomask([dim, dim]) + 0j
        delta_0 = make_nomask([1, 1]) + 0j
        delta_1 = make_nomask([dim, 1]) + 0j
        mu = make_nomask([dim, 1]) + 0j
        phi = make_nomask([dim, dim]) + 0j
        sigma = make_nomask([dim, dim]) + 0j

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params = np.random.random(
            (neqs**2 + neqs + (2 * latent), )).tolist()
        self.affine_obj = Affine(**self.mod_kwargs)

    def test_opt_gen_pred_coef_float(self):
        """
        Tests whether complex-typed arrays are returned when float-valued
        parameters are passed in
        """
        # DOC: Need to make sure that the arrays are also np.complex
        guess_params = self.guess_params
        params = self.affine_obj.params_to_array(guess_params)
        arrays_gpc = self.affine_obj.opt_gen_pred_coef(*params)
        for array in arrays_gpc:
            self.assertEqual(array.dtype, np.complex_)

    def test_opt_gen_pred_coef_complex(self):
        """
        Tests whether complex-typed arrays are returned when complex-valued
        parameters are passed in
        """
        # DOC: Need to make sure that the arrays are also np.complex
        guess_params = [np.complex_(el) for el in self.guess_params]
        params = self.affine_obj.params_to_array(guess_params)
        arrays_gpc = self.affine_obj.opt_gen_pred_coef(*params)
        for array in arrays_gpc:
            self.assertEqual(array.dtype, np.complex_)
Example #14
class TestEstimationSupportMethods(TestCase):
    """
    Tests for support methods related to estimating models
    """
    def setUp(self):

        np.random.seed(100)

        # initialize yield curve and VAR observed factors
        yc_data_test = pa.DataFrame(
            np.random.random((test_size - k_ar, nyields)))
        var_data_test = pa.DataFrame(np.random.random((test_size, neqs)))
        mats = list(range(1, nyields + 1))

        # initialize masked arrays
        self.dim = dim = k_ar * neqs + latent
        lam_0 = make_nomask([dim, 1])
        lam_1 = make_nomask([dim, dim])
        delta_0 = make_nomask([1, 1])
        delta_1 = make_nomask([dim, 1])
        mu = make_nomask([dim, 1])
        phi = make_nomask([dim, dim])
        sigma = make_nomask([dim, dim])

        # Setup some of the elements as non-zero
        # This sets up a fake model where only lambda_0 and lambda_1 are
        # estimated
        lam_0[:neqs] = ma.masked
        lam_0[-latent:] = ma.masked
        lam_1[:neqs, :neqs] = ma.masked
        lam_1[-latent:, -latent:] = ma.masked
        delta_0[:, :] = np.random.random(1)
        delta_1[:neqs] = np.random.random((neqs, 1))
        mu[:neqs] = np.random.random((neqs, 1))
        phi[:neqs, :] = np.random.random((neqs, dim))
        sigma[:, :] = np.identity(dim)

        self.mod_kwargs = {
            'yc_data': yc_data_test,
            'var_data': var_data_test,
            'k_ar': k_ar,
            'neqs': neqs,
            'mats': mats,
            'lam_0_e': lam_0,
            'lam_1_e': lam_1,
            'delta_0_e': delta_0,
            'delta_1_e': delta_1,
            'mu_e': mu,
            'phi_e': phi,
            'sigma_e': sigma,
            'latent': latent,
            'no_err': [1]
        }

        self.guess_params = np.random.random(
            (neqs**2 + neqs + (2 * latent), )).tolist()
        self.affine_obj = Affine(**self.mod_kwargs)
        self.affineml_obj = AffineML(**self.mod_kwargs)

    def test_loglike(self):
        """
        Tests if loglikelihood is calculated. If the loglikelihood is
        calculated given a set of parameters, then this test passes.
        Otherwise, it fails.
        """
        self.affineml_obj.loglike(self.guess_params)

    def test_score(self):
        """
        Tests if score of the likelihood is calculated. If the score
        calculation succeeds without error, then the test passes. Otherwise,
        the test fails.
        """
        self.affineml_obj.score(self.guess_params)

    def test_hessian(self):
        """
        Tests if hessian of the likelihood is calculated. If the hessian
        calculation succeeds without error, then the test passes. Otherwise,
        the test fails.
        """
        self.affineml_obj.hessian(self.guess_params)

    def test_std_errs(self):
        """
        Tests if standard errors are calculated. If the standard error
        calculation succeeds, then the test passes. Otherwise, the test
        fails.
        """
        self.affineml_obj.std_errs(self.guess_params)

    def test_params_to_array(self):
        """
        Tests if the params_to_array function works correctly, with and without
        returning masked arrays. In order to pass, the params_to_array function
        must return masked arrays with the masked elements filled in when the
        return_mask argument is set to True and contiguous standard numpy
        arrays when the return_mask argument is False. Otherwise, the test
        fails.
        """
        arrays_no_mask = self.affine_obj.params_to_array(self.guess_params)
        for arr in arrays_no_mask[:-1]:
            self.assertIsInstance(arr, np.ndarray)
            self.assertNotIsInstance(arr, np.ma.core.MaskedArray)
        arrays_w_mask = self.affine_obj.params_to_array(self.guess_params,
                                                        return_mask=True)
        for arr in arrays_w_mask[:-1]:
            self.assertIsInstance(arr, np.ma.core.MaskedArray)

    def test_params_to_array_inconsistent_types(self):
        """
        Tests if an assertion error is raised when parameters of different
        types are passed in
        """
        guess_params_adj = self.guess_params
        guess_params_adj[-1] = np.complex_(guess_params_adj[-1])
        self.assertRaises(AssertionError, self.affine_obj.params_to_array,
                          guess_params_adj)

    def test_params_to_array_zeromask(self):
        """
        Tests if params_to_array_zeromask function works correctly. In order to
        pass, params_to_array_zeromask must return masked arrays with the
        guess_params elements that are zero unmasked and set to zero in the
        appropriate arrays. The new guess_params array is also returned with
        those that were 0 removed. If either of these is not returned correctly,
        the test fails.
        """
        guess_params_arr = np.array(self.guess_params)
        neqs = self.affine_obj.neqs
        guess_params_arr[:neqs] = 0
        guess_params = guess_params_arr.tolist()
        guess_length = self.affine_obj._gen_guess_length()
        params_guesses = self.affine_obj.params_to_array_zeromask(guess_params)
        updated_guesses = params_guesses[-1]
        self.assertEqual(len(updated_guesses), len(guess_params) - neqs)

        # ensure that number of masked has correctly been set
        count_masked_new = ma.count_masked(params_guesses[0])
        count_masked_orig = ma.count_masked(self.affine_obj.lam_0_e)
        self.assertEqual(count_masked_new, count_masked_orig - neqs)

    def test_gen_pred_coef(self):
        """
        Tests if Python-driven gen_pred_coef function runs. If a set of
        parameter arrays are passed into the gen_pred_coef function and the
        A and B arrays are returned, then the test passes. Otherwise, the test
        fails.
        """
        params = self.affine_obj.params_to_array(self.guess_params)
        self.affine_obj.gen_pred_coef(*params)

    def test_opt_gen_pred_coef(self):
        """
        Tests if C-driven gen_pred_coef function runs. If a set of parameter
        arrays are passed into the opt_gen_pred_coef function and the A and
        B arrays are returned, then the test passes. Otherwise, the test fails.
        """
        params = self.affine_obj.params_to_array(self.guess_params)
        self.affine_obj.opt_gen_pred_coef(*params)

    def test_py_C_gen_pred_coef_equal(self):
        """
        Tests if the Python-driven and C-driven gen_pred_coef functions produce
        the same result, up to a precision of 1e-14. If the gen_pred_coef and
        opt_gen_pred_coef functions produce the same result, then the test
        passes. Otherwise, the test fails.
        """
        params = self.affine_obj.params_to_array(self.guess_params)
        py_gpc = self.affine_obj.gen_pred_coef(*params)
        c_gpc = self.affine_obj.opt_gen_pred_coef(*params)
        for aix, array in enumerate(py_gpc):
            np.testing.assert_allclose(array, c_gpc[aix], rtol=1e-14)

    def test__solve_unobs(self):
        """
        Tests if the _solve_unobs function runs. If the _solve_unobs function
        runs and the latent series, likelihood jacobian, and yield errors are
        returned, then the test passes. Otherwise the test fails.
        """
        guess_params = self.guess_params
        param_arrays = self.affine_obj.params_to_array(guess_params)
        a_in, b_in = self.affine_obj.gen_pred_coef(*param_arrays)
        result = self.affineml_obj._solve_unobs(a_in=a_in,
                                                b_in=b_in,
                                                dtype=param_arrays[-1])

    def test__affine_pred(self):
        """
        Tests if the _affine_pred function runs. If the affine_pred function
        produces a list of the yields stacked in order of increasing maturity
        and is of the expected shape, the test passes. Otherwise, the test
        fails.
        """
        lat = self.affine_obj.latent
        yobs = self.affine_obj.yobs
        mats = self.affine_obj.mats
        var_data_vert_tpose = self.affine_obj.var_data_vert.T

        guess_params = self.guess_params
        latent_rows = np.random.random((lat, yobs))
        data = np.append(var_data_vert_tpose, latent_rows, axis=0)
        pred = self.affine_obj._affine_pred(data, *guess_params)
        self.assertEqual(len(pred), len(mats) * yobs)

    def test__gen_mat_list(self):
        """
        Tests if _gen_mat_list generates a length 2 tuple with a list of the
        maturities estimated without error followed by those estimated with
        error. If _gen_mat_list produces a tuple of lists of those yields
        estimated without error and then those with error, this test passes.
        Otherwise, the test fails.
        """
        no_err_mat, err_mat = self.affine_obj._gen_mat_list()
        self.assertEqual(no_err_mat, [2])
        self.assertEqual(err_mat, [1, 3, 4, 5])
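Note: these test classes use the standard unittest framework; assuming they all live in one test module, a minimal entry point to run them would be:

import unittest

if __name__ == "__main__":
    # Run every TestCase defined in this module (assumed file layout).
    unittest.main()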
Example #15
                                                      k_ar=k_ar,
                                                      neqs=neqs,
                                                      delta_0=delta_0_e,
                                                      delta_1=delta_1_e,
                                                      mu=mu_e,
                                                      phi=phi_e,
                                                      sigma=sigma_e,
                                                      rf_rate=rf_rate)

mod_init = Affine(yc_data=yc_data_use,
                  var_data=macro_data_use,
                  latent=latent,
                  no_err=[0, 2, 4],
                  lam_0_e=lam_0_e,
                  lam_1_e=lam_1_e,
                  delta_0_e=delta_0_e,
                  delta_1_e=delta_1_e,
                  mu_e=mu_e,
                  phi_e=phi_e,
                  sigma_e=sigma_e,
                  mats=mats,
                  k_ar=k_ar,
                  neqs=neqs)

guess_length = mod_init.guess_length

guess_params = [0.0000] * guess_length

np.random.seed(100)

for numb, element in enumerate(guess_params[:30]):
    element = 0.0001
Example #16
                # generate decent guesses
                lam_0_e, lam_1_e, delta_0_e, delta_1_e, mu_e, phi_e, sigma_e \
                    = bsr_constructor(k_ar=k_ar, neqs=neqs)

                delta_0_e, delta_1_e, mu_e, phi_e, sigma_e = pass_ols(var_data=mod_data,
                                                                      freq="M", lat=0,
                                                                      k_ar=k_ar, neqs=neqs,
                                                                      delta_0=delta_0_e,
                                                                      delta_1=delta_1_e,
                                                                      mu=mu_e, phi=phi_e,
                                                                      sigma=sigma_e)
                delta_1_e[np.argmax(mod_data.columns == 'fed_funds')] = 1

                print "Initial estimation"
                bsr_model = Affine(yc_data=mod_yc_data, var_data=mod_data, lam_0_e=lam_0_e,
                                   lam_1_e=lam_1_e, delta_0_e=delta_0_e, delta_1_e=delta_1_e,
                                   mu_e=mu_e, phi_e=phi_e, sigma_e=sigma_e, mths=mths)


                guess_length = bsr_model.guess_length
                guess_params = [0.0000] * guess_length

                print source
                print model
                print "xtol " + str(xtol)
                print "ftol " + str(ftol)
                print "Begin " + str(yc_dates[0])
                print "End " + str(yc_dates[-1])
                print "variables " + str(list(bsr_model.names))
                out_bsr = bsr_model.solve(guess_params=guess_params, method='nls',
                                        ftol=ftol, xtol=xtol, maxfev=10000000,
Example #17
    delta_1=delta_1_e,
    mu=mu_e,
    phi=phi_e,
    sigma=sigma_e,
    rf_rate=rf_rate,
)


mod_init = Affine(
    yc_data=yc_data_use,
    var_data=macro_data_use,
    latent=latent,
    lam_0_e=lam_0_e,
    lam_1_e=lam_1_e,
    delta_0_e=delta_0_e,
    delta_1_e=delta_1_e,
    mu_e=mu_e,
    phi_e=phi_e,
    sigma_e=sigma_e,
    mats=mats,
    k_ar=k_ar,
    neqs=neqs,
    use_C_extension=False,
)

guess_length = mod_init.guess_length

guess_params = [0.0000] * guess_length

np.random.seed(100)

for numb, element in enumerate(guess_params):
Example #18
                    lat=0,
                    k_ar=k_ar,
                    neqs=neqs,
                    delta_0=delta_0_e,
                    delta_1=delta_1_e,
                    mu=mu_e,
                    phi=phi_e,
                    sigma=sigma_e)
                delta_1_e[np.argmax(mod_data.columns == 'fed_funds')] = 1

                print "Initial estimation"
                bsr_model = Affine(yc_data=mod_yc_data,
                                   var_data=mod_data,
                                   lam_0_e=lam_0_e,
                                   lam_1_e=lam_1_e,
                                   delta_0_e=delta_0_e,
                                   delta_1_e=delta_1_e,
                                   mu_e=mu_e,
                                   phi_e=phi_e,
                                   sigma_e=sigma_e,
                                   mths=mths)

                guess_length = bsr_model.guess_length
                guess_params = [0.0000] * guess_length

                print source
                print model
                print "xtol " + str(xtol)
                print "ftol " + str(ftol)
                print "Begin " + str(yc_dates[0])
                print "End " + str(yc_dates[-1])
                print "variables " + str(list(bsr_model.names))
Example #19
def robust(mod_data, mod_yc_data, method=None):
    """
    Run the model with small random parameter guesses and return the
    estimated lam_0 and lam_1 along with the solver output (for least
    squares) or the parameter covariance (otherwise).

    mod_data : pandas DataFrame
        model data
    mod_yc_data : pandas DataFrame
        model yield curve data
    method : string
        method to pass to Affine.solve()
    """
    # subset to pre 2005
    mod_data = mod_data[:217]
    mod_yc_data = mod_yc_data[:214]

    k_ar = 4
    neqs = 5
    lat = 0

    lam_0_e = ma.zeros((k_ar * neqs, 1))
    lam_0_e[:neqs] = ma.masked

    lam_1_e = ma.zeros((k_ar * neqs, k_ar * neqs))
    lam_1_e[:neqs, :neqs] = ma.masked

    delta_0_e = ma.zeros([1, 1])
    delta_0_e[:, :] = ma.masked
    delta_0_e[:, :] = ma.nomask

    delta_1_e = ma.zeros([k_ar * neqs, 1])
    delta_1_e[:, :] = ma.masked
    delta_1_e[:, :] = ma.nomask
    delta_1_e[np.argmax(mod_data.columns == 'fed_funds')] = 1

    var_fit = VAR(mod_data, freq="M").fit(maxlags=k_ar)

    coefs = var_fit.params.values
    sigma_u = var_fit.sigma_u
    obs_var = neqs * k_ar

    mu_e = ma.zeros([k_ar * neqs, 1])
    mu_e[:, :] = ma.masked
    mu_e[:, :] = ma.nomask
    mu_e[:neqs] = coefs[0, None].T

    phi_e = ma.zeros([k_ar * neqs, k_ar * neqs])
    phi_e[:, :] = ma.masked
    phi_e[:, :] = ma.nomask
    phi_e[:neqs] = coefs[1:].T
    phi_e[neqs:obs_var, :(k_ar - 1) * neqs] = np.identity((k_ar - 1) * neqs)

    sigma_e = ma.zeros([k_ar * neqs, k_ar * neqs])
    sigma_e[:, :] = ma.masked
    sigma_e[:, :] = ma.nomask
    sigma_e[:neqs, :neqs] = sigma_u
    sigma_e[neqs:obs_var, neqs:obs_var] = np.identity((k_ar - 1) * neqs)

    #anl_mths, mth_only_data = proc_to_mth(mod_yc_data)
    bsr = Affine(yc_data=mod_yc_data, var_data=mod_data, lam_0_e=lam_0_e,
                 lam_1_e=lam_1_e, delta_0_e=delta_0_e, delta_1_e=delta_1_e,
                 mu_e=mu_e, phi_e=phi_e, sigma_e=sigma_e)
    neqs = bsr.neqs

    guess_length = bsr.guess_length

    guess_params = [0.0000] * guess_length

    for numb, element in enumerate(guess_params[:30]):
        element = 0.0001
        guess_params[numb] = element * (np.random.random() - 0.5)

    out_bsr = bsr.solve(guess_params=guess_params, method=method, ftol=1e-950,
                        xtol=1e-950, maxfev=1000000000, full_output=False)

    if method == "ls":
        lam_0, lam_1, delta_1, mu, phi, sig, a_solve, b_solve, output = out_bsr
        return lam_0, lam_1, output

    else:
        lam_0, lam_1, delta_1, mu, phi, sig, a_solve, b_solve, lam_cov = out_bsr
        return lam_0, lam_1, lam_cov