def test_evaluate_basis_mix():
    """Test gbasis.evals.eval.evaluate_basis_mix."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    # cartesian and spherical are the same for s orbital
    basis = make_contractions(basis_dict, ["H"], np.array([[0, 0, 0]]))
    assert np.allclose(
        evaluate_basis(basis, np.array([[0, 0, 0]]), coord_type="spherical"),
        evaluate_basis(basis, np.array([[0, 0, 0]]), coord_type=["spherical"]),
    )
    assert np.allclose(
        evaluate_basis(basis, np.array([[0, 0, 0]]), coord_type="cartesian"),
        evaluate_basis(basis, np.array([[0, 0, 0]]), coord_type=["cartesian"]),
    )
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    assert np.allclose(
        evaluate_basis(basis, np.array([[1, 1, 1]]), coord_type="spherical"),
        evaluate_basis(basis, np.array([[1, 1, 1]]), coord_type=["spherical"] * 8),
    )
    assert np.allclose(
        evaluate_basis(basis, np.array([[1, 1, 1]]), coord_type="cartesian"),
        evaluate_basis(basis, np.array([[1, 1, 1]]), coord_type=["cartesian"] * 8),
    )
def test_evaluate_basis_spherical():
    """Test gbasis.evals.eval.evaluate_basis_spherical."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    # cartesian and spherical are the same for s orbital
    basis = make_contractions(basis_dict, ["H"], np.array([[0, 0, 0]]))
    evaluate_obj = Eval(basis)
    assert np.allclose(
        evaluate_obj.construct_array_cartesian(points=np.array([[0, 0, 0]])),
        evaluate_basis(basis, np.array([[0, 0, 0]]), coord_type="spherical"),
    )
    # p orbitals are zero at center
    basis = make_contractions(basis_dict, ["Li"], np.array([[0, 0, 0]]))
    evaluate_obj = Eval(basis)
    assert np.allclose(
        evaluate_obj.construct_array_cartesian(points=np.array([[0, 0, 0]])),
        evaluate_basis(basis, np.array([[0, 0, 0]]), coord_type="spherical"),
    )
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    evaluate_obj = Eval(basis)
    assert np.allclose(
        evaluate_obj.construct_array_spherical(points=np.array([[1, 1, 1]])),
        evaluate_basis(basis, np.array([[1, 1, 1]]), coord_type="spherical"),
    )
def test_overlap_spherical_norm_anorcc():
    """Test the norm of gbasis.integrals.overlap_spherical on the ANO-RCC basis set.

    The contraction coefficients in ANO-RCC are such that the Cartesian contractions are
    normalized.

    """
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))

    basis = make_contractions(basis_dict, ["C"], np.array([[0, 0, 0]]))
    overlap_obj = Overlap(basis)
    assert np.allclose(np.diag(overlap_obj.construct_array_cartesian()), 1)

    basis = make_contractions(basis_dict, ["Xe"], np.array([[0, 0, 0]]))
    overlap_obj = Overlap(basis)
    assert np.allclose(np.diag(overlap_obj.construct_array_cartesian()), 1)
def test_evaluate_hessian_deriv_horton():
    """Test gbasis.evals.density.evaluate_density_hessian against results from HORTON.

    The test case is diatomic with H and He separated by 0.8 angstroms with basis set ANO-RCC.

    """
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    # NOTE: used HORTON's conversion factor for angstroms to bohr
    points = np.array([[0, 0, 0], [0.8 * 1.0 / 0.5291772083, 0, 0]])
    basis = make_contractions(basis_dict, ["H", "He"], points)
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs, i.exps) for i in basis]

    # the reference data stores only the upper triangle of the (symmetric) Hessian;
    # fill it in and mirror it to the lower triangle
    horton_density_hessian = np.zeros((10**3, 3, 3))
    horton_density_hessian[:, [0, 0, 0, 1, 1, 2], [0, 1, 2, 1, 2, 2]] = np.load(
        find_datafile("data_horton_hhe_sph_density_hessian.npy")
    )
    horton_density_hessian[:, [1, 2, 2], [0, 0, 1]] = horton_density_hessian[
        :, [0, 0, 1], [1, 2, 2]
    ]

    grid_1d = np.linspace(-2, 2, num=10)
    grid_x, grid_y, grid_z = np.meshgrid(grid_1d, grid_1d, grid_1d)
    grid_3d = np.vstack([grid_x.ravel(), grid_y.ravel(), grid_z.ravel()]).T

    assert np.allclose(
        evaluate_density_hessian(np.identity(88), basis, grid_3d, np.identity(88)),
        horton_density_hessian,
    )
def load_basis(self):
    '''Load the basis set for the atoms in the molecule.

    If the basis set does not exist in the database, it is downloaded from
    https://www.basissetexchange.org/ using their API.
    '''
    gbs_path = os.path.join(os.getcwd(), 'Basis_Sets', f'{self.basis_type}.gbs')
    if not os.path.exists(gbs_path):
        print(f'Error: Basis set {self.basis_type} not found, downloading ...')
        import requests

        # list the formats offered by the Basis Set Exchange API (informational)
        print(requests.get("http://basissetexchange.org/api/formats").text)
        response = requests.get(
            "http://basissetexchange.org" + f'/api/basis/{self.basis_type}/format/Gaussian94'
        )
        if response:
            print('Successfully obtained basis set file')
            with open(gbs_path, 'w+') as f:
                f.write(response.text)
            # reload now that the .gbs file exists locally
            self.load_basis()
        else:
            print('Failed to obtain basis set file')
    else:
        print(f'Successfully loaded {self.basis_type}.gbs')
        all_basis_dict = gb_pars.parse_gbs(gbs_path)
        self.basis = gb_pars.make_contractions(
            all_basis_dict,
            [a.symbol for a in self.atoms],
            np.asarray([a.coords for a in self.atoms]),
        )
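# For reference, a minimal standalone sketch of the same download-and-parse flow used by
# load_basis above, outside of any class. Assumptions (not from the original code): gb_pars
# refers to gbasis.parsers, network access is available, and "sto-3g" is merely an example
# basis-set name accepted by the Basis Set Exchange API.
def _fetch_and_build_basis_sketch(basis_name, symbols, coords):
    """Download a Gaussian94-format .gbs file from the BSE API and build gbasis contractions."""
    import os

    import numpy as np
    import requests
    from gbasis.parsers import make_contractions, parse_gbs

    gbs_path = os.path.join(os.getcwd(), f'{basis_name}.gbs')
    if not os.path.exists(gbs_path):
        response = requests.get(
            "http://basissetexchange.org" + f'/api/basis/{basis_name}/format/Gaussian94'
        )
        response.raise_for_status()
        with open(gbs_path, 'w') as f:
            f.write(response.text)
    all_basis_dict = parse_gbs(gbs_path)
    return make_contractions(all_basis_dict, symbols, np.asarray(coords))


# Hypothetical usage: one hydrogen atom at the origin.
# basis = _fetch_and_build_basis_sketch("sto-3g", ["H"], [[0.0, 0.0, 0.0]])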
def test_electrostatic_potential_spherical():
    """Test gbasis.evals.electrostatic_potential.electrostatic_potential_spherical.

    The test case is diatomic with H and He separated by 0.8 angstroms with basis set ANO-RCC.
    Density matrix is an identity matrix.

    """
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    # NOTE: used HORTON's conversion factor for angstroms to bohr
    coords = np.array([[0, 0, 0], [0.8 * 1.0 / 0.5291772083, 0, 0]])
    basis = make_contractions(basis_dict, ["H", "He"], coords)
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs, i.exps) for i in basis]

    grid_1d = np.linspace(-2, 2, num=5)
    grid_x, grid_y, grid_z = np.meshgrid(grid_1d, grid_1d, grid_1d)
    grid_3d = np.vstack([grid_x.ravel(), grid_y.ravel(), grid_z.ravel()]).T

    horton_nucattract = np.load(find_datafile("data_horton_hhe_sph_esp.npy"))
    assert np.allclose(
        electrostatic_potential(
            basis, np.identity(88), grid_3d, coords, np.array([1, 2]), coord_type="spherical"
        ),
        horton_nucattract,
    )
def test_evaluate_general_kinetic_energy_density_horton():
    """Test evaluate_general_kinetic_energy_density against results from HORTON.

    The test case is diatomic with H and He separated by 0.8 angstroms with basis set ANO-RCC.
    Since alpha is set to 0, this effectively tests only the positive-definite (posdef) part.

    """
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    # NOTE: used HORTON's conversion factor for angstroms to bohr
    points = np.array([[0, 0, 0], [0.8 * 1.0 / 0.5291772083, 0, 0]])
    basis = make_contractions(basis_dict, ["H", "He"], points)
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs, i.exps) for i in basis]

    horton_density_kinetic_density = np.load(
        find_datafile("data_horton_hhe_sph_posdef_kinetic_density.npy")
    )

    grid_1d = np.linspace(-2, 2, num=10)
    grid_x, grid_y, grid_z = np.meshgrid(grid_1d, grid_1d, grid_1d)
    grid_3d = np.vstack([grid_x.ravel(), grid_y.ravel(), grid_z.ravel()]).T

    assert np.allclose(
        evaluate_general_kinetic_energy_density(
            np.identity(88), basis, grid_3d, 0, np.identity(88)
        ),
        horton_density_kinetic_density,
    )
def test_electron_repulsion_cartesian_horton_custom_hhe():
    """Test electron_repulsion.electron_repulsion_cartesian against horton results.

    The test case is diatomic with H and He separated by 0.8 angstroms with a modified ANO-RCC
    basis set. The basis set was modified by removing the large-exponent components to avoid
    overflow and removing some contractions to speed up the test.

    This test is also slow.

    """
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    coords = np.array([[0, 0, 0], [0.8, 0, 0]])
    basis = make_contractions(basis_dict, ["H", "He"], coords)
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs[:, 0], i.exps) for i in basis[:8]]
    basis[0] = HortonContractions(
        basis[0].angmom, basis[0].coord, basis[0].coeffs[3:], basis[0].exps[3:]
    )
    basis[4] = HortonContractions(
        basis[4].angmom, basis[4].coord, basis[4].coeffs[4:], basis[4].exps[4:]
    )
    basis.pop(3)
    basis.pop(2)

    horton_elec_repulsion = np.load(find_datafile("data_horton_hhe_cart_elec_repulsion.npy"))
    assert np.allclose(
        horton_elec_repulsion, electron_repulsion_integral(basis, coord_type="cartesian")
    )
def test_from_pyscf():
    """Test gbasis.wrapper.from_pyscf."""
    pytest.importorskip("pyscf")
    from pyscf import gto

    mol = gto.Mole()
    mol.build(atom="""Kr 1.0 2.0 3.0""", basis="ano-rcc", unit="Bohr")
    test = from_pyscf(mol)

    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[1, 2, 3]]))

    with pytest.raises(ValueError):

        class OtherName(gto.Mole):
            pass

        test = from_pyscf(OtherName())

    assert len(test) == len(basis)
    for i, j in zip(test, basis):
        assert np.allclose(i.coord, j.coord)
        assert i.angmom == j.angmom
        assert np.allclose(i.exps, j.exps)
        assert np.allclose(i.coeffs, j.coeffs)
        assert np.allclose(i.norm_cont, j.norm_cont)

    assert test[0].angmom_components_sph == ("c0",)
    assert test[1].angmom_components_sph == ("c1", "s1", "c0")
    assert test[2].angmom_components_sph == ("s2", "s1", "c0", "c1", "c2")
def test_evaluate_deriv_basis_lincomb():
    """Test gbasis.evals.eval.evaluate_deriv_basis_lincomb."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    evaluate_obj = EvalDeriv(basis)
    cart_transform = np.random.rand(14, 19)
    sph_transform = np.random.rand(14, 18)
    assert np.allclose(
        evaluate_obj.construct_array_lincomb(
            cart_transform, "cartesian", points=np.array([[1, 1, 1]]), orders=np.array([0, 0, 0])
        ),
        evaluate_deriv_basis(
            basis,
            np.array([[1, 1, 1]]),
            np.array([0, 0, 0]),
            cart_transform,
            coord_type="cartesian",
        ),
    )
    assert np.allclose(
        evaluate_obj.construct_array_lincomb(
            sph_transform, "spherical", points=np.array([[1, 1, 1]]), orders=np.array([2, 1, 0])
        ),
        evaluate_deriv_basis(
            basis,
            np.array([[1, 1, 1]]),
            np.array([2, 1, 0]),
            sph_transform,
            coord_type="spherical",
        ),
    )
def test_overlap_cartesian():
    """Test gbasis.integrals.overlap.overlap_cartesian."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    overlap_obj = Overlap(basis)
    assert np.allclose(
        overlap_obj.construct_array_cartesian(), overlap_integral(basis, coord_type="cartesian")
    )
def test_kinetic_energy_integral_cartesian():
    """Test gbasis.integrals.kinetic_energy.kinetic_energy_integral_cartesian."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    kinetic_energy_integral_obj = KineticEnergyIntegral(basis)
    assert np.allclose(
        kinetic_energy_integral_obj.construct_array_cartesian(),
        kinetic_energy_integral(basis, coord_type="cartesian"),
    )
def test_momentum_integral_cartesian():
    """Test gbasis.integrals.momentum.momentum_integral_cartesian."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    momentum_integral_obj = MomentumIntegral(basis)
    assert np.allclose(
        momentum_integral_obj.construct_array_cartesian(),
        momentum_integral(basis, coord_type="cartesian"),
    )
def test_evaluate_basis_cartesian():
    """Test gbasis.evals.eval.evaluate_basis_cartesian."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["H"], np.array([[0, 0, 0]]))
    evaluate_obj = Eval(basis)
    assert np.allclose(
        evaluate_obj.construct_array_cartesian(points=np.array([[0, 0, 0]])),
        evaluate_basis(basis, np.array([[0, 0, 0]]), coord_type="cartesian"),
    )
def test_angular_momentum_integral_mix():
    """Test gbasis.integrals.angular_momentum.angular_momentum_integral_mix."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    angular_momentum_integral_obj = AngularMomentumIntegral(basis)
    assert np.allclose(
        angular_momentum_integral_obj.construct_array_mix(["spherical"] * 8),
        angular_momentum_integral(basis, coord_type=["spherical"] * 8),
    )
def test_overlap_lincomb():
    """Test gbasis.integrals.overlap.overlap_lincomb."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    overlap_obj = Overlap(basis)
    transform = np.random.rand(14, 18)
    assert np.allclose(
        overlap_obj.construct_array_lincomb(transform, "spherical"),
        overlap_integral(basis, transform=transform, coord_type="spherical"),
    )
def test_kinetic_energy_integral_lincomb():
    """Test gbasis.integrals.kinetic_energy.kinetic_energy_integral_lincomb."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    kinetic_energy_integral_obj = KineticEnergyIntegral(basis)
    transform = np.random.rand(14, 18)
    assert np.allclose(
        kinetic_energy_integral_obj.construct_array_lincomb(transform, "spherical"),
        kinetic_energy_integral(basis, transform, coord_type="spherical"),
    )
def test_angular_momentum_integral_lincomb():
    """Test gbasis.integrals.angular_momentum.angular_momentum_integral_lincomb."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    angular_momentum_integral_obj = AngularMomentumIntegral(basis)
    transform = np.random.rand(14, 18)
    assert np.allclose(
        angular_momentum_integral_obj.construct_array_lincomb(transform, "spherical"),
        angular_momentum_integral(basis, transform, coord_type="spherical"),
    )
def test_evaluate_density_gradient():
    """Test gbasis.evals.density.evaluate_density_gradient."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    transform = np.random.rand(14, 18)
    density = np.random.rand(14, 14)
    density += density.T
    points = np.random.rand(10, 3)

    # each gradient component is d(rho)/dr = sum_{ij} gamma_{ij} (d(phi_i)/dr phi_j + phi_i d(phi_j)/dr)
    assert np.allclose(
        evaluate_density_gradient(density, basis, points, transform).T,
        np.array(
            [
                np.einsum(
                    "ij,ik,jk->k",
                    density,
                    evaluate_deriv_basis(basis, points, np.array([1, 0, 0]), transform),
                    evaluate_basis(basis, points, transform),
                )
                + np.einsum(
                    "ij,ik,jk->k",
                    density,
                    evaluate_basis(basis, points, transform),
                    evaluate_deriv_basis(basis, points, np.array([1, 0, 0]), transform),
                ),
                np.einsum(
                    "ij,ik,jk->k",
                    density,
                    evaluate_deriv_basis(basis, points, np.array([0, 1, 0]), transform),
                    evaluate_basis(basis, points, transform),
                )
                + np.einsum(
                    "ij,ik,jk->k",
                    density,
                    evaluate_basis(basis, points, transform),
                    evaluate_deriv_basis(basis, points, np.array([0, 1, 0]), transform),
                ),
                np.einsum(
                    "ij,ik,jk->k",
                    density,
                    evaluate_deriv_basis(basis, points, np.array([0, 0, 1]), transform),
                    evaluate_basis(basis, points, transform),
                )
                + np.einsum(
                    "ij,ik,jk->k",
                    density,
                    evaluate_basis(basis, points, transform),
                    evaluate_deriv_basis(basis, points, np.array([0, 0, 1]), transform),
                ),
            ]
        ),
    )
def test_overlap_cartesian_norm_sto6g():
    """Test the norm of gbasis.integrals.overlap_cartesian on the STO-6G basis set.

    The contraction coefficients in STO-6G are such that the Cartesian contractions are not
    normalized beyond the 3rd decimal place.

    """
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    overlap_obj = Overlap(basis)
    assert np.allclose(np.diag(overlap_obj.construct_array_cartesian()), 1)
def test_point_charge_mix():
    """Test gbasis.integrals.point_charge.point_charge_mix."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    point_charge_obj = PointChargeIntegral(basis)
    points_coords = np.random.rand(5, 3)
    points_charge = np.random.rand(5)
    assert np.allclose(
        point_charge_obj.construct_array_mix(
            ["spherical"] * 8, points_coords=points_coords, points_charge=points_charge
        ),
        point_charge_integral(basis, points_coords, points_charge, coord_type=["spherical"] * 8),
    )
def test_evaluate_density():
    """Test gbasis.evals.density.evaluate_density."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    transform = np.random.rand(14, 18)
    density = np.random.rand(14, 14)
    density += density.T
    points = np.random.rand(10, 3)

    evaluate_orbs = evaluate_basis(basis, points, transform)
    # rho(r_k) = sum_{ij} gamma_{ij} phi_i(r_k) phi_j(r_k)
    assert np.allclose(
        evaluate_density(density, basis, points, transform),
        np.einsum("ij,ik,jk->k", density, evaluate_orbs, evaluate_orbs),
    )
def test_overlap_horton_anorcc_bec():
    """Test gbasis.integrals.overlap.overlap_cartesian against HORTON's overlap matrix.

    The test case is diatomic with Be and C separated by 1.0 angstroms with basis set ANO-RCC.

    """
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    # NOTE: used HORTON's conversion factor for angstroms to bohr
    basis = make_contractions(
        basis_dict, ["Be", "C"], np.array([[0, 0, 0], [1.0 * 1.0 / 0.5291772083, 0, 0]])
    )
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs, i.exps) for i in basis]

    horton_overlap = np.load(find_datafile("data_horton_bec_cart_overlap.npy"))
    assert np.allclose(overlap_integral(basis, coord_type="cartesian"), horton_overlap)
def test_evaluate_basis_lincomb():
    """Test gbasis.evals.eval.evaluate_basis_lincomb."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    evaluate_obj = Eval(basis)
    transform = np.random.rand(14, 18)
    assert np.allclose(
        evaluate_obj.construct_array_lincomb(transform, "spherical", points=np.array([[1, 1, 1]])),
        evaluate_basis(basis, np.array([[1, 1, 1]]), transform=transform, coord_type="spherical"),
    )
def test_electron_repulsion_mix():
    """Test gbasis.integrals.electron_repulsion.electron_repulsion_mix."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["C"], np.array([[0, 0, 0]]))
    erep_obj = ElectronRepulsionIntegral(basis)
    assert np.allclose(
        erep_obj.construct_array_mix(["spherical"] * 3),
        electron_repulsion_integral(basis, notation="chemist", coord_type=["spherical"] * 3),
    )
    assert np.allclose(
        np.einsum("ijkl->ikjl", erep_obj.construct_array_mix(["spherical"] * 3)),
        electron_repulsion_integral(basis, notation="physicist", coord_type=["spherical"] * 3),
    )
    with pytest.raises(ValueError):
        electron_repulsion_integral(basis, notation="bad", coord_type=["spherical"] * 3)
def test_electron_repulsion_cartesian_horton_sto6g_bec():
    """Test electron_repulsion.electron_repulsion_cartesian against horton results.

    The test case is diatomic with Be and C separated by 1.0 angstroms with basis set STO-6G.

    Note that ANO-RCC was not used because it results in overflow in
    _compute_two_electron_integrals.

    """
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    coords = np.array([[0, 0, 0], [1.0, 0, 0]])
    basis = make_contractions(basis_dict, ["Be", "C"], coords)
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs, i.exps) for i in basis]

    horton_elec_repulsion = np.load(find_datafile("data_horton_bec_cart_elec_repulsion.npy"))
    assert np.allclose(
        horton_elec_repulsion, electron_repulsion_integral(basis, coord_type="cartesian")
    )
def test_overlap_integral_asymmetric_compare():
    """Test overlap_asymm.overlap_integral_asymmetric against overlap.overlap_integral."""
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    basis = make_contractions(basis_dict, ["Kr", "Kr"], np.array([[0, 0, 0], [1.0, 0, 0]]))
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs, i.exps) for i in basis]

    assert np.allclose(
        overlap_integral(basis, coord_type="cartesian"),
        overlap_integral_asymmetric(
            basis, basis, coord_type_one="cartesian", coord_type_two="cartesian"
        ),
    )
    assert np.allclose(
        overlap_integral(basis, coord_type="spherical"),
        overlap_integral_asymmetric(
            basis, basis, coord_type_one="spherical", coord_type_two="spherical"
        ),
    )
    assert np.allclose(
        overlap_integral(basis, transform=np.identity(218), coord_type="spherical"),
        overlap_integral_asymmetric(
            basis,
            basis,
            transform_one=np.identity(218),
            transform_two=np.identity(218),
            coord_type_one="spherical",
            coord_type_two="spherical",
        ),
    )
    assert np.allclose(
        overlap_integral(basis, coord_type=["spherical"] * 9 + ["cartesian"]),
        overlap_integral_asymmetric(
            basis,
            basis,
            coord_type_one=["spherical"] * 9 + ["cartesian"],
            coord_type_two=["spherical"] * 9 + ["cartesian"],
        ),
    )
def test_point_charge_lincomb():
    """Test gbasis.integrals.point_charge.point_charge_lincomb."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    point_charge_obj = PointChargeIntegral(basis)
    points_coords = np.random.rand(5, 3)
    points_charge = np.random.rand(5)
    transform = np.random.rand(14, 18)
    assert np.allclose(
        point_charge_obj.construct_array_lincomb(
            transform, "spherical", points_coords=points_coords, points_charge=points_charge
        ),
        point_charge_integral(
            basis, points_coords=points_coords, points_charge=points_charge, transform=transform
        ),
    )
def test_point_charge_cartesian():
    """Test gbasis.integrals.point_charge.point_charge_cartesian."""
    basis_dict = parse_nwchem(find_datafile("data_sto6g.nwchem"))
    basis = make_contractions(basis_dict, ["Kr"], np.array([[0, 0, 0]]))
    point_charge_obj = PointChargeIntegral(basis)
    points_coords = np.random.rand(5, 3)
    points_charges = np.random.rand(5)
    assert np.allclose(
        point_charge_obj.construct_array_cartesian(
            points_coords=points_coords, points_charges=points_charges
        ),
        point_charge_integral(
            basis,
            points_coords=points_coords,
            points_charges=points_charges,
            coord_type="cartesian",
        ),
    )
def test_kinetic_energy_integral_horton_anorcc_hhe():
    """Test gbasis.integrals.kinetic_energy.kinetic_energy_integral_cartesian against HORTON.

    The test case is diatomic with H and He separated by 0.8 angstroms with basis set ANO-RCC.

    """
    basis_dict = parse_nwchem(find_datafile("data_anorcc.nwchem"))
    # NOTE: used HORTON's conversion factor for angstroms to bohr
    basis = make_contractions(
        basis_dict, ["H", "He"], np.array([[0, 0, 0], [0.8 * 1.0 / 0.5291772083, 0, 0]])
    )
    basis = [HortonContractions(i.angmom, i.coord, i.coeffs, i.exps) for i in basis]

    horton_kinetic_energy_integral = np.load(
        find_datafile("data_horton_hhe_cart_kinetic_energy_integral.npy")
    )
    assert np.allclose(
        kinetic_energy_integral(basis, coord_type="cartesian"), horton_kinetic_energy_integral
    )