def testTokenizer(self):
    """Tokenizing several concatenated snippets yields one token per snippet, in order."""
    snippets = [
        FileSnippets.fileHeader(),
        FileSnippets.centerOfMass(),
        FileSnippets.isotopicMasses(),
        FileSnippets.totalMass(),
        FileSnippets.momentsOfInertia(),
    ]
    writeToTestFile("\n".join(snippets) + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    expected_classes = [
        Tokens.FileHeaderToken,
        Tokens.CenterOfMassToken,
        Tokens.IsotopicMassesToken,
        Tokens.TotalMassToken,
        Tokens.MomentsOfInertiaToken,
    ]
    self.assertEqual(len(tokens), len(expected_classes))
    for token, expected in zip(tokens, expected_classes):
        self.assertEqual(token.__class__, expected)
def testTokenizerDipoleMomentComponents(self):
    """A dipole-moment-components snippet tokenizes to a single DipoleMomentComponentsToken."""
    writeToTestFile(FileSnippets.dipoleMomentComponents() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.DipoleMomentComponentsToken)
def testTokenizerAtomsAndBasisSetsTable(self):
    """An atoms-and-basis-sets table snippet tokenizes to a single AtomsAndBasisSetsToken."""
    writeToTestFile(FileSnippets.atomsAndBasisSetsTable() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.AtomsAndBasisSetsToken)
def testTokenizerNormalModesEigenvalues(self):
    """A normal-modes-eigenvalues snippet tokenizes to a single NormalModesEigenvaluesToken."""
    writeToTestFile(FileSnippets.normalModesEigenvalues() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.NormalModesEigenvaluesToken)
def testTokenizerOptimizationInfo(self):
    """An optimization-info snippet tokenizes to a single OptimizationInfoToken."""
    writeToTestFile(FileSnippets.optimizationInfo() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.OptimizationInfoToken)
def testTokenizerGeometryConvergenceNumIterations(self):
    """A geometry-convergence iteration-count snippet tokenizes to a single token of the right class."""
    writeToTestFile(FileSnippets.geometryConvergenceNumIterations() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.GeometryConvergenceNumIterationsToken)
def testZPVEToken(self):
    """A ZPVE snippet tokenizes (via the GRRM tokenizer) to a single ZPVEToken."""
    writeToTestFile(FileSnippets.zpve() + "\n")
    tokens = GRRM.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.ZPVEToken)
def testStructureHeaderToken(self):
    """A structure-header snippet tokenizes (via the GRRM tokenizer) to a single StructureHeaderToken."""
    writeToTestFile(FileSnippets.structureHeader() + "\n")
    tokens = GRRM.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.StructureHeaderToken)
def testHeaderTransitionToken(self):
    """A header-transition snippet tokenizes (via the GRRM tokenizer) to a single HeaderTransitionToken."""
    writeToTestFile(FileSnippets.headerTransition() + "\n")
    tokens = GRRM.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.HeaderTransitionToken)
def testTokenizerIsotopicMasses(self):
    """An isotopic-masses snippet tokenizes (via the Dalton20 tokenizer) to a single IsotopicMassesToken."""
    writeToTestFile(FileSnippets.isotopicMasses() + "\n")
    tokens = Dalton20.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.IsotopicMassesToken)
def testTokenizerFileHeader(self):
    """A file-header snippet tokenizes (via the Dalton20 tokenizer) to a single FileHeaderToken."""
    writeToTestFile(FileSnippets.fileHeader() + "\n")
    tokens = Dalton20.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.FileHeaderToken)
def testTokenizerHOMOLUMOSeparation(self):
    """A HOMO-LUMO separation snippet tokenizes to a single HOMOLUMOSeparationToken."""
    writeToTestFile(FileSnippets.HomoLumoSeparation() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.HOMOLUMOSeparationToken)
def testTokenizerTotalMass(self):
    """A total-mass snippet tokenizes to a single TotalMassToken."""
    writeToTestFile(FileSnippets.totalMass() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.TotalMassToken)
def testTokenizerSymmetry(self):
    """A symmetry snippet tokenizes (via the Dalton20 tokenizer) to a single SymmetryToken."""
    writeToTestFile(FileSnippets.symmetry() + "\n")
    tokens = Dalton20.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.SymmetryToken)
def testOptimizationNextGeometryToken(self):
    """match() on a next-geometry snippet advances the reader by 8 lines and parses four atoms.

    Each parsed atom is (label, index-suffix, (x, y, z)) with coordinates kept
    as strings.
    """
    writeToTestFile(FileSnippets.optimizationNextGeometry())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.OptimizationNextGeometryToken.match(reader)
    self.assertEqual(token.__class__, Tokens.OptimizationNextGeometryToken)
    # The token consumes exactly the 8 lines of the geometry block.
    self.assertEqual(reader.currentPos(), pos_before + 8)
    expected_atoms = [
        ("O1", "", ("0.0000000000", "0.0000000000", "0.0680928675")),
        ("H1", "1", ("0.0000000000", "-1.7554324515", "3.4700805319")),
        ("H1", "2", ("0.0000000000", "1.7554324515", "3.4700805319")),
        ("C1", "", ("0.0000000000", "0.0000000000", "2.3521294415")),
    ]
    atom_list = token.atomList()
    self.assertEqual(len(atom_list), len(expected_atoms))
    for atom, (label, suffix, coords) in zip(atom_list, expected_atoms):
        self.assertEqual(atom[0], label)
        self.assertEqual(atom[1], suffix)
        self.assertEqual(atom[2], coords)
def testTokenizerFinalGeometryEnergy(self):
    """A final-geometry-energy snippet tokenizes to a single FinalGeometryEnergyToken."""
    writeToTestFile(FileSnippets.finalGeometryEnergy() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.FinalGeometryEnergyToken)
def testCartesianCoordinatesToken(self):
    """match() on a Cartesian-coordinates snippet advances the reader by 22 lines and parses four atoms.

    Each parsed atom is (label, index-suffix, (x, y, z)) with coordinates kept
    as strings.
    """
    writeToTestFile(FileSnippets.cartesianCoordinates())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.CartesianCoordinatesToken.match(reader)
    self.assertEqual(token.__class__, Tokens.CartesianCoordinatesToken)
    # The token consumes exactly the 22 lines of the coordinates block.
    self.assertEqual(reader.currentPos(), pos_before + 22)
    expected_atoms = [
        ("O1", "", ("0.0000000000", "0.0000000000", "0.3000000000")),
        ("H1", "1", ("0.0000000000", "-1.7597098488", "3.3775957364")),
        ("H1", "2", ("0.0000000000", "1.7597098488", "3.3775957364")),
        ("C1", "", ("0.0000000000", "0.0000000000", "2.3051919000")),
    ]
    atom_list = token.atomList()
    self.assertEqual(len(atom_list), len(expected_atoms))
    for atom, (label, suffix, coords) in zip(atom_list, expected_atoms):
        self.assertEqual(atom[0], label)
        self.assertEqual(atom[1], suffix)
        self.assertEqual(atom[2], coords)
def testTokenizerEndOfOptimizationHeader(self):
    """An end-of-optimization header snippet tokenizes to a single EndOfOptimizationHeaderToken."""
    writeToTestFile(FileSnippets.endOfOptimizationHeader() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.EndOfOptimizationHeaderToken)
def testTokenizerCartesianCoordinates(self):
    """A Cartesian-coordinates snippet tokenizes to a single CartesianCoordinatesToken."""
    writeToTestFile(FileSnippets.cartesianCoordinates() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.CartesianCoordinatesToken)
def testTokenizerMomentsOfInertia(self):
    """A moments-of-inertia snippet tokenizes to a single MomentsOfInertiaToken."""
    writeToTestFile(FileSnippets.momentsOfInertia() + "\n")
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.MomentsOfInertiaToken)
def testTokenizerFirstHyperpolarizabilityComponent(self):
    """A first-hyperpolarizability snippet tokenizes (via Dalton20) to a single component token."""
    writeToTestFile(FileSnippets.firstHyperpolarizability() + "\n")
    tokens = Dalton20.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.FirstHyperpolarizabilityComponentToken)
def testTokenizerSecondHyperpolarizability(self):
    """A second-hyperpolarizability snippet tokenizes (via Dalton20) to a single token of the right class."""
    writeToTestFile(FileSnippets.secondHyperpolarizability() + "\n")
    tokens = Dalton20.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.SecondHyperpolarizabilityToken)
def testHeaderTransitionToken(self):
    """HeaderTransitionToken.match() matches the snippet and consumes exactly one line.

    NOTE(review): a method with this same name also appears earlier in this
    file; if both end up in the same TestCase class, this one shadows the
    other and only one runs — confirm they belong to different classes.
    """
    writeToTestFile(FileSnippets.headerTransition())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.HeaderTransitionToken.match(reader)
    self.assertEqual(token.__class__, Tokens.HeaderTransitionToken)
    self.assertEqual(reader.currentPos(), pos_before + 1)
def testHOMOLUMOSeparationTokenNotMatching(self):
    """On non-matching input, match() returns None and leaves the reader position unchanged."""
    writeToTestFile(FileSnippets.optimizationNextGeometry())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.HOMOLUMOSeparationToken.match(reader)
    self.assertEqual(token, None)
    self.assertEqual(reader.currentPos(), pos_before)
def testAtomsAndBasisSetsTokenNotMatching(self):
    """On non-matching input, match() returns None and leaves the reader position unchanged."""
    writeToTestFile(FileSnippets.fileHeader())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.AtomsAndBasisSetsToken.match(reader)
    self.assertEqual(token, None)
    self.assertEqual(reader.currentPos(), pos_before)
def testNormalModesEigenvaluesTokenNotMatching(self):
    """On non-matching input, match() returns None and leaves the reader position unchanged."""
    writeToTestFile(FileSnippets.fileHeader())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.NormalModesEigenvaluesToken.match(reader)
    self.assertEqual(token, None)
    self.assertEqual(reader.currentPos(), pos_before)
def testGeometryConvergenceNumIterationsTokenNotMatching(self):
    """On non-matching input, match() returns None and leaves the reader position unchanged."""
    writeToTestFile(FileSnippets.fileHeader())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.GeometryConvergenceNumIterationsToken.match(reader)
    self.assertEqual(token, None)
    self.assertEqual(reader.currentPos(), pos_before)
def testEndOfOptimizationHeaderTokenNotMatching(self):
    """On non-matching input, match() returns None and leaves the reader position unchanged."""
    writeToTestFile(FileSnippets.fileHeader())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.EndOfOptimizationHeaderToken.match(reader)
    self.assertEqual(token, None)
    self.assertEqual(reader.currentPos(), pos_before)
def testMomentsOfInertiaTokenNotMatching(self):
    """With a leading blank line the snippet no longer matches; match() returns None without advancing."""
    writeToTestFile("\n" + FileSnippets.momentsOfInertia())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.MomentsOfInertiaToken.match(reader)
    self.assertEqual(token, None)
    self.assertEqual(reader.currentPos(), pos_before)
def testIsotopicMassesTokenNotMatching(self):
    """With a leading blank line the snippet no longer matches; match() returns None without advancing."""
    writeToTestFile("\n" + FileSnippets.isotopicMasses())
    reader = FileReader.FileReader(testFilePath())
    pos_before = reader.currentPos()
    token = Tokens.IsotopicMassesToken.match(reader)
    self.assertEqual(token, None)
    self.assertEqual(reader.currentPos(), pos_before)