def testTokenizerIsotopicMasses(self):  # fold>>
    """Tokenizing a file holding only the isotopic-masses section yields exactly one IsotopicMassesToken."""
    # Trailing newline so the snippet forms a complete final line in the test file.
    data = FileSnippets.isotopicMasses() + "\n"
    writeToTestFile(data)
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].__class__, Tokens.IsotopicMassesToken)
def testIsotopicMassesTokenNotMatching(self):  # fold>>
    """match() returns None and restores the reader position when the section does not start at the current line."""
    data = FileSnippets.isotopicMasses()
    # Leading blank line shifts the section off the reader's starting position.
    writeToTestFile("\n" + data)
    reader = FileReader.FileReader(testFilePath())
    start_pos = reader.currentPos()
    token = Tokens.IsotopicMassesToken.match(reader)
    self.assertEqual(token, None)
    # A failed match must not consume any input.
    self.assertEqual(reader.currentPos(), start_pos)
def testTokenizer(self):  # fold>>
    """Tokenizing a file with five known sections yields the five matching tokens, in file order."""
    data = (
        FileSnippets.fileHeader() + "\n"
        + FileSnippets.centerOfMass() + "\n"
        + FileSnippets.isotopicMasses() + "\n"
        + FileSnippets.totalMass() + "\n"
        + FileSnippets.momentsOfInertia() + "\n"
    )
    writeToTestFile(data)
    tokens = Dalton.tokenizeOutFile(testFilePath())
    self.assertEqual(len(tokens), 5)
    self.assertEqual(tokens[0].__class__, Tokens.FileHeaderToken)
    self.assertEqual(tokens[1].__class__, Tokens.CenterOfMassToken)
    self.assertEqual(tokens[2].__class__, Tokens.IsotopicMassesToken)
    self.assertEqual(tokens[3].__class__, Tokens.TotalMassToken)
    self.assertEqual(tokens[4].__class__, Tokens.MomentsOfInertiaToken)
def testIsotopicMassesToken(self):  # fold>>
    """A successful match() consumes the section (8 lines) and exposes the parsed atom list."""
    data = FileSnippets.isotopicMasses()
    writeToTestFile(data)
    reader = FileReader.FileReader(testFilePath())
    start_pos = reader.currentPos()
    token = Tokens.IsotopicMassesToken.match(reader)
    self.assertEqual(token.__class__, Tokens.IsotopicMassesToken)
    # The isotopic-masses section in the snippet spans 8 lines.
    self.assertEqual(reader.currentPos(), start_pos + 8)
    atom_list = token.atomList()
    self.assertEqual(type(atom_list), type([]))
    self.assertEqual(len(atom_list), 4)
    # Each entry is (label, index-within-label, mass-as-string).
    self.assertEqual(atom_list[0], ("O1", "", "15.994915"))
    self.assertEqual(atom_list[1], ("H1", "1", "1.007825"))
    self.assertEqual(atom_list[2], ("H1", "2", "1.007825"))
    self.assertEqual(atom_list[3], ("C1", "", "12.000000"))