Example #1
0
    def testTokenizerTotalMass(self): # fold>>
        """Tokenizing a file holding only the total-mass snippet must
        yield exactly one token, of class TotalMassToken."""
        writeToTestFile(FileSnippets.totalMass() + "\n")

        result = Dalton.tokenizeOutFile(testFilePath())

        self.assertEqual(len(result), 1)
        self.assertEqual(result[0].__class__, Tokens.TotalMassToken)
    def testTotalMassTokenNotMatching(self):  # fold>>
        """TotalMassToken.match must return None when the snippet is not
        at the current line (a leading newline shifts it down), and must
        leave the reader position untouched on a failed match."""
        data = FileSnippets.totalMass()
        writeToTestFile("\n" + data)

        reader = FileReader.FileReader(testFilePath())
        start_pos = reader.currentPos()
        token = Tokens.TotalMassToken.match(reader)

        # assertIsNone is the idiomatic check and gives a clearer
        # failure message than assertEqual(token, None).
        self.assertIsNone(token)
        # A failed match must not consume any input.
        self.assertEqual(reader.currentPos(), start_pos)
    def testTotalMassToken(self):  # fold>>
        """A successful TotalMassToken.match must return a TotalMassToken,
        advance the reader by exactly one line, and expose the parsed
        mass as the string "30.010565"."""
        data = FileSnippets.totalMass()
        writeToTestFile(data)

        reader = FileReader.FileReader(testFilePath())
        start_pos = reader.currentPos()
        token = Tokens.TotalMassToken.match(reader)

        self.assertEqual(token.__class__, Tokens.TotalMassToken)
        # A successful match consumes exactly one line.
        self.assertEqual(reader.currentPos(), start_pos + 1)

        # assertIsInstance replaces the awkward type(...) == type("")
        # comparison with the idiomatic string-type check.
        self.assertIsInstance(token.totalMass(), str)
        self.assertEqual(token.totalMass(), "30.010565")
Example #4
0
 def testTokenizer(self):  # fold>>
     """Tokenizing a file built from five known snippets must produce
     the five corresponding token classes, in file order."""
     snippets = [
         FileSnippets.fileHeader(),
         FileSnippets.centerOfMass(),
         FileSnippets.isotopicMasses(),
         FileSnippets.totalMass(),
         FileSnippets.momentsOfInertia(),
     ]
     writeToTestFile("\n".join(snippets) + "\n")

     tokens = Dalton20.tokenizeOutFile(testFilePath())

     expected_classes = [
         Tokens.FileHeaderToken,
         Tokens.CenterOfMassToken,
         Tokens.IsotopicMassesToken,
         Tokens.TotalMassToken,
         Tokens.MomentsOfInertiaToken,
     ]
     self.assertEqual(len(tokens), len(expected_classes))
     for token, expected in zip(tokens, expected_classes):
         self.assertEqual(token.__class__, expected)