# Example #1
 def testTokenizer(self): # fold>>
     """Five concatenated snippets must tokenize to the five matching tokens, in order."""
     snippets = [
         FileSnippets.fileHeader(),
         FileSnippets.centerOfMass(),
         FileSnippets.isotopicMasses(),
         FileSnippets.totalMass(),
         FileSnippets.momentsOfInertia(),
     ]
     writeToTestFile("\n".join(snippets) + "\n")
     produced = Dalton.tokenizeOutFile(testFilePath())
     expected_classes = [
         Tokens.FileHeaderToken,
         Tokens.CenterOfMassToken,
         Tokens.IsotopicMassesToken,
         Tokens.TotalMassToken,
         Tokens.MomentsOfInertiaToken,
     ]
     self.assertEqual(len(produced), len(expected_classes))
     for token, cls in zip(produced, expected_classes):
         self.assertEqual(token.__class__, cls)
# Example #2
    def testTokenizerDipoleMomentComponents(self): # fold>>
        """A lone dipole-moment-components snippet must yield exactly one DipoleMomentComponentsToken."""
        writeToTestFile(FileSnippets.dipoleMomentComponents() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.DipoleMomentComponentsToken)
# Example #3
    def testTokenizerAtomsAndBasisSetsTable(self): # fold>>
        """A lone atoms-and-basis-sets table must yield exactly one AtomsAndBasisSetsToken."""
        writeToTestFile(FileSnippets.atomsAndBasisSetsTable() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.AtomsAndBasisSetsToken)
# Example #4
    def testTokenizerNormalModesEigenvalues(self): # fold>>
        """A lone normal-modes-eigenvalues snippet must yield exactly one NormalModesEigenvaluesToken."""
        writeToTestFile(FileSnippets.normalModesEigenvalues() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.NormalModesEigenvaluesToken)
# Example #5
    def testTokenizerOptimizationInfo(self): # fold>>
        """A lone optimization-info snippet must yield exactly one OptimizationInfoToken."""
        writeToTestFile(FileSnippets.optimizationInfo() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.OptimizationInfoToken)
# Example #6
    def testTokenizerGeometryConvergenceNumIterations(self): # fold>>
        """A lone geometry-convergence snippet must yield exactly one GeometryConvergenceNumIterationsToken."""
        writeToTestFile(FileSnippets.geometryConvergenceNumIterations() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.GeometryConvergenceNumIterationsToken)
    def testZPVEToken(self): # fold>>
        """A lone ZPVE snippet must tokenize (via GRRM) to exactly one ZPVEToken."""
        writeToTestFile(FileSnippets.zpve() + "\n")
        produced = GRRM.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.ZPVEToken)
    def testStructureHeaderToken(self): # fold>>
        """A lone structure-header snippet must tokenize (via GRRM) to exactly one StructureHeaderToken."""
        writeToTestFile(FileSnippets.structureHeader() + "\n")
        produced = GRRM.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.StructureHeaderToken)
    def testHeaderTransitionToken(self): # fold>>
        """A lone header-transition snippet must tokenize (via GRRM) to exactly one HeaderTransitionToken."""
        # NOTE(review): a method with this same name appears again further down in
        # this chunk; if both end up in one class the later definition shadows this
        # one and this test never runs — confirm they belong to different classes.
        writeToTestFile(FileSnippets.headerTransition() + "\n")
        produced = GRRM.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.HeaderTransitionToken)
# Example #10
    def testTokenizerIsotopicMasses(self):  # fold>>
        """A lone isotopic-masses snippet must tokenize (via Dalton20) to exactly one IsotopicMassesToken."""
        writeToTestFile(FileSnippets.isotopicMasses() + "\n")
        produced = Dalton20.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.IsotopicMassesToken)
# Example #11
    def testTokenizerFileHeader(self):  # fold>>
        """A lone file-header snippet must tokenize (via Dalton20) to exactly one FileHeaderToken."""
        writeToTestFile(FileSnippets.fileHeader() + "\n")
        produced = Dalton20.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.FileHeaderToken)
# Example #12
    def testTokenizerHOMOLUMOSeparation(self): # fold>>
        """A lone HOMO-LUMO separation snippet must yield exactly one HOMOLUMOSeparationToken."""
        writeToTestFile(FileSnippets.HomoLumoSeparation() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.HOMOLUMOSeparationToken)
# Example #13
    def testTokenizerTotalMass(self): # fold>>
        """A lone total-mass snippet must yield exactly one TotalMassToken."""
        writeToTestFile(FileSnippets.totalMass() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.TotalMassToken)
# Example #14
    def testTokenizerSymmetry(self):  # fold>>
        """A lone symmetry snippet must tokenize (via Dalton20) to exactly one SymmetryToken."""
        writeToTestFile(FileSnippets.symmetry() + "\n")
        produced = Dalton20.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.SymmetryToken)
# Example #15
    def testOptimizationNextGeometryToken(self):  # fold>>
        """match() on a next-geometry snippet consumes 8 lines and exposes the 4-atom geometry."""
        writeToTestFile(FileSnippets.optimizationNextGeometry())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()
        token = Tokens.OptimizationNextGeometryToken.match(stream)

        self.assertEqual(token.__class__, Tokens.OptimizationNextGeometryToken)
        # the snippet is 8 lines long, so the reader must have advanced by 8
        self.assertEqual(stream.currentPos(), before + 8)

        # (label, index, (x, y, z)) for each atom, in file order
        expected_atoms = [
            ("O1", "", ("0.0000000000", "0.0000000000", "0.0680928675")),
            ("H1", "1", ("0.0000000000", "-1.7554324515", "3.4700805319")),
            ("H1", "2", ("0.0000000000", "1.7554324515", "3.4700805319")),
            ("C1", "", ("0.0000000000", "0.0000000000", "2.3521294415")),
        ]
        atoms = token.atomList()
        self.assertEqual(len(atoms), len(expected_atoms))
        for atom, (label, index, coords) in zip(atoms, expected_atoms):
            self.assertEqual(atom[0], label)
            self.assertEqual(atom[1], index)
            self.assertEqual(atom[2], coords)
# Example #16
    def testTokenizerFinalGeometryEnergy(self): # fold>>
        """A lone final-geometry-energy snippet must yield exactly one FinalGeometryEnergyToken."""
        writeToTestFile(FileSnippets.finalGeometryEnergy() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.FinalGeometryEnergyToken)
# Example #17
    def testCartesianCoordinatesToken(self):  # fold>>
        """match() on a cartesian-coordinates snippet consumes 22 lines and exposes the 4-atom geometry."""
        writeToTestFile(FileSnippets.cartesianCoordinates())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()
        token = Tokens.CartesianCoordinatesToken.match(stream)

        self.assertEqual(token.__class__, Tokens.CartesianCoordinatesToken)
        # the snippet is 22 lines long, so the reader must have advanced by 22
        self.assertEqual(stream.currentPos(), before + 22)

        # (label, index, (x, y, z)) for each atom, in file order
        expected_atoms = [
            ("O1", "", ("0.0000000000", "0.0000000000", "0.3000000000")),
            ("H1", "1", ("0.0000000000", "-1.7597098488", "3.3775957364")),
            ("H1", "2", ("0.0000000000", "1.7597098488", "3.3775957364")),
            ("C1", "", ("0.0000000000", "0.0000000000", "2.3051919000")),
        ]
        atoms = token.atomList()
        self.assertEqual(len(atoms), len(expected_atoms))
        for atom, (label, index, coords) in zip(atoms, expected_atoms):
            self.assertEqual(atom[0], label)
            self.assertEqual(atom[1], index)
            self.assertEqual(atom[2], coords)
# Example #18
    def testTokenizerEndOfOptimizationHeader(self): # fold>>
        """A lone end-of-optimization header must yield exactly one EndOfOptimizationHeaderToken."""
        writeToTestFile(FileSnippets.endOfOptimizationHeader() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.EndOfOptimizationHeaderToken)
# Example #19
    def testTokenizerCartesianCoordinates(self): # fold>>
        """A lone cartesian-coordinates snippet must yield exactly one CartesianCoordinatesToken."""
        writeToTestFile(FileSnippets.cartesianCoordinates() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.CartesianCoordinatesToken)
# Example #20
    def testTokenizerMomentsOfInertia(self): # fold>>
        """A lone moments-of-inertia snippet must yield exactly one MomentsOfInertiaToken."""
        writeToTestFile(FileSnippets.momentsOfInertia() + "\n")
        produced = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__, Tokens.MomentsOfInertiaToken)
# Example #21
    def testTokenizerFirstHyperpolarizabilityComponent(self):  # fold>>
        """A lone first-hyperpolarizability snippet must tokenize (via Dalton20) to exactly one FirstHyperpolarizabilityComponentToken."""
        writeToTestFile(FileSnippets.firstHyperpolarizability() + "\n")
        produced = Dalton20.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__,
                         Tokens.FirstHyperpolarizabilityComponentToken)
# Example #22
    def testTokenizerSecondHyperpolarizability(self):  # fold>>
        """A lone second-hyperpolarizability snippet must tokenize (via Dalton20) to exactly one SecondHyperpolarizabilityToken."""
        writeToTestFile(FileSnippets.secondHyperpolarizability() + "\n")
        produced = Dalton20.tokenizeOutFile(testFilePath())
        self.assertEqual(len(produced), 1)
        self.assertEqual(produced[0].__class__,
                         Tokens.SecondHyperpolarizabilityToken)
# Example #23
    def testHeaderTransitionToken(self):  # fold>>
        """HeaderTransitionToken.match must succeed on its snippet and consume exactly one line."""
        writeToTestFile(FileSnippets.headerTransition())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()
        produced = Tokens.HeaderTransitionToken.match(stream)

        self.assertEqual(produced.__class__, Tokens.HeaderTransitionToken)
        self.assertEqual(stream.currentPos(), before + 1)
# Example #24
    def testHOMOLUMOSeparationTokenNotMatching(self):  # fold>>
        """match() on foreign content must return None and leave the reader position untouched."""
        writeToTestFile(FileSnippets.optimizationNextGeometry())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()

        self.assertEqual(Tokens.HOMOLUMOSeparationToken.match(stream), None)
        self.assertEqual(stream.currentPos(), before)
# Example #25
    def testAtomsAndBasisSetsTokenNotMatching(self):  # fold>>
        """match() on a file header must return None and leave the reader position untouched."""
        writeToTestFile(FileSnippets.fileHeader())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()

        self.assertEqual(Tokens.AtomsAndBasisSetsToken.match(stream), None)
        self.assertEqual(stream.currentPos(), before)
# Example #26
    def testNormalModesEigenvaluesTokenNotMatching(self):  # fold>>
        """match() on a file header must return None and leave the reader position untouched."""
        writeToTestFile(FileSnippets.fileHeader())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()

        self.assertEqual(Tokens.NormalModesEigenvaluesToken.match(stream), None)
        self.assertEqual(stream.currentPos(), before)
# Example #27
    def testGeometryConvergenceNumIterationsTokenNotMatching(self):  # fold>>
        """match() on a file header must return None and leave the reader position untouched."""
        writeToTestFile(FileSnippets.fileHeader())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()

        self.assertEqual(Tokens.GeometryConvergenceNumIterationsToken.match(stream), None)
        self.assertEqual(stream.currentPos(), before)
# Example #28
    def testEndOfOptimizationHeaderTokenNotMatching(self):  # fold>>
        """match() on a file header must return None and leave the reader position untouched."""
        writeToTestFile(FileSnippets.fileHeader())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()

        self.assertEqual(Tokens.EndOfOptimizationHeaderToken.match(stream), None)
        self.assertEqual(stream.currentPos(), before)
# Example #29
    def testMomentsOfInertiaTokenNotMatching(self):  # fold>>
        """A leading blank line must prevent the match and leave the reader position untouched."""
        writeToTestFile("\n" + FileSnippets.momentsOfInertia())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()

        self.assertEqual(Tokens.MomentsOfInertiaToken.match(stream), None)
        self.assertEqual(stream.currentPos(), before)
# Example #30
    def testIsotopicMassesTokenNotMatching(self):  # fold>>
        """A leading blank line must prevent the match and leave the reader position untouched."""
        writeToTestFile("\n" + FileSnippets.isotopicMasses())

        stream = FileReader.FileReader(testFilePath())
        before = stream.currentPos()

        self.assertEqual(Tokens.IsotopicMassesToken.match(stream), None)
        self.assertEqual(stream.currentPos(), before)