Пример #1
0
    def testTokenizerFileHeader(self): # fold>>
        """A file containing only the file-header snippet tokenizes to a single FileHeaderToken."""
        snippet = FileSnippets.fileHeader() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__, Tokens.FileHeaderToken)
Пример #2
0
    def testTokenizerIsotopicMasses(self): # fold>>
        """The isotopic-masses snippet alone must yield exactly one IsotopicMassesToken."""
        snippet = FileSnippets.isotopicMasses() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__, Tokens.IsotopicMassesToken)
Пример #3
0
    def testTokenizerDipoleMomentComponents(self): # fold>>
        """The dipole-moment-components snippet alone must yield one DipoleMomentComponentsToken."""
        snippet = FileSnippets.dipoleMomentComponents() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__, Tokens.DipoleMomentComponentsToken)
Пример #4
0
    def testTokenizerHOMOLUMOSeparation(self): # fold>>
        """The HOMO-LUMO separation snippet alone must yield one HOMOLUMOSeparationToken."""
        snippet = FileSnippets.HomoLumoSeparation() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__, Tokens.HOMOLUMOSeparationToken)
Пример #5
0
    def testTokenizerAtomsAndBasisSetsTable(self): # fold>>
        """The atoms-and-basis-sets table alone must yield one AtomsAndBasisSetsToken."""
        snippet = FileSnippets.atomsAndBasisSetsTable() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__, Tokens.AtomsAndBasisSetsToken)
Пример #6
0
    def testTokenizerNormalModesEigenvalues(self): # fold>>
        """The normal-modes eigenvalues snippet alone must yield one NormalModesEigenvaluesToken."""
        snippet = FileSnippets.normalModesEigenvalues() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__, Tokens.NormalModesEigenvaluesToken)
Пример #7
0
    def testTokenizerOptimizationInfo(self): # fold>>
        """The optimization-info snippet alone must yield one OptimizationInfoToken."""
        snippet = FileSnippets.optimizationInfo() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__, Tokens.OptimizationInfoToken)
Пример #8
0
    def testTokenizerGeometryConvergenceNumIterations(self): # fold>>
        """The geometry-convergence iteration-count snippet alone must yield one GeometryConvergenceNumIterationsToken."""
        snippet = FileSnippets.geometryConvergenceNumIterations() + "\n"
        writeToTestFile(snippet)

        token_list = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(len(token_list), 1)
        self.assertEqual(token_list[0].__class__,
                         Tokens.GeometryConvergenceNumIterationsToken)
Пример #9
0
 def testTokenizer(self): # fold>>
     """Five concatenated snippets must tokenize into five tokens, in input order."""
     pieces = [
         FileSnippets.fileHeader(),
         FileSnippets.centerOfMass(),
         FileSnippets.isotopicMasses(),
         FileSnippets.totalMass(),
         FileSnippets.momentsOfInertia(),
     ]
     # Each snippet is newline-terminated, matching the single-snippet tests.
     writeToTestFile("\n".join(pieces) + "\n")
     tokens = Dalton.tokenizeOutFile(testFilePath())
     self.assertEqual(len(tokens), 5)
     expected_classes = [
         Tokens.FileHeaderToken,
         Tokens.CenterOfMassToken,
         Tokens.IsotopicMassesToken,
         Tokens.TotalMassToken,
         Tokens.MomentsOfInertiaToken,
     ]
     for token, expected in zip(tokens, expected_classes):
         self.assertEqual(token.__class__, expected)
Пример #10
0
def main(filename):
    """Print every carbon-carbon bond-length entry from a Dalton output file.

    Writes one header line ("# <filename>") followed by one line per C-C
    pair: label1 value1 label2 value2 distance.  Exits with status 1 when
    the file contains no bond-length section.
    """
    tokens = Dalton.tokenizeOutFile(filename)

    # Collect bond-length sections explicitly instead of the original
    # filter(...)[-1] inside a bare "except:":  the bare except hid every
    # unrelated error (typos, Tokens API changes) behind the "no bond
    # length entries" message, and subscripting filter() breaks on Python 3.
    bond_length_tokens = [t for t in tokens
                          if t.__class__ == Tokens.BondLengthsToken]
    if not bond_length_tokens:
        print("Sorry, looks like there are no bond length entries in the file")
        sys.exit(1)

    # Use the last section: a geometry optimization can emit several, and
    # the final one reflects the converged structure.
    bond_length_token = bond_length_tokens[-1]
    atom_list = bond_length_token.atomList()

    print("# %s" % (filename, ))
    for entry in atom_list:
        # entry presumably is ((label1, val1), (label2, val2), distance) —
        # TODO confirm against Tokens.BondLengthsToken.atomList().
        # A label starting with "C" marks a carbon atom.
        if entry[0][0][0] == "C" and entry[1][0][0] == "C":
            print("%s %s %s %s %s" % (entry[0][0], entry[0][1], entry[1][0],
                                      entry[1][1], entry[2]))
Пример #11
0
def main(dalton_out):
    """Emit the final geometry of a Dalton output file as CML on stdout.

    Builds a <cml><molecule> document whose name is the output file's
    basename and whose atomArray carries one <atom> per geometry entry,
    with 3D coordinates, a namespaced atomic-number attribute and a
    <label> child.
    """
    tokenlist = Dalton.tokenizeOutFile(dalton_out)

    basis_token = getAtomsAndBasisSetsToken(tokenlist)
    geom_token = getFinalGeometry(tokenlist)

    datalist = basis_token.atomDataList()
    final_geometry = geom_token.atomList()

    namespace = "http://www.xml-cml.org/schema"
    doc = minidom.parseString(
        "<cml xmlns=\"http://www.xml-cml.org/schema\" xmlns:sb=\"http://forthescience.org/NS/mycml\"/>"
    )

    molecule = doc.createElementNS(namespace, "molecule")
    doc.documentElement.appendChild(molecule)

    # <name> holds the Dalton output filename without its extension.
    name_node = doc.createElementNS(namespace, "name")
    name_node.appendChild(doc.createTextNode(os.path.splitext(dalton_out)[0]))
    molecule.appendChild(name_node)

    atom_array = doc.createElementNS(namespace, "atomArray")
    molecule.appendChild(atom_array)

    # Atom ids are 1-based: a1, a2, ...
    for index, (label, group, coords) in enumerate(final_geometry, 1):
        x, y, z = coords
        atomic_number = labelToAtomicNumber(label, datalist)

        atom = doc.createElementNS(namespace, "atom")
        atom.setAttribute("id", "a%d" % index)
        atom.setAttribute("x3", str(x))
        atom.setAttribute("y3", str(y))
        atom.setAttribute("z3", str(z))
        # Atomic number lives in the custom "sb" namespace, not CML proper.
        atom.setAttributeNS("http://forthescience.org/NS/mycml",
                            "sb:atomicNumber", str(atomic_number))

        label_node = doc.createElementNS(namespace, "label")
        label_node.setAttribute("value", label)
        atom.appendChild(label_node)

        atom_array.appendChild(atom)

    print(doc.toprettyxml())
Пример #12
0
    def testTokenizerEmptyFile(self): # fold>>
        """Tokenizing an empty output file must produce an empty token list."""
        writeToTestFile("")

        result = Dalton.tokenizeOutFile(testFilePath())
        self.assertEqual(result, [])