Example #1
    def testTokenizerOptimizationInfo(self):  # fold>>
        data = FileSnippets.optimizationInfo() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.OptimizationInfoToken)
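Every tokenizer test in this listing follows the same three-step pattern: write a snippet to a scratch file, run Dalton20.tokenizeOutFile over it, and assert that exactly one token of the expected class comes back. The writeToTestFile and testFilePath helpers are not part of the listing; a minimal sketch of what they plausibly look like (the scratch-file location is an assumption, the real module may differ):

import os
import tempfile

# Hypothetical helpers assumed by the tests below.
_TEST_FILE = os.path.join(tempfile.gettempdir(), "dalton_test.out")

def testFilePath():
    # Location of the scratch file that the tests tokenize.
    return _TEST_FILE

def writeToTestFile(data):
    # Overwrite the scratch file with the given snippet.
    f = open(_TEST_FILE, "w")
    f.write(data)
    f.close()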
Example #2
    def testTokenizerResponseHeader(self):  # fold>>
        data = FileSnippets.responseHeader() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.ResponseHeaderToken)
Example #3
    def testTokenizerCartesianCoordinates(self):  # fold>>
        data = FileSnippets.cartesianCoordinates() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.CartesianCoordinatesToken)
Example #4
    def testTokenizerTotalMass(self):  # fold>>
        data = FileSnippets.totalMass() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.TotalMassToken)
Example #5
    def testTokenizerMomentsOfInertia(self):  # fold>>
        data = FileSnippets.momentsOfInertia() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.MomentsOfInertiaToken)
Example #6
    def parse(self, output_filename):  # fold>>
        logging.debug("Output filename : " + output_filename)

        token_list = Dalton20.tokenizeOutFile(output_filename)

        result = {}
        result["input_molecule"] = None
        result["molecules"] = []
        result["triples"] = []
        result["run_data"] = []

        # Extract the tokens needed to rebuild the optimized geometry.
        cartesian_coordinates_token = tokenfilters.getCartesianCoordinatesToken(
            token_list)
        atom_basis_set_token = tokenfilters.getAtomsAndBasisSetsToken(
            token_list)
        final_geometry_token = tokenfilters.getFinalGeometryToken(token_list)

        if atom_basis_set_token is not None and final_geometry_token is not None:
            final_geometry, triples = self._parseGeometryOptimization(
                final_geometry_token, atom_basis_set_token)
            # Append inside the guard: final_geometry and triples are
            # unbound when either token is missing.
            result["molecules"].append(final_geometry)
            result["triples"].extend(triples)

        return result
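A hedged usage sketch for the parse method above; the enclosing class name and the output filename are assumptions, not part of the listing:

# DaltonOutParser is a hypothetical name for the enclosing class.
parser = DaltonOutParser()
result = parser.parse("water_optimization.out")
for molecule in result["molecules"]:
    print molecule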
Example #7
    def testTokenizerHOMOLUMOSeparation(self):  # fold>>
        data = FileSnippets.HomoLumoSeparation() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.HOMOLUMOSeparationToken)
Example #8
    def testTokenizerSevereError(self):  # fold>>
        data = FileSnippets.severeError() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.SevereErrorToken)
Example #9
    def testTokenizerDipoleMoment(self):  # fold>>
        data = FileSnippets.dipoleMoment() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.DipoleMomentToken)
Example #10
    def testTokenizerFinalGeometry(self):  # fold>>
        data = FileSnippets.finalGeometry() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.FinalGeometryToken)
Example #11
    def testTokenizerAtomsAndBasisSetsTable(self):  # fold>>
        data = FileSnippets.atomsAndBasisSetsTable() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.AtomsAndBasisSetsToken)
Example #12
    def testTokenizerGeometryConvergenceNumIterations(self):  # fold>>
        data = FileSnippets.geometryConvergenceNumIterations() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__,
                         Tokens.GeometryConvergenceNumIterationsToken)
Example #13
    def testTokenizerSecondHyperpolarizability(self):  # fold>>
        data = FileSnippets.secondHyperpolarizability() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__,
                         Tokens.SecondHyperpolarizabilityToken)
Example #14
    def testTokenizerFirstHyperpolarizabilityComponent(self):  # fold>>
        data = FileSnippets.firstHyperpolarizability() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__,
                         Tokens.FirstHyperpolarizabilityComponentToken)
Example #15
    def testTokenizerEndOfOptimizationHeader(self):  # fold>>
        data = FileSnippets.endOfOptimizationHeader() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__,
                         Tokens.EndOfOptimizationHeaderToken)
Example #16
    def testTokenizerNormalModesEigenvalues(self):  # fold>>
        data = FileSnippets.normalModesEigenvalues() + "\n"
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__,
                         Tokens.NormalModesEigenvaluesToken)
Example #17
    def testTokenizer(self):  # fold>>
        data = (FileSnippets.fileHeader() + "\n" +
                FileSnippets.centerOfMass() + "\n" +
                FileSnippets.isotopicMasses() + "\n" +
                FileSnippets.totalMass() + "\n" +
                FileSnippets.momentsOfInertia() + "\n")
        writeToTestFile(data)
        tokens = Dalton20.tokenizeOutFile(testFilePath())
        self.assertEqual(len(tokens), 5)
        self.assertEqual(tokens[0].__class__, Tokens.FileHeaderToken)
        self.assertEqual(tokens[1].__class__, Tokens.CenterOfMassToken)
        self.assertEqual(tokens[2].__class__, Tokens.IsotopicMassesToken)
        self.assertEqual(tokens[3].__class__, Tokens.TotalMassToken)
        self.assertEqual(tokens[4].__class__, Tokens.MomentsOfInertiaToken)
Example #18
def main(argv):  # fold>>

    options = _getOptions(argv)
    token_list = Dalton20.tokenizeOutFile(options.dalton_out_filename)

    error_tokens = _getErrorTokens(token_list)

    if options.verbose:
        for token in error_tokens:
            print "Error: " + token.reason()

    if len(error_tokens) != 0:
        sys.exit(1)

    sys.exit(0)
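The _getErrorTokens helper is not shown in the listing; a minimal sketch consistent with the SevereErrorToken filtering used elsewhere in these examples (an assumption, not the confirmed implementation):

def _getErrorTokens(token_list):
    # Keep only the tokens that flag a failed Dalton run.
    return filter(lambda x: x.__class__ == Tokens.SevereErrorToken,
                  token_list)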
Example #19
def main(argv):

    if len(argv) < 2:
        _usage()
        sys.exit(1)

    dalton_out_filename = argv[1]

    token_list = Dalton20.tokenizeOutFile(dalton_out_filename)
    tokens = filter(lambda x: x.__class__ == Tokens.DipoleMomentToken,
                    token_list)
    if len(tokens) == 0:
        raise Exception("Sorry. Could not find dipole token")

    # Take the last one; at the end of a geometry optimization there are two of them.
    print tokens[-1].dipole().value()
Example #20
def main(filename):
    tokens = Dalton20.tokenizeOutFile(filename)

    try:
        bond_length_token = filter(
            lambda x: x.__class__ == Tokens.BondLengthsToken, tokens)[-1]
    except IndexError:
        print "Sorry, looks like there are no bond length entries in the file"
        sys.exit(1)

    atom_list = bond_length_token.atomList()

    print "# %s" % (filename, )
    for entry in atom_list:
        # Keep only carbon-carbon bonds (atom labels starting with "C").
        if entry[0][0][0] == "C" and entry[1][0][0] == "C":
            print "%s %s %s %s %s" % (entry[0][0], entry[0][1],
                                      entry[1][0], entry[1][1],
                                      entry[2].asUnit(Units.angstrom).value())
Example #21
def main(argv):

    if len(argv) < 2:
        _usage()
        sys.exit(1)

    dalton_out_filename = argv[1]

    token_list = Dalton20.tokenizeOutFile(dalton_out_filename)
    tokens = filter(
        lambda x: x.__class__ == Tokens.SecondHyperpolarizabilityToken,
        token_list)
    if len(tokens) == 0:
        raise Exception("Sorry. Could not find gamma component tokens")

    print "# bfreq|cfreq|dfreq|components|value|equal to"
    for t in tokens:
        printToken(t)
Example #22
def main(dalton_out, cml_out):
    tokenlist = Dalton20.tokenizeOutFile(dalton_out)

    atom_basis_set_token = getAtomsAndBasisSetsToken(tokenlist)
    geometry_token = getFinalGeometry(tokenlist)

    datalist = atom_basis_set_token.atomDataList()

    final_geometry = geometry_token.atomList()

    # Build the CML skeleton: a root <cml> element declaring the CML
    # schema namespace plus a custom sb: namespace.
    doc = minidom.parseString(
        '<cml xmlns="http://www.xml-cml.org/schema" '
        'xmlns:sb="http://forthescience.org/NS/mycml"/>')
    namespace = "http://www.xml-cml.org/schema"
    molecule = doc.createElementNS(namespace, "molecule")
    doc.documentElement.appendChild(molecule)

    name = doc.createElementNS(namespace, "name")
    molecule.appendChild(name)
    text = doc.createTextNode(os.path.splitext(dalton_out)[0])
    name.appendChild(text)

    atomarray = doc.createElementNS(namespace, "atomArray")
    molecule.appendChild(atomarray)

    # Emit one <atom> per entry in the final geometry, with Cartesian
    # coordinates in bohr and the atomic number in the sb: namespace.
    for atom_id, atom_entry in enumerate(final_geometry):
        label, group, coords = atom_entry
        atomic_number = labelToAtomicNumber(label, datalist)
        xyz = coords.asUnit(Units.bohr).value()

        atom = doc.createElementNS(namespace, "atom")
        atom.setAttribute("id", "a" + str(atom_id + 1))
        atom.setAttribute("x3", str(xyz[0]))
        atom.setAttribute("y3", str(xyz[1]))
        atom.setAttribute("z3", str(xyz[2]))
        atom.setAttributeNS("http://forthescience.org/NS/mycml",
                            "sb:atomicNumber", str(atomic_number))
        label_node = doc.createElementNS(namespace, "label")
        label_node.setAttribute("value", label)
        atom.appendChild(label_node)
        atomarray.appendChild(atom)

    output = open(cml_out, "w")
    output.write(doc.toprettyxml())
    output.close()
Example #23
def main(argv):

    if len(argv) < 2:
        _usage()
        sys.exit(1)

    dalton_out_filename = argv[1]

    token_list = Dalton20.tokenizeOutFile(dalton_out_filename)
    tokens = filter(lambda x: x.__class__ == Tokens.LinearResponseToken,
                    token_list)
    if len(tokens) == 0:
        raise Exception("Sorry. Could not find linear response tokens")

    print "# afreq|bfreq|components|value"
    for t in tokens:
        print "%7.6f|%7.6f|%s|%.8f|" % (t.AFreq().value(), t.BFreq().value(),
                                        formatComponents(
                                            t.components()), t.alpha().value())
Example #24
    def __init__(self, dalton_out_filename):  # fold>>
        token_list = Dalton20.tokenizeOutFile(dalton_out_filename)

        atom_basis_set_token = self._getAtomsAndBasisSetsToken(token_list)
        geometry_token = self._getFinalGeometryToken(token_list)
        symmetry_token = self._getSymmetryToken(token_list)

        # Assemble a Molecule from the symmetry, basis-set, and
        # final-geometry tokens.
        mol = Molecule()
        mol.symmetry_generators = symmetry_token.generators()
        mol.basis_set = atom_basis_set_token.basisSet()

        for label, symmetry, coords in geometry_token.atomList():
            element = _labelToElement(label,
                                      atom_basis_set_token.atomDataList())
            mol.elements_list.append(element)
            mol.labels_list.append(label)
            mol.symmetries_list.append(symmetry)
            mol.coordinates_list.append(coords)

        self._molecule = mol
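The class that owns this __init__ is not named in the listing; assuming it is a thin wrapper around Molecule (the class name below is hypothetical), construction would look like:

# MoleculeBuilder is a hypothetical name for the enclosing class.
# The assembled Molecule is kept on the instance as self._molecule.
builder = MoleculeBuilder("water_optimization.out")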
Example #25
def main(argv):

    if len(argv) < 2:
        _usage()
        sys.exit(1)

    dalton_out_filename = argv[1]

    token_list = Dalton20.tokenizeOutFile(dalton_out_filename)
    tokens = filter(
        lambda x: x.__class__ == Tokens.FirstHyperpolarizabilityComponentToken,
        token_list)
    if len(tokens) == 0:
        raise Exception("Sorry. Could not find beta component tokens")

    print "# bfreq|cfreq|components|value|equal to"
    for t in tokens:
        if t.beta() is not None:
            print "%7.6f|%7.6f|%s|%.8f|" % (t.BFreq().value(), t.CFreq().value(
            ), formatComponents(t.components()), t.beta().value())
        else:
            print "%7.6f|%7.6f|%s||%s" % (t.BFreq().value(), t.CFreq().value(),
                                          formatComponents(t.components()),
                                          formatComponents(t.refersTo()))
Example #26
    def testTokenizerEmptyFile(self):  # fold>>
        writeToTestFile("")
        tokens = Dalton20.tokenizeOutFile(testFilePath())

        self.assertEqual(tokens, [])