Code example #1
    def testZPVEToken(self): # fold>>
        data = FileSnippets.zpve()+"\n"
        writeToTestFile(data)
        tokens = GRRM.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.ZPVEToken)
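
This test (and the ones that follow) relies on two helpers, writeToTestFile and testFilePath, which are not part of the excerpt. A minimal sketch of what they are assumed to do, writing the snippet to a scratch file and returning its path; the file location used here is hypothetical:

    import os
    import tempfile

    _TEST_FILE = os.path.join(tempfile.gettempdir(), "grrm_test.out")  # hypothetical location

    def testFilePath():
        # path of the scratch file that GRRM.tokenizeOutFile reads back
        return _TEST_FILE

    def writeToTestFile(data):
        # dump the snippet so the tokenizer sees exactly the text under test
        with open(testFilePath(), "w") as f:
            f.write(data)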
Code example #2
    def testStructureHeaderToken(self): # fold>>
        data = FileSnippets.structureHeader()+"\n"
        writeToTestFile(data)
        tokens = GRRM.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.StructureHeaderToken)
Code example #3
    def testHeaderTransitionToken(self): # fold>>
        data = FileSnippets.headerTransition()+"\n"
        writeToTestFile(data)
        tokens = GRRM.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.HeaderTransitionToken)
Code example #4
    def testNormalModesToken(self): # fold>>
        data = FileSnippets.normalModes()+"\n"
        writeToTestFile(data)
        tokens = GRRM.tokenizeOutFile(testFilePath())

        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0].__class__, Tokens.NormalModesToken)
Code example #5
    def parse(self, input_filename, DC_filename, dc_filename, EQ_filename, TS_filename): # fold>>
        logging.debug("Input filename : "+input_filename)
        logging.debug("DC filename : "+DC_filename)
        logging.debug("dc filename : "+dc_filename)
        logging.debug("EQ filename : "+EQ_filename)
        logging.debug("TS filename : "+TS_filename)

        result = {}
        result["input_molecule"] = None
        result["run_data"] = None
        result["molecules"] = []
        result["triples"] = []
        # maps file-local structure ids to uuids; filled in by each parse step below
        id_to_uuid_mapper = {}
        # connections are collected from every file and resolved once all ids are known
        all_connections = []

        # the input file yields the starting molecule and the run settings
        input_molecule, run_data = _parseInputTokenList(GRRMInput.tokenize(input_filename))
        result["input_molecule"] = input_molecule
        result["run_data"] = run_data

        # each output file contributes molecules, triples and an id-to-uuid mapping;
        # the dc and TS parsers additionally return connections between structures
        molecules, triples, mapper = _parseDCTokenList(GRRM.tokenizeOutFile(DC_filename))
        result["molecules"].extend(molecules)
        result["triples"].extend(triples)
        id_to_uuid_mapper.update(mapper)

        molecules, triples, mapper, connections = _parsedcTokenList(GRRM.tokenizeOutFile(dc_filename))
        result["molecules"].extend(molecules)
        result["triples"].extend(triples)
        id_to_uuid_mapper.update(mapper)
        all_connections.extend(connections)

        molecules, triples, mapper = _parseEQTokenList(GRRM.tokenizeOutFile(EQ_filename))
        result["molecules"].extend(molecules)
        result["triples"].extend(triples)
        id_to_uuid_mapper.update(mapper)

        molecules, triples, mapper, connections = _parseTSTokenList(GRRM.tokenizeOutFile(TS_filename))
        result["molecules"].extend(molecules)
        result["triples"].extend(triples)
        id_to_uuid_mapper.update(mapper)
        all_connections.extend(connections)
   
        # connections reference file-local ids, so they can only be resolved
        # after every file has contributed its id-to-uuid mapping
        resolved_connection_list = _resolveConnections(all_connections, id_to_uuid_mapper)
        connection_triples = _createConnectionTriples(resolved_connection_list)
        result["triples"].extend(connection_triples)
        return result
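
A hedged usage sketch of how parse might be driven; only the five-argument signature comes from the method above, while the owning class name GRRMOutputParser and the file names are assumptions:

    import logging

    logging.basicConfig(level=logging.DEBUG)

    parser = GRRMOutputParser()  # hypothetical name of the class that defines parse()
    result = parser.parse("run.com",     # GRRM input file
                          "run_DC.log",  # hypothetical output file names
                          "run_dc.log",
                          "run_EQ.log",
                          "run_TS.log")
    print(len(result["molecules"]), "molecules and", len(result["triples"]), "triples parsed")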
Code example #6
    def testTokenizerEmptyFile(self): # fold>>
        writeToTestFile("")
        tokens = GRRM.tokenizeOutFile(testFilePath())

        self.assertEqual(tokens, [])
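
The assertions in these tests all dispatch on a token's class. A small sketch, under the same assumption that tokens carry their type as their class, of how a caller might summarize the token stream produced for a real output file:

    def summarizeTokens(tokens):
        # count how many tokens of each class the tokenizer produced
        counts = {}
        for token in tokens:
            name = token.__class__.__name__
            counts[name] = counts.get(name, 0) + 1
        return counts

    # e.g. summarizeTokens(GRRM.tokenizeOutFile("run.log")), with a hypothetical file name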