Example #1 (rating: 0)
File: main.py — Project: bluemoon/nlp
    def main(self):
        semantics = Semantics()

        logParser = irc_logParser()
        log_data = logParser.loadLogs("logs/2009-08-1*", limit=200)

        for sentences in log_data:
            rule_eng = rule_engine()
            if not sentences:
                continue

            if self.options.relex:
                r = relex.relex()
                sentence = r.process(sentences)

                for x in sentence:
                    y = x.split("\n")
                    for z in y:
                        if z:
                            print z

            if self.options.default:

                from tagger import braubt_tagger

                analogy = analysis.Analogies()

                s = linkGrammar.sentence(sentences)
                if s:
                    normal_words = sentences.split(" ")
                    container = sentence(s, normal_words)
                    container.atom = semantics.semanticsToAtoms(container)

                    test_rules(container)
                    for a in container.diagram:
                        debug(a)

                    analogy.similar(container)
Example #2 (rating: 0)
    def main(self):
        semantics = Semantics()
        
        logParser = irc_logParser()
        log_data = logParser.loadLogs('logs/2009-08-1*', limit=200)
        
        for sentences in log_data:
            rule_eng  = rule_engine()
            if not sentences:
                continue

            if self.options.relex:
                r = relex.relex()
                sentence = r.process(sentences)

                for x in sentence:
                    y = x.split('\n')
                    for z in y:
                        if z:
                            print z
                    
            if self.options.default:

                from tagger import braubt_tagger
                analogy = analysis.Analogies()
                
                s = linkGrammar.sentence(sentences)
                if s:
                    normal_words = sentences.split(' ')
                    container = sentence(s, normal_words)
                    container.atom = semantics.semanticsToAtoms(container)

                    test_rules(container)
                    for a in container.diagram:
                        debug(a)

                    analogy.similar(container)
Example #3 (rating: 0)
                
            ## end: for _file in files    
            fhandle.close()
        print 'log bytes: %d' % total_bytes
        return output

if __name__ == '__main__':
    # Script entry point: parse a batch of IRC log sentences, print
    # their link-grammar output, and run semantic analysis on each.
    logParser = irc_logParser()
    # Load at most 200 sentences from the August 2009 log files.
    log_data = logParser.loadLogs('logs/2009-08-1*', limit=200)
    p = Print()
    for sentence in log_data:
        if not sentence:
            continue
        
        # Fresh grammar/semantics state for every sentence.
        grammar = Grammar()
        semantics = Semantics()
        v = linkGrammar.constituents(sentence)  # constituent parse
        s = linkGrammar.sentence(sentence)      # linkage parse
        if s:
            p.print_sentence(s[0])
            p.print_diagram(s)
            
        # NOTE(review): s may be falsy here; presumably
        # sentence_to_Tree tolerates that — confirm.
        grammar.sentence_to_Tree(s)
        tree = grammar.const_toTree(v)
        
        sem_output = semantics.handleSemantics(s)
        debug(sem_output)
        
        # Skip sentences whose constituents did not yield a tree.
        # (Snippet appears truncated here; more loop body may follow.)
        if not tree:
            continue
Example #4 (rating: 0)
            ## end: for _file in files
            fhandle.close()
        print 'log bytes: %d' % total_bytes
        return output


if __name__ == '__main__':
    # Script entry point: parse a batch of IRC log sentences, print
    # their link-grammar output, and run semantic analysis on each.
    logParser = irc_logParser()
    # Load at most 200 sentences from the August 2009 log files.
    log_data = logParser.loadLogs('logs/2009-08-1*', limit=200)
    p = Print()
    for sentence in log_data:
        if not sentence:
            continue

        # Fresh grammar/semantics state for every sentence.
        grammar = Grammar()
        semantics = Semantics()
        v = linkGrammar.constituents(sentence)  # constituent parse
        s = linkGrammar.sentence(sentence)      # linkage parse
        if s:
            p.print_sentence(s[0])
            p.print_diagram(s)

        # NOTE(review): s may be falsy here; presumably
        # sentence_to_Tree tolerates that — confirm.
        grammar.sentence_to_Tree(s)
        tree = grammar.const_toTree(v)

        sem_output = semantics.handleSemantics(s)
        debug(sem_output)

        # Skip sentences whose constituents did not yield a tree.
        # (Snippet appears truncated here; more loop body may follow.)
        if not tree:
            continue