Example #1
0
def drawPicture(filename):
    """Parse *filename*, run semantic analysis on it, and render the result.

    NOTE(review): ``DrawPricture``, ``parser_program``, ``paramter_list`` and
    ``paramters`` mirror the (misspelled) names of the external API and are
    deliberately kept as-is — renaming them here would break the calls.
    """
    canvas = DrawPricture(filename)
    source = Parser(filename)
    source.parser_program()
    analyzer = Semantics(paramter_list=source.paramters, drawing=canvas)
    analyzer.Semantic()
    canvas.draw()
 def writeCUEDFile(self, cued_fn, semantics):
     """Write *semantics* to *cued_fn* in CUED format, one item per line.

     Each line has the form ``<text> <=> <CUED semantics>``.

     :param cued_fn: output file name (written as UTF-8)
     :param semantics: iterable of objects with ``text`` and ``semantics``
         attributes
     """
     # `with` closes the file even if a write raises (replaces try/finally).
     with codecs.open(cued_fn, "w", "UTF-8") as fw:
         for smntcs in semantics:
             # NOTE(review): the original also bound
             # removeConceptsFromSemantics(smntcs.semantics) to an unused
             # local; dropped here on the assumption the helper is pure —
             # confirm it has no side effects.
             smntcsWithoutText = removeTextFromSemantics(smntcs.semantics)
             smntcsWithoutText = Semantics("id", smntcsWithoutText, "x")
             fw.write(smntcs.text + " <=> " + smntcsWithoutText.getCUEDSemantics() + "\n")
 def writeCUEDFile(self, cued_fn, semantics):
     """Write *semantics* to *cued_fn* in CUED format, one item per line.

     Each line has the form ``<text> <=> <CUED semantics>``.

     :param cued_fn: output file name (written as UTF-8)
     :param semantics: iterable of objects with ``text`` and ``semantics``
         attributes
     """
     # `with` closes the file even if a write raises (replaces try/finally).
     with codecs.open(cued_fn, 'w', 'UTF-8') as fw:
         for smntcs in semantics:
             # NOTE(review): the original also bound
             # removeConceptsFromSemantics(smntcs.semantics) to an unused
             # local; dropped here on the assumption the helper is pure —
             # confirm it has no side effects.
             smntcsWithoutText = removeTextFromSemantics(smntcs.semantics)
             smntcsWithoutText = Semantics('id', smntcsWithoutText, 'x')
             fw.write(smntcs.text + ' <=> ' + smntcsWithoutText.getCUEDSemantics() + '\n')
 def writePTBFile(self, ptb_fn, semantics):
     """Write *semantics* to *ptb_fn* in PTB bracket format, one per line.

     Items whose semantics cannot be rendered (``getPTBSemantics`` raising
     ValueError) are written as the placeholder tree ``(TOP x)`` so the
     output keeps one line per input item.

     :param ptb_fn: output file name (written as UTF-8)
     :param semantics: iterable of objects with a ``semantics`` attribute
     """
     # `with` closes the file even if a write raises (replaces try/finally).
     with codecs.open(ptb_fn, "w", "UTF-8") as fw:
         for smntcs in semantics:
             smntcsWithoutText = removeTextFromSemantics(smntcs.semantics)
             smntcsWithoutText = Semantics("id", smntcsWithoutText, "x")
             try:
                 fw.write(smntcsWithoutText.getPTBSemantics() + "\n")
             except ValueError:
                 # Fall back to a minimal valid PTB tree.
                 fw.write("(TOP x)\n")
 def writePTBFile(self, ptb_fn, semantics):
     """Write *semantics* to *ptb_fn* in PTB bracket format, one per line.

     Items whose semantics cannot be rendered (``getPTBSemantics`` raising
     ValueError) are written as the placeholder tree ``(TOP x)`` so the
     output keeps one line per input item.

     :param ptb_fn: output file name (written as UTF-8)
     :param semantics: iterable of objects with a ``semantics`` attribute
     """
     # `with` closes the file even if a write raises (replaces try/finally).
     with codecs.open(ptb_fn, 'w', 'UTF-8') as fw:
         for smntcs in semantics:
             smntcsWithoutText = removeTextFromSemantics(smntcs.semantics)
             smntcsWithoutText = Semantics('id', smntcsWithoutText, 'x')
             try:
                 fw.write(smntcsWithoutText.getPTBSemantics() + '\n')
             except ValueError:
                 # Fall back to a minimal valid PTB tree.
                 fw.write("(TOP x)\n")
Example #6
0
def semantic_analysis(sym, sig, ast, device, errorlog):
  """Run the semantic-analysis pass over *ast* and return the walker.

  Raises QuietError if the error log recorded any problem, and
  SystemExit when only semantic analysis was requested.

  NOTE(review): ``v`` and ``sem_only`` are globals defined outside this
  view — presumably verbosity and a command-line flag; confirm.
  """
  vmsg(v, "Performing semantic analysis")
  analyser = Semantics(sym, sig, device, errorlog)
  analyser.walk_program(ast)

  # Abort quietly when the walk reported problems.
  if errorlog.any():
    raise QuietError()

  # Stop the pipeline here if the caller asked for analysis only.
  if sem_only:
    raise SystemExit()

  return analyser
Example #7
0
    def parse(cls, string):
        """Parse *string* and return a Semantics object.

        An optional quantifier prefix is separated from the relation part
        by ``|``; the remainder is consumed as a sequence of conjoined
        relations via RelationParser.
        """
        string = string.replace(" ", "")

        # Split off the quantifier prefix, if any.
        quant_part = ""
        if "|" in string:
            quant_part, string = string.split("|")

        # Consume the conjoined relations one at a time until the input
        # is exhausted.
        parsed = []
        while string:
            rel, string = RelationParser.parse(string)
            parsed.append(rel)
        result = Semantics(parsed)

        # Attach any quantifier information parsed from the prefix.
        result.quantification_dict = QuantificationParser.parse(quant_part)

        return result
 def readSemantics(self, files, dataSets, parseType, origDataSet, txtInput=False, pdtDir=None, inputChain='none'):
     """Yield lists of Semantics objects read from *files*.

     Generator: for every record produced by the input pipeline, yields
     one Semantics per text variant. With ``txtInput`` the files are read
     as plain text (wrapped in a PDT reader when lemma/pos data is
     requested); otherwise DXML input is assumed.

     :raises ValueError: lemma/pos requested in text mode without *pdtDir*
     """
     if not txtInput:
         source = input.MultiReader(files, input.DXMLReader)
     else:
         source = input.MultiReader(files, input.TXTReader)
         # lemma/pos annotations require the PDT tools on disk.
         if 'lemma' in dataSets or 'pos' in dataSets:
             if pdtDir is None:
                 raise ValueError("Couldn't find PDT-2.0, no directory supplied")
             source = input.PDTReader(pdtDir, source, online=False)
     source = input.InputChain(inputChain, source)
     gen = input.InputGenerator(source, dataSets, origDataSet)
     for da_fn, da_id, da_semantics, da_txts in gen.readInputs():
         yield [Semantics(da_id, da_semantics, ' '.join(txt), parseType)
                for txt in da_txts]
Example #9
0
    def fromxml(cls, vn_class, xml, frame_num):
        """Build a Frame object from the VerbNet XML representation."""
        desc = xml.find("DESCRIPTION")
        primary = desc.attrib.get("primary", "")
        secondary = desc.attrib.get("secondary", "")
        sem_dict, reverse_lookup = Semantics.semdict_fromxml(xml.find("SEMANTICS"))

        # The SYNTAX section fixes the order of NP variables; map that
        # order onto the semantic variables for later use in substitution.
        np_order = [np.attrib["value"] for np in xml.find("SYNTAX").findall("NP")]
        sem_vars = [v for s in sem_dict.values() for v in s.variables()]

        np_var_order = []
        for np_name in np_order:
            candidates = [v for v in sem_vars if reverse_lookup[v].name == np_name]
            # Skip "?" (missing) variables.
            if candidates and not candidates[0].missing:
                np_var_order.append(candidates[0])

        example = xml.find("EXAMPLES").find("EXAMPLE").text.replace('"', '')
        return Frame(vn_class, frame_num, primary, secondary, sem_dict, np_var_order, example)
Example #10
0
 def __init__(self, semantics=None):
     """Store the given semantics, defaulting to a fresh Semantics().

     The original signature used a mutable default (``semantics=Semantics()``),
     which is evaluated once at function-definition time and therefore shared
     by every instance constructed without an argument. A None sentinel gives
     each instance its own default object.
     """
     self.semantics = Semantics() if semantics is None else semantics
Example #11
0
 def setUp(self):
     """Create a fresh Semantics instance before each test case."""
     self.semantics = Semantics()
Example #12
0
class TestSemantics(TestCase):
    """Smoke tests for the Semantics sentence generator."""

    def setUp(self):
        """Create a fresh Semantics instance before each test case."""
        self.semantics = Semantics()

    def test_generate_sentence_with_keyword(self):
        """Generating from keywords should not raise.

        NOTE(review): ``_generate_sentence_with_keyworkd`` matches the
        (misspelled) method name on Semantics and is kept as-is.
        """
        self.semantics._generate_sentence_with_keyworkd(['東京', 'とは'])
Example #13
0
from sentence import Sentence

import config as config

# Configure root logging: timestamped, level-tagged messages at DEBUG level.
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)

if __name__ == '__main__':
    # Announce the mimicry model on the bot account.
    bot = Twitter(consumer_key=config.CONSUMER_KEY, consumer_secret=config.CONSUMER_SECRET,
                  access_token_key=config.ACCESS_TOKEN, access_token_secret=config.ACCESS_TOKEN_SECRET)
    bot.update_status(config.MIMICRY_MODEL)

    # Fetch the model account's latest tweet; without one there is
    # nothing to mimic.
    latest = bot.get_latest_status_text(config.MIMICRY_MODEL)
    if latest is None:
        logging.error("Mimicry target's tweet is None.")
        exit()

    # Find words similar to the model tweet, train the sentence
    # generator on the bot's own posting history, then post the result.
    semantics = Semantics()
    keywords = semantics.get_similar_words(latest)
    history = bot.get_all_status_text()
    generator = Sentence()
    generator.learn(history)
    generated = generator.generate(keywords)
    bot.post(generated)