Example #1
def show():
    # Load the pickled result set from disk and print each entry.
    path = r"C:\Users\inesmeya\Desktop\out\TestTest.pck"

    dbs = pload(path)
    # a = ProblemSetSolution(agent, roomset)
    for db in dbs:
        print db
Example #2
def test():
    # Set up an agent and room set, then load a pickled result and print it.
    ag = TestAgent(AnytimeBestFirstGraphSearch(), heuristics.PowerHeuristic2())
    rs = RoomSet("d")
    a = ProblemSetSolution(ag, rs)
    # path = r"C:\Users\inesmeya\Desktop\out\Test\p.p"
    path = r"C:\Users\inesmeya\Desktop\out\Test\e.txt"
    a = pload(path)
    print "F"
    print a
Example #3
def try_load_room_set(name, count, seed):
    '''
    Return the room set if its file exists,
    None otherwise.
    '''
    path = roomset_filepath(name, count, seed)

    if not os.path.exists(path):
        return None
    print "Room set loaded:", path
    return pload(path)
Example #4
    def load(self, path):
        # Load the pickled databases into this object.
        self.dbs = utils.pload(path)
Example #5
from traditional import idf
from preprocessing import corenlp as nlp

import logging

from utils import pload, pdump

training_path_base = "../../data/training/"

logging.basicConfig(
    level=logging.INFO,
    format=
    "%(levelname)s: %(asctime)s %(filename)s[line:%(lineno)d] %(message)s")

# Obtain training data
training_matrix = pload(training_path_base + "training_data.pickle")

sentence_pair_matrix = []
label_matrix = []

# Use Stanford CoreNLP to parse the sentence pairs
lemma_matrix = []
parse_tree_matrix = []  # for syntactic parse features
corpus = [[], []]  # for idf, bow, word embedding
parse_result_matrix = []  # for alignment features
dep_matrix = [[], []]


def parse_dependency(dep_triple):
    return " ".join([
        '|'.join(
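All of the snippets above call pload to read a pickled object from a file path, and Example #5 also imports its counterpart pdump from the same utils module. None of the examples show the helpers' actual source; as a minimal sketch, assuming the pair are thin wrappers around Python's pickle module (an assumption, not the projects' real code), they could look like this:

import pickle


def pdump(obj, path):
    # Assumed helper: serialize obj to the given file path with pickle.
    with open(path, "wb") as f:
        pickle.dump(obj, f)


def pload(path):
    # Assumed helper: read a pickled object back from the given file path.
    with open(path, "rb") as f:
        return pickle.load(f)

Keeping both wrappers in one utils module is what lets each example load saved results with a single call instead of repeating the open/pickle boilerplate.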