Example #1
File: executor.py Project: Machyne/pal
 def post(self):
     # Copy the POSTed form fields into a plain dict, then pass it through each pipeline stage.
     params = {x: request.form[x] for x in request.form}
     StandardNLP.process(params)
     FeatureExtractor.process(params)
     Classifier.process(params)
     Executor.process(params)
     return params
Example #2
File: hill_climb.py Project: Machyne/pal
def get_confs_kws(query, services):
    params = {'query': query}
    StandardNLP.process(params)
    keywords = find_keywords(params['features']['tokens'])
    confidences = {}
    for name, heuristic in services.iteritems():  # Python 2 dict iteration; use services.items() on Python 3
        confidences[name] = heuristic.run_heuristic(keywords)
    return confidences, keywords + ['BIAS']
Example #3
File: engine.py Project: Machyne/pal
 def process(cls, params):
     # Validate the request first; abort with a 404 if validation reported an error.
     Validator.process(params)
     if 'error' in params:
         cls.LOGGER.error(message=params['error'])
         abort(404, message=params['error'])
     # Run the full pipeline, each stage reading from and writing to the shared params dict.
     StandardNLP.process(params)
     FeatureExtractor.process(params)
     Classifier.process(params)
     Executor.process(params)
     Logger.process(params)
     Sanitizer.process(params)
Example #4
File: noun_finder.py Project: Machyne/pal
 def post(self):
     params = {x: request.form[x] for x in request.form}
     StandardNLP.process(params)
     return find_nouns(params['features']['pos'])[1]
Example #5
 def post(self):
     params = {x: request.form[x] for x in request.form}
     StandardNLP.process(params)
     return classify_question(params['features']['tokens'])
Example #6
 def post(self):
     params = {x: request.form[x] for x in request.form}
     StandardNLP.process(params)
     return is_question(params["features"]["tokens"])
Example #7
File: test_nlp.py Project: Machyne/pal
def test_nouns():
    for test in test_cases:
        sent, tokens, exp_features = test
        processed = StandardNLP.process(sent)
        features = FeatureExtractor.extract_features(processed)
        assert features['nouns'] == exp_features['nouns']
Example #8
File: test_nlp.py Project: Machyne/pal
def test_pre_processing():
    for test in test_cases:
        sent, tokens, exp_features = test
        processed = StandardNLP.process(sent)
        assert processed['pos'] == tokens
Example #9
 def post(self):
     params = {x: request.form[x] for x in request.form}
     StandardNLP.process(params)
     return get_tense(params['features']['pos'])
Example #10
 def post(self):
     params = {x: request.form[x] for x in request.form}
     StandardNLP.process(params)
     return find_keywords(params['features']['tokens'])
Example #11
File: test_nlp.py Project: Machyne/pal
def test_question_type():
    for test in test_cases:
        sent, tokens, exp_features = test
        processed = StandardNLP.process(sent)
        features = FeatureExtractor.extract_features(processed)
        assert features['questionType'] == exp_features['questionType']