# ======== Code example #1 ========
import datetime
import sqlite3

import requests
import time
import json

from requests_oauthlib import OAuth1
from tweepy import Stream, OAuthHandler
from tweepy.streaming import StreamListener
from log import log_config

# Module-level logger named after this source file.
logger = log_config.getLogger('twitter_mod.py')


class SnetListener(StreamListener):
    """ Extended Tweet Listener
    """
    def __init__(self, msg_limit=0, time_limit=0):
        """Set up collection state for a new streaming session.

        :param msg_limit: message-count cap (0 by default -- presumably
            means "no cap"; confirm against the handler that reads it)
        :param time_limit: run-time cap in seconds (same caveat as above)
        """
        logger.debug("SnetListener INIT")
        # Caller-supplied limits and the running message count.
        self.time_limit = time_limit
        self.msg_limit = msg_limit
        self.msg_counter = 0
        # Wall-clock timestamp taken at construction.
        self.start_time = time.time()
        # Accumulators: received data and the last stream error code seen.
        self.sentences = []
        self.status_error_code = None
        super(SnetListener, self).__init__()

    def on_data(self, data):
        """Called when raw data is received from connection.
        
# ======== Code example #2 ========
import pathlib
import subprocess
import threading
import time
import sys
import argparse

from services import registry
from log import log_config

# Module-level logger named after this source file.
logger = log_config.getLogger('run_service.py')


def main():
    logger.debug('call => main()')
    parser = argparse.ArgumentParser(description="Run services")
    parser.add_argument("--no-daemon",
                        action="store_false",
                        dest="run_daemon",
                        help="do not start the daemon")
    parser.add_argument("--daemon-config-path",
                        help="Path to daemon configuration file",
                        required=False)
    args = parser.parse_args()
    root_path = pathlib.Path(__file__).absolute().parent

    # All services modules go here
    service_modules = ['services.named_entity_recognition']

    # Call for all the services listed in service_modules
    start_all_services(root_path, service_modules, args.run_daemon,
# ======== Code example #3 ========
import path_setup
import grpc
from services.service_spec import sentiment_analysis_rpc_pb2_grpc as grpc_services
from services.service_spec import sentiment_analysis_rpc_pb2 as rpc
from test_data import test_sentences
from services import registry
from log import log_config

# Logger configured for test output; `channel` is assigned below once the
# gRPC channel has been opened.
logger = log_config.getLogger('test_service.py', test=True)
channel = None

if __name__ == '__main__':

    try:
        logger.debug('call => __name == __main__')
        # Service ONE - Sentiment Analysis
        # Build the endpoint from the port registered under
        # 'sentiment_analysis' in the shared service registry.
        endpoint = 'localhost:{}'.format(
            registry['sentiment_analysis']['grpc'])
        # Open a gRPC channel
        channel = grpc.insecure_channel('{}'.format(endpoint))

    except KeyError as e:
        # Raised when 'sentiment_analysis' (or its 'grpc' entry) is
        # missing from the registry; the test continues with channel=None.
        print(e)

    try:

        logger.debug("call => SentimentAnalysis() Service Test Starting... ")
        # SentimentAnalysis() Method Test
        # create a stub (client)
        stub = grpc_services.SentimentAnalysisStub(channel)
        # create a valid request message
# ======== Code example #4 ========
import path_setup
import base64
import compile_proto
from services import named_entity_recognition as ner
from test_data import b64_sentences
from log import log_config
# Logger configured for test output.
logger = log_config.getLogger('run_unit_test.py', test=True)


class Request(object):
    """Minimal stand-in for a gRPC request message used by the unit test.

    Exposes a single ``value`` attribute, mirroring the real request type.
    """

    def __init__(self):
        # Starts empty; the test assigns base64-encoded sentences here.
        self.value = ""


def test_compiled():
    """Assert that the protobuf compilation step (compile_proto) reported success."""
    assert compile_proto.success


def test_recognize():
    """
    Test Named Entity Recognition
    :return:
    """

    servicer = ner.RecognizeMessageServicer()
    request = Request()
    request.value = b64_sentences.senteces()
    context = object()
    response = servicer.Recognize(request, context)
    decoded_result = base64.b64decode(response.value).decode('utf-8')
# ======== Code example #5 ========
import base64
import concurrent.futures as futures
import os

import grpc
from nltk.sentiment import SentimentIntensityAnalyzer

from log import log_config
from services import common
from services.modules import consensus_mod, twitter_mod
from services.service_spec import sentiment_analysis_rpc_pb2_grpc as grpc_services
from services.service_spec.sentiment_analysis_rpc_pb2 import OutputMessage

# Services Path
# Resolve this file's directory, its parent, and the grandparent directory
# (kept as plain strings via os.path).
current_path = os.path.dirname(os.path.realpath(__file__))
parent_path = os.path.abspath(os.path.join(current_path, os.pardir))
service_root_path = os.path.abspath(os.path.join(parent_path, os.pardir))

# Module-level logger named after this source file.
logger = log_config.getLogger('sentiment_analysis.py')


class ShowMessageServicer(grpc_services.ShowMessageServicer):
    """ Create a class to be added to the gRPC server
    derived from the protobuf codes.
    """
    def __init__(self):
        """Create the servicer; only emits a debug trace (no state is set up)."""
        logger.debug("call => ShowMessageServicer()")

    def Show(self, request, context):
        """ The method that will be exposed to the snet-cli call command.

        :param request: incoming data
        :param context: object that provides RPC-specific information (timeout, etc).
        :return:
# ======== Code example #6 ========
import sys
import grpc
import base64
import concurrent.futures as futures
from services.modules import entity_recognizer_mod
from services.service_spec import named_entity_recognition_rpc_pb2_grpc as grpc_bt_grpc
from services.service_spec.named_entity_recognition_rpc_pb2 import OutputMessage
from services import common
from log import log_config

# Module-level logger named after this source file.
logger = log_config.getLogger('named_entity_recognition.py')


# Create a class to be added to the gRPC server
# derived from the protobuf codes.
class ShowMessageServicer(grpc_bt_grpc.ShowMessageServicer):
    def __init__(self):
        """Create the servicer; only emits a debug trace (no state is set up)."""
        # Just for debugging purpose.
        logger.debug("ShowMessageServicer created")

    # The method that will be exposed to the snet-cli call command.
    # request: incoming data
    # context: object that provides RPC-specific information (timeout, etc).
    def Show(self, request, context):
        # In our case, request is a InputMessage() object (from .proto file)
        self.value = request.value

        # To respond we need to create a OutputMessage() object (from .proto file)
        self.result = OutputMessage()

        self.result.value = "Processed => " + self.value
# ======== Code example #7 ========
import path_setup
import grpc
from services.service_spec import named_entity_recognition_rpc_pb2_grpc as grpc_bt_grpc
from services.service_spec import named_entity_recognition_rpc_pb2 as grpc_bt_pb2
from services import registry
from test_data import test_sentences
from log import log_config

# Module-level logger; `channel` is assigned below once the gRPC channel opens.
logger = log_config.getLogger('test_service.py')
channel = None

if __name__ == '__main__':

    try:
        logger.debug('call => __name == __main__')
        logger.debug("call => Creating channel() Starting... ")
        # Build the endpoint from the port registered under
        # 'named_entity_recognition' in the shared service registry.
        endpoint = 'localhost:{}'.format(
            registry['named_entity_recognition']['grpc'])
        # Open a gRPC channel
        channel = grpc.insecure_channel('{}'.format(endpoint))

    except Exception as e:
        # BUG FIX: the original concatenated a str with the exception object
        # ("..." + e), which raises TypeError inside the handler and masks
        # the real error. Convert the exception to str first.
        logger.debug("Error found Creating Channel => " + str(e))

    try:
        logger.debug("call => RecognizeMessage() Method Test Starting... ")
        # RecognizeMessage() Method Test
        # create a stub (client)
        stub = grpc_bt_grpc.RecognizeMessageStub(channel)
        # create a valid request message
        test_data = test_sentences.senteces()
# ======== Code example #8 ========
import os
from log import log_config
import nltk
from nltk import pos_tag
from nltk.tag import StanfordNERTagger
from nltk.tokenize import word_tokenize
from nltk.chunk import conlltags2tree
from nltk.tree import Tree

logger = log_config.getLogger('entity_recognizer_mod.py')

# Service paths: this file's directory, its parent, and the grandparent
# (service root), used below to locate the Stanford NER model and jar.
current_path = os.path.dirname(os.path.realpath(__file__))
parent_path = os.path.abspath(os.path.join(current_path, os.pardir))
service_root_path = os.path.abspath(os.path.join(parent_path, os.pardir))


# Snet Classifier
class SnetEntityRecognizer:
    def __init__(self):
        """Record paths to the bundled Stanford NER classifier model and jar."""
        logger.debug("SnetEntityRecognizer INIT")
        # Share the common prefix of both artifact paths.
        ner_root = service_root_path + '/models/stanford-ner-2018-02-27'
        self.english_model = ner_root + '/classifiers/english.all.3class.distsim.crf.ser.gz'
        self.stanford_jar = ner_root + '/stanford-ner-3.9.1.jar'

    # Process text
    def process_text(self, input_text):
        """Tokenize the (stringified) input into a list of word tokens."""
        return word_tokenize(str(input_text))

    # Stanford NER tagger
# ======== Code example #9 ========
import os
import pickle
from nltk.classify import ClassifierI
from statistics import mode
from nltk.tokenize import word_tokenize
from log import log_config

logger = log_config.getLogger('consensus_mod.py')

# Service paths: this file's directory, its parent, and the grandparent
# (service root). Presumably used to locate pickled classifier files
# (`pickle` is imported) -- the usage is outside this chunk; confirm.
current_path = os.path.dirname(os.path.realpath(__file__))
parent_path = os.path.abspath(os.path.join(current_path, os.pardir))
service_root_path = os.path.abspath(os.path.join(parent_path, os.pardir))


class VoteClassifier(ClassifierI):
    """ Vote by classifiers results
    """
    def __init__(self, *classifiers):
        """Keep the set of classifiers whose votes will be combined.

        :param classifiers: objects exposing a ``classify(features)`` method
        """
        self._classifiers = classifiers

    def classify(self, features):
        """ Vote on all classifiers results
        :param features: incomming feature to be classified
        :return: vote winner
        """

        votes = []
        for c in self._classifiers:
            v = c.classify(features)
            votes.append(v)
# ======== Code example #10 ========
import os
import pickle
from nltk.classify import ClassifierI
from statistics import mode
from nltk.tokenize import word_tokenize
from log import log_config

logger = log_config.getLogger('analyze_mod.py')

# Service paths: this file's directory, its parent, and the grandparent
# (service root). Presumably used to locate pickled classifier files
# (`pickle` is imported) -- the usage is outside this chunk; confirm.
current_path = os.path.dirname(os.path.realpath(__file__))
parent_path = os.path.abspath(os.path.join(current_path, os.pardir))
service_root_path = os.path.abspath(os.path.join(parent_path, os.pardir))


class VoteClassifier(ClassifierI):
    """ Vote by classifiers results
    """
    def __init__(self, *classifiers):
        """Keep the set of classifiers whose votes will be combined.

        :param classifiers: objects exposing a ``classify(features)`` method
        """
        self._classifiers = classifiers

    def classify(self, features):
        """ Vote on all classifiers results
        :param features: incomming feature to be classified
        :return: vote winner
        """

        votes = []
        for c in self._classifiers:
            v = c.classify(features)
            votes.append(v)