Example #1
import copy
import traceback

import numpy as np

from intent_model.preprocessing import NLTKTokenizer
from intent_model.multiclass import KerasMulticlassModel


class IntentAgent:
    def __init__(self, config):
        self.config = copy.deepcopy(config)
        self.agent = None
        self.classes = None
        self.preprocessor = None

    def init_agent(self):
        self.config['model_from_saved'] = True
        self.agent = KerasMulticlassModel(self.config)
        self.classes = self.agent.classes
        self.preprocessor = NLTKTokenizer()

    def _run_score(self, observation):
        task = observation[0]
        infer_result = self.agent.infer(self.preprocessor.infer(task))
        prediction = self.classes[np.argmax(infer_result)]
        return prediction

    def answer(self, input_task):
        try:
            if isinstance(input_task, list):
                result = self._run_score(input_task)
                return result
            elif isinstance(input_task, int):
                result = 'There is no Intent Classifier testing API provided'
                return result
            else:
                return {"ERROR": "Parameter error - {} belongs to unknown type".format(
                    str(input_task))}
        except Exception as e:
            print(e)
            return {"ERROR": "{}".format(traceback.extract_stack())}
Example #2
import sys
import json

import numpy as np

from intent_model.preprocessing import NLTKTokenizer
from intent_model.multiclass import KerasMulticlassModel

config_file = sys.argv[1]

# Reading parameters
with open(config_file, "r") as f:
    opt = json.load(f)

# Inference is possible only for a saved intent_model
opt['model_from_saved'] = True

# Initializing intent_model
print("Initializing intent_model")
model = KerasMulticlassModel(opt)

# Initializing classes
classes = model.classes

# Initializing preprocessor
preprocessor = NLTKTokenizer()

phrase = "I want you to add 'I love you, baby' to my playlist"

# Predicting
predictions = model.infer(preprocessor.infer(phrase))

# Result
print(np.max(predictions), classes[np.argmax(predictions)])
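A small follow-on sketch (not part of the original snippet): inspecting the three most probable intents instead of only the best one, assuming predictions is a probability vector over classes, possibly returned as a (1, n_classes) batch:

probs = np.ravel(predictions)             # flatten in case infer() returns a (1, n_classes) array
for i in np.argsort(probs)[::-1][:3]:     # three most probable intents, best first
    print(classes[i], probs[i])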
Example #3
import sys
import json
from pathlib import Path

import numpy as np
import pandas as pd

from intent_model.preprocessing import NLTKTokenizer
from intent_model.dataset import Dataset
from intent_model.multiclass import KerasMulticlassModel

config_file = sys.argv[1]
data_file = sys.argv[2]

# Reading full data
comment_name = "request"
train_data = pd.read_csv(Path(data_file), sep=',')
print(train_data.head())

values = {"istask": 0, "request": "element"}
train_data.fillna(values, inplace=True)

# Tokenization that splits words and punctuation by space
preprocessor = NLTKTokenizer()
n_batches = int(np.ceil(train_data.shape[0] / 10000))  # process the table in chunks of 10000 rows
for k in range(n_batches):
    inds = np.arange(k * 10000, min((k + 1) * 10000, train_data.shape[0]))
    train_data.loc[inds, comment_name] = preprocessor.infer(train_data.loc[inds, comment_name].values)

# Reading parameters of intent_model from json
with open(config_file, "r") as f:
    opt = json.load(f)

# Initializing classes from dataset
columns = list(train_data.columns)
columns.remove(comment_name)
classes = np.array(columns)
opt["classes"] = " ".join(list(classes))
print(classes)
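A quick sanity check that could follow here (a sketch only, under the assumption that the remaining columns are complete 0/1 indicator columns, one per intent class):

y = train_data[list(classes)].values      # class indicator matrix, shape (n_rows, n_classes)
print(classes[np.argmax(y, axis=1)][:5])  # recovered labels of the first five rows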
Example #4
def init_agent(self):
    agent_config = self.config['kpis'][self.kpi_name]['settings_agent']
    agent_config['model_from_saved'] = True
    self.agent = KerasMulticlassModel(agent_config)
    self.classes = self.agent.classes
    self.preprocessor = NLTKTokenizer()
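This is a method fragment from a KPI-evaluation agent class that is not shown here. A hedged sketch of how the attributes it sets would be used, following the same pattern as Example #1 (kpi_agent is an illustrative name for an instance of the enclosing class):

kpi_agent.init_agent()                                     # load the saved model for this KPI
tokens = kpi_agent.preprocessor.infer("play some jazz")    # hypothetical request
probs = kpi_agent.agent.infer(tokens)
print(kpi_agent.classes[np.argmax(probs)])                 # predicted intent label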
Example #5
import sys
import json
from pathlib import Path

import numpy as np
import pandas as pd

from intent_model.preprocessing import NLTKTokenizer
from intent_model.dataset import Dataset
from intent_model.multiclass import KerasMulticlassModel

config_file = sys.argv[1]
data_file = sys.argv[2]

# Reading full data
comment_name = "request"
train_data = pd.read_csv(Path(data_file), sep=',')
print(train_data.head())

values = {"istask": 0, "request": "пропущено"}
train_data.fillna(values, inplace=True)

# Tokenization that splits words and punctuation by space
preprocessor = NLTKTokenizer()
n_batches = int(np.ceil(train_data.shape[0] / 10000))  # process the table in chunks of 10000 rows
for k in range(n_batches):
    inds = np.arange(k * 10000, min((k + 1) * 10000, train_data.shape[0]))
    train_data.loc[inds, comment_name] = preprocessor.infer(train_data.loc[inds, comment_name].values)

# Reading parameters of intent_model from json
with open(config_file, "r") as f:
    opt = json.load(f)

# Initializing classes from dataset
columns = list(train_data.columns)
columns.remove(comment_name)
classes = np.array(columns)
opt["classes"] = " ".join(list(classes))
print(classes)
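An optional follow-up (a sketch, not in the original script): the chunked tokenization above can be slow, so the preprocessed table could be written back to disk and reused on later runs.

# data_file comes from sys.argv[2] above; the '.tokenized' suffix is an arbitrary choice.
train_data.to_csv(data_file + '.tokenized', index=False)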
Example #6
def init_agent(self):
    self.config['model_from_saved'] = True
    self.agent = KerasMulticlassModel(self.config)
    self.classes = self.agent.classes
    self.preprocessor = NLTKTokenizer()
Example #7
import sys
import json

import numpy as np

from intent_model.preprocessing import NLTKTokenizer
from intent_model.multiclass import KerasMulticlassModel

config_file = sys.argv[1]


def infer(phrase):
    global preprocessor, classes, model
    try:
        predictions = model.infer(preprocessor.infer(phrase))
    except Exception:
        print('Error', file=sys.stderr)
        return 0, 'error'
    return np.max(predictions), classes[np.argmax(predictions)]


# Initializing preprocessor
preprocessor = NLTKTokenizer()

# Reading parameters
with open(config_file, "r") as f:
    opt = json.load(f)

# Inference is possible only for a saved intent_model
opt['model_from_saved'] = True

# Initializing intent_model
model = KerasMulticlassModel(opt)

# Initializing classes
classes = model.classes

print("Model is ready! You now can enter requests.")