Example #1
def process():
    if not check_for_token(request.args):
        return jsonify({"msg": "INVALID LOGIN TOKEN", "cod": 401})

    data = request.json
    msg = data['msg']

    (module, weight) = bot_module_namer.process(msg)
    Debug.print(f"Module Namer | {module}:{weight:.4f}")

    (predicted_cmd, cmd_weight) = bot_module_cmd_predictor.process(msg)
    Debug.print(f"Module CMD Predictor | {predicted_cmd}:{cmd_weight:.4f}")

    for m in mc.modules:
        if str(m.module_name) == str(module):
            if request.is_json:
                user = userManager.get_user(
                    tokenManager.getTokenById(request.args['key']).userid)
                if user is None:
                    return jsonify("USER NOT FOUND BY TOKEN")

                if not os.path.exists(f"./data/config/{user.uuid}"):
                    create_user_modules()
                module_output = m.exec(msg, user, predicted_cmd)
                return jsonify(module_output)
    return jsonify(data)
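The handler above is shown without its route registration. A minimal sketch of how it might be wired to a Flask app; the /process path, the POST method, and the app object are assumptions, not taken from the source:

from flask import Flask

app = Flask(__name__)

# Hypothetical registration; the real endpoint path is not shown in the source.
app.add_url_rule("/process", view_func=process, methods=["POST"])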
Example #2
    def copy_config(self, module_name, module_config, path="./data/"):
        if os.path.exists(f"{path}config/{self.uuid}/{module_name}.json"):
            Debug.print(f"{path}config/{self.uuid}/{module_name}.json File already exists")
            return
        if not os.path.exists(f"{path}config/{self.uuid}"):
            os.makedirs(f"{path}config/{self.uuid}")

        with open(f"{path}config/{self.uuid}/{module_name}.json", "w+") as file_:
            if os.path.exists(f"{path}config/{self.uuid}/{module_name}.json"):
                json.dump(module_config, file_)
Example #3
    def __init__(self, name: str):
        self.name = name
        self.device = torch.device(
            'cuda' if torch.cuda.is_available() else 'cpu')
        Debug.print(f"Training on: {self.device}")

        self.data = {}
        self.all_words = []
        self.tags = []
        self.xy = []
        self.x_train = []
        self.y_train = []
        self.name = ""
Example #4
    def train(self, num_epochs, batch_size, learning_rate, hidden_size,
              num_workers, FILE_PATH):
        input_size = len(self.x_train[0])
        output_size = len(self.tags)

        dataset = ChatDataset(self.x_train, self.y_train)
        train_loader = DataLoader(dataset=dataset,
                                  batch_size=batch_size,
                                  shuffle=True,
                                  num_workers=num_workers)

        model = NeuralNet(input_size, hidden_size,
                          output_size).to(device=self.device)

        criterion = nn.CrossEntropyLoss()
        optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

        for epoch in range(num_epochs):
            for (words, labels) in train_loader:
                words = words.to(self.device)
                labels = labels.to(dtype=torch.long).to(self.device)

                outputs = model(words)

                loss = criterion(outputs, labels)

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()

            # Log progress once every 100 epochs (outside the batch loop,
            # so it prints once per epoch rather than once per batch).
            if (epoch + 1) % 100 == 0:
                Debug.print(
                    f"{self.name} | Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.9f}"
                )
        Debug.print(f"{self.name} | Final loss: {loss.item():.5f}")

        self.data = {
            "model_state": model.state_dict(),
            "input_size": input_size,
            "output_size": output_size,
            "hidden_size": hidden_size,
            "all_words": self.all_words,
            "tags": self.tags
        }

        if not os.path.exists("./data/ai"):
            os.makedirs("./data/ai")
        torch.save(self.data, FILE_PATH)
        return self.data
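train() depends on a ChatDataset class that is not shown in these examples. A minimal sketch of a compatible torch Dataset, assuming x_train holds numeric feature vectors and y_train holds integer class indices:

import torch
from torch.utils.data import Dataset

class ChatDataset(Dataset):
    """Wraps the training arrays so DataLoader can index and batch them."""

    def __init__(self, x_train, y_train):
        self.x = torch.tensor(x_train, dtype=torch.float32)
        self.y = torch.tensor(y_train)

    def __len__(self):
        return len(self.x)

    def __getitem__(self, idx):
        return self.x[idx], self.y[idx]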
Example #5
    def load(self, FILE_PATH):
        try:
            self.data = torch.load(FILE_PATH)

            input_size = self.data['input_size']
            hidden_size = self.data['hidden_size']
            output_size = self.data['output_size']
            self.all_words = self.data['all_words']
            self.tags = self.data['tags']
            model_state = self.data['model_state']

            self.model = NeuralNet(input_size, hidden_size,
                                   output_size).to(self.device)
            self.model.load_state_dict(model_state)
            self.model.eval()
            return True
        except Exception as exc:
            Debug.print(f"AI file loading failed: {FILE_PATH} ({exc})")
            return False
Example #6
    def input_loop(self):
        if not self.isRunning:
            return

        # Re-open stdin (file descriptor 0) once so this thread can read console input.
        sys.stdin = open(0)

        while self.isRunning:
            c = sys.stdin.readline().strip()

            if c == "exit":
                self.isRunning = False
                Debug.print("Admin-Console closed!")

            if c == "":
                return
            args = c.split()

            cmd = args[0]
            if cmd in self.register_commands:
                #importlib.reload(self.register_commands[cmd])
                try:
                    self.register_commands[cmd].command(c, args)
                except Exception as exc:
                    # Exception objects have no .message attribute in Python 3.
                    Debug.print(str(exc))
            else:
                Debug.print(f"{c} is not a valid command")
Example #7
    def command(self, msg, args):
        if len(args) > 1:
            if args[1] == "print":
                self.bot.print()

            elif args[1] == "train" and len(args) == 3:
                for module in self.moduleManager.modules:
                    tmp_arr = []
                    tmp_config = module.getConfig()

                    for i in range(len(tmp_config["commands"])):
                        if tmp_config["commands"][i]["language"] == args[2]:
                            tmp_arr.extend(
                                tmp_config["commands"][i]["examples"])

                    self.bot.add(tmp_arr, module.module_name)
                self.bot.create_set()
                self.bot.train(num_epochs=5000,
                               batch_size=8,
                               learning_rate=0.01,
                               hidden_size=8,
                               num_workers=0,
                               FILE_PATH="./data/ai/Module_Namer.ai")

            elif args[1] == "reload":
                self.bot.load(FILE_PATH="./data/ai/Module_Namer.ai")
                Debug.print("AI-CMD: reloaded")

            elif args[1] == "save" and len(args) > 2:

                try:
                    self.bot.save(self.bot.data, args[2])
                    Debug.print(f"AI-CMD: AI saved to {args[2]}")
                except Exception as exp:
                    Debug.print(exp)
                    Debug.print("AI-CMD: Saving file failed!")
        else:
            Debug.print("AI-CMD: AI print | Print the current Dataset")
            Debug.print(
                "AI-CMD: AI train [language] | trains the AI on the language")
            Debug.print("AI-CMD: AI reload | reloads the AI from disk")
            Debug.print("AI-CMD: AI save [path] | saves the AI to the given path")
Example #8
def load_tokens():
    tokenManager.loadTokens()
    for tokendata in tokenManager.tokens:
        Debug.print(
            f"Add Token: [{tokendata.name}] to user [{tokendata.userid}]")
        userManager.add_token(tokendata.userid, tokendata)
Example #9
import numpy as np
import nltk
from nltk.stem.porter import PorterStemmer
from src.Debugger import Debug

try:
    nltk.data.find('tokenizers/punkt')
    Debug.print("tokenizers/punkt init")
except LookupError:
    nltk.download('punkt')

class NLTKUtils:
    stemmer = PorterStemmer()

    def tokenize(self, sentence):
        """
        split sentence into array of words/tokens
        a token can be a word, a punctuation character, or a number
        """
        return nltk.word_tokenize(sentence)

    def stem(self, word):
        """
        stemming = find the root form of the word
        examples:
        words = ["organize", "organizes", "organizing"]
        words = [stem(w) for w in words]
        -> ["organ", "organ", "organ"]
        """
        return self.stemmer.stem(word.lower())
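A short usage sketch for NLTKUtils; the variable name and the sample sentence are illustrative:

utils = NLTKUtils()
tokens = utils.tokenize("Organizing the modules")  # ['Organizing', 'the', 'modules']
stems = [utils.stem(t) for t in tokens]            # ['organ', 'the', 'modul']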
Example #10
    def save_profile(self, path="./data"):
        json_data = self.toJSON()
        Debug.print(f"Saving user {self.uuid} to {path}/users/")
        # A context manager ensures the file is closed even if the write fails.
        with open(f"{path}/users/{self.uuid}.json", "w") as f:
            f.write(json_data)
Example #11
    def run(self):
        Debug.print("run AdminConsole thread")
        self.isRunning = True
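Assuming AdminConsole subclasses threading.Thread (suggested by the overridden run() and the log message), a hypothetical way to start it; the constructor arguments are not shown in the source:

console = AdminConsole()
console.start()  # Thread.start() invokes run() on a new thread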