def deleteStory(storyId):
    """Delete the story with the given id and retrain the intent classifier.

    :param storyId: hex string id of the Story document to delete
    :return: the "ok" response built by ``buildResponse.sentOk()``
    """
    Story.objects.get(id=ObjectId(storyId)).delete()

    # Retraining is best-effort: a training failure must not block the delete.
    # Was a bare `except:`, which would also swallow SystemExit/KeyboardInterrupt.
    try:
        intentClassifier = IntentClassifier()
        intentClassifier.train()
    except Exception:
        pass

    # Remove the per-story model file if it exists; absence is not an error.
    try:
        os.remove("{}/{}.model".format(app.config["MODELS_DIR"], storyId))
    except OSError:
        pass
    return buildResponse.sentOk()
def deleteStory(storyId):
    """Delete a story if the caller is logged in, otherwise show the index page.

    :param storyId: hex string id of the Story document to delete
    :return: "ok" response on success, or the rendered ``index.html``
             when the session is not logged in
    """
    # `session.__getattribute__('loginstat')` read an *attribute* of the
    # session object, not the stored session key; use the dict-style
    # `session.get(...)` that Flask sessions provide.
    if session.get('loginstat') == 'login':
        Story.objects.get(id=ObjectId(storyId)).delete()

        # Retraining is best-effort; was `except BaseException`, which also
        # swallows SystemExit/KeyboardInterrupt — narrowed to Exception.
        try:
            intentClassifier = IntentClassifier()
            intentClassifier.train()
        except Exception:
            pass

        # Remove the per-story model file if it exists; absence is not an error.
        try:
            os.remove("{}/{}.model".format(app.config["MODELS_DIR"], storyId))
        except OSError:
            pass
        return buildResponse.sentOk()
    else:
        return render_template('index.html')
def deleteStory(storyId):
    """Delete a story (scoped to the current bot, if any) and retrain.

    :param storyId: hex string id of the Story document to delete
    :return: the "ok" response built by ``buildResponse.sentOk()``
    """
    story = Story.objects.filter(id=ObjectId(storyId))
    if g.botId:
        story = story.filter(bot=g.botId)
        botId = g.botId
    else:
        botId = 'default'
    story.get().delete()

    # Retraining is best-effort; was `except BaseException` — narrowed.
    try:
        intentClassifier = IntentClassifier()
        # Fix: use the resolved `botId` (with its 'default' fallback) so the
        # classifier and the model filename below agree; the original passed
        # the raw g.botId, which may be unset.
        intentClassifier.setBotId(botId)
        intentClassifier.train()
    except Exception:
        pass

    # Remove the per-bot, per-story model file if present.
    try:
        os.remove("{}/{},{}.model".format(app.config["MODELS_DIR"], botId, storyId))
    except OSError:
        pass
    return buildResponse.sentOk()
def buildModel(storyId):
    """Train the sequence labeler for one story and retrain the bot's intent classifier.

    :param storyId: id of the story whose sequence-labeling model to train
    :return: the "ok" response built by ``buildResponse.sentOk()``
    """
    # Per-story sequence labeler first, then the bot-wide intent classifier.
    sequenceLabeler.train(storyId)

    classifier = IntentClassifier()
    classifier.setBotId(g.botId)
    classifier.train()

    return buildResponse.sentOk()
import nltk
import os

# Download the NLTK corpora/models required by the NLP pipeline.
nltk.download("stopwords")
nltk.download("wordnet")
nltk.download("averaged_perceptron_tagger")
nltk.download("punkt")

# Create the directory for storing chat logs, if it does not exist yet.
if not os.path.exists("logs"):
    os.makedirs("logs")

# Train the default bot's intent classifier at startup. Training requires
# data in MongoDB, so a missing-data failure is reported and skipped rather
# than aborting startup.
try:
    print("Training models..")
    from app.core.intentClassifier import IntentClassifier
    intentClassifier = IntentClassifier()
    intentClassifier.setBotId('default')
    intentClassifier.train()
    print("Training models finished..")
except Exception as e:
    # Don't rebind `e` itself; keep the readable reason in its own name.
    reason = str(e)
    if reason == "NO_DATA":
        # Fixed typo in the original message ("Reffer Readme").
        reason = "load data into mongodb first. Refer to the README."
    print("Could not train models..skipping.. (reason: {})".format(reason))