def save():
    """Save a word submitted via the form.

    If the word already exists, report that; otherwise persist it together
    with its translation, introduction, star rating, example and group.

    :return: a status message string (the word name plus a result suffix).
    """
    word_name = request.form.get("word")
    word = Word.query.filter(Word.word == word_name).first()
    if word:
        # BUG FIX: the original returned `word + "已存在"`, concatenating a
        # Word model instance with a str (TypeError). Use the word name.
        return word_name + "已存在"
    try:
        translation = request.form.get("translation")
        introduction = request.form.get("introduction")
        star = request.form.get("star")
        example = request.form.get("words")
        # Resolve the group by its submitted name; if no group matches,
        # the AttributeError below is caught and rolled back.
        group = Group.query.filter(
            Group.name == request.form.get("group_name")).first()
        new_word = Word(word=word_name,
                        translation=translation,
                        introduction=introduction,
                        star=star,
                        group_id=group.id,
                        example=example)
        db.session.add(new_word)
        db.session.commit()
        return word_name + "已添加"
    except Exception as e:
        # Keep the session usable after any failure and report it.
        db.session.rollback()
        print(e)
        return word_name + "失败"
def add_word(self, widget):
    """Prompt the user for a word/translation pair, persist it, and append
    the stored record to the list view."""
    dlg = Dialog(self)
    clicked = dlg.run()
    dlg.destroy()
    if clicked != Gtk.ResponseType.OK:
        return

    # Both fields are required; ignore empty submissions.
    entered_word = dlg.word.strip()
    entered_translation = dlg.translation.strip()
    if not entered_word or not entered_translation:
        return

    # Persist the new word, then re-read it so the generated id is available.
    new_record = Word(word=entered_word,
                      translation=entered_translation,
                      date=datetime.now())
    row_id = Word.insert(new_record)
    saved = Word.retrieve_by_id(row_id)

    model = self.list.get_model()
    model.append([
        saved.id,
        saved.word,
        saved.translation,
        saved.date.strftime("%Y-%m-%d %H:%M:%S"),
    ])
def makeAnswer(wordText, question, POSText, index):
    """Persist an answer Word row and return its primary key."""
    answer = Word(
        question=question,
        word=wordText,
        part_of_speech=POSText,
        index=index,
    )
    answer.save()
    return answer.pk
def initialiseDB():
    """Load words from ``Mywords.xlsx`` into the database.

    Columns are word / meaning / hint / completed; insertion starts at row 1
    (row 0 is assumed to be a header). Each word gets four distinct randomly
    chosen meanings as multiple-choice options.

    :return: JSON ``{"success": True}`` on success; aborts with 400 on error.
    """
    try:
        wb = xlrd.open_workbook("Mywords.xlsx")
        sheet = wb.sheet_by_index(0)

        # Options are drawn from the pool of all meanings in the sheet.
        # NOTE(review): this pool includes row 0, while insertion below
        # skips row 0 as a header — confirm that is intended.
        meanings = [sheet.cell_value(i, 1) for i in range(sheet.nrows)]

        for i in range(1, sheet.nrows):
            word = sheet.cell_value(i, 0).lower()
            meaning = sheet.cell_value(i, 1).lower()
            hint = sheet.cell_value(i, 2).lower()
            completed = sheet.cell_value(i, 3)

            # Pick four distinct meanings at random as answer options.
            options = []
            while len(options) <= 3:
                choice = random.choice(meanings)
                if choice not in options:
                    options.append(choice)

            newWord = Word(word=word, meaning=meaning, hint=hint,
                           options=options, completed=completed)
            newWord.insert()
        return jsonify({"success": True})
    except Exception:
        # BUG FIX: was `except BaseException`, which also swallowed
        # KeyboardInterrupt/SystemExit; Exception is the right boundary.
        rollback()
        print("Word formatting error")
        abort(400)
def words():
    """On POST, store the submitted word (form field or JSON body);
    always respond with the full word list ordered by _id as JSON."""
    if flask.request.method == 'POST':
        submitted = flask.request.form.get('word')
        if not submitted:
            # Fall back to a JSON payload when the form field is absent.
            submitted = flask.request.get_json(force=True).get('word')
        Word(word=submitted).save()
    return flask.jsonify(
        [entry.word for entry in Word.objects.all().order_by('_id')])
def save_word(word):
    """Return the Word titled `word`, creating and saving it first when
    no such row exists yet."""
    matches = Word.objects.filter(title=word)
    if matches:
        return matches[0]
    created = Word(title=word)
    created.save()
    return created
def makeWord(word, question, index):
    """Create and persist a Word row for `word`, returning its pk."""
    # Part of speech is derived from the raw input before normalisation.
    pos = getPartOfSpeech(word)
    normalized = getWord(word)
    row = Word(question=question,
               word=normalized,
               part_of_speech=pos,
               index=index)
    row.save()
    return row.pk
def create(word: str, description: str) -> dict:
    """Create and save a Word row.

    :param word: the word text.
    :param description: its description.
    :return: an empty dict; a duplicate insert (IntegrityError) is rolled
             back and silently ignored.
    """
    result: dict = {}
    try:
        # Use a distinct name for the model instance — the original shadowed
        # the `word` parameter, which made the flow confusing to read.
        entry = Word(word=word, description=description)
        entry.save()
    except IntegrityError:
        # Undo the failed insert; duplicates are deliberately not an error.
        Word.rollback()
    return result
def add_words(self, pair_word_and_count):
    """Bulk-insert any words from `pair_word_and_count` that are not
    already stored.

    :param pair_word_and_count: iterable of word strings (despite the
        name, only the word itself is used here).
    """
    words = list(pair_word_and_count)
    # PERF FIX: one `__in` query for all existing rows instead of one
    # `.exists()` round-trip per word (the original issued N queries).
    existing = set(
        Word.objects.filter(string__in=words).values_list('string',
                                                          flat=True))
    new_rows = [Word(string=w) for w in words if w not in existing]
    Word.objects.bulk_create(new_rows)
def _get_or_create_word(self, word):
    """Return the Word row for `word`, creating (and committing) it on
    first sight.

    :param word: the word string to look up.
    :return: the persisted Word instance.
    """
    # PERF FIX: the original ran `.count()` and then indexed the query,
    # issuing two queries; `.first()` resolves it in one.
    existing = Word.query.filter(Word.word == word).first()
    if existing is not None:
        return existing
    # Not stored yet: insert and commit immediately so later lookups see it.
    created = Word(word)
    db.session.add(created)
    db.session.commit()
    return created
def check_search_word(self, word):
    """Wrap `word` in a Word, then either run the search (hinting when it
    repeats the previous query) or clear with an error for invalid input."""
    candidate = Word(word)
    if not candidate.is_valid:
        self.clear(candidate, 'Invalid word.')
        return
    if self.previous == candidate.value:
        # Same query as last time — hint, but still search below.
        print(same_word_hint)
    else:
        self.previous = candidate.value
    self.search_word(candidate)
def _sentiment_row_to_word(pos_score, neg_score, raw_text, word_type):
    """Build a Word from one sentiment row: the dominant polarity wins
    (negative on ties), and the text is lower-cased and stripped of
    Setting.NONWORD_CHARACTERS."""
    if pos_score > neg_score:
        score, kind = pos_score, WordKindEnum.POS
    else:
        score, kind = neg_score, WordKindEnum.NEG
    word_text = raw_text.lower().strip(Setting.NONWORD_CHARACTERS)
    return Word(word_text, score, kind, word_type)


def load_vn_dict():
    """Populate the VNDict singleton with verbs and adjectives read from
    the sentiment database."""
    # setting DB
    engine = create_engine(get_db_connect_string())
    conn = engine.connect()
    try:
        vn_dict = VNDict.get_instance()

        # Load verbs.
        verb_rows = conn.execute(
            text("SELECT PosScore, NegScore, SynsetTerm FROM verb")
        ).fetchall()
        for row in verb_rows:
            vn_dict.add(_sentiment_row_to_word(
                row.PosScore, row.NegScore, row.SynsetTerm,
                WordTypeEnum.VERB))

        # Load adjectives.
        adj_rows = conn.execute(
            text("SELECT PosScore, NegScore, Adj_Key FROM adj")
        ).fetchall()
        for row in adj_rows:
            vn_dict.add(_sentiment_row_to_word(
                row.PosScore, row.NegScore, row.Adj_Key,
                WordTypeEnum.ADJ))
    finally:
        # BUG FIX: the original never released the connection; close it
        # even when a query raises.
        conn.close()
def find_nearest_word(center, word_list):
    """Return a copy of the word in `word_list` whose absolute score is
    closest to `center`'s absolute score.

    The returned Word carries that distance as its score, negated when
    `center` is a negative-kind word; kind and type come from the match.
    """
    def distance(candidate):
        return abs(abs(candidate.score) - abs(center.score))

    # min() keeps the first of equally-near words, matching the original's
    # strict-less-than replacement rule.
    nearest = min(word_list, key=distance)
    gap = distance(nearest)
    sign = 1 if center.kind == WordKindEnum.POS else -1
    return Word(nearest.txt, gap * sign, nearest.kind, nearest.type)
def html(arq):
    """Parse the HTML file at path `arq`, tokenize its visible text and
    store it as a Document via DocumentController."""
    with codecs.open(arq, 'r') as handle:
        markup = handle.read()
        soup = BeautifulSoup(markup, 'lxml')
        tokens = nltk.word_tokenize(soup.get_text())
        doc = Document(
            title=soup.title.string,
            url=os.path.abspath(handle.name),
            words=[Word(word=str(token)) for token in tokens],
        )
    DocumentController().store(doc)
def parse_words_from_file(session_cls, filename='word_rus.txt'):
    """Import data from dictionary to DB. There is no data validation.

    Each line of `filename` becomes one Word row; a single commit at the
    end keeps the import fast.

    :param session_cls: session factory used to create the DB session.
    :param filename: path of the dictionary file, one word per line.
    """
    logging.info('Starting import of words to DB')
    t = time.time()
    session = session_cls()
    # BUG FIX: the original iterated `open(filename)` directly and never
    # closed the handle; a context manager guarantees release.
    with open(filename, mode='r') as f:
        for line in f:
            session.add(Word(word=line.rstrip()))
    session.commit()
    logging.info('Taken time: {:.2f} seconds'.format(time.time() - t))
def iterwords(self):
    """Yield Word(term, class, 5th-field) for every non-continuation line
    of each per-class data file under self.path."""
    for label in self.classes:
        data_path = os.path.join(self.path, 'data.%s' % label)
        with open(data_path) as fh:
            line = fh.readline()
            while line:
                if line.startswith(" "):
                    # Space-indented lines are continuations; skip them.
                    line = fh.readline()
                    continue
                fields = line.split(' ')
                yield Word(fields[0], label, fields[4])
                line = fh.readline()
def post(self):
    """Build and render a word cloud plus sentiment for the submitted URL.

    Fetches/analyses the page, stores the URL (hashed with SALT) and its
    top-100 word frequencies (values encrypted), then renders the result
    template pointing at the generated PNG.

    NOTE(review): nesting reconstructed from flattened source — the word
    processing and commit are assumed to run only for URLs not yet in the
    DB; confirm against the original file.
    """
    url = self.get_body_argument("url_text_input")
    page = URLProcessing(url)
    # NOTE(review): hard-coded API credential — should live in config.
    client = Algorithmia.client('simScfOHIdEOdxcLMXQlYYx143/1')
    algo = client.algo('nlp/SentimentAnalysis/1.0.4')
    # Salted MD5 of the URL is used as its primary lookup key and filename.
    hash = hashlib.md5((SALT + url).encode('utf-8')).hexdigest()
    page.get_filtered_words()
    page.get_frequency()
    sentiment = algo.pipe({"document": page.text}).result[0]['sentiment']
    url_in_db = self.db_session.query(Url).filter(
        Url.hash == hash).one_or_none()
    if not url_in_db:
        # First time we see this URL: store it and generate its cloud image.
        url_entry = Url(hash=hash, url=url, sentiment=str(sentiment))
        self.db_session.add(url_entry)
        word_cloud = page.generate_word_cloud()
        word_cloud.to_file("%s.png" % hash)
        frequencies_top = page.get_top_100()
        for x in frequencies_top:
            # x is a (word, frequency) pair; words are keyed by salted MD5.
            word = self.db_session.query(Word).filter(
                Word.hash == hashlib.md5(
                    (
                        SALT + x[0]).encode('utf-8')).hexdigest()
            ).one_or_none()
            if not word:
                # New word: store its encrypted value and frequency.
                self.db_session.add(
                    Word(
                        hash=hashlib.md5(
                            (
                                SALT + x[0]).encode('utf-8')
                        ).hexdigest(),
                        value=encrypt_w(x[0], self.public_key),
                        frequency=x[1])
                )
            else:
                # Known word: accumulate its frequency counter.
                self.db_session.query(Word).filter(
                    Word.hash == word.hash
                ).update({"frequency": word.frequency + x[1]})
        self.db_session.commit()
    self.render(
        "template_wordcloud.html",
        url=url,
        image_file="%s.png" % hash
    )
def word_list():
    """On PUT, replace the entire stored word list with the submitted one;
    always respond with the current list ordered by _id as JSON."""
    print(flask.request.method)
    if flask.request.method == 'PUT':
        submitted = flask.request.form.get('word_list')
        if not submitted:
            # Fall back to a JSON payload when the form field is absent.
            submitted = flask.request.get_json(force=True).get('word_list')
        if len(submitted) > 0:
            # Wipe the collection before loading the replacement list.
            Word.objects().delete()
        for entry in submitted:
            Word(word=entry).save()
    return flask.jsonify(
        [w.word for w in Word.objects.all().order_by('_id')])
def post(self):
    """Create a new english/chinese word pair.

    Returns 400 with a reason when either field is empty or the pair
    already exists; otherwise commits the new Word and returns it (201).
    """
    args = parser.parse_args()
    # Validate before constructing anything or touching the database.
    if args['english'] == '' or args['chinese'] == '':
        # BUG FIX: the original referenced `d` here before it was ever
        # defined (NameError); an incomplete submission has no word to echo.
        return {'result': 'incomplete word', 'word': None}, 400
    word = Word(args['english'].lower(), args['chinese'].lower())
    # NOTE(review): the duplicate check lowers `english` but not `chinese`
    # — confirm whether that asymmetry is intended.
    duplicate = Word.query.filter_by(
        english=args['english'].lower()).filter_by(chinese=args['chinese'])
    if duplicate.count() > 0:
        existing = duplicate.first()
        return {'result': 'duplicate word', 'word': existing.serialize()}, 400
    db.session.add(word)
    db.session.commit()
    return word, 201
def new_game(self, request):
    """Creates new game.

    :param request: carries user_name and the word to guess.
    :raises endpoints.NotFoundException: when the user does not exist.
    :return: the game form with a greeting message.
    """
    user = User.query(User.name == request.user_name).get()
    if not user:
        raise endpoints.NotFoundException(
            'A User with that name does not exist!')
    # Reuse the Word entity when the same word was already stored.
    word = Word.query(Word.word_to_guess == request.word).get()
    if not word:
        word = Word(word_to_guess=request.word)
        word.put()
    # FIX: the original wrapped this in `try: ... except: raise` — a no-op
    # with a bare except; exceptions propagate identically without it.
    game = Game.new_game(user.key, word.key.urlsafe())
    return game.to_form('Good luck playing Hangman!')
def add_word():
    """Persist the posted word when it contains 'j' or 'ly'.

    Responds 201 on create, 409 on duplicate, 400 when the payload is
    missing/malformed or the word does not qualify.
    """
    payload = request.json
    if not payload or 'word' not in payload:
        abort(400)
    candidate = payload['word']
    # Only words containing 'j' or 'ly' are accepted.
    if 'j' not in candidate and 'ly' not in candidate:
        return jsonify({'result': 'Invalid'}), 400
    entry = Word(value=candidate)
    try:
        db.session.add(entry)
        db.session.commit()
    except sqlalchemy.exc.IntegrityError:
        # Unique constraint hit: this word is already stored.
        db.session.rollback()
        return jsonify({'result': 'Already exists'}), 409
    return jsonify({'result': 'Created', 'word_id': entry.word_id}), 201
def add():
    """Handle the add-word form: validate, persist with matching
    created/updated timestamps, flash a confirmation and redirect."""
    # Receiving data
    form = WordForm()
    if form.validate_on_submit():
        # FIX: use one timestamp so created_at and updated_at are identical
        # on insert (the original called utcnow() twice, yielding two
        # slightly different values).
        now = datetime.datetime.utcnow()
        word = Word(name=form.name.data,
                    explain=form.explain.data,
                    example=form.example.data,
                    created_at=now,
                    updated_at=now,
                    user_id=g.user.id)
        db.session.add(word)
        db.session.commit()
        flash('Word "%s" has been saved.' % form.name.data, 'info')
        return redirect(url_for('add'))
    return render_template('form.html', form=form, title='Add')
def format_review_word(query_response_word):
    """Convert one raw query item into a ReviewWord (list id and send date
    come from the composite PK/SK, word details from the 'Word' map)."""
    print('query response word', query_response_word)
    details = query_response_word['Word']
    word_body = Word(
        word_id=details['Word id'],
        simplified=details['Simplified'],
        traditional=details['Traditional'],
        pinyin=details['Pinyin'],
        definition=details['Definition'],
        audio_file_key=details['Audio file key'],
        difficulty_level=details['Difficulty level'],
        hsk_level=details['HSK Level'],
    )
    # PK is "<prefix>#<list_id>", SK is "<prefix>#<date_sent>".
    return ReviewWord(
        list_id=query_response_word['PK'].split('#')[1],
        date_sent=query_response_word['SK'].split('#')[1],
        word=word_body,
    )
def post(self):
    """Handle admin form posts: create a new word list, or append a word
    to an existing list, then re-render via get()."""
    if request.form.get('add_index', None):
        # New list: place it after all existing lists (order in steps of 10).
        title = request.form['list_name']
        position = (WordList.query().count() + 1) * 10
        new_list = WordList(order=position, title=str(title))
        new_list.put()
    elif request.form.get('add_word', None):
        parent_key = ndb.Key(WordList, int(request.form['index_key']))
        name = str(request.form['word_name'])
        # New word goes at the end of its list (order in steps of 2).
        position = Word.query().filter(Word.list == parent_key).count() + 1
        new_word = Word(list=parent_key, word=name, order=position * 2)
        new_word.put()
    return self.get()
def addCard():
    """Render the add-card form on GET; on POST validate it, store the
    new english/chinese pair and show the created card."""
    form = AddCardForm()
    if request.method == 'GET':
        return render_template("add.html", form=form)
    if request.method == 'POST':
        if not form.validate():
            # Re-show the form with validation errors.
            return render_template("add.html", form=form)
        card = Word(form.english.data.lower(), form.chinese.data.lower())
        db.session.add(card)
        db.session.commit()
        return redirect(
            url_for("viewCard",
                    language='chinese',
                    translation=form.chinese.data))
def viewAll():
    """Show the full vocabulary (sorted by pinyin); on POST also accept a
    new english/chinese pair via the embedded add form."""
    vocabulary = Word.query.order_by('pinyin')
    form = AddCardForm()
    if request.method == 'GET':
        return render_template("vocabulary.html",
                               vocabulary=vocabulary,
                               form=form)
    if request.method == 'POST':
        if form.validate():
            entry = Word(form.english.data.lower(),
                         form.chinese.data.lower())
            db.session.add(entry)
            db.session.commit()
            return redirect(url_for("viewAll"))
        # Invalid submission: re-render the page with errors.
        return render_template("vocabulary.html",
                               vocabulary=vocabulary,
                               form=form)
def save_audio(words, speaker_id, audio_file):
    """Store one Audio row per (word, start, end) triple from `words`,
    creating missing Word rows on the fly; commits once at the end."""
    for text, start, end in words:
        # Get or create the Word row for this token.
        existing = Session.query(Word).filter_by(word=text).first()
        if existing is None:
            existing = Word(word=text)
            Session.add(existing)
        # Attach the audio segment to the word.
        segment = Audio(
            word=existing.word,
            audio_file=audio_file,
            start=start,
            end=end,
            speaker_id=speaker_id,
        )
        Session.add(segment)
    Session.commit()
    print("Saved {} audios ".format(len(words)), end="")
def words():
    """POST: store every submitted form field as a Word sharing one
    millisecond timestamp, then redirect. GET: render all words grouped by
    word_id together with the language table."""
    if request.method == "POST":
        # One timestamp groups all translations submitted together.
        unique_stamp = int(round(time.time() * 1000))
        new_words = []
        # BUG FIX: dict.iteritems() is Python-2-only; items() behaves the
        # same there and also works on Python 3.
        for key, value in dict(request.form).items():
            # `value` is a list (multi-dict); the first entry is the text.
            new_word = Word(value[0], key, unique_stamp)
            new_words.append(new_word)
        session.add_all(new_words)
        session.commit()
        return redirect('words')
    else:
        words = {}
        languages = {}
        n_word = 0
        # Index languages by id for name lookups below.
        for instance in session.query(Language).order_by('id'):
            line = {
                instance.id: {
                    'id': instance.id,
                    'name': instance.name,
                    'short_name': instance.name
                }
            }
            languages.update(line)
        # Group translations under their shared word_id, keyed by language
        # name.
        for instance in session.query(Word).order_by('id'):
            line = {
                languages.get(instance.language_id).get('name'): {
                    'name': instance.name
                }
            }
            if words.get(instance.word_id) is None:
                words.update({instance.word_id: line})
            elif instance.word_id == n_word:
                words.get(instance.word_id).update(line)
            else:
                # NOTE(review): n_word only ever increments here and both
                # branches do the same update — confirm the counter is
                # actually needed.
                n_word += 1
                words.get(instance.word_id).update(line)
        sorted_words = collections.OrderedDict(sorted(words.items()))
        return render_template('addlanguage.html',
                               words=sorted_words,
                               languages=languages)
def add_single_word(word, category):
    """Add `word` to the database unless it (or an anagram of it) is
    already present; returns a human-readable status string."""
    if Word.query.filter_by(name=word).first():
        return 'Word already exists'

    # Canonical anagram key: the word's letters in sorted order.
    letters = ''.join(sorted(word))

    anagrams = Word.query.filter_by(letters=letters).all()
    if anagrams:
        # NOTE(review): existing anagrams are DELETED when a clash is
        # found — confirm this is intended rather than keeping them.
        for existing in anagrams:
            db.session.delete(existing)
        db.session.commit()
        return 'An anagram for this word exists'

    entry = Word(name=word,
                 length=len(word),
                 category=category,
                 letters=letters)
    db.session.add(entry)
    db.session.commit()
    return '"' + word + '" has been added.'
def add_from_file(filename, category):
    """Bulk-load words from a CSV (column 'Word') into the database.

    Rows whose sorted-letter key is already stored are skipped; inserts are
    committed in batches of 5000 by row index.

    :param filename: path to the CSV file.
    :param category: category assigned to every inserted word.
    :return: status string.
    """
    df = pd.read_csv(filename)
    df = df[['Word']]
    print(df.head(), file=sys.stderr)
    df['Letters'] = df.apply(get_letters, axis=1)
    # BUG FIX: drop_duplicates returns a new frame; the original discarded
    # the result, so in-file duplicate letter-keys were still iterated.
    df = df.drop_duplicates(subset=['Letters'])
    for i, row in df.iterrows():
        letters = row['Letters']
        if not Word.query.filter_by(letters=letters).first():
            word = row['Word'].upper()
            new_word = Word(name=word,
                            letters=letters,
                            category=category,
                            length=len(word))
            db.session.add(new_word)
        # Periodic commit bounds the pending-session size on large files.
        if i % 5000 == 0:
            db.session.commit()
    db.session.commit()
    return 'Added all words'