def generate_question_map_from_alphagrams(lexicon, alph_objects):
    """
    Generate a question map from a list of {'q': ..., 'a': [..]} objects.
    """
    word_db = WordDB(lexicon.lexiconName)
    questions = word_db.get_questions_from_alph_dicts(alph_objects)
    return generate_question_map(questions)
def savedlist_from_probabilities(lexicon, p_min, p_max, length):
    """
    Creates a WordList instance from a list of Alphagram pks (indices)
    but *does not save it*.
    """
    word_db = WordDB(lexicon.lexiconName)
    questions = word_db.get_questions_for_probability_range(
        p_min, p_max, length)
    word_list = WordList()
    word_list.initialize_list(
        questions.to_python(), lexicon, None, shuffle=True, save=False)
    question_map = generate_question_map(questions)
    return word_list, question_map
def create_user_list(contents, filename, lex, user):
    """
    Creates a user list from file contents, a filename, a lexicon, and
    a user. Checks to see if the user can create more lists.

    Returns a (success, message) tuple; message is '' on success.
    """
    list_name, _extension = os.path.splitext(filename)
    # Refuse to clobber an existing list with the same name for this
    # lexicon; only the DoesNotExist path lets us proceed.
    try:
        WordList.objects.get(name=list_name, user=user, lexicon=lex)
    except WordList.DoesNotExist:
        pass
    else:
        # uh oh, it exists!
        return (
            False,
            _("A list by the name {} already exists for this "
              "lexicon! Please rename your file.").format(list_name))
    start = time.time()
    try:
        alphas = get_alphas_from_words(
            contents, wordwalls.settings.UPLOAD_FILE_LINE_LIMIT)
    except UserListParseException as e:
        return (False, str(e))
    profile = user.aerolithprofile
    saved_already = profile.wordwallsSaveListSize
    limit = settings.SAVE_LIST_LIMIT_NONMEMBER
    # Only non-members are subject to the total saved-list size cap.
    if not profile.member and (saved_already + len(alphas)) > limit:
        return False, _("This list would exceed your total list size limit")
    word_db = WordDB(lex.lexiconName)
    questions = word_db.get_questions(alphas)
    num_alphagrams = questions.size()
    logger.info('number of uploaded alphagrams: %d', num_alphagrams)
    logger.info('elapsed time: %f', time.time() - start)
    logger.info('user: %s, filename: %s', user.username, filename)
    word_list = WordList()
    word_list.name = list_name
    word_list.initialize_list(
        questions.to_python(), lex, user, shuffle=True, keep_old_name=True)
    # Charge the new alphagrams against the user's saved-list quota.
    profile.wordwallsSaveListSize += num_alphagrams
    profile.save()
    return True, ''
class WordDBSpanishTest(TestCase):
    """Spot-checks word-data lookups against the FISE09 (Spanish) lexicon."""

    def setUp(self):
        self.db = WordDB(lexicon_name='FISE09')

    def test_word_data(self):
        """ÑAME should come back with every stored field intact."""
        word = self.db.get_word_data(u'ÑAME')
        expectations = (
            ('word', u'ÑAME'),
            ('lexicon_symbols', ''),
            ('front_hooks', ''),
            ('back_hooks', 'S'),
            ('inner_front_hook', True),
            ('inner_back_hook', False),
            ('alphagram', u'AEMÑ'),
        )
        for attr, expected in expectations:
            self.assertEqual(getattr(word, attr), expected)
def generate_dc_questions(challenge_name, lex, challenge_date):
    """
    Generate the questions for a daily challenge.

    Args:
        challenge_name: A DailyChallengeName instance (provides .name
            and .timeSecs).
        lex: A Lexicon model instance.
        challenge_date: The date the challenge is being generated for.

    Returns:
        A tuple (questions, time_secs) — questions is of type
        Questions — or None if no questions can be generated for this
        challenge name.
    """
    logger.debug('Trying to create challenge {} for {} ({})'.format(
        challenge_name, lex, challenge_date))
    db = WordDB(lex.lexiconName)
    # capture number. first try to match to today's lists
    m = re.match("Today's (?P<length>[0-9]+)s", challenge_name.name)
    if m:
        word_length = int(m.group('length'))
        if word_length < 2 or word_length > 15:
            return None  # someone is trying to break my server >:(
        logger.info('Generating daily challenges %s %d', lex, word_length)
        min_p = 1
        # lengthCounts is a dictionary of strings as keys
        max_p = json.loads(lex.lengthCounts)[str(word_length)]
        # Materialize the range: random.shuffle requires a mutable
        # sequence, and a bare range object raises TypeError on Python 3.
        r = list(range(min_p, max_p + 1))
        random.shuffle(r)
        # Just the first 50 elements for the daily challenge.
        alphagrams = db.alphagrams_by_probability_list(r[:50], word_length)
        return db.get_questions(alphagrams), challenge_name.timeSecs
    # There was no match, check other possible challenge names.
    if challenge_name.name == DailyChallengeName.WEEKS_BINGO_TOUGHIES:
        alphagrams = generate_toughies_challenge(lex, challenge_date)
        random.shuffle(alphagrams)
        return db.get_questions(alphagrams), challenge_name.timeSecs
    elif challenge_name.name == DailyChallengeName.BLANK_BINGOS:
        questions = generate_blank_bingos_challenge(lex, challenge_date)
        questions.shuffle()
        return questions, challenge_name.timeSecs
    elif challenge_name.name == DailyChallengeName.BINGO_MARATHON:
        questions = Questions()
        for lgt in (7, 8):
            min_p = 1
            max_p = json.loads(lex.lengthCounts)[str(lgt)]
            # list(...) for the same Python 3 shuffle reason as above.
            r = list(range(min_p, max_p + 1))
            random.shuffle(r)
            questions.extend(db.get_questions(
                db.alphagrams_by_probability_list(r[:50], lgt)))
        return questions, challenge_name.timeSecs
    # elif challenge_name.name in (DailyChallengeName.COMMON_SHORT,
    #                              DailyChallengeName.COMMON_LONG):
    #     questions = generate_common_words_challenge(
    #         challenge_name.name)
    #     random.shuffle(questions)
    #     return questions, challenge_name.timeSecs
    return None
def setUp(self):
    """Open a handle to the Spanish (FISE09) word database."""
    lexicon = 'FISE09'
    self.db = WordDB(lexicon_name=lexicon)
def setUp(self):
    """Open a handle to the America word database."""
    lexicon = 'America'
    self.db = WordDB(lexicon_name=lexicon)
class WordDBTest(TestCase):
    """Exercises word and alphagram lookups against the America lexicon."""

    def setUp(self):
        self.db = WordDB(lexicon_name='America')

    def _assert_partiest(self, word):
        """Check every stored field of the word PARTIEST."""
        self.assertEqual(word.word, 'PARTIEST')
        self.assertEqual(word.lexicon_symbols, '+$')
        self.assertEqual(word.front_hooks, '')
        self.assertEqual(word.back_hooks, '')
        self.assertEqual(word.inner_front_hook, True)
        self.assertEqual(word.inner_back_hook, True)
        self.assertTrue('party' in word.definition)
        self.assertEqual(word.alphagram, 'AEIPRSTT')

    def test_word_data(self):
        self._assert_partiest(self.db.get_word_data('PARTIEST'))

    def test_words_data_single(self):
        words = self.db.get_words_data(['PARTIEST'])
        self.assertEqual(len(words), 1)
        self._assert_partiest(words[0])

    def test_words_data_multiple(self):
        # Results come back alphagram-sorted: GAMODEME before PARTIEST.
        words = self.db.get_words_data(['PARTIEST', 'GAMODEME'])
        self.assertEqual(len(words), 2)
        gamodeme = words[0]
        self.assertEqual(gamodeme.alphagram, 'ADEEGMMO')
        self.assertEqual(gamodeme.word, 'GAMODEME')
        self.assertEqual(gamodeme.lexicon_symbols, '')
        self.assertEqual(gamodeme.front_hooks, '')
        self.assertEqual(gamodeme.back_hooks, 'S')
        self.assertEqual(gamodeme.inner_front_hook, False)
        self.assertEqual(gamodeme.inner_back_hook, False)
        self.assertTrue('organisms' in gamodeme.definition)
        self._assert_partiest(words[1])

    def test_alphagram_data(self):
        alpha = self.db.get_alphagram_data('AEINRST')
        self.assertEqual(alpha.alphagram, 'AEINRST')
        self.assertEqual(alpha.length, 7)
        self.assertEqual(alpha.probability, 9)
        self.assertEqual(alpha.combinations, 3006072)

    def test_word_not_found(self):
        self.assertIsNone(self.db.get_word_data('FOOBARBAZ'))

    def test_alphagram_not_found(self):
        self.assertIsNone(self.db.get_alphagram_data('ABCDEFGH'))

    def test_probability(self):
        self.assertEqual(self.db.probability('AEINRST'), 9)
def question_list_from_probabilities(lexicon, p_min, p_max, length):
    """ Generate a list of questions from a probability range."""
    word_db = WordDB(lexicon.lexiconName)
    in_range = word_db.get_questions_for_probability_range(
        p_min, p_max, length)
    return generate_question_list(in_range)
def getWordDataFromQuestions(lexicon, questions):
    """Resolve alphagram dicts into full questions and return word data."""
    word_db = WordDB(lexicon.lexiconName)
    resolved = word_db.get_questions_from_alph_dicts(questions)
    return get_word_data(resolved.questions_array())
def getWordDataByProb(lexicon, length, minP, maxP):
    """Return word data for every question in a probability range."""
    word_db = WordDB(lexicon.lexiconName)
    in_range = word_db.get_questions_for_probability_range(
        minP, maxP, length)
    return get_word_data(in_range.questions_array())