def test_more(self):
    """Add 201 words, rebuild the JSON blob, and verify the public API
    returns all of them with the newest word's timestamp (epoch millis)."""
    payload = 'json={0}'.format(
        json.dumps(["a{0}".format(n) for n in range(201)]))
    request = make_request("/admin/global_dictionary/add_words", "POST", True,
                           payload)
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(task_response[0].status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 201)
    # Rebuild the aggregated dictionary JSON.
    request = make_request("/admin/global_dictionary/update_json", "POST",
                           True, '0')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 201)
    self.assertEqual(GlobalDictionaryJson.query().count(), 1)
    self.assertEqual(task_response[0].status_int, 200)
    # Fetch everything (since timestamp 0) through the public API.
    request = make_request("/api/global_dictionary/get_words/0", "GET", True,
                           '0')
    response = request.get_response(main.app)
    server_json = json.loads(response.body)
    timestamp = server_json["timestamp"]
    # Reported timestamp must match the newest word entity, in milliseconds.
    newest = datetime.fromtimestamp(0)
    for word in GlobalDictionaryWord.query().fetch():
        newest = max(newest, word.timestamp)
    self.assertEqual(time.mktime(newest.timetuple()) * 1000, timestamp)
    self.assertEqual(201, len(server_json["words"]))
def test_more(self):
    """Bulk-add 201 words, rebuild the dictionary JSON, then check the
    get_words API returns every word and the latest timestamp in millis."""
    words_json = json.dumps(["a{0}".format(idx) for idx in range(201)])
    request = make_request("/admin/global_dictionary/add_words", "POST", True,
                           'json={0}'.format(words_json))
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(task_response[0].status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 201)
    # Trigger the JSON snapshot rebuild.
    request = make_request("/admin/global_dictionary/update_json", "POST",
                           True, '0')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 201)
    self.assertEqual(GlobalDictionaryJson.query().count(), 1)
    self.assertEqual(task_response[0].status_int, 200)
    # Read the full dictionary back through the API.
    request = make_request("/api/global_dictionary/get_words/0", "GET", True,
                           '0')
    response = request.get_response(main.app)
    server_json = json.loads(response.body)
    timestamp = server_json["timestamp"]
    # The API timestamp is the max word timestamp converted to millis.
    latest = datetime.fromtimestamp(0)
    for word in GlobalDictionaryWord.query().fetch():
        latest = max(latest, word.timestamp)
    self.assertEqual(time.mktime(latest.timetuple()) * 1000, timestamp)
    self.assertEqual(201, len(server_json["words"]))
def post(self):
    """Render a difficulty-vs-length heatmap for the top N% most-used words
    and store the PNG as a Plot entity keyed "heatmap_plot_<N>"."""
    import numpy, matplotlib, matplotlib.pyplot
    N = self.request.get("N")
    # Replace any previously rendered plot for this N.
    old_plot = ndb.Key(Plot, "heatmap_plot_" + N).get()
    if old_plot is not None:
        old_plot.key.delete()
    q = GlobalDictionaryWord.query(
        GlobalDictionaryWord.used_times > 0).order(
            -GlobalDictionaryWord.used_times)
    take = int(q.count() * int(N) / 100)
    words = q.fetch(take)
    matplotlib.pyplot.title("heatmap")
    lengths = []
    difficulties = []
    for word in words:
        lengths.append(len(word.word))
        difficulties.append(int(word.E))
    heatmap, xedges, yedges = numpy.histogram2d(
        difficulties, lengths, bins=[30, 25], range=[[0, 100], [0, 25]])
    extent = [0, 25, 0, 100]
    matplotlib.pyplot.clf()
    matplotlib.pyplot.axis([0, 25, 0, 100])
    # vmin=1 leaves empty cells unpainted rather than shaded as zero.
    matplotlib.pyplot.imshow(heatmap, vmin=1, extent=extent, aspect="auto",
                             origin="lower")
    matplotlib.pyplot.title(
        "heatmap for words in top {0} % used times".format(N))
    matplotlib.pyplot.xlabel("word length", fontsize=12)
    matplotlib.pyplot.ylabel("word difficulty", fontsize=12)
    buf = StringIO.StringIO()
    matplotlib.pyplot.savefig(buf, format="png", dpi=100)
    Plot(plot=buf.getvalue(), id="heatmap_plot_" + N).put()
    matplotlib.pyplot.close()
    buf.close()
def post(self):
    """Render a log-frequency vs difficulty scatter plot for the top N%
    most-used words and store it as Plot "scatter_plot_<N>"."""
    import matplotlib, matplotlib.pyplot
    N = self.request.get("N")
    old_plot = ndb.Key(Plot, "scatter_plot_" + N).get()
    if old_plot is not None:
        old_plot.key.delete()
    q = GlobalDictionaryWord.query(
        GlobalDictionaryWord.used_times > 0).order(
            -GlobalDictionaryWord.used_times)
    take = int(q.count() * int(N) / 100)
    words = q.fetch(take)
    # Map word -> corpus frequency (uses per million words).
    dict_words = {
        entry.word: entry.frequency
        for entry in ndb.gql('SELECT * FROM WordFrequency').fetch()
    }
    logging.info('{0} words in freq dictionary'.format(len(dict_words)))
    freqs = []
    difficulties = []
    for word in words:
        # Only plot words present in the frequency dictionary.
        if word.word in dict_words:
            freqs.append(dict_words[word.word])
            difficulties.append(int(word.E))
    fig, ax = matplotlib.pyplot.subplots()
    ax.set_title("Scatter plot for words in top {0} % used times".format(N),
                 fontsize=14)
    ax.set_xlabel("uses per million words", fontsize=12)
    ax.set_ylabel("difficulty", fontsize=12)
    ax.grid(True, linestyle='-', color='0.75')
    ax.plot(freqs, difficulties, 'o', color="green", markersize=2)
    ax.set_xscale('log')
    ax.set_ylim([0, 100])
    buf = StringIO.StringIO()
    fig.savefig(buf, format="png", dpi=100)
    Plot(plot=buf.getvalue(), id="scatter_plot_" + N).put()
    matplotlib.pyplot.close()
    buf.close()
def post(self):
    """Recalculate all statistics, one stage per task invocation.

    Stages: 1 wipe aggregated stats, 2 reset per-word stats, 3 clear the
    ignored flag on game logs, 4/5 re-enqueue every GameLog / GameHistory
    for statistic processing.  Each stage is paged via a datastore cursor.
    """
    self.stage = int(self.request.get('stage', 1))
    self.start_cursor = ndb.Cursor(urlsafe=self.request.get('cursor'))
    queue = taskqueue.Queue('logs-processing')
    if self.stage == 1:
        RecalcAllLogs.delete_all_stat()
        self.next_stage()
        self.abort(200)
    elif self.stage == 2:
        words = self.fetch_portion(
            GlobalDictionaryWord.query(GlobalDictionaryWord.used_times > 0))
        # reset_word returns async futures; wait for all of them.
        for fut in map(self.reset_word, words):
            fut.get_result()
    elif self.stage == 3:
        logs = self.fetch_portion(GameLog.query())
        for el in logs:
            # BUG FIX: the original tested `if not el.ignored`, which only
            # re-wrote logs that were already not ignored (a no-op put) and
            # never cleared the flag on ignored ones.  Clear it where set.
            if el.ignored:
                el.ignored = False
                el.put()
    elif self.stage == 4:
        # Py2 map() is eager, so the side-effecting lambda runs here.
        map(
            lambda k: queue.add_async(
                taskqueue.Task(url='/internal/add_game_to_statistic',
                               params={'game_key': k.urlsafe()})),
            self.fetch_portion(GameLog.query(GameLog.ignored == False),
                               keys_only=True))
    elif self.stage == 5:
        map(
            lambda k: queue.add_async(
                taskqueue.Task(url='/internal/add_game_to_statistic',
                               params={'game_key': k.urlsafe()})),
            self.fetch_portion(
                GameHistory.query(GameHistory.ignored == False),
                keys_only=True))
    # Continue paging the current stage, or advance to the next one.
    if self.more and self.cursor:
        self.next_portion()
    else:
        self.next_stage()
def post(self):
    """Render a heatmap of word D against times-used (x capped at 8 uses)
    and store the PNG as the Plot entity keyed "d_plot"."""
    import numpy, matplotlib, matplotlib.pyplot
    old_plot = ndb.Key(Plot, "d_plot").get()
    if old_plot is not None:
        old_plot.key.delete()
    words = GlobalDictionaryWord.query(
        GlobalDictionaryWord.used_times > 0).fetch()
    matplotlib.pyplot.title("heatmap")
    x = []
    y = []
    for word in words:
        x.append(word.used_times)
        y.append(word.D)
    # Fixed x-axis span.  The original also accumulated max(used_times) in
    # the loop above but immediately overwrote it with 8; that dead
    # computation has been removed (behavior unchanged).
    max_used = 8
    heatmap, xedges, yedges = numpy.histogram2d(
        y, x, bins=[30, max_used], range=[[0, 30], [0, max_used]])
    extent = [0, max_used, 0, 30]
    matplotlib.pyplot.clf()
    matplotlib.pyplot.axis(extent)
    matplotlib.pyplot.imshow(heatmap, extent=extent, aspect="auto",
                             origin="lower")
    matplotlib.pyplot.title("heatmap for word used times to D")
    matplotlib.pyplot.xlabel("times used", fontsize=12)
    matplotlib.pyplot.ylabel("word D", fontsize=12)
    rv = StringIO.StringIO()
    matplotlib.pyplot.savefig(rv, format="png", dpi=100)
    Plot(plot=rv.getvalue(), id="d_plot").put()
    matplotlib.pyplot.close()
    rv.close()
def test_delete(self):
    """After the delete task runs, no words or cached JSON remain."""
    request = make_request("/admin/global_dictionary/delete", "POST", True,
                           '0')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(GlobalDictionaryWord.query().count(), 0)
    self.assertEqual(GlobalDictionaryJson.query().count(), 0)
def test_delete(self):
    """Deleting the global dictionary empties both word and JSON kinds."""
    delete_url = "/admin/global_dictionary/delete"
    request = make_request(delete_url, "POST", True, '0')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(GlobalDictionaryWord.query().count(), 0)
    self.assertEqual(GlobalDictionaryJson.query().count(), 0)
def test_delete_from_global_dictionary(self):
    """Removing a complained word clears its complaints and tags the word
    '-deleted' instead of deleting the entity; repeating is idempotent."""
    for letter in ["a", "b", "c", "d"]:
        GlobalDictionaryWord(word=letter, id=letter, tags="").put()
    ComplainedWord(word="c").put()
    ComplainedWord(word="c").put()
    ComplainedWord(word="d").put()
    request = make_request("/admin/complain/remove", "POST", True, 'word=c')
    response = request.get_response(main.app)
    self.assertEqual(response.status_int, 200)
    # Both complaints about "c" are gone; the word itself is only tagged.
    self.assertEqual(ComplainedWord.query().count(), 1)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(
        ndb.Key(GlobalDictionaryWord, "c").get().tags, "-deleted")
    # Removing the same word again leaves everything unchanged.
    ComplainedWord(word="c").put()
    response = request.get_response(main.app)
    self.assertEqual(ComplainedWord.query().count(), 1)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(
        ndb.Key(GlobalDictionaryWord, "c").get().tags, "-deleted")
def post(self):
    """Serialize every word newer than the supplied epoch-millis timestamp
    into a GlobalDictionaryJson entity stamped with the newest word time."""
    since = int(self.request.get("timestamp"))
    max_timestamp = 0
    word_list = []
    for word in GlobalDictionaryWord.query().fetch():
        # Entity timestamps are datetimes; the API talks in epoch millis.
        word_time = int(time.mktime(word.timestamp.timetuple()) * 1000)
        if word_time > since:
            max_timestamp = max(max_timestamp, word_time)
            word_list.append({"word": word.word,
                              "E": word.E,
                              "D": word.D,
                              "U": word.used_times,
                              "tags": word.tags})
    # Skip writing an entity when nothing changed since `since`.
    if word_list:
        GlobalDictionaryJson(json=json.dumps(word_list),
                             timestamp=max_timestamp).put()
def get(self, *args, **kwargs):
    """Render the total-statistics page for the requested tab
    ('info' summary counters or 'daily' time-series breakdowns)."""
    tab = self.request.get('tab', 'info')
    data = {}
    total = TotalStatistics.get()
    if tab == 'info':
        data['words_in_dictionary'] = cache(
            'dict_word', lambda: GlobalDictionaryWord.query().count())
        data['used_words'] = cache(
            'used_words', lambda: GlobalDictionaryWord.query(
                GlobalDictionaryWord.used_times > 0).count())
        longest = cache(
            'longest_explanation', lambda: GlobalDictionaryWord.query().order(
                -GlobalDictionaryWord.total_explanation_time).get())
        if longest:
            data['longest_word'] = longest.word
            data['longest_time'] = longest.total_explanation_time
        data['total_words'] = total.words_used
        data['total_games'] = total.games
        data['games_for_players'] = cache(
            'for_player_count', lambda: GamesForPlayerCount.query().order(
                GamesForPlayerCount.player_count).fetch())
    elif tab == 'daily':
        data['daily_statistics'] = cache(
            'daily',
            lambda: DailyStatistics.query().order(DailyStatistics.date).fetch())
        daily = []
        for el in data['daily_statistics']:
            daily.append((el.games, el.words_used, el.players_participated,
                          el.total_game_duration // 60 * 60,
                          el.date.strftime("%Y-%m-%d")))
        # total.by_hour is a flat per-hour counter; fold it into
        # hour-of-day and day-of-week buckets.  NOTE(review): the "+ 3"
        # presumably aligns hour 0 with the epoch's weekday — confirm.
        by_hour = [0] * 24
        by_day = [0] * 7
        for hour, games in enumerate(total.by_hour):
            by_hour[hour % 24] += games
            by_day[(hour // 24 + 3) % 7] += games
        data['daily'] = daily
        data['by_hour'] = by_hour
        data['by_day'] = by_day
        data['by_hour_and_day'] = total.by_hour
    self.draw_page("statistics/total_statistic", tab=tab, **data)
def test_delete_from_global_dictionary(self):
    """A complaint removal soft-deletes the word (tags it '-deleted'),
    drops its complaints, and is safe to repeat."""
    seed_words = ["a", "b", "c", "d"]
    for w in seed_words:
        GlobalDictionaryWord(word=w, id=w, tags="").put()
    ComplainedWord(word="c").put()
    ComplainedWord(word="c").put()
    ComplainedWord(word="d").put()
    request = make_request("/admin/complain/remove", "POST", True, 'word=c')
    response = request.get_response(main.app)
    self.assertEqual(response.status_int, 200)
    # Only the unrelated complaint about "d" survives; no entity deleted.
    self.assertEqual(ComplainedWord.query().count(), 1)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(
        ndb.Key(GlobalDictionaryWord, "c").get().tags, "-deleted")
    # Second removal of the same word changes nothing.
    ComplainedWord(word="c").put()
    response = request.get_response(main.app)
    self.assertEqual(ComplainedWord.query().count(), 1)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(
        ndb.Key(GlobalDictionaryWord, "c").get().tags, "-deleted")
def get(self, *args, **kwargs):
    """Show statistics for one word, or the overview lists (danger/top/
    bottom/random) when no word is given or found; lists cached 12 h."""
    word = self.request.get('word', None)
    entity = games = top = bottom = rand = danger_top = None
    if word:
        entity = GlobalDictionaryWord.get(word)
    if not entity:
        danger_top = memcache.get("danger_top")
        if not danger_top:
            danger_top = GlobalDictionaryWord.query().order(
                -GlobalDictionaryWord.danger).fetch(limit=10)
            memcache.set("danger_top", danger_top, time=60 * 60 * 12)
        top = memcache.get("words_top")
        if not top:
            top = GlobalDictionaryWord.query().order(
                -GlobalDictionaryWord.E).fetch(limit=10)
            memcache.set("words_top", top, time=60 * 60 * 12)
        bottom = memcache.get("words_bottom")
        if not bottom:
            bottom = GlobalDictionaryWord.query().order(
                GlobalDictionaryWord.E).fetch(limit=10)
            memcache.set("words_bottom", bottom, time=60 * 60 * 12)
        used_q = GlobalDictionaryWord.query().filter(
            GlobalDictionaryWord.used_times > 0)
        c = memcache.get("used_words_count")
        if not c:
            c = used_q.count()
            memcache.set("used_words_count", c, time=60 * 60 * 12)
        # A random window of 10 used words, when there are enough of them.
        if c >= 10:
            rand = used_q.fetch(limit=10, offset=randint(0, c - 10))
    self.draw_page('statistics/word_statistic',
                   word=word,
                   word_entity=entity,
                   top=top if top else [],
                   bottom=bottom if bottom else [],
                   rand=rand if rand else [],
                   danger=danger_top if danger_top else [])
def get(self, *args, **kwargs):
    """Draw the aggregate statistics page; 'info' shows counters,
    'daily' shows per-day series plus hour/weekday histograms."""
    tab = self.request.get('tab', 'info')
    data = {}
    total = TotalStatistics.get()
    if tab == 'info':
        data['words_in_dictionary'] = cache(
            'dict_word', lambda: GlobalDictionaryWord.query().count())
        data['used_words'] = cache(
            'used_words', lambda: GlobalDictionaryWord.query(
                GlobalDictionaryWord.used_times > 0).count())
        longest = cache(
            'longest_explanation', lambda: GlobalDictionaryWord.query().order(
                -GlobalDictionaryWord.total_explanation_time).get())
        if longest:
            data['longest_word'] = longest.word
            data['longest_time'] = longest.total_explanation_time
        data['total_words'] = total.words_used
        data['total_games'] = total.games
        data['games_for_players'] = cache(
            'for_player_count', lambda: GamesForPlayerCount.query().order(
                GamesForPlayerCount.player_count).fetch())
    elif tab == 'daily':
        data['daily_statistics'] = cache(
            'daily',
            lambda: DailyStatistics.query().order(DailyStatistics.date).fetch())
        daily = [(el.games, el.words_used, el.players_participated,
                  el.total_game_duration // 60 * 60,
                  el.date.strftime("%Y-%m-%d"))
                 for el in data['daily_statistics']]
        # Fold the flat per-hour counter into 24 hour-of-day buckets and
        # 7 day-of-week buckets.  NOTE(review): "+ 3" looks like a weekday
        # alignment for hour 0 — confirm against TotalStatistics.
        by_hour = [0] * 24
        by_day = [0] * 7
        for hour, games in enumerate(total.by_hour):
            by_hour[hour % 24] += games
            by_day[(hour // 24 + 3) % 7] += games
        data['daily'] = daily
        data['by_hour'] = by_hour
        data['by_day'] = by_day
        data['by_hour_and_day'] = total.by_hour
    self.draw_page("statistics/total_statistic", tab=tab, **data)
def get(self, *args, **kwargs):
    """Word-statistics page: a single word's entity when requested and
    found, otherwise memcached (12 h) danger/top/bottom/random lists."""
    word = self.request.get("word", None)
    entity = games = top = bottom = rand = danger_top = None
    if word:
        entity = GlobalDictionaryWord.get(word)
    if not entity:
        danger_top = memcache.get("danger_top")
        if not danger_top:
            danger_top = GlobalDictionaryWord.query().order(
                -GlobalDictionaryWord.danger).fetch(limit=10)
            memcache.set("danger_top", danger_top, time=60 * 60 * 12)
        top = memcache.get("words_top")
        if not top:
            top = GlobalDictionaryWord.query().order(
                -GlobalDictionaryWord.E).fetch(limit=10)
            memcache.set("words_top", top, time=60 * 60 * 12)
        bottom = memcache.get("words_bottom")
        if not bottom:
            bottom = GlobalDictionaryWord.query().order(
                GlobalDictionaryWord.E).fetch(limit=10)
            memcache.set("words_bottom", bottom, time=60 * 60 * 12)
        used_query = GlobalDictionaryWord.query().filter(
            GlobalDictionaryWord.used_times > 0)
        c = memcache.get("used_words_count")
        if not c:
            c = used_query.count()
            memcache.set("used_words_count", c, time=60 * 60 * 12)
        # Sample a random run of 10 used words when enough exist.
        if c >= 10:
            rand = used_query.fetch(limit=10, offset=randint(0, c - 10))
    self.draw_page(
        "statistics/word_statistic",
        word=word,
        word_entity=entity,
        top=top if top else [],
        bottom=bottom if bottom else [],
        rand=rand if rand else [],
        danger=danger_top if danger_top else [],
    )
def post(self): results = {} functions = {} for function in Function.query().fetch(): exec function.code in functions results[function.name] = [] for index, word in enumerate(GlobalDictionaryWord.query().fetch()): for name, result in results: res = functions[name](word) if res is not None: if len(result) <= 50: result.append(Elem(res, word.word)) else: heapq.heappushpop(result, Elem(res, word.word)) for name, result in results: FunctionResult(top=result, id=name).put()
def post(self): results = {} functions = {} for function in Function.query().fetch(): exec function.code in functions results[function.name] = [] for index, word in enumerate(GlobalDictionaryWord.query().fetch()): for name, result in results: res = functions[name](word) if res is not None: if len(result) <= 50: result.append(Elem(res, word.word)) else: heapq.heappushpop(result, Elem(res, word.word)) for name, result in results: FunctionResult(top=result, id=name).put()
def post(self):
    """Regenerate each dictionary's GCS JSON snapshot and atomically swap
    its gcs_key, deleting the previous snapshot afterwards.

    Fix: with fewer than 100 words `len(words) // 100` evaluated to 0 and
    `i // chunk_size` raised ZeroDivisionError; clamp to at least 1.
    """
    for dictionary in Dictionary.query():
        data_object = []
        # Snapshot name is the current epoch second.
        key = str(int(time.time()))
        words = GlobalDictionaryWord.query(
            GlobalDictionaryWord.deleted == False,
            GlobalDictionaryWord.lang == dictionary.key.id()).order(
                GlobalDictionaryWord.E).fetch()
        # Bucket words into ~100 difficulty percentiles by E-order.
        chunk_size = max(1, len(words) // 100)
        for i, word in enumerate(words):
            data_object.append({"word": word.word,
                                "diff": i // chunk_size,
                                "used": word.used_times,
                                "tags": word.tags})
        output_file = gcs.open(get_gcs_filename(key), "w", "application/json")
        json.dump(data_object, output_file)
        output_file.close()
        # Point the dictionary at the new snapshot before deleting the old.
        old_key = dictionary.gcs_key
        dictionary.gcs_key = key
        dictionary.put()
        if old_key:
            gcs.delete(get_gcs_filename(old_key))
def post(self): results = {} functions = {} names = [] for function in Function.query().fetch(): exec function.code in functions results[function.name] = [] names.append(function.name) for index, word in enumerate(GlobalDictionaryWord.query().fetch()): for function_name in names: res = functions[function_name](word) if res is not None: if len(results[function_name]) <= 50: results[function_name].append(elem(res, word.word)) else: heapq.heappushpop(results[function_name], elem(res, word.word)) for function_name in results: top50 = {i.word: i.res for i in results[function_name]} taskqueue.add(url='/internal/statistics/functions/update/task_queue/push_results', params={'name': function_name, 'top': json.dumps(top50)})
def post(self):
    """Build the E-vs-word-length heatmap for the top N% most-used words,
    saving the rendered PNG under the Plot key "heatmap_plot_<N>"."""
    import numpy, matplotlib, matplotlib.pyplot
    N = self.request.get("N")
    stale = ndb.Key(Plot, "heatmap_plot_" + N).get()
    if stale is not None:
        stale.key.delete()
    query = GlobalDictionaryWord.query(
        GlobalDictionaryWord.used_times > 0).order(
            -GlobalDictionaryWord.used_times)
    sample_size = int(query.count() * int(N) / 100)
    words = query.fetch(sample_size)
    matplotlib.pyplot.title("heatmap")
    word_lengths = []
    word_scores = []
    for word in words:
        word_lengths.append(len(word.word))
        word_scores.append(int(word.E))
    heatmap, xedges, yedges = numpy.histogram2d(
        word_scores, word_lengths, bins=[30, 25], range=[[0, 100], [0, 25]])
    extent = [0, 25, 0, 100]
    matplotlib.pyplot.clf()
    matplotlib.pyplot.axis([0, 25, 0, 100])
    # vmin=1 keeps zero-count cells blank instead of colored.
    matplotlib.pyplot.imshow(heatmap, vmin=1, extent=extent, aspect="auto",
                             origin="lower")
    matplotlib.pyplot.title(
        "heatmap for words in top {0} % used times".format(N))
    matplotlib.pyplot.xlabel("word length", fontsize=12)
    matplotlib.pyplot.ylabel("word difficulty", fontsize=12)
    png_buffer = StringIO.StringIO()
    matplotlib.pyplot.savefig(png_buffer, format="png", dpi=100)
    Plot(plot=png_buffer.getvalue(), id="heatmap_plot_" + N).put()
    matplotlib.pyplot.close()
    png_buffer.close()
def post(self):
    """Drive a full statistics recalculation as a staged, cursor-paged
    task chain.

    Stages: 1 delete aggregated stats, 2 reset used words, 3 clear the
    ignored flag on game logs, 4/5 re-enqueue GameLog / GameHistory
    entities for the add_game_to_statistic handler.
    """
    self.stage = int(self.request.get('stage', 1))
    self.start_cursor = ndb.Cursor(urlsafe=self.request.get('cursor'))
    queue = taskqueue.Queue('logs-processing')
    if self.stage == 1:
        RecalcAllLogs.delete_all_stat()
        self.next_stage()
        self.abort(200)
    elif self.stage == 2:
        words = self.fetch_portion(
            GlobalDictionaryWord.query(
                GlobalDictionaryWord.used_times > 0))
        # reset_word returns futures; block until each completes.
        for fut in map(self.reset_word, words):
            fut.get_result()
    elif self.stage == 3:
        logs = self.fetch_portion(GameLog.query())
        for el in logs:
            # BUG FIX: the original `if not el.ignored` rewrote already
            # non-ignored logs (useless puts) and left ignored ones
            # untouched; the reset stage must clear the set flags.
            if el.ignored:
                el.ignored = False
                el.put()
    elif self.stage == 4:
        # Py2 map() is eager: the enqueue side effects happen here.
        map(
            lambda k: queue.add_async(
                taskqueue.Task(url='/internal/add_game_to_statistic',
                               params={'game_key': k.urlsafe()})),
            self.fetch_portion(GameLog.query(GameLog.ignored == False),
                               keys_only=True))
    elif self.stage == 5:
        map(
            lambda k: queue.add_async(
                taskqueue.Task(url='/internal/add_game_to_statistic',
                               params={'game_key': k.urlsafe()})),
            self.fetch_portion(
                GameHistory.query(GameHistory.ignored == False),
                keys_only=True))
    # Page within the stage while more results remain, else advance.
    if self.more and self.cursor:
        self.next_portion()
    else:
        self.next_stage()
def post(self):
    """Produce the frequency-vs-difficulty scatter plot (log x-axis) for
    the top N% most-used words; stored as Plot "scatter_plot_<N>"."""
    import matplotlib, matplotlib.pyplot
    N = self.request.get("N")
    stale = ndb.Key(Plot, "scatter_plot_" + N).get()
    if stale is not None:
        stale.key.delete()
    query = GlobalDictionaryWord.query(
        GlobalDictionaryWord.used_times > 0).order(
            -GlobalDictionaryWord.used_times)
    sample_size = int(query.count() * int(N) / 100)
    words = query.fetch(sample_size)
    # word -> uses-per-million lookup table.
    dict_words = {
        freq_entity.word: freq_entity.frequency
        for freq_entity in ndb.gql('SELECT * FROM WordFrequency').fetch()
    }
    logging.info('{0} words in freq dictionary'.format(len(dict_words)))
    frequencies = []
    scores = []
    for word in words:
        # Words absent from the frequency dictionary are skipped.
        if word.word in dict_words:
            frequencies.append(dict_words[word.word])
            scores.append(int(word.E))
    fig, ax = matplotlib.pyplot.subplots()
    ax.set_title("Scatter plot for words in top {0} % used times".format(N),
                 fontsize=14)
    ax.set_xlabel("uses per million words", fontsize=12)
    ax.set_ylabel("difficulty", fontsize=12)
    ax.grid(True, linestyle='-', color='0.75')
    ax.plot(frequencies, scores, 'o', color="green", markersize=2)
    ax.set_xscale('log')
    ax.set_ylim([0, 100])
    png_buffer = StringIO.StringIO()
    fig.savefig(png_buffer, format="png", dpi=100)
    Plot(plot=png_buffer.getvalue(), id="scatter_plot_" + N).put()
    matplotlib.pyplot.close()
    png_buffer.close()
def post(self):
    """Build the D-vs-times-used heatmap (x capped at 8) and store the
    PNG under the Plot key "d_plot"."""
    import numpy, matplotlib, matplotlib.pyplot
    stale = ndb.Key(Plot, "d_plot").get()
    if stale is not None:
        stale.key.delete()
    words = GlobalDictionaryWord.query(
        GlobalDictionaryWord.used_times > 0).fetch()
    matplotlib.pyplot.title("heatmap")
    x = []
    y = []
    for word in words:
        x.append(word.used_times)
        y.append(word.D)
    # Hard x-axis cap.  The original computed max(used_times) in the loop
    # and then unconditionally overwrote it with 8; the dead computation
    # has been dropped (behavior unchanged).
    max_used = 8
    heatmap, xedges, yedges = numpy.histogram2d(
        y, x, bins=[30, max_used], range=[[0, 30], [0, max_used]])
    extent = [0, max_used, 0, 30]
    matplotlib.pyplot.clf()
    matplotlib.pyplot.axis(extent)
    matplotlib.pyplot.imshow(heatmap, extent=extent, aspect="auto",
                             origin="lower")
    matplotlib.pyplot.title("heatmap for word used times to D")
    matplotlib.pyplot.xlabel("times used", fontsize=12)
    matplotlib.pyplot.ylabel("word D", fontsize=12)
    rv = StringIO.StringIO()
    matplotlib.pyplot.savefig(rv, format="png", dpi=100)
    Plot(plot=rv.getvalue(), id="d_plot").put()
    matplotlib.pyplot.close()
    rv.close()
def post(self):
    """Rebuild every dictionary's JSON snapshot in GCS, swap the pointer,
    and garbage-collect the previous snapshot.

    Fix: `len(words) // 100` is 0 when a dictionary has 1-99 words, so
    `i // chunk_size` raised ZeroDivisionError; clamp with max(1, ...).
    """
    for dictionary in Dictionary.query():
        data_object = []
        # New snapshot is keyed by the current epoch second.
        key = str(int(time.time()))
        words = (
            GlobalDictionaryWord.query(
                GlobalDictionaryWord.deleted == False,
                GlobalDictionaryWord.lang == dictionary.key.id()
            )
            .order(GlobalDictionaryWord.E)
            .fetch()
        )
        # Words are E-sorted; bucket them into ~100 difficulty bands.
        chunk_size = max(1, len(words) // 100)
        for i, word in enumerate(words):
            data_object.append(
                {"word": word.word, "diff": i // chunk_size,
                 "used": word.used_times, "tags": word.tags}
            )
        output_file = gcs.open(get_gcs_filename(key), "w", "application/json")
        json.dump(data_object, output_file)
        output_file.close()
        # Swap the live pointer before deleting the superseded snapshot.
        old_key = dictionary.gcs_key
        dictionary.gcs_key = key
        dictionary.put()
        if old_key:
            gcs.delete(get_gcs_filename(old_key))
def test_add(self):
    """End-to-end dictionary flow: add words (deduplicated), rebuild the
    JSON snapshot, and read back through the timestamped get_words API."""
    request = make_request("/internal/global_dictionary/add_words/task_queue",
                           "POST", True, 'json=["a", "b", "c"]')
    response = request.get_response(main.app)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 3)
    # Adding an overlapping set must not duplicate "a" and "b".
    request = make_request("/admin/global_dictionary/add_words", "POST", True,
                           'json=["a", "b", "d"]')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    must_be = [{"E": 50.0, "U": 0, "word": "a", "tags": ""},
               {"E": 50.0, "U": 0, "word": "b", "tags": ""},
               {"E": 50.0, "U": 0, "word": "c", "tags": ""},
               {"E": 50.0, "U": 0, "word": "d", "tags": ""}]
    self.assertEqual(task_response[0].status_int, 200)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(GlobalDictionaryJson.query().count(), 0)
    # Rebuild the JSON snapshot and verify its contents.
    request = make_request("/admin/global_dictionary/update_json", "POST",
                           True, '0')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(GlobalDictionaryJson.query().count(), 1)
    self.assertEqual(task_response[0].status_int, 200)
    server_json = json.loads(GlobalDictionaryJson.query().get().json)
    self.assertEqual(len(must_be), len(server_json))
    for expected in must_be:
        self.assertTrue(
            any(expected["E"] == actual["E"] and
                expected["word"] == actual["word"] and
                expected["tags"] == actual["tags"]
                for actual in server_json))
    # Full fetch through the public API; timestamp must be the newest
    # word's timestamp in epoch milliseconds.
    request = make_request("/api/global_dictionary/get_words/0", "GET", True,
                           '0')
    response = request.get_response(main.app)
    server_json = json.loads(response.body)
    timestamp = server_json["timestamp"]
    words_time = datetime.fromtimestamp(0)
    for word in GlobalDictionaryWord.query().fetch():
        words_time = max(words_time, word.timestamp)
    self.assertEqual(time.mktime(words_time.timetuple()) * 1000, timestamp)
    self.assertEqual(len(must_be), len(server_json["words"]))
    for expected in must_be:
        self.assertTrue(
            any(expected["E"] == actual["E"] and
                expected["word"] == actual["word"] and
                expected["tags"] == actual["tags"]
                for actual in server_json["words"]))
    # Asking for words newer than the latest timestamp yields nothing.
    request = make_request(
        "/api/global_dictionary/get_words/{0}".format(timestamp), "GET", True,
        '0')
    response = request.get_response(main.app)
    self.assertEqual(json.loads(response.body)["words"], [])
    self.assertEqual(json.loads(response.body)["timestamp"], timestamp)
    # Timestamps have second granularity; wait so new words sort later.
    time.sleep(1)
    request = make_request("/admin/global_dictionary/add_words", "POST", True,
                           'json=["f", "g", "h"]')
    request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(task_response[0].status_int, 200)
    request = make_request("/admin/global_dictionary/update_json", "POST",
                           True, '0')
    request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(task_response[0].status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 7)
    self.assertEqual(GlobalDictionaryJson.query().count(), 2)
    request = make_request("/api/global_dictionary/get_words/0", "GET", True,
                           '0')
    response = request.get_response(main.app)
    self.assertEqual(len(json.loads(response.body)["words"]), 7)
def test_add(self):
    """Exercise the whole add/update_json/get_words pipeline, including
    duplicate suppression and incremental fetches by timestamp."""
    request = make_request("/internal/global_dictionary/add_words/task_queue",
                           "POST", True, 'json=["a", "b", "c"]')
    response = request.get_response(main.app)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 3)
    # Overlapping add: only the genuinely new word "d" is created.
    request = make_request("/admin/global_dictionary/add_words", "POST", True,
                           'json=["a", "b", "d"]')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    must_be = [{"E": 50.0, "U": 0, "word": "a", "tags": ""},
               {"E": 50.0, "U": 0, "word": "b", "tags": ""},
               {"E": 50.0, "U": 0, "word": "c", "tags": ""},
               {"E": 50.0, "U": 0, "word": "d", "tags": ""}]
    self.assertEqual(task_response[0].status_int, 200)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(GlobalDictionaryJson.query().count(), 0)
    # Build the JSON snapshot and check each expected record appears.
    request = make_request("/admin/global_dictionary/update_json", "POST",
                           True, '0')
    response = request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(response.status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 4)
    self.assertEqual(GlobalDictionaryJson.query().count(), 1)
    self.assertEqual(task_response[0].status_int, 200)
    server_json = json.loads(GlobalDictionaryJson.query().get().json)
    self.assertEqual(len(must_be), len(server_json))
    for expected in must_be:
        found = False
        for actual in server_json:
            if (expected["E"] == actual["E"]
                    and expected["word"] == actual["word"]
                    and expected["tags"] == actual["tags"]):
                found = True
                break
        self.assertTrue(found)
    # API fetch from timestamp 0 returns everything; the reported
    # timestamp equals the newest word timestamp in milliseconds.
    request = make_request("/api/global_dictionary/get_words/0", "GET", True,
                           '0')
    response = request.get_response(main.app)
    server_json = json.loads(response.body)
    timestamp = server_json["timestamp"]
    words_time = datetime.fromtimestamp(0)
    for word in GlobalDictionaryWord.query().fetch():
        words_time = max(words_time, word.timestamp)
    self.assertEqual(time.mktime(words_time.timetuple()) * 1000, timestamp)
    self.assertEqual(len(must_be), len(server_json["words"]))
    for expected in must_be:
        found = False
        for actual in server_json["words"]:
            if (expected["E"] == actual["E"]
                    and expected["word"] == actual["word"]
                    and expected["tags"] == actual["tags"]):
                found = True
                break
        self.assertTrue(found)
    # Incremental fetch from the newest timestamp must be empty.
    request = make_request(
        "/api/global_dictionary/get_words/{0}".format(timestamp), "GET",
        True, '0')
    response = request.get_response(main.app)
    self.assertEqual(json.loads(response.body)["words"], [])
    self.assertEqual(json.loads(response.body)["timestamp"], timestamp)
    # Second-granularity timestamps: sleep so the next batch is newer.
    time.sleep(1)
    request = make_request("/admin/global_dictionary/add_words", "POST", True,
                           'json=["f", "g", "h"]')
    request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(task_response[0].status_int, 200)
    request = make_request("/admin/global_dictionary/update_json", "POST",
                           True, '0')
    request.get_response(main.app)
    task_response = self.run_tasks(1)
    self.assertEqual(task_response[0].status_int, 200)
    self.assertEqual(GlobalDictionaryWord.query().count(), 7)
    self.assertEqual(GlobalDictionaryJson.query().count(), 2)
    request = make_request("/api/global_dictionary/get_words/0", "GET", True,
                           '0')
    response = request.get_response(main.app)
    self.assertEqual(len(json.loads(response.body)["words"]), 7)