def test_clear_tokens():
    timestamp = int(time.time())
    clear_tokens()
    # TOKEN_CLEANUP_TIME is configured in minutes; clear_tokens() compares seconds
    cleanupTime = int(os.getenv("TOKEN_CLEANUP_TIME")) * 60
    assert cleanupTime != 0
    con = connect_to_db()
    with con:
        # Start from a clean user-token table and insert one expired row per table
        execute_update(con, "TRUNCATE TABLE `token_to_user_id`", [])
        execute_insert(
            con,
            "INSERT `token_to_chat_id`(`ID`, `TOKEN`, `CHAT_ID`, `TSTAMP`) VALUES (0, '111', 9999, 0)",
            [])
        execute_insert(
            con,
            "INSERT `token_to_user_id`(`ID`, `TOKEN`, `USER_ID`, `TSTAMP`) VALUES (0, '111', 9999, 0)",
            [])
    clear_tokens()
    con = connect_to_db()
    with con:
        # Every expired token should now be gone from both tables
        cnt = select_and_fetch_first_column(
            con,
            "SELECT COUNT(*) FROM `token_to_chat_id` WHERE `TOKEN` IN (SELECT `TOKEN` FROM `token_to_user_id` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime) + ")",
            [])
        assert cnt == 0
        cnt = select_and_fetch_first_column(
            con,
            "SELECT COUNT(*) FROM `token_to_user_id` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime),
            [])
        assert cnt == 0
def test_update_repos():
    cwd = os.getcwd()
    con = connect_to_db()
    cache_data_id = 0
    with con:
        # Register a repository and a stale cache entry for the test_cron branch
        execute_insert(
            con,
            "INSERT INTO `repository_settings`(`ID`, `CHAT_ID`, `REPO_SITE`, `REPO_USER_NAME`, `REPO_SAME_NAME`, `USER`, `PASSWORD`, `LANG_ID`) VALUES (0, 1, 'github.com', 'reviewgramweb', 'reviewgram_tokenize', '', '', 1)",
            [])
        cache_data_id = execute_insert(
            con,
            "INSERT INTO `repository_cache_storage_table`(`ID`, `REPO_SITE`, `REPO_USER_NAME`, `REPO_SAME_NAME`, `BRANCH_ID`, `TSTAMP`) VALUES (0, 'github.com', 'reviewgramweb', 'reviewgram_tokenize', 'test_cron', 0)",
            [])
    assert cache_data_id != 0
    execute_updating_repos()
    con = connect_to_db()
    with con:
        # The update run must refresh the cache timestamp; reset it to force a second run
        tstamp = select_and_fetch_first_column(
            con,
            "SELECT TSTAMP FROM `repository_cache_storage_table` WHERE `ID` = " + str(cache_data_id),
            [])
        assert tstamp > 0
        execute_update(
            con,
            "UPDATE `repository_cache_storage_table` SET `TSTAMP` = 0 WHERE `ID` = " + str(cache_data_id),
            [])
    execute_updating_repos()
    assert os.path.isdir("reviewgramweb_reviewgram_tokenize_test_cron")
    os.chdir("reviewgramweb_reviewgram_tokenize_test_cron")
    # The checked-out working copy should be on the expected branch and commit
    result = subprocess.run(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out = result.stdout.decode("UTF-8").replace("\n", "").replace("\r", "")
    assert out == "test_cron"
    result = subprocess.run(['git', 'rev-parse', '--verify', 'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out = result.stdout.decode("UTF-8").replace("\n", "").replace("\r", "")
    assert out == "6299476ecd1b39c4b48392a8afee766bc0085e58"
    os.chdir("..")
    if os.path.isdir("reviewgramweb_reviewgram_tokenize_test_cron"):
        shutil.rmtree("reviewgramweb_reviewgram_tokenize_test_cron")
    con = connect_to_db()
    with con:
        execute_update(con, "DELETE FROM `repository_settings` WHERE `CHAT_ID` = 1", [])
        execute_update(con, "DELETE FROM `repository_cache_storage_table` WHERE `ID` = " + str(cache_data_id), [])
    os.chdir(cwd)
def test_clear_locks():
    timestamp = int(time.time())
    clear_locks()
    cleanupTime = int(os.getenv("LOCK_TIME"))
    assert cleanupTime != 0
    con = connect_to_db()
    with con:
        execute_insert(
            con,
            "INSERT `repo_locks`(`ID`, `TOKEN`, `CHAT_ID`, `TSTAMP`) VALUES (0, '111', 9999, 0)",
            [])
    clear_locks()
    con = connect_to_db()
    with con:
        cnt = select_and_fetch_first_column(
            con,
            "SELECT COUNT(*) FROM `repo_locks` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime),
            [])
        assert cnt == 0
def test_clear_chats():
    timestamp = int(time.time())
    clear_chats()
    cleanupTime = int(os.getenv("CHAT_CACHE_TOKEN_SECONDS"))
    assert cleanupTime != 0
    con = connect_to_db()
    with con:
        execute_insert(
            con,
            "INSERT `token_to_chat_id`(`ID`, `TOKEN`, `CHAT_ID`, `TSTAMP`) VALUES (0, '111', 9999, 0)",
            [])
    clear_chats()
    con = connect_to_db()
    with con:
        cnt = select_and_fetch_first_column(
            con,
            "SELECT COUNT(*) FROM `token_to_chat_id` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime),
            [])
        assert cnt == 0
def test_python_language_get_autocompletions():
    path = os.path.dirname(os.path.abspath(__file__))
    env_path = Path(path + "/../") / '.env'
    load_dotenv(dotenv_path=env_path)
    f = PythonLanguage()
    con = connect_to_db()
    with con:
        autocompletions = f.getAutocompletions(con, ["import"], "", 1, 1, -485373794, "master")
        # The original compared the list itself to 0, which always passes;
        # the intent is that at least one completion is returned
        assert len(autocompletions) != 0
def test_recognize():
    timestamp = int(time.time())
    con = connect_to_db()
    rowId1 = 0
    rowId2 = 0
    with con:
        # Queue two recognition tasks: one with the Python language (LANG_ID = 1)
        # and one with no language (LANG_ID = 0)
        fileName = "/root/reviewgram/records/5b5d33a5-d8f1-4dea-b0a7-7167ff62a37a-1615124541.669079.ogg"
        langId = 1
        content = ''
        repoId = 1
        rowId1 = execute_insert(
            con,
            "INSERT INTO `recognize_tasks`(FILENAME, LANG_ID, CONTENT, REPO_ID) VALUES (%s, %s, %s, %s)",
            [fileName, langId, content, repoId])
        fileName = "/root/reviewgram/records/63ac4191-083f-4e37-a7c5-e8509a696530-1614002679.8371766.ogg"
        langId = 0
        content = ''
        repoId = 1
        rowId2 = execute_insert(
            con,
            "INSERT INTO `recognize_tasks`(FILENAME, LANG_ID, CONTENT, REPO_ID) VALUES (%s, %s, %s, %s)",
            [fileName, langId, content, repoId])
    assert rowId1 != 0
    assert rowId2 != 0
    select_and_perform_task()
    select_and_perform_task()
    select_and_perform_task()
    select_and_perform_task()
    con = connect_to_db()
    with con:
        # Both tasks should have produced the expected recognized text
        data = select_and_fetch_first_column(
            con,
            "SELECT `RES` FROM `recognize_tasks` WHERE `ID` = " + str(rowId1),
            [])
        data = data.strip().replace("\r", "").replace("\n", "")
        assert data == "import os"
        data = select_and_fetch_first_column(
            con,
            "SELECT `RES` FROM `recognize_tasks` WHERE `ID` = " + str(rowId2),
            [])
        data = data.strip().replace("\r", "").replace("\n", "")
        assert data == "http"
def clear_chats():
    # Remove chat-id cache entries older than CHAT_CACHE_TOKEN_SECONDS
    path = os.path.dirname(os.path.abspath(__file__))
    env_path = Path(path + "/../") / '.env'
    load_dotenv(dotenv_path=env_path)
    timestamp = int(time.time())
    cleanupTime = int(os.getenv("CHAT_CACHE_TOKEN_SECONDS"))
    con = connect_to_db()
    with con:
        cur = con.cursor()
        cur.execute("DELETE FROM `token_to_chat_id` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime))
        con.commit()
        cur.close()
def clear_locks():
    path = os.path.dirname(os.path.abspath(__file__))
    env_path = Path(path + "/../") / '.env'
    load_dotenv(dotenv_path=env_path)
    timestamp = int(time.time())
    cleanupTime = int(os.getenv("LOCK_TIME"))
    con = connect_to_db()
    with con:
        cur = con.cursor()
        cur.execute("DELETE FROM `repo_locks` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime))
        con.commit()
        cur.close()
def clear_tokens():
    path = os.path.dirname(os.path.abspath(__file__))
    env_path = Path(path + "/../") / '.env'
    load_dotenv(dotenv_path=env_path)
    timestamp = int(time.time())
    # TOKEN_CLEANUP_TIME is treated as minutes and converted to seconds here
    cleanupTime = int(os.getenv("TOKEN_CLEANUP_TIME")) * 60
    con = connect_to_db()
    with con:
        # Drop chat-id mappings tied to expired user tokens first,
        # then drop the expired user tokens themselves
        cur = con.cursor()
        cur.execute(
            "DELETE FROM `token_to_chat_id` WHERE `TOKEN` IN (SELECT `TOKEN` FROM `token_to_user_id` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime) + ")")
        con.commit()
        cur.close()
        cur = con.cursor()
        cur.execute("DELETE FROM `token_to_user_id` WHERE " + str(timestamp) + " - TSTAMP >= " + str(cleanupTime))
        con.commit()
        cur.close()
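# A minimal sketch (not part of the original module) of the same cleanup expressed with
# bound parameters instead of string concatenation. It assumes execute_update() accepts a
# placeholder list, as it does elsewhere in this codebase; the function name
# clear_tokens_parameterized is hypothetical and only illustrative.
def clear_tokens_parameterized():
    timestamp = int(time.time())
    cleanupTime = int(os.getenv("TOKEN_CLEANUP_TIME")) * 60
    con = connect_to_db()
    with con:
        # Same two statements as clear_tokens(), with the numeric values bound as parameters
        execute_update(
            con,
            "DELETE FROM `token_to_chat_id` WHERE `TOKEN` IN (SELECT `TOKEN` FROM `token_to_user_id` WHERE %s - TSTAMP >= %s)",
            [timestamp, cleanupTime])
        execute_update(
            con,
            "DELETE FROM `token_to_user_id` WHERE %s - TSTAMP >= %s",
            [timestamp, cleanupTime])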
def select_and_perform_task():
    expiration_time = 30 * 60
    start = time.perf_counter()
    con = reviewgramdb.connect_to_db()
    perfLogFileName = os.getenv("APP_FOLDER") + "/perf_log.txt"
    fileObject = open(perfLogFileName, 'at')
    with con:
        timestamp = int(time.time())
        # Pick one task that has never been started, or whose previous run started more
        # than expiration_time seconds ago and never finished
        request = "SELECT" \
                  " r.ID, " \
                  " r.FILENAME, " \
                  " r.RES," \
                  " r.LANG_ID," \
                  " r.LOG," \
                  " r.CONTENT," \
                  " r.REPO_ID " \
                  " FROM " \
                  "`recognize_tasks` AS r " \
                  " WHERE " \
                  " ((DATE_START IS NULL) OR (" + str(timestamp) + " - UNIX_TIMESTAMP(`DATE_START`) >= " + str(expiration_time) + ")) AND (DATE_END IS NULL) " \
                  " LIMIT 1 "
        row = reviewgramdb.select_and_fetch_one(con, request, [])
        if row is not None:
            id = row[0]
            fileName = row[1]
            if row[6] is not None:
                repoId = int(row[6])
            else:
                repoId = 0
            # Load the per-repository text replacement table
            table = []
            rows = reviewgramdb.select_and_fetch_all(
                con,
                "SELECT FROM_TEXT, TO_TEXT FROM `replace_tables` WHERE `REPO_ID` = %s ORDER BY `ID` ASC",
                [repoId])
            for localRow in rows:
                table.append([localRow[0], localRow[1]])
            langId = 0
            if row[3] is not None:
                langId = int(row[3])
            lang = None
            if langId == 1:
                lang = PythonLanguage()
            content = ""
            if row[5] is not None:
                content = row[5]
            measure1 = time.perf_counter()
            fileObject.write("Fetching task from db: " + str(measure1 - start) + "\n")
            reviewgramdb.execute_update(
                con,
                "UPDATE `recognize_tasks` SET `DATE_START` = NOW() WHERE `ID` = %s",
                [id])
            if os.path.exists(fileName) and os.path.isfile(fileName):
                try:
                    print(fileName)
                    newFileName = fileName.replace("ogg", "wav")
                    errors = ogg2wav_convert(fileName, newFileName)
                    if len(errors) > 0:
                        print("Errors while converting ogg to wav:" + errors)
                        reviewgramdb.execute_update(
                            con,
                            "UPDATE `recognize_tasks` SET `RES` = %s, `LOG` = %s WHERE `ID` = %s",
                            ['', 'Errors in running ffmpeg ' + errors, id])
                    else:
                        reviewgramdb.execute_update(
                            con,
                            "UPDATE `recognize_tasks` SET `RES` = %s, `LOG` = %s WHERE `ID` = %s",
                            ['', 'Processed ogg 2 wav', id])
                        print("Recognizing...")
                        result = try_recognize(newFileName, table, lang, content)
                        measure2 = time.perf_counter()
                        fileObject.write("Total run for try_recognize: " + str(measure2 - measure1) + "\n")
                        reviewgramdb.execute_update(
                            con,
                            "UPDATE `recognize_tasks` SET `RES` = %s, `LOG` = %s WHERE `ID` = %s",
                            [result, 'Successfully processed result', id])
                        measure3 = time.perf_counter()
                        fileObject.write("Updating result in DB: " + str(measure3 - measure2) + "\n")
                except Exception:
                    print('Exception: ' + traceback.format_exc())
                    reviewgramdb.execute_update(
                        con,
                        "UPDATE `recognize_tasks` SET `RES` = %s, `LOG` = %s WHERE `ID` = %s",
                        ['', 'Exception: ' + traceback.format_exc(), id])
            else:
                reviewgramdb.execute_update(
                    con,
                    "UPDATE `recognize_tasks` SET `RES` = %s, `LOG` = %s WHERE `ID` = %s",
                    ['', 'Unable to find file ', id])
            reviewgramdb.execute_update(
                con,
                "UPDATE `recognize_tasks` SET `DATE_END` = NOW() WHERE `ID` = %s",
                [id])
    # Close the performance log once here; the original closed it on only some paths
    # and leaked the handle when no task was pending or conversion failed
    fileObject.close()
    print("Performed")
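# A minimal sketch (not part of the original module) of how a cron or CLI entry point might
# drive select_and_perform_task(). The .env location mirrors the cleanup helpers above; the
# single invocation per run is an assumption, not the project's actual scheduler.
if __name__ == "__main__":
    path = os.path.dirname(os.path.abspath(__file__))
    env_path = Path(path + "/../") / '.env'
    load_dotenv(dotenv_path=env_path)
    # Process at most one pending recognition task per invocation
    select_and_perform_task()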