def get_and_register_data(cls, session: Session, incomplete_data_key: Tuple[str, List[str]]):
    """Fetch comments of a video from the niconico API and register the requested ones.

    Any target comment id that could not be matched against the fetched
    comments is recorded as irregular so it is not retried forever.

    :param session: active DB session; committed here only on the error paths —
        the success path is presumably committed by the caller (confirm).
    :param incomplete_data_key: tuple of (video id, list of comment ids to register).
    :raises VideoDataGetError: video id recorded as irregular, job marked ABORTED, then re-raised.
    :raises CommentDataGetError: comment ids recorded as irregular, job marked ABORTED, then re-raised.
    """
    target_video_id, target_comment_ids = incomplete_data_key
    api_connector = NiconicoAPIConnector()
    try:
        comments = api_connector.get_comments(target_video_id)
    except VideoDataGetError:
        IrregularVideoIdDAO(session).add(target_video_id)
        JobLogDAO(session).add_or_update(cls.TYPE, JobLogStatus.ABORTED)
        session.commit()
        raise
    except CommentDataGetError:
        IrregularCommentIdDAO(session).add(target_video_id, target_comment_ids)
        JobLogDAO(session).add_or_update(cls.TYPE, JobLogStatus.ABORTED)
        session.commit()
        raise

    # Index fetched comments by id once (O(n+m) instead of the previous
    # nested scan). setdefault keeps the FIRST occurrence when the API
    # returns duplicate comments, matching the original first-match behavior.
    comment_by_id = {}
    for comment in comments.comments:
        comment_by_id.setdefault(comment.id, comment)

    completed_comment_ids = set()
    for target_comment_id in target_comment_ids:
        comment = comment_by_id.get(target_comment_id)
        # Skip unmatched ids and ids already registered in this run
        # (duplicate targets must not be inserted twice).
        if comment is None or comment.id in completed_comment_ids:
            continue
        CommentDAO(session).add(id=comment.id,
                                video_id=target_video_id,
                                text=comment.text,
                                posted_at=comment.posted_at,
                                posted_by=comment.posted_by,
                                point=comment.point,
                                was_deleted=comment.was_deleted,
                                official_nicoru=comment.official_nicoru)
        completed_comment_ids.add(comment.id)

    # Record everything we could not register (may be an empty list,
    # as in the original code — the DAO is expected to tolerate that).
    IrregularCommentIdDAO(session).add(
        target_video_id,
        [x for x in target_comment_ids if x not in completed_comment_ids])
def test_no_wait(self):
    """wait_to_run_next_process must not sleep for this job-log timestamp."""
    with db_test_session() as session:
        with mock.patch.object(get_incomplete_data, 'sleep') as sleep_mock:
            # setup: a DONE job log with updated_at pushed 2 minutes ahead
            job_log = JobLogDAO(session).add_or_update(
                JobLogType.VIDEO, JobLogStatus.DONE)
            job_log.updated_at = datetime.now() + timedelta(minutes=2)
            session.commit()

            # run
            IncompleteDataGetter.wait_to_run_next_process(job_log)

            # verify: no waiting happened
            sleep_mock.assert_not_called()
def test_success(self):
    """DBDataExporter writes the expected CSV report and marks the job DONE."""
    with db_test_session() as session:
        # setup: one nicoru on one stored comment
        NicoruDAO(session).nicoru(TestData.VIDEO_ID_1, TestData.COMMENT_ID_1)
        CommentDAO(session).add(
            id=TestData.COMMENT_ID_1,
            video_id=TestData.VIDEO_ID_1,
            text=TestData.Comment.TEXT_1,
            posted_at=TestData.Comment.POSTED_AT_1,
            posted_by=TestData.Comment.POSTED_BY_1,
            point=TestData.Comment.POINT_1,
            was_deleted=TestData.Comment.WAS_DELETED_1,
            official_nicoru=TestData.Comment.OFFICIAL_NICORU_1,
        )
        session.commit()
        HardConstants.App = HardConstants.Test

        # run
        DBDataExporter.execute()

        # verify: header row plus one data row
        expected_header = '"動画ID","コメ番","コメント","擬似ニコる","公式ニコる"\n'
        expected_row = '"{vid}","{cid}","{c}","1","{o_n}"\n'.format(
            vid=TestData.VIDEO_ID_1,
            cid=TestData.COMMENT_ID_1,
            c=TestData.Comment.TEXT_1,
            o_n=TestData.Comment.OFFICIAL_NICORU_1)
        with open(HardConstants.App.REPORT_CSV, 'r') as f:
            assert f.readlines() == [expected_header, expected_row]
        assert JobLogDAO(session).find_by_type(
            JobLogType.DB_DATA_EXPORT).status == JobLogStatus.DONE
def get_and_register_data(cls, session: Session, incomplete_data_key: str):
    """Fetch metadata of one video from the niconico API and store it.

    :param session: active DB session; committed here only on the error path.
    :param incomplete_data_key: id of the video whose data is missing.
    :raises VideoDataGetError: id recorded as irregular and the VIDEO job
        marked ABORTED before re-raising.
    """
    api_connector = NiconicoAPIConnector()
    video_id = incomplete_data_key
    try:
        video_info = api_connector.get_video_info(video_id)
        video_api_info = api_connector.get_video_api_info(video_id)
    except VideoDataGetError:
        # remember the broken id so it is not fetched again, then abort the job
        IrregularVideoIdDAO(session).add(video_id)
        JobLogDAO(session).add_or_update(JobLogType.VIDEO,
                                         JobLogStatus.ABORTED)
        session.commit()
        raise

    v_dao = VideoDAO(session)
    if v_dao.find(video_info.video_id):
        # case of "My Memory". e.g. sm1158689 and 1200835239 (My Memory of sm1158689)
        # niconico API returns video id (sm1158689) when asking for My Memory id (1200835239).
        # this causes a PK error on the video table, so replace the returned
        # video id with the My Memory video id we were asked for.
        video_info.video_id = video_id
    v_dao.add(
        id=video_info.video_id,
        thumbnail=video_info.thumbnail_url,
        posted_at=video_api_info.posted_at,
        length=video_info.length,
        title=video_info.title,
        watch_url=video_info.watch_url,
        posted_by=video_info.author_user_id,
        posted_by_name=video_info.author_nickname,
    )
def test_video_data_get_error(self):
    """A failing video API call records the id as irregular and aborts the job."""
    with db_test_session() as session:
        # setup
        vid = TestData.VIDEO_ID_1
        TestDataUtil.add_video(session, id=vid)
        video_api_info = VideoAPIInfo(video_id=vid,
                                      thread_id=1,
                                      user_id='aiueo700',
                                      ms='a',
                                      user_key='b')
        video_info = VideoInfo(TestDataLevel2.VideoObject.VO_1)
        assert video_api_info.video_id == video_info.video_id == vid

        with mock.patch('requests.Session', DummySession), \
                mock.patch.object(NiconicoAPIConnector, 'get_video_info',
                                  side_effect=VideoDataGetError), \
                mock.patch.object(NiconicoAPIConnector, 'get_video_api_info',
                                  side_effect=VideoDataGetError):
            # run
            with pytest.raises(VideoDataGetError):
                IncompleteVideoDataGetter.get_and_register_data(session, vid)

            # verify: exactly one irregular record for our id, job ABORTED
            irregular_ids = session.query(
                IrregularVideoId).all()  # type: List[IrregularVideoId]
            assert len(irregular_ids) == 1
            assert irregular_ids[0].video_id == TestData.VIDEO_ID_1
            assert JobLogDAO(session).find_by_type(
                JobLogType.VIDEO).status == JobLogStatus.ABORTED
def test_success(self):
    """A successful upload leaves the job log in DONE state."""
    with db_test_session() as session:
        # run, with the actual Dropbox transfer stubbed out
        with mock.patch.object(DropboxUploader, 'upload'):
            DropboxUploader.execute()

        # verify
        job_log = JobLogDAO(session).find_by_type(JobLogType.UPLOAD_TO_STORAGE)
        assert job_log.status == JobLogStatus.DONE
def test_failure(self):
    """A failing upload re-raises and leaves the job log in ABORTED state."""
    with db_test_session() as session:
        # run, with the upload forced to blow up
        with mock.patch.object(DropboxUploader, 'upload',
                               side_effect=Exception):
            with pytest.raises(Exception):
                DropboxUploader.execute()

        # verify
        job_log = JobLogDAO(session).find_by_type(JobLogType.UPLOAD_TO_STORAGE)
        assert job_log.status == JobLogStatus.ABORTED
def execute(cls):
    """Upload the exported data to storage, tracking progress in the job log.

    Transitions UPLOAD_TO_STORAGE to RUNNING, performs the upload, then marks
    DONE on success or ABORTED on any failure (re-raising the error).
    """
    with db_session() as session:
        dao = JobLogDAO(session)
        try:
            dao.add_or_update(JobLogType.UPLOAD_TO_STORAGE,
                              JobLogStatus.RUNNING)
            session.commit()
            cls.upload()
        except Exception:
            dao.add_or_update(JobLogType.UPLOAD_TO_STORAGE,
                              JobLogStatus.ABORTED)
            session.commit()
            # bare raise preserves the original traceback
            # (the previous `raise e` re-raised from this frame)
            raise
        else:
            # DONE is not committed here; presumably db_session() commits
            # on clean exit — confirm against its implementation.
            dao.add_or_update(JobLogType.UPLOAD_TO_STORAGE, JobLogStatus.DONE)
def test_failure(self):
    """A failing export re-raises and leaves the job log in ABORTED state."""
    with db_test_session() as session:
        # setup
        HardConstants.App = HardConstants.Test

        # run, forcing the public-data export step to fail
        with mock.patch.object(DBDataExporter, 'export_public_data',
                               side_effect=Exception):
            with pytest.raises(Exception):
                DBDataExporter.execute()

        # verify
        status = JobLogDAO(session).find_by_type(
            JobLogType.DB_DATA_EXPORT).status
        assert status == JobLogStatus.ABORTED
def execute(cls):
    """Export DB data (public + restore dumps), compress it, track job status.

    Transitions DB_DATA_EXPORT to RUNNING, runs the export steps, then marks
    DONE on success or ABORTED on any failure (re-raising the error).
    """
    with db_session() as session:
        dao = JobLogDAO(session)
        try:
            dao.add_or_update(JobLogType.DB_DATA_EXPORT, JobLogStatus.RUNNING)
            session.commit()
            os.makedirs(HardConstants.App.DB_DUMP_DIR, exist_ok=True)
            cls.export_public_data()
            cls.export_data_for_restore()
            cls.compress_exported_data()
        except Exception:
            dao.add_or_update(JobLogType.DB_DATA_EXPORT, JobLogStatus.ABORTED)
            session.commit()
            # bare raise preserves the original traceback
            # (the previous `raise e` re-raised from this frame)
            raise
        else:
            # DONE is not committed here; presumably db_session() commits
            # on clean exit — confirm against its implementation.
            dao.add_or_update(JobLogType.DB_DATA_EXPORT, JobLogStatus.DONE)
def execute(cls) -> int:
    """Run one incomplete-data collection cycle and return a ReturnCode.

    Skips out early when a previous run is still active or there is nothing
    to fetch; otherwise marks the job RUNNING, fetches/registers the data,
    and marks it DONE. On any error the session is rolled back and the job
    is marked ABORTED in a fresh session before re-raising.
    """
    with db_session() as session:
        try:
            job_log_dao = JobLogDAO(session)
            job_log = job_log_dao.find_by_type(cls.TYPE)
            # exit if previous process is running
            if cls.previous_process_is_running(job_log):
                return cls.ReturnCode.PREVIOUS_PROCESS_IS_RUNNING
            try:
                incomplete_data_key = cls.get_incomplete_data_key(session)
            except IncompleteDataGetter.NoIncompleteDataError:
                # exit if there is no data to get
                logger.debug('there is no data to get.')
                return cls.ReturnCode.NO_INCOMPLETE_DATA
            # wait to run next process (throttle against the last run)
            cls.wait_to_run_next_process(job_log)
            # mark the job as running
            job_log_dao.add_or_update(cls.TYPE, JobLogStatus.RUNNING)
            session.commit()
            # get and register data
            cls.get_and_register_data(session, incomplete_data_key)
            session.commit()
            # mark the job as done
            job_log_dao.add_or_update(cls.TYPE, JobLogStatus.DONE)
            session.commit()
            return cls.ReturnCode.SUCCESS
        except Exception:
            if session:
                session.rollback()
            # the original session may be unusable after the rollback,
            # so record the failure through a dedicated session
            with db_session() as session_for_error:
                # mark the job as aborted
                JobLogDAO(session_for_error).add_or_update(
                    cls.TYPE, JobLogStatus.ABORTED)
                session_for_error.commit()
            raise
def test(self):
    """JobLogMailer.execute runs end-to-end with mail sending stubbed out."""
    with db_test_session() as session:
        with mock.patch.object(mail, 'send', return_value=None):
            # setup file: a dump zip of exactly one chunk
            HardConstants.App = HardConstants.Test
            TestDataUtil.make_test_file(HardConstants.App.DB_DUMP_ZIP,
                                        DropboxUploader.CHUNK_SIZE)
            # setup DB: both jobs finished
            dao = JobLogDAO(session)
            for job_type in (JobLogType.VIDEO, JobLogType.COMMENT):
                dao.add_or_update(job_type, JobLogStatus.DONE)
            session.commit()

            # run
            JobLogMailer.execute()
def make_mail_body(cls) -> str:
    """Assemble the status-mail text: job-log lines plus progress summaries."""
    with db_session() as session:
        # one line per job-log entry
        logs_text = "\n".join(
            "{}: {}({})".format(log.type, log.updated_at, log.status)
            for log in JobLogDAO(session).list())

        vp = NicoruDAO(session).get_status_of_video_data_getting()
        video_progress_text = (
            "got: {}, irregular: {}, all(to get): {}, progress: {}".format(
                vp[0], vp[1], vp[2], vp[3]))

        cp = NicoruDAO(session).get_status_of_comment_data_getting()
        comment_progress_text = (
            "got: {}, irregular: {}, irregular2: {}, all(to get): {}, progress: {}".format(
                cp[0], cp[1], cp[2], cp[3], cp[4]))

        sections = [
            "[Job Status]",
            logs_text,
            "",
            "[Progress of video data completion]",
            video_progress_text,
            "",
            "[Progress of comment data completion]",
            comment_progress_text,
        ]
        return "\n".join(sections)
def test_previous_process_running(self):
    """execute() returns PREVIOUS_PROCESS_IS_RUNNING when a recent RUNNING log exists."""
    with db_test_session() as session:
        # setup: a RUNNING job log updated moments ago
        job_log = JobLogDAO(session).add_or_update(
            JobLogType.VIDEO, JobLogStatus.RUNNING)
        # FIX: was `job_log.updated_at == datetime.now() - ...` — a no-op
        # comparison; the timestamp must be assigned for the setup to apply.
        job_log.updated_at = datetime.now() - timedelta(seconds=5)
        session.commit()

        vid = TestData.VIDEO_ID_1
        video_api_info = VideoAPIInfo(video_id=vid,
                                      thread_id=1,
                                      user_id='aiueo700',
                                      ms='a',
                                      user_key='b')
        video_info = VideoInfo(TestDataLevel2.VideoObject.VO_1)
        assert video_api_info.video_id == video_info.video_id == vid
        with mock.patch('requests.Session', DummySession), \
                mock.patch.object(NiconicoAPIConnector, 'get_video_info',
                                  return_value=video_info), \
                mock.patch.object(NiconicoAPIConnector, 'get_video_api_info',
                                  return_value=video_api_info):
            # run, verify
            assert IncompleteVideoDataGetter.execute(
            ) == IncompleteVideoDataGetter.ReturnCode.PREVIOUS_PROCESS_IS_RUNNING