def test__sync_all_jobs_from_dir(self):
    """A file synced from a directory should be linked into every attached job and spawn tasks."""
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job1 = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    job2 = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    directory = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file],
        'jobs_to_sync': {'job_ids': [job1.id, job2.id]}
    }, self.session)
    # Attach the directory to both jobs in 'sync' mode so the sync manager picks them up.
    for job in [job1, job2]:
        job.update_attached_directories(self.session, [{
            'directory_id': directory.id,
            'selected': 'sync'
        }])
    log = regular_log.default()
    sync_manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=log,
        job=None)
    # Name-mangled private method: __sync_all_jobs_from_dir.
    sync_manager._JobDirectorySyncManager__sync_all_jobs_from_dir(
        file, directory, directory, create_tasks=True)
    dir_link = self.session.query(WorkingDirFileLink).filter(
        WorkingDirFileLink.file_id == file.id,
        WorkingDirFileLink.working_dir_id == job1.directory_id)
    dir_link2 = self.session.query(WorkingDirFileLink).filter(
        WorkingDirFileLink.file_id == file.id,
        WorkingDirFileLink.working_dir_id == job2.directory_id)
    self.assertTrue(dir_link.first() is not None)
    self.assertTrue(dir_link2.first() is not None)
    # Both active jobs should have had a task created for the synced file.
    task1 = self.session.query(Task).filter(Task.job_id == job1.id)
    task2 = self.session.query(Task).filter(Task.job_id == job2.id)
    self.assertTrue(task1.first() is not None)
    self.assertTrue(task2.first() is not None)
def test_serialize_attached_elements(self):
    """serialize_attached_elements() should return all relations with their metadata keys."""
    issue = Discussion.new(
        session=self.session,
        title='test',
        description='description',
        project_id=self.project.id,
        status='open'
    )
    job = data_mocking.create_job({'name': 'my-test-job'}, self.session)
    file = data_mocking.create_file(
        {'project_id': job.project.id, 'job_id': job.id}, self.session)
    rel1 = issue.attach_element(
        session=self.session,
        element={'type': 'file', 'id': file.id}
    )
    rel2 = issue.attach_element(
        session=self.session,
        element={'type': 'job', 'id': job.id}
    )
    elements = issue.serialize_attached_elements(self.session)
    element_ids = [x['id'] for x in elements]
    # 3 = the two explicitly attached elements plus one additional relation —
    # presumably created by Discussion.new; TODO confirm.
    self.assertEqual(len(elements), 3)
    self.assertTrue('type' in elements[0])
    self.assertTrue('discussion_id' in elements[0])
    self.assertTrue('id' in elements[0])
    self.assertTrue('created_time' in elements[0])
    self.assertTrue(rel1.id in element_ids)
    self.assertTrue(rel2.id in element_ids)
def test_merge_task(self):
    """merge_task() should link the task's file into the job's completion directory exactly once."""
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    original_file = data_mocking.create_file(
        {'project_id': self.project.id}, self.session)
    completion_dir = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [original_file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'completion_directory_id': completion_dir.id
    }, self.session)
    task = data_mocking.create_task({
        'name': 'test task',
        'file': file,
        'job': job,
        'file_original': original_file
    }, self.session)
    task_complete.merge_task(self.session, job, task)
    self.session.commit()
    file_link = self.session.query(WorkingDirFileLink).filter(
        WorkingDirFileLink.working_dir_id == task.job.completion_directory_id,
        WorkingDirFileLink.file_id == task.file_id).all()
    # The merged file must appear exactly once in the completion directory.
    self.assertEqual(len(file_link), 1)
def test_serialize(self):
    """Discussion.serialize() should include top-level fields and all attached elements."""
    issue = Discussion.new(
        session=self.session,
        title='test',
        description='description',
        project_id=self.project.id,
        status='open'
    )
    job = data_mocking.create_job({'name': 'my-test-job'}, self.session)
    file = data_mocking.create_file(
        {'project_id': job.project.id, 'job_id': job.id}, self.session)
    rel1 = issue.attach_element(
        session=self.session,
        element={'type': 'file', 'id': file.id}
    )
    rel2 = issue.attach_element(
        session=self.session,
        element={'type': 'job', 'id': job.id}
    )
    issue_data = issue.serialize(self.session)
    # 3 = the two attached elements plus one additional relation —
    # presumably created by Discussion.new; TODO confirm.
    self.assertEqual(len(issue_data['attached_elements']), 3)
    self.assertTrue('id' in issue_data)
    self.assertTrue('created_time' in issue_data)
    self.assertTrue('description' in issue_data)
    self.assertTrue('title' in issue_data)
    self.assertTrue('project_id' in issue_data)
    self.assertTrue('status' in issue_data)
def test_launch_job(self):
    """launch_job() should complete the JobLaunch and invoke AfterLaunchControl.main once."""
    # Create mock tasks
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'type': "Normal",
        'attached_directories': [attach_dir1]
    }, self.session)
    # Patch the heavy post-launch logic so only queue/state handling is exercised.
    with patch.object(AfterLaunchControl, 'main', return_value=True) as launch_control_main:
        job_launch = data_mocking.create_job_launch({'job_id': job.id}, self.session)
        job_launch_queue = data_mocking.create_job_launch_queue_element(
            {'job_launch_id': job_launch.id}, self.session)
        launch_handler = task_template_launch_handler.TaskTemplateLauncherThread()
        launch_handler.launch_job(
            session=self.session,
            task_template_queue_element=job_launch_queue)
        self.session.commit()
        job_launch = JobLaunch.get_by_id(
            session=self.session, job_launch_id=job_launch.id)
        self.assertEqual(job_launch.status, 'completed')
        self.assertEqual(job_launch.job_launch_info, 'Job Launched Successfully.')
        launch_control_main.assert_called_once()
def test_job_observable_remove_observer(self):
    """Removing a directory observer should clear the job's completion_directory_id."""
    project = self.project_data['project']
    new_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    job = data_mocking.create_job({
        'project': project,
        'completion_directory_id': new_dir.id
    }, session=self.session)
    job_observable = task_file_observers.JobObservable(
        session=self.session, log={}, job=job)
    dir_observer = task_file_observers.DirectoryJobObserver(
        session=self.session,
        log={},
        directory=new_dir,
        job_observable=job_observable)
    job_observable._remove_observer(dir_observer)
    self.session.commit()
    self.session.flush()
    updated_job = self.session.query(Job).filter(Job.id == job.id).first()
    self.assertEqual(updated_job.completion_directory_id, None)
def test_job_resync_api(self):
    """POST /job/resync should succeed with basic project auth and report resync_result True."""
    # Create mock job.
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'project': self.project
    }, self.session)
    request_data = {
        'task_template_id': job.id,
    }
    endpoint = "/api/v1/project/" + job.project.project_string_id + "/job/resync"
    auth_api = common_actions.create_project_auth(
        project=job.project, session=self.session)
    credentials = b64encode(
        "{}:{}".format(auth_api.client_id, auth_api.client_secret).encode()).decode('utf-8')
    response = self.client.post(
        endpoint,
        data=json.dumps(request_data),
        headers={
            'directory_id': str(self.project.directory_default_id),
            'Authorization': 'Basic {}'.format(credentials)
        })
    data = response.json
    self.assertEqual(response.status_code, 200)
    self.assertEqual(data['resync_result'], True)
def test_job_view_core(self):
    """job_view_core() should serialize the listed jobs for an authenticated user."""
    num_jobs = 3
    metadata_proposed = {
        'builder_or_trainer': {'mode': 'builder'},
        'limit': num_jobs,
        'my_jobs_only': False,
        'project_string_id': self.project.project_string_id
    }
    all_jobs = []
    for i in range(0, num_jobs):
        job = data_mocking.create_job({
            'name': 'my-test-job-{}'.format(i),
            'project': self.project
        }, self.session)
        all_jobs.append(job)
    self.session.commit()
    with self.app.test_request_context():
        common_actions.add_auth_to_session(flask.session, self.project.users[0])
        result = job_view_core(
            self.session,
            metadata_proposed,
            output_mode="serialize",
            user=self.project.users[0])
        logger.info(result)
        # NOTE(review): expects one fewer than created — presumably one job is
        # filtered out by job_view_core; confirm the intended count.
        self.assertEqual(len(result), num_jobs - 1)
def test_filter_by_project(self):
    """filter_by_project() should exclude jobs belonging to other projects."""
    other_project_data = data_mocking.create_project_with_context({
        'users': [{
            'username': '******',
            'email': '*****@*****.**',
            'password': '******',
        }]
    }, self.session)
    other_project = other_project_data['project']
    other_job = data_mocking.create_job({
        'name': 'my-testother-job-{}'.format(1),
        'project': other_project
    }, self.session)
    query = self.session.query(Job)
    with self.app.test_request_context():
        common_actions.add_auth_to_session(flask.session, self.project.users[0])
        result = filter_by_project(
            session=self.session,
            project_string_id=self.project.project_string_id,
            query=query)
        jobs = result.all()
        job_ids = [x.id for x in jobs]
        # The job from the other project must not leak into this project's listing.
        self.assertTrue(other_job.id not in job_ids)
def test_check_if_jobs_to_launch(self):
    """check_if_jobs_to_launch() should dequeue the pending launch and call launch_job once."""
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'type': "Normal",
        'attached_directories': [attach_dir1]
    }, self.session)
    launch_handler = task_template_launch_handler.TaskTemplateLauncherThread()
    job_launch = data_mocking.create_job_launch({'job_id': job.id}, self.session)
    job_launch_queue = data_mocking.create_job_launch_queue_element(
        {'job_launch_id': job_launch.id}, self.session)
    # Patch launch_job so we only verify the queue-scanning behavior.
    with patch.object(
            task_template_launch_handler.TaskTemplateLauncherThread,
            'launch_job',
            return_value=True) as launch_control_main:
        launch_handler.check_if_jobs_to_launch()
        launch_control_main.assert_called_once()
def test_task_template_launch_core(self):
    """task_template_launch_core() returns the job on success, False on None, and activates the job."""
    # Create mock fixtures (label/file are needed in the DB even though not
    # referenced again below).
    label = data_mocking.create_label({
        'name': 'mylabel',
    }, self.session)
    label_file = data_mocking.create_label_file({
        'label': label,
        'project_id': self.project.id
    }, self.session)
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'type': "Normal",
    }, self.session)
    result = task_template_launch_handler.task_template_launch_core(
        self.session, job)
    self.assertEqual(result, job)
    # A missing task template should be rejected, not raise.
    result = task_template_launch_handler.task_template_launch_core(
        self.session, None)
    self.session.commit()
    self.assertEqual(result, False)
    job = Job.get_by_id(self.session, job_id=job.id)
    self.assertEqual(job.status, 'active')
def test_attach_element(self):
    """attach_element() should persist a DiscussionRelation for both file and job elements."""
    # Create mock issue
    issue = data_mocking.create_discussion({
        'project_id': self.project.id,
        'name': 'test',
        'title': 'test',
    }, self.session)
    job = data_mocking.create_job({'name': 'my-test-job'}, self.session)
    file = data_mocking.create_file(
        {'project_id': job.project.id, 'job_id': job.id}, self.session)
    issue_relation_file = issue.attach_element(
        session=self.session,
        element={'type': 'file', 'id': file.id}
    )
    issue_relation_job = issue.attach_element(
        session=self.session,
        element={'type': 'job', 'id': job.id}
    )
    self.session.commit()
    # NOTE(review): both queries are identical and fetch the same first row;
    # consider filtering by relation id to distinguish file vs job relations.
    query_rel_file = self.session.query(DiscussionRelation).first()
    query_rel_job = self.session.query(DiscussionRelation).first()
    self.assertIsNotNone(issue_relation_file)
    self.assertIsNotNone(issue_relation_job)
    self.assertIsNotNone(query_rel_file)
    self.assertIsNotNone(query_rel_job)
def test_execute_after_launch_strategy(self):
    """Standard after-launch strategy should create one task per attached file."""
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'status': 'active',
        'type': "Normal",
        'attached_directories': [attach_dir1]
    }, self.session)
    strategy = StandardTaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default())
    strategy.execute_after_launch_strategy()
    self.session.commit()
    tasks_count = self.session.query(Task).filter(
        Task.job_id == job.id).count()
    # One attached file -> exactly one task.
    self.assertEqual(tasks_count, 1)
def test_task_next_issue(self):
    """next-task-with-issues endpoint should return the nearest later task that has an issue."""
    # Create mock tasks
    num_tasks = 5
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project
    }, self.session)
    all_tasks = []
    for i in range(0, num_tasks):
        task = data_mocking.create_task({
            'name': 'task{}'.format(i),
            'job': job
        }, self.session)
        all_tasks.append(task)
    issue1 = data_mocking.create_discussion({
        'title': 'test',
        'description': 'test',
        'project_id': self.project.id
    }, self.session)
    issue1.attach_element(
        session=self.session,
        element={'type': 'task', 'id': all_tasks[2].id})
    issue2 = data_mocking.create_discussion({
        'title': 'test2',
        'description': 'test2',
        'project_id': self.project.id
    }, self.session)
    issue2.attach_element(
        session=self.session,
        element={'type': 'task', 'id': all_tasks[4].id})
    with self.client.session_transaction() as session:
        endpoint = "/api/v1/task/{}/next-task-with-issues".format(all_tasks[0].id)
        credentials = b64encode("{}:{}".format(
            self.auth_api.client_id,
            self.auth_api.client_secret).encode()).decode('utf-8')
        session['Authorization'] = credentials
        common_actions.add_auth_to_session(session, self.project.users[0])
        response = self.client.post(
            endpoint,
            data=json.dumps({}),
            headers={
                'directory_id': str(self.project.directory_default_id),
                'Authorization': 'Basic {}'.format(credentials)
            })
        data = response.json
        self.assertEqual(response.status_code, 200)
        # From task 0, the next task with an issue is task index 2.
        self.assertEqual(data['task_id'], all_tasks[2].id)
def test_issue_new_web(self):
    """POST /issues/new should create an issue with attached job, file, and task elements."""
    # Create mock job.
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'project': self.project
    }, self.session)
    file = data_mocking.create_file({
        'project_id': job.project.id,
        'job_id': job.id
    }, self.session)
    task = data_mocking.create_task({
        'name': 'task{}'.format(1),
        'job': job,
        'file': file,
    }, self.session)
    discussion_title = 'new_issue'
    discussion_description = 'new_issue_description'
    request_data = {
        'title': discussion_title,
        'description': discussion_description,
        'attached_elements': [
            {'type': 'job', 'id': job.id},
            {'type': 'file', 'id': file.id},
            {'type': 'task', 'id': task.id}
        ]
    }
    endpoint = "/api/v1/project/" + job.project.project_string_id + "/issues/new"
    auth_api = common_actions.create_project_auth(
        project=job.project, session=self.session)
    credentials = b64encode(
        "{}:{}".format(auth_api.client_id, auth_api.client_secret).encode()).decode('utf-8')
    response = self.client.post(
        endpoint,
        data=json.dumps(request_data),
        headers={
            'directory_id': str(job.project.directory_default_id),
            'Authorization': 'Basic {}'.format(credentials)
        })
    data = response.json
    self.assertEqual(response.status_code, 200)
    self.assertEqual(data['issue']['title'], discussion_title)
    self.assertEqual(data['issue']['description'], discussion_description)
def test_get_video_frame_from_task(self):
    """Frame-list endpoint should return signed URLs for the requested frame numbers in order."""
    # Create mock task
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project
    }, self.session)
    video_file = data_mocking.create_file({
        'project_id': self.project.id,
        'type': 'video'
    }, self.session)
    frames_list = []
    # Mock Frames
    for i in range(0, 10):
        frame = data_mocking.create_file({
            'project_id': self.project.id,
            'video_parent_file_id': video_file.id,
            'frame_number': i,
            'type': 'frame'
        }, self.session)
        frames_list.append(frame)
    task = data_mocking.create_task({
        'name': 'tasktest',
        'job': job,
        'file': video_file
    }, self.session)
    request_data = {
        'frame_list': [1, 2, 3],
        'project_string_id': self.project.project_string_id,
        'mode_data': 'list'
    }
    endpoint = "/api/v1/task/{}/video/single/{}/frame-list/".format(task.id, video_file.id)
    with self.client.session_transaction() as session:
        auth_api = common_actions.create_project_auth(
            project=self.project, session=self.session)
        credentials = b64encode("{}:{}".format(
            auth_api.client_id, auth_api.client_secret).encode()).decode('utf-8')
        session['Authorization'] = credentials
        common_actions.add_auth_to_session(session, self.project.users[0])
        response = self.client.post(
            endpoint,
            data=json.dumps(request_data),
            headers={
                'directory_id': str(job.project.directory_default_id),
                'Authorization': 'Basic {}'.format(credentials)
            })
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.json['url_list']), 3)
        # URLs must come back ordered by the requested frame numbers 1..3.
        for expected_frame, elm in enumerate(response.json['url_list'], start=1):
            self.assertEqual(elm['frame_number'], expected_frame)
def test_task_template_new_exam(self):
    """task_template_new_exam() should leave the job in 'active' status."""
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'type': "Normal",
    }, self.session)
    # TODO: analyze if provision_root_tasks_is needed now.
    task_template_launch_handler.task_template_new_exam(self.session, job)
    self.session.commit()
    job = Job.get_by_id(self.session, job_id=job.id)
    self.assertEqual(job.status, 'active')
def test_job_pin_core(self):
    """job_pin_core() should toggle the job's pinned state on each call."""
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'project': self.project
    }, self.session)
    # First call pins the job.
    job_result = job_pin_core(self.session, job_id=job.id)
    self.assertTrue(job_result['is_pinned'])
    # Second call unpins it again.
    job_result = job_pin_core(self.session, job_id=job.id)
    self.assertFalse(job_result['is_pinned'])
def test_execute_after_launch_strategy(self):
    """ScaleAI after-launch strategy should create tasks and an ExternalMap for the template."""
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    connection = data_mocking.create_connection({
        'name': 'test',
        'integration_name': 'scale_ai',
        'project_id': self.project.id
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'status': 'active',
        'type': "Normal",
        'attached_directories': [attach_dir1],
        'interface_connection_id': connection.id
    }, self.session)
    strategy = ScaleAITaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default()
    )
    # Stub out the external ScaleAI project creation — no network in tests.
    with patch.object(
            ScaleAITaskTemplateAfterLaunchStrategy,
            'create_scale_ai_project',
            return_value={'id': '123', 'name': 'scaleaitest'}):
        strategy.execute_after_launch_strategy()
        commit_with_rollback(self.session)
        tasks_count = self.session.query(Task).filter(
            Task.job_id == job.id).count()
        self.assertEqual(tasks_count, 1)
        external_maps = ExternalMap.get(
            session=self.session,
            job_id=job.id,
            diffgram_class_string='task_template',
            connection_id=connection.id,
            type=connection.integration_name
        )
        self.assertNotEqual(external_maps, None)
def test_sync_events_list_api(self):
    """POST /sync-events/list should return every sync event created for the job."""
    # Create mock sync events.
    num_events = 5
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project
    }, self.session)
    all_sync_events = []
    for i in range(0, num_events):
        sync_event = data_mocking.create_sync_event({
            'description': 'syncevent{}'.format(i),
            'job_id': job.id,
            'project': self.project
        }, self.session)
        all_sync_events.append(sync_event)
    request_payload = {
        'metadata': {
            'job_id': job.id,
            'mode_data': 'list',
            'project_string_id': self.project.project_string_id
        }
    }
    endpoint = "/api/v1/sync-events/list"
    with self.client.session_transaction() as session:
        auth_api = common_actions.create_project_auth(
            project=self.project, session=self.session)
        credentials = b64encode("{}:{}".format(
            auth_api.client_id, auth_api.client_secret).encode()).decode('utf-8')
        session['Authorization'] = credentials
        common_actions.add_auth_to_session(session, self.project.users[0])
        response = self.client.post(
            endpoint,
            data=json.dumps(request_payload),
            headers={
                'directory_id': str(job.project.directory_default_id),
                'Authorization': 'Basic {}'.format(credentials)
            })
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.json['sync_events_list']), num_events)
def test_notify_task_completion(self):
    """Notifying observers on task completion should trigger one interservice request."""
    project = self.project_data['project']
    new_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    old_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    job = data_mocking.create_job({
        'project': project,
        'completion_directory_id': new_dir.id,
        'output_dir_action': 'copy'
    }, session=self.session)
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    task_1 = data_mocking.create_task({
        'name': 'task1',
        'job': job,
        'file': file,
        'incoming_directory_id': old_dir.id
    }, self.session)
    # Patch the interservice transmit so no real request leaves the test.
    with patch.object(
            regular_methods,
            'transmit_interservice_request_after_commit') as mock:
        job_observable = task_file_observers.JobObservable(
            session=self.session,
            log=regular_log.default(),
            job=job,
            task=task_1)
        dir_observer = task_file_observers.DirectoryJobObserver(
            session=self.session,
            log=regular_log.default(),
            directory=new_dir,
            job_observable=job_observable)
        job_observable.add_new_directory_observer(dir_observer)
        job_observable.notify_all_observers(defer=False)
        self.session.commit()
        mock.assert_called_once()
def test_job_resync_core(self):
    """job_resync_core() should succeed for a valid task template with no errors logged."""
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'project': self.project
    }, self.session)
    auth_api = common_actions.create_project_auth(
        project=job.project, session=self.session)
    resync_result, log = job_resync_core(
        session=self.session,
        project=self.project,
        member=auth_api.member,
        task_template_id=job.id,
        log=regular_log.default())
    self.assertTrue(resync_result)
    self.assertEqual(len(log['error'].keys()), 0)
def test_job_update_core(self):
    """job_update_core() should delegate to new_or_update_core for draft jobs and persist updates for launched ones."""
    # Create mock job.
    project_data = data_mocking.create_project_with_context({
        'users': [{
            'username': '******',
            'email': '*****@*****.**',
            'password': '******',
        }]
    }, self.session)
    project = project_data['project']
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'project_id': project.id
    }, self.session)
    user = project_data['users'][0]
    input_data = {
        'name': 'my_new_name',
        'share_type': 'project',
        'permission': [],
        'label_mode': '',
        'passes_per_file': 1,
        'instance_type': 'box',
        'launch_datetime': datetime.datetime.now(),
        'file_count': 0,
        'file_handling': 0,
        'label_file_list': [],
        'member_list_ids': [user.member.id],
        'type': '',
    }
    log = {'error': {}, 'info': {}}
    # Draft jobs are routed through new_or_update_core.
    with patch(
            'methods.task.task_template.job_new_or_update.new_or_update_core',
            return_value=('called!', {})) as mock_method:
        job.status = 'draft'
        job_new_or_update.job_update_core(self.session, job, job.project, input_data, log)
        mock_method.assert_called_once()
    # Launched jobs go through the real update path, so the change persists.
    job.status = 'launched'
    job_new_or_update.job_update_core(self.session, job, job.project, input_data, log)
    updated_job = Job.get_by_id(self.session, job.id)
    self.assertEqual(updated_job.name, input_data['name'])
    self.assertEqual(updated_job.label_dict['label_file_list'],
                     input_data['label_file_list'])
def test_serialize_for_list(self):
    """serialize() on a fresh discussion should expose the expected top-level keys."""
    issue = Discussion.new(
        session=self.session,
        title='test',
        description='description',
        project_id=self.project.id,
        status='open'
    )
    # Fixture job kept for DB context, even though not referenced below.
    job = data_mocking.create_job({'name': 'my-test-job'}, self.session)
    issue_data = issue.serialize(self.session)
    self.assertTrue('id' in issue_data)
    self.assertTrue('created_time' in issue_data)
    self.assertTrue('description' in issue_data)
    self.assertTrue('title' in issue_data)
    self.assertTrue('project_id' in issue_data)
    self.assertTrue('status' in issue_data)
def test_job_update_api(self):
    """POST /job/update should persist every updated field, visible from a fresh session."""
    # Create mock job.
    job = data_mocking.create_job({'name': 'my-test-job'}, self.session)
    file = data_mocking.create_file({
        'project_id': job.project.id,
        'job_id': job.id
    }, self.session)
    request_data = {
        'name': 'new name',
        'instance_type': 'polygon',
        'share_type': 'project',
        'type': 'exam',
        'label_file_list': [{'id': file.id}],
        'file_handling': 'isolate',
        'job_id': job.id,
    }
    endpoint = "/api/v1/project/" + job.project.project_string_id + "/job/update"
    auth_api = common_actions.create_project_auth(
        project=job.project, session=self.session)
    credentials = b64encode(
        "{}:{}".format(auth_api.client_id, auth_api.client_secret).encode()).decode('utf-8')
    response = self.client.post(
        endpoint,
        data=json.dumps(request_data),
        headers={
            'directory_id': str(job.project.directory_default_id),
            'Authorization': 'Basic {}'.format(credentials)
        })
    self.assertEqual(response.status_code, 200)
    # Use a fresh session so we verify the committed state, not cached objects.
    new_session = sessionMaker.session_factory()
    updated_job = Job.get_by_id(new_session, job.id)
    self.assertEqual(updated_job.name, request_data['name'])
    self.assertEqual(updated_job.instance_type, request_data['instance_type'])
    self.assertEqual(updated_job.share_type, request_data['share_type'])
    self.assertEqual(updated_job.type, request_data['type'])
    self.assertEqual(updated_job.file_handling, request_data['file_handling'])
def test_job_pin_api(self):
    """POST /job/{id}/pin should toggle is_pinned on successive calls."""
    # Create mock job.
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'project': self.project
    }, self.session)
    request_data = {}
    endpoint = "/api/v1/job/{}/pin".format(job.id)
    with self.client.session_transaction() as session:
        auth_api = common_actions.create_project_auth(
            project=self.project, session=self.session)
        credentials = b64encode("{}:{}".format(
            auth_api.client_id, auth_api.client_secret).encode()).decode('utf-8')
        session['Authorization'] = credentials
        common_actions.add_auth_to_session(session, self.project.users[0])
        # First call pins.
        response = self.client.post(
            endpoint,
            data=json.dumps(request_data),
            headers={
                'directory_id': str(job.project.directory_default_id),
                'Authorization': 'Basic {}'.format(credentials)
            })
        data = response.json
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['job']['is_pinned'], True)
        # Second call unpins.
        response = self.client.post(
            endpoint,
            data=json.dumps(request_data),
            headers={
                'directory_id': str(job.project.directory_default_id),
                'Authorization': 'Basic {}'.format(credentials)
            })
        data = response.json
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['job']['is_pinned'], False)
def test_job_list_api(self):
    """POST /job/list should return all jobs created for the project."""
    # Create mock jobs.
    num_jobs = 5
    all_jobs = []
    for i in range(0, num_jobs):
        job = data_mocking.create_job({
            'name': 'my-test-job-{}'.format(i),
            'project': self.project
        }, self.session)
        all_jobs.append(job)
    request_data = {
        'metadata': {
            'builder_or_trainer': {'mode': 'builder'},
            'limit': 5,
            'project_string_id': self.project.project_string_id
        }
    }
    endpoint = "/api/v1/job/list"
    with self.client.session_transaction() as session:
        auth_api = common_actions.create_project_auth(
            project=self.project, session=self.session)
        credentials = b64encode("{}:{}".format(
            auth_api.client_id, auth_api.client_secret).encode()).decode('utf-8')
        session['Authorization'] = credentials
        common_actions.add_auth_to_session(session, self.project.users[0])
        response = self.client.post(
            endpoint,
            data=json.dumps(request_data),
            headers={
                'directory_id': str(job.project.directory_default_id),
                'Authorization': 'Basic {}'.format(credentials)
            })
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.json['Job_list']), num_jobs)
def test__add_file_into_job(self):
    """__add_file_into_job should link the file and only create tasks when the job is active."""
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job = data_mocking.create_job({'project': project}, session=self.session)
    directory = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    log = regular_log.default()
    sync_manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=log,
        job=job)
    # Name-mangled private method: __add_file_into_job.
    sync_manager._JobDirectorySyncManager__add_file_into_job(
        file, directory, create_tasks=True)
    commit_with_rollback(self.session)
    dir_link = self.session.query(WorkingDirFileLink).filter(
        WorkingDirFileLink.file_id == file.id,
        WorkingDirFileLink.working_dir_id == job.directory_id)
    self.assertTrue(dir_link.first() is not None)
    # Job is not active yet, so no task should have been created.
    task = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task.first() is None)
    # If job has correct status task should be created.
    job.status = 'active'
    self.session.add(job)
    commit_with_rollback(self.session)
    sync_manager._JobDirectorySyncManager__add_file_into_job(
        file, directory, create_tasks=True)
    task = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task.first() is not None)
    commit_with_rollback(self.session)
    # Retest for case of an existing file/task.
    mngr = SyncEventManager.create_sync_event_and_manager(
        session=self.session, status='started')
    sync_manager._JobDirectorySyncManager__add_file_into_job(
        file, directory, create_tasks=True, sync_event_manager=mngr)
    task = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task.first() is not None)
def test_test_task_next_issue_core(self):
    """Smoke test: task_next_issue_core() should run without raising for a task with later issues."""
    # Create mock tasks
    num_tasks = 5
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project
    }, self.session)
    all_tasks = []
    for i in range(0, num_tasks):
        task = data_mocking.create_task({
            'name': 'task{}'.format(i),
            'job': job
        }, self.session)
        all_tasks.append(task)
    issue1 = data_mocking.create_discussion({
        'title': 'test',
        'description': 'test',
        'project_id': self.project.id
    }, self.session)
    issue1.attach_element(
        session=self.session,
        element={'type': 'task', 'id': all_tasks[2].id})
    issue2 = data_mocking.create_discussion({
        'title': 'test2',
        'description': 'test2',
        'project_id': self.project.id
    }, self.session)
    issue2.attach_element(
        session=self.session,
        element={'type': 'task', 'id': all_tasks[4].id})
    # NOTE(review): no assertion on the result — consider asserting the
    # returned task, as the API-level test does.
    task_next_issue.task_next_issue_core(
        session=self.session, task_id=all_tasks[0].id)
def test_job_observable_creation(self):
    """Constructing a JobObservable should register one observer for the completion directory."""
    project = self.project_data['project']
    completion_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'completion_directory_id': completion_dir.id
    }, self.session)
    job_observable = task_file_observers.JobObservable(
        session=self.session, log={}, job=job)
    self.assertEqual(len(job_observable.dir_observer_list), 1)
    dir_observer = job_observable.dir_observer_list[0]
    self.assertEqual(dir_observer.directory.id, job.completion_directory_id)
    self.assertEqual(dir_observer.job_observable, job_observable)