def test_notify_task_completion(self):
    """Notifying all observers on a job with output_dir_action='copy' should
    transmit exactly one interservice request after commit.

    Fix: removed the unused ``file_link`` query result — the test only
    asserts on the mocked transmit call, and the query had no side effect.
    """
    project = self.project_data['project']
    new_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    old_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    job = data_mocking.create_job({
        'project': project,
        'completion_directory_id': new_dir.id,
        'output_dir_action': 'copy'
    }, session=self.session)
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    task_1 = data_mocking.create_task({
        'name': 'task1',
        'job': job,
        'file': file,
        'incoming_directory_id': old_dir.id
    }, self.session)
    # Patch the transmit helper so no real interservice call is made.
    with patch.object(regular_methods,
                      'transmit_interservice_request_after_commit') as mock:
        job_observable = task_file_observers.JobObservable(
            session=self.session,
            log=regular_log.default(),
            job=job,
            task=task_1)
        dir_observer = task_file_observers.DirectoryJobObserver(
            session=self.session,
            log=regular_log.default(),
            directory=new_dir,
            job_observable=job_observable)
        job_observable.add_new_directory_observer(dir_observer)
        job_observable.notify_all_observers(defer=False)
        self.session.commit()
        mock.assert_called_once()
def test_job_observable_remove_observer(self):
    """Removing a directory observer should clear the job's
    completion_directory_id once the session is committed."""
    project = self.project_data['project']
    completion_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    job = data_mocking.create_job({
        'project': project,
        'completion_directory_id': completion_dir.id
    }, session=self.session)
    observable = task_file_observers.JobObservable(
        session=self.session, log={}, job=job)
    observer = task_file_observers.DirectoryJobObserver(
        session=self.session,
        log={},
        directory=completion_dir,
        job_observable=observable)
    observable._remove_observer(observer)
    self.session.commit()
    self.session.flush()
    # Re-query the job to observe the persisted state.
    refreshed_job = self.session.query(Job).filter(Job.id == job.id).first()
    self.assertEqual(refreshed_job.completion_directory_id, None)
def test_launch_job(self):
    """launch_job should mark the JobLaunch completed and call
    AfterLaunchControl.main exactly once."""
    attached_file = data_mocking.create_file(
        {'project_id': self.project.id}, self.session)
    attach_dir = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [attached_file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'type': "Normal",
        'attached_directories': [attach_dir]
    }, self.session)
    # Stub the after-launch control so no real launch work happens.
    with patch.object(AfterLaunchControl, 'main',
                      return_value=True) as launch_control_main:
        job_launch = data_mocking.create_job_launch(
            {'job_id': job.id}, self.session)
        queue_element = data_mocking.create_job_launch_queue_element(
            {'job_launch_id': job_launch.id}, self.session)
        handler = task_template_launch_handler.TaskTemplateLauncherThread()
        handler.launch_job(session=self.session,
                           task_template_queue_element=queue_element)
        self.session.commit()
        job_launch = JobLaunch.get_by_id(session=self.session,
                                         job_launch_id=job_launch.id)
        self.assertEqual(job_launch.status, 'completed')
        self.assertEqual(job_launch.job_launch_info,
                         'Job Launched Successfully.')
        launch_control_main.assert_called_once()
def test_execute_after_launch_strategy(self):
    """The standard after-launch strategy on an active job with one attached
    file should create exactly one task.

    Fix: removed the unused ``tasks`` local — the count query already
    backs the assertion.
    """
    file = data_mocking.create_file({'project_id': self.project.id},
                                    self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'status': 'active',
        'type': "Normal",
        'attached_directories': [attach_dir1]
    }, self.session)
    strategy = StandardTaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default())
    strategy.execute_after_launch_strategy()
    self.session.commit()
    tasks_count = self.session.query(Task).filter(
        Task.job_id == job.id).count()
    self.assertEqual(tasks_count, 1)
def test_check_if_jobs_to_launch(self):
    """With one queued JobLaunch element, check_if_jobs_to_launch should
    invoke launch_job exactly once."""
    queued_file = data_mocking.create_file(
        {'project_id': self.project.id}, self.session)
    sync_dir = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [queued_file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'type': "Normal",
        'attached_directories': [sync_dir]
    }, self.session)
    handler = task_template_launch_handler.TaskTemplateLauncherThread()
    job_launch = data_mocking.create_job_launch(
        {'job_id': job.id}, self.session)
    data_mocking.create_job_launch_queue_element(
        {'job_launch_id': job_launch.id}, self.session)
    # Stub launch_job so only the queue-scan behavior is exercised.
    with patch.object(task_template_launch_handler.TaskTemplateLauncherThread,
                      'launch_job',
                      return_value=True) as launch_job_mock:
        handler.check_if_jobs_to_launch()
        launch_job_mock.assert_called_once()
def test_merge_task(self):
    """merge_task should create exactly one WorkingDirFileLink joining the
    task's file to the job's completion directory.

    Fix: removed a leftover debug ``print``.
    """
    file = data_mocking.create_file({'project_id': self.project.id},
                                    self.session)
    original_file = data_mocking.create_file(
        {'project_id': self.project.id}, self.session)
    completion_dir = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [original_file]
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'completion_directory_id': completion_dir.id
    }, self.session)
    task = data_mocking.create_task({
        'name': 'test task',
        'file': file,
        'job': job,
        'file_original': original_file
    }, self.session)
    task_complete.merge_task(self.session, job, task)
    self.session.commit()
    file_link = self.session.query(WorkingDirFileLink).filter(
        WorkingDirFileLink.working_dir_id == task.job.completion_directory_id,
        WorkingDirFileLink.file_id == task.file_id).all()
    self.assertEqual(len(file_link), 1)
def test_file_transfer_core(self):
    """A 'move' between directories succeeds when the source holds the file;
    moving from a directory without a link reports a 'file_link' error."""
    file = data_mocking.create_file({'project_id': self.project.id},
                                    self.session)
    dir_with_file = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    dir_destination = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': []
    }, self.session)
    dir_without_link = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': []
    }, self.session)
    # Happy path: move the file out of the directory that links it.
    move_result = file_transfer_core.file_transfer_core(
        session=self.session,
        source_directory=dir_with_file,
        destination_directory=dir_destination,
        transfer_action='move',
        file=file,
        log=regular_log.default(),
        member=self.member)
    self.assertEqual(len(move_result['error'].keys()), 0)
    # Error path: source directory has no link to the file.
    missing_link_result = file_transfer_core.file_transfer_core(
        session=self.session,
        source_directory=dir_without_link,
        destination_directory=dir_destination,
        transfer_action='move',
        file=file,
        log=regular_log.default(),
        member=self.member)
    self.assertIsNotNone(missing_link_result.get('error').get('file_link'))
def test_execute_after_launch_strategy(self):
    """The Scale AI after-launch strategy should create one task and record
    an ExternalMap for the task template on the given connection.

    Fix: removed the unused ``tasks`` local — the count query already
    backs the assertion.
    """
    file = data_mocking.create_file({'project_id': self.project.id},
                                    self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    connection = data_mocking.create_connection({
        'name': 'test',
        'integration_name': 'scale_ai',
        'project_id': self.project.id
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'status': 'active',
        'type': "Normal",
        'attached_directories': [attach_dir1],
        'interface_connection_id': connection.id
    }, self.session)
    strategy = ScaleAITaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default())
    # Stub the remote project creation so no Scale AI API call is made.
    with patch.object(ScaleAITaskTemplateAfterLaunchStrategy,
                      'create_scale_ai_project',
                      return_value={'id': '123', 'name': 'scaleaitest'}):
        strategy.execute_after_launch_strategy()
        commit_with_rollback(self.session)
        tasks_count = self.session.query(Task).filter(
            Task.job_id == job.id).count()
        self.assertEqual(tasks_count, 1)
        external_maps = ExternalMap.get(
            session=self.session,
            job_id=job.id,
            diffgram_class_string='task_template',
            connection_id=connection.id,
            type=connection.integration_name)
        self.assertNotEqual(external_maps, None)
def test__sync_all_jobs_from_dir(self):
    """Syncing a file from a directory attached to two active jobs should
    link the file into both job directories and create a task per job."""
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job1 = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    job2 = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    directory = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file],
        'jobs_to_sync': {'job_ids': [job1.id, job2.id]}
    }, self.session)
    for job in (job1, job2):
        job.update_attached_directories(self.session, [{
            'directory_id': directory.id,
            'selected': 'sync'
        }])
    sync_manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session, log=regular_log.default(), job=None)
    # Name-mangled call into the private sync helper under test.
    sync_manager._JobDirectorySyncManager__sync_all_jobs_from_dir(
        file, directory, directory, create_tasks=True)
    for job in (job1, job2):
        link = self.session.query(WorkingDirFileLink).filter(
            WorkingDirFileLink.file_id == file.id,
            WorkingDirFileLink.working_dir_id == job.directory_id).first()
        self.assertTrue(link is not None)
        task = self.session.query(Task).filter(
            Task.job_id == job.id).first()
        self.assertTrue(task is not None)
def test__add_file_into_job(self):
    """__add_file_into_job always links the file into the job directory,
    but only creates a task once the job is 'active'; re-adding an existing
    file via a sync event manager keeps the task present."""
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job = data_mocking.create_job({'project': project}, session=self.session)
    directory = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session, log=regular_log.default(), job=job)
    manager._JobDirectorySyncManager__add_file_into_job(
        file, directory, create_tasks=True)
    commit_with_rollback(self.session)
    link_query = self.session.query(WorkingDirFileLink).filter(
        WorkingDirFileLink.file_id == file.id,
        WorkingDirFileLink.working_dir_id == job.directory_id)
    self.assertTrue(link_query.first() is not None)
    # Job is not active yet, so no task should exist.
    task_query = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task_query.first() is None)
    # Once the job is active, adding the file should create a task.
    job.status = 'active'
    self.session.add(job)
    commit_with_rollback(self.session)
    manager._JobDirectorySyncManager__add_file_into_job(
        file, directory, create_tasks=True)
    task_query = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task_query.first() is not None)
    commit_with_rollback(self.session)
    # Re-add the same file/task with a sync event manager attached.
    sync_event_manager = SyncEventManager.create_sync_event_and_manager(
        session=self.session, status='started')
    manager._JobDirectorySyncManager__add_file_into_job(
        file, directory, create_tasks=True,
        sync_event_manager=sync_event_manager)
    task_query = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task_query.first() is not None)
def test_job_observable_creation(self):
    """Creating a JobObservable for a job with a completion directory should
    register exactly one directory observer pointing at that directory.

    Fix: removed a leftover debug ``print``.
    """
    project = self.project_data['project']
    completion_dir = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'completion_directory_id': completion_dir.id
    }, self.session)
    job_observable = task_file_observers.JobObservable(
        session=self.session, log={}, job=job)
    self.assertEqual(len(job_observable.dir_observer_list), 1)
    dir_observer = job_observable.dir_observer_list[0]
    self.assertEqual(dir_observer.directory.id, job.completion_directory_id)
    self.assertEqual(dir_observer.job_observable, job_observable)
def test_create_task_from_file(self):
    """create_task_from_file on an active job with a sync-attached
    directory should produce a task for that file."""
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    directory = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file],
        'jobs_to_sync': {'job_ids': [job.id]}
    }, self.session)
    attached_dirs = [{
        'directory_id': directory.id,
        'nickname': directory.nickname,
        'selected': 'sync'
    }]
    job.update_attached_directories(self.session, attached_dirs,
                                    delete_existing=True)
    self.session.add(job)
    commit_with_rollback(self.session)
    manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=regular_log.default(),
        job=job,
    )
    manager.create_task_from_file(file)
    commit_with_rollback(self.session)
    self.session.flush()
    created_task = self.session.query(Task).filter(
        Task.job_id == job.id).first()
    self.assertTrue(created_task is not None)
def test_threaded_job_resync(self):
    """threaded_job_resync should pick up the two attached files that were
    never added to the job (one of three was pre-added)."""
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'status': 'active',
        'project': self.project
    }, self.session)
    auth_api = common_actions.create_project_auth(
        project=job.project, session=self.session)
    synced_file = data_mocking.create_file(
        {'project_id': self.project.id}, self.session)
    missing_file_a = data_mocking.create_file(
        {'project_id': self.project.id}, self.session)
    missing_file_b = data_mocking.create_file(
        {'project_id': self.project.id}, self.session)
    directory = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [synced_file, missing_file_a, missing_file_b]
    }, self.session)
    job.update_attached_directories(self.session, [{
        'directory_id': directory.id,
        'selected': 'sync'
    }])
    manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session, log=regular_log.default(), job=job)
    # Pre-add only one of the three files, leaving two out of sync.
    manager._JobDirectorySyncManager__add_file_into_job(
        synced_file, directory, create_tasks=True)
    self.session.commit()
    result = threaded_job_resync(task_template_id=job.id,
                                 member_id=auth_api.member_id)
    self.assertEqual(len(result), 2)
def test_remove_job_from_all_dirs(self):
    """remove_job_from_all_dirs with soft_delete=False should delete every
    JobWorkingDir attachment for the directory."""
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    directory = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file],
        'jobs_to_sync': {'job_ids': [job.id]}
    }, self.session)
    attached_dirs = [{
        'directory_id': directory.id,
        'nickname': directory.nickname,
        'selected': 'sync'
    }]
    job.update_attached_directories(self.session, attached_dirs,
                                    delete_existing=True)
    self.session.add(job)
    commit_with_rollback(self.session)
    manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=regular_log.default(),
        job=job,
        directory=directory)
    manager.remove_job_from_all_dirs(soft_delete=False)
    commit_with_rollback(self.session)
    self.session.flush()
    remaining_attachments = self.session.query(JobWorkingDir).filter(
        JobWorkingDir.working_dir_id == directory.id).all()
    self.assertEqual(len(remaining_attachments), 0)
def test_update_output_dir_actions(self):
    """update_output_dir_actions should persist both the output dir action
    and the completion directory id on the job."""
    project_data = data_mocking.create_project_with_context({
        'users': [{
            'username': '******',
            'email': '*****@*****.**',
            'password': '******',
        }]
    }, self.session)
    project = project_data['project']
    job = data_mocking.create_job({
        'name': 'my-test-job',
        'project': project
    }, self.session)
    directory = data_mocking.create_directory({
        'project': project,
        'user': project_data['users'][0],
    }, self.session)
    input_data = {
        'output_dir': directory.id,
        'output_dir_action': 'copy',
        'job_id': job.id,
    }
    updated_job, log = job_new_or_update.update_output_dir_actions(
        self.session, job, job.project, input_data,
        {'error': {}, 'info': {}})
    self.session.commit()
    self.session.flush()
    self.assertEqual(updated_job.output_dir_action,
                     input_data['output_dir_action'])
    self.assertEqual(updated_job.completion_directory_id,
                     input_data['output_dir'])
def test_validate_file_data_for_input_packet(self):
    """validate_file_data_for_input_packet across its input shapes:
    explicit file_id; media url with and without a type; and
    file_name + directory_id (present and absent).

    Fix: removed a leftover debug ``print``.
    """
    log = regular_log.default()
    file1 = data_mocking.create_file({'project_id': self.project.id},
                                     self.session)
    # Case: explicit file ID -> valid, resolves to that file.
    input_data = {'file_id': file1.id}
    result, log, file_id = packet.validate_file_data_for_input_packet(
        session=self.session,
        project_string_id=self.project.project_string_id,
        input=input_data,
        log=log)
    self.assertTrue(result)
    self.assertEqual(len(log['error'].keys()), 0)
    self.assertEqual(file_id, file1.id)
    # Case: media URL with no type -> invalid, one error.
    input_data = {'media': {'url': 'test_url'}}
    result, log, file_id = packet.validate_file_data_for_input_packet(
        session=self.session,
        project_string_id=self.project.project_string_id,
        input=input_data,
        log=log)
    self.assertFalse(result)
    self.assertEqual(len(log['error'].keys()), 1)
    self.assertEqual(file_id, None)
    # Case: media URL with a type -> valid, no pre-existing file id.
    input_data['media']['type'] = 'image'
    log = regular_log.default()
    result, log, file_id = packet.validate_file_data_for_input_packet(
        session=self.session,
        project_string_id=self.project.project_string_id,
        input=input_data,
        log=log)
    self.assertTrue(result)
    self.assertEqual(len(log['error'].keys()), 0)
    self.assertEqual(file_id, None)
    # Case: filename + directory -> resolves to the matching file.
    file2 = data_mocking.create_file({
        'project_id': self.project.id,
        'original_filename': 'test1.jpg'
    }, self.session)
    directory = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file2]
    }, self.session)
    input_data = {'file_name': 'test1.jpg', 'directory_id': directory.id}
    log = regular_log.default()
    result, log, file_id = packet.validate_file_data_for_input_packet(
        session=self.session,
        project_string_id=self.project.project_string_id,
        input=input_data,
        log=log)
    self.assertTrue(result)
    self.assertEqual(len(log['error'].keys()), 0)
    self.assertEqual(file_id, file2.id)
    # Case: filename not in the directory -> invalid, one error.
    input_data = {
        'file_name': 'test1111.jpg',
        'directory_id': directory.id
    }
    log = regular_log.default()
    result, log, file_id = packet.validate_file_data_for_input_packet(
        session=self.session,
        project_string_id=self.project.project_string_id,
        input=input_data,
        log=log)
    self.assertFalse(result)
    self.assertEqual(len(log['error'].keys()), 1)
    self.assertEqual(file_id, None)
def test_execute_after_launch_strategy(self):
    # Datasaur after-launch strategy on a text file: with the three remote
    # API calls stubbed out, it should create one task and record three
    # ExternalMap rows (label set, project, and per-file map).
    file = data_mocking.create_file(
        {
            'project_id': self.project.id,
            'type': 'text'
        }, self.session)
    label = data_mocking.create_label({
        'name': 'mylabel',
    }, self.session)
    label_file = data_mocking.create_label_file(
        {
            'label': label,
            'project_id': self.project.id
        }, self.session)
    attach_dir1 = data_mocking.create_directory(
        {
            'project': self.project,
            'user': self.project_data['users'][0],
            'files': [file]
        }, self.session)
    connection = data_mocking.create_connection(
        {
            'name': 'test',
            'integration_name': 'datasaur',
            'project_id': self.project.id
        }, self.session)
    # Fixture payload: serialized label data used as the job's label_dict.
    # NOTE(review): ids/hashes/timestamps are canned values; only
    # label_file.id is tied to the mocked rows above.
    labeldict = {
        "label_file_list_serialized": [{
            "id": label_file.id,
            "hash": "083e9ebc48d64e9a8874c6b95f490b56b8c4c5b0f4dacd90bd3534085e87d9fa",
            "type": "label",
            "state": "added",
            "created_time": "2020-07-15T18:48:34.477333",
            "time_last_updated": "2020-07-15T18:48:34.705290",
            "ann_is_complete": None,
            "original_filename": None,
            "video_id": None,
            "video_parent_file_id": None,
            "count_instances_changed": None,
            "attribute_group_list": [{
                "id": 2,
                "kind": "multiple_select",
                "is_root": True,
                "name": "carwheeltag",
                "prompt": "How is this car wheel",
                "show_prompt": True,
                "time_updated": "2020-08-05 19:37:07.703576",
                "attribute_template_list": [{
                    "id": 4,
                    "name": "Is rounded",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }, {
                    "id": 5,
                    "name": "is squared",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }, {
                    "id": 6,
                    "name": "is beautiful",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }, {
                    "id": 7,
                    "name": "is crazy",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }]
            }, {
                "id": 3,
                "kind": "select",
                "is_root": True,
                "name": "selectwheel",
                "prompt": "Please selectt something special about this wheels",
                "show_prompt": True,
                "time_updated": "2020-08-12 16:29:54.817801",
                "attribute_template_list": [{
                    "id": 10,
                    "name": "Silver Wheel",
                    "value_type": None,
                    "archived": False,
                    "group_id": 3,
                    "display_order": None
                }, {
                    "id": 9,
                    "name": "+Gold wheel",
                    "value_type": None,
                    "archived": False,
                    "group_id": 3,
                    "display_order": None
                }]
            }, {
                "id": 4,
                "kind": "text",
                "is_root": True,
                "name": "freewheel",
                "prompt": "What are your thought on this wheel?",
                "show_prompt": True,
                "time_updated": "2020-08-05 20:50:59.195249",
                "attribute_template_list": []
            }, {
                "id": 5,
                "kind": "radio",
                "is_root": True,
                "name": "clean",
                "prompt": "Is this wheel clean?",
                "show_prompt": True,
                "time_updated": "2020-08-05 20:53:46.314143",
                "attribute_template_list": [{
                    "id": 11,
                    "name": "Wheel is dirty",
                    "value_type": None,
                    "archived": False,
                    "group_id": 5,
                    "display_order": None
                }, {
                    "id": 12,
                    "name": "Wheek is clean",
                    "value_type": None,
                    "archived": False,
                    "group_id": 5,
                    "display_order": None
                }]
            }, {
                "id": 6,
                "kind": "text",
                "is_root": True,
                "name": "TEST",
                "prompt": "TEST28",
                "show_prompt": True,
                "time_updated": "2020-08-12 16:30:03.770141",
                "attribute_template_list": []
            }],
            "colour": {
                "hex": "#194d33",
                "hsl": {
                    "h": 150,
                    "s": 0.5,
                    "l": 0.2,
                    "a": 1
                },
                "hsv": {
                    "h": 150,
                    "s": 0.66,
                    "v": 0.3,
                    "a": 1
                },
                "rgba": {
                    "r": 25,
                    "g": 77,
                    "b": 51,
                    "a": 1
                },
                "a": 1
            },
            "label": {
                "id": 5,
                "name": "Car wheel",
                "default_sequences_to_single_frame": False
            }
        }],
        "label_file_colour_map": {}
    }
    job = data_mocking.create_job(
        {
            'name': 'my-test-job-{}'.format(1),
            'project': self.project,
            'status': 'active',
            'type': "Normal",
            'label_dict': labeldict,
            'attached_directories': [attach_dir1],
            'interface_connection_id': connection.id
        }, self.session)
    strategy = DatasaurTaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default())
    # Stub the three Datasaur API round-trips so no network calls occur.
    with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                      'create_datasaur_labelset',
                      return_value={
                          'result': {
                              'createLabelSet': {
                                  'id': 'mytestid'
                              }
                          }
                      }):
        with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                          'create_datasaur_project',
                          return_value={'result': {
                              'id': 'datasaur_test'
                          }}):
            with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                              'get_project_files_list',
                              return_value={
                                  'result': {
                                      'id': 'datasaur_test',
                                      'documents': [{
                                          'id': str(file.id),
                                          'name': str(file.id)
                                      }]
                                  }
                              }):
                strategy.execute_after_launch_strategy()
                commit_with_rollback(self.session)
                # One task expected for the single attached text file.
                tasks_count = self.session.query(Task).filter(
                    Task.job_id == job.id).count()
                tasks = self.session.query(Task).filter(
                    Task.job_id == job.id).all()
                self.assertEqual(tasks_count, 1)
                # Label-set map recorded under the stubbed remote id.
                external_map = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id='mytestid',
                    connection_id=connection.id,
                    diffgram_class_string='',
                    type='{}_label_set'.format(
                        connection.integration_name),
                )
                self.assertNotEqual(external_map, None)
                # Project map recorded for the task template.
                project_map = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id='datasaur_test',
                    connection_id=connection.id,
                    diffgram_class_string='task_template',
                    type='{}_project'.format(connection.integration_name),
                )
                self.assertNotEqual(project_map, None)
                # Per-file map recorded for the attached file.
                files_maps = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id=str(file.id),
                    file_id=file.id,
                    connection_id=connection.id,
                    diffgram_class_string='file',
                    type='{}_file'.format(connection.integration_name),
                )
                self.assertNotEqual(files_maps, None)
def test_job_output_dir_update(self):
    # End-to-end test of the set-output-dir API route: a valid 'copy'
    # request persists output_dir_action/completion_directory_id (checked
    # through a fresh session), and an unknown action returns HTTP 400.
    project_data = data_mocking.create_project_with_context(
        {
            'users': [{
                'username': '******',
                'email': '*****@*****.**',
                'password': '******',
            }]
        }, self.session)
    project = project_data['project']
    job = data_mocking.create_job(
        {
            'name': 'my-test-job',
            'project': project
        }, self.session)
    directory = data_mocking.create_directory(
        {
            'project': project,
            'user': project_data['users'][0],
        }, self.session)
    file = data_mocking.create_file(
        {
            'project_id': job.project.id,
            'job_id': job.id
        }, self.session)
    request_data = {
        'output_dir': str(directory.id),
        'output_dir_action': 'copy',
        'job_id': job.id,
    }
    endpoint = "/api/v1/project/" + job.project.project_string_id + "/job/set-output-dir"
    auth_api = common_actions.create_project_auth(project=job.project,
                                                  session=self.session)
    # Basic-auth header built from the project API credentials.
    credentials = b64encode(
        "{}:{}".format(auth_api.client_id,
                       auth_api.client_secret).encode()).decode('utf-8')
    response = self.client.post(endpoint,
                                data=json.dumps(request_data),
                                headers={
                                    'directory_id': str(job.project.directory_default_id),
                                    'Authorization': 'Basic {}'.format(credentials)
                                })
    self.assertEqual(response.status_code, 200)
    # Read back through a brand-new session so the assertions see
    # committed state rather than this test session's cache.
    new_session = sessionMaker.session_factory()
    updated_job = Job.get_by_id(new_session, job.id)
    self.assertEqual(updated_job.output_dir_action,
                     request_data['output_dir_action'])
    self.assertEqual(str(updated_job.completion_directory_id),
                     request_data['output_dir'])
    # Now test a wrong action
    request_data_error = {
        'output_dir': 58,
        'output_dir_action': 'a_wrong_action',
        'job_id': job.id,
    }
    response_error = self.client.post(
        endpoint,
        data=json.dumps(request_data_error),
        headers={
            'directory_id': str(job.project.directory_default_id),
            'Authorization': 'Basic {}'.format(credentials)
        })
    self.assertEqual(response_error.status_code, 400)
def test_new_external_export(self):
    """new_external_export over a directory holding one annotated file
    should return a serialized export containing that file's single
    instance with the mocked box coordinates.

    Fixes: ``mock_1.asser_called_once()`` was a typo — on a Mock, an
    unknown attribute call is a silent no-op, so the upload assertion
    never actually ran; corrected to ``assert_called_once()``.
    Also removed a leftover debug ``print`` and the unused ``instance1``
    local.
    """
    file = data_mocking.create_file(
        {
            'project_id': self.project.id,
            'type': 'image'
        }, self.session)
    label_file = data_mocking.create_file({'project_id': self.project.id},
                                          self.session)
    source_directory = data_mocking.create_directory(
        {
            'project': self.project,
            'user': self.project_data['users'][0],
            'files': [file]
        }, self.session)
    data_mocking.create_instance(
        {
            'x_min': 1,
            'x_max': 10,
            'y_min': 1,
            'y_max': 10,
            'file_id': file.id,
            'label_file_id': label_file.id
        }, self.session)
    export = data_mocking.create_export(
        {
            'description': 'test',
            'source': 'directory',
            'kind': 'Annotations',
            'file_comparison_mode': 'latest',
            'project_id': self.project.id,
            'working_dir_id': source_directory.id,
            'ann_is_complete': False
        }, self.session)
    self.session.commit()
    # Patch the upload so nothing is written to external storage.
    with patch.object(export_generation.data_tools,
                      'upload_from_string') as mock_1:
        result, export_data = export_generation.new_external_export(
            session=self.session,
            project=self.project,
            export_id=export.id,
            version=None,
            working_dir=source_directory,
            use_request_context=False)
        mock_1.assert_called_once()
        self.assertTrue(result)
        self.assertTrue('readme' in export_data)
        self.assertTrue('label_map' in export_data)
        self.assertTrue('export_info' in export_data)
        self.assertTrue('attribute_groups_reference' in export_data)
        self.assertTrue(file.id in export_data)
        instance_list = export_data[file.id]['instance_list']
        self.assertEqual(len(instance_list), 1)
        self.assertEqual(instance_list[0]['x_min'], 1)
        self.assertEqual(instance_list[0]['x_max'], 10)
        self.assertEqual(instance_list[0]['y_min'], 1)
        self.assertEqual(instance_list[0]['y_max'], 10)
        self.assertEqual(instance_list[0]['label_file_id'], label_file.id)
def test_view_file_list_web_route(self):
    # End-to-end test of the file list route with a date-range filter:
    # only files whose created_time falls inside [date_from, date_to]
    # should be returned.
    job = data_mocking.create_job(
        {
            'name': 'my-test-job',
            'project': self.project
        }, self.session)
    file = data_mocking.create_file({'project_id': self.project.id},
                                    self.session)
    file2 = data_mocking.create_file({'project_id': self.project.id},
                                     self.session)
    file3 = data_mocking.create_file({'project_id': self.project.id},
                                     self.session)
    # Spread creation dates so the May 11-18 window captures only
    # file and file2 (file3 is on May 25, outside the window).
    file.created_time = datetime.datetime(2020, 5, 11)
    file2.created_time = datetime.datetime(2020, 5, 17)
    file3.created_time = datetime.datetime(2020, 5, 25)
    self.session.commit()
    directory = data_mocking.create_directory(
        {
            'project': self.project,
            'user': self.project_data['users'][0],
            'files': [file, file2, file3]
        }, self.session)
    # Request payload mirroring the front-end's file list metadata shape.
    request_data = {
        'metadata': {
            'directory_id': directory.id,
            'date_from': datetime.datetime(2020, 5, 11).strftime('%Y-%m-%d'),
            'date_to': datetime.datetime(2020, 5, 18).strftime('%Y-%m-%d'),
            'annotations_are_machine_made_setting': "All",
            'annotation_status': "All",
            'limit': 25,
            'media_type': 'All',
            'request_next_page': False,
            'file_view_mode': 'annotation',
            'request_previous_page': False,
            'previous': {
                'annotation_status': "All",
                'date_from': None,
                'date_to': None,
                'directory_id': None,
                'file': {},
                'file_view_mode': None,
                'job_id': None,
                'label': {
                    'start_index': 0
                },
                'limit': 25,
                'machine_made_setting': "All",
                'media_type': "All",
                'pagination': {},
                'search_term': None,
                'start_index': 0,
                'request_next_page': False,
                'request_previous_page': False,
            },
            'options': {
                'itemsPerPage': -1,
                'sortDesc': [True]
            },
            'job_id': None,
        }
    }
    endpoint = "/api/project/{}/user/{}/file/list".format(
        self.project.project_string_id, self.project_data['users'][0].id)
    # Basic-auth header from the test project's API credentials.
    credentials = b64encode("{}:{}".format(
        self.auth_api.client_id,
        self.auth_api.client_secret).encode()).decode('utf-8')
    response = self.client.post(
        endpoint,
        data=json.dumps(request_data),
        headers={'Authorization': 'Basic {}'.format(credentials)})
    data = response.json
    ids = [x['id'] for x in data['file_list']]
    # Testing Date filter case.
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(ids), 2)
    self.assertTrue(file.id in ids)
    self.assertTrue(file2.id in ids)
def test_create_file_links_for_attached_dirs(self):
    """With two sync-attached directories each holding one file,
    create_file_links_for_attached_dirs should link both files into the
    job directory and create one task per file."""
    project = self.project_data['project']
    file1 = data_mocking.create_file({'project_id': project.id},
                                     self.session)
    file2 = data_mocking.create_file({'project_id': project.id},
                                     self.session)
    job = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    directory1 = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file1],
        'jobs_to_sync': {'job_ids': [job.id]}
    }, self.session)
    directory2 = data_mocking.create_directory({
        'project': project,
        'user': self.project_data['users'][0],
        'files': [file2],
        'jobs_to_sync': {'job_ids': [job.id]}
    }, self.session)
    attached_dirs = [{
        'directory_id': directory1.id,
        'nickname': directory1.nickname,
        'selected': 'sync'
    }, {
        'directory_id': directory2.id,
        'nickname': directory2.nickname,
        'selected': 'sync'
    }]
    job.update_attached_directories(self.session, attached_dirs,
                                    delete_existing=True)
    self.session.add(job)
    self.session.add(directory1)
    self.session.add(directory2)
    commit_with_rollback(self.session)
    manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=regular_log.default(),
        job=job,
    )
    manager.create_file_links_for_attached_dirs(create_tasks=True)
    commit_with_rollback(self.session)
    self.session.flush()
    # Both files should now be linked into the job's directory.
    for linked_file in (file1, file2):
        link = self.session.query(WorkingDirFileLink).filter(
            WorkingDirFileLink.file_id == linked_file.id,
            WorkingDirFileLink.working_dir_id == job.directory_id).first()
        self.assertTrue(link is not None)
    # Exactly two tasks in total, one per file.
    all_tasks = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertEqual(len(all_tasks.all()), 2)
    for linked_file in (file1, file2):
        per_file_task = self.session.query(Task).filter(
            Task.job_id == job.id,
            Task.file_id == linked_file.id).first()
        self.assertTrue(per_file_task is not None)