def register_member(user, session):
    """Create and commit a Member row for the given user; return it."""
    member = Member(user = user)
    session.add(member)
    regular_methods.commit_with_rollback(session)
    return member
def create_export(export_data, session):
    """Build an Export from `export_data` (missing keys become None) and commit it."""
    field_names = (
        'type', 'kind', 'archived', 'masks', 'source', 'status', 'status_text',
        'percent_complete', 'file_comparison_mode', 'file_list_length',
        'description', 'working_dir_id', 'project_id', 'user_id', 'job_id',
        'task_id', 'yaml_blob_name', 'json_blob_name', 'tf_records_blob_name',
        'ann_is_complete',
    )
    export = Export(**{name: export_data.get(name) for name in field_names})
    session.add(export)
    regular_methods.commit_with_rollback(session)
    return export
def generate_sample_files_for_dataset(self, dataset):
    """
    Seed `dataset` with a few sample images fetched from picsum.photos.

    Skips generation entirely when the dataset already holds at least
    NUM_IMAGES root files; otherwise creates NUM_IMAGES Input rows and
    runs each through Process_Media immediately.
    """
    NUM_IMAGES = 3
    # NOTE(review): removed an unused NUM_VIDEOS = 3 constant; only images
    # are ever generated in this method.
    files_list_count = WorkingDirFileLink.file_list(
        self.session,
        working_dir_id = dataset.id,
        root_files_only = True,  # TODO do we need to get child files too?
        limit = None,
        counts_only = True,
        type = ['image', 'video'])
    if files_list_count >= NUM_IMAGES:
        return
    for i in range(0, NUM_IMAGES):
        diffgram_input = Input(
            project_id = dataset.project_id,
            url = 'https://picsum.photos/1000',
            media_type = 'image',
            directory_id = dataset.id,
            type = 'from_url')
        self.session.add(diffgram_input)
        # Flush so diffgram_input.id is populated before Process_Media uses it.
        self.session.flush()
        process_media = Process_Media(
            session = self.session,
            input_id = diffgram_input.id,
            input = diffgram_input,
            item = None)
        process_media.main_entry()
        # Commit right away for future querying.
        commit_with_rollback(self.session)
def create_connection(connection_data, session):
    """Persist a Connection built from required name / integration_name / project_id keys."""
    connection = Connection()
    for attribute in ('name', 'integration_name', 'project_id'):
        # Required keys: a missing one raises KeyError, same as the original.
        setattr(connection, attribute, connection_data[attribute])
    session.add(connection)
    regular_methods.commit_with_rollback(session)
    return connection
def create_label_file(label_file_data, session):
    """Create and commit a File of type 'label' for the given project."""
    new_file = File()
    new_file.label = label_file_data.get('label')
    new_file.project_id = label_file_data['project_id']
    new_file.type = 'label'
    session.add(new_file)
    regular_methods.commit_with_rollback(session)
    return new_file
def create_label(label_data, session):
    """Return the Label named label_data['name'], creating it when absent (get-or-create)."""
    found = Label.get_by_name(session = session, label_name = label_data.get('name'))
    if found:
        return found
    new_label = Label()
    new_label.name = label_data.get('name')
    session.add(new_label)
    regular_methods.commit_with_rollback(session)
    return new_label
def create_discussion_relation(discussion_relation, session):
    """Create and commit a DiscussionRelation linking a discussion to its target objects."""
    relation_fields = ('discussion_id', 'instance_id', 'file_id', 'job_id', 'task_id')
    relation = DiscussionRelation(
        **{name: discussion_relation.get(name) for name in relation_fields})
    session.add(relation)
    regular_methods.commit_with_rollback(session)
    return relation
def create_discussion_comment(discussion_comment_data, session):
    """Create and commit a DiscussionComment from the given data dict."""
    comment_fields = ('discussion_id', 'user_id', 'member_created_id',
                      'project_id', 'content')
    comment = DiscussionComment(
        **{name: discussion_comment_data.get(name) for name in comment_fields})
    session.add(comment)
    regular_methods.commit_with_rollback(session)
    return comment
def create_instance(instance_data, session):
    """
    Create and commit an Instance from the given data dict.

    Every scalar column is pulled straight from `instance_data` (missing keys
    become None); `nodes` / `edges` are wrapped in their expected container
    dicts before being handed to the constructor.
    """
    scalar_fields = (
        'project_id', 'task_id', 'type', 'hash', 'status',
        'start_sentence', 'end_sentence', 'start_token', 'end_token',
        'start_char', 'end_char', 'sentence', 'sequence_id', 'number',
        'frame_number', 'global_frame_number', 'machine_made', 'interpolated',
        'fan_made', 'verified', 'occluded', 'soft_delete',
        'label_file_id', 'file_id', 'points', 'mask_url', 'mask_blob_dir',
        'mask_url_expiry', 'x_min', 'y_min', 'x_max', 'y_max',
        'width', 'height', 'preview_image_url', 'preview_image_blob_dir',
        'preview_image_url_expiry', 'rating', 'rating_comment',
        'attribute_groups', 'member_created_id', 'previous_id', 'root_id',
    )
    kwargs = {name: instance_data.get(name) for name in scalar_fields}
    kwargs['nodes'] = {'nodes': instance_data.get('nodes')}
    kwargs['edges'] = {'edges': instance_data.get('edges')}
    instance = Instance(**kwargs)
    session.add(instance)
    regular_methods.commit_with_rollback(session)
    return instance
def create_directory(dir_data, session):
    """
    Create a WorkingDir owned by dir_data['user'] on dir_data['project'],
    optionally setting jobs_to_sync and linking any supplied files into it.
    """
    directory = WorkingDir()
    directory.user_id = dir_data['user'].id
    directory.project_id = dir_data['project'].id
    jobs_to_sync = dir_data.get('jobs_to_sync')
    if jobs_to_sync:
        directory.jobs_to_sync = jobs_to_sync
    session.add(directory)
    regular_methods.commit_with_rollback(session)
    files = dir_data.get('files')
    if files:
        for item in files:
            WorkingDirFileLink.add(session, directory.id, item)
        regular_methods.commit_with_rollback(session)
    return directory
def test_execute_after_launch_strategy(self):
    """
    Launching a task template against a (mocked) ScaleAI connection should
    create exactly one Task and an ExternalMap row for the task template.
    """
    # Fixtures: a file, a directory holding it, and a scale_ai connection.
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    connection = data_mocking.create_connection({
        'name': 'test',
        'integration_name': 'scale_ai',
        'project_id': self.project.id
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'status': 'active',
        'type': "Normal",
        'attached_directories': [
            attach_dir1
        ],
        'interface_connection_id': connection.id
    }, self.session)
    strategy = ScaleAITaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default()
    )
    # Stub the outbound ScaleAI project-creation call so no network I/O happens.
    with patch.object(ScaleAITaskTemplateAfterLaunchStrategy,
                      'create_scale_ai_project',
                      return_value={'id': '123', 'name': 'scaleaitest'}):
        strategy.execute_after_launch_strategy()
        commit_with_rollback(self.session)
    tasks_count = self.session.query(Task).filter(
        Task.job_id == job.id
    ).count()
    tasks = self.session.query(Task).filter(
        Task.job_id == job.id
    ).all()
    self.assertEqual(tasks_count, 1)
    # The strategy should have recorded the external mapping for the template.
    external_maps = ExternalMap.get(
        session=self.session,
        job_id=job.id,
        diffgram_class_string='task_template',
        connection_id=connection.id,
        type=connection.integration_name
    )
    self.assertNotEqual(external_maps, None)
def create_discussion(discussion_data, session):
    """Create a Discussion, anchor it to its project element, and commit."""
    discussion = Discussion(
        title = discussion_data.get('title'),
        description = discussion_data.get('description'),
        member_created_id = discussion_data.get('member_created_id'),
        project_id = discussion_data.get('project_id'),
        status = discussion_data.get('status', 'open'),
    )
    session.add(discussion)
    regular_methods.commit_with_rollback(session)
    # Every discussion is attached to its project as the base element.
    discussion.attach_element(
        session, {'type': 'project', 'id': discussion_data.get('project_id')})
    regular_methods.commit_with_rollback(session)
    return discussion
def create_userscript(event_data, session):
    """Create a UserScript via its factory method and commit it."""
    from shared.database.userscript.userscript import UserScript
    factory_fields = ('member', 'project', 'client_created_time',
                      'client_creation_ref_id', 'name', 'code',
                      'external_src_list', 'use_instructions', 'language')
    userscript = UserScript.new(
        **{name: event_data.get(name) for name in factory_fields})
    session.add(userscript)
    regular_methods.commit_with_rollback(session)
    return userscript
def create_sequence(sequence_data, session):
    """Create and commit a Sequence from the given data dict."""
    field_names = ('label_file_id', 'has_changes', 'single_frame',
                   'keyframe_list', 'video_file_id', 'number',
                   'instance_preview_cache', 'cache_expiry', 'archived')
    sequence = Sequence(**{name: sequence_data.get(name) for name in field_names})
    session.add(sequence)
    regular_methods.commit_with_rollback(session)
    return sequence
def create_task(task_data, session):
    """
    Create and commit a Task for testing, mocking a Job when the caller
    does not supply one.

    :param task_data: dict of optional task attributes ('job', 'file',
        'status', 'task_type', 'kind', ...).
    :param session: SQLAlchemy session.
    :return: the committed Task.
    """
    task = Task()
    session.add(task)
    task.is_live = task_data.get('is_live', True)
    if 'job' not in task_data:
        # No job supplied: mock one so callers don't need the relation.
        job = create_job({'name': 'jobtest:{}'.format(task_data.get('name'))}, session)
    else:
        job = task_data.get('job')
    task.job_id = job.id
    task.job = job
    if 'file' not in task_data:
        # TODO: add file create mock.
        file_id = None
    else:
        file_id = task_data.get('file').id
    task.file_id = file_id
    task.file = task_data.get('file')
    # TODO: might need to create mock functions for the following relations
    task.guide_id = task_data.get('guide_id', None)
    task.label_dict = task_data.get('label_dict', {})
    task.file_original_id = task_data.get('file_original_id', None)
    task.file_original = task_data.get('file_original', None)
    task.completion_directory_id = task_data.get('completion_directory_id', None)
    task.incoming_directory_id = task_data.get('incoming_directory_id', None)
    task.task_type = task_data.get('task_type', 'draw')
    # Default of 'available' keeps draw tasks immediately claimable.
    # (Removed a dead branch that set status = 'available' for draw tasks
    # only to be unconditionally overwritten by this line.)
    task.status = task_data.get('status', 'available')
    # Cache from job
    task.project_id = job.project_id
    task.job_type = job.type
    task.label_mode = job.label_mode
    # BUGFIX: the original chained assignment
    # `task.kind = task.task_type = task_data.get('kind', 'human')`
    # clobbered task_type (carefully set above) with the kind value.
    task.kind = task_data.get('kind', 'human')
    regular_methods.commit_with_rollback(session)
    return task
def create_instance_template(instance_template_data, session):
    """Create an InstanceTemplate, plus relation rows for any supplied instances."""
    template = InstanceTemplate(
        name = instance_template_data.get('name', ''),
        project_id = instance_template_data.get('project_id'),
        status = instance_template_data.get('status'))
    session.add(template)
    regular_methods.commit_with_rollback(session)
    instance_list = instance_template_data.get('instance_list', None)
    if instance_list:
        for entry in instance_list:
            created = create_instance(instance_data = entry, session = session)
            link = InstanceTemplateRelation(
                instance_template_id = template.id,
                instance_id = created.id)
            session.add(link)
        regular_methods.commit_with_rollback(session)
    return template
def create_sync_event(sync_event_data, session):
    """
    Create and commit a SyncEvent from the given data dict.

    created_date is always stamped with "now"; every other column comes
    straight from `sync_event_data` (missing keys become None).
    """
    get = sync_event_data.get
    sync_event = SyncEvent(
        dataset_source_id = get('dataset_source_id'),
        dataset_destination_id = get('dataset_destination_id'),
        description = get('description'),
        file = get('file'),
        input_id = get('input_id'),
        job_id = get('job_id'),
        project = get('project'),
        created_task = get('created_task'),
        completed_task = get('completed_task'),
        new_file_copy = get('new_file_copy'),
        transfer_action = get('transfer_action'),
        event_effect_type = get('event_effect_type'),
        event_trigger_type = get('event_trigger_type'),
        processing_deferred = get('processing_deferred'),
        member_created = get('member_created'),
        member_updated = get('member_updated'),
        created_date = datetime.datetime.now(),
        status = get('status')
    )
    session.add(sync_event)
    regular_methods.commit_with_rollback(session)
    return sync_event
def create_label_file(label_file_data, session):
    """
    Create a File of type 'label', link it into the project's default
    directory, and refresh the project's cached label dict.

    :param label_file_data: dict with 'project_id' (required) and optional
        'label' (a Label row) and 'state'.
    :param session: SQLAlchemy session.
    :return: the committed label File.
    """
    label = label_file_data.get('label')
    label_file = File()
    label_file.label = label
    # BUGFIX: the original unconditionally read `.get('label').id`, raising
    # AttributeError when no label was supplied.
    label_file.label_id = label.id if label is not None else None
    label_file.project_id = label_file_data['project_id']
    label_file.state = label_file_data.get('state', 'added')
    label_file.type = 'label'
    session.add(label_file)
    regular_methods.commit_with_rollback(session)
    project = Project.get_by_id(session, label_file.project_id)
    if project:
        # Make the new label visible in the project's default directory and
        # refresh the cached label dictionary.
        WorkingDirFileLink.add(session, project.directory_default_id, label_file)
        project.refresh_label_dict(session)
    session.add(label_file)
    regular_methods.commit_with_rollback(session)
    return label_file
def create_event(event_data, session):
    """Create and commit an Event from the given data dict."""
    field_names = ('kind', 'member_id', 'success', 'error_log', 'description',
                   'link', 'project_id', 'task_id', 'job_id', 'run_time',
                   'object_type', 'input_id', 'file_id', 'page_name')
    event = Event(**{name: event_data.get(name) for name in field_names})
    session.add(event)
    regular_methods.commit_with_rollback(session)
    return event
def create_file(file_data, session):
    """
    Create and commit a File of type 'image' (default), 'frame', or 'video',
    with a mocked Video or Image row attached as appropriate.

    :param file_data: dict of optional attributes; for videos an optional
        'video' sub-dict supplies filename/frame_rate/frame_count/width/height.
    :param session: SQLAlchemy session.
    :return: the committed File.
    """
    file = File(
        project_id = file_data.get('project_id'),
        job_id = file_data.get('job_id'),
        original_filename = get_random_string(6),
        type = file_data.get('type', 'image'),
        state = file_data.get('state', 'added'),
        frame_number = file_data.get('frame_number'),
        video_parent_file_id = file_data.get('video_parent_file_id'),
    )
    # (Removed a redundant second `file.type = file_data.get('type', 'image')`
    # assignment — the constructor already applied the same default.)
    if file.type == 'video':
        # Original per-field pattern only fell back to defaults when the
        # 'video' key was entirely absent; a supplied dict missing a field
        # yields None for that field. Preserved here with one defaults dict.
        video_defaults = {'name': 'test video', 'frame_rate': 60,
                          'frame_count': 100, 'width': 800, 'height': 800}
        video_data = file_data.get('video', video_defaults)
        video = Video(
            filename = video_data.get('name'),
            frame_rate = video_data.get('frame_rate'),
            frame_count = video_data.get('frame_count'),
            width = video_data.get('width'),
            height = video_data.get('height'),
            parent_video_split_duration = 30,
            root_blob_path_to_frames = '/test/',
        )
        session.add(video)
        regular_methods.commit_with_rollback(session)
        file.video = video
        # NOTE(review): this stores the Video row's id in video_parent_file_id,
        # a column that elsewhere holds a File id — looks suspect, but it is
        # preserved as-is since existing tests may depend on it. TODO confirm.
        file.video_parent_file_id = video.id
    elif file.type in ['image', 'frame']:
        image = Image()
        session.add(image)
        regular_methods.commit_with_rollback(session)
        file.image = image
        file.image_id = image.id
    session.add(file)
    regular_methods.commit_with_rollback(session)
    return file
def test_create_task_from_file(self):
    """
    JobDirectorySyncManager.create_task_from_file on a file in a synced
    directory should produce a Task for the job.
    """
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    # Directory holds the file and is configured to sync with the job.
    directory = data_mocking.create_directory(
        {
            'project': project,
            'user': self.project_data['users'][0],
            'files': [file],
            'jobs_to_sync': {
                'job_ids': [job.id]
            }
        }, self.session)
    log = regular_log.default()
    dir_list = [{
        'directory_id': directory.id,
        'nickname': directory.nickname,
        'selected': 'sync'
    }]
    job.update_attached_directories(self.session, dir_list, delete_existing=True)
    self.session.add(job)
    commit_with_rollback(self.session)
    sync_manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=log,
        job=job,
    )
    sync_manager.create_task_from_file(file)
    commit_with_rollback(self.session)
    self.session.flush()
    # A task should now exist for the job.
    task = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task.first() is not None)
def test_remove_job_from_all_dirs(self):
    """
    remove_job_from_all_dirs(soft_delete=False) should hard-delete every
    JobWorkingDir attachment between the job and its directories.
    """
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    job = data_mocking.create_job({
        'project': project,
        'status': 'active'
    }, session=self.session)
    # Directory attached to the job via the sync config.
    directory = data_mocking.create_directory(
        {
            'project': project,
            'user': self.project_data['users'][0],
            'files': [file],
            'jobs_to_sync': {
                'job_ids': [job.id]
            }
        }, self.session)
    log = regular_log.default()
    dir_list = [{
        'directory_id': directory.id,
        'nickname': directory.nickname,
        'selected': 'sync'
    }]
    job.update_attached_directories(self.session, dir_list, delete_existing=True)
    self.session.add(job)
    commit_with_rollback(self.session)
    sync_manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=log,
        job=job,
        directory=directory)
    sync_manager.remove_job_from_all_dirs(soft_delete=False)
    commit_with_rollback(self.session)
    self.session.flush()
    # With soft_delete=False the attachment rows must be gone entirely.
    directory_attachments = self.session.query(JobWorkingDir).filter(
        JobWorkingDir.working_dir_id == directory.id).all()
    self.assertEqual(len(directory_attachments), 0)
def test__add_file_into_job(self):
    """
    __add_file_into_job should always link the file into the job directory,
    but only create a Task once the job status permits it; re-adding an
    existing file must not fail.
    """
    project = self.project_data['project']
    file = data_mocking.create_file({'project_id': project.id}, self.session)
    # Job deliberately left in its default (non-active) status first.
    job = data_mocking.create_job({'project': project}, session=self.session)
    directory = data_mocking.create_directory(
        {
            'project': project,
            'user': self.project_data['users'][0],
            'files': [file]
        }, self.session)
    log = regular_log.default()
    sync_manager = job_dir_sync_utils.JobDirectorySyncManager(
        session=self.session,
        log=log,
        job=job)
    # Name-mangled call into the private __add_file_into_job method.
    sync_manager._JobDirectorySyncManager__add_file_into_job(
        file,
        directory,
        create_tasks=True)
    commit_with_rollback(self.session)
    # File must be linked into the job's directory regardless of status...
    dir_link = self.session.query(WorkingDirFileLink).filter(
        WorkingDirFileLink.file_id == file.id,
        WorkingDirFileLink.working_dir_id == job.directory_id)
    self.assertTrue(dir_link.first() is not None)
    # ...but no task yet, since the job is not active.
    task = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task.first() is None)
    # If job has correct status task should be created.
    job.status = 'active'
    self.session.add(job)
    commit_with_rollback(self.session)
    sync_manager._JobDirectorySyncManager__add_file_into_job(
        file,
        directory,
        create_tasks=True)
    task = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task.first() is not None)
    commit_with_rollback(self.session)
    # Retest for case of an existing file/task.
    mngr = SyncEventManager.create_sync_event_and_manager(
        session=self.session,
        status='started')
    sync_manager._JobDirectorySyncManager__add_file_into_job(
        file,
        directory,
        create_tasks=True,
        sync_event_manager=mngr)
    task = self.session.query(Task).filter(Task.job_id == job.id)
    self.assertTrue(task.first() is not None)
def create_project_with_context(context_data, session):
    """
    Create mock data for a project and all the necessary context for unit
    testing.

    context_data is a dictionary describing the project to mock: users,
    labels, tasks, etc. Example — a project with 2 users and 2 labels::

        {
            'project_name': 'My test project',
            'users': [
                {'name': 'john', 'permissions': 'admin'},
                {'name': 'maria', 'permissions': 'view'}
            ],
            'labels': [
                {'name': 'catlabel', 'type': 'box'},
                {'name': 'doglabel2', 'type': 'box'}
            ]
        }

    Returns a dict with the created rows for further querying in tests:
    {'project': Project, 'users': [User, ...]}.

    :param context_data: dict as described above; 'users' is required.
    :param session: SQLAlchemy session.
    :return: dict with 'project' and 'users' keys.
    """
    random_name = get_random_string(8)
    project_string_id = context_data.get('project_string_id', random_name)
    project_name = context_data.get('project_name', random_name)
    # NOTE(review): default_project_limit is never used below — TODO confirm
    # whether it was meant to cap something or can be removed.
    default_project_limit = 10
    # Owner user for the project (credentials are placeholder test values).
    user = register_user(
        {'username': '******'.format(project_string_id),
         'email': 'test{}@test.com'.format(project_string_id),
         'password': '******'},
        session
    )
    member = Member(kind = 'human')
    session.add(member)
    # Flush so member gets an id before being attached to the user.
    session.flush()
    user.member = member
    project = Project.new(
        session = session,
        name = project_name,
        project_string_id = project_string_id,
        goal = 'Test stuff',
        member_created = None,
        user = user
    )
    user_list = []
    # NOTE(review): this loop rebinds `user` (shadowing the owner above) and
    # mutates the caller's per-user dicts in place by inserting
    # 'project_string_id' — callers reusing those dicts should be aware.
    for user in context_data['users']:
        if user.get('project_string_id') is None:
            user['project_string_id'] = random_name
        new_user = register_user(user, session)
        member = Member(kind = 'human')
        session.add(member)
        session.flush()
        new_user.member = member
        new_user.member_id = member.id
        session.add(new_user)
        user_list.append(new_user)
    regular_methods.commit_with_rollback(session)
    return {
        'project': project,
        'users': user_list
    }
def create_job_launch(job_launch_data, session):
    """Create and commit a JobLaunch for the required 'job_id' key."""
    launch = JobLaunch()
    launch.job_id = job_launch_data['job_id']
    session.add(launch)
    regular_methods.commit_with_rollback(session)
    return launch
def create_job_launch_queue_element(job_launch_queue_data, session):
    """Create and commit a JobLaunchQueue entry for the required 'job_launch_id' key."""
    queue_element = JobLaunchQueue()
    queue_element.job_launch_id = job_launch_queue_data['job_launch_id']
    session.add(queue_element)
    regular_methods.commit_with_rollback(session)
    return queue_element
def test_execute_after_launch_strategy(self):
    """
    Launching a task template against a (mocked) Datasaur connection should
    create one Task plus ExternalMap rows for the label set, the Datasaur
    project, and the synced file.
    """
    # Fixtures: a text file, a label + label file, a directory, a connection.
    file = data_mocking.create_file(
        {
            'project_id': self.project.id,
            'type': 'text'
        }, self.session)
    label = data_mocking.create_label({
        'name': 'mylabel',
    }, self.session)
    label_file = data_mocking.create_label_file(
        {
            'label': label,
            'project_id': self.project.id
        }, self.session)
    attach_dir1 = data_mocking.create_directory(
        {
            'project': self.project,
            'user': self.project_data['users'][0],
            'files': [file]
        }, self.session)
    connection = data_mocking.create_connection(
        {
            'name': 'test',
            'integration_name': 'datasaur',
            'project_id': self.project.id
        }, self.session)
    # Serialized label payload as the frontend would send it (attribute
    # groups, colour map, etc.); only label_file.id is dynamic.
    labeldict = {
        "label_file_list_serialized": [{
            "id": label_file.id,
            "hash": "083e9ebc48d64e9a8874c6b95f490b56b8c4c5b0f4dacd90bd3534085e87d9fa",
            "type": "label",
            "state": "added",
            "created_time": "2020-07-15T18:48:34.477333",
            "time_last_updated": "2020-07-15T18:48:34.705290",
            "ann_is_complete": None,
            "original_filename": None,
            "video_id": None,
            "video_parent_file_id": None,
            "count_instances_changed": None,
            "attribute_group_list": [{
                "id": 2,
                "kind": "multiple_select",
                "is_root": True,
                "name": "carwheeltag",
                "prompt": "How is this car wheel",
                "show_prompt": True,
                "time_updated": "2020-08-05 19:37:07.703576",
                "attribute_template_list": [{
                    "id": 4, "name": "Is rounded", "value_type": None,
                    "archived": False, "group_id": 2, "display_order": None
                }, {
                    "id": 5, "name": "is squared", "value_type": None,
                    "archived": False, "group_id": 2, "display_order": None
                }, {
                    "id": 6, "name": "is beautiful", "value_type": None,
                    "archived": False, "group_id": 2, "display_order": None
                }, {
                    "id": 7, "name": "is crazy", "value_type": None,
                    "archived": False, "group_id": 2, "display_order": None
                }]
            }, {
                "id": 3,
                "kind": "select",
                "is_root": True,
                "name": "selectwheel",
                "prompt": "Please selectt something special about this wheels",
                "show_prompt": True,
                "time_updated": "2020-08-12 16:29:54.817801",
                "attribute_template_list": [{
                    "id": 10, "name": "Silver Wheel", "value_type": None,
                    "archived": False, "group_id": 3, "display_order": None
                }, {
                    "id": 9, "name": "+Gold wheel", "value_type": None,
                    "archived": False, "group_id": 3, "display_order": None
                }]
            }, {
                "id": 4,
                "kind": "text",
                "is_root": True,
                "name": "freewheel",
                "prompt": "What are your thought on this wheel?",
                "show_prompt": True,
                "time_updated": "2020-08-05 20:50:59.195249",
                "attribute_template_list": []
            }, {
                "id": 5,
                "kind": "radio",
                "is_root": True,
                "name": "clean",
                "prompt": "Is this wheel clean?",
                "show_prompt": True,
                "time_updated": "2020-08-05 20:53:46.314143",
                "attribute_template_list": [{
                    "id": 11, "name": "Wheel is dirty", "value_type": None,
                    "archived": False, "group_id": 5, "display_order": None
                }, {
                    "id": 12, "name": "Wheek is clean", "value_type": None,
                    "archived": False, "group_id": 5, "display_order": None
                }]
            }, {
                "id": 6,
                "kind": "text",
                "is_root": True,
                "name": "TEST",
                "prompt": "TEST28",
                "show_prompt": True,
                "time_updated": "2020-08-12 16:30:03.770141",
                "attribute_template_list": []
            }],
            "colour": {
                "hex": "#194d33",
                "hsl": {"h": 150, "s": 0.5, "l": 0.2, "a": 1},
                "hsv": {"h": 150, "s": 0.66, "v": 0.3, "a": 1},
                "rgba": {"r": 25, "g": 77, "b": 51, "a": 1},
                "a": 1
            },
            "label": {
                "id": 5,
                "name": "Car wheel",
                "default_sequences_to_single_frame": False
            }
        }],
        "label_file_colour_map": {}
    }
    job = data_mocking.create_job(
        {
            'name': 'my-test-job-{}'.format(1),
            'project': self.project,
            'status': 'active',
            'type': "Normal",
            'label_dict': labeldict,
            'attached_directories': [attach_dir1],
            'interface_connection_id': connection.id
        }, self.session)
    strategy = DatasaurTaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default())
    # Stub out all outbound Datasaur API calls so no network I/O happens.
    with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                      'create_datasaur_labelset',
                      return_value={
                          'result': {
                              'createLabelSet': {
                                  'id': 'mytestid'
                              }
                          }
                      }):
        with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                          'create_datasaur_project',
                          return_value={'result': {
                              'id': 'datasaur_test'
                          }}):
            with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                              'get_project_files_list',
                              return_value={
                                  'result': {
                                      'id': 'datasaur_test',
                                      'documents': [{
                                          'id': str(file.id),
                                          'name': str(file.id)
                                      }]
                                  }
                              }):
                strategy.execute_after_launch_strategy()
                commit_with_rollback(self.session)
                tasks_count = self.session.query(Task).filter(
                    Task.job_id == job.id).count()
                tasks = self.session.query(Task).filter(
                    Task.job_id == job.id).all()
                self.assertEqual(tasks_count, 1)
                # One ExternalMap per synced entity: label set, project, file.
                external_map = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id='mytestid',
                    connection_id=connection.id,
                    diffgram_class_string='',
                    type='{}_label_set'.format(
                        connection.integration_name),
                )
                self.assertNotEqual(external_map, None)
                project_map = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id='datasaur_test',
                    connection_id=connection.id,
                    diffgram_class_string='task_template',
                    type='{}_project'.format(connection.integration_name),
                )
                self.assertNotEqual(project_map, None)
                files_maps = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id=str(file.id),
                    file_id=file.id,
                    connection_id=connection.id,
                    diffgram_class_string='file',
                    type='{}_file'.format(connection.integration_name),
                )
                self.assertNotEqual(files_maps, None)
def create_label(label_data, session):
    """
    Create and commit a Label with the given name.

    NOTE: unlike the get-or-create variant of create_label earlier in this
    module, this one always creates a fresh row.
    """
    new_label = Label()
    new_label.name = label_data.get('name')
    session.add(new_label)
    regular_methods.commit_with_rollback(session)
    return new_label
def enqueue_packet(project_string_id,
                   session,
                   media_url = None,
                   media_type = None,
                   file_id = None,
                   file_name = None,
                   job_id = None,
                   batch_id = None,
                   directory_id = None,
                   source_directory_id = None,
                   instance_list = None,
                   video_split_duration = None,
                   frame_packet_map = None,
                   remove_link = None,
                   add_link = None,
                   copy_instance_list = None,
                   commit_input = False,
                   task_id = None,
                   video_parent_length = None,
                   type = None,
                   task_action = None,
                   external_map_id = None,
                   original_filename = None,
                   external_map_action = None,
                   enqueue_immediately = False,
                   mode = None,
                   allow_duplicates = False,
                   extract_labels_from_batch = False):
    """
    Creates an Input() object and enqueues it for media processing.

    The Input is flushed to get an id, optionally has instances extracted
    from a batch, and is then either pushed onto the local processing queue
    (when settings allow or enqueue_immediately is set) or marked as
    processing_deferred.

    NOTE(review): the `type` parameter shadows the builtin and is never read
    in this body — the Input's type is hard-coded to "from_url". The
    `queue_limit` local is also computed but unused here. TODO confirm both.

    :param project_string_id: string id of the owning project.
    :param commit_input: commit the session before enqueueing when True.
    :param enqueue_immediately: force local enqueue regardless of settings.
    :return: the Input() object that was created.
    """
    diffgram_input = Input()
    project = Project.get(session, project_string_id)
    diffgram_input.file_id = file_id
    diffgram_input.task_id = task_id
    diffgram_input.batch_id = batch_id
    diffgram_input.video_parent_length = video_parent_length
    diffgram_input.remove_link = remove_link
    diffgram_input.add_link = add_link
    diffgram_input.copy_instance_list = copy_instance_list
    diffgram_input.external_map_id = external_map_id
    diffgram_input.original_filename = original_filename
    diffgram_input.external_map_action = external_map_action
    diffgram_input.task_action = task_action
    diffgram_input.mode = mode
    diffgram_input.project = project
    diffgram_input.media_type = media_type
    diffgram_input.type = "from_url"
    diffgram_input.url = media_url
    diffgram_input.video_split_duration = video_split_duration
    diffgram_input.allow_duplicates = allow_duplicates
    if instance_list:
        # Instances are stored wrapped under a 'list' key.
        diffgram_input.instance_list = {}
        diffgram_input.instance_list['list'] = instance_list
    if frame_packet_map:
        diffgram_input.frame_packet_map = frame_packet_map
        # print(diffgram_input.frame_packet_map)
    session.add(diffgram_input)
    # Flush so diffgram_input.id is available below.
    session.flush()
    if batch_id and extract_labels_from_batch:
        upload_tools = Upload(session = session, project = project, request = None)
        upload_tools.extract_instance_list_from_batch(input = diffgram_input,
                                                      input_batch_id = batch_id,
                                                      file_name = file_name)
    # Expect temp dir to be None here.
    # because each machine should assign it's own temp dir
    # Something else to consider for future here!
    # Once this is part of input, it will be smoothly handled at right time as part of
    # processing queue
    diffgram_input.job_id = job_id
    # Process media handles checking if the directory id is valid
    diffgram_input.directory_id = directory_id
    diffgram_input.source_directory_id = source_directory_id
    diffgram_input_id = diffgram_input.id
    queue_limit = 0
    if media_type == "image":
        queue_limit = 30  # 50
    if media_type == "video":
        queue_limit = 1
    if settings.PROCESS_MEDIA_ENQUEUE_LOCALLY_IMMEDIATELY is True or enqueue_immediately:
        print('diffgram_input_id', diffgram_input_id)
        if commit_input:
            regular_methods.commit_with_rollback(session = session)
        item = PrioritizedItem(
            priority = 10000,  # individual frames have a priority here.
            input_id = diffgram_input_id,
            media_type = media_type)
        add_item_to_queue(item)
    else:
        diffgram_input.processing_deferred = True  # Default
    return diffgram_input
def create_job(job_data, session):
    """
    Create a Job object for testing purposes.

    You can supply your own project if a specific one has to be attached;
    otherwise the function mocks a project (with one user) for you, so new
    developers don't have to worry about object relations to mock faster.

    TODO: More data mocks are still pending, like members, labels, etc.
    For now, this is sufficient for the current tests we're writing.

    :param job_data: dict of optional job attributes.
    :param session: SQLAlchemy session.
    :return: the committed Job.
    """
    if job_data.get('project'):
        job = Job(member_created = None, project = job_data.get('project'))
    else:
        # No project supplied: mock one with a single placeholder user.
        project_string_id = '{}-job-project'.format(get_random_string(8))
        project_context = {
            'project_string_id': project_string_id,
            'project_name': '{}-job-project'.format(project_string_id),
            'users': [
                {'username': '******'.format(get_random_string(5)),
                 'email': '*****@*****.**',
                 'password': '******',
                 'project_string_id': project_string_id
                 }
            ]
        }
        project_data = create_project_with_context(project_context, session)
        job = Job(member_created = None,
                  project = project_data.get('project'),
                  project_id = project_data.get('project').id)
    session.add(job)
    # TODO: support mocking labels.
    # Copy so we don't mutate the caller's dict when inserting
    # 'label_file_list' below.
    job.label_dict = dict(job_data.get('label_dict', {}))
    job.type = job_data.get('type', 'draft')
    job.status = job_data.get('status', 'draft')
    job.label_dict['label_file_list'] = job_data.get('label_file_list', [])
    job.name = job_data.get('name', None)
    job.output_dir_action = job_data.get('output_dir_action', 'nothing')
    # BUGFIX: previously read the 'name' key here (copy/paste error), so any
    # named job got its name as the share_type.
    job.share_type = job_data.get('share_type', 'project')
    job.share_type = job.share_type.lower()
    job.launch_datetime = datetime.datetime.now()
    # launch_datetime was just set, so the job is always waiting to launch.
    if job.launch_datetime is not None:
        job.waiting_to_be_launched = True
    job.interface_connection_id = job_data.get('interface_connection_id')
    job.file_count = job_data.get('file_count', 0)
    # note this is user set
    # BUGFIX: previously read the 'file_count' key here (copy/paste error).
    job.permission = job_data.get('permission', 'all_secure_users')
    job.label_mode = job_data.get('label_mode', 'open')
    job.passes_per_file = job_data.get('passes_per_file', 1)
    job.instance_type = job_data.get('instance_type', 'box')
    job.file_handling = job_data.get('file_handling', 'use_existing')
    job.stat_count_tasks = job_data.get('stat_count_tasks', 0)
    job.completion_directory_id = job_data.get('completion_directory_id', None)
    # Every job gets its own blank working directory.
    directory = WorkingDir.new_blank_directory(session = session)
    session.add(directory)
    job.directory = directory
    regular_methods.commit_with_rollback(session)
    # Attach any requested directories with sync semantics.
    for dir in job_data.get('attached_directories', []):
        rel = JobWorkingDir()
        rel.sync_type = 'sync'
        rel.job_id = job.id
        rel.working_dir_id = dir.id
        session.add(rel)
    regular_methods.commit_with_rollback(session)
    return job