def sync_projects_for_task_template(self, task_template):
    """
    Sync instance data from the Datasaur project mapped to this task template.

    Connects to Datasaur through the task template's interface connection,
    fetches the mapped project, and — only if the project status is COMPLETE —
    syncs instances for every file that has a 'datasaur_file' external map.

    :param task_template: the Diffgram job whose Datasaur project should be synced.
    :return: None. Logs and returns early on any missing mapping or fetch error.
    """
    connection = task_template.interface_connection
    connector_manager = ConnectorManager(connection=connection, session=self.session)
    datasaur_connector = connector_manager.get_connector_instance()
    datasaur_connector.connect()

    project_map = ExternalMap.get(
        session=self.session,
        job_id=task_template.id,
        diffgram_class_string='task_template',
        type='datasaur_project',
        return_kind='first')
    if not project_map:
        logger.error('Could not find external map for task template {}'.format(task_template.id))
        return

    # Fetch the Datasaur project this task template is mapped to.
    project_data = datasaur_connector.fetch_data({
        'action_type': 'get_project',
        'event_data': {},
        'project_id': project_map.external_id
    })
    # Connector reports failures through a 'log' dict rather than raising.
    if 'log' in project_data and 'error' in project_data['log']:
        logger.error('Error fetching datasaur project {}'.format(project_data['log']['error']))
        logger.error('Datasaur project ID {} not found. Maybe was deleted?'.format(project_map.external_id))
        return

    datasaur_project = project_data['result']
    logger.debug('Fetched project: {}'.format(datasaur_project))

    # NOTE: the previous version re-checked `if project_map:` here with an
    # unreachable else branch; project_map is already guaranteed truthy above.
    task_template = project_map.job
    if datasaur_project['status'] != 'COMPLETE':
        logger.debug('Datasaur project {} is not completed. Skipping...'.format(datasaur_project['id']))
        return

    # Sync every file of the task template that is mapped to a Datasaur file.
    files_map = ExternalMap.get(
        return_kind='all',
        session=self.session,
        job_id=task_template.id,
        diffgram_class_string='file',
        type='datasaur_file')
    for file_map in files_map:
        diffgram_file = file_map.file
        datasaur_file_id = file_map.external_id
        logger.debug('Syncing File from Datasaur {}'.format(diffgram_file.id))
        self.fetch_instances_from_file(task_template, diffgram_file, datasaur_file_id, datasaur_connector)
def map_scale_ai_task(self, task, scale_ai_task, type='box'):
    """
    Create an ExternalMap linking a Diffgram task to a ScaleAI task.

    :param task: the Diffgram Task to map.
    :param scale_ai_task: the ScaleAI task object; its id attribute differs by type.
    :param type: 'box' (id lives on scale_ai_task.id) or 'polygon'
        (id lives on scale_ai_task.task_id).
    :return: the newly created ExternalMap.
    :raises Exception: if no external id can be resolved for the given type.
    """
    task_id = None
    if type == 'box':
        task_id = scale_ai_task.id
    elif type == 'polygon':
        task_id = scale_ai_task.task_id
    if task_id is None:
        raise Exception('Cannot map ScaleAI task. Id is None')
    # Bug fix: previously passed scale_ai_task.id regardless of type, so
    # polygon tasks were mapped with the wrong external id. Use task_id.
    external_map = ExternalMap.new(
        session=self.session,
        task=task,
        external_id=task_id,
        connection=task.job.interface_connection,
        diffgram_class_string='task',
        type='{}_task'.format(task.job.interface_connection.integration_name),
        url='',
        add_to_session=True,
        flush_session=True)
    # Assigned at the bottom to avoid circular dependencies on job.
    self.task_template.default_external_map = external_map
    logger.debug('Created ScaleAI Task {}'.format(scale_ai_task.id))
    return external_map
def labelbox_web_hook_manager():
    """
    Webhook for receiving data on Diffgram once finished on labelbox.
    # NOTE: Labelbox does not support Text or dropdown classifications in export for videos.
    :return: flask response — error string on bad signature, otherwise JSON.
    """
    # Verify the request really came from Labelbox before touching the payload.
    payload = request.data
    secret = settings.LABEL_BOX_SECRET
    log = regular_log.default()
    computed_signature = hmac.new(
        bytearray(secret.encode('utf-8')),
        msg=payload,
        digestmod=hashlib.sha1).hexdigest()
    # Security fix: use a constant-time comparison instead of `!=` so signature
    # bytes cannot be guessed via timing; .get() avoids a 500 when the header
    # is missing from an unauthenticated probe.
    provided_signature = request.headers.get('X-Hub-Signature', '')
    if not hmac.compare_digest(provided_signature, 'sha1=' + computed_signature):
        error = 'Error: computed_signature does not match signature provided in the headers'
        logger.error(
            'Error: computed_signature does not match signature provided in the headers'
        )
        return error
    with sessionMaker.session_scope() as session:
        # Read for parity with the original handler; value is currently unused
        # but a missing header still surfaces as an error here.
        labelbox_event = request.headers['X-Labelbox-Event']
        payload = request.json
        logger.debug('Payload for labelbox webhooks: {}'.format(payload))
        labelbox_project_id = payload['project']['id']
        project_external_mapping = ExternalMap.get(
            session=session,
            external_id=labelbox_project_id,
            type='labelbox',
            diffgram_class_string='task_template')
        # Guard clauses instead of nested if/else for readability.
        if not project_external_mapping:
            log['error']['labelbox_project'] = 'Labelbox external mapping not found.'
            return jsonify(log)
        task_template = Job.get_by_id(session, project_external_mapping.job_id)
        if not task_template:
            log['error']['task_template'] = 'Task template not found.'
            return jsonify(log)
        connection = task_template.interface_connection
        logger.debug('Connection for labelbox: {}'.format(connection))
        connector_manager = ConnectorManager(connection=connection, session=session)
        connector = connector_manager.get_connector_instance()
        connector.connect()
        sync_manager = LabelBoxSyncManager(
            session=session,
            task_template=task_template,
            labelbox_project=None,
            log=log,
            labelbox_connector=connector)
        sync_manager.handle_task_creation_hook(payload)
        return jsonify({'message': 'OK.'})
def get_scale_ai_project(self):
    """Return the ScaleAI external project id mapped to this task template, or None."""
    mapping = ExternalMap.get(
        session=self.session,
        job_id=self.task_template.id,
        connection_id=self.task_template.interface_connection_id,
        diffgram_class_string='task_template',
        type=self.task_template.interface_connection.integration_name,
    )
    # No mapping means the project was never created on ScaleAI's side.
    return mapping.external_id if mapping else None
def save_label_instance_ontology_mapping(self, ontology, connection):
    """
    Saves the relationships between ID's of Diffgram label files and the
    tool/classification names in the Labelbox ontology.

    Feature schema IDs were removed from the Labelbox API, so the tool name
    is used as the external id instead.

    :param ontology: Labelbox ontology payload (expects
        ontology['project']['ontology']['normalized']['tools']).
    :param connection: the interface connection whose integration_name is
        used as the ExternalMap type.
    :return: dict mapping external name -> {'label_id': ..., 'attributes': {}}.
    """
    mapping = {}
    tools = ontology['project']['ontology']['normalized']['tools']
    for tool in tools:
        # Fix: look the label file up once instead of twice per tool.
        diffgram_label_file = self.task_template.get_label_file_by_name(tool['name'])
        diffgram_label_file_id = diffgram_label_file['id']
        # Feature schema ID was removed from API. Using name instead
        # feature_schema_id = tool['featureSchemaId']
        feature_schema_id = tool['name']
        mapping[feature_schema_id] = {'label_id': diffgram_label_file_id, 'attributes': {}}
        ExternalMap.new(
            session=self.session,
            file_id=diffgram_label_file_id,
            external_id=feature_schema_id,
            type=connection.integration_name,
            diffgram_class_string='label_file',
            connection=connection,
            add_to_session=True)
        classifications = tool.get('classifications', None)
        if classifications:
            for classification in classifications:
                attribute_group = self.task_template.get_attribute_group_by_name(
                    diffgram_label_file, classification['name'])
                diffgram_attribute_group_id = attribute_group['id']
                # feature_schema_id = classification['featureSchemaId']
                # Changing to name since feature schema was removed.
                feature_schema_id = classification['name']
                ExternalMap.new(
                    session=self.session,
                    external_id=feature_schema_id,
                    file_id=diffgram_label_file_id,
                    attribute_template_group_id=diffgram_attribute_group_id,
                    type=connection.integration_name,
                    diffgram_class_string='label_file',
                    connection=connection,
                    add_to_session=True)
    return mapping
def test_execute_after_launch_strategy(self):
    """
    ScaleAI launch strategy: with project creation mocked out, launching a
    job should create exactly one task and a task_template external map.
    """
    file = data_mocking.create_file({'project_id': self.project.id}, self.session)
    attach_dir1 = data_mocking.create_directory({
        'project': self.project,
        'user': self.project_data['users'][0],
        'files': [file]
    }, self.session)
    connection = data_mocking.create_connection({
        'name': 'test',
        'integration_name': 'scale_ai',
        'project_id': self.project.id
    }, self.session)
    job = data_mocking.create_job({
        'name': 'my-test-job-{}'.format(1),
        'project': self.project,
        'status': 'active',
        'type': "Normal",
        'attached_directories': [attach_dir1],
        'interface_connection_id': connection.id
    }, self.session)
    strategy = ScaleAITaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default()
    )
    # Mock the remote call so no real ScaleAI project is created.
    with patch.object(ScaleAITaskTemplateAfterLaunchStrategy,
                      'create_scale_ai_project',
                      return_value={'id': '123', 'name': 'scaleaitest'}):
        strategy.execute_after_launch_strategy()
        commit_with_rollback(self.session)
        tasks_count = self.session.query(Task).filter(
            Task.job_id == job.id
        ).count()
        # Removed a dead `tasks = ...all()` query whose result was never used.
        self.assertEqual(tasks_count, 1)
        external_maps = ExternalMap.get(
            session=self.session,
            job_id=job.id,
            diffgram_class_string='task_template',
            connection_id=connection.id,
            type=connection.integration_name
        )
        self.assertNotEqual(external_maps, None)
def create_scale_ai_project_mapping(self, scale_ai_project, connection):
    """
    Create the ExternalMap linking this task template to a ScaleAI project.

    :param scale_ai_project: dict with at least a 'name' key (ScaleAI keys
        projects by name in its dashboard URLs).
    :param connection: the interface connection for ScaleAI.
    :return: the newly created ExternalMap.
    """
    external_map = ExternalMap.new(
        session=self.session,
        job=self.task_template,
        external_id=scale_ai_project['name'],
        connection=connection,
        diffgram_class_string='task_template',
        type=connection.integration_name,
        url='https://dashboard.scale.com/test/tasks?project={}'.format(scale_ai_project['name']),
        add_to_session=True,
        flush_session=True
    )
    # Commented to bottom to avoid circular dependencies on job.
    self.task_template.default_external_map = external_map
    logger.debug('Created ScaleAI Project {}'.format(scale_ai_project['name']))
    # Consistency fix: return the map like create_labelbox_project_mapping does.
    return external_map
def create_labelbox_project_mapping(self, labelbox_project, connection):
    """
    Create and return the ExternalMap linking this task template to a
    Labelbox project (keyed by the project's uid).
    """
    project_uid = labelbox_project.uid
    mapping = ExternalMap.new(
        session=self.session,
        job=self.task_template,
        external_id=project_uid,
        connection=connection,
        diffgram_class_string='task_template',
        type=connection.integration_name,
        url='https://app.labelbox.com/projects/{}/overview'.format(project_uid),
        add_to_session=True,
        flush_session=True
    )
    # Commented to bottom to avoid circular dependencies on job.
    self.task_template.default_external_map = mapping
    logger.debug('Created Labelbox Project {}'.format(project_uid))
    return mapping
def get_external_id_to_task(session, task, task_template):
    """
    Look up the Labelbox external id for a Diffgram task.

    Returns None when the task template is missing, has no interface
    connection, the connection is not Labelbox, or no mapping exists.
    """
    if not task_template:
        return
    if not task_template.interface_connection:
        return
    connection = task_template.interface_connection
    if connection.integration_name != 'labelbox':
        return
    # Try to find the task external ID
    external_map = ExternalMap.get(
        session=session,
        task_id=task.id,
        diffgram_class_string="task",
        type="labelbox",
    )
    if external_map:
        return external_map.external_id
    return None
def task_completed_scaleai():
    """
    Webhook endpoint: ScaleAI notifies us that a task was completed.

    Validates the payload shape, finds the Diffgram task mapped to the
    ScaleAI task id, and enqueues the returned annotations.
    :return: empty JSON response (or error log JSON if mapping is missing).
    """
    spec_list = [
        {'status': str},
        {'task': {
            'task_id': str,
            'completed_at': str,
            'response': dict,
            'created_at': str,
            'callback_url': str,
            'type': str,
            'status': str,
            'instruction': str,
            'params': dict,
            'metadata': dict,
        }},
        {'response': dict},
        {'task_id': str},
    ]
    # Renamed from `input` to avoid shadowing the builtin.
    log, input_data, untrusted_input = regular_input.master(
        request=request, spec_list=spec_list)
    annotations = input_data['response']['annotations']
    with sessionMaker.session_scope() as session:
        external_map_task = ExternalMap.get(
            session=session,
            external_id=input_data['task']['task_id'],
            type='{}_task'.format('scale_ai'))
        # Robustness fix: an unknown task_id used to raise AttributeError here.
        if external_map_task is None:
            log['error']['external_map'] = 'No mapping found for ScaleAI task id.'
            logger.error('No external map for ScaleAI task {}'.format(
                input_data['task']['task_id']))
            return jsonify(log)
        task = external_map_task.task
        task_template = task.job
        scale_ai_sync_manager = ScaleAISyncManager(
            session=session,
            task_template=task_template,
            log=log,
            scale_ai_connector=None)
        scale_ai_sync_manager.enqueue_scale_ai_annotations(task, annotations)
    return jsonify({})
def execute_after_launch_strategy(self):
    """
    This strategy will attach files from sync directories and creates tasks
    in Diffgram for each of them.

    Flow: build a Datasaur label set from the task template's labels, create
    the Datasaur project with the attached text files, record external maps
    for label set / labels / project / files, then run the standard task
    template launch. On any failure the already-created Datasaur project is
    deleted before re-raising.
    :return: None
    """
    datasaur_project = None
    connection = self.task_template.interface_connection
    logger.debug('Connection for Datasaur: {}'.format(connection))
    connector_manager = ConnectorManager(connection=connection, session=self.session)
    connector = connector_manager.get_connector_instance()
    connector.connect()
    try:
        label_data = []
        for label_element in self.task_template.label_dict.get('label_file_list_serialized', []):
            element = {
                'uuid': str(uuid.uuid4()),
                'diffgram_label_file': label_element['id'],
                'name': '{}'.format(label_element['label']['name']),
                'color': label_element['colour']['hex'].upper(),
            }
            label_data.append(element)
        # First we need to build a label set
        label_set_result = self.create_datasaur_labelset(label_data, connector)
        label_set = label_set_result['result']['createLabelSet']
        logger.debug('Created label_set {}'.format(label_set))
        if label_set.get('id'):
            # Bug fix: format string was missing its {} placeholder, so the
            # label set ID never appeared in the log line.
            logger.info('Datasaur Labelset created successfully ID: {}'.format(label_set['id']))
            ExternalMap.new(
                session=self.session,
                job=self.task_template,
                external_id=label_set['id'],
                connection=connection,
                diffgram_class_string='',
                type='{}_label_set'.format(connection.integration_name),
                url='',
                add_to_session=True,
                flush_session=True
            )
            # Now save mappings for created labels
            for label_element in label_data:
                ExternalMap.new(
                    session=self.session,
                    job=self.task_template,
                    file_id=label_element['diffgram_label_file'],
                    external_id=label_element['uuid'],
                    connection=connection,
                    diffgram_class_string='label_file',
                    type='{}_label'.format(connection.integration_name),
                    url='',
                    add_to_session=True,
                    flush_session=True
                )
        # Now we create a project
        files_to_process = self.task_template.get_attached_files(self.session, type='text')
        files_to_process_by_id = {}
        if len(files_to_process) == 0:
            raise Exception('Task template has no files in attached folder. '
                            'Stopping Datasaur launch strategy.')
        for file in files_to_process:
            files_to_process_by_id[str(file.id)] = file
        # Replaced leftover print() debugging with logger.debug.
        logger.debug('files_to_process_by_id: {}'.format(files_to_process_by_id))
        result = self.create_datasaur_project(connector, label_set, files_to_process)
        logger.debug('Create datasaur Project result: {}'.format(result))
        if 'result' in result:
            datasaur_project = result['result']
            ExternalMap.new(
                session=self.session,
                job=self.task_template,
                external_id=datasaur_project['id'],
                connection=connection,
                diffgram_class_string='task_template',
                type='{}_project'.format(connection.integration_name),
                url='https://datasaur.ai/projects/{}/'.format(datasaur_project['id']),
                add_to_session=True,
                flush_session=True,
            )
            logger.debug('Created Datasaur Project.')
            # Save file ID's mappings
            project_files_results = self.get_project_files_list(connector, datasaur_project)
            logger.debug('Project files list result: {}'.format(project_files_results))
            project_files = project_files_results['result']['documents']
            for file in project_files:
                # Datasaur document name carries the Diffgram file id.
                diffgram_file = files_to_process_by_id[file['name']]
                ExternalMap.new(
                    session=self.session,
                    job=self.task_template,
                    external_id=file['id'],
                    file=diffgram_file,
                    connection=connection,
                    diffgram_class_string='file',
                    type='{}_file'.format(connection.integration_name),
                    url='',
                    add_to_session=True,
                    flush_session=True,
                )
            # Now create tasks as usual.
            logger.info(
                'DatasaurTaskTemplateAfterLaunchStrategy for Task Template ID: {} completed successfully.'.format(
                    self.task_template.id))
            logger.debug('Proceding to standard task template launch...')
            standard_strategy = StandardTaskTemplateAfterLaunchStrategy(
                session=self.session,
                task_template=self.task_template,
                log=self.log
            )
            standard_strategy.execute_after_launch_strategy()
        else:
            logger.error('Error from connector: Rolling back project creation...')
            raise Exception(result)
    except Exception:
        logger.error('Error during datasaur launch strategy. {}'.format(traceback.format_exc()))
        if datasaur_project:
            logger.error('Rolling back project creation...')
            result = connector.put_data({
                'action_type': 'delete_project',
                'project_id': datasaur_project['id'],
                'event_data': {},
            })
        # Bare raise preserves the original traceback (was `raise e`).
        raise
def send_all_files_in_task_template(self):
    """
    Used for initial sync. Will go on all attached directories of the task
    template and create a dataset if doesn't exist and then send each file
    on the dataset to labelbox's dataset.
    :return:
    """
    datasets = self.task_template.get_attached_dirs(self.session)
    if not datasets:
        return
    for dataset in datasets:
        # Assumption here is that the labeling interface has already been checked so we assume we need to
        # create the dataset if it does not exits.
        logger.debug('Syncing dataset {}-{} in Labelbox'.format(
            dataset.nickname, dataset.id))
        if dataset.default_external_map:
            # Fetch dataset
            logger.debug('Dataset already exists... attaching.')
            dataset_id = dataset.default_external_map.external_id
            result = self.labelbox_connector.fetch_data({
                'action_type': 'get_dataset',
                'event_data': {},
                'dataset_id': dataset_id
            })
            force_create = False
            if result['exists']:
                labelbox_dataset = result['result']
                # Attach dataset to project
                result_attach = self.labelbox_connector.put_data({
                    'action_type': 'attach_dataset',
                    'dataset': labelbox_dataset,
                    'project': self.labelbox_project,
                    'event_data': {}
                })
            else:
                logger.debug('Dataset not found, re-creating it...')
                # If dataset was not found it may have been deleted.
                # So we'll create it again.
                force_create = True
                # Create dataset
                result = self.labelbox_connector.put_data({
                    'action_type': 'create_dataset',
                    'name': dataset.nickname,
                    'event_data': {},
                    'project': self.labelbox_project
                })
                labelbox_dataset = result['result']
                # Now attach it
                result_attach = self.labelbox_connector.put_data({
                    'action_type': 'attach_dataset',
                    'dataset': labelbox_dataset,
                    'project': self.labelbox_project,
                    'event_data': {}
                })
                dataset.default_external_map = ExternalMap.new(
                    session=self.session,
                    external_id=labelbox_dataset.uid,
                    dataset=dataset,
                    diffgram_class_string="dataset",
                    type="labelbox",
                    add_to_session=True,
                    flush_session=True)
                self.session.add(dataset)
            file_list = WorkingDirFileLink.file_list(self.session, dataset.id, limit=None)
            self.add_files_to_labelbox_dataset(
                diffgram_files=file_list,
                labelbox_dataset=labelbox_dataset,
                force_create=force_create)
        else:
            logger.debug('Dataset does not exist... creating.')
            # Create dataset
            result = self.labelbox_connector.put_data({
                'action_type': 'create_dataset',
                'name': dataset.nickname,
                'event_data': {},
                'project': self.labelbox_project
            })
            labelbox_dataset = result['result']
            dataset.default_external_map = ExternalMap.new(
                session=self.session,
                external_id=labelbox_dataset.uid,
                dataset=dataset,
                url='https://app.labelbox.com/dataset/{}'.format(
                    labelbox_dataset.uid),
                diffgram_class_string="dataset",
                type="labelbox",
                add_to_session=True,
                flush_session=True,
            )
            self.session.add(dataset)
            file_list = WorkingDirFileLink.file_list(self.session, dataset.id, limit=None)
            self.add_files_to_labelbox_dataset(
                diffgram_files=file_list,
                labelbox_dataset=labelbox_dataset)
def serialize_builder_info_default(self, session, user=None):
    """
    Serialize this job for the builder's default info view.

    Includes completion statistics, external mappings, attached directories
    and interface-connection details.
    """
    # TODO share this with trainer info function
    user_to_job_serialized = None
    if user:
        user_to_job = User_To_Job.get_single_by_ids(
            session=session, user_id=user.id, job_id=self.id)
        if user_to_job:
            user_to_job_serialized = user_to_job.serialize_trainer_info_default()

    # Completion stats; guard against a zero/None task count.
    percent_completed = 0
    tasks_remaining = 0
    if self.stat_count_tasks:
        percent_completed = (self.stat_count_complete / self.stat_count_tasks) * 100
        tasks_remaining = self.stat_count_tasks - self.stat_count_complete

    external_mappings = ExternalMap.get(
        session=session,
        job_id=self.id,
        diffgram_class_string='task_template',
        return_kind='all')

    member_list_ids = None
    if session:
        member_list_ids = self.get_with_cache(
            cache_key='member_list_ids',
            cache_miss_function=self.regenerate_member_list_ids,
            session=session,
            miss_function_args={'session': session})

    external_mappings_serialized = [x.serialize() for x in external_mappings]

    default_userscript = None
    if self.default_userscript:
        default_userscript = self.default_userscript.serialize()

    return {
        'id': self.id,
        'name': self.name,
        'type': self.type,
        'share_type': self.share_type,
        'member_list_ids': member_list_ids,
        'status': self.status,
        'time_created': self.time_created,
        'time_completed': self.time_completed,
        'user_to_job': user_to_job_serialized,
        'attached_directories_dict': self.get_with_cache(
            cache_key='attached_directories_dict',
            cache_miss_function=self.get_attached_dirs_serialized,
            session=session,
            miss_function_args={'session': session}),
        'external_mappings': external_mappings_serialized,
        'file_count_statistic': self.file_count_statistic,
        'stat_count_tasks': self.stat_count_tasks,
        'stat_count_complete': self.stat_count_complete,
        'percent_completed': percent_completed,
        'tasks_remaining': tasks_remaining,
        'is_live': self.is_live,
        'pending_initial_dir_sync': self.pending_initial_dir_sync,
        'interface_connection': self.interface_connection.serialize() if self.interface_connection else None,
        # For now the SDK uses the /info path
        # So if we want to expose this stuff we need that there
        # maybe something to review in the future
        'file_count': self.file_count,
        'launch_datetime': self.launch_datetime,
        'launch_datetime_deferred': self.launch_datetime_deferred,
        'launch_attempt_log': self.launch_attempt_log,
        'waiting_to_be_launched': self.waiting_to_be_launched,
        'interface_connection_id': self.interface_connection_id,
        # Realizing we want the label dict
        # ie to show the label information (not just ids...)
        'label_dict': self.label_dict,
        'completion_directory_id': self.completion_directory_id,
        'output_dir_action': self.output_dir_action,
        'pro_network': self.pro_network,
        'default_userscript': default_userscript
    }
def handle_task_creation_hook(self, payload):
    """
    Process a Labelbox label-created webhook payload.

    Resolves the Diffgram file mapped to the payload's dataRow, finds the
    matching task on this task template, records the task's external map,
    and enqueues the payload's label instances (video frames or image).

    :param payload: Labelbox webhook payload (expects 'dataRow', 'label', 'id').
    :raises Exception: when the file mapping or the Diffgram task is missing.
    """
    labelbox_data_row_id = payload['dataRow']['id']
    label = json.loads(payload['label'])
    labelbox_label_id = payload['id']
    video_mode = False
    frames_data = None
    if 'frames' in label:
        # Fetch video objects
        frames_result = self.labelbox_connector.fetch_data({
            'action_type': 'get_frames',
            'frames_url': label['frames'],
            'event_data': {},
        })
        if result_has_error(frames_result):
            return jsonify(frames_result), 400
        frames_data = frames_result['result']
        video_mode = True
    else:
        label_instances = label['objects']
    file_external_mapping = ExternalMap.get(
        session=self.session,
        external_id=labelbox_data_row_id,
        diffgram_class_string='file',
        type='labelbox')
    if not file_external_mapping:
        logger.error('file_external_mapping not found')
        raise Exception('file_external_mapping not found')
    diffgram_task = self.session.query(Task).filter(
        Task.job_id == self.task_template.id,
        Task.file_id == file_external_mapping.file_id).first()
    if not diffgram_task:
        logger.error('Diffgram task not found')
        raise Exception('Diffgram task not found')
    # Build external mapping
    diffgram_task.default_external_map = ExternalMap.new(
        session=self.session,
        external_id=payload['id'],
        task=diffgram_task,
        diffgram_class_string="task",
        type="labelbox",
        add_to_session=True,
        flush_session=True)
    self.session.add(diffgram_task)
    # Now process Labels and add them to file.
    if video_mode:
        result = self.update_instance_list_for_video(frames_data, diffgram_task)
        if not result:
            logger.error('Error updating instances')
            return jsonify('Error updating instances'), 400
        logger.info('Updated instances succesfully enqueued.')
    else:
        result = self.update_instance_list_for_image_or_frame(
            label_instances, diffgram_task)
        # Bug fix: condition was `if not result or not result:` — the
        # duplicated clause was a typo; a single check is intended.
        if not result:
            logger.error('Error updating instances')
            return jsonify('Error updating instances'), 400
        logger.info('Updated instances succesfully enqueued.')
def update_instance_list_for_image_or_frame(self,
                                            label_instances,
                                            diffgram_task,
                                            video_data=None,
                                            frame_packet_map=None):
    """
    Transform Labelbox label instances into Diffgram instances and enqueue them.

    :param label_instances: list of Labelbox instance dicts (each with
        'featureId' and 'title').
    :param diffgram_task: the Diffgram Task the instances belong to.
    :param video_data: when not None, instances belong to a video frame;
        expected to contain 'current_frame'. Enqueueing is then left to the caller.
    :param frame_packet_map: optional dict frame_number -> [instances] that is
        filled in-place when video_data is given.
    :return: True when instances were built (and, for image mode, enqueued);
        False when no instances were produced.
    """
    instance_list = []
    count = 1
    for labelbox_instance in label_instances:
        # Check if instance mapping already exists, if so provide instance_id to avoid overriding data.
        instance_map = ExternalMap.get(
            session=self.session,
            external_id=labelbox_instance['featureId'],
            diffgram_class_string='instance',
            type='labelbox_instance',
            connection_id=self.task_template.interface_connection.id)
        if not instance_map:
            # No mapping yet: create a placeholder map; external_id is filled
            # later via the 'set_instance_id' external_map_action.
            instance_map = ExternalMap.new(
                session=self.session,
                external_id=None,
                diffgram_class_string='instance',
                type='labelbox_instance',
                connection=self.task_template.interface_connection,
                add_to_session=True,
                flush_session=True)
        # Labels are matched by title (name), not by schema id.
        diffgram_label_file_data = self.task_template.get_label_file_by_name(
            labelbox_instance['title'])
        diffgram_label_instance = self.transform_labelbox_label_to_diffgram_instance(
            labelbox_instance,
            diffgram_label_file_data,
            instance_map=instance_map,
            sequence_num=count if video_data is not None else None)
        if frame_packet_map is not None:
            # Group instances by frame for the video caller.
            if video_data['current_frame'] not in frame_packet_map:
                frame_packet_map[video_data['current_frame']] = [
                    diffgram_label_instance
                ]
            else:
                frame_packet_map[video_data['current_frame']].append(
                    diffgram_label_instance)
        if diffgram_label_instance:
            instance_list.append(diffgram_label_instance)
            count += 1
    if instance_list and video_data is None:
        # NOTE(review): external_map_id uses `instance_map` from the LAST loop
        # iteration only — if multiple instances arrive, earlier maps never get
        # their external_id set. Looks suspicious; confirm intended behavior.
        enqueue_packet(
            project_string_id=self.task_template.project.project_string_id,
            session=self.session,
            media_url=None,
            media_type='image',
            job_id=self.task_template.id,
            file_id=diffgram_task.file.id,
            instance_list=instance_list,
            task_id=diffgram_task.id,
            task_action='complete_task',
            commit_input=True,
            external_map_id=instance_map.id,
            external_map_action='set_instance_id',
            mode="update_with_existing")
        return True
    elif instance_list:
        return True
    else:
        return False
def fetch_instances_from_file(
        self, task_template, diffgram_file, file_id, datasaur_connector):
    """
    Export a single Datasaur file and enqueue its labels as Diffgram
    text_token instances on the matching task.

    :param task_template: the Diffgram job the file belongs to.
    :param diffgram_file: the Diffgram file mapped to the Datasaur document.
    :param file_id: the Datasaur document id to export.
    :param datasaur_connector: connected Datasaur connector instance.
    :return: None. Returns early on export errors or unknown labels.
    """
    file_export_data = self.trigger_export_single_datasaur_file(
        datasaur_connector=datasaur_connector,
        file_id=file_id)
    instance_list = []
    # We get the task based on file id since assumption for datasaur is file and task will be the same concept.
    task = self.session.query(Task).filter(
        Task.job_id == task_template.id,
        Task.file_id == diffgram_file.id
    ).first()
    if 'log' in file_export_data and 'error' in file_export_data['log']:
        logger.error('Error fetching export data {}'.format(file_export_data))
        # Robustness fix: previously fell through and crashed with a KeyError
        # on file_export_data['result'] below.
        return
    label_items = file_export_data['result']['labelSet']['labelItems']
    # Index Datasaur labels by id and resolve each to its Diffgram label_file.
    label_items_by_id = {}
    for label in label_items:
        external_map_label = ExternalMap.get(
            session=self.session,
            job_id=task_template.id,
            external_id=label['id'],
            connection_id=task_template.interface_connection.id,
            diffgram_class_string='label_file',
            type='datasaur_label'
        )
        if external_map_label:
            label_items_by_id[label['id']] = label
            label_items_by_id[label['id']]['label_file_id'] = external_map_label.file_id
        else:
            logger.error('No label_file found for datasaur ID: {}'.format(label['id']))
            return
    sentences = file_export_data['result']['sentences']
    for sentence in sentences:
        instances = sentence['labels']
        for instance in instances:
            instance_map = ExternalMap.get(
                session=self.session,
                external_id=instance['id'],
                diffgram_class_string='instance',
                type='datasaur_instance',
                return_kind='first')
            if not instance_map:
                logger.debug('Creating Instance Map...')
                instance_map = ExternalMap.new(
                    session=self.session,
                    job=task_template,
                    external_id=instance['id'],
                    connection=task_template.interface_connection,
                    diffgram_class_string='instance',
                    type='{}_instance'.format(
                        task_template.interface_connection.integration_name),
                    url='',
                    add_to_session=True,
                    flush_session=True)
            else:
                logger.debug('Instance Map exists, proceding to update.')
            instance_list.append({
                'start_sentence': instance['sidS'],
                'end_sentence': instance['sidE'],
                'start_token': instance['s'],
                'end_token': instance['e'],
                'start_char': instance['charS'],
                'end_char': instance['charE'],
                'sentence': sentence['id'],
                'type': 'text_token',
                'name': label_items_by_id[instance['l']]['labelName'],
                'label_file_id': label_items_by_id[instance['l']]['label_file_id']
            })
    logger.debug('Enqueuing new instances....')
    # Create new packet to ensure to commit this
    if task and task_template and diffgram_file:
        enqueue_packet(project_string_id=task_template.project.project_string_id,
                       session=self.session,
                       media_url=None,
                       media_type='text',
                       job_id=task_template.id,
                       file_id=diffgram_file.id,
                       instance_list=instance_list,
                       task_id=task.id,
                       task_action='complete_task',
                       commit_input=True,
                       mode="update")
        logger.info('Updated Task {} from datasaur.'.format(task.id))
def test_execute_after_launch_strategy(self):
    """
    Datasaur launch strategy: with label-set creation, project creation and
    the project file listing all mocked, launching a job should create one
    task plus external maps for the label set, project and file.
    """
    # Fixture: one text file attached to a directory of this project.
    file = data_mocking.create_file(
        {
            'project_id': self.project.id,
            'type': 'text'
        }, self.session)
    label = data_mocking.create_label({
        'name': 'mylabel',
    }, self.session)
    label_file = data_mocking.create_label_file(
        {
            'label': label,
            'project_id': self.project.id
        }, self.session)
    attach_dir1 = data_mocking.create_directory(
        {
            'project': self.project,
            'user': self.project_data['users'][0],
            'files': [file]
        }, self.session)
    connection = data_mocking.create_connection(
        {
            'name': 'test',
            'integration_name': 'datasaur',
            'project_id': self.project.id
        }, self.session)
    # Serialized label payload mirroring what the UI stores on a job;
    # the launch strategy reads label_file_list_serialized from it.
    labeldict = {
        "label_file_list_serialized": [{
            "id": label_file.id,
            "hash": "083e9ebc48d64e9a8874c6b95f490b56b8c4c5b0f4dacd90bd3534085e87d9fa",
            "type": "label",
            "state": "added",
            "created_time": "2020-07-15T18:48:34.477333",
            "time_last_updated": "2020-07-15T18:48:34.705290",
            "ann_is_complete": None,
            "original_filename": None,
            "video_id": None,
            "video_parent_file_id": None,
            "count_instances_changed": None,
            "attribute_group_list": [{
                "id": 2,
                "kind": "multiple_select",
                "is_root": True,
                "name": "carwheeltag",
                "prompt": "How is this car wheel",
                "show_prompt": True,
                "time_updated": "2020-08-05 19:37:07.703576",
                "attribute_template_list": [{
                    "id": 4,
                    "name": "Is rounded",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }, {
                    "id": 5,
                    "name": "is squared",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }, {
                    "id": 6,
                    "name": "is beautiful",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }, {
                    "id": 7,
                    "name": "is crazy",
                    "value_type": None,
                    "archived": False,
                    "group_id": 2,
                    "display_order": None
                }]
            }, {
                "id": 3,
                "kind": "select",
                "is_root": True,
                "name": "selectwheel",
                "prompt": "Please selectt something special about this wheels",
                "show_prompt": True,
                "time_updated": "2020-08-12 16:29:54.817801",
                "attribute_template_list": [{
                    "id": 10,
                    "name": "Silver Wheel",
                    "value_type": None,
                    "archived": False,
                    "group_id": 3,
                    "display_order": None
                }, {
                    "id": 9,
                    "name": "+Gold wheel",
                    "value_type": None,
                    "archived": False,
                    "group_id": 3,
                    "display_order": None
                }]
            }, {
                "id": 4,
                "kind": "text",
                "is_root": True,
                "name": "freewheel",
                "prompt": "What are your thought on this wheel?",
                "show_prompt": True,
                "time_updated": "2020-08-05 20:50:59.195249",
                "attribute_template_list": []
            }, {
                "id": 5,
                "kind": "radio",
                "is_root": True,
                "name": "clean",
                "prompt": "Is this wheel clean?",
                "show_prompt": True,
                "time_updated": "2020-08-05 20:53:46.314143",
                "attribute_template_list": [{
                    "id": 11,
                    "name": "Wheel is dirty",
                    "value_type": None,
                    "archived": False,
                    "group_id": 5,
                    "display_order": None
                }, {
                    "id": 12,
                    "name": "Wheek is clean",
                    "value_type": None,
                    "archived": False,
                    "group_id": 5,
                    "display_order": None
                }]
            }, {
                "id": 6,
                "kind": "text",
                "is_root": True,
                "name": "TEST",
                "prompt": "TEST28",
                "show_prompt": True,
                "time_updated": "2020-08-12 16:30:03.770141",
                "attribute_template_list": []
            }],
            "colour": {
                "hex": "#194d33",
                "hsl": {
                    "h": 150,
                    "s": 0.5,
                    "l": 0.2,
                    "a": 1
                },
                "hsv": {
                    "h": 150,
                    "s": 0.66,
                    "v": 0.3,
                    "a": 1
                },
                "rgba": {
                    "r": 25,
                    "g": 77,
                    "b": 51,
                    "a": 1
                },
                "a": 1
            },
            "label": {
                "id": 5,
                "name": "Car wheel",
                "default_sequences_to_single_frame": False
            }
        }],
        "label_file_colour_map": {}
    }
    job = data_mocking.create_job(
        {
            'name': 'my-test-job-{}'.format(1),
            'project': self.project,
            'status': 'active',
            'type': "Normal",
            'label_dict': labeldict,
            'attached_directories': [attach_dir1],
            'interface_connection_id': connection.id
        }, self.session)
    strategy = DatasaurTaskTemplateAfterLaunchStrategy(
        task_template=job,
        session=self.session,
        log=regular_log.default())
    # Mock all three remote Datasaur calls; the mocked file listing names the
    # document after the Diffgram file id, as the strategy expects.
    with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                      'create_datasaur_labelset',
                      return_value={
                          'result': {
                              'createLabelSet': {
                                  'id': 'mytestid'
                              }
                          }
                      }):
        with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                          'create_datasaur_project',
                          return_value={'result': {
                              'id': 'datasaur_test'
                          }}):
            with patch.object(DatasaurTaskTemplateAfterLaunchStrategy,
                              'get_project_files_list',
                              return_value={
                                  'result': {
                                      'id': 'datasaur_test',
                                      'documents': [{
                                          'id': str(file.id),
                                          'name': str(file.id)
                                      }]
                                  }
                              }):
                strategy.execute_after_launch_strategy()
                commit_with_rollback(self.session)
                tasks_count = self.session.query(Task).filter(
                    Task.job_id == job.id).count()
                tasks = self.session.query(Task).filter(
                    Task.job_id == job.id).all()
                self.assertEqual(tasks_count, 1)
                # One external map per mocked artifact: label set, project, file.
                external_map = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id='mytestid',
                    connection_id=connection.id,
                    diffgram_class_string='',
                    type='{}_label_set'.format(
                        connection.integration_name),
                )
                self.assertNotEqual(external_map, None)
                project_map = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id='datasaur_test',
                    connection_id=connection.id,
                    diffgram_class_string='task_template',
                    type='{}_project'.format(connection.integration_name),
                )
                self.assertNotEqual(project_map, None)
                files_maps = ExternalMap.get(
                    session=self.session,
                    job_id=job.id,
                    external_id=str(file.id),
                    file_id=file.id,
                    connection_id=connection.id,
                    diffgram_class_string='file',
                    type='{}_file'.format(connection.integration_name),
                )
                self.assertNotEqual(files_maps, None)
def add_files_to_labelbox_dataset(self,
                                  diffgram_files=None,
                                  labelbox_dataset=None,
                                  force_create=False):
    """
    Adds the files to labelbox. Important! If you call this method multiple
    times, multiple versions of the same file will be created at labelbox,
    so use only on initialization of task templates.

    :param diffgram_files: list of Diffgram files to push (default: none).
        Bug fix: was a mutable default argument ([]), which is shared across
        calls in Python.
    :param labelbox_dataset: the Labelbox dataset object to add rows to.
    :param force_create: Ignore existing files and always create (useful for
        recreating a dataset that was deleted)
    :return: the Labelbox task for the data-row creation, or False when no
        dataset was given.
    """
    if diffgram_files is None:
        diffgram_files = []
    if labelbox_dataset is None:
        return False
    file_urls = []
    diffgram_files_by_id = {}
    external_ids = []
    file_ids = [x.id for x in diffgram_files]
    datarow_external_maps = ExternalMap.get(
        session=self.session,
        file_id=file_ids,
        diffgram_class_string='file',
        type='labelbox',
        return_kind='all')
    # To avoid querying external map each time on for loop.
    external_map_by_id = {
        ext_map.file_id: ext_map
        for ext_map in datarow_external_maps
    }
    data_row_ids = [
        external_map.external_id for external_map in datarow_external_maps
        if external_map.external_id
    ]
    result_datarows = self.labelbox_connector.fetch_data({
        'action_type': 'get_data_rows',
        'event_data': '',
        'dataset': labelbox_dataset,
        'data_row_ids': data_row_ids
    })
    labelbox_existing_data_rows = result_datarows['result']['datasets'][0]['dataRows']
    # Use a set for O(1) membership tests instead of O(n) list scans.
    existing_data_rows_ids = {x['id'] for x in labelbox_existing_data_rows}
    deleted_data_rows = {
        row_id for row_id in data_row_ids
        if row_id not in existing_data_rows_ids
    }
    for diffgram_file in diffgram_files:
        # If we have a registered ID on labelbox, we skip file creation for this file.
        # We have to re-create it if it was deleted for some reason.
        diffgram_file_external_map = external_map_by_id.get(diffgram_file.id)
        if diffgram_file_external_map and diffgram_file_external_map.external_id and not force_create \
                and diffgram_file_external_map.external_id not in deleted_data_rows:
            logger.debug('File {} exists. Skipping..'.format(diffgram_file.id))
            continue
        if diffgram_file.type == "image":
            logger.debug('Adding image {} in Labelbox'.format(diffgram_file.id))
            if diffgram_file.image:
                data = diffgram_file.image.serialize_for_source_control(self.session)
                data_row = {
                    labelbox.schema.data_row.DataRow.row_data: data['url_signed'],
                    'external_id': diffgram_file.id
                }
                # Cache in memory the file for updating labelbox ID's later
                diffgram_files_by_id[diffgram_file.id] = diffgram_file
                external_ids.append(diffgram_file.id)
                file_urls.append(data_row)
        if diffgram_file.type == "video":
            if diffgram_file.video:
                logger.debug('Adding video {} in Labelbox'.format(diffgram_file.id))
                data = diffgram_file.video.serialize_list_view(
                    self.session, self.task_template.project)
                data_row = {
                    labelbox.schema.data_row.DataRow.row_data: data['file_signed_url'],
                    'external_id': diffgram_file.id
                }
                # Cache in memory the file for updating labelbox ID's later
                external_ids.append(diffgram_file.id)
                diffgram_files_by_id[diffgram_file.id] = diffgram_file
                file_urls.append(data_row)
    task = labelbox_dataset.create_data_rows(file_urls)
    # We want to wait since we're already deferring the creation process.
    task.wait_till_done()
    # Now update all Diffgram files with their labelbox data_row ID.
    query = """query($datasetId: ID!, $externalId: [String!]) {
        datasets(where:{id: $datasetId }){
            name
            id
            dataRows(where:{externalId_in: $externalId}){
                id,
                externalId
            }
        }
    }
    """
    data = {'datasetId': labelbox_dataset.uid, 'externalId': external_ids}
    result = self.labelbox_connector.put_data({
        'action_type': 'execute',
        'event_data': [],
        'query': query,
        'data': data
    })
    created_datarows = result['result']['datasets'][0]['dataRows']
    for datarow in created_datarows:
        file = diffgram_files_by_id[int(datarow['externalId'])]
        file.default_external_map = ExternalMap.new(
            session=self.session,
            external_id=datarow['id'],
            file=file,
            diffgram_class_string="file",
            type="labelbox",
            add_to_session=True,
            flush_session=True)
        self.session.add(file)
    return task