def handle(self, *args, **options):
    """Sync batches with MTurk for every project, or for one named project.

    Reads ``use_sandbox`` and ``project`` from the command options; when
    ``project`` is given, only that project is synced.
    """
    use_sandbox = options['use_sandbox']
    project_name = options['project']
    projects = Manager_Projects.get_all()
    if project_name is not None:
        projects = projects.filter(name=project_name)
    for database_object_project in projects:
        count_hits = HIT.objects.filter(
            batch__project=database_object_project).count()
        message = 'Syncing project {} with {} HITs'.format(
            database_object_project.name, count_hits)
        self.stdout.write(self.style.SUCCESS(message))
        Manager_Batches.sync_mturk(database_object_project, use_sandbox)
def aggregate_batches(cls, queryset) -> dict:
    """Aggregate assignment costs per status over *queryset*.

    Each batch is first annotated with assignment counters
    (``annotate_assignments``); the cost of each status is then
    ``count_assignments_<status> * settings_batch__reward`` and the
    per-status sums are returned as ``sum_costs_<status>`` keys,
    defaulting to 0 for an empty queryset (``Coalesce``).

    The original spelled out five identical annotate/aggregate stanzas;
    they are generated here from the status list instead.
    """
    queryset = Manager_Batches.annotate_assignments(queryset)
    statuses = ('approved', 'rejected', 'submitted', 'dead', 'pending')
    # costs_<status> = count_assignments_<status> * reward (integer cents)
    annotations = {
        'costs_{}'.format(status): ExpressionWrapper(
            F('count_assignments_{}'.format(status)) *
            F('settings_batch__reward'),
            output_field=IntegerField())
        for status in statuses
    }
    # sum_costs_<status> = SUM(costs_<status>), 0 when there are no rows
    aggregations = {
        'sum_costs_{}'.format(status): Coalesce(
            Sum('costs_{}'.format(status)), 0)
        for status in statuses
    }
    return queryset.annotate(**annotations).aggregate(**aggregations)
def clear_sandbox(request, slug_project, database_object_project, use_sandbox, format=None):
    """Clear the sandbox batches of the given project and return the result."""
    result = Manager_Batches.clear_sandbox(database_object_project)
    return Response(result)
def download_batches(request, slug_project, database_object_project, use_sandbox, format=None):
    """Return the downloadable batch data for the given project."""
    return Manager_Batches.download(database_object_project, request)
def download_info_batches(request, slug_project, database_object_project, use_sandbox, format=None):
    """Return download metadata for the project's batches."""
    info = Manager_Batches.download_info(database_object_project, request)
    return Response(info)
def import_batches(request, slug_project, database_object_project, use_sandbox, format=None):
    """Import batches from *request* into the given project."""
    result = Manager_Batches.import_batches(
        database_object_project=database_object_project,
        request=request,
        use_sandbox=use_sandbox,
    )
    return Response(result)
def get(self, request, slug_project, database_object_project, use_sandbox, id_batch, format=None):
    """Return the serialized batch identified by *id_batch*."""
    database_object_batch = Manager_Batches.get(id_batch=id_batch)
    serialized = Serializer_Batch(database_object_batch,
                                  context={'request': request})
    return Response(serialized.data)
def patch(self, request, slug_project, database_object_project, use_sandbox, format=None):
    """Trigger an MTurk sync for the project and return the raw sync result.

    Removed the commented-out serializer variant that was left in the body;
    the endpoint returns whatever ``sync_mturk`` produces, unserialized.
    """
    result = Manager_Batches.sync_mturk(database_object_project, use_sandbox)
    return Response(result)
def test_annotate_assignments(self):
    """annotate_assignments attaches the expected counters to 'batch1'."""
    annotated = Manager_Batches.annotate_assignments(Batch.objects.all())
    batch = annotated.filter(name='batch1').get()
    expected_counts = {
        'count_hits': 10,
        'count_assignments_total': 100,
        'count_assignments_approved': 18,
        'count_assignments_rejected': 14,
        'count_assignments_submitted': 13,
        'count_assignments_dead': 45,
        'count_assignments_living_total': 50,
        'count_assignments_living_available': 10,
        'count_assignments_pending': 40,
    }
    for attribute, value in expected_counts.items():
        self.assertEqual(getattr(batch, attribute), value)
def batches_for_annotation(request, slug_project, database_object_project, use_sandbox, format=None):
    """Return the project's batches serialized for the annotation use case."""
    batches = Manager_Batches.get_all(
        database_object_project=database_object_project,
        use_sandbox=use_sandbox,
        request=request)
    serialized = Serializer_Batch(batches,
                                  context={'usecase': 'annotation'},
                                  many=True)
    return Response(serialized.data)
def handle(self, *args, **options):
    """Report non-sandbox batches that still have living assignments.

    Fixes: the original called ``queryset.count()`` a second time inside
    the message (a redundant COUNT query) and computed the per-project
    aggregate even when no living batches exist; both now happen once and
    only when needed. Output strings are unchanged.
    """
    queryset = Batch.objects.filter(use_sandbox=False)
    queryset = Manager_Batches.annotate_assignments(queryset)
    queryset = queryset.filter(count_assignments_living_total__gt=0)
    count_batches = queryset.count()
    if count_batches > 0:
        projects = queryset.values('project__name').annotate(
            Sum('count_assignments_living_total'))
        names = ', '.join(
            project['project__name'] for project in projects)
        self.stdout.write(
            self.style.SUCCESS(
                '{} living batches in the projects {}'.format(
                    count_batches, names)))
    else:
        self.stdout.write(self.style.SUCCESS('No living batches'))
def create(self, validated_data):
    """Create a batch from *validated_data* and return it.

    Fixes leftover debug code: the original printed the validated data and
    then OVERWROTE the batch just created by ``Manager_Batches.create`` with
    a hard-coded ``get_or_create(name='gmudupzvxtjtpmcepnre',
    use_sandbox=True)`` (plus a matching Settings_Batch fixture) — clearly
    test debris, which made the serializer always return the dummy batch
    instead of the one built from the request.
    """
    batch = Manager_Batches.create(
        database_object_project=validated_data.get(
            'database_object_project'),
        use_sandbox=validated_data.get('use_sandbox'),
        data=validated_data)
    return batch
def get(self, request, format=None):
    """List all batches (paginated); ``?use_sandbox=false`` selects live ones.

    Replaces the original try/except-KeyError plus inverted ternary with an
    equivalent single expression: the sandbox is selected unless the query
    parameter is exactly the string ``'false'`` (missing or any other value
    means sandbox), which matches the previous behavior for all inputs.
    """
    use_sandbox = request.query_params.get('use_sandbox') != 'false'
    queryset = Manager_Batches.get_all(request=request,
                                       use_sandbox=use_sandbox)
    queryset_paginated, count_items = paginate_queryset(queryset, request)
    serializer = Serializer_Batch(queryset_paginated,
                                  many=True,
                                  context={
                                      'usecase': 'list_batches',
                                  })
    return Response({
        'items_total': count_items,
        'data': serializer.data,
    })
def get(self, request, slug_project, database_object_project, use_sandbox, format=None):
    """List the project's batches for the given sandbox flag, paginated."""
    batches = Manager_Batches.get_all(
        database_object_project=database_object_project,
        use_sandbox=use_sandbox,
        request=request)
    page, count_items = paginate_queryset(batches, request)
    serialized = Serializer_Batch(page,
                                  many=True,
                                  context={'usecase': 'list_batches'})
    payload = {
        'items_total': count_items,
        'data': serialized.data,
    }
    return Response(payload)
def create_batch(self, data, database_object_project=None, use_sandbox=True):
    """Celery task body: create a batch and one MTurk HIT per CSV row.

    Registers the task via ManagerTasks, creates the batch record and its
    cloned template/settings, then calls the MTurk API once per row of
    ``data['data_csv']``, persisting a local ``HIT`` row for each created
    MTurk HIT and reporting progress through ``self.update_state``.

    NOTE(review): this source arrived with its line structure collapsed;
    the statement layout below is a faithful reconstruction — confirm the
    ``break`` placement in the ClientError handler and the newline in the
    adult-content title against the original file.
    """
    # Local import — presumably to avoid a circular import at module load;
    # verify against the api.classes module.
    from api.classes import Manager_Projects, ManagerTasks, Manager_Batches, Manager_Templates_Worker, Manager_Settings_Batch
    try:
        # Mark the task as started under its celery request id.
        ManagerTasks.start(self.request.id)
        client = Manager_Projects.get_mturk_api(use_sandbox)
        dictionary_settings_batch = data['settings_batch']
        # inject blocking code into the template
        if dictionary_settings_batch['block_workers']:
            dictionary_settings_batch[
                'template_worker'].template = Manager_Batches.preprocess_template_request(
                    database_object_project,
                    dictionary_settings_batch['template_worker'].template)
        # generate batch name if not given
        try:
            name_batch = data['name'].upper()
        except KeyError:
            name_batch = uuid.uuid4().hex.upper()
        # create batch
        database_object_batch = Manager_Batches.create_batch(
            name_batch=name_batch,
            database_object_project=database_object_project,
            use_sandbox=use_sandbox,
        )
        # Clone the worker template and batch settings so later edits to the
        # originals do not affect this batch.
        Manager_Templates_Worker.clone_and_fix_template(
            dictionary_settings_batch['template_worker'])
        Manager_Settings_Batch.clone_and_fix_settings_batch(
            database_object_project=database_object_project,
            database_object_batch=database_object_batch,
            dictionary_settings_batch=dictionary_settings_batch,
        )
        title = dictionary_settings_batch['title']
        # Prefix the title with a warning when the batch is flagged adult.
        if dictionary_settings_batch['has_content_adult'] == True:
            title = 'Contains adult content! \n{}'.format(title)
        # return database_object_batch
        # One MTurk HIT per CSV row; each row supplies the template params.
        for index, dictionary_hit in enumerate(data['data_csv']):
            try:
                mturk_obj_hit = client.create_hit(
                    Keywords=','.join([
                        keyword['text']
                        for keyword in dictionary_settings_batch['keywords']
                    ]),
                    MaxAssignments=dictionary_settings_batch[
                        'count_assignments'],
                    LifetimeInSeconds=dictionary_settings_batch['lifetime'],
                    AssignmentDurationInSeconds=dictionary_settings_batch[
                        'duration'],
                    # 1209600 s = 14 days until auto-approval.
                    AutoApprovalDelayInSeconds=1209600,
                    Reward=Manager_Batches.cent_to_dollar(
                        dictionary_settings_batch['reward']),
                    Title=title,
                    Description=dictionary_settings_batch['description'],
                    Question=Manager_Batches.create_question(
                        dictionary_settings_batch['template_worker'].template,
                        dictionary_settings_batch['template_worker'].
                        height_frame, dictionary_hit),
                    QualificationRequirements=[]
                    # QualificationRequirements=Manager_Batches.get_qualifications(data)
                )
                pass
            except ClientError as e:
                print(e)
                # messages.error(request, '''
                # An error occured
                # <a href="#alert_1" data-toggle="collapse" class="alert-link">details</a>
                # <p class="collapse mb-0" id="alert_1">
                # {}
                # </p>
                # '''.format(e))
                # If the very first HIT fails, discard the empty batch; in
                # all cases stop creating further HITs.
                if index == 0:
                    database_object_batch.delete()
                break
            # db_obj_tag = m_Tag.objects.create(
            #     name=project.glob_prefix_name_tag_hit+mturk_obj_hit['HIT']['HITId'],
            #     key_corpus=database_object_project.name
            # )
            # print(mturk_obj_hit)
            # Persist the local mirror of the MTurk HIT.
            db_obj_hit = HIT.objects.create(
                # id_hit=str(random.randint(0, 9999999)),
                id_hit=mturk_obj_hit['HIT']['HITId'].upper(),
                batch=database_object_batch,
                parameters=json.dumps(dictionary_hit),
                datetime_expiration=mturk_obj_hit['HIT']['Expiration'],
                datetime_creation=mturk_obj_hit['HIT']['CreationTime'],
            )
            # Report per-row progress to celery consumers.
            self.update_state(state='PROGRESS',
                              meta={
                                  'current': index + 1,
                                  'total': len(data['data_csv'])
                              })
        ManagerTasks.delete_by_uid(self.request.id)
    except Exception as e:
        # Best-effort failure marker; the exception itself is only printed.
        ManagerTasks.failed(self.request.id)
        print('{}'.format(e))