def delete(self, request, slug_project, database_object_project, use_sandbox, id_settings_batch, format=None):
    """Delete the batch settings record identified by ``id_settings_batch``.

    Returns an empty 204 response on success.
    """
    Manager_Settings_Batch.delete(id_settings_batch)
    response = Response(status=status.HTTP_204_NO_CONTENT)
    return response
def put(self, request, slug_project, database_object_project, use_sandbox, id_settings_batch, format=None):
    """Partially update one batch settings record from the request body.

    Returns the serialized record on success, or the validation errors
    with HTTP 400 when the payload is invalid.
    """
    settings_batch = Manager_Settings_Batch.get(id_settings_batch)
    serializer = Serializer_Settings_Batch(settings_batch, data=request.data, partial=True)
    # Guard clause: reject invalid payloads before touching the database.
    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    serializer.save()
    return Response(serializer.data)
def get(self, request, slug_project, database_object_project, use_sandbox, format=None):
    """List the project's batch settings, paginated.

    Returns a payload with the total item count and the serialized page.
    """
    queryset, list_fields = Manager_Settings_Batch.get_all(
        database_object_project=database_object_project,
        request=request,
    )
    queryset_paginated, count_items = paginate_queryset(queryset, request)
    context_serializer = {
        'usecase': 'list_settings_batch',
        'fields': list_fields,
    }
    serializer = Serializer_Settings_Batch(
        queryset_paginated,
        many=True,
        context=context_serializer,
    )
    payload = {
        'items_total': count_items,
        'data': serializer.data,
    }
    return Response(payload)
def create(self, validated_data):
    """Create a new batch settings record from validated serializer data.

    Persistence is delegated to ``Manager_Settings_Batch.create``; the
    created database object is returned to the serializer framework.
    """
    # Removed leftover debug ``print`` statements that dumped the full
    # validated payload to stdout on every create.
    return Manager_Settings_Batch.create(data=validated_data)
def update(self, instance, validated_data):
    """Update an existing batch settings record with validated data.

    Delegates to ``Manager_Settings_Batch.update`` and returns the
    updated instance, as the serializer framework expects.
    """
    # Removed leftover debug ``print`` statements that dumped the full
    # validated payload to stdout on every update.
    return Manager_Settings_Batch.update(
        instance=instance,
        data=validated_data,
    )
def settings_batch_all(request, slug_project, database_object_project, use_sandbox, format=None):
    """Serialize and return every batch settings record of the project.

    NOTE(review): a second ``settings_batch_all`` definition appears later
    in this module and shadows this one — confirm which is intended.
    """
    queryset, list_fields = Manager_Settings_Batch.get_all(
        database_object_project=database_object_project,
        request=request,
    )
    context_serializer = {
        'usecase': 'settings_batch_all',
        'fields': list_fields,
    }
    serializer = Serializer_Settings_Batch(
        queryset,
        many=True,
        context=context_serializer,
    )
    return Response(serializer.data)
def settings_batch_all(request, slug_project, database_object_project, use_sandbox, format=None):
    """Serialize and return all batch settings, optionally restricted to
    the field names given in the ``fields[]`` query parameter.

    An absent/empty ``fields[]`` list is passed through as ``None``,
    meaning "all fields".
    """
    # Empty list is falsy, so this is equivalent to the explicit
    # len()-check-then-None pattern.
    list_fields = request.query_params.getlist('fields[]') or None
    queryset = Manager_Settings_Batch.get_all(
        database_object_project=database_object_project,
        request=request,
        fields=list_fields,
    )
    serializer = Serializer_Settings_Batch(
        queryset,
        many=True,
        context={
            'usecase': 'settings_batch_all',
            'fields': list_fields,
        },
    )
    return Response(serializer.data)
def import_batches(database_object_project: Project, request: Request, use_sandbox: bool):
    """Import MTurk batches from parsed CSV payloads in ``request.data['parsedCSVs']``.

    For each parsed CSV: creates a batch (named from ``nameBatch`` or a
    random UUID hex), creates a worker template from ``templateWorker``,
    and clones fixed batch settings derived from the first CSV row.

    NOTE(review): a second, fuller ``import_batches`` definition exists in
    this module — confirm which one is live.
    NOTE(review): if ``parsedCSVs`` is empty, ``name_batch`` is never bound
    and the final ``return`` raises ``NameError`` — confirm callers always
    send at least one CSV.
    """
    from api.classes import Manager_Settings_Batch, Manager_Templates_Worker
    for parsed_csv in request.data.get('parsedCSVs', []):
        name_batch = request.data['nameBatch']
        if name_batch is None:
            # No explicit name supplied: fall back to a random hex name.
            name_batch = uuid.uuid4().hex.upper()
        else:
            name_batch = name_batch.upper()
        # create batch; creation time is the earliest CreationTime found in
        # the CSV rows (MTurk's textual timestamp format).
        database_object_batch = Manager_Batches.create_batch(
            name_batch=name_batch,
            database_object_project=database_object_project,
            use_sandbox=use_sandbox,
            datetime_creation=datetime.datetime.strptime(
                sorted(map(lambda x: x['CreationTime'], parsed_csv))[0],
                '%a %b %d %H:%M:%S %Z %Y'),
        )
        # Worker template name is suffixed with a timestamp to keep it unique.
        template_worker = Manager_Templates_Worker.create(
            data={
                'database_object_project': database_object_project,
                'name': '{}__{}'.format(name_batch, timezone.now().timestamp()),
                'height_frame': 800,
                'template': request.data.get('templateWorker'),
                'template_original': True,
            })
        # Estimate assignments per HIT as the highest repetition count of any
        # HITId in the CSV rows.
        count_assignments_estimated = collections.Counter(
            map(lambda x: x['HITId'], parsed_csv)).most_common(1)[0][1]
        Manager_Settings_Batch.clone_and_fix_settings_batch(
            database_object_project=database_object_project,
            database_object_batch=database_object_batch,
            dictionary_settings_batch={
                'title': parsed_csv[0]['Title'],
                'reward': mturk_reward_to_database_reward(parsed_csv[0]['Reward']),
                'count_assignments': count_assignments_estimated,
                'description': parsed_csv[0]['Description'],
                # Hard-coded lifetime; the CSV-derived value is disabled below.
                'lifetime': 600,
                # 'lifetime': int(parsed_csv[0]['LifetimeInSeconds']),
                'duration': int(parsed_csv[0]['AssignmentDurationInSeconds']),
                # 'keywords': parsed_csv[0]['Keywords']
                'template_worker': template_worker,
            },
        )
        # Debug-only: inspect the first assignment's columns, then stop.
        for assignment in parsed_csv:
            print(assignment.keys())
            break
    return {'name_batch': name_batch}
def create_batch(self, data, database_object_project=None, use_sandbox=True):
    """Background task: create an MTurk batch and one HIT per CSV row.

    Reads batch settings from ``data['settings_batch']`` and HIT parameter
    rows from ``data['data_csv']``. Progress is reported through
    ``self.update_state`` (task-style ``self`` with ``.request.id`` —
    presumably a bound Celery task; confirm against the task registry).

    NOTE(review): the broad ``except Exception`` at the bottom marks the
    task failed and only prints the error — errors are not re-raised.
    """
    from api.classes import Manager_Projects, ManagerTasks, Manager_Batches, Manager_Templates_Worker, Manager_Settings_Batch
    try:
        ManagerTasks.start(self.request.id)
        client = Manager_Projects.get_mturk_api(use_sandbox)
        dictionary_settings_batch = data['settings_batch']
        # inject blocking code into the template
        if dictionary_settings_batch['block_workers']:
            dictionary_settings_batch[
                'template_worker'].template = Manager_Batches.preprocess_template_request(
                    database_object_project,
                    dictionary_settings_batch['template_worker'].template)
        # generate batch name if not given
        try:
            name_batch = data['name'].upper()
        except KeyError:
            name_batch = uuid.uuid4().hex.upper()
        # create batch
        database_object_batch = Manager_Batches.create_batch(
            name_batch=name_batch,
            database_object_project=database_object_project,
            use_sandbox=use_sandbox,
        )
        Manager_Templates_Worker.clone_and_fix_template(
            dictionary_settings_batch['template_worker'])
        Manager_Settings_Batch.clone_and_fix_settings_batch(
            database_object_project=database_object_project,
            database_object_batch=database_object_batch,
            dictionary_settings_batch=dictionary_settings_batch,
        )
        title = dictionary_settings_batch['title']
        # Prefix the public HIT title with an adult-content warning when flagged.
        if dictionary_settings_batch['has_content_adult'] == True:
            title = 'Contains adult content! \n{}'.format(title)
        # return database_object_batch
        for index, dictionary_hit in enumerate(data['data_csv']):
            try:
                # One MTurk HIT per CSV row; the row supplies the template's
                # input parameters.
                mturk_obj_hit = client.create_hit(
                    Keywords=','.join([
                        keyword['text']
                        for keyword in dictionary_settings_batch['keywords']
                    ]),
                    MaxAssignments=dictionary_settings_batch[
                        'count_assignments'],
                    LifetimeInSeconds=dictionary_settings_batch['lifetime'],
                    AssignmentDurationInSeconds=dictionary_settings_batch[
                        'duration'],
                    # 14 days auto-approval delay.
                    AutoApprovalDelayInSeconds=1209600,
                    Reward=Manager_Batches.cent_to_dollar(
                        dictionary_settings_batch['reward']),
                    Title=title,
                    Description=dictionary_settings_batch['description'],
                    Question=Manager_Batches.create_question(
                        dictionary_settings_batch['template_worker'].template,
                        dictionary_settings_batch['template_worker'].
                        height_frame, dictionary_hit),
                    QualificationRequirements=[]
                    # QualificationRequirements=Manager_Batches.get_qualifications(data)
                )
                pass
            except ClientError as e:
                print(e)
                # messages.error(request, '''
                #     An error occured
                #     <a href="#alert_1" data-toggle="collapse" class="alert-link">details</a>
                #     <p class="collapse mb-0" id="alert_1">
                #         {}
                #     </p>
                # '''.format(e))
                # If the very first HIT fails, the batch has no HITs yet and
                # is removed entirely; in every case the loop is aborted.
                if index == 0:
                    database_object_batch.delete()
                break
            # db_obj_tag = m_Tag.objects.create(
            #     name=project.glob_prefix_name_tag_hit+mturk_obj_hit['HIT']['HITId'],
            #     key_corpus=database_object_project.name
            # )
            # print(mturk_obj_hit)
            db_obj_hit = HIT.objects.create(
                # id_hit=str(random.randint(0, 9999999)),
                id_hit=mturk_obj_hit['HIT']['HITId'].upper(),
                batch=database_object_batch,
                parameters=json.dumps(dictionary_hit),
                datetime_expiration=mturk_obj_hit['HIT']['Expiration'],
                datetime_creation=mturk_obj_hit['HIT']['CreationTime'],
            )
            # Report progress after each successfully created HIT.
            self.update_state(state='PROGRESS',
                              meta={
                                  'current': index + 1,
                                  'total': len(data['data_csv'])
                              })
        ManagerTasks.delete_by_uid(self.request.id)
    except Exception as e:
        ManagerTasks.failed(self.request.id)
        print('{}'.format(e))
def import_batches(database_object_project: Project, request: Request, use_sandbox: bool):
    """Import MTurk batches, HITs, assignments and workers from parsed CSVs.

    For each CSV in ``request.data['parsedCSVs']``: creates the batch,
    clones the referenced worker template, optionally persists a reusable
    settings record (when ``name_settings_batch`` is present), then creates
    one HIT per distinct HITId and one Assignment per CSV row, reusing
    existing ``Worker`` rows where possible.

    NOTE(review): a shorter duplicate ``import_batches`` exists in this
    module — confirm which one is live.
    NOTE(review): if ``parsedCSVs`` is empty, ``name_batch`` is unbound at
    the final ``return`` — confirm callers always send at least one CSV.
    """
    from api.classes import Manager_Settings_Batch, Manager_Templates_Worker
    for parsed_csv in request.data.get('parsedCSVs', []):
        name_batch = request.data['nameBatch']
        if name_batch is None:
            # No explicit name supplied: fall back to a random hex name.
            name_batch = uuid.uuid4().hex.upper()
        else:
            name_batch = name_batch.upper()
        # create batch; creation time is the earliest CreationTime in the CSV
        # (MTurk's textual timestamp format).
        database_object_batch = Manager_Batches.create_batch(
            name_batch=name_batch,
            database_object_project=database_object_project,
            use_sandbox=use_sandbox,
            datetime_creation=datetime.datetime.strptime(sorted(map(lambda x: x['CreationTime'], parsed_csv))[0], '%a %b %d %H:%M:%S %Z %Y'),
        )
        database_object_template_worker = Manager_Templates_Worker.get(request.data.get('templateWorker'))
        template_worker = Manager_Templates_Worker.clone_and_fix_template(database_object_template_worker)
        # Estimate assignments per HIT as the highest repetition count of any
        # HITId among the CSV rows.
        count_assignments_estimated = collections.Counter(map(lambda x: x['HITId'], parsed_csv)).most_common(1)[0][1]
        # Optionally persist the derived settings as a named, reusable record.
        if 'name_settings_batch' in request.data:
            Manager_Settings_Batch.create(data={
                'database_object_project': database_object_project,
                'name': request.data['name_settings_batch'],
                'title': parsed_csv[0]['Title'],
                'reward': mturk_reward_to_database_reward(parsed_csv[0]['Reward']),
                'count_assignments': count_assignments_estimated,
                'description': parsed_csv[0]['Description'],
                # Hard-coded lifetime of 7 days (in seconds).
                'lifetime': 604800,
                'duration': int(parsed_csv[0]['AssignmentDurationInSeconds']),
                'template_worker': template_worker,
                'block_workers': False,
                'has_content_adult': False,
                'keywords': [],
            })
        Manager_Settings_Batch.clone_and_fix_settings_batch(
            database_object_project=database_object_project,
            database_object_batch=database_object_batch,
            dictionary_settings_batch={
                'title': parsed_csv[0]['Title'],
                'reward': mturk_reward_to_database_reward(parsed_csv[0]['Reward']),
                'count_assignments': count_assignments_estimated,
                'description': parsed_csv[0]['Description'],
                # Hard-coded lifetime; the CSV-derived value is disabled below.
                'lifetime': 604800,
                # 'lifetime': int(parsed_csv[0]['LifetimeInSeconds']),
                'duration': int(parsed_csv[0]['AssignmentDurationInSeconds']),
                # 'keywords': parsed_csv[0]['Keywords']
                'template_worker': template_worker,
            },
        )
        # Cache created HITs and known workers to avoid duplicate DB rows
        # while iterating the assignment rows.
        dict_hits = {}
        dictionary_workers_available = {worker.id_worker: worker for worker in Worker.objects.all()}
        for assignment in parsed_csv:
            print('###################')
            try:
                database_object_hit = dict_hits[assignment['HITId']]
            except KeyError:
                # First row for this HITId: create the HIT, collecting its
                # 'Input.*' columns as the stored parameters.
                database_object_hit = HIT.objects.create(
                    id_hit=assignment['HITId'].upper(),
                    batch=database_object_batch,
                    parameters=json.dumps({name_input: assignment['Input.{}'.format(name_input)] for name_input in [key.replace('Input.', '') for key in assignment if key.startswith('Input.')]}),
                    datetime_expiration=datetime.datetime.strptime(assignment['Expiration'], '%a %b %d %H:%M:%S %Z %Y'),
                    datetime_creation=datetime.datetime.strptime(assignment['CreationTime'], '%a %b %d %H:%M:%S %Z %Y'),
                )
                dict_hits[assignment['HITId']] = database_object_hit
            try:
                database_object_worker = dictionary_workers_available[assignment['WorkerId']]
            except KeyError:
                # otherwise create the new worker and add it to the dictionary
                database_object_worker = Worker.objects.get_or_create(
                    id_worker=assignment['WorkerId'],
                )[0]
                dictionary_workers_available[assignment['WorkerId']] = database_object_worker
            # One Assignment row per CSV line, collecting its 'Answer.*'
            # columns as the stored answer payload.
            Assignment.objects.create(
                id_assignment=assignment['AssignmentId'],
                hit=database_object_hit,
                worker=database_object_worker,
                status_external=mturk_status_to_database_status(assignment['AssignmentStatus']),
                answer=json.dumps({name_input: assignment['Answer.{}'.format(name_input)] for name_input in [key.replace('Answer.', '') for key in assignment if key.startswith('Answer.')]}),
                datetime_submit=datetime.datetime.strptime(assignment['SubmitTime'], '%a %b %d %H:%M:%S %Z %Y'),
                datetime_accept=datetime.datetime.strptime(assignment['AcceptTime'], '%a %b %d %H:%M:%S %Z %Y'),
            )
            # break
    return {
        'name_batch': name_batch
    }
def get(self, request, slug_project, id_settings_batch):
    """Return a single batch settings record, serialized."""
    settings_batch = Manager_Settings_Batch.get(id_settings_batch)
    context_serializer = {'request': request}
    serializer = Serializer_Settings_Batch(settings_batch, context=context_serializer)
    return Response(serializer.data)