def destroy(self, request, *args, **kwargs):
    """Delete submission data.

    When the resolved object is an ``XForm``, ``instance_ids`` (a comma
    separated string of submission ids in ``request.data``) is required;
    each listed, not-yet-deleted submission of that form is deleted via
    :func:`delete_instance` and a 200 response with the deleted-record
    count is returned.  When the resolved object is a single ``Instance``,
    the caller must hold ``CAN_DELETE_SUBMISSION`` on its form; a 204
    response is returned.

    :raises ParseError: if ``instance_ids`` is missing or contains no
        numeric ids (XForm case).
    :raises PermissionDenied: if deleting a single submission without the
        required permission.
    """
    instance_ids = request.data.get('instance_ids')
    self.object = self.get_object()

    if isinstance(self.object, XForm):
        if not instance_ids:
            raise ParseError(_(u"Data id(s) not provided."))
        # Keep only well-formed numeric ids from the comma separated list.
        instance_ids = [x for x in instance_ids.split(',') if x.isdigit()]
        if not instance_ids:
            raise ParseError(_(u"Invalid data ids were provided."))

        initial_count = self.object.submission_count()
        queryset = Instance.objects.filter(
            id__in=instance_ids,
            xform=self.object,
            # do not update this timestamp when the record have
            # already been deleted.
            deleted_at__isnull=True)
        # loop through queryset
        # then call delete_instance that calls .save()
        # to allow emitting post_save signal
        for instance in queryset_iterator(queryset):
            delete_instance(instance, request.user)

        # updates the num_of_submissions for the form.
        after_count = self.object.submission_count(force_update=True)
        number_of_records_deleted = initial_count - after_count

        # send message
        send_message(instance_id=instance_ids,
                     target_id=self.object.id,
                     target_type=XFORM,
                     user=request.user,
                     message_verb=SUBMISSION_DELETED)

        return Response(
            data={
                "message": "%d records were deleted"
                           % number_of_records_deleted
            },
            status=status.HTTP_200_OK)
    elif isinstance(self.object, Instance):
        if request.user.has_perm(CAN_DELETE_SUBMISSION, self.object.xform):
            delete_instance(self.object, request.user)
            # send message; pass the submission's id rather than the model
            # instance itself, matching every other send_message() call
            # site in this module.
            send_message(instance_id=self.object.id,
                         target_id=self.object.xform.id,
                         target_type=XFORM,
                         user=request.user,
                         message_verb=SUBMISSION_DELETED)
        else:
            raise PermissionDenied(_(u"You do not have delete "
                                     u"permissions."))

    return Response(status=status.HTTP_204_NO_CONTENT)
def create(self, request, *args, **kwargs):
    """
    Custom create method. Handle bulk create
    """
    is_bulk = isinstance(request.data, list)
    serializer = self.get_serializer(data=request.data, many=is_bulk)
    serializer.is_valid(raise_exception=True)

    if is_bulk:
        self.perform_create(serializer)
        # One review per submitted item; collect every reviewed
        # submission id for the notification payload.
        instance_ids = [
            review['instance'] for review in serializer.data
        ]
        first_review_id = serializer.data[0]['id']
    else:
        serializer.save()
        instance_ids = serializer.data['instance']
        first_review_id = serializer.data['id']

    headers = self.get_success_headers(serializer.data)
    # Resolve the form the review(s) belong to; a bulk request targets a
    # single form, so the first created review suffices.
    xform = SubmissionReview.objects.get(
        id=first_review_id).instance.xform

    send_message(instance_id=instance_ids,
                 target_id=xform.id,
                 target_type=XFORM,
                 user=request.user,
                 message_verb=SUBMISSION_REVIEWED)

    return Response(serializer.data,
                    status=status.HTTP_201_CREATED,
                    headers=headers)
def _get_instance(xml, new_uuid, submitted_by, status, xform, checksum,
                  request=None):
    """Return the ``Instance`` for a submission, creating or editing it.

    A submission whose XML carries a deprecated uuid is treated as an
    edit: the matching instance is snapshotted into ``InstanceHistory``
    and updated in place.  If only a history record matches ``new_uuid``,
    the previously edited instance is reused.  Otherwise a brand new
    ``Instance`` is created.  A message is emitted in every case.
    """
    message_verb = SUBMISSION_EDITED
    instance = None
    history = None

    # An edit submission references the uuid of the record it replaces.
    old_uuid = get_deprecated_uuid_from_xml(xml)
    if old_uuid:
        instance = Instance.objects.filter(uuid=old_uuid,
                                           xform_id=xform.pk).first()
        history = InstanceHistory.objects.filter(
            xform_instance__xform_id=xform.pk,
            uuid=new_uuid).only('xform_instance').first()

    if instance:
        # Edit path: caller must be allowed to edit this form's data.
        check_edit_submission_permissions(submitted_by, xform)
        edit_time = timezone.now()
        # Snapshot the pre-edit state before overwriting the record.
        InstanceHistory.objects.create(
            checksum=instance.checksum,
            xml=instance.xml,
            xform_instance=instance,
            uuid=old_uuid,
            user=submitted_by,
            geom=instance.geom,
            submission_date=instance.last_edited or instance.date_created)
        instance.xml = xml
        instance.last_edited = edit_time
        instance.uuid = new_uuid
        instance.checksum = checksum
        instance.save()
        # call webhooks
        process_submission.send(sender=instance.__class__,
                                instance=instance)
    elif history:
        # The edit was already applied earlier; reuse its instance.
        instance = history.xform_instance

    if old_uuid is None or (instance is None and history is None):
        # Brand new submission.
        message_verb = SUBMISSION_CREATED
        instance = Instance.objects.create(xml=xml,
                                           user=submitted_by,
                                           status=status,
                                           xform=xform,
                                           checksum=checksum)

    # send notification on submission creation
    send_message(instance_id=instance.id,
                 target_id=instance.xform.id,
                 target_type=XFORM,
                 user=instance.user or instance.xform.user,
                 message_verb=message_verb)

    return instance
def send_message_on_save(sender, instance, created=False, **kwargs):
    # pylint: disable=unused-argument
    """Signal handler: send a FORM_UPDATED message when a form is saved."""
    # The saved object is the form itself, so it is both the subject and
    # the target of the message.
    form_id = instance.id
    send_message(instance_id=form_id,
                 target_id=form_id,
                 target_type=XFORM,
                 user=instance.created_by,
                 message_verb=FORM_UPDATED)
def send_message_on_save(sender, instance, **kwargs):
    # pylint: disable=unused-argument
    """Signal handler: send a SUBMISSION_REVIEWED message on save."""
    # ``instance`` is the review object; the submission it reviews is
    # reachable via its ``instance`` attribute.
    reviewed = instance.instance
    send_message(instance_id=reviewed.id,
                 target_id=reviewed.xform.id,
                 target_type=XFORM,
                 user=instance.created_by,
                 message_verb=SUBMISSION_REVIEWED)
def test_send_message_payload_chunking(self, message_serializer_mock):
    """
    Test that the send_message function chunks the message payload
    if list of IDs goes over limit
    """
    def is_valid():
        return True

    message_serializer_mock.is_valid.side_effect = is_valid
    self._create_user_and_login()
    self._publish_transportation_form()
    # 20 ids exceeds the per-message limit, so send_message is expected
    # to split the payload and instantiate the serializer twice.
    instance_ids = list(range(20))
    send_message(instance_ids, self.xform.id, 'xform', self.user,
                 SUBMISSION_DELETED)
    self.assertTrue(message_serializer_mock.called)
    self.assertEqual(message_serializer_mock.call_count, 2)
def _try_update_xlsform(request, xform, owner):
    """Republish the form's XLSForm and notify watchers on success.

    Returns a 200 response with the serialized form when publishing
    yields an ``XForm``; otherwise returns the publish result as a 400
    response.
    """
    survey = utils.publish_xlsform(request, owner, xform.id_string,
                                   xform.project)

    # Anything other than an XForm is an error payload from publishing.
    if not isinstance(survey, XForm):
        return Response(survey, status=status.HTTP_400_BAD_REQUEST)

    serializer = XFormSerializer(xform, context={'request': request})
    # send form update notification
    send_message(instance_id=xform.id,
                 target_id=xform.id,
                 target_type=XFORM,
                 user=request.user or owner,
                 message_verb=FORM_UPDATED)
    return Response(serializer.data, status=status.HTTP_200_OK)
def submit_csv(username, xform, csv_file, overwrite=False):
    """Imports CSV data to an existing form

    Takes a csv formatted file or string containing rows of submission/instance
    and converts those to xml submissions and finally submits them by calling
    :py:func:`onadata.libs.utils.logger_tools.safe_create_instance`

    :param str username: the submission user
    :param onadata.apps.logger.models.XForm xform: The submission's XForm.
    :param (str or file) csv_file: A CSV formatted file with submission rows.
    :param bool overwrite: when True, soft-delete every existing submission
        of the form before importing.
    :return: If successful, a dict with import summary else dict with error str.
    :rtype: Dict
    """
    csv_file_validation_summary = validate_csv_file(csv_file, xform)

    if csv_file_validation_summary.get('valid'):
        additional_col = csv_file_validation_summary.get('additional_col')
    else:
        return async_status(
            FAILED,
            csv_file_validation_summary.get('error_msg')
        )

    # Total data rows (minus the header) — used for progress reporting.
    num_rows = sum(1 for row in csv_file) - 1
    # Change stream position to start of file
    csv_file.seek(0)

    csv_reader = ucsv.DictReader(csv_file, encoding='utf-8-sig')
    xform_json = json.loads(xform.json)
    select_multiples = [
        qstn.name for qstn in
        xform.get_survey_elements_of_type(MULTIPLE_SELECT_TYPE)]
    ona_uuid = {'formhub': {'uuid': xform.uuid}}
    additions = duplicates = inserts = 0
    # uuids of instances created by this import; deleted again on failure.
    rollback_uuids = []
    errors = {}

    # Retrieve the columns we should validate values for
    # Currently validating date, datetime, integer and decimal columns
    col_to_validate = {
        'date': (get_columns_by_type(XLS_DATE_FIELDS, xform_json), parse),
        'datetime': (
            get_columns_by_type(XLS_DATETIME_FIELDS, xform_json), parse),
        'integer': (get_columns_by_type(['integer'], xform_json), int),
        'decimal': (get_columns_by_type(['decimal'], xform_json), float)
    }

    if overwrite:
        instance_ids = [i['id'] for i in xform.instances.values('id')]
        # Soft-delete existing submissions; only records not already
        # deleted get their timestamp updated.
        xform.instances.filter(deleted_at__isnull=True)\
            .update(deleted_at=timezone.now(),
                    deleted_by=User.objects.get(username=username))
        # send message
        send_message(
            instance_id=instance_ids, target_id=xform.id,
            target_type=XFORM,
            user=User.objects.get(username=username),
            message_verb=SUBMISSION_DELETED)

    try:
        for row_no, row in enumerate(csv_reader):
            # Remove additional columns
            for index in additional_col:
                del row[index]

            # Remove 'n/a' and '' values from csv
            row = {k: v for (k, v) in row.items() if v not in [NA_REP, '']}

            row, error = validate_row(row, col_to_validate)

            if error:
                errors[row_no] = error

            # Only continue the process if no errors where encountered while
            # validating the data
            if not errors:
                location_data = {}
                for key in list(row):
                    # Collect row location data into separate location_data
                    # dict
                    if key.endswith(('.latitude', '.longitude',
                                     '.altitude', '.precision')):
                        location_key, location_prop = key.rsplit(u'.', 1)
                        location_data.setdefault(location_key, {}).update({
                            location_prop: row.get(key, '0')
                        })

                # collect all location K-V pairs into single geopoint field(s)
                # in location_data dict
                for location_key in list(location_data):
                    location_data.update({
                        location_key:
                        (u'%(latitude)s %(longitude)s '
                         '%(altitude)s %(precision)s') % defaultdict(
                             lambda: '', location_data.get(location_key))
                    })

                nested_dict = csv_dict_to_nested_dict(
                    row, select_multiples=select_multiples)
                row = flatten_split_select_multiples(
                    nested_dict, select_multiples=select_multiples)
                location_data = csv_dict_to_nested_dict(location_data)
                # Merge location_data into the Row data
                row = dict_merge(row, location_data)

                submission_time = datetime.utcnow().isoformat()
                # NOTE(review): the conditional binds over the whole `or`
                # expression — row_uuid is None whenever row[UUID] is
                # missing, even if 'meta/instanceID' is present. Presumably
                # intentional; confirm against get_submission_meta_dict.
                row_uuid = row.get('meta/instanceID') or 'uuid:{}'.format(
                    row.get(UUID)) if row.get(UUID) else None
                submitted_by = row.get('_submitted_by')
                submission_date = row.get('_submission_time',
                                          submission_time)

                for key in list(row):
                    # remove metadata (keys starting with '_')
                    if key.startswith('_'):
                        del row[key]

                # Inject our forms uuid into the submission
                row.update(ona_uuid)

                old_meta = row.get('meta', {})
                new_meta, update = get_submission_meta_dict(xform, row_uuid)
                # `update` flags an overwrite of an existing submission.
                inserts += update
                old_meta.update(new_meta)
                row.update({'meta': old_meta})

                row_uuid = row.get('meta').get('instanceID')
                rollback_uuids.append(row_uuid.replace('uuid:', ''))

                try:
                    xml_file = BytesIO(
                        dict2xmlsubmission(
                            row, xform, row_uuid, submission_date))

                    try:
                        error, instance = safe_create_instance(
                            username, xml_file, [], xform.uuid, None)
                    except ValueError as e:
                        error = e

                    if error:
                        # A 202 OpenRosaResponse means "duplicate
                        # submission" and is tolerated; any other error
                        # rolls back everything created so far.
                        if not (isinstance(error, OpenRosaResponse)
                                and error.status_code == 202):
                            Instance.objects.filter(
                                uuid__in=rollback_uuids,
                                xform=xform).delete()
                            return async_status(FAILED, text(error))
                        else:
                            duplicates += 1
                    else:
                        additions += 1

                        if additions % PROGRESS_BATCH_UPDATE == 0:
                            try:
                                current_task.update_state(
                                    state='PROGRESS',
                                    meta={
                                        'progress': additions,
                                        'total': num_rows,
                                        'info': additional_col
                                    })
                            except Exception:
                                logging.exception(
                                    _(u'Could not update state of '
                                      'import CSV batch process.'))
                            finally:
                                # refresh the cached submission count
                                xform.submission_count(True)

                        users = User.objects.filter(
                            username=submitted_by) if submitted_by else []
                        if users:
                            instance.user = users[0]
                            instance.save()
                except Exception as e:
                    return failed_import(rollback_uuids, xform, e, text(e))
    except UnicodeDecodeError as e:
        return failed_import(rollback_uuids, xform, e,
                             'CSV file must be utf-8 encoded')

    if errors:
        # Rollback all created instances if an error occurred during
        # validation
        Instance.objects.filter(
            uuid__in=rollback_uuids, xform=xform).delete()
        xform.submission_count(True)
        return async_status(
            FAILED,
            u'Invalid CSV data imported in row(s): {}'.format(
                errors) if errors else ''
        )
    else:
        added_submissions = additions - inserts
        event_by = User.objects.get(username=username)
        event_name = None
        tracking_properties = {
            'xform_id': xform.pk,
            'project_id': xform.project.pk,
            'submitted_by': event_by,
            'label': f'csv-import-for-form-{xform.pk}',
            'from': 'CSV Import',
        }
        # Track creations and updates as separate analytics events.
        if added_submissions > 0:
            tracking_properties['value'] = added_submissions
            event_name = INSTANCE_CREATE_EVENT
            analytics.track(
                event_by, event_name, properties=tracking_properties)

        if inserts > 0:
            tracking_properties['value'] = inserts
            event_name = INSTANCE_UPDATE_EVENT
            analytics.track(
                event_by, event_name, properties=tracking_properties)

        return {
            'additions': added_submissions,
            'duplicates': duplicates,
            'updates': inserts,
            'info': "Additional column(s) excluded from the upload: '{0}'."
            .format(', '.join(list(additional_col)))}