def import_csv(self, user):
    """
    Read the uploaded CSV file (``self.csv_file``) and build a
    ContributionSerializer per data row, collecting per-line validation
    errors instead of aborting on the first bad row.

    Parameters
    ----------
    user : geokey user
        Passed in the serializer context; contributions are attributed
        to this user.

    Notes
    -----
    - The column named ``self.geom_field`` is parsed as WKT via OGR; all
      later columns are shifted by one when mapped onto ``self.fields``.
    - NOTE(review): ``geometry`` is only bound when a row actually
      contains the geometry column; a row without it would raise
      NameError — confirm upstream guarantees the column exists.
    - NOTE(review): ``contributions`` and ``errors`` are built but not
      returned here — presumably consumed elsewhere; verify.
    """
    from geokey.contributions.serializers import ContributionSerializer

    contributions = []
    errors = []

    # Lift the default 128 KB per-field cap; WKT geometries can be huge.
    csv.field_size_limit(sys.maxsize)

    # 'rU' (universal newlines) is Py2-era; removed in Python 3.11 —
    # kept for compatibility with the rest of this codebase.
    with open(self.csv_file.path, 'rU') as csvfile:
        reader = csv.reader(csvfile)
        field_names = next(reader, None)  # header row
        line_number = 0

        self.lookupfields = self.get_lookup_fields()

        for row in reader:
            line_number += 1
            properties = {}
            geom_offset = 0

            for idx, column in enumerate(row):
                if field_names[idx] == self.geom_field:
                    geometry = ogr.CreateGeometryFromWkt(column)
                    # Columns after the geometry column map to
                    # self.fields[idx - 1].
                    geom_offset = 1
                else:
                    try:
                        key = self.fields[idx - geom_offset]
                        if key != '-1':
                            properties[key] = self.get_field_value(
                                key, column)
                    except IndexError:
                        # Extra trailing columns are ignored.
                        pass

            feature = {
                "location": {
                    "geometry": geometry.ExportToJson()
                },
                "properties": properties,
                "meta": {
                    "category": self.category.id,
                    "status": "pending"
                }
            }

            serializer = ContributionSerializer(
                data=feature,
                context={'user': user, 'project': self.project}
            )

            try:
                serializer.is_valid(raise_exception=True)
            # Fix: was Python-2-only `except ValidationError, e:` —
            # a SyntaxError under Python 3. `as` works on 2.6+ and 3.x.
            except ValidationError as e:
                errors.append({
                    'line': line_number,
                    'messages': e.messages
                })

            contributions.append(serializer)
def store_feature(self, feature):
    """Validate and persist one imported feature as a contribution.

    The raw ``geometry`` entry is moved into the ``location`` envelope
    the serializer expects, and the contribution is attributed to the
    anonymous user.
    """
    anonymous = User.objects.get(display_name="AnonymousUser")

    geometry = feature.pop("geometry")
    feature["meta"] = {"category": self.category.id}
    feature["location"] = {"geometry": json_dumps(geometry)}

    serializer = ContributionSerializer(
        data=feature,
        context={"user": anonymous, "project": self.project},
    )
    if serializer.is_valid(raise_exception=True):
        serializer.save()
def store_feature(self, feature):
    """Save a single feature dict as a contribution owned by AnonymousUser."""
    context = {
        'user': User.objects.get(display_name='AnonymousUser'),
        'project': self.project,
    }

    # Re-shape the incoming dict into the serializer's envelope.
    feature['meta'] = {'category': self.category.id}
    feature['location'] = {'geometry': json_dumps(feature.pop('geometry'))}

    contribution = ContributionSerializer(data=feature, context=context)
    if contribution.is_valid(raise_exception=True):
        contribution.save()
def submit_measurement(self, request, data, instance):
    """
    Turn a finished air-quality measurement into a GeoKey contribution.

    Bands the numeric ``results`` value into one of five categories,
    copies measurement metadata into the category's fields, creates the
    contribution and deletes the measurement on success.

    Parameters
    ----------
    request : HTTP request (authenticated user, ``data['project']`` id)
    data : dict
        Payload with ``properties`` (incl. ``results``) and ``finished``.
    instance : measurement model instance being submitted.

    Returns
    -------
    bool
        True when the contribution was saved and the measurement
        deleted; False on any lookup/conversion failure or missing data.
    """
    user = request.user
    project = request.data.get('project', None)
    properties = data.get('properties', None)

    if project is not None and properties is not None:
        finished = data.get('finished', None)
        results = properties.get('results', None)

        if finished is not None and results is not None:
            try:
                project = Project.objects.get(pk=project, status='active')
                aq_project = AirQualityProject.objects.get(
                    status='active',
                    project=project
                )
                category_types = dict(AirQualityCategory.TYPES)
                field_types = dict(AirQualityField.TYPES)

                # Band the result into 5 categories: <40, 40-60, 60-80,
                # 80-100, >=100. (Redundant lower-bound checks removed —
                # each elif already implies the previous bound.)
                results = float(results)
                if results < 40:
                    category = category_types['1']
                elif results < 60:
                    category = category_types['2']
                elif results < 80:
                    category = category_types['3']
                elif results < 100:
                    category = category_types['4']
                else:
                    category = category_types['5']

                aq_category = AirQualityCategory.objects.get(
                    type=category,
                    project=aq_project
                )

                properties = {}
                # Fix: dict.iteritems() is Python-2 only; items() works
                # on both 2 and 3 for this read-only iteration.
                for key, field_type in field_types.items():
                    aq_field = AirQualityField.objects.get(
                        type=field_type,
                        category=aq_category
                    )
                    instance_properties = instance.location.properties

                    value = None
                    if key == 'results':
                        value = results
                    elif key == 'date_out':
                        value = filter_date(instance.started, 'd/m/Y')
                    elif key == 'time_out':
                        value = filter_date(instance.started, 'H:i')
                    elif key == 'date_collected':
                        value = filter_date(instance.finished, 'd/m/Y')
                    elif key == 'time_collected':
                        value = filter_date(instance.finished, 'H:i')
                    elif key == 'exposure_min':
                        value = instance.finished - instance.started
                        value = int(value.total_seconds() / 60)
                    elif key == 'distance_from_road':
                        value = '%sm' % instance_properties.get('distance')
                    elif key == 'height':
                        value = '%sm' % instance_properties.get('height')
                    elif key == 'site_characteristics':
                        value = instance_properties.get('characteristics')
                    elif key == 'additional_details':
                        value = instance.properties.get('additional_details')
                    elif key == 'made_by_students':
                        made = instance.properties.get('made_by_students')
                        value = 'Yes' if made else 'No'
                        try:
                            value = aq_field.field.lookupvalues.get(
                                name=value).id
                        # NOTE(review): lookupvalues.get() raises the
                        # lookup-value model's DoesNotExist, which is NOT
                        # Field.DoesNotExist — a miss falls through to the
                        # outer handler instead of this one. Confirm which
                        # exception type was intended.
                        except Field.DoesNotExist:
                            return False

                    if value is not None:
                        properties[aq_field.field.key] = str(value)
            # Fix: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt. Any lookup/conversion failure above
            # means the measurement cannot be submitted.
            except Exception:
                return False

            if project.can_contribute(user):
                data = {
                    'type': 'Feature',
                    'meta': {
                        'status': 'active',
                        'category': aq_category.category.id
                    },
                    'location': {
                        'geometry': instance.location.geometry.geojson
                    },
                    'properties': properties
                }
                serializer = ContributionSerializer(
                    data=data,
                    context={'user': user, 'project': project}
                )
                if serializer.is_valid(raise_exception=True):
                    serializer.save()
                    # The measurement is consumed by the contribution.
                    instance.delete()
                    return True

    return False
def import_from_csv(self, user, csv_file, form_category_id=None):
    """
    Reads an uploaded CSV file and creates the contributions and returns
    the number of contributions created, updated and ignored.

    Parameter
    ---------
    user : geokey.users.models.User
        User who uploaded the CSV. Will be used as the creator of each
        contribution.
    csv_file : django.core.files.File
        The file that was uploaded.
    form_category_id : int
        optionally identifies the GeoKey category backing the SapelliForm
        which generated the data in the CSV file. This is only really used
        if the CSV file header does not contain Form identification info
        (i.e. modelID & modelSchemaNumber).

    Returns
    -------
    int
        The number of contributions created
    int
        The number of contributions created with joined locations
    int
        The number of contributions created without locations
    int
        The number of contributions updated
    int
        The number of contributions ignored due to being duplicates

    Raises
    ------
    SapelliCSVException
        When no Sapelli Project/Form (known on this server, and accessible
        by this user) can be found which matches the one used to generate
        the data in the CSV file.
    """
    # Make sure form_category_id is an int (or None):
    if form_category_id is None or form_category_id == '':
        form_category_id = None
    else:
        form_category_id = int(form_category_id)
    # Check if we got a file at all:
    if csv_file is None:
        raise SapelliCSVException('No file provided')
    # Sapelli Collector produces CSV files in 'utf-8-sig' encoding (= UTF8 with BOM):
    reader = UnicodeDictReader(csv_file, encoding='utf-8-sig')
    # Parse modelID & modelSchemaNumber from header row:
    # (both are embedded as "modelID=<n>" / "modelSchemaNumber=<n>"
    # pseudo-column names in the CSV header)
    model_id = None
    model_schema_number = None
    try:
        model_id = int(
            re.match(r"modelID=(?P<model_id_str>[0-9]+)", [
                fn for fn in reader.fieldnames if fn.startswith('modelID=')
            ][0]).group('model_id_str'))
        model_schema_number = int(
            re.match(
                r"modelSchemaNumber=(?P<model_schema_number_str>[0-9]+)", [
                    fn for fn in reader.fieldnames
                    if fn.startswith('modelSchemaNumber=')
                ][0]).group('model_schema_number_str'))
    # NOTE(review): best-effort header parsing — missing/unparsable ids
    # simply leave both as None. BaseException is overly broad here
    # (also traps KeyboardInterrupt); consider narrowing to Exception.
    except BaseException:
        pass
    # Get form and perform checks:
    if (model_id is not None) and (model_schema_number is not None):
        # Form identification found in CSV header row...
        # Check if this is the right project (with matching model_id):
        if model_id != self.sapelli_model_id:
            raise SapelliCSVException(
                'modelID mismatch (CSV: %s; project "%s": %s), '
                'data in CSV file was probably generated using '
                'another Sapelli project (version).' %
                (model_id, self.geokey_project.name, self.sapelli_model_id))
        # Get form using model_schema_number:
        try:
            form = self.forms.get(
                sapelli_model_schema_number=model_schema_number)
        except SapelliForm.DoesNotExist:
            raise SapelliCSVException(
                'No Form with modelSchemaNumber %s found in Project "%s".' %
                (model_schema_number, self.geokey_project.name))
        # Check if form matches form_category_id given in request:
        if (form_category_id is not None) and form_category_id != form.category.id:
            raise SapelliCSVException(
                'The data in the CSV file was not created using selected form "%s".'
                % form.sapelli_id)
    elif (form_category_id is not None):
        # No Form identification found in CSV header row, use form_category_id given in request...
        try:
            form = self.forms.get(pk=form_category_id)
        except SapelliForm.DoesNotExist:
            raise SapelliCSVException(
                'No Form with category_id %s found in Project "%s".' %
                (form_category_id, self.geokey_project.name))
    else:
        # No Form identification found in CSV header row, nor in request...
        raise SapelliCSVException(
            'No Form identification found in CSV header row, please select appropriate form.'
        )
    # Per-outcome counters returned at the end:
    imported = 0
    imported_joined_locations = 0
    imported_no_location = 0
    updated = 0
    ignored_duplicate = 0
    for row in reader:
        joined_locations = False
        dummy_location = False
        # Collect "[lon, lat]" fragments from every location field of
        # the form that has both coordinates filled in for this row.
        coordinates = []
        for sapelli_location_field in form.location_fields.all():
            sapelli_id = sapelli_location_field.sapelli_id
            longitute = row['%s.Longitude' % sapelli_id]
            latitute = row['%s.Latitude' % sapelli_id]
            if longitute and latitute:
                coordinates.append('[%s, %s]' %
                                   (float(longitute), float(latitute)))
        if len(coordinates) > 1:
            # Multiple locations are joined into a single MultiPoint.
            coordinates = ', '.join(coordinates)
            geometry = '{ "type": "MultiPoint", "coordinates": [ %s ] }' % coordinates
            joined_locations = True
        else:
            if len(coordinates) == 1:
                coordinates = coordinates[0]
            else:
                # No location at all: fall back to a dummy point at 0,0.
                coordinates = '[0.0, 0.0]'
                dummy_location = True
            geometry = '{ "type": "Point", "coordinates": %s }' % coordinates
        # DeviceId + StartTime double as the duplicate-detection key below.
        feature = {
            "location": {
                "geometry": geometry
            },
            "properties": {
                "DeviceId": row['DeviceID'],
                "StartTime": row['StartTime']
            },
            "meta": {
                "category": form.category.id
            }
        }
        # Map each Sapelli field value onto the GeoKey field key,
        # translating booleans and choice items to lookup-value ids.
        for sapelli_field in form.fields.all():
            key = sapelli_field.field.key
            value = row[sapelli_field.sapelli_id]
            if sapelli_field.truefalse:
                value = 0 if value == 'false' else 1
            if value:
                if sapelli_field.items.count() > 0:
                    leaf = sapelli_field.items.get(number=value)
                    value = leaf.lookup_value.id
                feature['properties'][key] = value
        # NOTE(review): import inside the loop is harmless (module is
        # cached) but could be hoisted to the top of the method.
        from geokey.contributions.serializers import ContributionSerializer
        try:
            # Existing observation with same category/device/start time?
            observation = self.geokey_project.observations.get(
                category_id=form.category.id,
                properties__StartTime=row['StartTime'],
                properties__DeviceId=row['DeviceID'])
            # Compare geometry and all properties to decide between
            # "update" and "ignore as duplicate".
            equal = True
            if json.loads(feature['location']['geometry']) != json.loads(
                    observation.location.geometry.json):
                equal = False
            if len(feature['properties']) != len(observation.properties):
                equal = False
            for key in feature['properties']:
                if feature['properties'][key] != observation.properties[
                        key]:
                    equal = False
            if not equal:
                serializer = ContributionSerializer(observation,
                                                    data=feature,
                                                    context={
                                                        'user': user,
                                                        'project':
                                                        self.geokey_project
                                                    })
                if serializer.is_valid(raise_exception=True):
                    serializer.save()
                    updated += 1
            else:
                ignored_duplicate += 1
        except Observation.DoesNotExist:
            # No existing observation: create a new contribution.
            serializer = ContributionSerializer(data=feature,
                                                context={
                                                    'user': user,
                                                    'project':
                                                    self.geokey_project
                                                })
            if serializer.is_valid(raise_exception=True):
                serializer.save()
                if joined_locations:
                    imported_joined_locations += 1
                elif dummy_location:
                    imported_no_location += 1
                else:
                    imported += 1
    return imported, imported_joined_locations, imported_no_location, updated, ignored_duplicate
def post(self, request, project_id):
    """
    Handle an EpiCollect upload for the given project.

    Three request shapes are served, distinguished by the ``type`` GET
    parameter:
    - ``thumbnail``/``full_image``: attach an uploaded image to the
      contribution previously registered under the same file name;
    - ``video``: same, but stored via the video-file factory;
    - otherwise: treat the POST body as a new observation and create a
      contribution from it.

    Returns HttpResponse('1') on success, HttpResponse('0') on any
    recoverable failure (EpiCollect's expected success/failure codes).
    """
    try:
        epicollect = EpiCollectProjectModel.objects.get(pk=project_id)
    except EpiCollectProjectModel.DoesNotExist:
        return HttpResponse('0')

    # All EpiCollect submissions are stored under the anonymous user.
    user = User.objects.get(display_name='AnonymousUser')
    upload_type = request.GET.get('type')

    if upload_type in ['thumbnail', 'full_image']:
        the_file = request.FILES.get('name')
        # Media was announced earlier by file name; look it up to find
        # the contribution it belongs to.
        try:
            epicollect_file = EpiCollectMedia.objects.get(
                file_name=the_file.name
            )
        except EpiCollectMedia.DoesNotExist:
            return HttpResponse('0')

        ImageFile.objects.create(
            name=the_file.name,
            description='',
            creator=user,
            contribution=epicollect_file.contribution,
            image=the_file
        )
        # The placeholder record has served its purpose.
        epicollect_file.delete()
        return HttpResponse('1')
    elif upload_type == 'video':
        the_file = request.FILES.get('name')
        try:
            epicollect_file = EpiCollectMedia.objects.get(
                file_name=the_file.name
            )
        except EpiCollectMedia.DoesNotExist:
            return HttpResponse('0')

        MediaFile.objects._create_video_file(
            the_file.name,
            '',
            user,
            epicollect_file.contribution,
            the_file
        )
        epicollect_file.delete()
        return HttpResponse('1')

    # No media upload: the POST body describes a new observation.
    data = request.POST
    try:
        category = Category.objects.get(pk=data.get('category'))
    except Category.DoesNotExist:
        return HttpResponse('0')
    except ValueError:
        # The value provided for category is not a number
        return HttpResponse('0')

    try:
        lng = float(data.get('location_lon'))
        lat = float(data.get('location_lat'))
    except TypeError:
        # Missing coordinates (float(None)).
        return HttpResponse('0')

    observation = {
        'type': 'Feature',
        'location': {
            'geometry': ('{"type": "Point", "coordinates": '
                         '[%s, %s]}' % (lng, lat))
        },
        'properties': {
            'location_acc': data.get('location_acc'),
            'location_provider': data.get('location_provider'),
            'location_alt': data.get('location_alt'),
            'location_bearing': data.get('location_bearing'),
            'unique_id': data.get('unique_id'),
            'DeviceID': request.GET.get('phoneid')
        },
        'meta': {
            'category': data.get('category'),
        }
    }

    # EpiCollect posts each field as "<key-with-underscores>_<category id>";
    # dashes in GeoKey keys appear as underscores in the POST data.
    for field in category.fields.all():
        key = field.key.replace('-', '_')
        value = data.get(key + '_' + str(category.id))
        # NOTE(review): value may be None when the field was not
        # submitted; json.loads/strptime below would then raise —
        # confirm EpiCollect always sends every category field.
        if field.fieldtype == 'MultipleLookupField':
            value = json.loads('[' + value + ']')
        elif field.fieldtype in ['DateField', 'DateTimeField']:
            # NOTE(review): assumes dd/mm/yyyy input and drops any time
            # part for DateTimeField — verify against the EpiCollect form.
            value = datetime.strptime(value,
                                      '%d/%m/%Y').strftime('%Y-%m-%d')
        observation['properties'][field.key] = value

    contribution = ContributionSerializer(
        data=observation,
        context={'user': user, 'project': epicollect.project}
    )
    if contribution.is_valid(raise_exception=True):
        contribution.save()

    # Register announced media by file name so a later media upload
    # (handled above) can be matched to this contribution.
    photo_id = data.get('photo')
    if photo_id is not None:
        EpiCollectMedia.objects.create(
            contribution=contribution.instance,
            file_name=photo_id
        )

    video_id = data.get('video')
    if video_id is not None:
        EpiCollectMedia.objects.create(
            contribution=contribution.instance,
            file_name=video_id
        )

    return HttpResponse('1')