def create_osm_node_by_data(data, user, duplication_check=True):
    """Create an OSM node from ``data`` on behalf of ``user``.

    Splits Healthsites extension attributes off the plain OSM tags,
    validates the payload, maps Healthsites tag names to OSM tags and
    pushes the node to OSM. On any failure a pending review is recorded
    and an error payload is returned instead of raising.

    :param data: payload holding at least a ``'tag'`` mapping.
    :param user: user the node is created for.
    :param duplication_check: forwarded to ``validate_osm_data``.
    :return: the OSM response dict on success, or a dict with
        ``'error'`` and ``'payload'`` keys on failure.
    """
    try:
        # Separate plain OSM tags from Healthsites extension attributes.
        osm_attr, locality_attr = split_osm_and_extension_attr(data['tag'])
        data['tag'] = osm_attr
        validate_osm_data(data, duplication_check=duplication_check)

        # Translate Healthsites tag names into their OSM equivalents.
        mapping_file_path = ABS_PATH('api', 'fixtures', 'mapping.yml')
        data['tag'] = convert_to_osm_tag(
            mapping_file_path, data['tag'], 'node')

        # Push the node to the OSM instance.
        response = create_osm_node(user, data)

        # Record a pending index update, then persist the extension
        # attributes against the freshly created node.
        create_pending_update(
            'node', response['id'], data['tag']['name'],
            user, response['version'])
        save_extensions('node', response['id'], locality_attr)
        return response
    except Exception as e:
        # Best-effort path: keep the failed payload around for review.
        create_pending_review(user, data, '%s' % e)
        return {
            'error': '%s' % e,
            'payload': data,
        }
def upload_to_osm(self):
    """Push parsed localities/facilities/healthsites data to OSM instance.

    Iterates over ``self._parsed_data``, uploading each row as an OSM
    node on behalf of the row's ``osm_user``. Per-row status and overall
    progress are accumulated in ``self._upload_status`` and rewritten to
    ``self.progress_file`` after every row so progress can be polled.

    :return: True when every row uploaded successfully, otherwise False.
    """
    self._upload_status['total'] = len(self._parsed_data)
    for row_number, data in enumerate(self._parsed_data):
        upload_status = {'uploaded': True, 'message': 'Uploaded'}

        # split osm and extension attributes
        osm_attr, locality_attr = split_osm_and_extension_attr(data['tag'])
        data['tag'] = osm_attr

        # Map Healthsites tags to OSM tags
        mapping_file_path = ABS_PATH('api', 'fixtures', 'mapping.yml')
        data['tag'] = convert_to_osm_tag(
            mapping_file_path, data['tag'], 'node')

        # Push data to OSM on behalf of the user named in the row.
        user = get_object_or_404(User, username=data['osm_user'])
        try:
            response = create_osm_node(user, data)
            # create pending index
            create_pending_update(
                'node', response['id'], data['tag']['name'],
                user, response['version'])
            save_extensions('node', response['id'], locality_attr)
        except Exception:
            # Record the failure for this row and keep processing the
            # rest. (Was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt.)
            upload_status.update({
                'uploaded': False,
                'message': '{0}: {1}'.format(
                    unicode(sys.exc_info()[0].__name__),
                    unicode(sys.exc_info()[1]))
            })
        self._upload_status['status'][row_number + 1] = upload_status
        self._upload_status['count'] = row_number + 1
        self._upload_status['summary'] = (
            'There is error when uploading the data. '
            'Please see the status detail for more '
            'information.' if not self.is_uploaded() else '')

        # Write status to progress file; the context manager guarantees
        # the handle is closed even if serialisation fails.
        with open(self.progress_file, 'w+') as f:
            f.write(json.dumps(self._upload_status))

    # True only when no row failed.
    return all(
        status['uploaded']
        for status in self._upload_status['status'].values()
    )
def post(self, request):
    """Create an OSM node from the request payload.

    Flow: split extension attributes from OSM tags, optionally swap the
    acting user (staff review mode or explicit ``osm_user`` in the
    payload), validate, map Healthsites tags to OSM tags, push the node
    to OSM, then record a pending index update and extensions.

    On failure, either a pending review is created/updated (depending on
    whether ``?review=`` was supplied) and a 400 response with the error
    and original payload is returned.
    """
    user = request.user
    # Deep-copy so payload mutations below never touch request.data,
    # which is reused verbatim in the error path.
    data = copy.deepcopy(request.data)
    if user.username in settings.TEST_USERS:
        # Test users must never write to the live OSM instance.
        raise Exception('Create osm : {}'.format(
            json.dumps({
                'payload': data,
                'user': user.username
            })))
    # Now, we post the data directly to OSM.
    try:
        # Split osm and extension attribute
        osm_attr, locality_attr = split_osm_and_extension_attr(
            data['tag'])
        data['tag'] = osm_attr
        # Verify data uploader and owner/collector if the API is being used
        # for uploading data from other osm user.
        if request.user.is_staff and request.GET.get('review', None):
            # Staff review mode: act as the original uploader.
            data['osm_user'] = get_pending_review(
                request.GET.get('review')).uploader.username
        if data.get('osm_user'):
            is_valid, message = verify_user(user, data['osm_user'])
            if not is_valid:
                return HttpResponseForbidden(message)
            else:
                try:
                    # From here on, `user` is the target osm_user, not
                    # the authenticated requester.
                    user = get_object_or_404(
                        User, username=data['osm_user'])
                except Http404:
                    message = 'User %s is not exist.' % data['osm_user']
                    return HttpResponseForbidden(message)
        # Duplication check is on by default; only the literal string
        # 'false' in the query string disables it.
        duplication_check = request.GET.get('duplication-check', True)
        if duplication_check == 'false':
            duplication_check = False
        validate_osm_data(data, duplication_check=duplication_check)
        # Map Healthsites tags to OSM tags
        mapping_file_path = ABS_PATH('api', 'fixtures', 'mapping.yml')
        data['tag'] = convert_to_osm_tag(
            mapping_file_path, data['tag'], 'node')
        # Push data to OSM
        response = create_osm_node(user, data)
        # create pending index
        create_pending_update(
            'node', response['id'], data['tag']['name'],
            user, response['version'])
        save_extensions('node', response['id'], locality_attr)
        if request.GET.get('review', None):
            # Successful re-submission resolves the pending review.
            delete_pending_review(request.GET.get('review', None))
        return Response(response)
    except Exception as e:
        if not request.GET.get('review', None):
            # Only record a review when uploading on behalf of another
            # user; the requester's own failures are just reported back.
            if user != request.user:
                create_pending_review(user, request.data, '%s' % e)
        else:
            try:
                update_pending_review(
                    request.GET.get('review', None),
                    request.data, '%s' % e)
            except Exception as e:
                # Review update itself failed; returns immediately, so
                # the outer `e` below is never shadowed at this point.
                return HttpResponseBadRequest('%s' % e)
        output = {
            'error': '%s' % e,
            'payload': request.data,
        }
        return HttpResponseBadRequest('%s' % json.dumps(output))
def handle(self, *args, **options):
    """Migrate all localities owned by ``--username`` into OSM.

    Localities that already carry an ``osm_id`` only get their extension
    attributes saved; the rest are pushed to OSM as new nodes via
    ``create_osm_node_by_data``. Progress (count/total) is written to a
    per-user file under ``CACHE_DIR/data-migration-progress`` after each
    locality; that file's existence also acts as a "migration already
    running" lock.

    Exits the process with an error message when the username is
    missing, unknown, or a migration for it is already in progress.
    """
    if not options['username']:
        # sys.exit raises SystemExit; no statement after it can run,
        # so the dead `return`s that followed these exits were removed.
        sys.exit('Please provide username parameter by adding '
                 '--username=<username>')
    try:
        user = User.objects.get(username=options['username'])
    except User.DoesNotExist:
        sys.exit('Username does not exist')

    pathname = \
        os.path.join(settings.CACHE_DIR, 'data-migration-progress')
    progress_file = \
        os.path.join(pathname, '{}.txt'.format(options['username']))
    if os.path.exists(progress_file):
        # Progress file doubles as a lock for this user's migration.
        sys.exit('Data migration process for user {} '
                 'is already running.'.format(options['username']))

    print('Start migrating data.........')
    query = Locality.objects.filter(
        changeset__social_user__username=options['username'])
    total_query = query.count()
    for idx, locality in enumerate(query):
        # change into osm format
        osm_dict = convert_into_osm_dict(locality)
        osm, extension = split_osm_and_extension_attr(osm_dict)
        values = locality.repr_dict()
        if values.get('osm_id', None):
            # Already exists on OSM: only attach extension attributes.
            # Without an osm_type we cannot address the object, skip it.
            if values.get('osm_type', None):
                osm_type = values['osm_type']
            else:
                continue
            osm_id = values['osm_id']
            print('Checking locality for osm id: {}, osm type: {}'.format(
                osm_id, osm_type))
            save_extensions(osm_type, osm_id, extension)
        else:
            # No osm id yet: push a brand-new node to OSM.
            request_data = osm.copy()
            request_data.update(extension)
            data = {
                'tag': request_data,
                'lat': values['geom'][1],
                'lon': values['geom'][0]
            }
            create_osm_node_by_data(user=user, data=data)
        # NOTE(review): marking every processed locality as migrated —
        # the original (whitespace-mangled) source is ambiguous about
        # whether this applied only to newly created nodes; confirm.
        locality.migrated = True
        locality.save()

        # Persist progress after every locality so it can be polled.
        if not os.path.exists(pathname):
            os.makedirs(pathname)
        data_counter = {
            'count': idx + 1,
            'total': total_query,
        }
        # `filename` was recomputed each iteration but always equals
        # progress_file; use a context manager so the handle is closed
        # even if serialisation fails.
        with open(progress_file, 'w+') as f:
            f.write(json.dumps(data_counter))
    print('Migrating old data is finished.')