def setUp(self):
    """Create a user, job and run, then populate the job's tags from the HDM preset."""
    self.path = os.path.dirname(os.path.realpath(__file__))
    Group.objects.create(name='TestDefaultExportExtentGroup')
    self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
    # bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    bbox = Polygon.from_bbox((-10.85, 6.25, -10.62, 6.40))
    the_geom = GEOSGeometry(bbox, srid=4326)
    self.job = Job.objects.create(name='TestJob', description='Test description',
                                  user=self.user, the_geom=the_geom)
    self.job.feature_save = True
    self.job.feature_pub = True
    self.job.save()
    self.run = ExportRun.objects.create(job=self.job, user=self.user)
    parser = presets.PresetParser(self.path + '/files/hdm_presets.xml')
    tags = parser.parse()
    self.assertIsNotNone(tags)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(238, len(tags))
    # save all the tags from the preset
    for tag_dict in tags:
        Tag.objects.create(key=tag_dict['key'], value=tag_dict['value'],
                           job=self.job, data_model='osm',
                           geom_types=tag_dict['geom_types'])
    self.assertEqual(238, self.job.tags.all().count())
def run(*script_args):
    """Run a filtered Overpass query for Monrovia and print timings and result size.

    Builds key=value filters from the OSM preset, executes the query through
    the Overpass wrapper, then reports the raw result file size and applies
    the filter step.
    """
    bbox = '6.25,-10.85,6.40,-10.62'  # monrovia
    path = '/home/ubuntu/www/hotosm/utils/tests'
    parser = presets.PresetParser(preset=path + '/files/osm_presets.xml')
    kvps = parser.parse()
    # build one 'key=value' filter term per preset entry
    filters = ['{0}={1}'.format(kvp['key'], kvp['value']) for kvp in kvps]
    # single-argument print(...) works on both Python 2 and 3
    print(filters)
    print("==============")
    print("Querying Monrovia with OSM filters.")
    print(timezone.now())
    op = Overpass(bbox=bbox, stage_dir=path + '/files/', job_name='test', filters=filters)
    osm = op.run_query()
    print(timezone.now())
    stat = os.stat(osm)
    size = stat.st_size / 1024 / 1024.00  # bytes -> MB (float)
    print('Result file size: {0}'.format(size))
    op.filter()
    # check pbf conversion
    # pbf = OSMToPBF(osm=path + '/files/filter.osm', pbffile=path + '/files/filter.pbf', debug=True)
    # pbf.convert()
def test_get_categorised_tags(self):
    """Parsing the HDM preset yields 238 tags that all save (with groups) onto the job."""
    parser = presets.PresetParser(self.path + '/files/hdm_presets.xml')
    tags = parser.parse()
    self.assertIsNotNone(tags)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(238, len(tags))
    for tag_dict in tags:
        Tag.objects.create(key=tag_dict['key'], value=tag_dict['value'],
                           job=self.job, data_model='osm',
                           geom_types=tag_dict['geom_types'],
                           groups=tag_dict['groups'])
    self.assertEqual(238, self.job.tags.all().count())
def test_tags(self):
    """All 238 HDM preset tags save onto a job whose tags were cleared first."""
    self.job.tags.all().delete()
    parser = presets.PresetParser(self.path + '/files/hdm_presets.xml')
    tags = parser.parse()
    self.assertIsNotNone(tags)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(238, len(tags))
    # save all the tags from the preset
    for tag_dict in tags:
        Tag.objects.create(key=tag_dict['key'], value=tag_dict['value'],
                           job=self.job, data_model='osm',
                           geom_types=tag_dict['geom_types'])
    self.assertEqual(238, self.job.tags.all().count())
def test_save_tags_from_preset(self):
    """Tags saved from the HDM preset retain their group membership."""
    parser = presets.PresetParser(self.path + '/files/hdm_presets.xml')
    tags = parser.parse()
    self.assertIsNotNone(tags)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(238, len(tags))
    for tag_dict in tags:
        Tag.objects.create(key=tag_dict['key'], value=tag_dict['value'],
                           job=self.job, data_model='osm',
                           geom_types=tag_dict['geom_types'],
                           groups=tag_dict['groups'])
    self.assertEqual(238, self.job.tags.all().count())
    # check the groups got saved correctly
    saved_tag = self.job.tags.filter(value='service')[0]
    self.assertIsNotNone(saved_tag)
    self.assertEqual(3, len(saved_tag.groups))
def test_categorised_tags(self):
    """Saved preset tags categorise into the expected point/line/polygon counts."""
    # delete existing tags
    self.job.tags.all().delete()
    parser = presets.PresetParser(self.path + '/files/hdm_presets.xml')
    tags = parser.parse()
    self.assertIsNotNone(tags)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(238, len(tags))
    # save all the tags from the preset
    for tag_dict in tags:
        Tag.objects.create(key=tag_dict['key'], value=tag_dict['value'],
                           job=self.job, data_model='osm',
                           geom_types=tag_dict['geom_types'])
    self.assertEqual(238, self.job.tags.all().count())
    job = Job.objects.all()[0]
    categories = job.categorised_tags
    self.assertIsNotNone(categories)
    self.assertEqual(24, len(categories['points']))
    self.assertEqual(12, len(categories['lines']))
    self.assertEqual(22, len(categories['polygons']))
def setUp(self):
    """Create a user and a job with all export formats, tagged from the HDM preset."""
    self.url = 'http://localhost/interpreter'
    self.bbox = '6.25,-10.85,6.40,-10.62'  # monrovia
    self.path = settings.ABS_PATH()
    # pre-loaded by 'insert_export_formats' migration
    self.formats = ExportFormat.objects.all()
    Group.objects.create(name='TestDefaultExportExtentGroup')
    self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = GEOSGeometry(bbox, srid=4326)
    self.job = Job.objects.create(name='TestJob', description='Test description',
                                  event='Nepal activation', user=self.user,
                                  the_geom=the_geom)
    self.uid = self.job.uid
    # add the formats to the job
    self.job.formats = self.formats
    self.job.save()
    self.osm = self.path + '/files/query.osm'
    self.query = '[maxsize:2147483648][timeout:1600];(node(6.25,-10.85,6.40,-10.62);<;);out body;'
    self.job.tags.all().delete()
    parser = presets.PresetParser(self.path + '/utils/tests/files/hdm_presets.xml')
    tags = parser.parse()
    self.assertIsNotNone(tags)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(256, len(tags))
    # save all the tags from the preset
    for tag_dict in tags:
        Tag.objects.create(key=tag_dict['key'], value=tag_dict['value'],
                           job=self.job, data_model='osm',
                           geom_types=tag_dict['geom_types'])
    self.assertEqual(256, self.job.tags.all().count())
def setUp(self):
    """Create a user and a job, then save every HDM preset tag on the job."""
    self.path = settings.ABS_PATH()
    parser = presets.PresetParser(self.path + '/utils/tests/files/hdm_presets.xml')
    self.tags = parser.parse()
    Group.objects.create(name='TestDefaultExportExtentGroup')
    self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = GEOSGeometry(bbox, srid=4326)
    self.job = Job.objects.create(name='TestJob', description='Test description',
                                  event='Nepal activation', user=self.user,
                                  the_geom=the_geom)
    # NOTE: a redundant second parser.parse() whose result was never used
    # has been removed; self.tags already holds the parsed preset entries.
    for entry in self.tags:
        Tag.objects.create(name=entry['name'], key=entry['key'],
                           value=entry['value'],
                           geom_types=entry['geom_types'],
                           data_model='PRESET', job=self.job)
def create(self, request, *args, **kwargs):
    """
    Create a Job from the request data.

    Validates the payload via the serializer, resolves the requested export
    formats, populates tags from an uploaded preset / the request / the
    default HDM preset, links optional translation and transform configs,
    then kicks off the export tasks.

    Args:
        request: the HTTP request.
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.

    Returns:
        a DRF Response: 202 with the running job, 400 on invalid input,
        500 on a database error while creating the job.
    """
    serializer = self.get_serializer(data=request.data)
    if serializer.is_valid():
        # pull the optional pieces out of the raw request data
        formats = request.data.get('formats')
        tags = request.data.get('tags')
        preset = request.data.get('preset')
        translation = request.data.get('translation')
        transform = request.data.get('transform')
        export_formats = []
        job = None
        # 'or []' guards against a missing 'formats' key (None is not iterable)
        # would be good to accept either format slug or uuid here..
        for slug in formats or []:
            try:
                export_formats.append(ExportFormat.objects.get(slug=slug))
            except ExportFormat.DoesNotExist:
                # logger.warn is deprecated; the lookup key is a slug, not a uid
                logger.warning('Export format with slug: {0} does not exist'.format(slug))
        if len(export_formats) > 0:
            # save the job and make sure it's committed before running tasks..
            try:
                with transaction.atomic():
                    job = serializer.save()
                    job.formats = export_formats
                    if preset:
                        # get the tags from the uploaded preset
                        logger.debug('Found preset with uid: %s' % preset)
                        config = ExportConfig.objects.get(uid=preset)
                        job.configs.add(config)
                        preset_path = config.upload.path
                        logger.debug(config.upload.path)
                        # use unfiltered preset parser
                        parser = presets.UnfilteredPresetParser(preset=preset_path)
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            Tag.objects.create(name=entry['name'], key=entry['key'],
                                               value=entry['value'],
                                               geom_types=entry['geom_types'],
                                               data_model='PRESET', job=job)
                    elif tags:
                        # get tags from request
                        for entry in tags:
                            Tag.objects.create(name=entry['name'], key=entry['key'],
                                               value=entry['value'], job=job,
                                               data_model=entry['data_model'],
                                               geom_types=entry['geom_types'],
                                               groups=entry['groups'])
                    else:
                        # use hdm preset as default tags
                        path = os.path.dirname(os.path.realpath(__file__))
                        parser = presets.PresetParser(preset=path + '/hdm_presets.xml')
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            Tag.objects.create(name=entry['name'], key=entry['key'],
                                               value=entry['value'],
                                               geom_types=entry['geom_types'],
                                               data_model='HDM', job=job)
                    # check for translation file
                    if translation:
                        config = ExportConfig.objects.get(uid=translation)
                        job.configs.add(config)
                    # check for transform file
                    if transform:
                        config = ExportConfig.objects.get(uid=transform)
                        job.configs.add(config)
            except Error as e:
                error_data = OrderedDict()
                error_data['id'] = _('server_error')
                error_data['message'] = 'Error creating export job: {0}'.format(e)
                return Response(error_data,
                                status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        else:
            error_data = OrderedDict()
            error_data['formats'] = [_('Invalid format provided.')]
            return Response(error_data, status=status.HTTP_400_BAD_REQUEST)
        # run the tasks
        task_runner = ExportTaskRunner()
        job_uid = str(job.uid)
        task_runner.run_task(job_uid=job_uid)
        running = JobSerializer(job, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def create(self, request, *args, **kwargs):
    """
    Create a Job from the supplied request data.

    The request data is validated by *api.serializers.JobSerializer*.
    Associates the *Job* with required export formats, *ExportConfig* and
    *Tags*, then kicks off the export tasks.

    Args:
        request: the HTTP request.
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.

    Returns:
        the newly created Job instance (202), or an error Response
        (400 on invalid input, 500 on failure while creating the job).

    Raises:
        ValidationError: in case of validation errors.
    """
    serializer = self.get_serializer(data=request.data)
    if serializer.is_valid():
        # get the required data from the validated request
        export_formats = request.data.get('formats')
        tags = request.data.get('tags')
        preset = request.data.get('preset')
        job = None
        # truthiness check also guards against a missing 'formats' key
        # (len(None) would raise TypeError instead of returning a 400)
        if export_formats:
            # save the job and make sure it's committed before running tasks
            try:
                with transaction.atomic():
                    job = serializer.save()
                    job.export_formats = export_formats
                    if preset:
                        # get the tags from the uploaded preset
                        config = ExportConfig.objects.get(uid=preset)
                        job.config = config
                        job.save()
                        preset_path = config.upload.path
                        # use the UnfilteredPresetParser
                        parser = presets.UnfilteredPresetParser(preset=preset_path)
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            Tag.objects.create(name=entry['name'], key=entry['key'],
                                               value=entry['value'],
                                               geom_types=entry['geom_types'],
                                               data_model='PRESET', job=job)
                    elif tags:
                        # get tags from request
                        for entry in tags:
                            Tag.objects.create(name=entry['name'], key=entry['key'],
                                               value=entry['value'], job=job,
                                               data_model=entry['data_model'],
                                               geom_types=entry['geom_types'],
                                               groups=entry['groups'])
                    else:
                        # use hdm preset as default tags if no preset or tags
                        # are provided in the request
                        path = os.path.dirname(os.path.realpath(__file__))
                        parser = presets.PresetParser(
                            preset=path + '/presets/hdm_presets.xml')
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            Tag.objects.create(name=entry['name'], key=entry['key'],
                                               value=entry['value'],
                                               geom_types=entry['geom_types'],
                                               data_model='HDM', job=job)
            except Exception as e:
                error_data = OrderedDict()
                error_data['id'] = _('server_error')
                error_data['message'] = _(
                    'Error creating export job: %(error)s') % {'error': e}
                return Response(error_data,
                                status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        else:
            error_data = OrderedDict()
            error_data['formats'] = [_('Invalid format provided.')]
            return Response(error_data, status=status.HTTP_400_BAD_REQUEST)
        # run the tasks
        task_runner = ExportTaskRunner()
        job_uid = str(job.uid)
        task_runner.run_task(job_uid=job_uid)
        running = JobSerializer(job, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)