Example #1
0
    def on_get(self, req, resp, entity_id):
        """Handles GET requests
        Returns list of datasets found with this identifier

        http://localhost:8000/dataset/LC81920272016240LGN00.json

        """
        logger.info('[GET] /dataset/%s.json' % (entity_id))
        for header_name, header_value in self.headers.iteritems():
            resp.set_header(header_name, header_value)

        results = list()
        result_set = self._get_dataset_(entity_id)
        if len(result_set) == 0:
            # Nothing found: empty list body with a 404 status.
            resp.status = falcon.HTTP_404
        else:
            # A 'General_Structure' serialization header requests the full
            # serialized object; any other value gets the bare 'data'
            # payload intended for the web client.
            full_structure = (
                req.get_header('Serialization') == 'General_Structure')
            if full_structure:
                results = [serialize(obj, as_json=False)
                           for obj in result_set]
            else:
                results = [serialize(obj, as_json=False)['data']
                           for obj in result_set]
            resp.status = self.default_status

        body = ujson.dumps(results)
        if can_zip_response(req.headers):
            resp.set_header('Content-Encoding', 'gzip')
            resp.body = compress_body(body)
        else:
            resp.body = body
Example #2
0
 def on_delete(self, req, resp, entity_id):
     """Handles DELETE requests for a single dataset.

     Deletes the dataset identified by entity_id and acknowledges the
     deletion with a small JSON status document.
     """
     logger.info('[DEL] /dataset/%s.json' % (entity_id))
     # The return value of delete_dataset was previously bound to an
     # unused local; the call is made for its side effect only.
     Persistance().delete_dataset(entity_id)
     resp.status = falcon.HTTP_200
     resp.set_header('Content-Type', 'application/json')
     resp.body = ujson.dumps({
         'action': 'delete',
         'status': 'OK',
         "entity_id": entity_id
     })
Example #3
0
    def on_get(self, req, resp, group=None):
        """Handles GET requests
        http://localhost:8000/sensors
        http://localhost:8000/sensors/platform (sensor_level, mission, platform)

        Returns the list of known sensors grouped by `group`
        (defaults to 'sensor_level'); raises falcon.HTTPNotFound when
        the group yields no sensors.
        """
        if group:
            logger.info('[GET] /sensors/%s' % group)
        else:
            logger.info('[GET] /sensors/')
        for key, value in self.headers.iteritems():
            resp.set_header(key, value)

        # set default group to sensor_level
        if not group:
            group = 'sensor_level'
        results = list()

        # Loop-invariant type map, hoisted out of the loop (it was rebuilt
        # on every iteration). Assumes General_Structure does not mutate
        # the mapping — TODO confirm.
        types = {
            'sensor_name': str,
            'proc_level': str,
            'id': int,
            'label': str,
            'type': str
        }
        for counter, result in enumerate(Persistance().get_sensors(group)):
            values = {
                'sensor_name': result[1],
                'proc_level': result[2],
                'id': counter,
                'label': '%s' % result[0],
                'type': group
            }
            x = General_Structure(values, types)
            x.__class__.__name__ = 'Sensor'
            results.append(x.__dict__)

        if len(results) == 0:
            raise falcon.HTTPNotFound()

        resp.status = self.default_status
        if can_zip_response(req.headers):
            resp.set_header('Content-Encoding', 'gzip')
            resp.body = compress_body(ujson.dumps(results))
        else:
            resp.body = ujson.dumps(results)
Example #4
0
    def on_get(self, req, resp, sensor):
        """Handles GET requests: renders an HTML coverage map for a sensor.

        http://localhost:8000/catalog/status/count/<sensor>

        Optional query parameter 'last_days' (default 4) limits the
        observation window passed to the persistence layer.
        """
        logger.info('[GET] /catalog/status/count/%s' % (sensor))

        # World extent in Leaflet order: [[south, west], [north, east]]
        minx, maxx, miny, maxy = -180, 180, -90, 90
        if 'last_days' in req.params:
            last_days = int(req.params['last_days'])
        else:
            last_days = 4

        global_extent = [[miny, minx], [maxy, maxx]]
        res = Persistance().get_observation_coverage(int(sensor),
                                                     last_days=last_days)
        geojson = make_GeoJson(res['geojson'], res['attr'])

        # The response is always a rendered HTML page; the former
        # 'application/json' branches were unreachable (content_type was
        # hard-coded to 'text/html') and have been removed.
        content_type = 'text/html'
        body = j2_env.get_template('leaflet_map.html').render(
            title='Reference object: %s' % sensor,
            center='[%f, %f]' % (21.5, -102),
            zoomlevel=5,
            geojson=ujson.dumps(geojson),
            label_attribute=None,
            extent=ujson.dumps(global_extent))

        resp.set_header('Content-Type', content_type)
        if can_zip_response(req.headers):
            resp.set_header('Content-Encoding', 'gzip')
            resp.body = compress_body(body)
        else:
            resp.body = body
        resp.status = falcon.HTTP_200
Example #5
0
                                        href='http://docs.example.com/auth')
        except ValueError, e:
            description = 'Given parameters contain bad values: %s'% str(e)
            raise falcon.HTTPBadRequest('KeyError', description,
                                        href='http://docs.example.com/auth')

        query = self._query_([{"ref_group": ref_group, "ref_id": ref_id}],
                             [{"start_date": dates[0], "end_date": dates[1]}],
                             sensor_list, clouds)
        query_struct = {'area':[{"ref_group": ref_group, "ref_id": ref_id}],
                        'dates':[{"start_date": dates[0], "end_date": dates[1]}],
                        'sensors':sensor_list, 'clouds':clouds
                       }

        found_dataset = self._get_datasets(query)
        logger.info('[GET] /catalog/search/result.%s' % format, extra={x:str(y) for x,y in query_struct.iteritems()})
        if check_resources:
            for ds in found_dataset:
                if 's3public' in ds['resources'].keys():
                    if 'zip' in ds['resources']['s3public'].keys():
                        if not remote_file_exists( ds['resources']['s3public']['zip']):
                            print '%s missing' % ds['resources']['s3public']['zip']

        if format.lower() == 'json':
            if 'search/count' in req.url:
                results['count'] = query.count()
            else:
                results['count'] = query.count()
                results['found_dataset'] = found_dataset
                results['found_tiles'] = sorted(list(set([x['tile_identifier'] for x in found_dataset])))
                results['found_resources'] = [BASE_URL + self.router.reverse('dataset_entity', entity_id=x['entity_id'])
Example #6
0
    def on_put(self, req, resp, entity_id):
        #logger.info('[PUT] /dataset/%s.json' % (entity_id))
        for key, value in self.headers.iteritems():
            resp.set_header(key, value)

        output = cStringIO.StringIO()
        while True:
            chunk = req.stream.read(4096)
            if not chunk:
                break
            output.write(chunk)

        json_string = output.getvalue()
        output.close()
        obj_list = deserialize(json_string)

        for obj in obj_list:
            try:
                new_dataset = Persistance().add_dataset(obj)
                if new_dataset:
                    if obj.sensor == 'Sentinel-2A':
                        group_id = 10
                    elif obj.sensor in ['OLI_TIRS', 'OLI', 'TIRS']:
                        group_id = 11
                    else:
                        group_id = None
                    if group_id:
                        result = Persistance(
                        ).get_reference_by_groupid_reference_name(
                            group_id, obj.tile_identifier)
                    ref_obj = result.first()
                    if ref_obj:
                        coords = ujson.loads(ref_obj[2])['coordinates']
                        min_coord = min([b for x in coords for b in x])
                        max_coord = max([b for x in coords for b in x])
                        cent_y = (max_coord[0] -
                                  min_coord[0]) / 2 + min_coord[0]
                        cent_x = (max_coord[1] -
                                  min_coord[1]) / 2 + min_coord[1]

                        logger_container = dict()
                        logger_container['sensor'] = obj.sensor
                        logger_container['entity_id'] = obj.entity_id
                        logger_container[
                            'tile_identifier'] = obj.tile_identifier
                        logger_container['acq_time'] = obj.acq_time
                        logger_container['clouds'] = obj.clouds
                        logger_container['time_registered'] = str(
                            obj.time_registered)
                        logger_container['location'] = {
                            'lat': cent_y,
                            'lon': cent_x
                        }
                    else:
                        logger_container = dict()
                    resp.body = ujson.dumps({
                        'action': 'create dataset',
                        'status': 'OK',
                        "new_obj_id": obj.entity_id
                    })
                    resp.status = falcon.HTTP_201
                    logger.info('Register new dataset: %s' % (obj.entity_id),
                                extra=logger_container)
                else:
                    logger.warn(
                        'Dataset (%s/%s/%s) already exists' %
                        (obj.entity_id, obj.tile_identifier, obj.acq_time))
                    description = 'Dataset (%s/%s/%s already exists' % (
                        obj.entity_id, obj.tile_identifier, obj.acq_time)
                    raise falcon.HTTPConflict(
                        'Dataset already exists',
                        description,
                        href='http://docs.example.com/auth')
            except TimeoutError, e:
                resp.body = ujson.dumps({
                    'status': 'ERROR',
                    "errorcode": str(e)
                })
                resp.status = falcon.HTTP_408
Example #7
0
    def on_get(self, req, resp, group_id, reference_id, format):
        """Handles GET requests for reference objects.

        Looks up reference geometries either by explicit reference_id or,
        when reference_id == 'all', by a 'bbox' query parameter, and returns
        them as 'json' (attributes only), 'geojson' (plain feature
        collection) or 'html' (rendered leaflet map) depending on `format`.

        Raises falcon.HTTPBadRequest when neither a concrete reference_id
        nor a bbox is supplied, or when the format is unknown.
        """
        logger.info('[GET] /reference/%s/%s.%s' %
                    (group_id, reference_id, format))

        start_time = time.time()
        results = dict()
        minx, miny, maxx, maxy = None, None, None, None

        if reference_id == 'all' and 'bbox' in req.params:
            minx, miny, maxx, maxy = [float(x) for x in req.params['bbox']]
            # Closed ring tracing the bounding box.
            polygon = Polygon([(minx, miny), (minx, maxy), (maxx, maxy),
                               (maxx, miny), (minx, miny)])

            ref_objects = Persistance().get_reference_by_groupid_polygon(
                group_id, polygon.wkt)
        elif reference_id != 'all':
            ref_objects = Persistance().get_reference_by_groupid_reference_id(
                group_id, reference_id)

        else:
            description = 'Please specify entity_name OR bbox with request, given %s:%s.' % (
                reference_id, req.params.get('bbox'))
            raise falcon.HTTPBadRequest('DateFormat',
                                        description,
                                        href='http://docs.example.com/auth')

        results['counter'] = ref_objects.count()

        geoms, attrs = list(), list()
        extents = list()
        for ref_objs in ref_objects.all():
            ref_obj, geojson, extent = ref_objs
            extents.append(ujson.loads(extent))
            geoms.append(ujson.loads(geojson))
            attrs.append({
                "ref_name":
                ref_obj.ref_name,
                "reference_id":
                ref_obj.ref_id,
                'group_id':
                self.ref_groups[ref_obj.referencetype_id].id,
                'group_name':
                self.ref_groups[ref_obj.referencetype_id].name,
                'group_description':
                self.ref_groups[ref_obj.referencetype_id].description
            })

        results['geojson'] = make_GeoJson(geoms, attrs)
        # return plain geojson object

        results['processing_time'] = time.time() - start_time
        content_type = 'application/json'
        if format == 'json':
            del results['geojson']
            results['attributes'] = attrs
        elif format == 'geojson':
            results = results['geojson']
        elif format == 'html':
            # 'is not None' replaces '!= None' (PEP 8 idiom); 0.0 is a
            # legal coordinate, so identity is the correct test.
            if (minx is not None and miny is not None
                    and maxx is not None and maxy is not None):
                global_extent = [[miny, minx], [maxy, maxx]]
            else:
                # No bbox given: derive the map extent from the union of
                # the returned geometries' extents.
                mins = list()
                maxs = list()
                for ext in extents:
                    coords = numpy.array(ext[u'coordinates'])
                    mins.append(numpy.min(coords, axis=1))
                    maxs.append(numpy.max(coords, axis=1))
                ext_min, ext_max = list(numpy.min(mins, axis=1)), list(
                    numpy.max(maxs, axis=1))
                global_extent = [[ext_min[0][1], ext_min[0][0]],
                                 [ext_max[0][1], ext_max[0][0]]]

            content_type = 'text/html'
            results = j2_env.get_template('leaflet_map.html').render(
                title='Reference object: %s' % group_id,
                center='[%f, %f]' % (21.5, -102),
                zoomlevel=5,
                geojson=ujson.dumps(results['geojson']),
                label_attribute='ref_name',
                extent=ujson.dumps(global_extent))
        else:
            description = 'Unknown format given %s.' % (format)
            raise falcon.HTTPBadRequest('Reference',
                                        description,
                                        href='http://docs.example.com/auth')

        if can_zip_response(req.headers):
            resp.set_header('Content-Type', content_type)
            resp.set_header('Content-Encoding', 'gzip')
            if content_type == 'application/json':
                resp.body = compress_body(ujson.dumps(results))
            else:
                resp.body = compress_body(results)
        else:
            resp.set_header('Content-Type', content_type)
            if content_type == 'application/json':
                resp.body = ujson.dumps(results)
            else:
                resp.body = results
        resp.status = falcon.HTTP_200
Example #8
0
    def on_get(self, req, resp):
        """Handles GET requests: fuzzy search of reference objects by name.

        Requires an 'entity_name' query parameter. Collects substring
        matches first; when fewer than `results['fuzzyness']` are found,
        falls back to Levenshtein-based partial-ratio matching above THRES.
        Raises falcon.HTTPBadRequest when 'entity_name' is missing.
        """
        BASE_URL = get_base_url(req.url)

        start_time = time.time()
        results = dict()
        results['entities'] = list()
        results['fuzzyness'] = 5
        results['total_count'] = len(self.ref_objects)
        results['Levenshtein_distance_threshold'] = THRES
        results['total_groups'] = len(self.ref_groups)
        results['searched_groups'] = len(PRIORITY.keys())

        if 'entity_name' not in req.params.keys():
            description = 'entity_name not specified in query '
            raise falcon.HTTPBadRequest('KeyError',
                                        description,
                                        href='http://docs.example.com/auth')

        entity_name = req.params['entity_name']
        logger.info('[GET] /reference/search/count?entity_name+%s' %
                    (entity_name))
        counter = 0
        similarities = list()

        # A substring test subsumes the former prefix-match branch: both
        # branches appended the identical tuple, so one check suffices.
        needle = entity_name.lower()
        for ref_obj in self.ref_objects:
            if needle in ref_obj[0].lower():
                similarities.append((100 - PRIORITY[ref_obj.referencetype_id],
                                     ref_obj.ref_name,
                                     ref_obj.referencetype_id, ref_obj.ref_id))

        if len(similarities) < results['fuzzyness']:
            # Not enough exact matches: fall back to fuzzy matching.
            word_size = -1  # len(entity_name)
            for ref_obj in self.ref_objects:
                if ref_obj.ref_name not in [x[1] for x in similarities]:
                    sim = fuzz.partial_ratio(entity_name,
                                             ref_obj.ref_name[:word_size])
                    if sim > THRES:
                        similarities.append(
                            (sim - 1, ref_obj.ref_name,
                             ref_obj.referencetype_id, ref_obj.ref_id))
                        counter += 1

        # sorted(..., reverse=True) followed by .reverse() is simply an
        # ascending sort on the same key; do that directly.
        sorted_similarities = sorted(set(similarities),
                                     key=lambda tup: (-tup[0], tup[1]))

        for item in sorted_similarities:
            results['entities'].append({
                'entity_id':
                item[1],
                'distance':
                item[0],
                'reference_id':
                item[3],
                'resource_json':
                BASE_URL +
                self.router.reverse('reference_entity',
                                    group_id=self.ref_groups[item[2]].id,
                                    reference_id=item[3],
                                    format='json'),
                'resource_geojson':
                BASE_URL +
                self.router.reverse('reference_entity',
                                    group_id=self.ref_groups[item[2]].id,
                                    reference_id=item[3],
                                    format='geojson'),
                'resource_html':
                BASE_URL +
                self.router.reverse('reference_entity',
                                    group_id=self.ref_groups[item[2]].id,
                                    reference_id=item[3],
                                    format='html'),
                'entity_group': {
                    'group_id': self.ref_groups[item[2]].id,
                    'group_name': self.ref_groups[item[2]].name,
                    'group_description': self.ref_groups[item[2]].description,
                    'group_shortcut': self.ref_groups[item[2]].shortcut
                }
            })

        results['counter'] = counter
        results['processing_time'] = time.time() - start_time
        resp.status = falcon.HTTP_200

        if can_zip_response(req.headers):
            resp.set_header('Content-Type', 'application/json')
            resp.set_header('Content-Encoding', 'gzip')
            resp.body = compress_body(ujson.dumps(results))
        else:
            resp.set_header('Content-Type', 'application/json')
            resp.body = ujson.dumps(results)
from client.services.references import ReferenceSearcher, Reference
from client.services.root_service import RootResource
from client.services.sensors import Sensors
from api_logging import logger

# Module metadata: authorship and licensing information.
__author__ = "Thilo Wehrmann, Steffen Gebhardt"
__copyright__ = "Copyright 2016, EOSS GmbH"
__credits__ = ["Thilo Wehrmann", "Steffen Gebhardt"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Thilo Wehrmann"
__email__ = "*****@*****.**"
__status__ = "Production"

# main app
logger.info('Starting EOSS data catalog API')
# ReverseRouter enables named routes so handlers can rebuild resource URLs
# via router.reverse(name, ...).
my_router = ReverseRouter()
app = falcon.API(middleware=[cors.middleware, RequireJSON()], router=my_router)

# specify URL routes
app.add_route('/', RootResource(my_router), name='root')
app.add_route('/dataset/{entity_id}.json', Dataset(), name='dataset_entity')
# Both catalog search endpoints are served by the same CatalogApi resource.
app.add_route('/catalog/search/result.{format}',
              CatalogApi(my_router),
              name='catalog_result')
app.add_route('/catalog/search/count',
              CatalogApi(my_router),
              name='catalog_count')

app.add_route('/reference/search/count',
              ReferenceSearcher(my_router),