Example #1
def search(**kwargs):
    bbox = kwargs.get('bbox')
    to_date = kwargs.get('to_date')
    from_date = kwargs.get('from_date')
    exclude_gridded = 'exclude_gridded' in kwargs.get('options', [])

    if not bbox:
        raise ValidationError({
            'error': 'Required argument: bbox'})

    bbox_area = bbox.area() * SQKM_PER_SQM

    if bbox_area > settings.BIGCZ_MAX_AREA:
        raise ValidationError({
            'error': 'The selected area of interest with a bounding box of {} '
                     'km² is larger than the currently supported maximum size '
                     'of {} km².'.format(round(bbox_area, 2),
                                          settings.BIGCZ_MAX_AREA)})

    world = BBox(-180, -90, 180, 90)

    services = get_services_in_box(world)
    networkIDs = filter_networkIDs(services, exclude_gridded)
    series = get_series_catalog_in_box(bbox, from_date, to_date, networkIDs)
    series = group_series_by_location(series)
    results = sorted(parse_records(series, services),
                     key=attrgetter('end_date'),
                     reverse=True)

    return ResourceList(
        api_url=None,
        catalog=CATALOG_NAME,
        count=len(results),
        results=results)
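
For context, a hypothetical call to the variant above might look like the sketch below. The BBox constructor arguments follow the BBox(-180, -90, 180, 90) usage in the example itself; the concrete coordinates, the use of datetime objects for from_date/to_date, and reading count off the returned ResourceList are illustrative assumptions, not part of the original code.

from datetime import datetime

# Hypothetical area of interest (xmin, ymin, xmax, ymax) and date range.
results = search(
    bbox=BBox(-76.0, 39.5, -74.5, 40.5),
    from_date=datetime(2020, 1, 1),
    to_date=datetime(2020, 12, 31),
    options=['exclude_gridded'],  # filtered out via filter_networkIDs
)
print(results.count)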

Example #2
def search(**kwargs):
    query = kwargs.get('query')
    to_date = kwargs.get('to_date')
    from_date = kwargs.get('from_date')
    bbox = kwargs.get('bbox')

    params = {'f': 'json'}

    if query:
        params.update({'q': query})
    if from_date:
        params.update({'time': prepare_time(from_date, to_date)})
    if bbox:
        params.update({'bbox': prepare_bbox(bbox)})

    try:
        response = requests.get(GEOPORTAL_URL,
                                timeout=settings.BIGCZ_CLIENT_TIMEOUT,
                                params=params)
    except requests.Timeout:
        raise RequestTimedOutError()

    data = response.json()

    if 'hits' not in data:
        raise ValueError(data)

    results = data['hits']['hits']
    count = data['hits']['total']

    return ResourceList(api_url=response.url,
                        catalog=CATALOG_NAME,
                        count=count,
                        results=[parse_record(item) for item in results])

Example #3
def search(**kwargs):
    bbox = kwargs.get('bbox')
    to_date = kwargs.get('to_date')
    from_date = kwargs.get('from_date')
    exclude_gridded = 'exclude_gridded' in kwargs.get('options', [])

    if not bbox:
        raise ValidationError({
            'error': 'Required argument: bbox'})

    world = BBox(-180, -90, 180, 90)

    services = get_services_in_box(world)
    networkIDs = filter_networkIDs(services, exclude_gridded)
    series = get_series_catalog_in_box(bbox, from_date, to_date, networkIDs)
    series = group_series_by_location(series)
    results = sorted(parse_records(series, services),
                     key=attrgetter('end_date'),
                     reverse=True)

    return ResourceList(
        api_url=None,
        catalog=CATALOG_NAME,
        count=len(results),
        results=results)

Example #4
def search(**kwargs):
    bbox = kwargs.get('bbox')
    # Currently not being used
    # to_date = kwargs.get('to_date')
    # from_date = kwargs.get('from_date')

    if not bbox:
        raise ValidationError({'error': 'Required argument: bbox'})

    bbox_area = bbox.area() * SQKM_PER_SQM

    if bbox_area > USGS_MAX_SIZE_SQKM:
        raise ValidationError({
            'error':
            'The selected area of interest with a bounding box of '
            f'{round(bbox_area, 2)} km² is larger than the currently '
            f'supported maximum size of {USGS_MAX_SIZE_SQKM} km².'
        })

    params = {
        # bBox might be used in the future
        # 'bBox': '{0:.3f},{1:.3f},{2:.3f},{3:.3f}'.format(bbox.xmin, bbox.ymin, bbox.xmax, bbox.ymax),  # NOQA
        'huc': unique_huc12s_in(kwargs.get('geojson')),
        'mimeType': 'csv',
        'sorted': 'no',
        'minresults': '1',
        'zip': 'yes'
    }

    try:
        response = requests.get(CATALOG_URL,
                                timeout=settings.BIGCZ_CLIENT_TIMEOUT,
                                params=params)
        with ZipFile(BytesIO(response.content)) as z:
            data = read_unicode_csv(z.open(z.filelist[0].filename))
    except requests.Timeout:
        raise RequestTimedOutError()

    if not data:
        raise ValueError('Could not fetch data from USGS WQP portal.')

    results = [parse_record(row) for row in data]

    return ResourceList(api_url=response.url,
                        catalog=CATALOG_NAME,
                        count=len(results),
                        results=results)
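
The zipped-CSV handling above can be isolated as a small standalone sketch: the response body is wrapped in BytesIO, the archive is opened in memory, and its first member is read as a file-like object. The helper name and the UTF-8 decoding here are assumptions for illustration; the example itself delegates parsing to read_unicode_csv.

from io import BytesIO
from zipfile import ZipFile

def first_member_lines(zip_bytes):
    """Return the lines of the first file inside an in-memory ZIP archive."""
    with ZipFile(BytesIO(zip_bytes)) as z:
        first_name = z.filelist[0].filename
        with z.open(first_name) as f:
            return f.read().decode('utf-8').splitlines()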

Example #5
def search(**kwargs):
    query = kwargs.get('query')
    to_date = kwargs.get('to_date')
    from_date = kwargs.get('from_date')
    bbox = kwargs.get('bbox')
    page = kwargs.get('page')

    params = {
        'f': 'json',
        'size': PAGE_SIZE,
    }

    if query:
        params.update({'q': prepare_query(query.lower())})
    if from_date or to_date:
        params.update({'time': prepare_time(from_date, to_date)})
    if bbox:
        params.update({'bbox': prepare_bbox(bbox)})
    if page:
        params.update({
            # page 1 is from 1, page 2 from 101, page 3 from 201, ...
            'from': PAGE_SIZE * (page - 1) + 1
        })

    try:
        response = requests.get(CATALOG_URL,
                                timeout=settings.BIGCZ_CLIENT_TIMEOUT,
                                params=params)
    except requests.Timeout:
        raise RequestTimedOutError()

    data = response.json()

    if 'results' not in data:
        raise UnexpectedResponseError()

    results = data['results']
    count = data['total']

    return ResourceList(api_url=response.url,
                        catalog=CATALOG_NAME,
                        count=count,
                        results=[parse_record(item) for item in results])
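
The 'from' offset in this paginated request follows the arithmetic spelled out in the inline comment. A minimal check of that arithmetic, assuming PAGE_SIZE = 100 as the comment implies:

PAGE_SIZE = 100  # assumed from the "page 1 is from 1, page 2 from 101" comment

def page_offset(page, page_size=PAGE_SIZE):
    # 1-based start index sent as the 'from' parameter for the requested page.
    return page_size * (page - 1) + 1

assert [page_offset(p) for p in (1, 2, 3)] == [1, 101, 201]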

Example #6
def search(**kwargs):
    query = kwargs.get('query')
    to_date = kwargs.get('to_date')
    from_date = kwargs.get('from_date')
    bbox = kwargs.get('bbox')

    if not query:
        raise ValidationError({'error': 'Required argument: query'})

    params = {
        'full_text_search': query,
    }

    if to_date:
        params.update({'to_date': prepare_date(to_date)})
    if from_date:
        params.update({'from_date': prepare_date(from_date)})
    if bbox:
        params.update(prepare_bbox(bbox))

    try:
        response = requests.get(HYDROSHARE_URL,
                                timeout=settings.BIGCZ_CLIENT_TIMEOUT,
                                params=params)
    except requests.Timeout:
        raise RequestTimedOutError()

    data = response.json()

    if 'results' not in data:
        raise ValueError(data)

    results = data['results']
    count = data['count']

    return ResourceList(api_url=response.url,
                        catalog=CATALOG_NAME,
                        count=count,
                        results=[parse_record(item) for item in results])

Example #7
def search(**kwargs):
    bbox = kwargs.get('bbox')
    to_date = kwargs.get('to_date')
    from_date = kwargs.get('from_date')

    if not bbox:
        raise ValidationError({
            'error': 'Required argument: bbox'})

    box = BBox(bbox)
    world = BBox('-180,-90,180,90')

    series = get_series_catalog_in_box(box, from_date, to_date)
    series = group_series_by_location(series)
    services = get_services_in_box(world)
    results = parse_records(series, services)

    return ResourceList(
        api_url=None,
        catalog=CATALOG_NAME,
        count=len(results),
        results=results)

Example #8
def search(**kwargs):
    query = kwargs.get('query')
    to_date = kwargs.get('to_date')
    from_date = kwargs.get('from_date')
    bbox = kwargs.get('bbox')
    page = kwargs.get('page')
    exclude_private = 'exclude_private' in kwargs.get('options', [])

    if not query:
        raise ValidationError({'error': 'Required argument: query'})

    params = {
        'full_text_search': query,
    }

    if bbox:
        params.update(prepare_bbox(bbox))
        params.update({'coverage_type': 'box'})
    if page:
        params.update({'page': page})

    session = Session()
    request = session.prepare_request(
        Request('GET', CATALOG_URL, params=params))

    key = f'bigcz_hydroshare_{hash(frozenset(params.items()))}'
    cached = cache.get(key)
    if cached:
        data = cached

    else:
        try:
            response = session.send(request,
                                    timeout=settings.BIGCZ_CLIENT_TIMEOUT)
            data = response.json()
            cache.set(key, data, timeout=1800)  # Cache for half an hour
        except Timeout:
            raise RequestTimedOutError()

    if 'results' not in data:
        raise ValueError(data)

    items = data['results']
    if exclude_private:
        items = [item for item in items if item['public']]

    records = [parse_record(item) for item in items]
    # Include only those with geometries
    records = [r for r in records if r.geom]

    if from_date:
        records = [
            r for r in records
            if r.end_date and r.end_date >= make_aware(from_date)
        ]

    if to_date:
        records = [
            r for r in records
            if r.begin_date and r.begin_date <= make_aware(to_date)
        ]

    results = sorted(records,
                     key=nullable_attrgetter('end_date', DATE_MIN),
                     reverse=True)
    count = data['count']

    return ResourceList(api_url=request.url,
                        catalog=CATALOG_NAME,
                        count=count,
                        results=results)
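
The cache key in this last variant is derived from a hash of the request parameters, so identical queries reuse the cached JSON for up to 1800 seconds. A small sketch of that derivation with made-up parameter values is below; one observation not stated in the example is that string hashing is randomized per Python process unless PYTHONHASHSEED is fixed, so the key is only stable within a single process.

params = {'full_text_search': 'nitrate', 'coverage_type': 'box', 'page': 2}

# Order-insensitive key: frozenset(params.items()) hashes the same regardless
# of insertion order, so equivalent queries map to the same cache entry.
key = f'bigcz_hydroshare_{hash(frozenset(params.items()))}'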