Example #1
def export_shape(dataset_name):
    """Route for /shapes/<shapeset>/ endpoint. Requires a dataset argument
    and can apply column-specific filters to it.

    :param dataset_name: user provided name of target shapeset
    :returns: response object result of _export_shape
    """
    # TODO: work these checks into the validator; they shouldn't live out here.
    if dataset_name not in ShapeMetadata.tablenames():
        return make_error(dataset_name + ' not found.', 404)
    try:
        # The attribute access raises NoSuchTableError if the shape table
        # does not exist yet.
        ShapeMetadata.get_by_dataset_name(dataset_name).shape_table
    except NoSuchTableError:
        return make_error(dataset_name + ' has yet to be ingested.', 404)

    meta_params = ('shape', 'data_type', 'location_geom__within', 'job')
    request_args = request.args.to_dict()

    # Using the 'shape' key triggers the correct validator.
    request_args['shape'] = dataset_name
    validated_args = validate(ExportFormatsValidator(only=meta_params),
                              request_args)

    if validated_args.errors:
        return bad_request(validated_args.errors)
    elif validated_args.data.get('job'):
        return make_job_response('export-shape', validated_args)
    else:
        query = _export_shape(validated_args)
        shapeset = validated_args.data.get('shapeset')
        data_type = validated_args.data.get('data_type')
        return export_dataset_to_response(shapeset, data_type, query)
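
All of these handlers share one contract: validate() takes a marshmallow-style schema plus the raw request arguments, and returns a result exposing .data and .errors. A minimal sketch of that contract, assuming a marshmallow 2.x-style schema (the names below are illustrative, not necessarily the project's actual helper):

from collections import namedtuple

# Hypothetical stand-in for the project's validation helper.
ValidatorResult = namedtuple('ValidatorResult', ['data', 'errors'])

def validate(validator, request_args):
    """Deserialize raw request args with the given schema and bundle the
    outcome so callers can check .errors before trusting .data."""
    result = validator.load(request_args)  # marshmallow 2.x returns (data, errors)
    return ValidatorResult(result.data, result.errors)
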
Example #2
def meta():
    fields = ('obs_date__le', 'obs_date__ge', 'dataset_name', 'location_geom__within')
    validated_args = validate(NoDefaultDatesValidator(only=fields), request.args.to_dict())
    if validated_args.errors:
        return bad_request(validated_args.errors)

    return _meta(validated_args)
Example #3
def detail_aggregate():
    fields = ('location_geom__within', 'dataset_name', 'agg', 'obs_date__ge',
              'obs_date__le', 'data_type')
    validator = NoGeoJSONDatasetRequiredValidator(only=fields)
    validated_args = validate(validator, request.args.to_dict())
    if validated_args.errors:
        return bad_request(validated_args.errors)
    return _detail_aggregate(validated_args)
Example #4
def grid():
    fields = ('dataset_name', 'resolution', 'buffer', 'obs_date__le', 'obs_date__ge',
              'location_geom__within')
    validated_args = validate(DatasetRequiredValidator(only=fields), request.args.to_dict())
    if validated_args.errors:
        return bad_request(validated_args.errors)

    return _grid(validated_args)
Example #5
def timeseries():
    fields = ('location_geom__within', 'dataset_name', 'dataset_name__in',
              'agg', 'obs_date__ge', 'obs_date__le', 'data_type')
    validator = NoGeoJSONValidator(only=fields)
    validated_args = validate(validator, request.args.to_dict())
    if validated_args.errors:
        return bad_request(validated_args.errors)

    return _timeseries(validated_args)
Example #6
def detail():
    fields = ('location_geom__within', 'dataset_name', 'shape', 'obs_date__ge',
              'obs_date__le', 'data_type', 'offset', 'date__time_of_day_ge',
              'date__time_of_day_le', 'limit')
    validator = DatasetRequiredValidator(only=fields)
    validated_args = validate(validator, request.args.to_dict())
    if validated_args.errors:
        return bad_request(validated_args.errors)

    return _detail(validated_args)
Example #7
def meta():
    fields = ('obs_date__le', 'obs_date__ge', 'dataset_name', 'location_geom__within', 'job')
    validator_result = validate(NoDefaultDatesValidator(only=fields), request.args.to_dict())

    if validator_result.errors:
        return api_response.bad_request(validator_result.errors)

    if validator_result.data.get('job'):
        return make_job_response('meta', validator_result)
    else:
        result_data = _meta(validator_result)
        return api_response.meta_response(result_data, validator_result)
Example #8
def aggregate_point_data(point_dataset_name, polygon_dataset_name):

    params = request.args.copy()
    # A user-supplied ?shape= query parameter takes precedence over the
    # path-derived polygon dataset. Is that intended?
    if not params.get('shape'):
        # form_detail_query expects to get info about a shape dataset this way.
        params['shape'] = polygon_dataset_name
    params['dataset_name'] = point_dataset_name

    args = validate(DatasetRequiredValidator(), params)
    if args.errors:
        return bad_request(args.errors)

    # Apply standard filters to point dataset
    # And join each point to the containing shape
    q = form_detail_sql_query(args, True)
    q = q.add_columns(func.count(args.data['dataset'].c.hash))

    # Filter the points to those within the provided geometry, if any
    geom = args.data['geom']
    dataset = args.data['dataset']
    if geom:
        intersection = dataset.c.geom.ST_Within(
            func.ST_GeomFromGeoJSON(geom)
        )
        q = q.filter(intersection)

    # Page in RESPONSE_LIMIT chunks
    # This seems contradictory. Don't we want one row per shape, no matter what?
    offset = args.data['offset']
    q = q.limit(RESPONSE_LIMIT)
    if offset > 0:
        q = q.offset(offset)

    res_cols = []
    columns = [str(col) for col in args.data['dataset'].columns]
    columns += [str(col) for col in args.data['shape'].columns]
    for col in columns:
        parts = col.split('.')
        if parts[0] == polygon_dataset_name:
            res_cols.append(parts[1])
    res_cols.append('count')

    rows = [OrderedDict(zip(res_cols, res)) for res in q.all()]
    if params.get('data_type') == 'csv':
        resp = form_csv_detail_response(['hash', 'ogc_fid'], rows)
    else:
        resp = form_geojson_detail_response(['hash', 'ogc_fid'], args, rows)

    return resp
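
A worked illustration of the column filtering above, with made-up table names: the fully qualified 'table.column' strings are reduced to just the shape table's column names, and the aggregate 'count' is appended.

columns = ['crimes.id', 'crimes.geom', 'neighborhoods.name', 'neighborhoods.geom']
polygon_dataset_name = 'neighborhoods'
res_cols = [c.split('.')[1] for c in columns
            if c.split('.')[0] == polygon_dataset_name]
res_cols.append('count')
# res_cols == ['name', 'geom', 'count']
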
Example #9
def aggregate_point_data(point_dataset_name, polygon_dataset_name):
    consider = ('dataset_name', 'shape', 'obs_date__ge', 'obs_date__le',
                'data_type', 'location_geom__within')

    request_args = request.args.to_dict()
    request_args['dataset_name'] = point_dataset_name
    request_args['shape'] = polygon_dataset_name

    validated_args = validate(Validator(only=consider), request_args)

    if validated_args.errors:
        return bad_request(validated_args.errors)

    return _aggregate_point_data(validated_args)
Example #10
def detail_aggregate():
    fields = ('location_geom__within', 'dataset_name', 'agg', 'obs_date__ge',
              'obs_date__le', 'data_type', 'job')
    validator = NoGeoJSONDatasetRequiredValidator(only=fields)
    validator_result = validate(validator, request.args.to_dict())

    if validator_result.errors:
        return api_response.bad_request(validator_result.errors)

    if validator_result.data.get('job'):
        return make_job_response('detail-aggregate', validator_result)
    else:
        time_counts = _detail_aggregate(validator_result)
        return api_response.detail_aggregate_response(time_counts, validator_result)
Example #11
def dataset_fields(dataset_name):
    request_args = request.args.to_dict()
    request_args['dataset_name'] = dataset_name
    fields = ('obs_date__le', 'obs_date__ge', 'dataset_name', 'job')
    validator = DatasetRequiredValidator(only=fields)
    validator_result = validate(validator, request_args)

    if validator_result.errors:
        return api_response.bad_request(validator_result.errors)

    if validator_result.data.get('job'):
        return make_job_response('fields', validator_result)
    else:
        result_data = _meta(validator_result)
        return api_response.fields_response(result_data, validator_result)
Example #12
def detail():
    fields = ('location_geom__within', 'dataset_name', 'shape', 'obs_date__ge',
              'obs_date__le', 'data_type', 'offset', 'date__time_of_day_ge',
              'date__time_of_day_le', 'limit', 'job')
    validator = DatasetRequiredValidator(only=fields)
    validator_result = validate(validator, request.args.to_dict())

    if validator_result.errors:
        return api_response.bad_request(validator_result.errors)

    if validator_result.data.get('job'):
        return make_job_response('detail', validator_result)
    else:
        result_rows = _detail(validator_result)
        return api_response.detail_response(result_rows, validator_result)
Example #13
def dataset_fields(dataset_name):
    request_args = request.args.to_dict()
    request_args['dataset_name'] = dataset_name
    fields = ('obs_date__le', 'obs_date__ge', 'dataset_name')
    validator = DatasetRequiredValidator(only=fields)
    validated_args = validate(validator, request_args)
    if validated_args.errors:
        return bad_request(validated_args.errors)

    response = _meta(validated_args)

    # API defines column values to be in the 'objects' list.
    resp_dict = json.loads(response.data)
    resp_dict['objects'] = resp_dict['objects'][0]['columns']
    response.data = json.dumps(resp_dict)
    return response
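
For clarity, a hypothetical before/after of the reshaping above, assuming _meta() returns the usual envelope with an 'objects' list (field names invented for illustration):

before = {'objects': [{'dataset_name': 'crimes',
                       'columns': [{'field_name': 'id', 'field_type': 'INTEGER'}]}]}
after = dict(before, objects=before['objects'][0]['columns'])
# after['objects'] == [{'field_name': 'id', 'field_type': 'INTEGER'}]
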
Example #14
def aggregate_point_data(point_dataset_name, polygon_dataset_name):
    consider = ('dataset_name', 'shape', 'obs_date__ge', 'obs_date__le',
                'data_type', 'location_geom__within', 'job')

    request_args = request.args.to_dict()
    request_args['dataset_name'] = point_dataset_name
    request_args['shape'] = polygon_dataset_name

    validated_args = validate(Validator(only=consider), request_args)

    if validated_args.errors:
        return bad_request(validated_args.errors)
    elif validated_args.data.get('job'):
        return make_job_response('aggregate-point-data', validated_args)
    else:
        result = _aggregate_point_data(validated_args)
        data_type = validated_args.data.get('data_type')
        return aggregate_point_data_response(
            data_type, result, [polygon_dataset_name, point_dataset_name])
Example #15
def datadump_view():
    fields = ('location_geom__within', 'dataset_name', 'shape', 'obs_date__ge',
              'obs_date__le', 'offset', 'date__time_of_day_ge',
              'date__time_of_day_le', 'limit', 'job', 'data_type')

    validator = DatasetRequiredValidator(only=fields)
    validator_result = validate(validator, request.args.to_dict())

    if validator_result.errors:
        return api_response.error(validator_result.errors, 400)

    stream = datadump(**validator_result.data)

    dataset = validator_result.data['dataset'].name
    fmt = validator_result.data['data_type']
    content_disposition = 'attachment; filename={}.{}'.format(dataset, fmt)

    attachment = Response(stream_with_context(stream), mimetype='text/%s' % fmt)
    attachment.headers['Content-Disposition'] = content_disposition
    return attachment
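
This view depends on datadump() being a generator that yields the dump incrementally. A self-contained sketch of the same streaming pattern with a stand-in generator (the route and row contents are assumptions, not the original API):

from flask import Flask, Response, stream_with_context

app = Flask(__name__)

@app.route('/stream-demo')
def stream_demo():
    def generate():
        # Stand-in for datadump(): yield the payload one chunk at a time.
        yield 'id,name\n'
        for i in range(3):
            yield '{},row-{}\n'.format(i, i)
    # stream_with_context keeps the request context alive while the
    # response generator is being consumed.
    return Response(stream_with_context(generate()), mimetype='text/csv')
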
Example #16
def grid():

    fields = (
        'dataset',
        'dataset_name',
        'resolution',
        'buffer',
        'obs_date__le',
        'obs_date__ge',
        'location_geom__within',
    )

    validator = PointsetRequiredValidator(only=fields)
    validator_result = validate(validator, request.args)
    if validator_result.errors:
        return api_response.bad_request(validator_result.errors)

    results = _grid(validator_result)

    query = validator.dumps(validator_result.data)
    query = json.loads(query.data)
    results['properties'] = query
    return jsonify(results)
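
The dumps()/json.loads() round-trip at the end only makes sense under marshmallow 2.x, where dumps() returns a result object whose .data attribute is a JSON string. A toy illustration under that assumption:

import json
from marshmallow import Schema, fields

class DemoSchema(Schema):
    resolution = fields.Integer()

result = DemoSchema().dumps({'resolution': 500})  # marshmallow 2.x MarshalResult
properties = json.loads(result.data)              # {'resolution': 500}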