Example #1
def form_csv_detail_response(to_remove, rows, dataset_names=None):
    to_remove.append('geom')
    remove_columns_from_dict(rows, to_remove)

    if not rows:
        csv_resp = [['Sorry! Your query did not return any results.']]
        csv_resp += [['Try to modify your date or location parameters.']]
    else:
        # Column headers from arbitrary row,
        # then the values from all the others
        csv_resp = [list(rows[0].keys())] + [list(row.values()) for row in rows]

    resp = make_response(make_csv(csv_resp), 200)

    dname = request.args.get('dataset_name')
    # For queries where the dataset name is not provided as a query argument
    # (ex. shapes/<shapeset>/<dataset>), the dataset names can be manually
    # assigned.
    if dname is None:
        dname = '_and_'.join(dataset_names)

    filedate = datetime.now().strftime('%Y-%m-%d')
    resp.headers['Content-Type'] = 'text/csv'
    resp.headers['Content-Disposition'] = 'attachment; filename=%s_%s.csv' % (
        dname, filedate)
    return resp
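All of these examples build their response body with a make_csv helper whose definition is not included in the snippets. A minimal sketch, assuming it simply serializes an iterable of rows with the standard csv module (the project's real helper may differ):

import csv
from io import StringIO

def make_csv(rows):
    # Hypothetical stand-in for the make_csv helper used above:
    # write each row of an iterable of iterables into one CSV string.
    buf = StringIO()
    writer = csv.writer(buf)
    for row in rows:
        writer.writerow(row)
    return buf.getvalue()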
Example #2
def detail_aggregate():
    raw_query_params = request.args.copy()
    # First, make sure name of dataset was provided...
    try:
        dataset_name = raw_query_params.pop('dataset_name')
    except KeyError:
        return bad_request("'dataset_name' is required")

    # and that we have that dataset.
    try:
        validator = ParamValidator(dataset_name)
    except NoSuchTableError:
        return bad_request("Cannot find dataset named {}".format(dataset_name))

    validator\
        .set_optional('obs_date__ge', date_validator, datetime.now() - timedelta(days=90))\
        .set_optional('obs_date__le', date_validator, datetime.now())\
        .set_optional('location_geom__within', geom_validator, None)\
        .set_optional('data_type', make_format_validator(['json', 'csv']), 'json')\
        .set_optional('agg', agg_validator, 'week')

    # If any optional parameters are malformed, we're better off bailing and telling the user
    # than using a default and confusing them.
    err = validator.validate(raw_query_params)
    if err:
        return bad_request(err)

    start_date = validator.vals['obs_date__ge']
    end_date = validator.vals['obs_date__le']
    agg = validator.vals['agg']
    geom = validator.get_geom()
    dataset = MetaTable.get_by_dataset_name(dataset_name)

    try:
        ts = dataset.timeseries_one(agg_unit=agg,
                                    start=start_date,
                                    end=end_date,
                                    geom=geom,
                                    column_filters=validator.conditions)
    except Exception as e:
        return internal_error('Failed to construct timeseries', e)

    datatype = validator.vals['data_type']
    if datatype == 'json':
        time_counts = [{'count': c, 'datetime': d} for c, d in ts[1:]]
        resp = json_response_base(validator, time_counts)
        resp['count'] = sum([c['count'] for c in time_counts])
        resp = make_response(json.dumps(resp, default=dthandler), 200)
        resp.headers['Content-Type'] = 'application/json'

    elif datatype == 'csv':
        resp = make_response(make_csv(ts), 200)
        resp.headers['Content-Type'] = 'text/csv'
        filedate = datetime.now().strftime('%Y-%m-%d')
        resp.headers['Content-Disposition'] = 'attachment; filename=%s.csv' % filedate

    return resp
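The JSON branch above passes a dthandler callable as the default argument of json.dumps so that datetime values survive serialization. Its definition is not shown; a minimal sketch, assuming it only needs to cover date and datetime objects:

from datetime import date, datetime

def dthandler(obj):
    # Hypothetical JSON fallback: json.dumps calls this only for
    # objects it cannot serialize itself; render dates as ISO 8601.
    if isinstance(obj, (datetime, date)):
        return obj.isoformat()
    raise TypeError('%r is not JSON serializable' % obj)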
Example #3
def form_csv_detail_response(to_remove, rows):
    to_remove.append('geom')
    remove_columns_from_dict(rows, to_remove)

    # Column headers from arbitrary row,
    # then the values from all the others.
    # Note: this variant assumes at least one result row; rows[0] raises
    # IndexError on an empty result set (see Example #5 for the guard).
    csv_resp = [list(rows[0].keys())] + [list(row.values()) for row in rows]
    resp = make_response(make_csv(csv_resp), 200)
    dname = request.args.get('dataset_name')
    filedate = datetime.now().strftime('%Y-%m-%d')
    resp.headers['Content-Type'] = 'text/csv'
    resp.headers['Content-Disposition'] = 'attachment; filename=%s_%s.csv' % (dname, filedate)
    return resp
Example #4
def form_csv_detail_response(to_remove, validator, rows):
    to_remove.append('geom')
    remove_columns_from_dict(rows, to_remove)

    # Column headers from arbitrary row,
    # then the values from all the others
    csv_resp = [list(rows[0].keys())] + [list(row.values()) for row in rows]
    resp = make_response(make_csv(csv_resp), 200)
    dname = validator.dataset.name  # dataset name comes from the validator rather than request.args
    filedate = datetime.now().strftime('%Y-%m-%d')
    resp.headers['Content-Type'] = 'text/csv'
    resp.headers['Content-Disposition'] = 'attachment; filename=%s_%s.csv' % (
        dname, filedate)
    return resp
Example #5
def form_csv_detail_response(to_remove, rows):
    to_remove.append('geom')
    remove_columns_from_dict(rows, to_remove)

    if not rows:
        csv_resp = [["Sorry! Your query didn't return any results."]]
        csv_resp += [["Try to modify your date or location parameters."]]
    else:
        # Column headers from arbitrary row,
        # then the values from all the others
        csv_resp = [list(rows[0].keys())] + [list(row.values()) for row in rows]

    resp = make_response(make_csv(csv_resp), 200)
    dname = request.args.get('dataset_name')
    filedate = datetime.now().strftime('%Y-%m-%d')
    resp.headers['Content-Type'] = 'text/csv'
    resp.headers['Content-Disposition'] = 'attachment; filename=%s_%s.csv' % (dname, filedate)
    return resp
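Every form_csv_detail_response variant first strips unwanted columns (always including geom) with remove_columns_from_dict, which is also not defined in these snippets. A minimal sketch, assuming rows is a list of dicts that the helper mutates in place:

def remove_columns_from_dict(rows, to_remove):
    # Hypothetical stand-in: drop the named keys from every row dict,
    # ignoring keys that are absent. Mutates the rows in place.
    for row in rows:
        for column in to_remove:
            row.pop(column, None)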
Example #6
def timeseries():
    validator = ParamValidator()\
        .set_optional('agg', agg_validator, 'week')\
        .set_optional('data_type', make_format_validator(['json', 'csv']), 'json')\
        .set_optional('dataset_name__in', list_of_datasets_validator, MetaTable.index)\
        .set_optional('obs_date__ge', date_validator, datetime.now() - timedelta(days=90))\
        .set_optional('obs_date__le', date_validator, datetime.now())\
        .set_optional('location_geom__within', geom_validator, None)\
        .set_optional('buffer', int_validator, 100)

    err = validator.validate(request.args)
    if err:
        return bad_request(err)

    geom = validator.get_geom()
    table_names = validator.vals['dataset_name__in']
    start_date = validator.vals['obs_date__ge']
    end_date = validator.vals['obs_date__le']
    agg = validator.vals['agg']

    # Only examine tables that have a chance of containing records within the date and space boundaries.
    try:
        table_names = MetaTable.narrow_candidates(table_names, start_date,
                                                  end_date, geom)
    except Exception as e:
        msg = 'Failed to gather candidate tables.'
        return internal_error(msg, e)

    try:
        panel = MetaTable.timeseries_all(table_names=table_names,
                                         agg_unit=agg,
                                         start=start_date,
                                         end=end_date,
                                         geom=geom)
    except Exception as e:
        msg = 'Failed to construct timeseries.'
        return internal_error(msg, e)

    panel = MetaTable.attach_metadata(panel)
    resp = json_response_base(validator, panel)

    datatype = validator.vals['data_type']
    if datatype == 'json':
        resp = make_response(json.dumps(resp, default=dthandler), 200)
        resp.headers['Content-Type'] = 'application/json'
    elif datatype == 'csv':

        # response format
        # temporal_group,dataset_name_1,dataset_name_2
        # 2014-02-24 00:00:00,235,653
        # 2014-03-03 00:00:00,156,624

        fields = ['temporal_group']
        for o in resp['objects']:
            fields.append(o['dataset_name'])

        csv_resp = []
        i = 0
        for k, g in groupby(resp['objects'], key=itemgetter('dataset_name')):
            # groupby yields one group per dataset_name; take its single object
            l_g = list(g)[0]

            j = 0
            for row in l_g['items']:
                # first iteration, populate the first column with temporal_groups
                if i == 0:
                    csv_resp.append([row['datetime']])
                csv_resp[j].append(row['count'])
                j += 1
            i += 1

        csv_resp.insert(0, fields)
        csv_resp = make_csv(csv_resp)
        resp = make_response(csv_resp, 200)
        resp.headers['Content-Type'] = 'text/csv'
        filedate = datetime.now().strftime('%Y-%m-%d')
        resp.headers['Content-Disposition'] = 'attachment; filename=%s.csv' % filedate
    return resp
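The nested loop in the CSV branch pivots one series of counts per dataset into one row per temporal group, matching the response format sketched in the comment. The same reshaping on toy data (the dataset names here are invented for illustration; the counts are taken from that comment) can be written with zip:

objects = [
    {'dataset_name': 'crimes', 'items': [
        {'datetime': '2014-02-24 00:00:00', 'count': 235},
        {'datetime': '2014-03-03 00:00:00', 'count': 156}]},
    {'dataset_name': 'permits', 'items': [
        {'datetime': '2014-02-24 00:00:00', 'count': 653},
        {'datetime': '2014-03-03 00:00:00', 'count': 624}]},
]

fields = ['temporal_group'] + [o['dataset_name'] for o in objects]
columns = [[row['count'] for row in o['items']] for o in objects]
dates = [row['datetime'] for row in objects[0]['items']]
table = [fields] + [[d] + list(counts)
                    for d, counts in zip(dates, zip(*columns))]
# table == [['temporal_group', 'crimes', 'permits'],
#           ['2014-02-24 00:00:00', 235, 653],
#           ['2014-03-03 00:00:00', 156, 624]]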