Code Example #1
File: shape.py Project: UrbanCCD-UChicago/plenario
def export_shape(dataset_name):
    """Route for /shapes/<shapeset>/ endpoint. Requires a dataset argument
    and can apply column specific filters to it.

    :param dataset_name: user provided name of target shapeset
    :returns: response object result of _export_shape
    """
    # Find a way to work these checks into the validator; they shouldn't be out here.
    if dataset_name not in ShapeMetadata.tablenames():
        return make_error(dataset_name + ' not found.', 404)
    try:
        # Accessing shape_table raises NoSuchTableError if the shapeset
        # has not been ingested yet.
        ShapeMetadata.get_by_dataset_name(dataset_name).shape_table
    except NoSuchTableError:
        return make_error(dataset_name + ' has yet to be ingested.', 404)

    meta_params = ('shape', 'data_type', 'location_geom__within', 'job')
    request_args = request.args.to_dict()

    # Using the 'shape' key triggers the correct validator.
    request_args['shape'] = dataset_name
    validated_args = validate(
        ExportFormatsValidator(only=meta_params),
        request_args
    )

    if validated_args.errors:
        return bad_request(validated_args.errors)
    elif validated_args.data.get('job'):
        return make_job_response('export-shape', validated_args)
    else:
        query = _export_shape(validated_args)
        shapeset = validated_args.data.get('shapeset')
        data_type = validated_args.data.get('data_type')
        return export_dataset_to_response(shapeset, data_type, query)
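
For context, here is a minimal sketch of how a view like this might be wired up in Flask. The blueprint name and URL prefix are assumptions for illustration, not taken from the project:

from flask import Blueprint

# Hypothetical blueprint; plenario's actual routing setup may differ.
api = Blueprint('api', __name__, url_prefix='/v1/api')
api.add_url_rule('/shapes/<dataset_name>/', 'export_shape', export_shape)

# A request such as GET /v1/api/shapes/chicago_neighborhoods/?data_type=json
# (dataset name invented for the example) then invokes
# export_shape('chicago_neighborhoods').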
Code Example #2
File: shape.py Project: gitter-badger/plenario
def _export_dataset_to_response(shapeset, data_type, query=None):
    export_format = unicode.lower(unicode(data_type))

    # Make a filename that we are reasonably sure to be unique and not occupied by anyone else.
    sacrifice_file = tempfile.NamedTemporaryFile()
    export_path = sacrifice_file.name
    sacrifice_file.close()  # Removes file from system.

    try:
        # Write to that filename.
        OgrExport(export_format, export_path, shapeset.name, query).write_file()
        # Dump it in the response. Open in binary mode so non-text
        # formats (ex. zipped shapefiles) arrive intact.
        with open(export_path, 'rb') as to_export:
            resp = make_response(to_export.read(), 200)

        extension = _shape_format_to_file_extension(export_format)

        # Make the downloaded filename look nice
        shapemeta = ShapeMetadata.get_by_dataset_name(shapeset.name)
        resp.headers['Content-Type'] = _shape_format_to_content_header(export_format)
        resp.headers['Content-Disposition'] = 'attachment; filename="{}.{}"'.format(shapemeta.human_name, extension)
        return resp

    except Exception as e:
        error_message = 'Failed to export shape dataset {}'.format(shapeset.name)
        print repr(e)
        return make_response(error_message, 500)
    finally:
        # Don't leave that file hanging around.
        if os.path.isfile(export_path):
            os.remove(export_path)
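
Neither _shape_format_to_file_extension nor _shape_format_to_content_header appears on this page. A plausible minimal sketch, with format names and mappings assumed rather than taken from the project:

_FILE_EXTENSIONS = {'json': 'json', 'kml': 'kml', 'shapefile': 'zip'}
_CONTENT_HEADERS = {
    'json': 'application/json',
    'kml': 'application/vnd.google-earth.kml+xml',
    'shapefile': 'application/zip',  # shapefiles ship as zip archives
}


def _shape_format_to_file_extension(export_format):
    return _FILE_EXTENSIONS[export_format]


def _shape_format_to_content_header(export_format):
    return _CONTENT_HEADERS[export_format]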
Code Example #3
def export_dataset_to_response(shapeset, data_type, query=None):
    export_format = str(data_type).lower()

    # Make a filename that we are reasonably sure to be unique and not occupied by anyone else.
    sacrifice_file = tempfile.NamedTemporaryFile()
    export_path = sacrifice_file.name
    sacrifice_file.close()  # Removes file from system.

    try:
        # Write to that filename.
        OgrExport(export_format, export_path, shapeset.name,
                  query).write_file()
        # Dump it in the response.
        with open(export_path, 'rb') as to_export:
            resp = make_response(to_export.read(), 200)

        extension = _shape_format_to_file_extension(export_format)

        # Make the downloaded filename look nice
        shapemeta = ShapeMetadata.get_by_dataset_name(shapeset.name)
        resp.headers['Content-Type'] = _shape_format_to_content_header(export_format)
        resp.headers['Content-Disposition'] = 'attachment; filename="{}.{}"'.format(
            shapemeta.human_name, extension)
        return resp

    except Exception as e:
        error_message = 'Failed to export shape dataset {}'.format(
            shapeset.name)
        print(repr(e))
        return make_response(error_message, 500)
    finally:
        # Don't leave that file hanging around.
        if os.path.isfile(export_path):
            os.remove(export_path)
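
One caveat about the NamedTemporaryFile trick above: closing the file deletes it, so another process could in principle claim the same name before OgrExport writes to it. A sketch of a race-free alternative using tempfile.mkstemp (illustrative only, not what the project does):

import os
import tempfile

# mkstemp creates the file atomically and leaves it on disk, so the
# name stays reserved until we remove it ourselves.
fd, export_path = tempfile.mkstemp()
os.close(fd)  # OgrExport reopens the path on its own

try:
    OgrExport(export_format, export_path, shapeset.name, query).write_file()
    with open(export_path, 'rb') as to_export:
        payload = to_export.read()
finally:
    os.remove(export_path)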
Code Example #4
File: validator.py Project: gitter-badger/plenario
def validate(validator, request_args):
    """Validate a dictionary of arguments. Substitute all missing fields with
    defaults if not explicitly told to do otherwise.

    :param validator: validator instance to apply to the arguments
    :param request_args: dictionary of arguments from a request object

    :returns: ValidatorResult namedtuple"""

    args = request_args.copy()

    # dataset_name__in arrives as a comma-separated string; split it here
    # until the validator can handle that conversion itself.
    if args.get('dataset_name__in'):
        args['dataset_name__in'] = args['dataset_name__in'].split(',')

    # This first validation step covers conditions that are dataset
    # agnostic: values that apply to all datasets (ex. obs_date) or
    # that concern the format of the response (ex. limit, data_type,
    # offset).

    # If there are errors, fail quickly and return.
    result = validator.load(args)
    if result.errors:
        return result

    # If all arguments are valid, fill in validator defaults.
    result = validator.dump(result.data)

    # Certain values will be dumped as strings. This conversion turns
    # them back into their corresponding types (ex. a table name string
    # becomes a Table object).
    convert(result.data)

    # Holds messages concerning unnecessary parameters. These can be either
    # junk parameters, or redundant column parameters if a tree filter was
    # used.
    warnings = []

    # At this point validation splits. We can either validate tree-style column
    # arguments or validate them individually. We don't do both.

    # Determine unchecked parameters provided in the request.
    unchecked = set(args.keys()) - set(validator.fields.keys())

    # If tree filters were provided, ignore ALL unchecked parameters that are
    # not tree filters or response format information.
    if has_tree_filters(request_args):

        for key in request_args:
            value = args[key]
            if 'filter' in key:
                # This pattern matches the last '__' in a run of
                # underscores, which prevents dataset names with trailing
                # underscores from being split in the wrong place.
                t_name = re.split(r'__(?!_)', key)[0]

                # Report a filter that specifies a nonexistent dataset.
                try:
                    table = MetaTable.get_by_dataset_name(t_name).point_table
                except (AttributeError, NoSuchTableError):
                    try:
                        table = ShapeMetadata.get_by_dataset_name(t_name).shape_table
                    except (AttributeError, NoSuchTableError):
                        result.errors[t_name] = "Table name {} could not be found.".format(t_name)
                        return result

                # Report a tree that the JSON parser cannot handle, or
                # one whose conditions are not valid.
                try:
                    cond_tree = json.loads(value)
                    if valid_tree(table, cond_tree):
                        result.data[key] = cond_tree
                except (ValueError, KeyError) as err:
                    result.errors[t_name] = "Bad tree: {} -- causes error {}.".format(value, err)
                    return result

            # These keys just have to do with the formatting of the JSON response.
            # We keep these values around even if they have no effect on a condition
            # tree.
            elif key in {'geom', 'offset', 'limit', 'agg', 'obs_date__le', 'obs_date__ge'}:
                pass

            # These keys should also be passed over when searching for
            # unused params. They are used later on, just in different
            # forms, so there is no need to report them.
            elif key in {'shape', 'dataset_name', 'dataset_name__in'}:
                pass

            # If the key is not a filter, and not used to format JSON, report
            # that we ignored it.
            else:
                warnings.append("Unused parameter {}, you cannot specify both "
                                "column and filter arguments.".format(key))

    # If no tree filters were provided, see if any of the unchecked parameters
    # are usable as column conditions.
    else:
        try:
            table = result.data['dataset']
        except KeyError:
            table = result.data.get('shapeset')
        for param in unchecked:
            field = param.split('__')[0]
            if table is not None:
                try:
                    valid_column_condition(table, field, args[param])
                    result.data[param] = args[param]
                except KeyError:
                    warnings.append('Unused parameter value "{}={}"'.format(param, args[param]))
                    warnings.append('{} is not a valid column for {}'.format(param, table))
                except ValueError:
                    warnings.append('Unused parameter value "{}={}"'.format(param, args[param]))
                    warnings.append('{} is not a valid value for {}'.format(args[param], param))

    # ValidatorResult(dict, dict, list)
    return ValidatorResult(result.data, result.errors, warnings)
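
A short sketch of how validate might be called end to end. The validator class and argument values here are invented for illustration:

# Hypothetical request arguments, already flattened to a plain dict.
request_args = {
    'dataset_name': 'crimes',        # assumed dataset name
    'obs_date__ge': '2016-01-01',
    'limit': '100',
}

result = validate(SomeValidator(), request_args)  # SomeValidator is assumed

if result.errors:
    print(result.errors)    # ex. {'obs_date__ge': ['Not a valid date.']}
else:
    print(result.data)      # converted values: dates, Tables, ints
    print(result.warnings)  # notes about ignored or unused parameters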
Code Example #5
File: validator.py Project: gitter-badger/plenario
# Many methods in response.py rely on information that used to be provided
# by the old ParamValidator attributes. This namedtuple carries that same
# info around, so none of the response code has to be rewritten.

ValidatorResult = namedtuple('ValidatorResult', 'data errors warnings')


# converters
# ==========
# Callables which are used to convert request arguments to their correct types.

converters = {
    'agg': str,
    'buffer': int,
    'dataset': lambda x: MetaTable.get_by_dataset_name(x).point_table,
    'shapeset': lambda x: ShapeMetadata.get_by_dataset_name(x).shape_table,
    'data_type': str,
    'shape': lambda x: ShapeMetadata.get_by_dataset_name(x).shape_table,
    'dataset_name__in': lambda x: x.split(','),
    'date__time_of_day_ge': int,
    'date__time_of_day_le': int,
    'obs_date__ge': lambda x: parser.parse(x).date(),
    'obs_date__le': lambda x: parser.parse(x).date(),
    'date': lambda x: parser.parse(x).date(),
    'point_date': lambda x: parser.parse(x),
    'offset': int,
    'resolution': int,
    'geom': lambda x: make_fragment_str(extract_first_geometry_fragment(x)),
}
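
The convert function called from validate in Example #4 does not appear on this page. A minimal sketch of how it might apply this table, inferred from the call site rather than taken from the project:

def convert(data):
    """Coerce raw string arguments in place using the converters table."""
    for key, value in list(data.items()):
        converter = converters.get(key)
        if converter is None or not isinstance(value, str):
            continue
        try:
            data[key] = converter(value)
        except (ValueError, TypeError):
            # Leave values the converter cannot handle untouched.
            pass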