Example #1
def export_shape(dataset_name):
    """Route for the /shapes/<shapeset>/ endpoint. Requires a dataset argument
    and can apply column-specific filters to it.

    :param dataset_name: user-provided name of the target shapeset
    :returns: response object built from the result of _export_shape
    """
    # Find a way to work these into the validator; they shouldn't be out here.
    if dataset_name not in ShapeMetadata.tablenames():
        return make_error(dataset_name + ' not found.', 404)
    try:
        ShapeMetadata.get_by_dataset_name(dataset_name).shape_table
    except NoSuchTableError:
        return make_error(dataset_name + ' has yet to be ingested.', 404)

    meta_params = ('shape', 'data_type', 'location_geom__within', 'job')
    request_args = request.args.to_dict()

    # Using the 'shape' key triggers the correct validator.
    request_args['shape'] = dataset_name
    validated_args = validate(
        ExportFormatsValidator(only=meta_params),
        request_args
    )

    if validated_args.errors:
        return bad_request(validated_args.errors)
    elif validated_args.data.get('job'):
        return make_job_response('export-shape', validated_args)
    else:
        query = _export_shape(validated_args)
        shapeset = validated_args.data.get('shapeset')
        data_type = validated_args.data.get('data_type')
        return export_dataset_to_response(shapeset, data_type, query)
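
A hedged usage sketch for this route, assuming the blueprint registers it at
/shapes/<dataset_name>/ on the app built by create_app (as in the test setup
further down) and that 'json' is an accepted data_type; the dataset name and
the geojson_fragment variable are stand-ins:

client = create_app().test_client()

# Export a whole shapeset in the default format.
resp = client.get('/shapes/chicago_neighborhoods/')

# Export only the shapes that fall within a geometry, as JSON.
resp = client.get('/shapes/chicago_neighborhoods/',
                  query_string={'data_type': 'json',
                                'location_geom__within': geojson_fragment})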
Example #2
def export_shape(dataset_name):
    """Route for the /shapes/<shapeset>/ endpoint. Requires a dataset argument
    and can apply column-specific filters to it.

    :param dataset_name: user-provided name of the target shapeset
    :returns: response object built from the result of _export_shape
    """
    # Find a way to work these into the validator; they shouldn't be out here.
    if dataset_name not in ShapeMetadata.tablenames():
        return make_error(dataset_name + ' not found.', 404)
    try:
        ShapeMetadata.get_by_dataset_name(dataset_name).shape_table
    except NoSuchTableError:
        return make_error(dataset_name + ' has yet to be ingested.', 404)

    meta_params = ('shape', 'data_type', 'location_geom__within', 'job')
    request_args = request.args.to_dict()

    # Using the 'shape' key triggers the correct validator.
    request_args['shape'] = dataset_name
    validated_args = validate(ExportFormatsValidator(only=meta_params),
                              request_args)

    if validated_args.errors:
        return bad_request(validated_args.errors)
    elif validated_args.data.get('job'):
        return make_job_response('export-shape', validated_args)
    else:
        query = _export_shape(validated_args)
        shapeset = validated_args.data.get('shapeset')
        data_type = validated_args.data.get('data_type')
        return export_dataset_to_response(shapeset, data_type, query)
Example #3
    def test_delete_shape(self):
        # Can we remove a shape that's fully ingested?
        city_meta = postgres_session.query(ShapeMetadata).get(shape_fixtures['city'].table_name)
        self.assertIsNotNone(city_meta)
        city_meta.remove_table()
        postgres_session.commit()
        city_meta = postgres_session.query(ShapeMetadata).get(shape_fixtures['city'].table_name)
        self.assertIsNone(city_meta)

        # Can we remove a shape that's only in the metadata?
        dummy_meta = postgres_session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNotNone(dummy_meta)
        dummy_meta.remove_table()
        postgres_session.commit()
        dummy_meta = postgres_session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNone(dummy_meta)

        # Add them back to return to original test state
        ShapeTests.ingest_fixture(shape_fixtures['city'])
        ShapeMetadata.add(human_name='Dummy Name',
                          source_url=None,
                          update_freq='yearly',
                          approved_status=False)

        postgres_session.commit()
Example #4
    def test_delete_shape(self):
        # Can we remove a shape that's fully ingested?
        city_meta = postgres_session.query(ShapeMetadata).get(
            shape_fixtures['city'].table_name)
        self.assertIsNotNone(city_meta)
        city_meta.remove_table()
        postgres_session.commit()
        city_meta = postgres_session.query(ShapeMetadata).get(
            shape_fixtures['city'].table_name)
        self.assertIsNone(city_meta)

        # Can we remove a shape that's only in the metadata?
        dummy_meta = postgres_session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNotNone(dummy_meta)
        dummy_meta.remove_table()
        postgres_session.commit()
        dummy_meta = postgres_session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNone(dummy_meta)

        # Add them back to return to original test state
        ShapeTests.ingest_fixture(shape_fixtures['city'])
        ShapeMetadata.add(human_name='Dummy Name',
                          source_url=None,
                          update_freq='yearly',
                          approved_status=False)

        postgres_session.commit()
Example #5
    def ingest_fixture(fixture):
        # Add the fixture to the metadata first
        shape_meta = ShapeMetadata.add(caller_session=session,
                                       human_name=fixture.human_name,
                                       source_url=None)
        session.commit()
        # Bypass the celery task and call on a ShapeETL directly
        ShapeETL(meta=shape_meta, source_path=fixture.path).import_shapefile()
        return shape_meta
Example #6
def _export_dataset_to_response(shapeset, data_type, query=None):
    export_format = unicode(data_type).lower()

    # Make a filename that we can be reasonably sure is unique and not in use by anyone else.
    sacrifice_file = tempfile.NamedTemporaryFile()
    export_path = sacrifice_file.name
    sacrifice_file.close()  # Removes file from system.

    try:
        # Write to that filename.
        OgrExport(export_format, export_path, shapeset.name, query).write_file()
        # Dump it in the response.
        with open(export_path, 'r') as to_export:
            resp = make_response(to_export.read(), 200)

        extension = _shape_format_to_file_extension(export_format)

        # Make the downloaded filename look nice
        shapemeta = ShapeMetadata.get_by_dataset_name(shapeset.name)
        resp.headers['Content-Type'] = _shape_format_to_content_header(export_format)
        resp.headers['Content-Disposition'] = 'attachment; filename={}.{}'.format(shapemeta.human_name, extension)
        return resp

    except Exception as e:
        error_message = 'Failed to export shape dataset {}'.format(shapeset.name)
        print(repr(e))
        return make_response(error_message, 500)
    finally:
        # Don't leave that file hanging around.
        if os.path.isfile(export_path):
            os.remove(export_path)
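
The two helper functions referenced above are not shown on this page. A
plausible sketch of both, assuming the exporter's usual OGR targets; the
exact format names and mappings are assumptions:

def _shape_format_to_file_extension(export_format):
    # Hypothetical mapping; shapefiles would ship as zip archives.
    return {'json': 'json', 'kml': 'kml', 'shapefile': 'zip'}[export_format]


def _shape_format_to_content_header(export_format):
    # Hypothetical mapping to standard MIME types.
    return {'json': 'application/json',
            'kml': 'application/vnd.google-earth.kml+xml',
            'shapefile': 'application/zip'}[export_format]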
Example #7
File: shape.py Project: carhart/plenario
def get_all_shape_datasets():
    """
    Fetches metadata for every shape dataset in meta_shape
    """
    try:
        response_skeleton = {
                'meta': {
                    'status': 'ok',
                    'message': '',
                },
                'objects': []
            }

        geom = request.args.get('location_geom__within')
        if geom:
            geom = make_sql_ready_geom(geom)

        public_listing = ShapeMetadata.index(geom)
        response_skeleton['objects'] = public_listing
        status_code = 200

    except Exception as e:
        print(e.message)
        response_skeleton = {
            'meta': {
                'status': 'error',
                'message': '',
            }
        }
        status_code = 500

    resp = make_response(json.dumps(response_skeleton), status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #8
def get_all_shape_datasets():
    """
    Fetches metadata for every shape dataset in meta_shape
    """
    try:
        response_skeleton = {
                'meta': {
                    'status': 'ok',
                    'message': '',
                },
                'objects': []
            }

        geom = request.args.get('location_geom__within')
        if geom:
            geom = make_sql_ready_geom(geom)

        public_listing = ShapeMetadata.index(geom)
        response_skeleton['objects'] = public_listing
        status_code = 200

    except Exception as e:
        print(e.message)
        response_skeleton = {
            'meta': {
                'status': 'error',
                'message': '',
            }
        }
        status_code = 500

    resp = make_response(json.dumps(response_skeleton), status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #9
File: api.py Project: hectron/plenario
def get_all_shape_datasets():
    """
    Fetches metadata for every shape dataset in meta_shape
    """
    try:
        response_skeleton = {
                'meta': {
                    'status': 'ok',
                    'message': '',
                },
                'objects': []
            }

        public_listing = ShapeMetadata.index(caller_session=session)
        response_skeleton['objects'] = public_listing
        status_code = 200

    except Exception as e:
        print(e.message)
        response_skeleton = {
            'meta': {
                'status': 'error',
                'message': '',
            }
        }
        status_code = 500

    resp = make_response(json.dumps(response_skeleton), status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #10
    def ingest_fixture(fixture):
        # Add the fixture to the metadata first
        shape_meta = ShapeMetadata.add(human_name=fixture.human_name,
                                       source_url=None,
                                       update_freq=fixture.update_freq,
                                       approved_status=False)
        session.commit()
        # Bypass the celery task and call on a ShapeETL directly
        ShapeETL(meta=shape_meta, source_path=fixture.path).add()
        return shape_meta
Example #11
def view_datasets():
    datasets_pending = fetch_pending_tables(MetaTable)
    shapes_pending = fetch_pending_tables(ShapeMetadata)
    datasets = MetaTable.get_all_with_etl_status()
    shapesets = ShapeMetadata.get_all_with_etl_status()

    return render_template('admin/view-datasets.html',
                           datasets_pending=datasets_pending,
                           shapes_pending=shapes_pending,
                           datasets=datasets,
                           shape_datasets=shapesets)
Example #12
    def test_delete_shape(self):
        # Can we remove a shape that's fully ingested?
        city_meta = session.query(ShapeMetadata).get(fixtures["city"].table_name)
        self.assertIsNotNone(city_meta)
        city_meta.remove_table(caller_session=session)
        session.commit()
        city_meta = session.query(ShapeMetadata).get(fixtures["city"].table_name)
        self.assertIsNone(city_meta)

        # Can we remove a shape that's only in the metadata?
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNotNone(dummy_meta)
        dummy_meta.remove_table(caller_session=session)
        session.commit()
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNone(dummy_meta)

        # Add them back to return to original test state
        ShapeTests.ingest_fixture(fixtures["city"])
        ShapeMetadata.add(caller_session=session, human_name=u"Dummy Name", source_url=None)
        session.commit()
Example #13
def get_all_shape_datasets():
    """Fetches metadata for every shape dataset in meta_shape.
    """
    try:
        response_skeleton = {
            'meta': {
                'status': 'ok',
                'message': '',
            },
            'objects': []
        }

        geom = request.args.get('location_geom__within')
        simple_bbox = request.args.get('simple_bbox')

        if geom:
            geom = make_fragment_str(
                extract_first_geometry_fragment(geom)
            )

        if simple_bbox:
            public_listing = ShapeMetadata.simple_index(geom)
        else:
            public_listing = ShapeMetadata.index(geom)
        response_skeleton['objects'] = public_listing
        status_code = 200

    except Exception as e:
        response_skeleton = {
            'meta': {
                'status': 'error',
                'message': str(e),
            }
        }
        status_code = 500

    resp = make_response(json.dumps(response_skeleton, default=str), status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #14
def get_all_shape_datasets():
    """Fetches metadata for every shape dataset in meta_shape.
    """
    try:
        response_skeleton = {
            'meta': {
                'status': 'ok',
                'message': '',
            },
            'objects': []
        }

        geom = request.args.get('location_geom__within')
        simple_bbox = request.args.get('simple_bbox')

        if geom:
            geom = make_fragment_str(extract_first_geometry_fragment(geom))

        if simple_bbox:
            public_listing = ShapeMetadata.simple_index(geom)
        else:
            public_listing = ShapeMetadata.index(geom)
        response_skeleton['objects'] = public_listing
        status_code = 200

    except Exception as e:
        response_skeleton = {
            'meta': {
                'status': 'error',
                'message': str(e),
            }
        }
        status_code = 500

    resp = make_response(json.dumps(response_skeleton, default=str),
                         status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #15
def shape_meta_from_submit_form(form, is_approved):
    md = ShapeMetadata.add(
        human_name=form['dataset_name'],
        source_url=form['file_url'],
        view_url=form.get('view_url'),
        attribution=form.get('dataset_attribution'),
        description=form.get('dataset_description'),
        update_freq=form['update_frequency'],
        contributor_name=form['contributor_name'],
        contributor_organization=form.get('contributor_organization'),
        contributor_email=form['contributor_email'],
        approved_status=is_approved)
    postgres_session.commit()
    return md
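
The bracket lookups above make dataset_name, file_url, update_frequency,
contributor_name, and contributor_email mandatory (a missing key raises
KeyError), while the .get lookups tolerate absence. A hedged example call
with made-up values:

form = {
    # Required fields.
    'dataset_name': 'Chicago Parks',
    'file_url': 'https://example.com/parks.zip',
    'update_frequency': 'yearly',
    'contributor_name': 'Jane Doe',
    'contributor_email': 'jane@example.com',
    # Optional fields.
    'dataset_attribution': 'City of Chicago',
    'dataset_description': 'Park boundary polygons.',
}
md = shape_meta_from_submit_form(form, is_approved=False)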
Example #16
    def ingest_shapes(cls):
        fixtures = [f for k, f in shape_fixtures.items() if k != 'changed_neighborhoods']
        fixture_table_names = [f.table_name for f in fixtures]
        drop_tables(fixture_table_names)
        postgres_session.commit()

        for fixture in fixtures:
            cls.ingest_fixture(fixture)

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(human_name='Dummy Name',
                                           source_url=None,
                                           update_freq='yearly',
                                           approved_status=False).dataset_name
        postgres_session.commit()
Example #17
    def setUpClass(cls, shutdown=False):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful
        # to inspect them in the DB after running the tests.
        meta_table_names = ['meta_shape']
        fixture_table_names = [fixture.table_name for key, fixture in fixtures.iteritems()]

        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_meta()

        # Fully ingest the fixtures
        BasePlenarioTest.ingest_fixture(fixtures['city'])
        BasePlenarioTest.ingest_fixture(fixtures['streets'])
        BasePlenarioTest.ingest_fixture(fixtures['zips'])
        BasePlenarioTest.ingest_fixture(fixtures['neighborhoods'])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(human_name=u'Dummy Name',
                                           source_url=None,
                                           update_freq='yearly',
                                           approved_status=False).dataset_name
        session.commit()

        tables_to_drop = [
            'flu_shot_clinics',
            'landmarks',
            'crimes',
            'meta_master'
        ]
        drop_tables(tables_to_drop)

        init_meta()

        ingest_from_fixture(flu_shot_meta, flu_path)
        ingest_from_fixture(landmarks_meta, landmarks_path)
        ingest_from_fixture(crime_meta, crime_path)

        cls.app = create_app().test_client()

        '''/detail'''
Example #18
    def setUpClass(cls, shutdown=False):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful
        # to inspect them in the DB after running the tests.
        meta_table_names = ['meta_shape']
        fixture_table_names = [
            fixture.table_name for key, fixture in fixtures.iteritems()
        ]

        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_meta()

        # Fully ingest the fixtures
        BasePlenarioTest.ingest_fixture(fixtures['city'])
        BasePlenarioTest.ingest_fixture(fixtures['streets'])
        BasePlenarioTest.ingest_fixture(fixtures['zips'])
        BasePlenarioTest.ingest_fixture(fixtures['neighborhoods'])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(human_name=u'Dummy Name',
                                           source_url=None,
                                           update_freq='yearly',
                                           approved_status=False).dataset_name
        session.commit()

        tables_to_drop = [
            'flu_shot_clinics', 'landmarks', 'crimes', 'meta_master'
        ]
        drop_tables(tables_to_drop)

        init_meta()

        ingest_from_fixture(flu_shot_meta, flu_path)
        ingest_from_fixture(landmarks_meta, landmarks_path)
        ingest_from_fixture(crime_meta, crime_path)

        cls.app = create_app().test_client()
        '''/detail'''
Example #19
def view_datasets():
    datasets_pending = session.query(MetaTable).\
        filter(MetaTable.approved_status != True).\
        all()

    shapes_pending = session.query(ShapeMetadata).\
        filter(ShapeMetadata.approved_status != True).\
        all()

    try:
        q = text(''' 
            SELECT m.*, c.status, c.task_id
            FROM meta_master AS m 
            LEFT JOIN celery_taskmeta AS c 
              ON c.id = (
                SELECT id FROM celery_taskmeta 
                WHERE task_id = ANY(m.result_ids) 
                ORDER BY date_done DESC 
                LIMIT 1
              )
            WHERE m.approved_status = 'true'
        ''')
        with engine.begin() as c:
            datasets = list(c.execute(q))
    except NoSuchTableError:
        datasets = session.query(MetaTable)\
            .filter(MetaTable.approved_status == True)\
            .all()

    try:
        shape_datasets = ShapeMetadata.get_all_with_etl_status()
    except NoSuchTableError:
        # If we can't find shape metadata, soldier on.
        shape_datasets = None

    return render_template('admin/view-datasets.html',
                           datasets_pending=datasets_pending,
                           shapes_pending=shapes_pending,
                           datasets=datasets,
                           shape_datasets=shape_datasets)
Example #20
    def setUpClass(cls):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful to inspect them in the DB after running the tests.
        meta_table_names = ["dat_master", "meta_shape", "meta_master", "plenario_user"]
        fixture_table_names = [fixture.table_name for key, fixture in fixtures.iteritems()]
        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_master_meta_user()

        # Fully ingest the fixtures
        ShapeTests.ingest_fixture(fixtures["city"])
        ShapeTests.ingest_fixture(fixtures["streets"])
        ShapeTests.ingest_fixture(fixtures["zips"])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(
            caller_session=session, human_name=u"Dummy Name", source_url=None
        ).dataset_name
        session.commit()

        cls.app = create_app().test_client()
Example #21
def export_dataset_to_response(shapeset, data_type, query=None):
    export_format = str(data_type).lower()

    # Make a filename that we can be reasonably sure is unique and not in use by anyone else.
    sacrifice_file = tempfile.NamedTemporaryFile()
    export_path = sacrifice_file.name
    sacrifice_file.close()  # Removes file from system.

    try:
        # Write to that filename.
        OgrExport(export_format, export_path, shapeset.name,
                  query).write_file()
        # Dump it in the response.
        with open(export_path, 'rb') as to_export:
            resp = make_response(to_export.read(), 200)

        extension = _shape_format_to_file_extension(export_format)

        # Make the downloaded filename look nice
        shapemeta = ShapeMetadata.get_by_dataset_name(shapeset.name)
        resp.headers['Content-Type'] = _shape_format_to_content_header(
            export_format)
        resp.headers[
            'Content-Disposition'] = "attachment; filename='{}.{}'".format(
                shapemeta.human_name, extension)
        return resp

    except Exception as e:
        error_message = 'Failed to export shape dataset {}'.format(
            shapeset.name)
        print(repr(e))
        return make_response(error_message, 500)
    finally:
        # Don't leave that file hanging around.
        if os.path.isfile(export_path):
            os.remove(export_path)
Example #22
    def __init__(self, human_name, file_name):
        self.human_name = human_name
        self.table_name = ShapeMetadata.make_table_name(human_name)
        self.path = os.path.join(FIXTURE_PATH, file_name)
        self.update_freq = 'yearly'
Example #23
def shape_status():
    table_name = request.args['dataset_name']
    shape_meta = ShapeMetadata.get_metadata_with_etl_result(table_name)
    return render_template('admin/shape-status.html', shape=shape_meta)
Example #24
def shape_already_submitted(name):
    shape = ShapeMetadata.get_by_human_name(name)
    print(shape, shape is not None)

    return shape is not None
Example #25
# Many methods in response.py rely on information that used to be provided
# by the old ParamValidator attributes. This namedtuple carries that same
# info around, and lets me avoid rewriting any response code.

ValidatorResult = namedtuple('ValidatorResult', 'data errors warnings')
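# Since ValidatorResult is a namedtuple, its fields also unpack positionally
# (data, errors, warnings = result), which keeps old call sites working.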


# converters
# ==========
# Callables which are used to convert request arguments to their correct types.

converters = {
    'agg': str,
    'buffer': int,
    'dataset': lambda x: MetaTable.get_by_dataset_name(x).point_table,
    'shapeset': lambda x: ShapeMetadata.get_by_dataset_name(x).shape_table,
    'data_type': str,
    'shape': lambda x: ShapeMetadata.get_by_dataset_name(x).shape_table,
    'dataset_name__in': lambda x: x.split(','),
    'date__time_of_day_ge': int,
    'date__time_of_day_le': int,
    'obs_date__ge': lambda x: parser.parse(x).date(),
    'obs_date__le': lambda x: parser.parse(x).date(),
    'date': lambda x: parser.parse(x).date(),
    'point_date': lambda x: parser.parse(x),
    'offset': int,
    'resolution': int,
    'geom': lambda x: make_fragment_str(extract_first_geometry_fragment(x)),
}
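
The validate function in the next example hands its validated data to a
convert helper that applies this table; that helper is not shown on this
page. A minimal sketch of what it plausibly does (the in-place mutation is
implied by the caller, the error handling is an assumption):

def convert(request_args):
    """Swap validated string arguments for their true types, in place."""
    for name, value in request_args.items():
        converter = converters.get(name)
        if converter is None:
            continue
        try:
            request_args[name] = converter(value)
        # NoSuchTableError comes from sqlalchemy.exc.
        except (AttributeError, NoSuchTableError, ValueError):
            # Leave the raw value alone; downstream checks will report it.
            pass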

Example #26
def validate(validator, request_args):
    """Validate a dictionary of arguments. Substitute defaults for any
    missing fields unless explicitly told to do otherwise.

    :param validator: what kind of validator to use
    :param request_args: dictionary of arguments from a request object

    :returns: ValidatorResult namedtuple"""

    args = request_args.copy()

    # For validator dataset_name__in... need to find a better way to
    # make it play nice with the validator.
    if args.get('dataset_name__in'):
        args['dataset_name__in'] = args['dataset_name__in'].split(',')

    # This first validation step covers conditions that are dataset
    # agnostic. These are values that apply to all datasets
    # (ex. obs_date), or that concern the format of the response
    # (ex. limit, datatype, offset).

    # If there are errors, fail quickly and return.
    result = validator.load(args)
    if result.errors:
        return result

    # If all arguments are valid, fill in validator defaults.
    result = validator.dump(result.data)

    # Certain values will be dumped as strings. This conversion
    # turns them back into their corresponding types (ex. Table).
    convert(result.data)

    # Holds messages concerning unnecessary parameters. These can be either
    # junk parameters, or redundant column parameters if a tree filter was
    # used.
    warnings = []

    # At this point validation splits. We can either validate tree-style column
    # arguments or validate them individually. We don't do both.

    # Determine unchecked parameters provided in the request.
    unchecked = set(args.keys()) - set(validator.fields.keys())

    # If tree filters were provided, ignore ALL unchecked parameters that are
    # not tree filters or response format information.
    if has_tree_filters(request_args):

        for key in request_args:
            value = args[key]
            if 'filter' in key:
                # This pattern matches the last occurrence of the '__' pattern.
                # Prevents an error that is caused by dataset names with trailing
                # underscores.
                t_name = re.split(r'__(?!_)', key)[0]

                # Report a filter which specifies a non-existent tree.
                try:
                    table = MetaTable.get_by_dataset_name(t_name).point_table
                except (AttributeError, NoSuchTableError):
                    try:
                        table = ShapeMetadata.get_by_dataset_name(t_name).shape_table
                    except (AttributeError, NoSuchTableError):
                        result.errors[t_name] = "Table name {} could not be found.".format(t_name)
                        return result

                # Report a tree which causes the JSON parser to fail.
                # Or a tree whose value is not valid.
                try:
                    cond_tree = json.loads(value)
                    if valid_tree(table, cond_tree):
                        result.data[key] = cond_tree
                except (ValueError, KeyError) as err:
                    result.errors[t_name] = "Bad tree: {} -- causes error {}.".format(value, err)
                    return result

            # These keys just have to do with the formatting of the JSON response.
            # We keep these values around even if they have no effect on a condition
            # tree.
            elif key in {'geom', 'offset', 'limit', 'agg', 'obs_date__le', 'obs_date__ge'}:
                pass

            # These keys are also ones that should be passed over when searching for
            # unused params. They are used, just in different forms later on, so no need
            # to report them.
            elif key in {'shape', 'dataset_name', 'dataset_name__in'}:
                pass

            # If the key is not a filter, and not used to format JSON, report
            # that we ignored it.
            else:
                warnings.append("Unused parameter {}; you cannot specify both "
                                "column and filter arguments.".format(key))

    # If no tree filters were provided, see if any of the unchecked parameters
    # are usable as column conditions.
    else:
        try:
            table = result.data['dataset']
        except KeyError:
            table = result.data.get('shapeset')
        for param in unchecked:
            field = param.split('__')[0]
            if table is not None:
                try:
                    valid_column_condition(table, field, args[param])
                    result.data[param] = args[param]
                except KeyError:
                    warnings.append('Unused parameter value "{}={}"'.format(param, args[param]))
                    warnings.append('{} is not a valid column for {}'.format(param, table))
                except ValueError:
                    warnings.append('Unused parameter value "{}={}"'.format(param, args[param]))
                    warnings.append('{} is not a valid value for {}'.format(args[param], param))

    # ValidatorResult(dict, dict, list)
    return ValidatorResult(result.data, result.errors, warnings)
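
A hedged usage sketch, mirroring how Example #1 drives this function; the
validator class and the argument values are stand-ins borrowed from that
example:

request_args = {'shape': 'chicago_neighborhoods', 'data_type': 'json'}
validated = validate(ExportFormatsValidator(only=('shape', 'data_type')),
                     request_args)
if validated.errors:
    print(validated.errors)  # field-level failures; fail fast
else:
    shapeset = validated.data.get('shapeset')  # a Table, via convert()
    for warning in validated.warnings:  # junk or redundant parameters
        print(warning)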
Example #27
def shape_already_submitted(name):
    shape = ShapeMetadata.get_by_human_name(name)
    return shape is not None