Example #1
    def setUpClass(cls, shutdown=False):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful
        # to inspect them in the DB after running the tests.
        meta_table_names = ['meta_shape']
        fixture_table_names = [fixture.table_name for key, fixture in fixtures.iteritems()]

        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_meta()

        # Fully ingest the fixtures
        BasePlenarioTest.ingest_fixture(fixtures['city'])
        BasePlenarioTest.ingest_fixture(fixtures['streets'])
        BasePlenarioTest.ingest_fixture(fixtures['zips'])
        BasePlenarioTest.ingest_fixture(fixtures['neighborhoods'])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(human_name=u'Dummy Name',
                                           source_url=None,
                                           update_freq='yearly',
                                           approved_status=False).dataset_name
        session.commit()

        tables_to_drop = [
            'flu_shot_clinics',
            'landmarks',
            'crimes',
            'meta_master'
        ]
        drop_tables(tables_to_drop)

        init_meta()

        ingest_from_fixture(flu_shot_meta, flu_path)
        ingest_from_fixture(landmarks_meta, landmarks_path)
        ingest_from_fixture(crime_meta, crime_path)

        cls.app = create_app().test_client()

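A test method built on this fixture might look like the sketch below. It is only an illustration: the '/v1/api/shapes/' endpoint, the 'objects' key in the response, and a module-level json import are assumptions, not taken from the setup code above.

    def test_dummy_shape_not_listed(self):
        # The dummy dataset was added with approved_status=False, so a
        # natural expectation is that it is absent from the public shape
        # listing. Endpoint path and response shape are assumed.
        resp = self.app.get('/v1/api/shapes/')
        data = json.loads(resp.data)
        listed = [obj['dataset_name'] for obj in data['objects']]
        self.assertNotIn(self.dummy_name, listed)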
Example #3
    def setUpClass(cls):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful to inspect them in the DB after running the tests.
        meta_table_names = ["dat_master", "meta_shape", "meta_master", "plenario_user"]
        fixture_table_names = [fixture.table_name for key, fixture in fixtures.iteritems()]
        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_master_meta_user()

        # Fully ingest the fixtures
        ShapeTests.ingest_fixture(fixtures["city"])
        ShapeTests.ingest_fixture(fixtures["streets"])
        ShapeTests.ingest_fixture(fixtures["zips"])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(
            caller_session=session, human_name=u"Dummy Name", source_url=None
        ).dataset_name
        session.commit()

        cls.app = create_app().test_client()
Example #4
    def setUpClass(cls):
        cls.app = create_app()
        cls.test_client = cls.app.test_client()
Example #5
import logging
import os

from plenario import create_app
from plenario.update import create_worker
logging.basicConfig()

# Serve the background worker when the WORKER environment variable is set;
# otherwise serve the regular web application.
if os.environ.get('WORKER'):
    application = create_worker()
else:
    application = create_app()

if __name__ == "__main__":
    # Run the development server in debug mode only when DEBUG is set.
    should_run_debug = os.environ.get('DEBUG') is not None
    application.run(debug=should_run_debug)
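
One subtlety here is that the WORKER check runs at import time, so the environment variable has to be set before this module is imported. A minimal sketch of exercising that behavior, assuming the file above is saved as wsgi.py (the module name is an assumption):

import os

os.environ['WORKER'] = '1'    # any non-empty value selects the worker app

import wsgi                   # assumed module name; the check runs on import

wsgi.application.run()        # serves the worker application locally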
Example #6
from plenario import create_app
app = create_app()

if __name__ == "__main__":
    app.run(debug=True, port=5001)
Example #7
    def setUpClass(cls):
        # Assume there exists a test database with postgis at the connection string specified in test_settings.py
        tables_to_drop = [
            "census_blocks",
            "dat_flu_shot_clinic_locations",
            "dat_master",
            "meta_master",
            "meta_shape",
            "plenario_user",
        ]
        drop_tables(tables_to_drop)

        # Create meta, master, user tables
        init_master_meta_user()

        # Ingest the census blocks
        init_census()

        # TODO: support local ingest of csv
        # For now, ingest Chicago's csv of 2013 flu shot locations from the data portal.
        # It's a nice little Chicago dataset that won't change.

        # So, adding the dataset to meta_table happens in view.py.
        # I don't want to mock out a whole response object with form data and such,
        # so here's a janky way.
        url = "https://data.cityofchicago.org/api/views/g5vx-5vqf/rows.csv?accessType=DOWNLOAD"
        url_hash = md5(url).hexdigest()

        d = {
            "dataset_name": u"flu_shot_clinic_locations",
            "human_name": u"flu_shot_clinic_locations",
            "attribution": u"foo",
            "description": u"bar",
            "source_url": url,
            "source_url_hash": url_hash,
            "update_freq": "yearly",
            "business_key": u"Event",
            "observed_date": u"Date",
            "latitude": u"Latitude",
            "longitude": u"Longitude",
            "location": u"Location",
            "contributor_name": u"Frederick Mcgillicutty",
            "contributor_organization": u"StrexCorp",
            "contributor_email": u"*****@*****.**",
            "contributed_data_types": None,
            "approved_status": True,
            "is_socrata_source": False,
        }

        # add this to meta_master
        md = MetaTable(**d)
        session.add(md)
        session.commit()

        meta = {
            "dataset_name": u"flu_shot_clinic_locations",
            "source_url": url,
            "business_key": u"Event",
            "observed_date": u"Date",
            "latitude": u"Latitude",
            "longitude": u"Longitude",
            "location": u"Location",
            "source_url_hash": url_hash,
        }
        point_etl = PlenarioETL(meta)
        point_etl.add()

        cls.app = create_app().test_client()
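
A follow-up test against this fixture might query the ingested flu shot dataset through the API. The sketch below is hypothetical: the '/v1/api/detail/' route and its query parameters are assumptions, not part of the setup above.

    def test_flu_clinic_detail_query(self):
        # Hypothetical check that the ingested dataset is reachable through
        # the API; route and query-string parameters are assumed.
        query = ('/v1/api/detail/?dataset_name=flu_shot_clinic_locations'
                 '&obs_date__ge=2013-01-01')
        resp = self.app.get(query)
        self.assertEqual(resp.status_code, 200)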
Example #8
    def setUp(self):
        self.app = create_app()
        self.test_client = self.app.test_client()
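
Since this setUp only builds the application and its test client, a minimal companion test could exercise the client directly. The '/' route below is an assumption for illustration.

    def test_app_responds(self):
        # Smoke test: a correctly built app should answer a request to the
        # root route without a server error. The route itself is assumed.
        response = self.test_client.get('/')
        self.assertLess(response.status_code, 500)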