Example #1
def update_meta(metatable, table):
    """
    After ingest/update, update the metatable registry to reflect table information.

    :param metatable: MetaTable instance to update.
    :param table: Table instance to update from.

    :returns: None
    """

    metatable.update_date_added()

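    # Record the observed date range (earliest and latest point_date) for the table.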
    metatable.obs_from, metatable.obs_to = session.query(
        func.min(table.c.point_date),
        func.max(table.c.point_date)
    ).first()

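    # Compute the dataset's bounding box in WGS 84 (SRID 4326).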
    metatable.bbox = session.query(
        func.ST_SetSRID(
            func.ST_Envelope(func.ST_Union(table.c.geom)),
            4326
        )
    ).first()[0]

    session.add(metatable)

    try:
        session.commit()
    except:
        session.rollback()
        raise
Example #2
File: views.py  Project: EzanLTD/plenario
def approve_dataset(source_url_hash):
    # get the MetaTable row and change the approved_status and bounce back to view-datasets.

    meta = session.query(MetaTable).get(source_url_hash)

    json_data_types = None
    if ((not meta.is_socrata_source) and meta.contributed_data_types):
        json_data_types = json.loads(meta.contributed_data_types)

    add_dataset_task.delay(source_url_hash, data_types=json_data_types)

    upd = {'approved_status': 'true'}

    meta.approved_status = 'true'
    session.commit()

    # Email the user who submitted that their dataset has been approved.
    # email the response to somebody

    msg_body = """Hello %s,\r\n
\r\n
Your dataset has been approved and added to Plenar.io:\r\n
\r\n
%s\r\n
\r\n
It should appear on http://plenar.io within 24 hours.\r\n
\r\n
Thank you!\r\n
The Plenario Team\r\n
http://plenar.io""" % (meta.contributor_name, meta.human_name)

    send_mail(subject="Your dataset has been added to Plenar.io",
              recipient=meta.contributor_email,
              body=msg_body)
Example #3
def point_meta_from_submit_form(form, is_approved):
    column_names, labels = form_columns(form)
    name = slugify(form['dataset_name'], delim=u'_')[:50]

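    # Build the MetaTable record from the submitted form fields and persist it.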
    md = MetaTable(
        url=form['file_url'],
        view_url=form.get('view_url'),
        dataset_name=name,
        human_name=form['dataset_name'],
        attribution=form.get('dataset_attribution'),
        description=form.get('dataset_description'),
        update_freq=form['update_frequency'],
        contributor_name=form['contributor_name'],
        contributor_organization=form.get('contributor_organization'),
        contributor_email=form['contributor_email'],
        approved_status=is_approved,
        observed_date=labels['observed_date'],
        latitude=labels.get('latitude', None),
        longitude=labels.get('longitude', None),
        location=labels.get('location', None),
        column_names=column_names
    )
    session.add(md)
    session.commit()
    return md
Example #4
def approve_dataset(source_url_hash):
    # get the MetaTable row and change the approved_status and bounce back to view-datasets.

    meta = session.query(MetaTable).get(source_url_hash)

    json_data_types = None
    if ((not meta.is_socrata_source) and meta.contributed_data_types):
        json_data_types = json.loads(meta.contributed_data_types)
        
    add_dataset_task.delay(source_url_hash, data_types=json_data_types)
    
    upd = { 'approved_status': 'true' }

    meta.approved_status = 'true'
    session.commit()

    # Email the user who submitted that their dataset has been approved.
    # email the response to somebody

    msg_body = """Hello %s,\r\n
\r\n
Your dataset has been approved and added to Plenar.io:\r\n
\r\n
%s\r\n
\r\n
It should appear on http://plenar.io within 24 hours.\r\n
\r\n
Thank you!\r\n
The Plenario Team\r\n
http://plenar.io""" % (meta.contributor_name, meta.human_name)

    send_mail(subject="Your dataset has been added to Plenar.io", 
        recipient=meta.contributor_email, body=msg_body)
Example #5
def edit_shape(dataset_name):
    form = EditShapeForm()
    meta = session.query(ShapeMetadata).get(dataset_name)

    if form.validate_on_submit():
        upd = {
            'human_name': form.human_name.data,
            'description': form.description.data,
            'attribution': form.attribution.data,
            'update_freq': form.update_freq.data,
        }
        session.query(ShapeMetadata)\
            .filter(ShapeMetadata.dataset_name == meta.dataset_name)\
            .update(upd)
        session.commit()

        if not meta.approved_status:
            approve_shape(dataset_name)

        flash('%s updated successfully!' % meta.human_name, 'success')
        return redirect(url_for('views.view_datasets'))
    else:
        pass

    context = {
        'form': form,
        'meta': meta,
    }
    return render_template('admin/edit-shape.html', **context)
Example #6
def ingest_from_fixture(fixture_meta, fname):
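    # Register the fixture's metadata, then run the point ETL directly against the local file.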
    md = MetaTable(**fixture_meta)
    session.add(md)
    session.commit()
    path = os.path.join(fixtures_path, fname)
    point_etl = PlenarioETL(md, source_path=path)
    point_etl.add()
Example #7
def edit_dataset(source_url_hash):
    form = EditDatasetForm()
    meta = session.query(MetaTable).get(source_url_hash)
    table = Table('dat_%s' % meta.dataset_name,
                  Base.metadata,
                  autoload=True,
                  autoload_with=engine)
    fieldnames = table.columns.keys()
    if form.validate_on_submit():
        upd = {
            'human_name': form.human_name.data,
            'description': form.description.data,
            'attribution': form.attribution.data,
            'obs_from': form.obs_from.data,
            'obs_to': form.obs_to.data,
            'update_freq': form.update_freq.data,
            'business_key': form.business_key.data,
            'latitude': form.latitude.data,
            'longitude': form.longitude.data,
            'location': form.location.data,
            'observed_date': form.observed_date.data,
        }
        session.query(MetaTable)\
            .filter(MetaTable.source_url_hash == meta.source_url_hash)\
            .update(upd)
        session.commit()
        flash('%s updated successfully!' % meta.human_name, 'success')
        return redirect(url_for('views.view_datasets'))
    context = {
        'form': form,
        'meta': meta,
        'fieldnames': fieldnames,
    }
    return render_template('edit-dataset.html', **context)
Example #10
    def ingest_fixture(fixture):
        # Add the fixture to the metadata first
        shape_meta = ShapeMetadata.add(caller_session=session,
                                       human_name=fixture.human_name,
                                       source_url=None)
        session.commit()
        # Bypass the celery task and call on a ShapeETL directly
        ShapeETL(meta=shape_meta, source_path=fixture.path).import_shapefile()
        return shape_meta
Example #11
    def setUp(self):
        session.rollback()
        # Ensure we have metadata loaded into the database
        # to mimic the behavior of metadata ingestion preceding file ingestion.
        drop_meta('dog_park_permits')
        drop_meta('community_radio_events')
        drop_meta('public_opera_performances')

        # Make new MetaTable objects
        self.unloaded_meta = MetaTable(url='nightvale.gov/events.csv',
                                       human_name='Community Radio Events',
                                       business_key='Event Name',
                                       observed_date='Date',
                                       latitude='lat',
                                       longitude='lon',
                                       approved_status=True)

        self.existing_meta = MetaTable(url='nightvale.gov/dogpark.csv',
                                       human_name='Dog Park Permits',
                                       business_key='Hooded Figure ID',
                                       observed_date='Date',
                                       latitude='lat',
                                       longitude='lon',
                                       approved_status=False)

        self.opera_meta = MetaTable(url='nightvale.gov/opera.csv',
                                    human_name='Public Opera Performances',
                                    business_key='Event Name',
                                    observed_date='Date',
                                    location='Location',
                                    approved_status=False)
        session.add_all(
            [self.existing_meta, self.opera_meta, self.unloaded_meta])
        session.commit()

        # Also, let's have one table pre-loaded...
        self.existing_table = sa.Table(
            'dog_park_permits', MetaData(),
            Column('hooded_figure_id', Integer),
            Column('point_date', TIMESTAMP, nullable=False),
            Column('date', Date, nullable=True),
            Column('lat', Float, nullable=False),
            Column('lon', Float, nullable=False),
            Column('hash', String(32), primary_key=True),
            Column('geom', Geometry('POINT', srid=4326), nullable=True))
        drop_if_exists(self.existing_table.name)
        self.existing_table.create(bind=app_engine)

        # ... with some pre-existing data
        ins = self.existing_table.insert().values(
            hooded_figure_id=1,
            point_date=date(2015, 1, 2),
            lon=-87.6495076896,
            lat=41.7915865543,
            geom=None,
            hash='addde9be7f59e95fc08e54e29b2a947f')
        app_engine.execute(ins)
Example #12
def approve_dataset(source_url_hash):
    # Approve it
    meta = session.query(MetaTable).get(source_url_hash)
    meta.approved_status = True
    session.commit()
    # Ingest it
    add_dataset_task.delay(source_url_hash)
    send_approval_email(meta.human_name, meta.contributor_name,
                        meta.contributor_email)
Example #14
def approve_shape(dataset_name):
    # Approve it
    meta = session.query(ShapeMetadata).get(dataset_name)
    meta.approved_status = True
    session.commit()
    # Ingest it
    add_shape_task.delay(dataset_name)
    send_approval_email(meta.human_name, meta.contributor_name,
                        meta.contributor_email)
Example #16
def edit_dataset(source_url_hash):
    form = EditDatasetForm()
    meta = session.query(MetaTable).get(source_url_hash)
    fieldnames = meta.column_names
    num_rows = 0

    if meta.approved_status:
        try:
            table_name = meta.dataset_name
            table = Table(table_name,
                          Base.metadata,
                          autoload=True,
                          autoload_with=engine)

            # Would prefer to just get the names from the metadata
            # without needing to reflect.
            fieldnames = table.columns.keys()
            pk_name = [p.name for p in table.primary_key][0]
            pk = table.c[pk_name]
            num_rows = session.query(pk).count()

        except sqlalchemy.exc.NoSuchTableError:
            # dataset has been approved, but perhaps still processing.
            pass

    if form.validate_on_submit():
        upd = {
            'human_name': form.human_name.data,
            'description': form.description.data,
            'attribution': form.attribution.data,
            'update_freq': form.update_freq.data,
            'latitude': form.latitude.data,
            'longitude': form.longitude.data,
            'location': form.location.data,
            'observed_date': form.observed_date.data,
        }
        session.query(MetaTable)\
            .filter(MetaTable.source_url_hash == meta.source_url_hash)\
            .update(upd)
        session.commit()

        if not meta.approved_status:
            approve_dataset(source_url_hash)

        flash('%s updated successfully!' % meta.human_name, 'success')
        return redirect(url_for('views.view_datasets'))
    else:
        pass

    context = {
        'form': form,
        'meta': meta,
        'fieldnames': fieldnames,
        'num_rows': num_rows,
    }
    return render_template('admin/edit-dataset.html', **context)
Example #17
    def ingest_fixture(fixture):
        # Add the fixture to the metadata first
        shape_meta = ShapeMetadata.add(human_name=fixture.human_name,
                                       source_url=None,
                                       update_freq=fixture.update_freq,
                                       approved_status=False)
        session.commit()
        # Bypass the celery task and call on a ShapeETL directly
        ShapeETL(meta=shape_meta, source_path=fixture.path).add()
        return shape_meta
Example #18
def init_user():
    if plenario.settings.DEFAULT_USER:
        print 'creating default user %s' % plenario.settings.DEFAULT_USER['name']
        user = plenario.models.User(**plenario.settings.DEFAULT_USER)
        session.add(user)
        try:
            session.commit()
        except Exception as e:
            session.rollback()
            raise e
Example #21
def init_master_meta_user():
    print 'creating master, meta and user tables'
    Base.metadata.create_all(bind=app_engine)
    if plenario.settings.DEFAULT_USER:
        print 'creating default user %s' % plenario.settings.DEFAULT_USER['name']
        user = plenario.models.User(**plenario.settings.DEFAULT_USER)
        session.add(user)
        try:
            session.commit()
        except IntegrityError:
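            # Ignore the conflict (most likely the default user already exists).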
            pass
Example #22
def edit_dataset(source_url_hash):
    form = EditDatasetForm()
    meta = session.query(MetaTable).get(source_url_hash)
    fieldnames = meta.column_names
    num_rows = 0
    
    if meta.approved_status:
        try:
            table_name = meta.dataset_name
            table = Table(table_name, Base.metadata,
                          autoload=True, autoload_with=engine)

            # Would prefer to just get the names from the metadata
            # without needing to reflect.
            fieldnames = table.columns.keys()
            pk_name = [p.name for p in table.primary_key][0]
            pk = table.c[pk_name]
            num_rows = session.query(pk).count()
            
        except sqlalchemy.exc.NoSuchTableError:
            # dataset has been approved, but perhaps still processing.
            pass

    if form.validate_on_submit():
        upd = {
            'human_name': form.human_name.data,
            'description': form.description.data,
            'attribution': form.attribution.data,
            'update_freq': form.update_freq.data,
            'latitude': form.latitude.data,
            'longitude': form.longitude.data,
            'location': form.location.data,
            'observed_date': form.observed_date.data,
        }
        session.query(MetaTable)\
            .filter(MetaTable.source_url_hash == meta.source_url_hash)\
            .update(upd)
        session.commit()

        if not meta.approved_status:
            approve_dataset(source_url_hash)
        
        flash('%s updated successfully!' % meta.human_name, 'success')
        return redirect(url_for('views.view_datasets'))
    else:
        pass

    context = {
        'form': form,
        'meta': meta,
        'fieldnames': fieldnames,
        'num_rows': num_rows,
    }
    return render_template('admin/edit-dataset.html', **context)
Example #23
File: views.py  Project: EzanLTD/plenario
def add_dataset_to_metatable(request, url, dataset_id, dataset_info,
                             socrata_source, approved_status):
    data_types = []
    business_key = None
    observed_date = None
    latitude = None
    longitude = None
    location = None
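    # Collect user-assigned column data types and key roles from the submitted form fields.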
    for k, v in request.form.iteritems():
        if k.startswith('data_type_'):
            key = k.replace("data_type_", "")
            data_types.append({"field_name": key, "data_type": v})

        if k.startswith('key_type_'):
            key = k.replace("key_type_", "")
            if (v == "business_key"): business_key = key
            if (v == "observed_date"): observed_date = key
            if (v == "latitude"): latitude = key
            if (v == "longitude"): longitude = key
            if (v == "location"): location = key

    if socrata_source:
        data_types = dataset_info['columns']
        url = dataset_info['source_url']

    d = {
        'dataset_name': slugify(request.form.get('dataset_name'),
                                delim=u'_')[:50],
        'human_name': request.form.get('dataset_name'),
        'attribution': request.form.get('dataset_attribution'),
        'description': request.form.get('dataset_description'),
        'source_url': url,
        'source_url_hash': dataset_id,
        'update_freq': request.form.get('update_frequency'),
        'business_key': business_key,
        'observed_date': observed_date,
        'latitude': latitude,
        'longitude': longitude,
        'location': location,
        'contributor_name': request.form.get('contributor_name'),
        'contributor_organization': request.form.get('contributor_organization'),
        'contributor_email': request.form.get('contributor_email'),
        'contributed_data_types': json.dumps(data_types),
        'approved_status': approved_status,
        'is_socrata_source': socrata_source
    }

    # add this to meta_master
    md = MetaTable(**d)
    session.add(md)
    session.commit()

    return md
Example #24
def update_shape(self, table_name):
    # Associate the dataset with this celery task
    # so we can check on the task's status
    meta = session.query(ShapeMetadata).get(table_name)
    meta.celery_task_id = self.request.id
    session.commit()

    # Update the shapefile
    ShapeETL(meta=meta).update()
    return 'Finished updating shape dataset {} from {}.'.\
        format(meta.dataset_name, meta.source_url)
Example #26
    def setUp(self):
        session.rollback()
        # Ensure we have metadata loaded into the database
        # to mimic the behavior of metadata ingestion preceding file ingestion.
        drop_meta('dog_park_permits')
        drop_meta('community_radio_events')
        drop_meta('public_opera_performances')

        # Make new MetaTable objects
        self.unloaded_meta = MetaTable(url='nightvale.gov/events.csv',
                                      human_name='Community Radio Events',
                                      business_key='Event Name',
                                      observed_date='Date',
                                      latitude='lat', longitude='lon',
                                      approved_status=True)

        self.existing_meta = MetaTable(url='nightvale.gov/dogpark.csv',
                                      human_name='Dog Park Permits',
                                      business_key='Hooded Figure ID',
                                      observed_date='Date',
                                      latitude='lat', longitude='lon',
                                      approved_status=False)

        self.opera_meta = MetaTable(url='nightvale.gov/opera.csv',
                                   human_name='Public Opera Performances',
                                   business_key='Event Name',
                                   observed_date='Date',
                                   location='Location',
                                   approved_status=False)
        session.add_all([self.existing_meta, self.opera_meta, self.unloaded_meta])
        session.commit()

        # Also, let's have one table pre-loaded...
        self.existing_table = sa.Table('dog_park_permits', MetaData(),
                                       Column('hooded_figure_id', Integer),
                                       Column('point_date', TIMESTAMP, nullable=False),
                                       Column('date', Date, nullable=True),
                                       Column('lat', Float, nullable=False),
                                       Column('lon', Float, nullable=False),
                                       Column('hash', String(32), primary_key=True),
                                       Column('geom', Geometry('POINT', srid=4326), nullable=True))
        drop_if_exists(self.existing_table.name)
        self.existing_table.create(bind=app_engine)

        # ... with some pre-existing data
        ins = self.existing_table.insert().values(hooded_figure_id=1,
                                                  point_date=date(2015, 1, 2),
                                                  lon=-87.6495076896,
                                                  lat=41.7915865543,
                                                  geom=None,
                                                  hash='addde9be7f59e95fc08e54e29b2a947f')
        app_engine.execute(ins)
Example #27
def init_census():
    print 'initializing and populating US Census blocks'
    print 'this will *also* take a few minutes ...'
    census_settings = plenario.settings.CENSUS_BLOCKS

    # Only try to cache to AWS if we've specified a key
    save_to_s3 = (plenario.settings.AWS_ACCESS_KEY != '')

    census_meta = plenario.models.ShapeMetadata.add(
        source_url=census_settings['source_url'],
        human_name=census_settings['human_name'],
        caller_session=session)
    session.commit()
    ShapeETL(meta=census_meta, save_to_s3=save_to_s3).import_shapefile()
Example #28
def delete_dataset(self, source_url_hash):
    md = session.query(MetaTable).get(source_url_hash)
    try:
        dat_table = md.point_table
        dat_table.drop(engine, checkfirst=True)
    except NoSuchTableError:
        # Move on so we can get rid of the metadata
        pass
    session.delete(md)
    try:
        session.commit()
    except InternalError, e:
        raise delete_dataset.retry(exc=e)
Example #30
def add_dataset_to_metatable(request, url, dataset_id, dataset_info, socrata_source, approved_status):
    data_types = []
    business_key = None
    observed_date = None
    latitude = None
    longitude = None 
    location = None
    for k, v in request.form.iteritems():
        if k.startswith('data_type_'):
            key = k.replace("data_type_", "")
            data_types.append({"field_name": key, "data_type": v})

        if k.startswith('key_type_'):
            key = k.replace("key_type_", "")
            if (v == "business_key"): business_key = key
            if (v == "observed_date"): observed_date = key
            if (v == "latitude"): latitude = key
            if (v == "longitude"): longitude = key
            if (v == "location"): location = key

    if socrata_source:
        data_types = dataset_info['columns']
        url = dataset_info['source_url']

    d = {
        'dataset_name': slugify(request.form.get('dataset_name'), delim=u'_')[:50],
        'human_name': request.form.get('dataset_name'),
        'attribution': request.form.get('dataset_attribution'),
        'description': request.form.get('dataset_description'),
        'source_url': url,
        'source_url_hash': dataset_id,
        'update_freq': request.form.get('update_frequency'),
        'business_key': business_key,
        'observed_date': observed_date,
        'latitude': latitude,
        'longitude': longitude,
        'location': location,
        'contributor_name': request.form.get('contributor_name'),
        'contributor_organization': request.form.get('contributor_organization'),
        'contributor_email': request.form.get('contributor_email'),
        'contributed_data_types': json.dumps(data_types),
        'approved_status': approved_status,
        'is_socrata_source': socrata_source
    }

    # add this to meta_master
    md = MetaTable(**d)
    session.add(md)
    session.commit()

    return md
Example #31
def reset_password():
    form = ResetPasswordForm()
    errors = []
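    # Only store the new password after the old one has been verified.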
    if form.validate_on_submit():
        user = db_session.query(User).get(flask_session['user_id'])
        check = user.check_password(user.name, form.old_password.data)
        if check:
            user.password = form.new_password.data
            db_session.add(user)
            db_session.commit()
            flash('Password reset successful!', 'success')
        else:
            errors.append('Password is not correct')
    return render_template('reset-password.html', form=form, errors=errors)
Example #32
    def import_shapefile(self):
        if self.meta.is_ingested:
            raise PlenarioETLError("Table {} has already been ingested.".format(self.table_name))

        # NB: this function is not atomic.
        # update_after_ingest could fail after _ingest_shapefile succeeds, leaving us with inaccurate metadata.
        # If this becomes a problem, we can tweak the ogr2ogr import to return a big SQL string
        # rather than just going ahead and importing the shapefile.
        # Then we could put both operations in the same transaction.

        self._ingest_shapefile()
        self.meta.update_after_ingest(session)

        session.commit()
Example #34
    def add(self):
        if self.meta.is_ingested:
            raise PlenarioETLError("Table {} has already been ingested."
                                   .format(self.table_name))

        new = HashedShape(self.table_name, self.source_url, self.source_path)
        try:
            new.ingest()
            self.meta.update_after_ingest()
            session.commit()
        except:
            # In case ingestion failed partway through,
            # be sure to leave no trace.
            new.drop()
            raise
Example #35
def init_user():
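    # Create the configured default user, unless any users already exist.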
    if DEFAULT_USER['name']:
        print 'Creating default user %s' % DEFAULT_USER['name']
        if session.query(plenario.models.User).count() > 0:
            print 'Users already exist. Skipping this step.'
            return
        user = plenario.models.User(**DEFAULT_USER)
        session.add(user)
        try:
            session.commit()
        except Exception as e:
            session.rollback()
            print "Problem while creating default user: ", e
    else:
        print 'No default user specified. Skipping this step.'
Example #36
def shape_meta_from_submit_form(form, is_approved):

    md = ShapeMetadata.add(
        human_name=form['dataset_name'],
        source_url=form['file_url'],
        view_url=form.get('view_url'),
        attribution=form.get('dataset_attribution'),
        description=form.get('dataset_description'),
        update_freq=form['update_frequency'],
        contributor_name=form['contributor_name'],
        contributor_organization=form.get('contributor_organization'),
        contributor_email=form['contributor_email'],
        approved_status=is_approved)
    session.commit()
    return md
Example #38
    def add(self):
        if self.meta.is_ingested:
            raise PlenarioETLError(
                "Table {} has already been ingested.".format(self.table_name))

        new = HashedShape(self.table_name, self.source_url, self.source_path)
        try:
            new.ingest()
            self.meta.update_after_ingest()
            session.commit()
        except:
            # In case ingestion failed partway through,
            # be sure to leave no trace.
            new.drop()
            raise
Example #39
def add_user():
    form = AddUserForm()
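    # On a valid submission, persist the new user, then re-render the page with the full user list.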
    if form.validate_on_submit():
        user_info = {
            'name': form.name.data,
            'email': form.email.data,
            'password': form.password.data
        }
        user = User(**user_info)
        db_session.add(user)
        db_session.commit()
    context = {
        'form': form,
        'name': form.name.data,
        'email': form.email.data,
        'users': db_session.query(User).all()
    }
    return render_template('admin/add-user.html', **context)
Example #40
def add_user():
    form = AddUserForm()
    if form.validate_on_submit():
        user_info = {
            'name': form.name.data,
            'email': form.email.data,
            'password': form.password.data
        }
        user = User(**user_info)
        db_session.add(user)
        db_session.commit()
    context = {
        'form': form,
        'name': form.name.data,
        'email': form.email.data,
        'users': db_session.query(User).all()
    }
    return render_template('add-user.html', **context)
Example #41
    def setUpClass(cls, shutdown=False):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful
        # to inspect them in the DB after running the tests.
        meta_table_names = ['meta_shape']
        fixture_table_names = [fixture.table_name for key, fixture in fixtures.iteritems()]

        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_meta()

        # Fully ingest the fixtures
        BasePlenarioTest.ingest_fixture(fixtures['city'])
        BasePlenarioTest.ingest_fixture(fixtures['streets'])
        BasePlenarioTest.ingest_fixture(fixtures['zips'])
        BasePlenarioTest.ingest_fixture(fixtures['neighborhoods'])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(human_name=u'Dummy Name',
                                           source_url=None,
                                           update_freq='yearly',
                                           approved_status=False).dataset_name
        session.commit()

        tables_to_drop = [
            'flu_shot_clinics',
            'landmarks',
            'crimes',
            'meta_master'
        ]
        drop_tables(tables_to_drop)

        init_meta()

        ingest_from_fixture(flu_shot_meta, flu_path)
        ingest_from_fixture(landmarks_meta, landmarks_path)
        ingest_from_fixture(crime_meta, crime_path)

        cls.app = create_app().test_client()

        '''/detail'''
Example #42
    def setUpClass(cls, shutdown=False):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful
        # to inspect them in the DB after running the tests.
        meta_table_names = ['meta_shape']
        fixture_table_names = [
            fixture.table_name for key, fixture in fixtures.iteritems()
        ]

        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_meta()

        # Fully ingest the fixtures
        BasePlenarioTest.ingest_fixture(fixtures['city'])
        BasePlenarioTest.ingest_fixture(fixtures['streets'])
        BasePlenarioTest.ingest_fixture(fixtures['zips'])
        BasePlenarioTest.ingest_fixture(fixtures['neighborhoods'])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(human_name=u'Dummy Name',
                                           source_url=None,
                                           update_freq='yearly',
                                           approved_status=False).dataset_name
        session.commit()

        tables_to_drop = [
            'flu_shot_clinics', 'landmarks', 'crimes', 'meta_master'
        ]
        drop_tables(tables_to_drop)

        init_meta()

        ingest_from_fixture(flu_shot_meta, flu_path)
        ingest_from_fixture(landmarks_meta, landmarks_path)
        ingest_from_fixture(crime_meta, crime_path)

        cls.app = create_app().test_client()
        '''/detail'''
Example #43
def update_meta(metadata, table):
    """
    After ingest/update, update the metadata registry to reflect
    :param metadata:
    :param table:
    """
    metadata.update_date_added()
    metadata.obs_from, metadata.obs_to =\
        session.query(func.min(table.c.point_date),
                      func.max(table.c.point_date)).first()

    bbox = session.query(
        func.ST_SetSRID(func.ST_Envelope(func.ST_Union(table.c.geom)),
                        4326)).first()[0]
    metadata.bbox = bbox
    session.add(metadata)
    try:
        session.commit()
    except:
        session.rollback()
        raise
Example #44
    def test_delete_shape(self):
        # Can we remove a shape that's fully ingested?
        city_meta = session.query(ShapeMetadata).get(
            fixtures['city'].table_name)
        self.assertIsNotNone(city_meta)
        city_meta.remove_table()
        session.commit()
        city_meta = session.query(ShapeMetadata).get(
            fixtures['city'].table_name)
        self.assertIsNone(city_meta)

        # Can we remove a shape that's only in the metadata?
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNotNone(dummy_meta)
        dummy_meta.remove_table()
        session.commit()
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNone(dummy_meta)

        # Add them back to return to original test state
        ShapeTests.ingest_fixture(fixtures['city'])
        ShapeMetadata.add(human_name=u'Dummy Name',
                          source_url=None,
                          update_freq='yearly',
                          approved_status=False)

        session.commit()
Example #45
    def test_delete_shape(self):
        # Can we remove a shape that's fully ingested?
        city_meta = session.query(ShapeMetadata).get(fixtures['city'].table_name)
        self.assertIsNotNone(city_meta)
        city_meta.remove_table()
        session.commit()
        city_meta = session.query(ShapeMetadata).get(fixtures['city'].table_name)
        self.assertIsNone(city_meta)

        # Can we remove a shape that's only in the metadata?
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNotNone(dummy_meta)
        dummy_meta.remove_table()
        session.commit()
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNone(dummy_meta)

        # Add them back to return to original test state
        ShapeTests.ingest_fixture(fixtures['city'])
        ShapeMetadata.add(human_name=u'Dummy Name',
                          source_url=None,
                          update_freq='yearly',
                          approved_status=False)

        session.commit()
Example #46
def update_meta(metadata, table):
    """
    After ingest/update, update the metadata registry to reflect
    :param metadata:
    :param table:
    """
    metadata.update_date_added()
    metadata.obs_from, metadata.obs_to =\
        session.query(func.min(table.c.point_date),
                      func.max(table.c.point_date)).first()

    bbox = session.query(
        func.ST_SetSRID(func.ST_Envelope(func.ST_Union(table.c.geom)),
                        4326)).first()[0]
    metadata.bbox = bbox
    session.add(metadata)
    try:
        session.commit()
    except:
        session.rollback()
        raise
Example #47
    def setUpClass(cls):

        # Remove tables that we're about to recreate.
        # This doesn't happen in teardown because I find it helpful to inspect them in the DB after running the tests.
        meta_table_names = ["dat_master", "meta_shape", "meta_master", "plenario_user"]
        fixture_table_names = [fixture.table_name for key, fixture in fixtures.iteritems()]
        drop_tables(meta_table_names + fixture_table_names)

        # Re-add meta tables
        init_master_meta_user()

        # Fully ingest the fixtures
        ShapeTests.ingest_fixture(fixtures["city"])
        ShapeTests.ingest_fixture(fixtures["streets"])
        ShapeTests.ingest_fixture(fixtures["zips"])

        # Add a dummy dataset to the metadata without ingesting a shapefile for it
        cls.dummy_name = ShapeMetadata.add(
            caller_session=session, human_name=u"Dummy Name", source_url=None
        ).dataset_name
        session.commit()

        cls.app = create_app().test_client()
Example #48
File: point.py  Project: geosir/plenario
def update_meta(metatable, table):
    """
    After ingest/update, update the metatable registry to reflect table information.

    :param metatable: MetaTable instance to update.
    :param table: Table instance to update from.

    :returns: None
    """

    try:
        metatable.update_date_added()

        metatable.obs_from, metatable.obs_to = session.query(
            func.min(table.c.point_date),
            func.max(table.c.point_date)
        ).first()

        metatable.bbox = session.query(
            func.ST_SetSRID(
                func.ST_Envelope(func.ST_Union(table.c.geom)),
                4326
            )
        ).first()[0]

        metatable.column_names = {
            c.name: str(c.type) for c in metatable.column_info()
            if c.name not in {u'geom', u'point_date', u'hash'}
        }

        session.add(metatable)
        session.commit()

    except:
        session.rollback()
        raise
Example #49
def point_meta_from_submit_form(form, is_approved):
    column_names, labels = form_columns(form)
    name = slugify(form['dataset_name'], delim=u'_')[:50]

    md = MetaTable(
        url=form['file_url'],
        view_url=form.get('view_url'),
        dataset_name=name,
        human_name=form['dataset_name'],
        attribution=form.get('dataset_attribution'),
        description=form.get('dataset_description'),
        update_freq=form['update_frequency'],
        contributor_name=form['contributor_name'],
        contributor_organization=form.get('contributor_organization'),
        contributor_email=form['contributor_email'],
        approved_status=is_approved,
        observed_date=labels['observed_date'],
        latitude=labels.get('latitude', None),
        longitude=labels.get('longitude', None),
        location=labels.get('location', None),
        column_names=column_names)
    session.add(md)
    session.commit()
    return md
Example #50
    def test_delete_shape(self):
        # Can we remove a shape that's fully ingested?
        city_meta = session.query(ShapeMetadata).get(fixtures["city"].table_name)
        self.assertIsNotNone(city_meta)
        city_meta.remove_table(caller_session=session)
        session.commit()
        city_meta = session.query(ShapeMetadata).get(fixtures["city"].table_name)
        self.assertIsNone(city_meta)

        # Can we remove a shape that's only in the metadata?
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNotNone(dummy_meta)
        dummy_meta.remove_table(caller_session=session)
        session.commit()
        dummy_meta = session.query(ShapeMetadata).get(self.dummy_name)
        self.assertIsNone(dummy_meta)

        # Add them back to return to original test state
        ShapeTests.ingest_fixture(fixtures["city"])
        ShapeMetadata.add(caller_session=session, human_name=u"Dummy Name", source_url=None)
        session.commit()
Example #51
File: views.py  Project: EzanLTD/plenario
    if form.validate_on_submit():
        upd = {
            'human_name': form.human_name.data,
            'description': form.description.data,
            'attribution': form.attribution.data,
            'update_freq': form.update_freq.data,
            'business_key': form.business_key.data,
            'latitude': form.latitude.data,
            'longitude': form.longitude.data,
            'location': form.location.data,
            'observed_date': form.observed_date.data,
        }
        session.query(MetaTable)\
            .filter(MetaTable.source_url_hash == meta.source_url_hash)\
            .update(upd)
        session.commit()

        if (meta.approved_status != 'true'):
            approve_dataset(source_url_hash)

        flash('%s updated successfully!' % meta.human_name, 'success')
        return redirect(url_for('views.view_datasets'))
    else:
        pass

    context = {
        'form': form,
        'meta': meta,
        'fieldnames': fieldnames,
        'num_rows': num_rows,
        'num_weather_observations': num_weather_observations,
Example #52
def drop_tables(table_names):
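    # Build one DROP TABLE IF EXISTS statement per table and run them in a single execute() call.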
    drop_template = 'DROP TABLE IF EXISTS {};'
    command = ''.join([drop_template.format(table_name) for table_name in table_names])
    session.execute(command)
    session.commit()
Example #53
def submit_dataset():
    # Slightly dumb way to make sure that POSTs are only coming from
    # originating domain for the time being
    referer = request.headers.get('Referer')
    if referer:
        referer = urlparse(referer).netloc
        req_url = urlparse(request.url).netloc
        if referer != req_url:
            abort(401)
    else:
        abort(401)
    resp = {'status': 'ok', 'message': ''}
    status_code = 200
    errors = []
    post = request.form.get('data')
    if not post:
        try:
            post = request.form.keys()[0]
        except IndexError:
            resp['status'] = 'error'
            resp['message'] = 'Unable to decode POST data'
            status_code = 400
    if status_code == 200:
        post = json.loads(post)
        if post.get('view_url'):
            if post.get('socrata'):
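                # Derive the Socrata API view URL from the dataset's four-by-four ID.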
                source_domain = urlparse(post['view_url']).netloc
                four_by_four = re.findall(r'/([a-z0-9]{4}-[a-z0-9]{4})',
                                          post['view_url'])[-1]
                view_url = 'http://%s/api/views/%s' % (source_domain,
                                                       four_by_four)
                dataset_info, errors, status_code = get_socrata_data_info(
                    view_url)
                source_url = '%s/rows.csv?accessType=DOWNLOAD' % view_url
            else:
                dataset_info = {
                    'attribution': '',
                    'description': '',
                }
                source_url = post['view_url']
                dataset_info['name'] = urlparse(source_url).path.split('/')[-1]
            if errors:
                resp['message'] = ', '.join([e for e in errors])
                resp['status'] = 'error'
                status_code = 400
            else:
                dataset_id = md5(source_url).hexdigest()
                md = session.query(MetaTable).get(dataset_id)
                if not md:
                    d = {
                        'dataset_name': slugify(dataset_info['name'],
                                                delim=u'_'),
                        'human_name': dataset_info['name'],
                        'attribution': dataset_info['attribution'],
                        'description': dataset_info['description'],
                        'source_url': source_url,
                        'source_url_hash': dataset_id,
                        'update_freq': post['update_frequency'],
                        'business_key': post['field_definitions']['id_field'],
                        'observed_date': post['field_definitions']['date_field'],
                        'latitude': post['field_definitions'].get('latitude'),
                        'longitude': post['field_definitions'].get('longitude'),
                        'location': post['field_definitions'].get('location')
                    }
                    if len(d['dataset_name']) > 49:
                        d['dataset_name'] = d['dataset_name'][:50]
                    md = MetaTable(**d)
                    session.add(md)
                    session.commit()
                add_dataset.delay(md.source_url_hash,
                                  data_types=post.get('data_types'))
                resp['message'] = ('Dataset %s submitted successfully'
                                   % dataset_info['name'])
        else:
            resp['status'] = 'error'
            resp['message'] = 'Must provide a url where data can be downloaded'
            status_code = 400
    resp = make_response(json.dumps(resp, default=dthandler), status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #54
def drop_tables(table_names):
    drop_template = 'DROP TABLE IF EXISTS {};'
    command = ''.join(
        [drop_template.format(table_name) for table_name in table_names])
    session.execute(command)
    session.commit()
Example #55
def delete_shape(self, table_name):
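    # Remove the shape table (if it was ingested) and its metadata row, then commit.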
    shape_meta = session.query(ShapeMetadata).get(table_name)
    shape_meta.remove_table()
    session.commit()
    return 'Removed {}'.format(table_name)