def write_results_to_database(self, options, public_health_output_list):

        drop_table(
            '{grid_outcome_schema}.{grid_outcome_table}'.format(**options))

        attribute_list = filter(lambda x: x != 'id', self.outcome_fields)
        options['output_field_syntax'] = 'id int, ' + \
                                         create_sql_calculations(attribute_list, '{0} numeric(20,8)')

        execute_sql(
            "create table {grid_outcome_schema}.{grid_outcome_table} ({output_field_syntax});"
            .format(**options))

        output_textfile = StringIO("")
        for row in public_health_output_list:
            stringrow = []
            for item in row:
                if isinstance(item, int):
                    stringrow.append(str(item))
                else:
                    stringrow.append(str(round(item, 8)))
            output_textfile.write("\t".join(stringrow) + "\n")

        output_textfile.seek(os.SEEK_SET)
        #copy text file output back into Postgres
        copy_from_text_to_db(
            output_textfile,
            '{grid_outcome_schema}.{grid_outcome_table}'.format(**options))
        output_textfile.close()
        ##---------------------------
        pSql = '''alter table {grid_outcome_schema}.{grid_outcome_table}
                    add column wkb_geometry geometry (GEOMETRY, 4326);'''.format(
            **options)
        execute_sql(pSql)

        pSql = '''update {grid_outcome_schema}.{grid_outcome_table} b set
                    wkb_geometry = st_setSRID(a.wkb_geometry, 4326)
                    from (select id, wkb_geometry from {source_grid_schema}.{source_grid_table}) a
                    where cast(a.id as int) = cast(b.id as int);
        '''.format(**options)
        execute_sql(pSql)

        add_geom_idx(options['grid_outcome_schema'],
                     options['grid_outcome_table'], 'wkb_geometry')
        add_primary_key(options['grid_outcome_schema'],
                        options['grid_outcome_table'], 'id')

        # Since not every grid cell results in a grid_outcome, we need to wipe out the rel
        # table and recreate it to match the base grid_outcome table. Otherwise there will
        # be too many rel table rows and cloning the DbEntity or ConfigEntity will fail.
        logger.info(
            "Writing to relative table {grid_outcome_schema}.{grid_outcome_table}rel"
            .format(**options))
        truncate_table(
            "{grid_outcome_schema}.{grid_outcome_table}rel".format(**options))
        from footprint.main.publishing.data_import_publishing import create_and_populate_relations
        create_and_populate_relations(
            self.config_entity,
            self.config_entity.computed_db_entities(
                key=DbEntityKey.PH_GRID_OUTCOMES)[0])
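
The method above only reads four keys out of options (output_field_syntax is filled in internally); a minimal invocation sketch, with placeholder schema and table names rather than values taken from the UrbanFootprint codebase:

# Sketch only: every name below is a placeholder, and `analysis` stands in for the
# instance that defines write_results_to_database, outcome_fields and config_entity.
options = dict(
    grid_outcome_schema='analysis_results',   # schema that receives the recreated grid outcome table
    grid_outcome_table='ph_grid_outcomes',    # table dropped, recreated, and loaded from the StringIO buffer
    source_grid_schema='project_base',        # schema of the source grid supplying wkb_geometry
    source_grid_table='source_grid',          # source grid joined on id to attach geometry
)
analysis.write_results_to_database(options, public_health_output_list)
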
Example #2
def run_aggregate_within_variable_distance_processes(sql_config_dict):

    drop_table(
        '{public_health_variables_schema}.{uf_canvas_table}_variable'.format(
            public_health_variables_schema=sql_config_dict[
                'public_health_variables_schema'],
            uf_canvas_table=sql_config_dict['uf_canvas_table']))

    pSql = '''
    create table {public_health_variables_schema}.{uf_canvas_table}_variable
    as select
      a.id, st_transform(a.wkb_geometry, 3310) as wkb_geometry, cast(a.attractions_hbw * 1609.0 as float) as distance,
      sum(du * st_area(st_intersection(a.wkb_geometry, b.wkb_geometry)) / st_area(b.wkb_geometry)) as du_variable,
      sum(emp * st_area(st_intersection(a.wkb_geometry, b.wkb_geometry)) / st_area(b.wkb_geometry)) as emp_variable
    from
      (select id, wkb_geometry, attractions_hbw from {trip_lengths_schema}.{trip_lengths_table}) a,
      (select wkb_geometry, du, emp from {uf_canvas_schema}.{uf_canvas_table} where du + emp > 0) b
    where st_intersects(b.wkb_geometry, a.wkb_geometry) group by a.id, a.wkb_geometry, a.attractions_hbw;
    '''.format(public_health_variables_schema=sql_config_dict[
        'public_health_variables_schema'],
               uf_canvas_schema=sql_config_dict['uf_canvas_schema'],
               uf_canvas_table=sql_config_dict['uf_canvas_table'],
               trip_lengths_schema=sql_config_dict['trip_lengths_schema'],
               trip_lengths_table=sql_config_dict['trip_lengths_table'])

    execute_sql(pSql)

    add_geom_idx(sql_config_dict['public_health_variables_schema'],
                 sql_config_dict['uf_canvas_table'] + '_variable')
    add_primary_key(sql_config_dict['public_health_variables_schema'],
                    sql_config_dict['uf_canvas_table'] + '_variable', 'id')

    add_analysis_geom(sql_config_dict['public_health_variables_schema'],
                      sql_config_dict['public_health_variables_table'])

    aggregate_within_variable_distance(
        dict(source_table=sql_config_dict['public_health_variables_schema'] +
             '.' + sql_config_dict['uf_canvas_table'] + '_variable',
             source_table_query='id is not null',
             target_table_schema=sql_config_dict[
                 'public_health_variables_schema'],
             target_table=sql_config_dict['public_health_variables_table'],
             target_table_query='pop > 0',
             target_table_pk='id',
             suffix='variable',
             aggregation_type='sum',
             variable_field_list=['du_variable', 'emp_variable']))

    drop_table(
        '{public_health_variables_schema}.{uf_canvas_table}_variable'.format(
            public_health_variables_schema=sql_config_dict[
                'public_health_variables_schema'],
            uf_canvas_table=sql_config_dict['uf_canvas_table']))
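
The keys this function expects in sql_config_dict can be read off the format calls above; a hedged sketch with placeholder values:

# Sketch only: key names come from the function body above, the values are invented.
sql_config_dict = dict(
    public_health_variables_schema='public_health',  # holds the temporary <canvas>_variable table and the target table
    public_health_variables_table='ph_variables',    # target table that receives the du_variable/emp_variable sums
    uf_canvas_schema='base_schema',                   # schema of the UF canvas providing du and emp
    uf_canvas_table='uf_canvas',                      # canvas table; '<uf_canvas_table>_variable' is created then dropped
    trip_lengths_schema='trip_model',                 # schema of the trip-lengths layer
    trip_lengths_table='trip_lengths',                # provides id, wkb_geometry, and attractions_hbw (converted to meters)
)
run_aggregate_within_variable_distance_processes(sql_config_dict)
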
Example #3
    def write_results_to_database(self, options, energy_output_list):

        drop_table('{energy_schema}.{energy_result_table}'.format(**options))

        attribute_list = filter(
            lambda x: x not in ['id', 'title24_zone', 'fcz_zone'],
            self.output_fields)

        output_field_syntax = 'id int, title24_zone int, fcz_zone int, ' + create_sql_calculations(
            attribute_list, '{0} numeric(14, 4)')

        pSql = '''
        create table {energy_schema}.{energy_result_table} ({output_field_syntax});'''.format(
            output_field_syntax=output_field_syntax, **options)
        execute_sql(pSql)

        output_textfile = StringIO("")

        for row in energy_output_list:
            stringrow = []
            for item in row:
                if isinstance(item, int):
                    stringrow.append(str(item))
                else:
                    stringrow.append(str(round(item, 4)))
            output_textfile.write("\t".join(stringrow) + "\n")

        output_textfile.seek(os.SEEK_SET)
        #copy text file output back into Postgres
        copy_from_text_to_db(
            output_textfile,
            '{energy_schema}.{energy_result_table}'.format(**options))
        output_textfile.close()

        pSql = '''alter table {energy_schema}.{energy_result_table} add column wkb_geometry geometry (GEOMETRY, 4326);'''.format(
            **options)
        execute_sql(pSql)

        pSql = '''update {energy_schema}.{energy_result_table} b set
                    wkb_geometry = st_setSRID(a.wkb_geometry, 4326)
                    from (select id, wkb_geometry from {base_schema}.{base_table}) a
                    where cast(a.id as int) = cast(b.id as int);
        '''.format(**options)

        execute_sql(pSql)

        add_geom_idx(options['energy_schema'], options['energy_result_table'],
                     'wkb_geometry')
        add_primary_key(options['energy_schema'],
                        options['energy_result_table'], 'id')
        add_attribute_idx(options['energy_schema'],
                          options['energy_result_table'],
                          'annual_million_btus_per_unit')
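
As with the public health variant, the energy writer touches only a few option keys; a placeholder sketch (the `analysis` instance and energy_output_list are assumed, not shown above):

# Sketch only: placeholder names; the real values come from the calling module.
options = dict(
    energy_schema='energy',                 # schema for the recreated result table
    energy_result_table='energy_results',   # table that receives the rounded output rows
    base_schema='project_base',             # schema of the base canvas supplying wkb_geometry
    base_table='base_canvas',               # base table joined on id to attach geometry
)
analysis.write_results_to_database(options, energy_output_list)
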
Example #7
    def update(self, **kwargs):
        """
            This function handles the update or creation on the environmental constraints geography producing the area
            for each layer with the environmental constraint behavior. This function will both add and remove
            constraints and produce the final constraints layer in the primary geography of the active scenario
        """
        # TODO : remove hard-coded 3310 (only works in CA), need to set an "analysis projection" in the Region
        start_time = time.time()

        current_db_entities = \
            set(self.config_entity.db_entities_having_behavior_key(BehaviorKey.Fab.ricate('environmental_constraint')))

        base_feature_class = self.config_entity.db_entity_feature_class(
            DbEntityKey.BASE_CANVAS)

        options = dict(project_schema=parse_schema_and_table(
            base_feature_class._meta.db_table)[0],
                       base_table=base_feature_class.db_entity_key)

        logger.info('Inserting raw geographies into the environmental constraint geographies table for DbEntities: %s' % \
                    ', '.join(map(lambda db_entity: db_entity.name, current_db_entities)))

        drop_table(
            '{project_schema}.environmental_constraint_geographies_table'.
            format(project_schema=options['project_schema']))

        current_environmental_constraints = []
        for db_entity in current_db_entities:
            constraint_class = self.config_entity.db_entity_feature_class(
                db_entity.key)
            current_environmental_constraints.append(
                constraint_class.db_entity_key)

        create_id_field_format = create_sql_calculations(
            current_environmental_constraints, '{0}_id int')
        insert_id_field_format = create_sql_calculations(
            current_environmental_constraints, '{0}_id')

        pSql = '''
        create table {project_schema}.environmental_constraint_geographies_table
            (primary_id integer, wkb_geometry geometry, {create_id_field_format});
        SELECT UpdateGeometrySRID('{project_schema}', 'environmental_constraint_geographies_table', 'wkb_geometry', 3310)

        '''.format(project_schema=options['project_schema'],
                   create_id_field_format=create_id_field_format)

        execute_sql(pSql)

        for db_entity in current_db_entities:
            logger.info(
                'Inserting into environmental constraint geographies table for DbEntity: %s'
                % db_entity.full_name)

            constraint_class = self.config_entity.db_entity_feature_class(
                db_entity.key)

            pSql = '''
                insert into {project_schema}.environmental_constraint_geographies_table (primary_id, wkb_geometry, {constraint_db_entity_key}_id) select
                    cast(primary_id as int), wkb_geometry, {constraint_db_entity_key}_id from (
                    select
                        id as primary_id,
                        {constraint_db_entity_id} as {constraint_db_entity_key}_id,
                        st_setSRID(st_transform(st_buffer((st_dump(wkb_geometry)).geom, 0), 3310), 3310) as wkb_geometry

                    from (
                        select b.id, st_intersection(a.wkb_geometry, b.wkb_geometry) as wkb_geometry
	                    from {constraint_schema}.{constraint_db_entity_key} a,
                        {project_schema}.{base_table} b
                            where st_intersects(a.wkb_geometry, b.wkb_geometry)) as intersection
                    ) as polygons;
                '''.format(
                project_schema=options['project_schema'],
                base_table=options['base_table'],
                constraint_schema=parse_schema_and_table(
                    constraint_class._meta.db_table)[0],
                constraint_db_entity_key=constraint_class.db_entity_key,
                constraint_db_entity_id=db_entity.id)

            execute_sql(pSql)

            logger.info(
                'finished inserting db_entity: {db_entity} {time} elapsed'.
                format(time=time.time() - start_time,
                       db_entity=constraint_class.db_entity_key))

        # Only regenerate the merged environmental constraint whenever an environmental constraint is added or removed
        # from the layer

        add_geom_idx(options['project_schema'],
                     'environmental_constraint_geographies_table')

        logger.info('Unioning all environmental constraint geographies')
        drop_table(
            '{project_schema}.environmental_constraint_geographies_table_unioned'
            .format(project_schema=options['project_schema']))

        pSql = '''
            CREATE TABLE {project_schema}.environmental_constraint_geographies_table_unioned
                (id serial, wkb_geometry geometry, acres float, primary_id int, {create_id_field_format});
            SELECT UpdateGeometrySRID('{project_schema}', 'environmental_constraint_geographies_table_unioned', 'wkb_geometry', 3310);
        '''.format(project_schema=options['project_schema'],
                   create_id_field_format=create_id_field_format)

        execute_sql(pSql)

        pSql = '''
        insert into {project_schema}.environmental_constraint_geographies_table_unioned (wkb_geometry, acres, primary_id, {insert_id_field_format})
               SELECT
                    st_buffer(wkb_geometry, 0) as wkb_geometry,
                    st_area(st_buffer(wkb_geometry, 0)) * 0.000247105 as acres,
                    primary_id, {insert_id_field_format}

                    FROM (
                        SELECT
                            (ST_Dump(wkb_geometry)).geom as wkb_geometry,
                            primary_id, {insert_id_field_format}

                        FROM (
                            SELECT ST_Polygonize(wkb_geometry) AS wkb_geometry, primary_id, {insert_id_field_format}   FROM (
                                SELECT ST_Collect(wkb_geometry) AS wkb_geometry, primary_id, {insert_id_field_format}   FROM (
                                    SELECT ST_ExteriorRing(wkb_geometry) AS wkb_geometry, primary_id, {insert_id_field_format}
                                        FROM {project_schema}.environmental_constraint_geographies_table) AS lines
                                            group by primary_id, {insert_id_field_format}) AS noded_lines
                                                group by primary_id, {insert_id_field_format}) as polygons
                    ) as final
                WHERE st_area(st_buffer(wkb_geometry, 0)) > 5;'''.format(
            project_schema=options['project_schema'],
            insert_id_field_format=insert_id_field_format)

        execute_sql(pSql)

        logger.info('finished unioning env constraints: {time} elapsed'.format(
            time=time.time() - start_time))

        #reproject table back to 4326 for integration with web viewing
        pSql = '''
        SELECT UpdateGeometrySRID('{project_schema}', 'environmental_constraint_geographies_table_unioned', 'wkb_geometry', 4326);
        update {project_schema}.environmental_constraint_geographies_table_unioned a set wkb_geometry = st_transform(st_buffer(wkb_geometry, 0), 4326);
        '''.format(project_schema=options['project_schema'])
        execute_sql(pSql)

        add_geom_idx(options['project_schema'],
                     'environmental_constraint_geographies_table_unioned')

        logger.info('Env Union Finished: %s' % str(time.time() - start_time))
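
The dynamic column handling above leans on create_sql_calculations; judging only from how its result is spliced into the CREATE TABLE and INSERT statements, it appears to format each field name and join the pieces with commas. A stand-in sketch of that assumed behavior (not the footprint implementation):

# Illustrative stand-in; the assumed behavior is inferred from the SQL built above.
def create_sql_calculations_sketch(field_list, format_string):
    # Format each field name and join the fragments into a single SQL snippet.
    return ', '.join(format_string.format(field) for field in field_list)

# With two hypothetical constraint keys:
create_sql_calculations_sketch(['cpad_holdings', 'flood_zones'], '{0}_id int')
# -> 'cpad_holdings_id int, flood_zones_id int'  (create_id_field_format)
create_sql_calculations_sketch(['cpad_holdings', 'flood_zones'], '{0}_id')
# -> 'cpad_holdings_id, flood_zones_id'          (insert_id_field_format)
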
Example #8
def run_vmt_variable_trip_length_buffers(sql_config_dict):

    drop_table('{vmt_variables_schema}.{vmt_variables_table}_vmt_variable'.format(
        vmt_variables_schema=sql_config_dict['vmt_variables_schema'],
        vmt_variables_table=sql_config_dict['vmt_variables_table']))

    pSql = '''
    create table {vmt_variables_schema}.{vmt_variables_table}_vmt_variable
    as select
      a.id, st_transform(a.wkb_geometry, 3310) as wkb_geometry, cast(a.attractions_hbw * 1609.0 as float) as distance,
      sum(acres_parcel_res) as acres_parcel_res_vb,
      sum(acres_parcel_emp) as acres_parcel_emp_vb,
      sum(acres_parcel_mixed_use) as acres_parcel_mixed_use_vb,
      sum(pop) as pop_vb,
      sum(hh) as hh_vb,
      sum(du) as du_vb,
      sum(du_mf) as du_mf_vb,
      sum(emp) as emp_vb,
      sum(emp_ret) as emp_ret_vb,
      sum(hh_inc_00_10) as hh_inc_00_10_vb,
      sum(hh_inc_10_20) as hh_inc_10_20_vb,
      sum(hh_inc_20_30) as hh_inc_20_30_vb,
      sum(hh_inc_30_40) as hh_inc_30_40_vb,
      sum(hh_inc_40_50) as hh_inc_40_50_vb,
      sum(hh_inc_50_60) as hh_inc_50_60_vb,
      sum(hh_inc_60_75) as hh_inc_60_75_vb,
      sum(hh_inc_75_100) as hh_inc_75_100_vb,
      sum(hh_inc_100p) as hh_inc_100p_vb,
      sum(pop_employed) as pop_employed_vb,
      sum(pop_age16_up) as pop_age16_up_vb,
      sum(pop_age65_up) as pop_age65_up_vb

    from
      (select id, wkb_geometry, attractions_hbw from {trip_lengths_schema}.{trip_lengths_table}) a,
      (select point, acres_parcel_res, acres_parcel_emp, acres_parcel_mixed_use, pop, hh, du, du_mf, emp, emp_ret,
            hh * hh_inc_00_10_rate as hh_inc_00_10,
            hh * hh_inc_10_20_rate as hh_inc_10_20,
            hh * hh_inc_20_30_rate as hh_inc_20_30,
            hh * hh_inc_30_40_rate as hh_inc_30_40,
            hh * hh_inc_40_50_rate as hh_inc_40_50,
            hh * hh_inc_50_60_rate as hh_inc_50_60,
            hh * hh_inc_60_75_rate as hh_inc_60_75,
            hh * hh_inc_75_100_rate as hh_inc_75_100,
            hh * hh_inc_100p_rate as hh_inc_100p,
            pop * pop_age16_up_rate * pop_employed_rate as pop_employed,
            pop * pop_age16_up_rate as pop_age16_up,
            pop * pop_age65_up_rate as pop_age65_up

        from (select st_centroid(wkb_geometry) as point, pop, hh, du, du_mf, emp, emp_ret, acres_parcel_res,
        acres_parcel_emp, acres_parcel_mixed_use
        from {uf_canvas_schema}.{uf_canvas_table}) a,
             (select wkb_geometry, hh_inc_00_10_rate, hh_inc_10_20_rate, hh_inc_20_30_rate,
              hh_inc_30_40_rate, hh_inc_40_50_rate, hh_inc_50_60_rate, hh_inc_60_75_rate,
              hh_inc_75_100_rate, hh_inc_100_125_rate + hh_inc_125_150_rate + hh_inc_150_200_rate + hh_inc_200p_rate as hh_inc_100p_rate,
              pop_employed_rate, pop_age16_up_rate, pop_age65_up_rate from {census_rates_schema}.{census_rates_table}) c
          where st_intersects(point, c.wkb_geometry)
          ) b
    where st_intersects(point, a.wkb_geometry) group by a.id, a.wkb_geometry, a.attractions_hbw;
    '''.format(vmt_variables_schema=sql_config_dict['vmt_variables_schema'],
               vmt_variables_table=sql_config_dict['vmt_variables_table'],
               uf_canvas_schema=sql_config_dict['uf_canvas_schema'],
               uf_canvas_table=sql_config_dict['uf_canvas_table'],
               census_rates_schema=sql_config_dict['census_rates_schema'],
               census_rates_table=sql_config_dict['census_rates_table'],
               trip_lengths_schema=sql_config_dict['trip_lengths_schema'],
               trip_lengths_table=sql_config_dict['trip_lengths_table'])

    execute_sql(pSql)

    add_geom_idx(sql_config_dict['vmt_variables_schema'], sql_config_dict['vmt_variables_table'] + '_vmt_variable')
    add_primary_key(sql_config_dict['vmt_variables_schema'], sql_config_dict['vmt_variables_table'] + '_vmt_variable', 'id')

    aggregate_within_variable_distance(dict(
        source_table=sql_config_dict['vmt_variables_schema'] + '.' + sql_config_dict['vmt_variables_table'] + '_vmt_variable',
        source_table_query='du_vb + emp_vb > 0',
        target_table_schema=sql_config_dict['vmt_variables_schema'],
        target_table=sql_config_dict['vmt_variables_table'],
        target_table_query='id is not null',
        target_table_pk='id',
        suffix='vmt_vb',
        aggregation_type='sum',
        variable_field_list=['acres_parcel_res_vb', 'acres_parcel_emp_vb', 'acres_parcel_mixed_use_vb', 'du_vb', 'pop_vb',
                             'emp_vb', 'emp_ret_vb', 'hh_vb', 'du_mf_vb', 'hh_inc_00_10_vb', 'hh_inc_10_20_vb',
                             'hh_inc_20_30_vb', 'hh_inc_30_40_vb', 'hh_inc_40_50_vb', 'hh_inc_50_60_vb',
                             'hh_inc_60_75_vb', 'hh_inc_75_100_vb', 'hh_inc_100p_vb', 'pop_employed_vb',
                             'pop_age16_up_vb', 'pop_age65_up_vb']
    ))

    pSql = '''DROP INDEX {schema}.{schema}_{table}_analysis_geom;
    Alter Table {schema}.{table} drop column analysis_geom;'''.format(schema=sql_config_dict['vmt_variables_schema'],
                                                                   table=sql_config_dict['vmt_variables_table'])
    execute_sql(pSql)
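
This function reads the same canvas and trip-length keys as the aggregation example above, plus the VMT and census-rate entries; a placeholder sketch of the expected sql_config_dict:

# Sketch only: key names are taken from the function body, values are placeholders.
sql_config_dict = dict(
    vmt_variables_schema='vmt',            # holds the temporary <table>_vmt_variable scratch table
    vmt_variables_table='vmt_variables',   # target table that receives the buffered sums
    uf_canvas_schema='base_schema',        # UF canvas supplying pop/hh/du/emp and parcel acreages
    uf_canvas_table='uf_canvas',
    census_rates_schema='census',          # census layer supplying the household income and age rates
    census_rates_table='census_rates',
    trip_lengths_schema='trip_model',      # trip-lengths layer supplying attractions_hbw distances
    trip_lengths_table='trip_lengths',
)
run_vmt_variable_trip_length_buffers(sql_config_dict)
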