def test__write_metadata(self):
        try:
            from opus_core.indicator_framework.image_types.table import Table
        except ImportError:
            pass
        else:
            table = Table(
                source_data=self.cross_scenario_source_data,
                attribute='opus_core.test.attribute',
                dataset_name='test',
                output_type='tab',
                years=[
                    1980, 1981
                ]  # Indicators are not actually being computed, so the years don't matter here.
            )

            table.create(False)
            table.date_computed = None
            output = self.data_manager._export_indicator_to_file(
                indicator=table,
                source_data=self.cross_scenario_source_data,
                year=None)

            expected = [
                '<version>1.0</version>', '<Table>',
                '\t<attributes>[\'opus_core.test.attribute\']</attributes>',
                '\t<dataset_name>test</dataset_name>',
                '\t<years>[1980, 1981]</years>',
                '\t<date_computed>None</date_computed>',
                '\t<name>attribute</name>', '\t<operation>None</operation>',
                '\t<storage_location>%s</storage_location>' %
                os.path.join(self.temp_cache_path, 'indicators'),
                '\t<output_type>tab</output_type>', '\t<source_data>',
                '\t\t<cache_directory>%s</cache_directory>' %
                self.temp_cache_path,
                '\t\t<comparison_cache_directory>%s</comparison_cache_directory>'
                % self.temp_cache_path2,
                '\t\t<run_description>%s</run_description>' %
                self.cross_scenario_source_data.get_run_description(),
                '\t\t<years>[1980]</years>',
                '\t\t<package_order>[\'opus_core\']</package_order>',
                '\t</source_data>', '</Table>'
            ]

            for i in range(len(output)):
                if expected[i] != output[i]:
                    print expected[i]
                    print output[i]

            self.assertEqual(output, expected)
    def test__read_write_metadata(self):
        try:
            from opus_core.indicator_framework.image_types.table import Table
        except:
            raise
        else:

            table = Table(
                source_data=self.source_data,
                attribute='opus_core.test.attribute',
                dataset_name='test',
                output_type='tab',
                years=[
                    1980, 1981
                ]  # Indicators are not actually being computed, so the years don't matter here.
            )

            table.create(False)
            self.data_manager._export_indicator_to_file(
                indicator=table, source_data=self.source_data, year=None)

            metadata_file = table.get_file_name(extension='meta')
            metadata_path = os.path.join(table.get_storage_location(),
                                         metadata_file)
            self.assertEqual(os.path.exists(metadata_path), True)

            expected_path = 'test__tab__attribute.meta'
            self.assertEqual(metadata_file, expected_path)

            new_table = self.data_manager._import_indicator_from_file(
                metadata_path)
            for attr in [
                    'attributes', 'dataset_name', 'output_type',
                    'date_computed', 'years'
            ]:
                old_val = table.__getattribute__(attr)
                new_val = new_table.__getattribute__(attr)
                self.assertEqual(old_val, new_val)
            self.assertEqual(table.source_data.cache_directory,
                             new_table.source_data.cache_directory)
            self.assertEqual(
                table.source_data.dataset_pool_configuration.package_order,
                new_table.source_data.dataset_pool_configuration.package_order)
Example #4
 def test_one_indicator(self):
     source_data = SourceData(
         cache_directory = self.temp_dir,
         run_description = 'test',
         years = [self.year],
         dataset_pool_configuration = DatasetPoolConfiguration(
             package_order=['psrc','urbansim','opus_core'],
             ),
         )       
             
     indicator_defs = [
         Table(
               attribute = 'psrc.large_area.average_land_value_for_plan_type_group_residential',
               dataset_name = 'large_area',
               source_data = source_data,
               ),
         ]
     
     IndicatorFactory().create_indicators(
         indicators = indicator_defs,
         display_error_box = False, 
         show_results = False)   
 def test__create_indicators(self):
     try:
         from opus_core.indicator_framework.image_types.table import Table
     except ImportError:
         pass
     else:
         indicator_path = os.path.join(self.temp_cache_path, 'indicators')
         self.assert_(not os.path.exists(indicator_path))
         
         indicators = [
            Table(
               source_data = self.source_data,
               dataset_name = 'test', 
               attribute = 'opus_core.test.attribute', 
               output_type = 'tab'
            )
         ]
         
         factory = IndicatorFactory()
         
         factory.create_indicators(indicators = indicators)
         
         self.assert_(os.path.exists(indicator_path))
         self.assert_(os.path.exists(os.path.join(indicator_path, 'test__tab__attribute.tab')))
Example #7
        package_order=['sanfrancisco','urbansim','opus_core'],
        ),       
)

multi_year_requests = [
    #Chart(
        #attribute = 'bus_ = alldata.aggregate_all(business.sector_id == 1)',
        #dataset_name = 'alldata',
        #source_data = source_data,
        #years=arange(2001,2026),
        #),
    
    Table(
    source_data = source_data,
    dataset_name = 'alldata',
    name = 'Business Counts 1',
    output_type='csv',
    attribute = 'bus_1 = alldata.aggregate_all(business.sector_id == 1)',
    years = [2001, 2002],
    ),
    
    Table(
    source_data = source_data,
    dataset_name = 'alldata',
    name = 'Business Counts 2',
    output_type='csv',
    attribute = 'bus_2 = alldata.aggregate_all(business.sector_id == 2)',
    years = [2001, 2002],
    ),

    Table(
    source_data = source_data,
Example #8
     scale = [0, 0.2],
     dataset_name = 'large_area',
     source_data = source_data,
     ),
 Map(
     scale = [-5000, 250000],
     name = 'urbansim_population_change',
     attribute = 'psrc.large_area.population',
     source_data = source_data,
     operation = 'change',
     dataset_name = 'large_area',
     ),
 Table(
    source_data = source_data,
    dataset_name = 'large_area',
    name = 'test_overwrite',
    #operation = 'change',
    attribute = 'large_area.aggregate(urbansim.zone.number_of_jobs, intermediates=[faz])',
    years = [2000, 2005]
 ), 
 Table(
    source_data = source_data,
    dataset_name = 'large_area',
    name = 'test_overwrite2',
    #operation = 'change',
    attribute = 'large_area.aggregate(urbansim.zone.number_of_jobs, intermediates=[faz])',
    years = [2000, 2005]
 ), 
 
 Table(
    source_data = source_data,
    dataset_name = 'zone',
# An example script:

source_data = SourceData(
    #cache_directory = r'D:\urbansim_cache\run_1090.2006_11_14_12_12',
    cache_directory=
    '/Users/hana/urbansim_cache/washtenaw/runs/run_3328.2007_08_01_19_36',
    #comparison_cache_directory = r'D:\urbansim_cache\run_1091.2006_11_14_12_12',
    years=[2006],
    dataset_pool_configuration=DatasetPoolConfiguration(
        package_order=['urbansim', 'opus_core'], ),
)

indicators = [
    Table(source_data=source_data,
          dataset_name='gridcell',
          attribute='urbansim.gridcell.number_of_households',
          output_type='tab'),
    Table(
        source_data=source_data,
        dataset_name='city',
        name='average_income',
        attribute=
        'city.aggregate(urbansim.gridcell.sum_income) / city.aggregate(urbansim.gridcell.number_of_households)',
        output_type='tab'),
    Table(
        source_data=source_data,
        dataset_name='city',
        name='nonresidential_land_value_higher_than_10000',
        attribute=
        'city.aggregate(urbansim.gridcell.nonresidential_land_value > 10000)',
        output_type='tab'),
Example #10
)

indicators = [
    #    Table(
    #        attribute = 'population=large_area.aggregate(urbansim_parcel.building.population, intermediates=[parcel, zone, faz])',
    #        dataset_name = 'large_area',
    #        source_data = source_data,
    #        ),
    #    Table(
    #        attribute = 'employment=large_area.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel, zone, faz])',
    #        dataset_name = 'large_area',
    #        source_data = source_data,
    #        ),
    Table(
        attribute=
        'at_home_workers=district.aggregate(person.work_at_home==1, intermediates=[household, building, parcel, zone])',
        dataset_name='district',
        source_data=source_data,
    ),
    DatasetTable(
        attributes=[
            'origin_district_id',
            'destination_district_id',
            'psrc_parcel.district_commute.commute_trips',
        ],
        dataset_name='district_commute',
        source_data=source_data,
        name='commutes_by_district',
    ),
    #    Map(
    #        attribute = 'population=zone.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
    #        dataset_name = 'zone',
#cache_directory = r'D:\urbansim_cache\run_1154.2006_11_17_20_06'
#run_description = '(run 1154 - no ugb + double highway capacity 11/28/2006)'
#cache_directory = r'D:\urbansim_cache\run_1155.2006_11_17_20_07'
#run_description = '(run 1155 - no ugb 11/28/2006)'

source_data = SourceData(
    cache_directory=cache_directory,
    run_description=run_description,
    years=[1980, 1981, 1982],
    dataset_pool_configuration=DatasetPoolConfiguration(
        package_order=['eugene', 'urbansim', 'opus_core'], ),
)
single_year_requests = [
    Table(
        attribute='urbansim.zone.population',
        dataset_name='zone',
        source_data=source_data,
    ),
    Table(
        attribute='urbansim.zone.number_of_jobs',
        dataset_name='zone',
        source_data=source_data,
    ),
    Map(
        attribute='urbansim.zone.population',
        scale=[1, 60000],
        dataset_name='zone',
        source_data=source_data,
    ),
    Map(
        attribute='urbansim.zone.number_of_jobs',
Example #12
def get_indicators(cache_directory, run_description, years = [2015], base_year=2014):
    source_data = SourceData(
        cache_directory = cache_directory,
        run_description = run_description,
        years = years,
        base_year = base_year,
        dataset_pool_configuration = DatasetPoolConfiguration(
            package_order=['psrc_parcel','urbansim_parcel','psrc', 'urbansim','opus_core'],
            package_order_exceptions={},
            ),       
    )
    
    indicators=[
    
    # ## City indicators
    # ==================
    
       Table(
           attribute = 'max_dev_residential_capacity=city.aggregate(psrc_parcel.parcel.max_developable_residential_capacity)',
           dataset_name = 'city',
           source_data = source_data,
           ), 
       Table(
           attribute = 'max_dev_nonresidential_capacity=city.aggregate(psrc_parcel.parcel.max_developable_nonresidential_capacity)',
           dataset_name = 'city',
           source_data = source_data,
           ), 
       Table(
           attribute = 'max_dev_capacity=city.aggregate(psrc_parcel.parcel.max_developable_capacity)',
           dataset_name = 'city',
           source_data = source_data,
           ),
       #Table(
           #attribute = 'max_dev_residential_capacity=city.aggregate(parcel.aggregate(development_project_proposal.aggregate(urbansim_parcel.development_project_proposal_component.residential_units), function=maximum))',
           #dataset_name = 'city',
           #source_data = source_data,
           #), 
       #Table(
           #attribute = 'max_dev_nonresidential_capacity=city.aggregate(parcel.aggregate(urbansim_parcel.development_project_proposal.building_sqft_non_residential, function=maximum))',
           #dataset_name = 'city',
           #source_data = source_data,
           #), 
       #Table(
           #attribute = 'max_dev_capacity=city.aggregate(parcel.aggregate(urbansim_parcel.development_project_proposal.building_sqft, function=maximum))',
           #dataset_name = 'city',
           #source_data = source_data,
           #),    
       
       # ## FAZ indicators
       # ==================
       
       Table(
           attribute = 'max_dev_residential_capacity=faz.aggregate(psrc_parcel.parcel.max_developable_residential_capacity, intermediates=[zone])',
           dataset_name = 'faz',
           source_data = source_data,
           ),
       Table(
           attribute = 'max_dev_nonresidential_capacity=faz.aggregate(psrc_parcel.parcel.max_developable_nonresidential_capacity, intermediates=[zone])',
           dataset_name = 'faz',
           source_data = source_data,
           ),
       Table(
           attribute = 'max_dev_capacity=faz.aggregate(psrc_parcel.parcel.max_developable_capacity, intermediates=[zone])',
           dataset_name = 'faz',
           source_data = source_data,
           ),              
    
       #Table(
           #attribute = 'max_dev_residential_capacity=faz.aggregate(parcel.aggregate(development_project_proposal.aggregate(urbansim_parcel.development_project_proposal_component.residential_units), function=maximum), intermediates=[zone])',
           #dataset_name = 'faz',
           #source_data = source_data,
           #), 
       #Table(
           #attribute = 'max_dev_nonresidential_capacity=faz.aggregate(parcel.aggregate(urbansim_parcel.development_project_proposal.building_sqft_non_residential, function=maximum), intermediates=[zone])',
           #dataset_name = 'faz',
           #source_data = source_data,
           #), 
       #Table(
           #attribute = 'max_dev_capacity=faz.aggregate(parcel.aggregate(urbansim_parcel.development_project_proposal.building_sqft, function=maximum), intermediates=[zone])',
           #dataset_name = 'faz',
           #source_data = source_data,
           #),   
           
       # ## TAZ indicators                                                                                                                                     
       # ==================                                                                                                                                    

       #Table(
           #attribute = 'max_dev_residential_capacity=zone.aggregate(parcel.aggregate(development_project_proposal.aggregate(urbansim_parcel.development_project_proposal_component.residential_units), function=maximum))',
           #dataset_name = 'zone',
           #source_data = source_data,
           #),
       #Table(
           #attribute = 'max_dev_nonresidential_capacity=zone.aggregate(parcel.aggregate(urbansim_parcel.development_project_proposal.building_sqft_non_residential, function=maximum))',
           #dataset_name = 'zone',
           #source_data = source_data,
           #),
       #Table(
           #attribute = 'max_dev_capacity=zone.aggregate(parcel.aggregate(urbansim_parcel.development_project_proposal.building_sqft, function=maximum))',
           #dataset_name = 'zone',
           #source_data = source_data,
           #),
       Table(
           attribute = 'max_dev_residential_capacity=zone.aggregate(psrc_parcel.parcel.max_developable_residential_capacity)',
           dataset_name = 'zone',
           source_data = source_data,
           ),
       Table(
           attribute = 'max_dev_nonresidential_capacity=zone.aggregate(psrc_parcel.parcel.max_developable_nonresidential_capacity)',
           dataset_name = 'zone',
           source_data = source_data,
           ),
       Table(
           attribute = 'max_dev_capacity=zone.aggregate(psrc_parcel.parcel.max_developable_capacity)',
           dataset_name = 'zone',
           source_data = source_data,
           ), 
       
       # ## Growth centers indicators
       # ============================
   
       Table(
              attribute = 'max_dev_residential_capacity=growth_center.aggregate(psrc_parcel.parcel.max_developable_residential_capacity)',
              dataset_name = 'growth_center',
              source_data = source_data,
              ), 
       Table(
              attribute = 'max_dev_nonresidential_capacity=growth_center.aggregate(psrc_parcel.parcel.max_developable_nonresidential_capacity)',
              dataset_name = 'growth_center',
              source_data = source_data,
              ), 
       Table(
              attribute = 'max_dev_capacity=growth_center.aggregate(psrc_parcel.parcel.max_developable_capacity)',
              dataset_name = 'growth_center',
              source_data = source_data,
              ),       
    
    ]
    return indicators
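
A minimal usage sketch for the list returned above, reusing the IndicatorFactory().create_indicators call shown in the test examples earlier on this page; the cache path and run description are placeholders, and the IndicatorFactory import path is an assumption about the local opus_core layout:

    from opus_core.indicator_framework.core.indicator_factory import IndicatorFactory  # import path assumed

    # get_indicators is the function defined above; both arguments are placeholders.
    indicators = get_indicators(
        cache_directory='/path/to/urbansim_cache/run_xxxx',
        run_description='max developable capacity indicators')
    IndicatorFactory().create_indicators(
        indicators=indicators,
        display_error_box=False,
        show_results=False)
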
Example #13
def get_indicators(
        cache_directory,
        run_description,
        years=[2014, 2015, 2017, 2020, 2025, 2030, 2035, 2040, 2045, 2050],
        base_year=2014):
    #def get_indicators(cache_directory, run_description, years = [2014,2015,2017,2020,2021,2022,2023,2024,2025,2030,2035,2040,2045,2046,2047,2048,2049,2050], base_year=2014):
    #def get_indicators(cache_directory, run_description, years = [2050], base_year=2014):
    #def get_indicators(cache_directory, run_description, years = [2014,2017,2050], base_year=2014):
    source_data = SourceData(
        cache_directory=cache_directory,
        run_description=run_description,
        years=years,
        base_year=base_year,
        dataset_pool_configuration=DatasetPoolConfiguration(
            package_order=[
                'psrc_parcel', 'urbansim_parcel', 'psrc', 'urbansim',
                'opus_core'
            ],
            package_order_exceptions={},
        ),
    )

    indicators = [
        DatasetTable(
            source_data=source_data,
            dataset_name='faz',
            name='DU_and_HH_by_bld_type_by_faz_by_year',
            attributes=[
                'DU_SF_19=faz.aggregate(urbansim_parcel.building.residential_units * (building.building_type_id==19), intermediates=[parcel])',
                'DU_MF_12=faz.aggregate(urbansim_parcel.building.residential_units * (building.building_type_id==12), intermediates=[parcel])',
                'DU_CO_4=faz.aggregate(urbansim_parcel.building.residential_units * (building.building_type_id==4), intermediates=[parcel])',
                'DU_MH_11=faz.aggregate(urbansim_parcel.building.residential_units * (building.building_type_id==11), intermediates=[parcel])',
                'DU_Total=faz.aggregate(urbansim_parcel.building.residential_units, intermediates=[parcel])',
                'HH_SF_19=faz.aggregate(urbansim_parcel.building.number_of_households * (building.building_type_id==19), intermediates=[parcel])',
                'HH_MF_12=faz.aggregate(urbansim_parcel.building.number_of_households * (building.building_type_id==12), intermediates=[parcel])',
                'HH_CO_4=faz.aggregate(urbansim_parcel.building.number_of_households * (building.building_type_id==4), intermediates=[parcel])',
                'HH_MH_11=faz.aggregate(urbansim_parcel.building.number_of_households * (building.building_type_id==11), intermediates=[parcel])',
                'HH_Total=faz.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
            ],
        ),

        ## County Level Control indicators  - added 4.17.2018
        Table(
            attribute=
            'population = county.aggregate(urbansim_parcel.parcel.population, intermediates=[parcel])',
            dataset_name='county',
            source_data=source_data,
        ),
        Table(
            attribute=
            'households = county.aggregate(urbansim_parcel.parcel.number_of_households, intermediates=[parcel])',
            dataset_name='county',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment = county.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[parcel])',
            dataset_name='county',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units = county.aggregate(urbansim_parcel.parcel.population, intermediates=[parcel]) + county.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[parcel])',
            dataset_name='county',
            source_data=source_data,
        ),

        ## County Regional Geography indicators  - added 1.23.2018
        Table(
            attribute=
            'population = fips_rgs_proposed.aggregate(urbansim_parcel.parcel.population, intermediates=[city])',
            dataset_name='fips_rgs_proposed',
            source_data=source_data,
        ),
        Table(
            attribute=
            'households = fips_rgs_proposed.aggregate(urbansim_parcel.parcel.number_of_households, intermediates=[city])',
            dataset_name='fips_rgs_proposed',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment = fips_rgs_proposed.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[city])',
            dataset_name='fips_rgs_proposed',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units = fips_rgs_proposed.aggregate(urbansim_parcel.parcel.population, intermediates=[city]) + fips_rgs_proposed.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[city])',
            dataset_name='fips_rgs_proposed',
            source_data=source_data,
        ),

        ## County MHS vacancy indicators - added 11.1.2017
        DatasetTable(
            source_data=source_data,
            dataset_name='county',
            name='eoy_vacancy_by_building_type',
            output_type='csv',
            attributes=[
                'res_4_VR=numpy.safe_array_divide(county.aggregate(urbansim_parcel.building.vacant_residential_units*(building.building_type_id==4)),county.aggregate(urbansim_parcel.building.residential_units*(building.building_type_id==4)))',
                'res_12_VR=numpy.safe_array_divide(county.aggregate(urbansim_parcel.building.vacant_residential_units*(building.building_type_id==12)),county.aggregate(urbansim_parcel.building.residential_units*(building.building_type_id==12)))',
                'res_19_VR=numpy.safe_array_divide(county.aggregate(urbansim_parcel.building.vacant_residential_units*(building.building_type_id==19)),county.aggregate(urbansim_parcel.building.residential_units*(building.building_type_id==19)))',
                'nonres_3_VR=numpy.safe_array_divide(county.aggregate(psrc_parcel.building.vacant_non_home_based_job_space*(psrc_parcel.building.building_type_id==3)),county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==3)))',
                'nonres_8_VR=numpy.safe_array_divide(county.aggregate(psrc_parcel.building.vacant_non_home_based_job_space*(psrc_parcel.building.building_type_id==8)),county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==8)))',
                'nonres_13_VR=numpy.safe_array_divide(county.aggregate(psrc_parcel.building.vacant_non_home_based_job_space*(psrc_parcel.building.building_type_id==13)),county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==13)))',
                'nonres_20_VR=numpy.safe_array_divide(county.aggregate(psrc_parcel.building.vacant_non_home_based_job_space*(psrc_parcel.building.building_type_id==20)),county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==20)))',
                'nonres_21_VR=numpy.safe_array_divide(county.aggregate(psrc_parcel.building.vacant_non_home_based_job_space*(psrc_parcel.building.building_type_id==21)),county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==21)))',
            ],
        ),
        DatasetTable(
            source_data=source_data,
            dataset_name='county',
            name='units_and_nonres_sqft_by_building_type',
            output_type='csv',
            attributes=[
                'res_4_units=county.aggregate(urbansim_parcel.building.residential_units*(building.building_type_id==4))',
                'res_12_units=county.aggregate(urbansim_parcel.building.residential_units*(building.building_type_id==12))',
                'res_19_units=county.aggregate(urbansim_parcel.building.residential_units*(building.building_type_id==19))',
                'nonres_3_spaces=county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==3))',
                'nonres_8_spaces=county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==8))',
                'nonres_13_spaces=county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==13))',
                'nonres_20_spaces=county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==20))',
                'nonres_21_spaces=county.aggregate(psrc_parcel.building.total_non_home_based_job_space*(psrc_parcel.building.building_type_id==21))',
                'nonres_3_sqft=county.aggregate(psrc_parcel.building.non_residential_sqft*(psrc_parcel.building.building_type_id==3))',
                'nonres_8_sqft=county.aggregate(psrc_parcel.building.non_residential_sqft*(psrc_parcel.building.building_type_id==8))',
                'nonres_13_sqft=county.aggregate(psrc_parcel.building.non_residential_sqft*(psrc_parcel.building.building_type_id==13))',
                'nonres_20_sqft=county.aggregate(psrc_parcel.building.non_residential_sqft*(psrc_parcel.building.building_type_id==20))',
                'nonres_21_sqft=county.aggregate(psrc_parcel.building.non_residential_sqft*(psrc_parcel.building.building_type_id==21))',
            ],
        ),

        ## FAZ indicators
        ## =====================
        Table(
            attribute=
            'households=faz.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),
        Table(
            attribute=
            'population=faz.aggregate(urbansim_parcel.building.population, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment=faz.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),
        Table(
            attribute=
            'nonres_sqft=faz.aggregate(urbansim_parcel.building.non_residential_sqft, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),
        Table(
            attribute=
            'residential_units=faz.aggregate(urbansim_parcel.building.residential_units, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),
        Table(
            attribute=
            'building_sqft=faz.aggregate(urbansim_parcel.parcel.building_sqft, intermediates=[zone])',
            dataset_name='faz',
            source_data=source_data,
        ),

        ## TAZ indicators
        Table(
            attribute=
            'residential_units=zone.aggregate(urbansim_parcel.building.residential_units, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),
        Table(
            attribute=
            'households=zone.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),
        Table(
            attribute=
            'population=zone.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment=zone.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),
        Table(
            attribute=
            'nonres_sqft=zone.aggregate(urbansim_parcel.building.non_residential_sqft, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),
        Table(
            attribute=
            'building_sqft=zone.aggregate(urbansim_parcel.parcel.building_sqft)',
            dataset_name='zone',
            source_data=source_data,
        ),
        DatasetTable(source_data=source_data,
                     dataset_name='zone',
                     name='employment_by_aggr_sector',
                     attributes=jobs_by_sector("zone", "urbansim_parcel"),
                     output_type='tab'),

        ## City indicators
        ## ==================
        Table(
            attribute=
            'households=city.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'population=city.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment=city.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'residential_units=city.aggregate(urbansim_parcel.building.residential_units, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'nonres_sqft=city.aggregate(urbansim_parcel.building.non_residential_sqft, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'building_sqft=city.aggregate(urbansim_parcel.parcel.building_sqft)',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute='acres=city.aggregate(parcel.parcel_sqft/43560.)',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units = city.aggregate(urbansim_parcel.parcel.population, intermediates=[parcel]) + city.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        DatasetTable(source_data=source_data,
                     dataset_name='city',
                     name='employment_by_aggr_sector',
                     attributes=jobs_by_sector("city"),
                     output_type='tab'),

        # # ## Tract-City indicators
        # # ==================

        # # # Table(
        # # # attribute = 'households=tractcity.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
        # # # dataset_name = 'tractcity',
        # # # source_data = source_data,
        # # # ),
        # # # Table(
        # # # attribute = 'population=tractcity.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
        # # # dataset_name = 'tractcity',
        # # # source_data = source_data,
        # # # ),
        # # # Table(
        # # # attribute = 'employment=tractcity.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel])',
        # # # dataset_name = 'tractcity',
        # # # source_data = source_data,
        # # # ),
        # # # Table(
        # # # attribute = 'residential_units=tractcity.aggregate(urbansim_parcel.building.residential_units, intermediates=[parcel])',
        # # # dataset_name = 'tractcity',
        # # # source_data = source_data,
        # # # ),
        # # # Table(
        # # # attribute = 'nonres_sqft=tractcity.aggregate(urbansim_parcel.building.non_residential_sqft, intermediates=[parcel])',
        # # # dataset_name = 'tractcity',
        # # # source_data = source_data,
        # # # ),
        # # # Table(
        # # # attribute = 'building_sqft=tractcity.aggregate(urbansim_parcel.parcel.building_sqft)',
        # # # dataset_name = 'tractcity',
        # # # source_data = source_data,
        # # # ),

        ## Growth Centers Indicators
        ## ============================
        Table(
            attribute=
            'residential_units=growth_center.aggregate(urbansim_parcel.building.residential_units, intermediates=[parcel])',
            dataset_name='growth_center',
            source_data=source_data,
        ),
        Table(
            attribute=
            'households=growth_center.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
            dataset_name='growth_center',
            source_data=source_data,
        ),
        Table(
            attribute=
            'population=growth_center.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
            dataset_name='growth_center',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment=growth_center.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel])',
            dataset_name='growth_center',
            source_data=source_data,
        ),
        Table(
            attribute=
            'nonres_sqft=growth_center.aggregate(urbansim_parcel.building.non_residential_sqft, intermediates=[parcel])',
            dataset_name='growth_center',
            source_data=source_data,
        ),
        Table(
            attribute=
            'building_sqft=growth_center.aggregate(urbansim_parcel.parcel.building_sqft)',
            dataset_name='growth_center',
            source_data=source_data,
        ),
        Table(
            attribute=
            'acres=growth_center.aggregate(parcel.parcel_sqft/43560.)',
            dataset_name='growth_center',
            source_data=source_data,
        ),

        ## Large Area Indicators
        ## ============================
        DatasetTable(source_data=source_data,
                     dataset_name='large_area',
                     name='employment_by_aggr_sector',
                     attributes=['large_area.county_id'] +
                     jobs_by_sector("large_area"),
                     output_type='tab'),

        ## Tract indicators
        ##============================
        Table(
            attribute=
            'households=census_tract.aggregate(urbansim_parcel.parcel.number_of_households)',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'population=census_tract.aggregate(urbansim_parcel.parcel.population)',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment=census_tract.aggregate(urbansim_parcel.parcel.number_of_jobs)',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        # Table(
        # attribute = 'nonres_sqft=fcensus_tract.aggregate(urbansim_parcel.parcel.non_residential_sqft, intermediates=[census_block_group])',
        # dataset_name = 'census_tract',
        # source_data = source_data,
        # )
        Table(
            attribute=
            'residential_units=census_tract.aggregate(urbansim_parcel.parcel.residential_units)',
            dataset_name='census_tract',
            source_data=source_data,
        ),

        #DatasetTable(
        #    source_data = source_data,
        #    dataset_name = 'census_tract',
        #    name = 'employment_by_aggr_sector',
        #    attributes = jobs_by_sector("census_tract"),
        #    output_type = 'csv'
        #    ),

        ##------Liming's Unplaced Households and Jobs in the Region-----------
        Table(
            attribute=
            'num_unplaced_hhs=alldata.aggregate_all(household.building_id<=0)',
            dataset_name='alldata',
            source_data=source_data,
        ),
        Table(
            attribute=
            'num_unplaced_jobs=alldata.aggregate_all(job.building_id<=0)',
            dataset_name='alldata',
            source_data=source_data,
        ),

        ##       Regional Total Tables
        Table(
            attribute=
            'residential_units=alldata.aggregate_all(urbansim_parcel.building.residential_units)',
            dataset_name='alldata',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_residential_sqft=alldata.aggregate_all(urbansim_parcel.building.non_residential_sqft)',
            dataset_name='alldata',
            source_data=source_data,
        ),
        Table(
            attribute=
            'households=alldata.aggregate_all(urbansim_parcel.building.number_of_households)',
            dataset_name='alldata',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment=alldata.aggregate_all(urbansim_parcel.building.number_of_jobs)',
            dataset_name='alldata',
            source_data=source_data,
        ),
        Table(
            attribute=
            'population=alldata.aggregate_all(urbansim_parcel.building.population)',
            dataset_name='alldata',
            source_data=source_data,
        ),

        ## Demographic indicators
        ## ======================
        # DatasetTable(
        # source_data = source_data,
        # dataset_name = 'alldata',
        # name =  'pptyp',
        # output_type = 'csv',
        # attributes = [
        # 'full_time_worker = alldata.aggregate_all(person.employment_status==1)',
        # 'part_time_worker = alldata.aggregate_all(numpy.logical_and(person.employment_status==2,person.student==0))',
        # 'non_working_adult_age_65_plus = alldata.aggregate_all(numpy.logical_and(person.employment_status<1,numpy.logical_and(person.student==0,person.age>64)))',
        # 'non_working_adult_age_16_64 = alldata.aggregate_all(numpy.logical_and(person.employment_status<1,numpy.logical_and(person.student==0,numpy.logical_and(person.age<65,person.age>15))))',
        # 'university_student = alldata.aggregate_all(numpy.logical_and(person.employment_status<>1,numpy.logical_and(person.student==1,person.age>18)))',
        # 'hs_student_age_15_up = alldata.aggregate_all(numpy.logical_and(person.employment_status<>1,numpy.logical_and(person.student==1,numpy.logical_and(person.age>15,person.age<19))))',
        # 'child_age_5_15 = alldata.aggregate_all(numpy.logical_and(person.age>4,person.age<16))',
        # 'child_age_0_4 = alldata.aggregate_all(person.age<5)',
        # ##                'age_6_to_10 = alldata.aggregate_all(numpy.logical_and(person.age<11,person.age>=6))',
        # ##                'age_11_to_15 = alldata.aggregate_all(numpy.logical_and(person.age<16,person.age>=11))',
        # ##                'age_16_to_20 = alldata.aggregate_all(numpy.logical_and(person.age<21,person.age>=16))',
        # ##                'age_21_to_25 = alldata.aggregate_all(numpy.logical_and(person.age<26,person.age>=21))',
        # ##                'income = households.income',
        # ],
        # ),
        # DatasetTable(
        # source_data = source_data,
        # dataset_name = 'alldata',
        # name =  'persons_by_age_groups_of_interest',
        # output_type = 'csv',
        # attributes = [
        # 'Under5 = alldata.aggregate_all(person.age<5)',
        # 'Five_18 = alldata.aggregate_all(numpy.logical_and(person.age<19,person.age>=5))',
        # 'Nineteen_24 = alldata.aggregate_all(numpy.logical_and(person.age<25,person.age>=19))',
        # 'Twentyfive_60 = alldata.aggregate_all(numpy.logical_and(person.age<61,person.age>=25))',
        # 'Over_60 = alldata.aggregate_all(person.age>=61)',
        # ],
        # ),

        # DatasetTable(
        # source_data = source_data,
        # dataset_name = 'alldata',
        # name =  'persons_by_5year_age_groups',
        # output_type = 'csv',
        # attributes = [
        # 'age_0_to_5 = alldata.aggregate_all(person.age<6)',
        # 'age_6_to_10 = alldata.aggregate_all(numpy.logical_and(person.age<11,person.age>=6))',
        # 'age_11_to_15 = alldata.aggregate_all(numpy.logical_and(person.age<16,person.age>=11))',
        # 'age_16_to_20 = alldata.aggregate_all(numpy.logical_and(person.age<21,person.age>=16))',
        # 'age_21_to_25 = alldata.aggregate_all(numpy.logical_and(person.age<26,person.age>=21))',
        # 'age_26_to_30 = alldata.aggregate_all(numpy.logical_and(person.age<31,person.age>=26))',
        # 'age_31_to_35 = alldata.aggregate_all(numpy.logical_and(person.age<36,person.age>=31))',
        # 'age_36_to_40 = alldata.aggregate_all(numpy.logical_and(person.age<41,person.age>=36))',
        # 'age_41_to_45 = alldata.aggregate_all(numpy.logical_and(person.age<46,person.age>=41))',
        # 'age_46_to_50 = alldata.aggregate_all(numpy.logical_and(person.age<51,person.age>=46))',
        # 'age_51_to_55 = alldata.aggregate_all(numpy.logical_and(person.age<56,person.age>=51))',
        # 'age_56_to_60 = alldata.aggregate_all(numpy.logical_and(person.age<61,person.age>=56))',
        # 'age_61_to_65 = alldata.aggregate_all(numpy.logical_and(person.age<66,person.age>=61))',
        # 'age_66_to_70 = alldata.aggregate_all(numpy.logical_and(person.age<71,person.age>=66))',
        # 'age_71_to_75 = alldata.aggregate_all(numpy.logical_and(person.age<76,person.age>=71))',
        # 'age_76_to_80 = alldata.aggregate_all(numpy.logical_and(person.age<81,person.age>=76))',
        # 'age_81_to_85 = alldata.aggregate_all(numpy.logical_and(person.age<86,person.age>=81))',
        # 'age_86_to_90 = alldata.aggregate_all(numpy.logical_and(person.age<91,person.age>=86))',
        # 'age_91_to_95 = alldata.aggregate_all(numpy.logical_and(person.age<96,person.age>=91))',
        # 'age_96_and_up = alldata.aggregate_all(person.age>=96)',
        # ],
        # ),

        # DatasetTable(
        # source_data = source_data,
        # dataset_name = 'alldata',
        # name =  'regional_total_hhs_by_new_14incomegroups',
        # output_type = 'csv',
        # #output_type = 'sql',
        # #storage_location = database,
        # attributes = [
        # 'Group1_Under50K = alldata.aggregate_all(household.income<50000)',
        # 'Group2_50_75K = alldata.aggregate_all(numpy.logical_and(household.income<75001,household.income>=50000))',
        # 'Group3_75_100K = alldata.aggregate_all(numpy.logical_and(household.income<100001,household.income>=75000))',
        # 'Group4_Over100K = alldata.aggregate_all(household.income>=100001)',
        # ],
        # ),

        # DatasetTable(
        # source_data = source_data,
        # dataset_name = 'alldata',
        # name =  'regional_total_hhs_by_30_60_90_in_14dollars_groups',
        # output_type = 'csv',
        # #output_type = 'sql',
        # #storage_location = database,
        # attributes = [
        # 'Group1_Under36870K = alldata.aggregate_all(household.income<36870)',
        # 'Group2_UpTo73700 = alldata.aggregate_all(numpy.logical_and(household.income<73700,household.income>=36870))',
        # 'Group3_UpTo110600 = alldata.aggregate_all(numpy.logical_and(household.income<110600,household.income>=73700))',
        # 'Group4_Over110600 = alldata.aggregate_all(household.income>=110600)',
        # ],
        # ),

        # DatasetTable(
        # source_data = source_data,
        # dataset_name = 'alldata',
        # name =  'pwtyp',
        # output_type = 'csv',
        # attributes = [
        # 'full_time = alldata.aggregate_all((person.employment_status==1)*(urbansim_parcel.person.job_id > 0))',
        # 'part_time = alldata.aggregate_all((person.employment_status==2)*(urbansim_parcel.person.job_id > 0))',
        # 'workers_no_job = alldata.aggregate_all((person.employment_status >0)*(urbansim_parcel.person.job_id < 0))',
        # 'non_workers_no_job = alldata.aggregate_all((person.employment_status <1)*(urbansim_parcel.person.job_id < 0))',
        # ],
        # ),
    ]
    return indicators
def make_multiyear_workbook(cache_directory, yearstart=2010, yearend=2035):
    """ This spits out the indicators for a multiyear workbook and then combines them 
        into a single file, cache_directory/alldata.csv
        You can then copy this a copy of the MultiyearLU_template.xls and see some basic
          performance/troubleshooting graphs for the various submodels.
    """

    multiyear_workbook_source_data = SourceData(
        cache_directory=cache_directory,
        run_description="Run description is used for what?",
        years=range(yearstart, yearend + 1),
        dataset_pool_configuration=DatasetPoolConfiguration(
            package_order=['sanfrancisco', 'urbansim', 'opus_core'], ),
    )

    multiyear_workbook_alldata_attributes = \
    [
        # commercial
        'bldg_count_comm=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==1,1,0))',
        'bldg_occsqft_comm=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==1,sanfrancisco.building.occupied_sqft,0))',
        'bldg_count_totsqft_comm=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==1,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_vacrate_comm=(alldata.aggregate_all(where(sanfrancisco.building.building_group_id==1,sanfrancisco.building.non_residential_sqft,0))-' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==1,sanfrancisco.building.occupied_sqft,0)))/' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==1,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_count_totunit_comm=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==1,sanfrancisco.building.residential_units,0))',

        # institutional
        'bldg_count_inst=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==2,1,0))',
        'bldg_occsqft_inst=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==2,sanfrancisco.building.occupied_sqft,0))',
        'bldg_count_totsqft_inst=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==2,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_vacrate_inst=(alldata.aggregate_all(where(sanfrancisco.building.building_group_id==2,sanfrancisco.building.non_residential_sqft,0))-' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==2,sanfrancisco.building.occupied_sqft,0)))/' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==2,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_count_totunit_inst=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==2,sanfrancisco.building.residential_units,0))',

        # office
        'bldg_count_offc=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==3,1,0))',
        'bldg_occsqft_offc=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==3,sanfrancisco.building.occupied_sqft,0))',
        'bldg_count_totsqft_offc=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==3,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_vacrate_offc=(alldata.aggregate_all(where(sanfrancisco.building.building_group_id==3,sanfrancisco.building.non_residential_sqft,0))-' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==3,sanfrancisco.building.occupied_sqft,0)))/' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==3,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_count_totunit_offc=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==3,sanfrancisco.building.residential_units,0))',

        # residential
        'bldg_count_res=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==4,1,0))',
        'bldg_occunit_res=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==4,sanfrancisco.building.number_of_households,0))',
        'bldg_count_totunit_res=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==4,sanfrancisco.building.residential_units,0))',
        'bldg_vacrate_res=(alldata.aggregate_all(where(sanfrancisco.building.building_group_id==4,sanfrancisco.building.residential_units,0))-' +
                          'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==4,sanfrancisco.building.number_of_households,0)))/' +
                          'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==4,sanfrancisco.building.residential_units,0))',
        'bldg_count_totsqft_res=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==4,sanfrancisco.building.non_residential_sqft,0))',

        # res in non-res group buildings
        'bldg_count_reso=alldata.aggregate_all(where(sanfrancisco.building.building_group_id!=4,1,0))',
        'bldg_occunit_reso=alldata.aggregate_all(where(sanfrancisco.building.building_group_id!=4,sanfrancisco.building.number_of_households,0))',
        'bldg_count_totunit_reso=alldata.aggregate_all(where(sanfrancisco.building.building_group_id!=4,sanfrancisco.building.residential_units,0))',
        'bldg_vacrate_reso=(alldata.aggregate_all(where(sanfrancisco.building.building_group_id!=4,sanfrancisco.building.residential_units,0))-' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id!=4,sanfrancisco.building.number_of_households,0)))/' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id!=4,sanfrancisco.building.residential_units,0))',
        'bldg_count_totsqft_ores=alldata.aggregate_all(where(sanfrancisco.building.building_group_id!=4,sanfrancisco.building.non_residential_sqft,0))',

        # visitor
        'bldg_count_vis=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==5,1,0))',
        'bldg_occsqft_vis=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==5,sanfrancisco.building.occupied_sqft,0))',
        'bldg_count_totsqft_vis=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==5,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_vacrate_vis=(alldata.aggregate_all(where(sanfrancisco.building.building_group_id==5,sanfrancisco.building.non_residential_sqft,0))-' +
                          'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==5,sanfrancisco.building.occupied_sqft,0)))/' +
                          'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==5,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_count_totunit_vis=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==5,sanfrancisco.building.residential_units,0))',

        # mixed
        'bldg_count_mix=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==6,1,0))',
        'bldg_occsqft_mix=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==6,sanfrancisco.building.occupied_sqft,0))',
        'bldg_count_totsqft_mix=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==6,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_vacrate_mix=(alldata.aggregate_all(where(sanfrancisco.building.building_group_id==6,sanfrancisco.building.non_residential_sqft,0))-' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==6,sanfrancisco.building.occupied_sqft,0)))/' +
                           'alldata.aggregate_all(where(sanfrancisco.building.building_group_id==6,sanfrancisco.building.non_residential_sqft,0))',
        'bldg_count_totunit_mix=alldata.aggregate_all(where(sanfrancisco.building.building_group_id==6,sanfrancisco.building.residential_units,0))',

        # unplaced buildings
        'bldg_count_unplaced=alldata.aggregate_all(where(sanfrancisco.building.parcel_id<0,1,0))',
        'bldg_count_unplaced_BLCMspec=alldata.aggregate_all(where(numpy.logical_and(sanfrancisco.building.parcel_id<0,sanfrancisco.building.is_placed_type>0),1,0))',
        'bldg_count_unplaced_noBLCMspec=alldata.aggregate_all(where(numpy.logical_and(sanfrancisco.building.parcel_id<0,sanfrancisco.building.is_placed_type==0),1,0))',

        # household counts
        'hhld_count_sz1=alldata.aggregate_all(where(household.household_size==1,1,0))',
        'hhld_count_sz2=alldata.aggregate_all(where(household.household_size==2,1,0))',
        'hhld_count_sz34=alldata.aggregate_all(where(numpy.logical_or(household.household_size==3,household.household_size==4),1,0))',
        'hhld_count_sz56=alldata.aggregate_all(where(numpy.logical_or(household.household_size==5,household.household_size==6),1,0))',
        'hhld_count_sz7=alldata.aggregate_all(where(household.household_size>=7,1,0))',

        # business counts
        'jobs_count_sect1=alldata.aggregate_all(where(business.sector_id==1,business.employment,0))',
        'jobs_count_sect2=alldata.aggregate_all(where(business.sector_id==2,business.employment,0))',
        'jobs_count_sect3=alldata.aggregate_all(where(business.sector_id==3,business.employment,0))',
        'jobs_count_sect4=alldata.aggregate_all(where(business.sector_id==4,business.employment,0))',
        'jobs_count_sect5=alldata.aggregate_all(where(business.sector_id==5,business.employment,0))',
        'jobs_count_sect6=alldata.aggregate_all(where(business.sector_id==6,business.employment,0))',
        'jobs_count_sect7=alldata.aggregate_all(where(business.sector_id==7,business.employment,0))',
        'jobs_count_sect8=alldata.aggregate_all(where(business.sector_id==8,business.employment,0))',
        'jobs_count_sect9=alldata.aggregate_all(where(business.sector_id==9,business.employment,0))',
        'jobs_count_sect10=alldata.aggregate_all(where(business.sector_id==10,business.employment,0))',
        'jobs_count_sect11=alldata.aggregate_all(where(business.sector_id==11,business.employment,0))',

        # unplaced businesses, how full are buildings?  overall nonres sqft totals
        'business_count_unplaced=alldata.aggregate_all(where(business.building_id<1,1,0))',
        'bldg_count_overfullbiz=alldata.aggregate_all(where(sanfrancisco.building.occupied_sqft>building.non_residential_sqft,1,0))',
        'bldg_count_partialfullbiz=alldata.aggregate_all(where(numpy.logical_and(sanfrancisco.building.occupied_sqft<building.non_residential_sqft,'+
                                                                                'sanfrancisco.building.occupied_sqft>0),1,0))',
        'bldg_count_vacantbiz=alldata.aggregate_all(where(numpy.logical_and(building.non_residential_sqft>0,'+
                                                                           'sanfrancisco.building.occupied_sqft==0),1,0))',
        # these are covered above for building_groups
        'bldg_nonres_sqft_total=alldata.aggregate_all(building.non_residential_sqft)',
        'bldg_nonres_sqft_occ=alldata.aggregate_all(sanfrancisco.building.occupied_sqft)',
        'bldg_nonres_sqft_vacant=alldata.aggregate_all(building.non_residential_sqft)-alldata.aggregate_all(sanfrancisco.building.occupied_sqft)',

        # unplaced households, how full are buildings?  overall hhunit totals
        'hhld_count_unplaced=alldata.aggregate_all(where(household.building_id<1,1,0))',
        'hhld_count_overfullhh=alldata.aggregate_all(where(sanfrancisco.building.number_of_households>building.residential_units,1,0))',
        'hhld_count_partialfullhh=alldata.aggregate_all(where(numpy.logical_and(sanfrancisco.building.number_of_households<building.residential_units,'+
                                                                                'sanfrancisco.building.number_of_households>0),1,0))',
        'hhld_count_vacanthh=alldata.aggregate_all(where(numpy.logical_and(building.residential_units>0,'+
                                                                          'sanfrancisco.building.number_of_households==0),1,0))',
        # these are covered above for building_groups
        'hhld_res_unit_total=alldata.aggregate_all(building.residential_units)',
        'hhld_res_unit_occ=alldata.aggregate_all(sanfrancisco.building.number_of_households)',
        'hhld_res_unit_vacant=alldata.aggregate_all(building.residential_units)-alldata.aggregate_all(sanfrancisco.building.number_of_households)',

    ]

    class SFIndicatorDialect(csv.excel):
        lineterminator = '\n'

    csv.register_dialect("SFIndicatorDialect", SFIndicatorDialect)

    onetableWriter = csv.writer(
        open(os.path.join(cache_directory, "alldata.csv"), 'w'),
        dialect='SFIndicatorDialect')
    headeryears = []
    for year in multiyear_workbook_source_data.years:
        headeryears.append("y" + str(year))
    onetableWriter.writerow(['variable'] + headeryears)

    for attr in multiyear_workbook_alldata_attributes:
        attr_name = attr.partition('=')[0]
        request = [
            Table(source_data=multiyear_workbook_source_data,
                  dataset_name='alldata',
                  name=attr_name,
                  attribute=attr)
        ]
        IndicatorFactory().create_indicators(indicators=request,
                                             display_error_box=False,
                                             show_results=False)
        # open this file
        filename = request[0].get_file_path()
        indicatorReader = csv.reader(open(filename, 'r'))

        # title row
        fields = indicatorReader.next()
        assert len(fields) == len(multiyear_workbook_source_data.years) + 1  # quick check

        # data row
        fields = indicatorReader.next()
        fields[0] = attr_name
        onetableWriter.writerow(fields)

        # for *_count_* rows, add *_new_* row
        if attr_name.find("_count_") >= 0:
            attr_new_name = attr_name.replace("_count_", "_new_")
            new_fields = [attr_new_name, 0]
            for ind in range(1, len(multiyear_workbook_source_data.years)):
                new_fields.append(float(fields[ind + 1]) - float(fields[ind]))

            onetableWriter.writerow(new_fields)
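            # A minimal worked sketch of the *_count_* -> *_new_* logic above: each
            # *_new_* row holds 0 for the first year and the year-over-year change
            # thereafter. The values below are placeholders, not model output:
            #
            #   fields     = ['hhld_count_sz1', '100', '110', '125']
            #   new_fields = ['hhld_new_sz1', 0, 10.0, 15.0]

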
def get_indicators(cache_directory,
                   run_description,
                   years=[2014, 2017, 2050],
                   base_year=2014):

    source_data = SourceData(
        cache_directory=cache_directory,
        run_description=run_description,
        years=years,
        base_year=base_year,
        dataset_pool_configuration=DatasetPoolConfiguration(
            package_order=[
                'psrc_parcel', 'urbansim_parcel', 'psrc', 'urbansim',
                'opus_core'
            ],
            package_order_exceptions={},
        ),
    )

    indicators = [

        # # Minority - population, households, employment, and activity units
        Table(
            attribute=
            'population = minority.aggregate(urbansim_parcel.parcel.population, intermediates=[parcel])',
            dataset_name='minority',
            source_data=source_data,
        ),
        Table(
            attribute=
            'households = minority.aggregate(urbansim_parcel.parcel.number_of_households, intermediates=[parcel])',
            dataset_name='minority',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment = minority.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[parcel])',
            dataset_name='minority',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units = minority.aggregate(urbansim_parcel.parcel.population, intermediates=[parcel]) + minority.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[parcel])',
            dataset_name='minority',
            source_data=source_data,
        ),

        # ## Hex Level Indicators (minority_id = 2) -- for mapping
        Table(
            attribute=
            'minority_population = hex.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_households = hex.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.minority_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_employment = hex.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_activity_units = hex.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 2)) + hex.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),

        # # # Minority Park/OS Buffer Indicators - #64
        Table(
            attribute=
            'population_park_buffer = minority.aggregate(urbansim_parcel.parcel.population * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),

        # Table(
        # attribute = 'households_park_buffer = minority.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
        # dataset_name = 'minority',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'employment_park_buffer = minority.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units_park_buffer = minority.aggregate((urbansim_parcel.parcel.population + urbansim_parcel.parcel.number_of_jobs) * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),

        # # # Minority Growth Amenities Buffer Indicators - #31
        Table(
            attribute=
            'population_growth_amenities = minority.aggregate(urbansim_parcel.parcel.population * (parcel.growth_amenities_id == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),

        # Table(
        # attribute = 'households_growth_amenities = minority.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.growth_amenities_id == 1))',
        # dataset_name = 'minority',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'employment_growth_amenities = minority.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.growth_amenities_id == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units_growth_amenities = minority.aggregate((urbansim_parcel.parcel.population + urbansim_parcel.parcel.number_of_jobs) * (parcel.growth_amenities_id == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),

        # # # Minority Transit Buffer level by Indicators - #28a
        Table(
            attribute=
            'population_transit_buffer = minority.aggregate(urbansim_parcel.parcel.population * (parcel.transit_buffer_id == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),

        # Table(
        # attribute = 'households_transit_buffer = minority.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.transit_buffer_id == 1))',
        # dataset_name = 'minority',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'employment_transit_buffer = minority.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.transit_buffer_id == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units_transit_buffer = minority.aggregate((urbansim_parcel.parcel.population + urbansim_parcel.parcel.number_of_jobs) * (parcel.transit_buffer_id == 1))',
            dataset_name='minority',
            source_data=source_data,
        ),

        # # # Minority Census Tract Level Indicators - #79

        # # Non-minority = 1
        Table(
            attribute=
            'non_minority_population = census_tract.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 1))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_minority_households = census_tract.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.minority_id == 1))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_minority_employment = census_tract.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 1))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        # # # Minority = 2
        Table(
            attribute=
            'minority_population = census_tract.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 2))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_households = census_tract.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.minority_id == 2))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_employment = census_tract.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 2))',
            dataset_name='census_tract',
            source_data=source_data,
        ),

        # # # Minority Subarea Level Indicators - #18

        # # Non-minority = 1
        Table(
            attribute=
            'non_minority_population = subarea.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 1))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_minority_households = subarea.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.minority_id == 1))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_minority_employment = subarea.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 1))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        # # Minority = 2
        Table(
            attribute=
            'minority_population = subarea.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 2))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_households = subarea.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.minority_id == 2))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_employment = subarea.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 2))',
            dataset_name='subarea',
            source_data=source_data,
        ),

        # # # Minority TOD Level Indicators - #30

        # # Non-Minority = 1
        Table(
            attribute=
            'non_minority_population = tod.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 1))',
            dataset_name='tod',
            source_data=source_data,
        ),
        # Table(
        # attribute = 'non_minority_households = tod.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.minority_id == 1))',
        # dataset_name = 'tod',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'non_minority_employment = tod.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 1))',
            dataset_name='tod',
            source_data=source_data,
        ),

        # # Minority = 2
        Table(
            attribute=
            'minority_population = tod.aggregate(urbansim_parcel.parcel.population * (parcel.minority_id == 2))',
            dataset_name='tod',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_households = tod.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.minority_id == 2))',
            dataset_name='tod',
            source_data=source_data,
        ),
        Table(
            attribute=
            'minority_employment = tod.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.minority_id == 2))',
            dataset_name='tod',
            source_data=source_data,
        ),
        Table(
            attribute=
            'developed_acres = minority.aggregate(numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft,intermediates=[parcel])/43560.0',
            dataset_name='minority',
            source_data=source_data,
        ),
        DatasetTable(
            source_data=source_data,
            dataset_name='minority',
            name='Acreage_by_built_res_density',
            attributes=[
                'Nonres_existing = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units == 0) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Nonres_redev = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units == 0) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Nonres_newdev = minority.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units == 0) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Low_existing = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 3630) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Low_redev = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 3630) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Low_newdev = minority.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 3630) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Medium_existing = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 3630) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Medium_redev = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 3630) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Medium_newdev = minority.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 3630) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'High_existing = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'High_redev = minority.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'High_newdev = minority.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
            ],
        ),

        # # Poverty - population, households, employment, and activity units
        Table(
            attribute=
            'population = poverty.aggregate(urbansim_parcel.parcel.population, intermediates=[parcel])',
            dataset_name='poverty',
            source_data=source_data,
        ),
        Table(
            attribute=
            'households = poverty.aggregate(urbansim_parcel.parcel.number_of_households, intermediates=[parcel])',
            dataset_name='poverty',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employment = poverty.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[parcel])',
            dataset_name='poverty',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units = poverty.aggregate(urbansim_parcel.parcel.population, intermediates=[parcel]) + poverty.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[parcel])',
            dataset_name='poverty',
            source_data=source_data,
        ),

        ## Hex Level Indicators (poverty = 2) -- for mapping
        Table(
            attribute=
            'poverty_population = hex.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),
        Table(
            attribute=
            'poverty_households = hex.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.poverty_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),
        Table(
            attribute=
            'poverty_employment = hex.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),
        Table(
            attribute=
            'poverty_activity_units = hex.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 2)) + hex.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 2))',
            dataset_name='hex',
            source_data=source_data,
        ),

        # # # Poverty Park/OS Buffer Indicators - #64
        Table(
            attribute=
            'population_park_buffer = poverty.aggregate(urbansim_parcel.parcel.population * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),

        # Table(
        # attribute = 'households_park_buffer = poverty.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
        # dataset_name = 'poverty',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'employment_park_buffer = poverty.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units_park_buffer = poverty.aggregate((urbansim_parcel.parcel.population + urbansim_parcel.parcel.number_of_jobs) * (parcel.park_buffer_id == 1) * (parcel.is_inside_urban_growth_boundary == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),

        # # # Poverty Growth Amenities Buffer Indicators - #31
        Table(
            attribute=
            'population_growth_amenities = poverty.aggregate(urbansim_parcel.parcel.population * (parcel.growth_amenities_id == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),

        # Table(
        # attribute = 'households_growth_amenities = poverty.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.growth_amenities_id == 1))',
        # dataset_name = 'poverty',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'employment_growth_amenities = poverty.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.growth_amenities_id == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units_growth_amenities = poverty.aggregate((urbansim_parcel.parcel.population + urbansim_parcel.parcel.number_of_jobs) * (parcel.growth_amenities_id == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),

        # # # Poverty Transit Buffer level by Indicators - #28a
        Table(
            attribute=
            'population_transit_buffer = poverty.aggregate(urbansim_parcel.parcel.population * (parcel.transit_buffer_id == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),

        # Table(
        # attribute = 'households_transit_buffer = poverty.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.transit_buffer_id == 1))',
        # dataset_name = 'poverty',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'employment_transit_buffer = poverty.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.transit_buffer_id == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),
        Table(
            attribute=
            'activity_units_transit_buffer = poverty.aggregate((urbansim_parcel.parcel.population + urbansim_parcel.parcel.number_of_jobs) * (parcel.transit_buffer_id == 1))',
            dataset_name='poverty',
            source_data=source_data,
        ),

        # # # Poverty Census Tract Level Indicators - #79

        # # Non-poverty = 1
        Table(
            attribute=
            'non_poverty_population = census_tract.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 1))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_poverty_households = census_tract.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.poverty_id == 1))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_poverty_employment = census_tract.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 1))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        # # # Poverty = 2
        Table(
            attribute=
            'poverty_population = census_tract.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 2))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'poverty_households = census_tract.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.poverty_id == 2))',
            dataset_name='census_tract',
            source_data=source_data,
        ),
        Table(
            attribute=
            'poverty_employment = census_tract.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 2))',
            dataset_name='census_tract',
            source_data=source_data,
        ),

        # # # Poverty Subarea Level Indicators - #18

        # # Non-poverty = 1
        Table(
            attribute=
            'non_poverty_population = subarea.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 1))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_poverty_households = subarea.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.poverty_id == 1))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'non_poverty_employment = subarea.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 1))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        # # Poverty = 2
        Table(
            attribute=
            'poverty_population = subarea.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 2))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'poverty_households = subarea.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.poverty_id == 2))',
            dataset_name='subarea',
            source_data=source_data,
        ),
        Table(
            attribute=
            'poverty_employment = subarea.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 2))',
            dataset_name='subarea',
            source_data=source_data,
        ),

        # # # Poverty TOD Level Indicators - #30

        # # Non-Poverty = 1
        Table(
            attribute=
            'non_poverty_population = tod.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 1))',
            dataset_name='tod',
            source_data=source_data,
        ),
        # Table(
        # attribute = 'non_poverty_households = tod.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.poverty_id == 1))',
        # dataset_name = 'tod',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'non_poverty_employment = tod.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 1))',
            dataset_name='tod',
            source_data=source_data,
        ),

        # # Poverty = 2
        Table(
            attribute=
            'poverty_population = tod.aggregate(urbansim_parcel.parcel.population * (parcel.poverty_id == 2))',
            dataset_name='tod',
            source_data=source_data,
        ),
        # Table(
        # attribute = 'poverty_households = tod.aggregate(urbansim_parcel.parcel.number_of_households * (parcel.poverty_id == 2))',
        # dataset_name = 'tod',
        # source_data = source_data,
        # ),
        Table(
            attribute=
            'poverty_employment = tod.aggregate(urbansim_parcel.parcel.number_of_jobs * (parcel.poverty_id == 2))',
            dataset_name='tod',
            source_data=source_data,
        ),
        Table(
            attribute=
            'developed_acres = poverty.aggregate(numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft,intermediates=[parcel])/43560.0',
            dataset_name='poverty',
            source_data=source_data,
        ),
        DatasetTable(
            source_data=source_data,
            dataset_name='poverty',
            name='Acreage_by_built_res_density',
            attributes=[
                'Nonres_existing = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units == 0) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Nonres_redev = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units == 0) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Nonres_newdev = poverty.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units == 0) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Low_existing = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 3630) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Low_redev = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 3630) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Low_newdev = poverty.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 3630) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Medium_existing = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 3630) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Medium_redev = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 3630) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'Medium_newdev = poverty.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 3630) * (urbansim_parcel.parcel.parcel_sqft_per_unit > 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'High_existing = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) < 2015) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'High_redev = poverty.aggregate((parcel.baseyear_built > 0) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
                'High_newdev = poverty.aggregate((parcel.baseyear_built < 1850) * (parcel.aggregate(building.year_built, function=maximum) > 2014) * (psrc_parcel.parcel.residential_units > 0) * (urbansim_parcel.parcel.parcel_sqft_per_unit <= 871) * numpy.logical_or((psrc_parcel.parcel.residential_units > 0),(parcel.aggregate(psrc_parcel.building.job_capacity_computed_if_necessary) > 0)) * parcel.parcel_sqft, intermediates=[parcel]) / 43560.0',
            ],
        ),
    ]
    return indicators
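
# A minimal usage sketch, assuming IndicatorFactory is importable/in scope as in the
# create_indicators(...) call pattern shown earlier in this file; the cache path and
# run description below are placeholders, not real values.
if __name__ == '__main__':
    example_indicators = get_indicators(
        cache_directory='/placeholder/urbansim_cache/run',  # placeholder path
        run_description='minority and poverty equity indicators')
    IndicatorFactory().create_indicators(indicators=example_indicators,
                                         display_error_box=False,
                                         show_results=False)
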
Example #16
def get_indicators(cache_directory,
                   run_description,
                   years=range(2014, 2051),
                   base_year=2014):
    source_data = SourceData(
        cache_directory=cache_directory,
        run_description=run_description,
        years=years,
        base_year=base_year,
        dataset_pool_configuration=DatasetPoolConfiguration(
            package_order=[
                'psrc_parcel', 'urbansim_parcel', 'psrc', 'urbansim',
                'opus_core'
            ],
            package_order_exceptions={},
        ),
    )

    indicators = [

        # FAZ indicators
        # =====================
        Table(
            attribute=
            'householdsAn=faz.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),
        Table(
            attribute=
            'populationAn=faz.aggregate(urbansim_parcel.building.population, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employmentAn=faz.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel,zone])',
            dataset_name='faz',
            source_data=source_data,
        ),

        # TAZ indicators
        Table(
            attribute=
            'householdsAn=zone.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),
        Table(
            attribute=
            'populationAn=zone.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employmentAn=zone.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel])',
            dataset_name='zone',
            source_data=source_data,
        ),

        # ## City indicators
        # ==================
        Table(
            attribute=
            'householdsAn=city.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'populationAn=city.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employmentAn=city.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel])',
            dataset_name='city',
            source_data=source_data,
        ),

        # ## Tract-City indicators
        # ==================
        Table(
            attribute=
            'householdsAn=tractcity.aggregate(urbansim_parcel.building.number_of_households, intermediates=[parcel])',
            dataset_name='tractcity',
            source_data=source_data,
        ),
        Table(
            attribute=
            'populationAn=tractcity.aggregate(urbansim_parcel.building.population, intermediates=[parcel])',
            dataset_name='tractcity',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employmentAn=tractcity.aggregate(urbansim_parcel.building.number_of_jobs, intermediates=[parcel])',
            dataset_name='tractcity',
            source_data=source_data,
        ),

        # # County Regional Geography indicators
        Table(
            attribute=
            'populationAn = fips_rgs.aggregate(urbansim_parcel.parcel.population, intermediates=[city])',
            dataset_name='fips_rgs',
            source_data=source_data,
        ),
        Table(
            attribute=
            'householdsAn = fips_rgs.aggregate(urbansim_parcel.parcel.number_of_households, intermediates=[city])',
            dataset_name='fips_rgs',
            source_data=source_data,
        ),
        Table(
            attribute=
            'employmentAn = fips_rgs.aggregate(urbansim_parcel.parcel.number_of_jobs, intermediates=[city])',
            dataset_name='fips_rgs',
            source_data=source_data,
        ),
    ]
    return indicators
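

# A minimal read-back sketch, assuming the tables above have already been computed via
# IndicatorFactory().create_indicators(...) as shown earlier in this file. Each Table
# exposes get_file_path(), so its CSV output can be loaded with the csv module;
# read_indicator_rows is a hypothetical helper, not part of the indicator framework.
import csv


def read_indicator_rows(indicator):
    """Return every row of a computed Table indicator's CSV output."""
    reader = csv.reader(open(indicator.get_file_path(), 'r'))
    return [row for row in reader]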