def poverty(year, root_dir, bg_mergedgdb, region, places, bg_file, pov_file, final_gdb_loc):
    """Create Poverty{year}_Final in final_gdb_loc.

    Workflow: clip block groups to the region boundary, join ACS poverty
    fields (B17010 / C17002), compute block-group, county, and regional
    poverty shares, flag block groups above the county and regional shares,
    spatially join census places, then clean up and export the final
    feature class.

    Args:
        year: ACS vintage; used in all geodatabase / feature class names.
        root_dir: folder holding the working file geodatabases.
        bg_mergedgdb: gdb containing block group polygons and ACS tables.
        region: regional boundary polygon feature class (clip boundary).
        places: census places feature class for the final spatial join.
        bg_file: block group feature class name inside bg_mergedgdb.
        pov_file: poverty table name inside bg_mergedgdb.
        final_gdb_loc: gdb that receives Poverty{year}_Final.
    """
    gdb = f"Poverty{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)
    ap.ClearWorkspaceCache_management()
    outputgdb = ap.env.workspace

    working_file = "Poverty_working"

    # Working feature class locations
    twrw_places_file = f"Poverty{year}_working_RegionJoin_Places"
    twrw_places = os.path.join(outputgdb, twrw_places_file)
    cw_file = f"Poverty{year}_working_County"
    cw = os.path.join(outputgdb, cw_file)
    rw_file = f"Poverty{year}_working_Region"
    rw = os.path.join(outputgdb, rw_file)
    twcw_file = f"Poverty{year}_working_CountyJoin"
    twcw = os.path.join(outputgdb, twcw_file)
    twrw_file = f"Poverty{year}_working_RegionJoin"
    twrw = os.path.join(outputgdb, twrw_file)
    final_file = f"Poverty{year}_Final"

    # Recreate the working geodatabase from scratch
    replaceGDB(root_dir, gdb)

    # B17010e1: total families; C17002e1: poverty-ratio universe;
    # C17002e2-e5: counts below 150% of the poverty line
    fields_list = [
        "B17010e1", "C17002e1", "C17002e2", "C17002e3", "C17002e4", "C17002e5"
    ]

    # Clip blockgroups by the region shape, and keep only fields of interest
    clipPolygons(bg_mergedgdb, bg_file, region, os.path.join(root_dir, gdb),
                 working_file)
    joinAndCalcFields(working_file, bg_mergedgdb, os.path.join(root_dir, gdb),
                      'GEOID_Data', pov_file, 'GEOID', fields_list)

    ap.env.workspace = outputgdb

    # NOTE: field names normalized to match the calc targets below
    # (previously added as "PPOV" / "POVDens" — gdb fields are
    # case-insensitive, but the inconsistency was confusing).
    ap.management.AddFields(
        working_file,
        [["SqMiles", "DOUBLE"], ["TFam", "DOUBLE"], ["TPov", "DOUBLE"],
         ["PPov", "DOUBLE"], ["PovDens", "DOUBLE"], ["CoPovBG", "SHORT"],
         ["RegPovBG", "SHORT"]])
    print('Added Fields to working file')

    # block-group level poverty statistics
    ap.CalculateFields_management(
        working_file, "PYTHON3",
        [["SqMiles", "!shape.area@squaremiles!"],
         ["TFam", "!C17002e1!"],
         # all households less than 150% of the poverty line
         ["TPov", "!C17002e2! + !C17002e3! + !C17002e4! + !C17002e5!"],
         ["PPov", "!TPov! / !TFam!"],
         ["PovDens", "!TPov! / !SqMiles!"]])
    print("---------------------------")
    print("Finished calculating Poverty Calcs")

    # DISSOLVE BLOCK GROUPS BY COUNTY - SUM VALUES
    ap.Dissolve_management(
        working_file, cw, "COUNTYFP",
        [["TFam", "SUM"], ["TPov", "SUM"], ["SqMiles", "SUM"]])
    print("---------------------------")
    print("Dissolve County Stats")

    # DISSOLVE BLOCK GROUPS BY REGION - SUM VALUES
    ap.Dissolve_management(
        working_file, rw, "",
        [["TFam", "SUM"], ["TPov", "SUM"], ["SqMiles", "SUM"]])
    print("---------------------------")
    print("Dissolve Region Stats")

    # define and compute county calculation fields
    ap.management.AddFields(
        cw,
        [["CoTFam", "DOUBLE"],      # total families
         ["CoTPov", "DOUBLE"],      # total low income families
         ["CoPPov", "DOUBLE"],      # percent low income families
         ["CoSqMiles", "DOUBLE"],   # square miles
         ["CoPovDens", "DOUBLE"]])  # low income hh per square mile
    ap.CalculateFields_management(
        cw, "PYTHON",
        [["CoTFam", "!SUM_TFam!"], ["CoTPov", "!SUM_TPov!"],
         ["CoPPov", "!SUM_TPov!/!SUM_TFam!"],
         ["CoSqMiles", "!SUM_SqMiles!"],
         ["CoPovDens", "!CoTPov! / !CoSqMiles!"]])
    print("---------------------------")
    print(cw_file + " fields calculated !!!")

    # define and compute region calculation fields
    ap.management.AddFields(
        rw,
        [["RegTFam", "DOUBLE"], ["RegTPov", "DOUBLE"], ["RegPPov", "DOUBLE"],
         ["RegSqMiles", "DOUBLE"], ["RegPovDens", "DOUBLE"]])
    ap.CalculateFields_management(
        rw, "PYTHON",
        [["RegTFam", "!SUM_TFam!"], ["RegTPov", "!SUM_TPov!"],
         ["RegPPov", "!SUM_TPov!/!SUM_TFam!"],
         ["RegSqMiles", "!SUM_SqMiles!"],
         ["RegPovDens", "!RegTPov! / !RegSqMiles!"]])
    print("---------------------------")
    print(rw_file + " fields calculated !!!")

    # SPATIAL JOIN BLOCK GROUPS WITH COUNTY FEATURE CLASS
    ap.SpatialJoin_analysis(working_file, cw, twcw)
    print("---------------------------")
    print("County Spatial Join")

    # SPATIAL JOIN WITH REGION FILE
    ap.SpatialJoin_analysis(twcw, rw, twrw)
    print("---------------------------")
    print("Region Spatial Join")

    # flag block groups whose poverty share exceeds the county / regional share
    ap.CalculateFields_management(
        in_table=twrw,
        expression_type="PYTHON3",
        fields="CoPovBG 'ifBlock(!PPov!, !CoPPov!)';RegPovBG 'ifBlock(!PPov!, !RegPPov!)'",
        code_block="""def ifBlock(area, region):
    if area > region:
        return 1
    else:
        return 0
""")
    print("---------------------------")
    print("Above Poverty Share Calculations Completed")

    # SPATIAL JOIN WITH PLACES FILE
    ap.SpatialJoin_analysis(twrw, places, twrw_places)
    print("---------------------------")
    print("Places Spatial Join")

    # remove these unnecessary fields in the cleanup stage
    # (duplicates and the misspelled 'COUTNYFP_1' entry removed;
    # 'COUNTYFP_1' already covers the intended field)
    delete_fields = [
        'GEOID_1', 'SUM_TFam', 'SUM_TPov', 'SUM_SqMiles',
        'SUM_TFam_1', 'SUM_TPov_1', 'SUM_SqMiles_1',
        'Join_Count', 'TARGET_FID', 'Join_Count_1', 'TARGET_FID_1',
        'Join_Count_12', 'TARGET_FID_12',
        'B17010e1', 'C17002e1', 'C17002e2', 'C17002e3', 'C17002e4',
        'C17002e5', 'Shape_Length_12', 'Shape_Area_12', 'ALAND_1',
        'AWATER_1', 'COUNTYFP_1', 'Shape_Length_1', 'Shape_Area_1',
        'STATEFP_1', 'PLACEFP', 'PLACENS', 'AFFGEOID', 'GEOID_12', 'LSAD'
    ]
    cleanUp(twrw_places, gdb, final_file, final_gdb_loc, delete_fields)
def minority(year, root_dir, bg_mergegdb, region, places, bg_file, race_file, hisp_file, final_gdb_loc):
    """Create Minority{year}_Final in final_gdb_loc.

    Workflow: clip block groups to the region, join ACS race (B02001) and
    Hispanic origin (B03002e13) fields, compute block-group, county, and
    regional minority shares, flag block groups above the county and
    regional shares, spatially join census places, then clean up and
    export the final feature class.

    Args:
        year: ACS vintage; used in all geodatabase / feature class names.
        root_dir: folder holding the working file geodatabases.
        bg_mergegdb: gdb containing block group polygons and ACS tables.
        region: regional boundary polygon feature class (clip boundary).
        places: census places feature class for the final spatial join.
        bg_file: block group feature class name inside bg_mergegdb.
        race_file: race (B02001) table name inside bg_mergegdb.
        hisp_file: Hispanic origin (B03002) table name inside bg_mergegdb.
        final_gdb_loc: gdb that receives Minority{year}_Final.
    """
    gdb = f"Minority{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)
    ap.ClearWorkspaceCache_management()
    outputgdb = ap.env.workspace
    working_file = f"Minority{year}_working"

    # Working feature class locations
    cw_file = f"Minority{year}_working_County"
    cw = os.path.join(outputgdb, cw_file)
    rw_file = f"Minority{year}_working_Region"
    rw = os.path.join(outputgdb, rw_file)
    twcw_file = f"Minority{year}_working_CountyJoin"
    twcw = os.path.join(outputgdb, twcw_file)
    twrw_file = f"Minority{year}_working_RegionJoin"
    twrw = os.path.join(outputgdb, twrw_file)
    twrw_places_file = f"Minority{year}_working_RegionJoin_Places"
    twrw_places = os.path.join(outputgdb, twrw_places_file)
    final_file = f"Minority{year}_Final"

    # CREATE WORKING GDB
    replaceGDB(root_dir, gdb)

    # B02001e1: total population; e3-e8: non-white race counts
    race_fields_list = ["B02001e1", "B02001e3", "B02001e4", "B02001e5",
                        "B02001e6", "B02001e7", "B02001e8"]
    hisp_fields_list = ["B03002e13"]

    # Clip blockgroups by the region shape, and keep only fields of interest
    clipPolygons(bg_mergegdb, bg_file, region, os.path.join(root_dir, gdb),
                 working_file)
    joinAndCalcFields(working_file, bg_mergegdb, os.path.join(root_dir, gdb),
                      'GEOID_Data', race_file, 'GEOID', race_fields_list)
    joinAndCalcFields(working_file, bg_mergegdb, os.path.join(root_dir, gdb),
                      'GEOID_Data', hisp_file, 'GEOID', hisp_fields_list)

    # NOTE(review): "PWhite" and "PNative" are created but never calculated
    # anywhere in this function (PNative looks like a typo duplicate of
    # PNativeAm); kept to preserve the output schema — confirm and remove.
    ap.management.AddFields(
        working_file,
        [["SqMiles", "DOUBLE"], ["TPop", "DOUBLE"], ["TMinority", "DOUBLE"],
         ["PMinority", "DOUBLE"], ["MinorityDens", "DOUBLE"],
         ["CoMinBG", "SHORT"], ["RegMinBG", "SHORT"],
         ["TBlack", "DOUBLE"], ["TNativeAm", "DOUBLE"], ["TAsian", "DOUBLE"],
         ["TPacIsland", "DOUBLE"], ["TOther", "DOUBLE"],
         ["THispanic", "DOUBLE"], ["TTwoOrMore", "DOUBLE"],
         ["PWhite", "DOUBLE"], ["PBlack", "DOUBLE"], ["PNativeAm", "DOUBLE"],
         ["PNative", "DOUBLE"], ["PAsian", "DOUBLE"],
         ["PPacIsland", "DOUBLE"], ["POther", "DOUBLE"],
         ["PTwoOrMore", "DOUBLE"], ["PHispanic", "DOUBLE"]])

    # block-group level minority statistics
    ap.CalculateFields_management(
        working_file, "PYTHON3",
        [["SqMiles", "!shape.area@squaremiles!"],
         ["TPop", "!B02001e1!"],
         ["TMinority",
          "!B02001e3! + !B02001e4! + !B02001e5! + !B02001e6! + !B02001e7! + !B02001e8! + !B03002e13!"],
         ["PMinority", "!TMinority! / !TPop!"],
         ["MinorityDens", "!TMinority! / !SqMiles!"],
         ["TBlack", "!B02001e3!"],
         ["TNativeAm", "!B02001e4!"],
         ["TAsian", "!B02001e5!"],
         ["TPacIsland", "!B02001e6!"],
         ["TOther", "!B02001e7!"],
         ["TTwoOrMore", "!B02001e8!"],
         ["THispanic", "!B03002e13!"],
         ["PBlack", "!TBlack! / !TPop!"],
         ["PNativeAm", "!TNativeAm! / !TPop!"],
         ["PAsian", "!TAsian! / !TPop!"],
         ["PPacIsland", "!TPacIsland! / !TPop!"],
         ["POther", "!TOther! / !TPop!"],
         ["PTwoOrMore", "!TTwoOrMore! / !TPop!"],
         ["PHispanic", "!THispanic! / !TPop!"]])
    print("---------------------------")
    print("Finished calculating Minority Calcs")

    # DISSOLVE BLOCK GROUPS BY COUNTY - SUM VALUES
    ap.Dissolve_management(
        working_file, cw, "COUNTYFP",
        [["TPop", "SUM"], ["TMinority", "SUM"], ["SqMiles", "SUM"],
         ["TBlack", "SUM"], ["TNativeAm", "SUM"], ["TAsian", "SUM"],
         ["TPacIsland", "SUM"], ["TOther", "SUM"], ["THispanic", "SUM"]])
    print("---------------------------")
    print("Dissolve County Stats")

    # DISSOLVE BLOCK GROUPS BY REGION - SUM VALUES
    ap.Dissolve_management(
        working_file, rw, "",
        [["TPop", "SUM"], ["TMinority", "SUM"], ["SqMiles", "SUM"],
         ["TBlack", "SUM"], ["TNativeAm", "SUM"], ["TAsian", "SUM"],
         ["TPacIsland", "SUM"], ["TOther", "SUM"], ["THispanic", "SUM"]])
    print("---------------------------")
    print("Dissolve Region Stats")

    # define and compute county calculation fields
    ap.management.AddFields(
        cw,
        [["CoTPop", "Double"], ["CoTMinority", "Double"],
         ["CoPMinority", "Double"], ["CoSqMiles", "Double"],
         ["CoMinorityDens", "Double"], ["CoTBlack", "DOUBLE"],
         ["CoTNativeAm", "DOUBLE"], ["CoTAsian", "DOUBLE"],
         ["CoTPacIsland", "DOUBLE"], ["CoTOther", "DOUBLE"],
         ["CoTHispanic", "DOUBLE"], ["CoPWhite", "DOUBLE"],
         ["CoPBlack", "DOUBLE"], ["CoPNativeAm", "DOUBLE"],
         ["CoPAsian", "DOUBLE"], ["CoPPacIsland", "DOUBLE"],
         ["CoPOther", "DOUBLE"], ["CoPHispanic", "DOUBLE"]])
    ap.CalculateFields_management(
        cw, "PYTHON",
        [["CoTPop", "!SUM_TPop!"],
         ["CoTMinority", "!SUM_TMinority!"],
         ["CoPMinority", "!CoTMinority! / !CoTPop!"],
         ["CoSqMiles", "!SUM_SqMiles!"],
         ["CoMinorityDens", "!CoTMinority! / !CoSqMiles!"],
         ["CoTBlack", "!SUM_TBlack!"],
         ["CoTNativeAm", "!SUM_TNativeAm!"],
         ["CoTAsian", "!SUM_TAsian!"],
         ["CoTPacIsland", "!SUM_TPacIsland!"],
         ["CoTOther", "!SUM_TOther!"],
         # was missing entirely — mirrors RegTHispanic below
         ["CoTHispanic", "!SUM_THispanic!"],
         ["CoPBlack", "!SUM_TBlack! / !SUM_TPop!"],
         ["CoPNativeAm", "!SUM_TNativeAm! / !SUM_TPop!"],
         ["CoPAsian", "!SUM_TAsian! / !SUM_TPop!"],
         ["CoPPacIsland", "!SUM_TPacIsland! / !SUM_TPop!"],
         ["CoPOther", "!SUM_TOther! / !SUM_TPop!"],
         # bug fix: previously computed from !SUM_TOther!
         ["CoPHispanic", "!SUM_THispanic! / !SUM_TPop!"]])
    print("---------------------------")
    print(cw_file + " fields calculated !!!")

    # define and compute region calculation fields
    ap.management.AddFields(
        rw,
        [["RegTPop", "Double"], ["RegTMinority", "Double"],
         ["RegPMinority", "Double"], ["RegSqMiles", "Double"],
         ["RegMinorityDens", "Double"], ["RegTBlack", "DOUBLE"],
         ["RegTNativeAm", "DOUBLE"], ["RegTAsian", "DOUBLE"],
         ["RegTPacIsland", "DOUBLE"], ["RegTOther", "DOUBLE"],
         ["RegTHispanic", "DOUBLE"], ["RegPBlack", "DOUBLE"],
         ["RegPNativeAm", "DOUBLE"], ["RegPAsian", "DOUBLE"],
         ["RegPPacIsland", "DOUBLE"], ["RegPOther", "DOUBLE"],
         ["RegPHispanic", "DOUBLE"]])
    ap.CalculateFields_management(
        rw, "PYTHON",
        [["RegTPop", "!SUM_TPop!"],
         ["RegTMinority", "!SUM_TMinority!"],
         ["RegPMinority", "!RegTMinority! / !RegTPop!"],
         ["RegSqMiles", "!SUM_SqMiles!"],
         ["RegMinorityDens", "!RegTMinority! / !RegSqMiles!"],
         ["RegTBlack", "!SUM_TBlack!"],
         ["RegTNativeAm", "!SUM_TNativeAm!"],
         ["RegTAsian", "!SUM_TAsian!"],
         ["RegTPacIsland", "!SUM_TPacIsland!"],
         ["RegTOther", "!SUM_TOther!"],
         ["RegTHispanic", "!SUM_THispanic!"],
         ["RegPBlack", "!SUM_TBlack! / !SUM_TPop!"],
         ["RegPNativeAm", "!SUM_TNativeAm! / !SUM_TPop!"],
         ["RegPAsian", "!SUM_TAsian! / !SUM_TPop!"],
         ["RegPPacIsland", "!SUM_TPacIsland! / !SUM_TPop!"],
         ["RegPOther", "!SUM_TOther! / !SUM_TPop!"],
         ["RegPHispanic", "!SUM_THispanic! / !SUM_TPop!"]])
    print("---------------------------")
    print(rw_file + " fields calculated !!!")

    # SPATIAL JOIN BLOCK GROUPS WITH COUNTY FILE
    ap.SpatialJoin_analysis(working_file, cw, twcw)
    print("---------------------------")
    print("County Spatial Join")

    # SPATIAL JOIN WITH REGION FILE
    ap.SpatialJoin_analysis(twcw, rw, twrw)
    print("---------------------------")
    print("Region Spatial Join")

    # Compare each block group's minority percentage to the county / region,
    # and flag if greater
    ap.CalculateFields_management(
        in_table=twrw,
        expression_type="PYTHON3",
        fields="CoMinBG 'ifBlock(!PMinority!, !CoPMinority!)';RegMinBG 'ifBlock(!PMinority!, !RegPMinority!)'",
        code_block="""def ifBlock(area, region):
    if area > region:
        return 1
    else:
        return 0
""")
    print("---------------------------")
    print("Above Minority Percentage Calculations Completed")

    # SPATIAL JOIN WITH PLACES FILE
    ap.SpatialJoin_analysis(twrw, places, twrw_places)
    print("---------------------------")
    print("Places Spatial Join")

    # remove these unnecessary fields in the cleanup stage
    # (duplicate "SHAPE_LENGTH_1" entry removed)
    delete_fields = [
        "Join_Count", "Join_Count_1", "TARGET_FID_12", "Target_FID",
        "Target_FID_1", "Join_Count_12", "Geoid_1",
        "B02001e1", "B02001e2", "B02001e3", "B02001e4", "B02001e5",
        "B02001e6", "B02001e7", "B02001e8", "B02001e9", "B02001e10",
        "B03002e13",
        "SUM_TPop", "SUM_TMinority", "SUM_SqMiles",
        "SUM_TPop_1", "SUM_TMinority_1", "SUM_SqMiles_1",
        "GEOID_12_13", "PLACENS", "PLACEFP", "STATEFP_1",
        "SHAPE_LENGTH_12", "SHAPE_AREA_12", "SHAPE_LENGTH_1",
        "COUNTYFP_1", "GEOID_12",
        "SUM_TWhite", "SUM_TBlack", "SUM_TNativeAm", "SUM_TAsian",
        "SUM_TPacIsland", "SUM_TOther", "SUM_THispanic",
        "SUM_TWhite_1", "SUM_TBlack_1", "SUM_TNativeAm_1", "SUM_TAsian_1",
        "SUM_TPacIsland_1", "SUM_TOther_1", "SUM_THispanic_1"
    ]
    cleanUp(twrw_places, gdb, final_file, final_gdb_loc, delete_fields)
def medHHInc(year, root_dir, bg_mergegdb, region, places, bg_file, inc_file, final_gdb_loc):
    """Create MedHHInc{year}_Final in final_gdb_loc.

    Workflow: clip block groups to the region, join ACS income fields
    (B19001 / B19049), compute county and regional median household
    income, flag block groups whose median income falls below the county /
    regional median, spatially join census places, then clean up and
    export the final feature class.

    Args:
        year: ACS vintage; used in all geodatabase / feature class names.
        root_dir: folder holding the working file geodatabases.
        bg_mergegdb: gdb containing block group polygons and ACS tables.
        region: regional boundary polygon feature class (clip boundary).
        places: census places feature class for the final spatial join.
        bg_file: block group feature class name inside bg_mergegdb.
        inc_file: income table name inside bg_mergegdb.
        final_gdb_loc: gdb that receives MedHHInc{year}_Final.
    """
    gdb = f"MedHHInc{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)
    ap.ClearWorkspaceCache_management()
    outputgdb = ap.env.workspace

    # LOCAL VARIABLES
    working_file = f"MedHHInc{year}_working"
    cw_file = f"MedHHInc{year}_working_County"
    cw = os.path.join(outputgdb, cw_file)
    rw_file = f"MedHHInc{year}_working_Region"
    rw = os.path.join(outputgdb, rw_file)
    twcw_file = f"MedHHInc{year}_working_CountyJoin"
    twcw = os.path.join(outputgdb, twcw_file)
    twrw_file = f"MedHHInc{year}_working_RegionJoin"
    twrw = os.path.join(outputgdb, twrw_file)
    final_file = f"MedHHInc{year}_Final"
    twrw_places_file = f"MedHHInc{year}_working_RegionJoin_Places"
    twrw_places = os.path.join(outputgdb, twrw_places_file)

    # fields removed in the cleanup stage
    # (duplicate 'TARGET_FID_12' entry removed)
    delete_fields = [
        'B19001e1', 'B19049e1', 'COUNTYFP_1', 'SUM_THH', 'SUM_SqMiles',
        'MEDIAN_MedHHInc', 'Shape_Length_1', 'Shape_Area_1', 'SUM_THH_1',
        'SUM_SqMiles_1', 'MEDIAN_MedHHInc_1', 'Shape_Length_12',
        'Shape_Area_12', 'STATEFP_1', 'PLACEFP', 'PLACENS', 'AFFGEOID',
        'GEOID_12', 'LSAD', 'ALAND_1', 'AWATER_1', 'TARGET_FID_12',
        'Join_Count_12', 'TARGET_FID', 'Join_Count'
    ]

    # Recreate the working geodatabase from scratch
    replaceGDB(root_dir, gdb)

    # B19001e1: total households; B19049e1: median household income
    fields_list = ['B19001e1', 'B19049e1']

    # Clip blockgroups by the region shape, and keep only fields of interest
    clipPolygons(bg_mergegdb, bg_file, region, os.path.join(root_dir, gdb),
                 working_file)
    joinAndCalcFields(working_file, bg_mergegdb, os.path.join(root_dir, gdb),
                      'GEOID_Data', inc_file, 'GEOID', fields_list)

    ap.env.workspace = outputgdb

    # bug fix: SqMiles is summed by the dissolves below but was never
    # created in this function (every sibling function creates it);
    # add and compute it here.
    ap.management.AddFields(
        working_file,
        [["SqMiles", "DOUBLE"], ["THH", "DOUBLE"], ["MedHHInc", "Double"],
         ["CoBelMedInc", "DOUBLE"], ["RegBelMedInc", "DOUBLE"]])
    ap.CalculateFields_management(
        working_file, "PYTHON3",
        [["SqMiles", "!shape.area@squaremiles!"],
         ["THH", "!B19001e1!"],
         ["MedHHInc", "!B19049e1!"]])

    # get rid of all of the census fields
    ap.DeleteField_management(working_file, fields_list)

    # DISSOLVE BLOCK GROUPS BY COUNTY - SUM/MEDIAN VALUES
    ap.Dissolve_management(
        working_file, cw, "COUNTYFP",
        [["THH", "SUM"], ["SqMiles", "SUM"], ["MedHHInc", "MEDIAN"]])
    print("")
    print("---------------------------")
    print("Dissolve County Stats")

    # DISSOLVE BLOCK GROUPS BY REGION - SUM/MEDIAN VALUES
    ap.Dissolve_management(
        working_file, rw, "",
        [["THH", "SUM"], ["SqMiles", "SUM"], ["MedHHInc", "MEDIAN"]])
    print("")
    print("---------------------------")
    print("Dissolve Region Stats")

    # define and compute county calculation fields
    ap.management.AddFields(cw, [["CoTHH", "DOUBLE"], ["CoMedHHInc", "DOUBLE"]])
    ap.CalculateFields_management(
        cw, "PYTHON",
        [["CoTHH", "!SUM_THH!"], ["CoMedHHInc", "!Median_MedHHInc!"]])
    print("")
    print("---------------------------")
    print(cw_file + " fields calculated !!!")

    # define and compute region calculation fields
    ap.management.AddFields(rw, [["RegTHH", "DOUBLE"], ["RegMedHHInc", "DOUBLE"]])
    ap.CalculateFields_management(
        rw, "PYTHON",
        [["RegTHH", "!SUM_THH!"], ["RegMedHHInc", "!Median_MedHHInc!"]])
    print("")
    print("---------------------------")
    print(rw_file + " fields calculated !!!")

    # SPATIAL JOIN BLOCK GROUPS WITH COUNTY FILE
    ap.SpatialJoin_analysis(working_file, cw, twcw)
    print("")
    print("---------------------------")
    print("County Spatial Join")

    # SPATIAL JOIN WITH REGION FILE
    ap.SpatialJoin_analysis(twcw, rw, twrw)
    print("")
    print("---------------------------")
    print("Region Spatial Join")
    print("")

    # flag block groups whose median income falls below the county / region
    ap.CalculateFields_management(
        in_table=twrw,
        expression_type="PYTHON3",
        fields="CoBelMedInc 'ifBlock(!MedHHInc!, !CoMedHHInc!)';RegBelMedInc 'ifBlock(!MedHHInc!, !RegMedHHInc!)'",
        code_block='''def ifBlock(area, region):
    if area < region:
        return 1
    else:
        return 0''')
    print("---------------------------")
    print("Below Median Income Calculations Completed")

    # SPATIAL JOIN WITH PLACES FILE
    ap.SpatialJoin_analysis(twrw, places, twrw_places)
    print("")
    print("---------------------------")
    print("Places Spatial Join")

    cleanUp(twrw_places, gdb, final_file, final_gdb_loc, delete_fields)
def idRoutes(year, root_dir, routes, final_gdb_loc):
    """Flag transit routes that substantially serve identified block groups.

    For each demographic category (minority, poverty, median household
    income, LEP), selects the block groups flagged above the regional
    threshold, buffers their dissolved footprint by 50 feet, clips the
    routes to that buffer, and flags a route when more than a third of its
    length falls inside the identified area. The result is exported as
    IdentifiedRoutes{year} into final_gdb_loc.

    Args:
        year: ACS vintage; used in geodatabase / feature class names.
        root_dir: folder holding the working geodatabases.
        routes: input routes feature class (must carry a LineAbbr field).
        final_gdb_loc: gdb containing the *_Final demographic layers
            produced by the sibling functions; also receives the output.
    """
    gdb = f"IdentifiedRoutes{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)
    ap.ClearWorkspaceCache_management()
    working_gdb = ap.env.workspace

    # Demographic *_Final inputs produced by the sibling functions
    minority_file = os.path.join(final_gdb_loc, f"Minority{year}_Final")
    poverty_file = os.path.join(final_gdb_loc, f"Poverty{year}_Final")
    lep_file = os.path.join(final_gdb_loc, f"LEP{year}_Final")
    medhhinc_file = os.path.join(final_gdb_loc, f"MedHHInc{year}_Final")

    # Working feature classes
    minority_working_file = f"Minority{year}_BG"
    poverty_working_file = f"Poverty{year}_BG"
    lep_working_file = f"LEP{year}_BG"
    medhhinc_working_file = f"MedHHInc{year}_BG"

    routes_file = f"IdentifiedRoutes{year}"
    routes_working = os.path.join(working_gdb, routes_file)

    # define inputs for the loop - one entry per demographic category
    working_list = [
        {"org_file": minority_file,              # input feature class
         "working_file": minority_working_file,  # working copy for calcs
         "identified_field": "RegMinBG",         # regional threshold flag
         # route fields to be added: [length, percent, flag]
         "add_fields": [['MinorityLength', 'double'],
                        ['PMinority', 'double'],
                        ['MinorityRoute', 'SHORT']]},
        {"org_file": poverty_file,
         "working_file": poverty_working_file,
         "identified_field": "RegPovBG",
         "add_fields": [['PovertyLength', 'double'],
                        ['PPoverty', 'double'],
                        ['PovertyRoute', 'SHORT']]},
        {"org_file": medhhinc_file,
         "working_file": medhhinc_working_file,
         "identified_field": "RegBelMedInc",
         "add_fields": [['MedHHIncLength', 'double'],
                        ['PMedHHInc', 'double'],
                        ['MedHHIncRoute', 'SHORT']]},
        {"org_file": lep_file,
         "working_file": lep_working_file,
         "identified_field": "RegAbvLEP",
         "add_fields": [['LEPLength', 'double'],
                        ['PLEP', 'double'],
                        ['LEPRoute', 'SHORT']]},
    ]

    # CREATE WORKING GDB — use the shared helper instead of the previous
    # inline shutil.rmtree + CreateFileGDB duplicate of it
    replaceGDB(root_dir, gdb)

    # CREATE WORKING ROUTES FEATURE CLASS
    ap.FeatureClassToFeatureClass_conversion(routes, working_gdb, routes_file)
    print("FEATURE CLASS CREATED!!!")

    ap.AddFields_management(routes_working, [['FullLength', 'double']])
    print('INITIAL FIELDS ADDED TO ROUTES_WORKING FILE!!!')

    ap.CalculateFields_management(routes_working, 'PYTHON3',
                                  [['FullLength', '!shape.length@miles!']])
    print('CALCULATE FULL LENGTH OF ROUTES!!!')

    # loop through each demographic category
    for item in working_list:
        org_file = item["org_file"]
        working_file = item["working_file"]
        identified_field = item["identified_field"]
        add_fields = item["add_fields"]
        routes_analysis = "routes_" + working_file
        length_field = add_fields[0][0]
        percent_field = add_fields[1][0]
        id_field = add_fields[2][0]

        print("")
        print("--------------------------------")
        print("********************************")
        print("START OF " + working_file)
        print("********************************")
        print("--------------------------------")
        print("")

        # per-category intermediate file names
        dissolve_file = working_file + "_dissolve"
        buffer_file = dissolve_file + "_buffer"
        clip_routes = routes_analysis + "_clip"
        dissolve_routes = clip_routes + "_dissolve"

        # "where" expression selecting identified blockgroups
        selected_bg = identified_field + " = 1"
        print(selected_bg)

        ap.FeatureClassToFeatureClass_conversion(org_file, working_gdb,
                                                 working_file, selected_bg)
        print(working_file + " CREATED!!!")
        ap.FeatureClassToFeatureClass_conversion(routes_working, working_gdb,
                                                 routes_analysis)
        print(routes_analysis + " FILE CREATED!!!")

        # dissolve all identified block groups into one shape
        ap.Dissolve_management(working_file, dissolve_file, '')
        print(dissolve_file + " CREATED!!!")

        # buffer by 50 feet
        ap.Buffer_analysis(dissolve_file, buffer_file, "50 feet")
        print(buffer_file + " CREATED!!!")

        # clip routes using the buffered shape
        ap.Clip_analysis(routes_working, buffer_file, clip_routes)
        print(clip_routes + " CREATED!!!")

        # calculate length of route inside identified blockgroups
        ap.AddField_management(clip_routes, "IdLength", "double")
        print("IdLength Field Added for " + working_file)
        ap.CalculateField_management(clip_routes, "IdLength",
                                     "!shape.geodesicLength@miles!")
        print("IdLength Field Calculated for " + working_file)

        # collect route pieces by route
        ap.Dissolve_management(clip_routes, dissolve_routes, 'LineAbbr',
                               [["IdLength", 'sum']])
        print(clip_routes + " DISSOLVED")

        # join and sum ID'ed length back onto the full routes
        ap.JoinField_management(routes_working, "LineAbbr", dissolve_routes,
                                "LineAbbr", ["SUM_IdLength"])
        print(routes_working + " JOINED WITH " + dissolve_routes)

        ap.AddFields_management(routes_working, add_fields)
        print("FIELDS ADDED TO " + routes_working)

        # compute percentage of total that is ID'ed, then flag if > 1/3.
        # percent() guards both a missing join (None) and a zero FullLength,
        # which previously raised ZeroDivisionError.
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[length_field, '!SUM_IdLength!'],
             [percent_field, f'percent(!{length_field}!, !FullLength!)']],
            '''def percent(calc, full):
    if calc is None or not full:
        return 0
    else:
        return calc / full
''')
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[id_field, f'ifBlock(!{percent_field}!)']],
            '''def ifBlock(percent):
    if percent > 0.33:
        return 1
    else:
        return 0
''')
        print(routes_working + " FIELDS CALCULATED")

        ap.DeleteField_management(routes_working, "SUM_IdLength")
        print("IdLength Field Deleted")

    ap.ClearWorkspaceCache_management()
    deleteFeatureClass(routes_file, final_gdb_loc)

    # CREATE FINAL FEATURE CLASS
    ap.FeatureClassToFeatureClass_conversion(routes_file, final_gdb_loc,
                                             routes_file)
    print("---------------------------")
# Apportion each demographic count to this intersection piece by its share
# of the source tract's area. Built from a (target, source) table so every
# row uses the identical ratio expression; the generated strings match the
# previous hand-written list byte-for-byte.
_area_share = "(!Shape_Area!/!na_va_tracts_tract_area!)"
_apportioned_fields = [
    ("total_pop_intersect", "demographic_data_total_pop"),
    ("white_only_pop_intersect", "demographic_data_white_only_pop"),
    # source field name spelled "vehcile" in the data — do not "fix" here
    ("no_vehicle_hh_intersect", "demographic_data_hh_no_vehcile"),
    ("under18_pop_intersect", "demographic_data_under18_pop"),
    ("over65_pop_intersect", "demographic_data_F65over_pop"),
    ("poverty_hh_intersect", "demographic_data_hh_poverty"),
    ("total_hh_intersect", "demographic_data_total_hh"),
    ("minority_pop_intersect", "demographic_data_minority_pop"),
]
arcpy.CalculateFields_management(
    in_table=table,
    expression_type="PYTHON3",
    fields=[[target, f"!{source}! * {_area_share}"]
            for target, source in _apportioned_fields])
statistics_type="ALL") # new names for columns mean, standard deviation path = r"F:\Studium_Trier\Masterarbeit\Datensaetze\tables\rfClass" tables = [ os.path.join(path, f) for f in os.listdir(path) if f.endswith(".dbf") ] for table in tables: name = os.path.basename(table) print(name) arcpy.management.AddFields(table, [["M" + name[0:4] + name[-6:-4], 'DOUBLE'], ["S" + name[0:4] + name[-6:-4], 'DOUBLE']]) arcpy.CalculateFields_management( table, "PYTHON3", [["M" + name[0:4] + name[-6:-4], "!MEAN!"], ["S" + name[0:4] + name[-6:-4], "!STD!"]]) arcpy.DeleteField_management(table, [ "MEAN", "STD", "COUNT", "AREA", "MAJORITY", "MAXIMUM", "MEDIAN", "MINIMUM", "MINORITY", "RANGE", "SUM", "VARIETY", "MIN", "MAX" ]) # conversion of dbf tables to csv arcpy.env.workspace = r"F:\Studium_Trier\Masterarbeit\Datensaetze\tables\rfClass" path = r"F:\Studium_Trier\Masterarbeit\Datensaetze\tables\rfClass" tables = [ os.path.join(path, f) for f in os.listdir(path) if f.endswith(".dbf") ] for table in tables: arcpy.TableToTable_conversion(
def convertStreets(Project_Folder, us_counties):
    """Simplify the HERE Streets layer and attach place, county, state,
    one-way, speed, and CFCC attributes; export as Streets_Final.

    Args:
        Project_Folder: folder containing Model_Inputs.gdb and
            Model_Outputs.gdb.
        us_counties: county-level polygons carrying GEOID and NAME fields
            (TIGER counties or equivalent).

    Returns:
        Path to the exported Streets_Final feature class in
        Model_Outputs.gdb.
    """
    arcpy.env.overwriteOutput = True
    Model_Inputs_gdb = os.path.join(Project_Folder, 'Model_Inputs.gdb')
    Model_Outputs_gdb = os.path.join(Project_Folder, 'Model_Outputs.gdb')

    streets = os.path.join(Model_Inputs_gdb, 'Streets')
    zlevels = os.path.join(Model_Inputs_gdb, 'Zlevels')
    adminbound4 = os.path.join(Model_Inputs_gdb, 'Adminbndy4')

    arcpy.env.workspace = Model_Inputs_gdb

    # Simplify Streets lines
    streets_simple = arcpy.SimplifyLine_cartography(
        in_features=streets,
        out_feature_class=os.path.join(Model_Outputs_gdb, "Streets_Simple"),
        algorithm="POINT_REMOVE",
        tolerance="5 Feet",
        error_resolving_option="RESOLVE_ERRORS",
        collapsed_point_option="KEEP_COLLAPSED_POINTS",
        error_checking_option="CHECK",
        in_barriers=[])[0]

    # Fixes vs. the previous version:
    #  - PlaceNameL/R and CountyNameL/R now match the CalculateField targets
    #    below (they were added as PlaceNamL/R, CountyNamL/R and therefore
    #    never populated);
    #  - OneWay is TEXT, since it stores '>', '<', or '' (SHORT cannot).
    arcpy.AddFields_management(
        in_table=streets_simple,
        field_description=[["REF_ZLEV", "LONG", "", "", "", ""],
                           ["NREF_ZLEV", "LONG", "", "", "", ""],
                           ["PlaceCodeL", "LONG", "", "", "", ""],
                           ["PlaceCodeR", "LONG", "", "", "", ""],
                           ["PlaceNameL", "TEXT", "", "255", "", ""],
                           ["PlaceNameR", "TEXT", "", "255", "", ""],
                           ["CountyCodeL", "LONG", "", "", "", ""],
                           ["CountyCodeR", "LONG", "", "", "", ""],
                           ["CountyNameL", "TEXT", "", "255", "", ""],
                           ["CountyNameR", "TEXT", "", "255", "", ""],
                           ["StateCodeL", "LONG", "", "", "", ""],
                           ["StateCodeR", "LONG", "", "", "", ""],
                           ["StateAbbrL", "TEXT", "", "255", "", ""],
                           ["StateAbbrR", "TEXT", "", "255", "", ""],
                           ["OneWay", "TEXT", "", "255", "", ""],
                           ["Speed", "LONG", "", "", "", ""],
                           ["CFCC", "TEXT", "", "255", "", ""],
                           ["M_LINK_ID", "LONG", "", "", "", ""],
                           ["OLD_LINK_ID", "LONG", "", "", "", ""]])
    print('Fields added to Streets')

    # REF node z-level (turning restrictions)
    arcpy.JoinField_management(in_data=streets_simple, in_field="REF_IN_ID",
                               join_table=zlevels, join_field="NODE_ID",
                               fields=["Z_LEVEL"])
    # zlevCalc is currently an identity mapping (z if z != 0 else 0);
    # kept as the hook for future null/sentinel handling.
    # field_type corrected to LONG to match the REF_ZLEV definition above.
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="REF_ZLEV",
                                    expression="zlevCalc(!Z_LEVEL!)",
                                    expression_type="PYTHON3",
                                    code_block="""def zlevCalc(z):
    if(z != 0):
        return z
    else:
        return 0""",
                                    field_type="LONG")
    # bug fix: the joined column is Z_LEVEL, not "ZLEVEL"; it must be
    # removed before the NREF join below or that join gets Z_LEVEL_1 and
    # the NREF calc reads the stale REF values.
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["Z_LEVEL"])
    print('REF_ZLEV Calculated')

    # NREF node z-level
    arcpy.JoinField_management(in_data=streets_simple, in_field="NREF_IN_ID",
                               join_table=zlevels, join_field="NODE_ID",
                               fields=["Z_LEVEL"])
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="NREF_ZLEV",
                                    expression="zlevCalc(!Z_LEVEL!)",
                                    expression_type="PYTHON3",
                                    code_block="""def zlevCalc(z):
    if(z != 0):
        return z
    else:
        return 0""",
                                    field_type="LONG")
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["Z_LEVEL"])
    print('NREF_ZLEV Calculated')

    # Calculate Cities/AdminBndry4 fields
    # calculate R_AREA Cities
    arcpy.JoinField_management(in_data=streets_simple, in_field="R_AREA_ID",
                               join_table=adminbound4, join_field="AREA_ID",
                               fields=["AREA_ID", "POLYGON_NM"])
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="PlaceCodeR",
                                    expression="!AREA_ID!",
                                    expression_type="PYTHON3")
    # NOTE(review): the R side returns name as-is while the L side below
    # upper-cases it — confirm whether both should use .upper().
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="PlaceNameR",
                                    expression="placeNameCalc(!POLYGON_NM!)",
                                    expression_type="PYTHON3",
                                    code_block="""def placeNameCalc(name):
    if name == 'ST LOUIS':
        return 'ST LOUIS CITY'
    else:
        return name""")
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["AREA_ID", "POLYGON_NM"])

    # calculate L_AREA Cities
    arcpy.JoinField_management(in_data=streets_simple, in_field="L_AREA_ID",
                               join_table=adminbound4, join_field="AREA_ID",
                               fields=["AREA_ID", "POLYGON_NM"])
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="PlaceCodeL",
                                    expression_type="PYTHON3",
                                    expression="!AREA_ID!")
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="PlaceNameL",
                                    expression_type="PYTHON3",
                                    expression="placeNameCalc(!POLYGON_NM!)",
                                    code_block="""def placeNameCalc(name):
    if name == 'ST LOUIS':
        return 'ST LOUIS CITY'
    else:
        return name.upper()""")
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["AREA_ID", "POLYGON_NM"])
    print('Cities Calculated')

    # Calculate County fields: CountyNameR/L, CountyCodeR/L.
    # us_counties needs to be TIGER or a county-level shapefile with GEOIDs.
    county_streets = arcpy.SpatialJoin_analysis(streets_simple, us_counties,
                                                "county_streets")[0]
    arcpy.JoinField_management(in_data=streets_simple, in_field="LINK_ID",
                               join_table=county_streets,
                               join_field="LINK_ID",
                               fields=["GEOID", "NAME"])
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="CountyNameR",
                                    expression="placeNameCalc(!GEOID!, !NAME!)",
                                    expression_type="PYTHON3",
                                    code_block="""def placeNameCalc(geoid, name):
    if geoid == '29189':
        return 'ST LOUIS'
    elif geoid == '29510':
        return 'ST LOUIS CITY'
    elif geoid == '17163':
        return 'ST CLAIR'
    else:
        return name.upper()""")
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="CountyNameL",
                                    expression="placeNameCalc(!GEOID!, !NAME!)",
                                    expression_type="PYTHON3",
                                    code_block="""def placeNameCalc(geoid, name):
    if geoid == '29189':
        return 'ST LOUIS'
    elif geoid == '29510':
        return 'ST LOUIS CITY'
    elif geoid == '17163':
        return 'ST CLAIR'
    else:
        return name.upper()""")
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="CountyCodeR",
                                    expression="!GEOID!",
                                    expression_type="PYTHON3")
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="CountyCodeL",
                                    expression="!GEOID!",
                                    expression_type="PYTHON3")
    print("County Calculated")

    # Calculate State fields: StateAbbrL/R, StateCodeL/R
    # (state FIPS is the first two characters of the county GEOID)
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="StateCodeL",
                                    expression_type="PYTHON3",
                                    expression="!GEOID![0:2]")
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="StateAbbrL",
                                    expression_type="PYTHON3",
                                    expression="stateAbbr(!StateCodeL!)",
                                    code_block="""def stateAbbr(statecode):
    if statecode == 29:
        return 'MO'
    else:
        return 'IL'
""")
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="StateCodeR",
                                    expression_type="PYTHON3",
                                    expression="!GEOID![0:2]")
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="StateAbbrR",
                                    expression_type="PYTHON3",
                                    expression="stateAbbr(!StateCodeR!)",
                                    code_block="""def stateAbbr(statecode):
    if statecode == 29:
        return 'MO'
    else:
        return 'IL'
""")
    arcpy.DeleteField_management(in_table=streets_simple,
                                 drop_field=["GEOID", "NAME"])

    # One Way Calculation: T = '>', F = '<'; blank means not one-way
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="OneWay",
                                    expression="oneWCalc(!DIR_TRAVEL!)",
                                    expression_type="PYTHON3",
                                    code_block="""def oneWCalc(dir):
    if(dir == "T"):
        return ">"
    elif(dir == "F"):
        return "<"
    else:
        return ''
""")

    # pick the to- or from-speed depending on travel direction
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="Speed",
                                    expression="speedCalc(!DIR_TRAVEL!,!TO_SPD_LIM!,!FR_SPD_LIM!)",
                                    expression_type="PYTHON3",
                                    code_block="""def speedCalc(dir, toSpeed, fromSpeed):
    if(dir == 'T'):
        return toSpeed
    else:
        return fromSpeed
""")
    print('OneWay Calculated')

    # Fill speeds still empty using the HERE speed-category defaults
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="Speed",
                                    expression="nullSpeedCalc(!Speed!, !SPEED_CAT!)",
                                    expression_type="PYTHON3",
                                    code_block="""def nullSpeedCalc(speed, cat):
    if(speed is None):
        if(cat == '8'):
            return 15
        elif(cat == '7'):
            return 20
        elif(cat == '6'):
            return 25
        elif(cat == '5'):
            return 35
""")
    print('Speed Calculated')

    # Calculate Functional Classes
    # TODO: REVIEW FUNCTIONAL CLASS CALCULATION
    # functional classes that adhere to the map maker specification
    arcpy.CalculateField_management(in_table=streets_simple,
                                    field="CFCC",
                                    expression="cfccCalc(!FUNC_CLASS!)",
                                    expression_type="PYTHON3",
                                    code_block="""def cfccCalc(fClass):
    if(fClass == 1):
        return 'A10'
    elif(fClass == 2):
        return 'A20'
    elif(fClass == 3):
        return 'A30'
    elif(fClass == 4 or fClass == 5):
        return 'A40'
""")
    print('CFCC Calculated')

    # preserve the original link id; M_LINK_ID is the new sequential id
    arcpy.CalculateFields_management(in_table=streets_simple,
                                     expression_type="PYTHON3",
                                     fields=[["M_LINK_ID", "!OBJECTID!"],
                                             ["OLD_LINK_ID", "!LINK_ID!"]],
                                     code_block="")[0]

    # update the schema to match the mapmaker schema
    updateSchema(streets_simple)

    return arcpy.FeatureClassToFeatureClass_conversion(
        in_features=streets_simple,
        out_path=Model_Outputs_gdb,
        out_name="Streets_Final")[0]
def senior(year, root_dir, bg_mergedgdb, region, places, bg_file, sen_file, final_gdb_loc):
    """Create the Senior{year}_Final feature class for Title VI analysis.

    Clips census block groups to the region boundary, joins the ACS age
    table (B01001 fields), and computes senior (age 65+) counts,
    percentages, and densities per block group.  Block groups are then
    dissolved to county and region level so each block group can be
    flagged as above/below the county and regional senior percentage.
    The result is spatially joined with places and handed to cleanUp()
    for field pruning and export into final_gdb_loc.

    Args:
        year: ACS year used in geodatabase/feature class names.
        root_dir: directory holding the working file geodatabases.
        bg_mergedgdb: gdb containing merged block group geometry and ACS tables.
        region: region boundary feature class used for clipping.
        places: places feature class for the final spatial join.
        bg_file: block group feature class name inside bg_mergedgdb.
        sen_file: ACS age (B01001) table name inside bg_mergedgdb.
        final_gdb_loc: geodatabase that receives the final feature class.
    """
    gdb = f"Senior{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)
    ap.ClearWorkspaceCache_management()
    outputgdb = ap.env.workspace
    working_file = f"Senior{year}_working"

    # Working feature class names/paths (county, region, joins, places).
    cw_file = f"Senior{year}_working_County"
    cw = os.path.join(outputgdb, cw_file)
    rw_file = f"Senior{year}_working_Region"
    rw = os.path.join(outputgdb, rw_file)
    twcw_file = f"Senior{year}_working_CountyJoin"
    twcw = os.path.join(outputgdb, twcw_file)
    twrw_file = f"Senior{year}_working_RegionJoin"
    twrw = os.path.join(outputgdb, twrw_file)
    twrw_places_file = f"Senior{year}_working_RegionJoin_Places"
    twrw_places = os.path.join(outputgdb, twrw_places_file)
    final_file = f"Senior{year}_Final"

    # CREATE WORKING GDB (replaces any stale copy from a previous run)
    replaceGDB(root_dir, gdb)

    # ACS B01001 fields: e1 = total population; e20-e25 and e44-e49 are
    # the male/female 65-and-over age buckets.
    fields_list = [
        'B01001e1', 'B01001e20', 'B01001e21', 'B01001e22', 'B01001e23',
        'B01001e24', 'B01001e25', 'B01001e44', 'B01001e45', 'B01001e46',
        'B01001e47', 'B01001e48', 'B01001e49'
    ]

    # Clip blockgroups by the region shape, and keep only fields of interest
    # clipPolygons(census_gdb, census_file, boundary_file, output_gdb, output_file, join_table, fields_list)
    clipPolygons(bg_mergedgdb, bg_file, region, os.path.join(root_dir, gdb),
                 working_file)
    joinAndCalcFields(working_file, bg_mergedgdb, os.path.join(root_dir, gdb),
                      'GEOID_Data', sen_file, 'GEOID', fields_list)
    ap.env.workspace = outputgdb

    ap.management.AddFields(
        working_file,
        [
            ["SqMiles", "DOUBLE"],
            ['TPop', 'DOUBLE'],
            ["TSenior", "DOUBLE"],     # count
            ["PSenior", "Double"],     # percent
            ["SeniorDens", "DOUBLE"],  # density
            ["CoAbvSenior", "SHORT"],  # is the bg above county senior percentage
            ["RegAbvSenior", "SHORT"]  # is the bg above region senior percentage
        ])

    # BUG FIX: SqMiles was added above but never calculated, leaving
    # SeniorDens and the dissolved SUM_SqMiles empty (poverty/lowCar both
    # calculate it).  It is listed first because CalculateFields evaluates
    # the field list in order, so SeniorDens below can use it.
    ap.CalculateFields_management(
        working_file, 'PYTHON3',
        [
            ['SqMiles', '!shape.area@squaremiles!'],
            ['TPop', '!B01001e1!'],
            [
                'TSenior',  # all population over 65
                '!B01001e44! + !B01001e45! + !B01001e46! + !B01001e47! + !B01001e48! + !B01001e49! + !B01001e20! + !B01001e21! + !B01001e22! + !B01001e23! + !B01001e24! + !B01001e25!'
            ],
            ['PSenior', '!TSenior! / !TPop!'],
            ['SeniorDens', '!TSenior!/!SqMiles!']
        ])

    # DISSOLVE TRACTS BY COUNTY - SUM VALUES
    ap.Dissolve_management(
        working_file, cw, "COUNTYFP",
        [["TPop", "SUM"], ["SqMiles", "SUM"], ["TSenior", "SUM"]])
    print("")
    print("---------------------------")
    print("Dissolve County Stats")

    # DISSOLVE TRACTS BY REGION - SUM VALUES
    ap.Dissolve_management(
        working_file, rw, "",
        [["TPop", "SUM"], ["SqMiles", "SUM"], ["TSenior", "SUM"]])
    print("")
    print("---------------------------")
    print("Dissolve Region Stats")

    # define and compute county calculation fields
    ap.management.AddFields(
        cw,
        [
            ["CoSqMiles", "DOUBLE"],     # square miles
            ["CoTPop", "DOUBLE"],        # total pop
            ["CoTSenior", "DOUBLE"],     # senior pop
            ["CoPSenior", "Double"],     # senior percentage
            ["CoSeniorDens", "DOUBLE"],  # seniors per sq mile
            ["CoAbvSenior", "DOUBLE"]
        ])  # is the bg above county senior percentage
    # CONSISTENCY FIX: "PYTHON" is not a valid expression_type for
    # CalculateFields (valid: PYTHON3/ARCADE/SQL); every other call in
    # this module uses "PYTHON3".
    ap.CalculateFields_management(
        cw, "PYTHON3",
        [["CoSqMiles", "!SUM_SqMiles!"],
         ["CoTPop", "!SUM_TPop!"],
         ["CoTSenior", "!SUM_TSenior!"],
         ["CoPSenior", "!CoTSenior! / !CoTPop!"],
         ["CoSeniorDens", "!CoTSenior! / !CoSqMiles!"]])
    print("")
    print("---------------------------")
    print(cw_file + " fields calculated !!!")

    # define and compute region calculation fields
    ap.management.AddFields(
        rw,
        [["RegSqMiles", "DOUBLE"], ["RegTPop", "DOUBLE"],
         ["RegTSenior", "DOUBLE"], ["RegPSenior", "Double"],
         ["RegSeniorDens", "DOUBLE"], ["RegAbvSenior", "DOUBLE"]])
    # CONSISTENCY FIX: "PYTHON" -> "PYTHON3" (see county note above).
    ap.CalculateFields_management(
        rw, "PYTHON3",
        [["RegSqMiles", "!SUM_SqMiles!"],
         ["RegTPop", "!SUM_TPop!"],
         ["RegTSenior", "!SUM_TSenior!"],
         ["RegPSenior", "!RegTSenior! / !RegTPop!"],
         ["RegSeniorDens", "!RegTSenior! / !RegSqMiles!"]])
    print("")
    print("---------------------------")
    print(rw_file + " fields calculated !!!")

    # SPATIAL JOIN TRACTS FILE WITH COUNTY FILE
    ap.SpatialJoin_analysis(working_file, cw, twcw)
    print("")
    print("---------------------------")
    print("County Spaital Join")

    # SPATIAL JOIN TRACTS FILE WITH REGION FILE
    ap.SpatialJoin_analysis(twcw, rw, twrw)
    print("")
    print("---------------------------")
    print("Region Spaital Join")

    # identify blockgroups as greater than county and regional average
    ap.CalculateFields_management(
        in_table=twrw,
        expression_type="PYTHON3",
        fields=
        "CoAbvSenior 'ifBlock(!PSenior!, !CoPSenior!)';RegAbvSenior 'ifBlock(!PSenior!, !RegPSenior!)'",
        code_block="""def ifBlock(area, region):
    if area > region:
        return 1
    else:
        return 0
""")
    print("")
    print("---------------------------")
    print("Above Senior Density Calculations Completed")

    # SPATIAL JOIN TRACTS FILE WITH PLACES FILE
    ap.SpatialJoin_analysis(twrw, places, twrw_places)
    print("---------------------------")
    print("Places Spaital Join")

    # remove these unnecessary fields in the cleanup stage
    delete_fields = [
        'B01001e20', 'B01001e21', 'B01001e22', 'B01001e23', 'B01001e24',
        'B01001e25', 'B01001e44', 'B01001e45', 'B01001e46', 'B01001e47',
        'B01001e48', 'B01001e49', 'B01001e1', 'Join_Count', 'TARGET_FID',
        'Join_Count_1', 'TARGET_FID_1', 'Join_Count_12', 'TARGET_FID_12',
        'COUNTYFP_1', 'SUM_TPop', 'SUM_SqMiles', 'SUM_TSenior',
        'Shape_Length_1', 'Shape_Area_1', 'CoAbvSenior_1', 'SUM_TPop_1',
        'SUM_SqMiles_1', 'SUM_TSenior_1', 'Shape_Length_12', 'Shape_Area_12',
        'RegSqMiles', 'RegAbvSenior_1', 'STATEFP_1', 'PLACEFP', 'PLACENS',
        'AFFGEOID', 'GEOID_1', 'LSAD', 'ALAND_1', 'AWATER_1'
    ]

    cleanUp(twrw_places, gdb, final_file, final_gdb_loc, delete_fields)
def lowCar(year, root_dir, tracts_mergegdb, region, places, tracts_file, commute_file, final_gdb_loc):
    """Create the NoCar{year}_final feature class (zero/one-car households).

    Clips census tracts to the region boundary, joins the ACS vehicle
    availability table (B08201: e1 = total households, e2 = zero-vehicle,
    e3 = one-vehicle), and computes no-car / one-car / low-car counts,
    percentages, and densities per tract.  Tracts are dissolved to county
    and region totals so each tract can be flagged as above/below the
    county and regional rates, then joined with places and exported via
    cleanUp().

    Args:
        year: ACS year used in geodatabase/feature class names.
        root_dir: directory holding the working file geodatabases.
        tracts_mergegdb: gdb containing merged tract geometry and ACS tables.
        region: region boundary feature class used for clipping.
        places: places feature class for the final spatial join.
        tracts_file: tract feature class name inside tracts_mergegdb.
        commute_file: ACS vehicle availability (B08201) table name.
        final_gdb_loc: geodatabase that receives the final feature class.
    """
    gdb = f"LowCar{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)
    ap.ClearWorkspaceCache_management()
    outputgdb = ap.env.workspace
    working_file = f"LowCar{year}_working"
    working_gdb = os.path.join(root_dir, gdb)

    # Working feature class names/paths (written into the working gdb).
    cw = os.path.join(working_gdb, "NoCar_working_County")
    cw_file = f"NoCar{year}_Working_County"
    rw = os.path.join(working_gdb, "NoCar_working_Reg")
    rw_file = f"NoCar{year}_Working_Reg"
    twcw_file = f"NoCar{year}_working_CountyJoin"
    twcw = os.path.join(working_gdb, twcw_file)
    twrw_file = f"NoCar{year}_working_RegJoin"
    twrw = os.path.join(working_gdb, twrw_file)
    twrw_places_file = f"NoCar{year}_working_RegionJoin_Places"
    twrw_places = os.path.join(working_gdb, twrw_places_file)
    final_file = f"NoCar{year}_final"

    # Fields dropped in the cleanup stage (duplicate entries that were in
    # the original list have been removed; deleting a name once suffices).
    delete_fields = [
        "Join_Count", "TARGET_FID", "B08201e2", "B08201e3", "B08201e1",
        "SUM_THH", "SUM_TNoCar", "SUM_TOneCar", "SUM_SqMiles", "SUM_THH_1",
        "SUM_TNoCar_1", "SUM_TOneCar_1", "SUM_SqMiles_1", "Shape_Length_12",
        "Shape_Area_12"
    ]

    replaceGDB(root_dir, gdb)

    fields_list = ["B08201e1", "B08201e2", "B08201e3"]

    # clipPolygons(census_gdb, census_file, boundary_file, output_gdb, output_file, join_table, fields_list)
    clipPolygons(tracts_mergegdb, tracts_file, region,
                 os.path.join(root_dir, gdb), working_file)
    # joinAndCalcFields(fc, census_gdb, output_gdb, key, table, table_key, fields_list)
    joinAndCalcFields(working_file, tracts_mergegdb,
                      os.path.join(root_dir, gdb), 'GEOID_Data', commute_file,
                      'GEOID', fields_list)

    # ADDING ALL OF THE FIELDS TO TRACTS WORKING
    # BUG FIX: SqMiles is calculated below but was missing from this list
    # (senior/poverty add it explicitly after clipPolygons).
    ap.management.AddFields(
        working_file,
        [["SqMiles", "DOUBLE"], ["THH", "DOUBLE"], ["TNoCar", "DOUBLE"],
         ["TOneCar", "DOUBLE"], ["TLowCar", "LONG"], ["PNoCar", "DOUBLE"],
         ["POneCar", "DOUBLE"], ["PLowCar", "DOUBLE"], ["NoCarDens", "DOUBLE"],
         ["OneCarDens", "DOUBLE"], ["LowCarDens", "DOUBLE"]])
    print("Added fields")

    # CALCULATE OUT TRACT CENSUS VALUES
    ap.CalculateFields_management(
        working_file, "PYTHON3",
        [["SqMiles", "!shape.area@squaremiles!"], ["THH", "!B08201e1!"],
         ["TNoCar", "!B08201e2!"], ["TOneCar", "!B08201e3!"],
         ['TLowCar', '!B08201e2! + !B08201e3!'],
         ['PNoCar', '!B08201e2!/!B08201e1!'],
         ['POneCar', '!B08201e3!/!B08201e1!'],
         ['PLowCar', '!TLowCar!/!B08201e1!']])
    print(
        "Finished calculating Population, Total No Car, Total One Car, and Total Low Car"
    )

    # CALCULATE OUT TRACT CENSUS DENSITIES
    ap.CalculateFields_management(working_file, "PYTHON3",
                                  [["NoCarDens", "!TNoCar!/!SqMiles!"],
                                   ['OneCarDens', '!TOneCar!/!SqMiles!'],
                                   ['LowCarDens', '!TLowCar! / !SqMiles!']])
    print("Finished calculating Population, Total No Car, and Total One Car")

    # DISSOLVE TRACTS BY COUNTY - SUM VALUES
    ap.Dissolve_management(
        working_file, cw, "COUNTYFP",
        [["THH", "SUM"], ["TNoCar", "SUM"], ["TOneCar", "SUM"],
         ["TLowCar", "SUM"], ["SqMiles", "SUM"]])
    print("Dissolve County Stats")

    # DISSOLVE TRACTS BY REGION - SUM VALUES
    ap.Dissolve_management(
        working_file, rw, "",
        [["THH", "SUM"], ["TNoCar", "SUM"], ["TOneCar", "SUM"],
         ["TLowCar", "SUM"], ["SqMiles", "SUM"]])
    print("Dissolve Region Stats")

    # ADD COUNTY VALUE FIELDS
    ap.management.AddFields(
        cw,
        [["CoTHH", "DOUBLE"], ["CoTNoCar", "DOUBLE"], ["CoTOneCar", "DOUBLE"],
         ["CoTLowCar", "DOUBLE"], ["CoPNoCar", "DOUBLE"],
         ["CoPOneCar", "DOUBLE"], ["CoPLowCar", "DOUBLE"],
         ["CoSqMiles", "DOUBLE"], ["CoNoCarDens", "DOUBLE"],
         ["CoOneCarDens", "DOUBLE"], ["CoLowCarDens", "DOUBLE"],
         ["CoAbvNoCar", "SHORT", '', '', '', 0],
         ["CoAbvOneCar", "SHORT", '', '', '', 0],
         ["CoAbvLowCar", "SHORT", '', '', '', 0]])
    print(cw_file + " fields added !!!")

    # ADD REGION VALUE FIELDS
    ap.management.AddFields(
        rw,
        [["RegTHH", "DOUBLE"], ["RegTNoCar", "DOUBLE"], ["RegPNoCar", "DOUBLE"],
         ["RegTOneCar", "DOUBLE"], ["RegPOneCar", "DOUBLE"],
         ["RegTLowCar", "DOUBLE"], ["RegPLowCar", "DOUBLE"],
         ["RegSqMiles", "DOUBLE"], ["RegNoCarDens", "DOUBLE"],
         ["RegOneCarDens", "DOUBLE"], ["RegLowCarDens", "DOUBLE"],
         ["RegAbvNoCar", "SHORT", '', '', '', 0],
         ["RegAbvOneCar", "SHORT", '', '', '', 0],
         ["RegAbvLowCar", "SHORT", '', '', '', 0]])
    print(rw_file + " fields added !!!")

    # CALCULATE COUNTY VALUES
    ap.CalculateFields_management(
        cw, "PYTHON3",
        [["CoTHH", "!SUM_THH!"], ["CoTNoCar", "!SUM_TNoCar!"],
         ["CoTOneCar", "!SUM_TOneCar!"], ["CoTLowCar", "!SUM_TLowCar!"],
         ["CoSqMiles", "!SUM_SqMiles!"]])
    print(cw_file + " fields calculated !!!")

    # CALCULATE REGIONAL VALUES
    ap.CalculateFields_management(
        rw, "PYTHON3",
        [["RegTHH", "!SUM_THH!"], ["RegTNoCar", "!SUM_TNoCar!"],
         ["RegTOneCar", "!SUM_TOneCar!"], ["RegTLowCar", "!SUM_TLowCar!"],
         ["RegSqMiles", "!SUM_SqMiles!"]])
    print(rw_file + " fields calculated !!!")

    # SPATIAL JOIN TRACTS FILE WITH COUNTY FILE
    ap.SpatialJoin_analysis(working_file, cw, twcw)
    print("County Spaital Join")

    # SPATIAL JOIN TRACTS FILE WITH REGION FILE
    ap.SpatialJoin_analysis(twcw, rw, twrw)
    print("Region Spaital Join")

    # CALCULATE OUT LOW CAR AND DENSITIES FOR COUNTYIES AND REGION ON TRACT FILE
    # BUG FIX: CoPOneCar/RegPOneCar used '+' instead of '/' (a percentage is
    # count / total, as every sibling field computes it).
    # BUG FIX: CoPLowCar/RegPLowCar were never calculated even though the
    # above-average flags below compare !PLowCar! against them; they are
    # computed after CoTLowCar/RegTLowCar since fields evaluate in order.
    ap.CalculateFields_management(
        twrw, "PYTHON3",
        [["CoPNoCar", "!CoTNoCar!/!CoTHH!"],
         ["RegPNoCar", "!RegTNoCar!/!RegTHH!"],
         ["CoPOneCar", "!CoTOneCar!/!CoTHH!"],
         ["RegPOneCar", "!RegTOneCar!/!RegTHH!"],
         ["CoTLowCar", "!CoTOneCar!+!CoTNoCar!"],
         ["RegTLowCar", "!RegTOneCar!+!RegTNoCar!"],
         ["CoPLowCar", "!CoTLowCar!/!CoTHH!"],
         ["RegPLowCar", "!RegTLowCar!/!RegTHH!"],
         ["CoOneCarDens", "!CoTOneCar!/!CoSqMiles!"],
         ["RegOneCarDens", "!RegTOneCar!/!RegSqMiles!"],
         ["CoNoCarDens", "!CoTNoCar!/!CoSqMiles!"],
         ["RegNoCarDens", "!RegTNoCar!/!RegSqMiles!"],
         ["CoLowCarDens", "(!CoTOneCar! + !CoTNoCar!) / !CoSqMiles!"],
         ["RegLowCarDens", "(!RegTOneCar! + !RegTNoCar!) / !RegSqMiles!"]])
    print('Calculated County and Regional Statistics')

    # CALCULATE OUT ABOVE REGIONAL AND COUNTY AVERAGE DENSITIES FOR TRACTS
    ap.CalculateFields_management(
        in_table=twrw,
        expression_type="PYTHON3",
        fields=
        "CoAbvNoCar 'ifBlock(!PNoCar!, !CoPNoCar!)';RegAbvNoCar 'ifBlock(!PNoCar!, !RegPNoCar!)';CoAbvLowCar 'ifBlock(!PLowCar!, !CoPLowCar!)';RegAbvLowCar 'ifBlock(!PLowCar!, !RegPLowCar!)';CoAbvOneCar 'ifBlock(!POneCar!, !CoPOneCar!)';RegAbvOneCar 'ifBlock(!POneCar!, !RegPOneCar!)'",
        code_block="""def ifBlock(area, region):
    if area > region:
        return 1
    else:
        return 0
""")
    print("Above Car Density Calculations Completed")

    ap.SpatialJoin_analysis(twrw, places, twrw_places)
    print("")
    print("---------------------------")
    print("Places Spaital Join")

    # CREATE FINAL FEATURE CLASS
    cleanUp(twrw_places, gdb, final_file, final_gdb_loc, delete_fields)
def routeBuffers(config):
    """Buffer bus routes at 0.75/0.5/0.25 miles and attach Title VI populations.

    For each buffer distance: buffers the route lines, dissolves the route
    buffers into a single system buffer, then for each Title VI ACS layer
    (minority, LEP, poverty, senior, no-car/low-car) clips the layer to the
    system buffer, converts the layer's density field back into a population
    count for the clipped area, and joins the summed count onto the system
    buffer feature class.  Intermediate clip/dissolve feature classes are
    deleted after each join.

    Args:
        config: dict with keys 'date', 'sign', 'acs_year', 'title_vi_gdb',
            'processed_dir', 'ds_gdb', and 'files' (CSV name and feature
            class name templates).
    """
    ap.env.overwriteOutput = True
    date = config['date']
    sign = config['sign']
    acs_year = config['acs_year']
    title_vi_gdb = config['title_vi_gdb']
    csv_dir = config['processed_dir']
    ds_gdb = config['ds_gdb']

    # CSV TABLES
    patterns_name = config['files']['patterns']['name']
    patterns_table = os.path.join(csv_dir, f'{patterns_name}.csv')

    # FEATURE CLASS NAMES
    # NOTE(review): routes_dir_line is never used below — confirm whether the
    # directional routes layer was meant to feed the buffers.
    routes_dir_line = config['files']['feat_classes']['routes_dir']
    routes_line = config['files']['feat_classes']['routes']
    route_buffer = config['files']['feat_classes']['route_buffer']
    sys_buffer = config['files']['feat_classes']['sys_buffer']

    # PATTERNS GROUP — loop-invariant, so read and aggregate the CSV once
    # instead of once per buffer distance.
    # BUG FIX: DataFrame.drop is not in-place; the original discarded the
    # dropped result.  NOTE(review): patterns_pd is not used further in this
    # function — confirm whether it was meant to be exported.
    patterns_pd = pd.read_csv(patterns_table).groupby([
        'RouteAbbr', 'LineName', 'PubNum', 'LineNum', 'ShapeID', 'DirName'
    ]).mean()
    patterns_pd = patterns_pd.drop(
        ['shape_lat', 'shape_lon', 'shape_pt_sequence'], axis=1)
    print('Unique Routes table created')

    # MetroBusRoutes_Buffer and MetroBusSystem_Buffer distances
    buffer_list = [{
        'dist': '0.75 miles',
        'name': '075'
    }, {
        'dist': '0.5 miles',
        'name': '05'
    }, {
        'dist': '0.25 miles',
        'name': '025'
    }]

    # BUFFERING 0.75, 0.5, 0.25 MILES
    for dist in buffer_list:
        # ROUTE BUFFER
        routes_buffer = f'{route_buffer}{dist["name"]}_{sign}_{date}'
        # NOTE(review): routes_buffer_loc is unused; Buffer_analysis writes
        # routes_buffer relative to the current workspace — confirm the
        # intended output location.
        routes_buffer_loc = os.path.join(ds_gdb, routes_buffer)
        ap.Buffer_analysis(routes_line, routes_buffer, dist['dist'], "FULL",
                           "ROUND", "NONE")
        print('Routes Buffered')

        # SYSTEM BUFFER (dissolves the route buffers)
        mb_sys_buffer = f'{sys_buffer}{dist["name"]}_{sign}_{date}'
        mb_sys_buffer_loc = os.path.join(ds_gdb, mb_sys_buffer)  # NOTE(review): unused
        ap.Dissolve_management(routes_buffer, mb_sys_buffer)
        print('System Buffered')
        # 'type' is a constant join key used to attach the dissolved
        # single-row population sums back onto the system buffer.
        ap.AddField_management(mb_sys_buffer, 'type', 'TEXT')
        ap.CalculateField_management(mb_sys_buffer, 'type', '"system"')

        # TITLE VI POPULATION ANALYSIS
        # Each entry: source ACS layer, the population field to sum, the
        # output field name on the buffer, and the density expression that
        # is multiplied by the clipped area to estimate population.
        acs_list = [{
            'file_name': f'Minority{acs_year}_Final',
            'pop': 'TPop',
            'field': f'ClipPop{dist["name"]}',
            'calc': '(!TPop!/!SqMiles!)'
        }, {
            'file_name': f'Minority{acs_year}_Final',
            'pop': 'TMinority',
            'field': f'ClipMin{dist["name"]}',
            'calc': '!MinorityDens!'
        }, {
            'file_name': f'LEP{acs_year}_Final',
            'pop': 'TLEP',
            'field': f'ClipLEP{dist["name"]}',
            'calc': '!LEPDens!'
        }, {
            'file_name': f'Poverty{acs_year}_Final',
            'pop': 'TPov',
            'field': f'ClipPov{dist["name"]}',
            'calc': '!PovDens!'
        }, {
            'file_name': f'Senior{acs_year}_Final',
            'pop': 'TSenior',
            'field': f'ClipSen{dist["name"]}',
            'calc': '!SeniorDens!'
        }, {
            'file_name': f'NoCar{acs_year}_Final',
            'pop': 'TNoCar',
            'field': f'ClipNoCar{dist["name"]}',
            'calc': '!NoCarDens!'
        }, {
            'file_name': f'NoCar{acs_year}_Final',
            'pop': 'TLowCar',
            'field': f'ClipLowCar{dist["name"]}',
            'calc': '!LowCarDens!'
        }]

        # LOOP FOR CALCULATING TITLE VI POPULATION BUFFERS
        for acs in acs_list:
            acs_in = os.path.join(title_vi_gdb, acs['file_name'])
            acs_out = f'{mb_sys_buffer}_{acs["pop"]}'
            print('')
            print('-------------------------')
            print(f'Start of {acs_out} Creation')
            print('-------------------------')
            print('')

            # Clip the ACS layer to the system buffer and estimate the
            # population inside the buffer: density * clipped square miles.
            ap.Clip_analysis(acs_in, mb_sys_buffer, acs_out)
            ap.AddFields_management(
                acs_out, [[acs['field'], 'DOUBLE'], ['ClipSqMiles', 'DOUBLE']])
            print(f'Added fields to {acs_out} ')
            ap.CalculateFields_management(
                acs_out, 'PYTHON3',
                [['ClipSqMiles', "!shape.area@squaremiles!"],
                 [acs['field'], f'{acs["calc"]} * !ClipSqMiles!']])
            print(f'Calculated fields for {acs_out}')

            # Dissolve to a single row, sum the estimated population, and
            # join the sum onto the system buffer via the constant 'type' key.
            acs_out_diss = f'{acs_out}_dissolve'
            ap.Dissolve_management(acs_out, acs_out_diss, '',
                                   [[acs['field'], 'SUM']])
            ap.AddField_management(acs_out_diss, 'type', 'TEXT')
            ap.CalculateField_management(acs_out_diss, 'type', '"system"')
            ap.JoinField_management(mb_sys_buffer, 'type', acs_out_diss,
                                    'type', f'SUM_{acs["field"]}')
            ap.AddField_management(mb_sys_buffer, acs['field'], 'DOUBLE')
            ap.CalculateField_management(mb_sys_buffer, acs["field"],
                                         f'!SUM_{acs["field"]}!')
            ap.DeleteField_management(mb_sys_buffer, f'SUM_{acs["field"]}')

            # DELETE INTERMEDIATE FEATURE CLASSES
            for d in (acs_out, acs_out_diss):
                ap.Delete_management(d)
def lep(year, root_dir, bg_mergedgdb, region, places, bg_file, lep_file, final_gdb_loc, census_key, tract_mergedgdb, tract_file): gdb = f"LEP{year}.gdb" ap.env.workspace = os.path.join(root_dir, gdb) # -----> Change Year ap.ClearWorkspaceCache_management() outputgdb = ap.env.workspace working_file = "LEP_working" # lep_table = os.path.join(bg_mergedgdb, lep_file) # bg = os.path.join(bg_mergedgdb, bg_file) # working_gdb = os.path.join(root_dir, gdb) # Working file locations cw_file = f"LEP{year}_working_County" cw = os.path.join(outputgdb, cw_file) rw_file = f"LEP{year}_working_Region" rw = os.path.join(outputgdb, rw_file) twcw_file = f"LEP{year}_working_CountyJoin" twcw = os.path.join(outputgdb, twcw_file) twrw_file = f"LEP{year}_working_RegionJoin" twrw = os.path.join(outputgdb, twrw_file) twrw_places_file = f"LEP{year}_working_RegionJoin_Places" twrw_places = os.path.join(outputgdb, twrw_places_file) final_file = f"LEP{year}_Final" final = os.path.join(outputgdb, final_file) # LIST OF FIELDS TO DELETE delete_fields = [ "Join_Count", "TARGET_FID", "Join_Count_1", "TARGET_FID_1", "Join_Count_12", "TARGET_FID_12", "ALAND_1", "AWATER_1", "Shape_Length_12", "Shape_Area_12", "Shape_Length_1", "Shape_Area_1", "GEOID_1", "B16004e1", "B16004e10", "B16004e11", "B16004e12", "B16004e13", "B16004e14", "B16004e15", "B16004e16", "B16004e17", "B16004e18", "B16004e19", "B16004e2", "B16004e20", "B16004e21", "B16004e22", "B16004e23", "B16004e24", "B16004e25", "B16004e26", "B16004e27", "B16004e28", "B16004e29", "B16004e3", "B16004e30", "B16004e31", "B16004e32", "B16004e33", "B16004e34", "B16004e35", "B16004e36", "B16004e37", "B16004e38", "B16004e39", "B16004e4", "B16004e40", "B16004e41", "B16004e42", "B16004e43", "B16004e44", "B16004e45", "B16004e46", "B16004e47", "B16004e48", "B16004e49", "B16004e5", "B16004e50", "B16004e51", "B16004e52", "B16004e53", "B16004e54", "B16004e55", "B16004e56", "B16004e57", "B16004e58", "B16004e59", "B16004e6", "B16004e60", "B16004e61", "B16004e62", 
"B16004e63", "B16004e64", "B16004e65", "B16004e66", "B16004e67", "B16004e7", "B16004e8", "B16004e9", "B16004m1", "B16004m10", "B16004m11", "B16004m12", "B16004m13", "B16004m14", "B16004m15", "B16004m16", "B16004m17", "B16004m18", "B16004m19", "B16004m2", "B16004m20", "B16004m21", "B16004m22", "B16004m23", "B16004m24", "B16004m25", "B16004m26", "B16004m27", "B16004m28", "B16004m29", "B16004m3", "B16004m30", "B16004m31", "B16004m32", "B16004m33", "B16004m34", "B16004m35", "B16004m36", "B16004m37", "B16004m38", "B16004m39", "B16004m4", "B16004m40", "B16004m41", "B16004m42", "B16004m43", "B16004m44", "B16004m45", "B16004m46", "B16004m47", "B16004m48", "B16004m49", "B16004m5", "B16004m50", "B16004m51", "B16004m52", "B16004m53", "B16004m54", "B16004m55", "B16004m56", "B16004m57", "B16004m58", "B16004m59", "B16004m6", "B16004m60", "B16004m61", "B16004m62", "B16004m63", "B16004m64", "B16004m65", "B16004m66", "B16004m67", "B16004m7", "B16004m8", "B16004m9", "C16002e1", "C16002e10", "C16002e11", "C16002e12", "C16002e13", "C16002e14", "C16002e2", "C16002e3", "C16002e4", "C16002e5", "C16002e6", "C16002e7", "C16002e8", "C16002e9", "C16002m1", "C16002m10", "C16002m11", "C16002m12", "C16002m13", "C16002m14", "C16002m2", "C16002m3", "C16002m4", "C16002m5", "C16002m6", "C16002m7", "C16002m8", "C16002m9", "GEOID", "SUM_TPOP", "SUM_SqMiles", "SUM_TEngOnly", "SUM_TEngVW", "SUM_TLEP", "SUM_TLEPAsian", "SUM_TLEPSpan", "SUM_TLEPEuro", "SUM_TLEPOther", "SUM_TPOP_1", "SUM_SqMiles_1", "SUM_TEngOnly_1", "SUM_TEngVW_1", "SUM_TLEP_1", "SUM_TLEPAsian_1", "SUM_TLEPSpan_1", "SUM_TLEPEuro_1", "SUM_TLEPOther_1", # "C16001e8","C16001e11","C16001e14","C16001e17","C16001e20","C16001e23","C16001e26","C16001e29","C16001e35", "SUM_TLEPFrench" "SUM_TLEPFrench_1", "SUM_TLEPGerm", "SUM_TLEPGerm_1", "SUM_TLEPRuss", "SUM_TLEPRuss_1", "SUM_TLEPOIndoEuro", "SUM_TLEPOIndoEuro_1", "SUM_TLEPKor", "SUM_TLEPKor_1", "SUM_TLEPChin", "SUM_TLEPChin_1", "SUM_TLEPViet", "SUM_TLEPViet_1", "SUM_TLEPTaglog", 
"SUM_TLEPTaglog_1", "SUM_TLEPArabic", "SUM_TLEPArabic_1" ] replaceGDB(root_dir, gdb) fields_list = [ "B16004e1", "B16004e10", "B16004e11", "B16004e12", "B16004e13", "B16004e14", "B16004e15", "B16004e16", "B16004e17", "B16004e18", "B16004e19", "B16004e2", "B16004e20", "B16004e21", "B16004e22", "B16004e23", "B16004e24", "B16004e25", "B16004e26", "B16004e27", "B16004e28", "B16004e29", "B16004e3", "B16004e30", "B16004e31", "B16004e32", "B16004e33", "B16004e34", "B16004e35", "B16004e36", "B16004e37", "B16004e38", "B16004e39", "B16004e4", "B16004e40", "B16004e41", "B16004e42", "B16004e43", "B16004e44", "B16004e45", "B16004e46", "B16004e47", "B16004e48", "B16004e49", "B16004e5", "B16004e50", "B16004e51", "B16004e52", "B16004e53", "B16004e54", "B16004e55", "B16004e56", "B16004e57", "B16004e58", "B16004e59", "B16004e6", "B16004e60", "B16004e61", "B16004e62", "B16004e63", "B16004e64", "B16004e65", "B16004e66", "B16004e67", "B16004e7", "B16004e8", "B16004e9", "B16004m1", "B16004m10", "B16004m11", "B16004m12", "B16004m13", "B16004m14", "B16004m15", "B16004m16", "B16004m17", "B16004m18", "B16004m19", "B16004m2", "B16004m20", "B16004m21", "B16004m22", "B16004m23", "B16004m24", "B16004m25", "B16004m26", "B16004m27", "B16004m28", "B16004m29", "B16004m3", "B16004m30", "B16004m31", "B16004m32", "B16004m33", "B16004m34", "B16004m35", "B16004m36", "B16004m37", "B16004m38", "B16004m39", "B16004m4", "B16004m40", "B16004m41", "B16004m42", "B16004m43", "B16004m44", "B16004m45", "B16004m46", "B16004m47", "B16004m48", "B16004m49", "B16004m5", "B16004m50", "B16004m51", "B16004m52", "B16004m53", "B16004m54", "B16004m55", "B16004m56", "B16004m57", "B16004m58", "B16004m59", "B16004m6", "B16004m60", "B16004m61", "B16004m62", "B16004m63", "B16004m64", "B16004m65", "B16004m66", "B16004m67", "B16004m7", "B16004m8", "B16004m9", "C16002e1", "C16002e10", "C16002e11", "C16002e12", "C16002e13", "C16002e14", "C16002e2", "C16002e3", "C16002e4", "C16002e5", "C16002e6", "C16002e7", "C16002e8", "C16002e9", 
"C16002m1", "C16002m10", "C16002m11", "C16002m12", "C16002m13", "C16002m14", "C16002m2", "C16002m3", "C16002m4", "C16002m5", "C16002m6", "C16002m7", "C16002m8", "C16002m9", "C16001e5", "C16001e8", "C16001e11", "C16001e14", "C16001e17", "C16001e20", "C16001e23", "C16001e26", "C16001e29", "C16001e35" ] # clipPolygons(census_gdb, census_file, boundary_file, output_gdb, output_file, join_table, fields_list) clipPolygons(bg_mergedgdb, bg_file, region, os.path.join(root_dir, gdb), working_file) joinAndCalcFields(working_file, bg_mergedgdb, os.path.join(root_dir, gdb), 'GEOID_Data', lep_file, 'GEOID', fields_list) ap.env.workspace = outputgdb # ADDING ALL THE BLOCK GROUP CENSUS FIELDS ap.management.AddFields( working_file, [ ["TPOP", "DOUBLE"], ["SqMiles", "DOUBLE"], ["TEngOnly", "DOUBLE"], ["TEngVW", "DOUBLE"], ["PEngVW", "DOUBLE"], ["TLEP", "DOUBLE"], ["PLEP", "DOUBLE"], ["LEPDens", "DOUBLE"], ["TLEPAsian", "DOUBLE"], ["PLEPAsian", "DOUBLE"], ["LEPAsianDens", "DOUBLE"], ["TLEPSpan", "DOUBLE"], ["PLEPSpan", "DOUBLE"], ["LEPSpanDens", "DOUBLE"], ["TLEPEuro", "DOUBLE"], ["PLEPEuro", "DOUBLE"], ["LEPEuroDens", "DOUBLE"], ["TLEPOther", "DOUBLE"], ["PLEPOther", "DOUBLE"], ['LEPOtherDens', "DOUBLE"], ["CoAbvLEP", "DOUBLE"], ["RegAbvLEP", "DOUBLE"], [ "TLEPFrench", "DOUBLE" ], # C16001e8 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: French, Haitian, or Cajun: Speak English less than "very well": Population 5 years and over -- (Estimate) [ "TLEPGerm", "DOUBLE" ], # C16001e11 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: German or other West Germanic languages: Speak English less than ""very well"": Population 5 years and over -- (Estimate) [ "TLEPRuss", "DOUBLE" ], # C16001e14 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Russian, Polish, or other Slavic languages: Speak English less than "very well": Population 5 years and over -- (Estimate) [ "TLEPOIndoEuro", "DOUBLE" ], # C16001e17 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS 
AND OVER: Other Indo-European languages: Speak English less than ""very well"": Population 5 years and over -- (Estimate) [ "TLEPKor", "DOUBLE" ], # C16001e20 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Korean: Speak English less than ""very well"": Population 5 years and over -- (Estimate) [ "TLEPChin", "DOUBLE" ], # C16001e23 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Chinese (incl. Mandarin, Cantonese): Speak English less than "very well": Population 5 years and over -- (Estimate) [ "TLEPViet", "DOUBLE" ], # C16001e26 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Vietnamese: Speak English less than ""very well"": Population 5 years and over -- (Estimate) [ "TLEPTaglog", "DOUBLE" ], # C16001e29 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Tagalog (incl. Filipino): Speak English less than ""very well"": Population 5 years and over -- (Estimate) [ "TLEPArabic", "DOUBLE" ], # C16001e35 LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Arabic: Speak English less than ""very well"": Population 5 years and over -- (Estimate) ]) print("") print("---------------------------") print("Added fields") # CALCULATE OUT BLOCK GROUP CENSUS VALUES ap.CalculateFields_management(working_file, "PYTHON3", [ ["SqMiles", "!shape.area@squaremiles!"], ["TPOP", "!B16004e1!"], [ "TLEP", "!B16004e6!+!B16004e7!+!B16004e8!+!B16004e11!+!B16004e12!+!B16004e13!+!B16004e16!+!B16004e17!+!B16004e18!+!B16004e21!+!B16004e22!+!B16004e23!+!B16004e28!+!B16004e29!+!B16004e30!+!B16004e33!+!B16004e34!+!B16004e35!+!B16004e38!+!B16004e39!+!B16004e40!+!B16004e43!+!B16004e44!+!B16004e45!+!B16004e50!+!B16004e51!+!B16004e52!+!B16004e55!+!B16004e56!+!B16004e57!+!B16004e60!+!B16004e61!+!B16004e62!+!B16004e65!+!B16004e66!+!B16004e67!" 
], ["PLEP", "!TLEP!/!TPOP!"], ["LEPDens", "!TLEP!/!SqMiles!"], ["TEngOnly", "!B16004e3!+!B16004e25!+!B16004e47!"], [ "TEngVW", "!B16004e5!+!B16004e10!+!B16004e15!+!B16004e20!+!B16004e27!+!B16004e32!+!B16004e37!+!B16004e42!+!B16004e49!+!B16004e54!+!B16004e59!+!B16004e64!" ], [ "TLEPSpan", "!B16004e6!+!B16004e7!+!B16004e8!+!B16004e28!+!B16004e29!+!B16004e30!+!B16004e50!+!B16004e51!+!B16004e52!" ], [ "TLEPEuro", "!B16004e11!+!B16004e12!+!B16004e13!+!B16004e33!+!B16004e34!+!B16004e35!+!B16004e55!+!B16004e56!+!B16004e57!" ], [ "TLEPAsian", "!B16004e16!+!B16004e17!+!B16004e18!+!B16004e38!+!B16004e39!+!B16004e40!+!B16004e60!+!B16004e61!+!B16004e62!" ], [ "TLEPOther", "!B16004e21!+!B16004e22!+!B16004e23!+!B16004e43!+!B16004e44!+!B16004e45!+!B16004e65!+!B16004e66!+!B16004e67!" ], ["PEngVW", "!TEngVW!/!TPOP!"], ["PLEPSpan", "!TLEPSpan!/!TPOP!"], ["PLEPEuro", "!TLEPEuro!/!TPOP!"], ["PLEPAsian", "!TLEPAsian!/!TPOP!"], ["PLEPOther", "!TLEPOther!/!TPOP!"], ["LEPSpanDens", "!TLEPSpan!/!SqMiles!"], ["LEPEuroDens", "!TLEPEuro!/!SqMiles!"], ["LEPAsianDens", "!TLEPAsian!/!SqMiles!"], ["LEPOtherDens", "!TLEPOther!/!SqMiles!"], ["TLEPFrench", "!C16001e8!"], ["TLEPGerm", "!C16001e11!"], ["TLEPRuss", "!C16001e14!"], ["TLEPOIndoEuro", "!C16001e17!"], ["TLEPKor", "!C16001e20!"], ["TLEPChin", "!C16001e23!"], ["TLEPViet", "!C16001e26!"], ["TLEPTaglog", "!C16001e29!"], ["TLEPArabic", "!C16001e35!"], ]) print("") print("---------------------------") print("Finished calculating LEP Population Calcs") # DISSOLVE TRACTS BY COUNTY - SUM VALUES ap.Dissolve_management( working_file, cw, "COUNTYFP", [["TPOP", "SUM"], ["SqMiles", "SUM"], ["TEngOnly", "SUM"], ["TEngVW", "SUM"], ["TLEP", "SUM"], ["TLEPAsian", "SUM"], ["TLEPSpan", "SUM"], ["TLEPEuro", "SUM"], ["TLEPOther", "SUM"], ["TLEPFrench", "SUM"], ["TLEPGerm", "SUM"], ["TLEPRuss", "SUM"], ["TLEPOIndoEuro", "SUM"], ["TLEPKor", "SUM"], ["TLEPChin", "SUM"], ["TLEPViet", "SUM"], ["TLEPTaglog", "SUM"], ["TLEPArabic", "SUM"]]) print("") 
print("---------------------------") print("Dissolve County Stats") # DISSOLVE TRACTS BY REGION - SUM VALUES ap.Dissolve_management( working_file, rw, "", [["TPOP", "SUM"], ["SqMiles", "SUM"], ["TEngOnly", "SUM"], ["TEngVW", "SUM"], ["TLEP", "SUM"], ["TLEPAsian", "SUM"], ["TLEPSpan", "SUM"], ["TLEPEuro", "SUM"], ["TLEPOther", "SUM"], ["TLEPFrench", "SUM"], ["TLEPGerm", "SUM"], ["TLEPRuss", "SUM"], ["TLEPOIndoEuro", "SUM"], ["TLEPKor", "SUM"], ["TLEPChin", "SUM"], ["TLEPViet", "SUM"], ["TLEPTaglog", "SUM"], ["TLEPArabic", "SUM"]]) print("") print("---------------------------") print("Dissolve Region Stats") # ADD COUNTY FIELDS ap.management.AddFields( cw, [["CoTPOP", "DOUBLE"], ["CoSqMiles", "DOUBLE"], ["CoTLEP", "DOUBLE"], ["CoPLEP", "DOUBLE"], ["CoLEPDens", "DOUBLE"], ["CoTLEPAsian", "DOUBLE"], ["CoPLEPAsian", "DOUBLE"], ["CoLEPAsianDens", "DOUBLE"], ["CoTLEPSpan", "DOUBLE"], ["CoPLEPSpan", "DOUBLE"], ["CoLEPSpanDens", "DOUBLE"], ["CoTLEPEuro", "DOUBLE"], ["CoPLEPEuro", "DOUBLE"], ["CoLEPEuroDens", "DOUBLE"], ["CoTLEPOther", "DOUBLE"], ["CoPLEPOther", "DOUBLE"], ['CoLEPOtherDens', "DOUBLE"], ["CoTLEPOther", "DOUBLE"], ["CoTLEPFrench", "DOUBLE"], ["CoPLEPFrench", "DOUBLE"], ["CoLEPFrenchDens", "DOUBLE"], ["CoTLEPGerm", "DOUBLE"], ["CoPLEPGerm", "DOUBLE"], ["CoLEPGermDens", "DOUBLE"], ["CoTLEPRuss", "DOUBLE"], ["CoPLEPRuss", "DOUBLE"], ["CoLEPRussDens", "DOUBLE"], ["CoTLEPOIndoEuro", "DOUBLE"], ["CoPLEPOIndoEuro", "DOUBLE"], ["CoLEPOIndoEuroDens", "DOUBLE"], ["CoTLEPKor", "DOUBLE"], ["CoPLEPKor", "DOUBLE"], ["CoLEPKorDens", "DOUBLE"], ["CoTLEPChin", "DOUBLE"], ["CoPLEPChin", "DOUBLE"], ["CoLEPChinDens", "DOUBLE"], ["CoTLEPViet", "DOUBLE"], ["CoPLEPViet", "DOUBLE"], ["CoLEPVietDens", "DOUBLE"], ["CoTLEPTaglog", "DOUBLE"], ["CoPLEPTaglog", "DOUBLE"], ["CoLEPTaglogDens", "DOUBLE"], ["CoTLEPArabic", "DOUBLE"], ["CoPLEPArabic", "DOUBLE"], ["CoLEPArabicDens", "DOUBLE"]]) print("") print("---------------------------") print(cw_file + " fields added !!!") # ADD REGIONAL 
FIELDS ap.management.AddFields( rw, [["RegTPOP", "DOUBLE"], ["RegSqMiles", "DOUBLE"], ["RegTLEP", "DOUBLE"], ["RegPLEP", "DOUBLE"], ["RegLEPDens", "DOUBLE"], ["RegTLEPAsian", "DOUBLE"], ["RegPLEPAsian", "DOUBLE"], ["RegLEPAsianDens", "DOUBLE"], ["RegTLEPSpan", "DOUBLE"], ["RegPLEPSpan", "DOUBLE"], ["RegLEPSpanDens", "DOUBLE"], ["RegTLEPEuro", "DOUBLE"], ["RegPLEPEuro", "DOUBLE"], ["RegLEPEuroDens", "DOUBLE"], ["RegTLEPOther", "DOUBLE"], ["RegPLEPOther", "DOUBLE"], ['RegLEPOtherDens', "DOUBLE"], ["RegTLEPFrench", "DOUBLE"], ["RegPLEPFrench", "DOUBLE"], ["RegLEPFrenchDens", "DOUBLE"], ["RegTLEPGerm", "DOUBLE"], ["RegPLEPGerm", "DOUBLE"], ["RegLEPGermDens", "DOUBLE"], ["RegTLEPRuss", "DOUBLE"], ["RegPLEPRuss", "DOUBLE"], ["RegLEPRussDens", "DOUBLE"], ["RegTLEPOIndoEuro", "DOUBLE"], ["RegPLEPOIndoEuro", "DOUBLE"], ["RegLEPOIndoEuroDens", "DOUBLE"], ["RegTLEPKor", "DOUBLE"], ["RegPLEPKor", "DOUBLE"], ["RegLEPKorDens", "DOUBLE"], ["RegTLEPChin", "DOUBLE"], ["RegPLEPChin", "DOUBLE"], ["RegLEPChinDens", "DOUBLE"], ["RegTLEPViet", "DOUBLE"], ["RegPLEPViet", "DOUBLE"], ["RegLEPVietDens", "DOUBLE"], ["RegTLEPTaglog", "DOUBLE"], ["RegPLEPTaglog", "DOUBLE"], ["RegLEPTaglogDens", "DOUBLE"], ["RegTLEPArabic", "DOUBLE"], ["RegPLEPArabic", "DOUBLE"], ["RegLEPArabicDens", "DOUBLE"]]) print("") print("---------------------------") print(rw_file + " fields added !!!") # CALCULATE COUNTY VALUES ap.CalculateFields_management( cw, "PYTHON3", [["CoTPOP", "!SUM_TPOP!"], ["CoSqMiles", "!SUM_SqMiles!"], ["CoTLEP", "!SUM_TLEP!"], ["CoTLEPAsian", "!SUM_TLEPAsian!"], ["CoTLEPSpan", "!SUM_TLEPSpan!"], ["CoTLEPEuro", "!SUM_TLEPEuro!"], ["CoTLEPOther", "!SUM_TLEPOther!"], ["CoTLEPFrench", "!SUM_TLEPFrench!"], ["CoTLEPGerm", "!SUM_TLEPGerm!"], ["CoTLEPRuss", "!SUM_TLEPRuss!"], ["CoTLEPOIndoEuro", "!SUM_TLEPOIndoEuro!"], ["CoTLEPKor", "!SUM_TLEPKor!"], ["CoTLEPChin", "!SUM_TLEPChin!"], ["CoTLEPViet", "!SUM_TLEPViet!"], [ "CoTLEPTaglog", "!SUM_TLEPTaglog!" 
], ["CoTLEPArabic", "!SUM_TLEPArabic!"]]) # CALCULATE REGIONAL PERCENTAGES AND DENSITIES ap.CalculateFields_management( cw, "PYTHON3", [["CoPLEP", "!CoTLEP!/!CoTPOP!"], [ "CoLEPDens", "!CoTLEP!/!CoSqMiles!" ], ["CoPLEPAsian", "!CoTLEPAsian!/!CoTPOP!"], ["CoLEPAsianDens", "!CoTLEPAsian!/!CoSqMiles!"], ["CoPLEPSpan", "!CoTLEPSpan!/!CoTPOP!"], ["CoLEPSpanDens", "!CoTLEPSpan!/!CoSqMiles!"], ["CoPLEPEuro", "!CoTLEPEuro!/!CoTPOP!"], ["CoLEPEuroDens", "!CoTLEPEuro!/!CoSqMiles!"], ["CoPLEPOther", "!CoTLEPOther!/!CoTPOP!"], ['CoLEPOtherDens', "!CoTLEPOther!/!CoSqMiles!"], ["CoPLEPFrench", "!CoTLEPFrench! / !CoTPOP!"], ["CoLEPFrenchDens", "!CoTLEPFrench! / !CoSqMiles!"], ["CoPLEPGerm", "!CoTLEPGerm! / !CoTPOP!"], ["CoLEPGermDens", "!CoTLEPGerm! / !CoSqMiles!"], ["CoPLEPRuss", "!CoTLEPRuss! / !CoTPOP!"], ["CoLEPRussDens", "!CoTLEPRuss! / !CoSqMiles!"], ["CoPLEPOIndoEuro", "!CoTLEPOIndoEuro! / !CoTPOP!"], ["CoLEPOIndoEuroDens", "!CoTLEPOIndoEuro! / !CoSqMiles!"], ["CoPLEPKor", "!CoTLEPKor! / !CoTPOP!"], ["CoLEPKorDens", "!CoTLEPKor! / !CoSqMiles!"], ["CoPLEPChin", "!CoTLEPChin! / !CoTPOP!"], ["CoLEPChinDens", "!CoTLEPChin! / !CoSqMiles!"], ["CoPLEPViet", "!CoTLEPViet! / !CoTPOP!"], ["CoLEPVietDens", "!CoTLEPViet! / !CoSqMiles!"], ["CoPLEPTaglog", "!CoTLEPTaglog! / !CoTPOP!"], ["CoLEPTaglogDens", "!CoTLEPTaglog! / !CoSqMiles!"], ["CoPLEPArabic", "!CoTLEPArabic! / !CoTPOP!"], ["CoLEPArabicDens", "!CoTLEPArabic! 
/ !CoSqMiles!"]]) print("") print("---------------------------") print(cw_file + " fields calculated !!!") # CALCULATE REGIONAL VALUES ap.CalculateFields_management( rw, "PYTHON3", [["RegTPOP", "!SUM_TPOP!"], ["RegSqMiles", "!SUM_SqMiles!"], ["RegTLEP", "!SUM_TLEP!"], ["RegTLEPAsian", "!SUM_TLEPAsian!"], ["RegTLEPSpan", "!SUM_TLEPSpan!"], ["RegTLEPEuro", "!SUM_TLEPEuro!"], ["RegTLEPOther", "!SUM_TLEPOther!"], ["RegTLEPFrench", "!SUM_TLEPFrench!"], ["RegTLEPGerm", "!SUM_TLEPGerm!"], ["RegTLEPRuss", "!SUM_TLEPRuss!"], ["RegTLEPOIndoEuro", "!SUM_TLEPOIndoEuro!"], ["RegTLEPKor", "!SUM_TLEPKor!"], ["RegTLEPChin", "!SUM_TLEPChin!"], ["RegTLEPViet", "!SUM_TLEPViet!"], ["RegTLEPTaglog", "!SUM_TLEPTaglog!"], ["RegTLEPArabic", "!SUM_TLEPArabic!"]]) # CALCULATE REGIONAL PERCENTAGES AND DENSITIES ap.CalculateFields_management( rw, "PYTHON3", [["RegPLEP", "!RegTLEP!/!RegTPOP!"], ["RegLEPDens", "!RegTLEP!/!RegSqMiles!"], ["RegPLEPAsian", "!RegTLEPAsian!/!RegTPOP!"], ["RegLEPAsianDens", "!RegTLEPAsian!/!RegSqMiles!"], ["RegPLEPSpan", "!RegTLEPSpan!/!RegTPOP!"], ["RegLEPSpanDens", "!RegTLEPSpan!/!RegSqMiles!"], ["RegPLEPEuro", "!RegTLEPEuro!/!RegTPOP!"], ["RegLEPEuroDens", "!RegTLEPEuro!/!RegSqMiles!"], ["RegPLEPOther", "!RegTLEPOther!/!RegTPOP!"], ['RegLEPOtherDens', "!RegTLEPOther!/!RegSqMiles!"], ["RegPLEPFrench", "!RegTLEPFrench! / !RegTPOP!"], ["RegLEPFrenchDens", "!RegTLEPFrench! / !RegSqMiles!"], ["RegPLEPGerm", "!RegTLEPGerm! / !RegTPOP!"], ["RegLEPGermDens", "!RegTLEPGerm! / !RegSqMiles!"], ["RegPLEPRuss", "!RegTLEPRuss! / !RegTPOP!"], ["RegLEPRussDens", "!RegTLEPRuss! / !RegSqMiles!"], ["RegPLEPOIndoEuro", "!RegTLEPOIndoEuro! / !RegTPOP!"], ["RegLEPOIndoEuroDens", "!RegTLEPOIndoEuro! / !RegSqMiles!"], ["RegPLEPKor", "!RegTLEPKor! / !RegTPOP!"], ["RegLEPKorDens", "!RegTLEPKor! / !RegSqMiles!"], ["RegPLEPChin", "!RegTLEPChin! / !RegTPOP!"], ["RegLEPChinDens", "!RegTLEPChin! / !RegSqMiles!"], ["RegPLEPViet", "!RegTLEPViet! / !RegTPOP!"], ["RegLEPVietDens", "!RegTLEPViet! 
/ !RegSqMiles!"], ["RegPLEPTaglog", "!RegTLEPTaglog! / !RegTPOP!"], ["RegLEPTaglogDens", "!RegTLEPTaglog! / !RegSqMiles!"], ["RegPLEPArabic", "!RegTLEPArabic! / !RegTPOP!"], ["RegLEPArabicDens", "!RegTLEPArabic! / !RegSqMiles!"]]) print("") print("---------------------------") print(rw_file + " fields calculated !!!") # SPATIAL JOIN TRACTS FILE WITH COUNTY FILE ap.SpatialJoin_analysis(working_file, cw, twcw) print("") print("---------------------------") print("County Spaital Join") # SPATIAL JOIN TRACTS FILE WITH REGION FILE ap.SpatialJoin_analysis(twcw, rw, twrw) print("") print("---------------------------") print("Region Spaital Join") # CALCULATE OUT ABOVE REGIONAL AND COUNTY AVERAGE DENSITIES FOR TRACTS # NEW WAY USING PERCENT POPULATION ap.CalculateFields_management( in_table=twrw, expression_type="PYTHON3", fields= "CoAbvLEP 'ifBlock(!PLEP!, !CoPLEP!)';RegAbvLEP 'ifBlock(!PLEP!, !RegPLEP!)'", code_block="""def ifBlock(area, region): if area > region: return 1 else: return 0 """) # OLD WAY USING POPULATION DENSITY print("") print("---------------------------") print("Above LEP Density Calculations Completed") # SPATIAL JOIN TRACTS FILE WITH PLACES FILE ap.SpatialJoin_analysis(twrw, places, twrw_places) print("") print("---------------------------") print("Places Spaital Join") cleanUp(twrw_places, gdb, final_file, final_gdb_loc, delete_fields) lepTracts(census_key, int(year), tract_mergedgdb, tract_file, root_dir, gdb, final_gdb_loc, region)
row[6] = street_View(row[7], row[8]) # Create the eDocs link row[9] = eDocs_Link(row[4]) # Create the iDocs link row[10] = iDocs_Link(row[4]) cursor.updateRow(row) arcpy.MakeFeatureLayer_management(parcel_property, 'parcel_property_layer') arcpy.AddJoin_management('parcel_property_layer', 'PAR_ID', 'parcel_layer', 'id') pp_select = arcpy.SelectLayerByAttribute_management('parcel_property_layer', where_clause='Parcel_Property.id = 0') arcpy.CalculateFields_management(pp_select, "PYTHON3", [['Parcel_Property.appellation', '!Parcel.appellation!'], ['Parcel_Property.affected_surveys', '!Parcel.affected_surveys!'], ['Parcel_Property.parcel_intent', '!Parcel.parcel_intent!'], ['Parcel_Property.topology_type', '!Parcel.topology_type!'], ['Parcel_Property.status', '!Parcel.status!'], ['Parcel_Property.statutory_actions', '!Parcel.statutory_actions!'], ['Parcel_Property.land_district', '!Parcel.land_district!'], ['Parcel_Property.titles', '!Parcel.titles!'], ['Parcel_Property.survey_area', '!Parcel.survey_area!'], ['Parcel_Property.calc_area', '!Parcel.calc_area!']]) arcpy.RemoveJoin_management('parcel_property_layer') print('Parcel Property fields added and calculated') # Call the publishing function for each of the layers that need updating. publishing.publishLayer('Publishing', 'Crown Land') publishing.publishLayer('Publishing', 'Owner', False) publishing.publishLayer('Publishing', 'All Parcels') publishing.publishLayer('Publishing', 'Roads _Addressing_') publishing.publishLayer('Publishing', 'Address _Electoral_')