def RotateFeatureClass(inputFC, outputFC, angle=0, pivot_type="CENTER", pivot_point=None):
    """Rotate Feature Class

    inputFC     Input features
    outputFC    Output feature class
    angle       Angle to rotate, in degrees
    pivot_type  CENTER, LOWER_LEFT, LOWER_RIGHT, UPPER_LEFT, UPPER_RIGHT, XY
    pivot_point X,Y coordinates (as space-separated string)
                (only used if pivot_type = "XY")

    Writes rotated copies of the input geometries to a scratch feature
    class, then joins the original attributes back and copies to outputFC.
    Raises MsgError for an unparseable angle (reported via GPMsg).
    NOTE(review): Python 2 syntax (`raise Exception, ...`, `except X, e:`).
    """
    # temp names for cleanup
    env_file, lyrTmp, tmpFC = [None] * 3
    Row, Rows, oRow, oRows = [None] * 4  # cursors
    # warning thresholds for complex features
    warnPts = 10000
    warnPrt = 10
    try:
        # process parameters
        try:
            angle = float(angle)
        except:
            raise MsgError("Invalid value for rotation: {0}".format(angle))
        # determine pivot point (a corner/center of the input extent,
        # or user-supplied coordinates when pivot_type == "XY")
        ext = arcpy.Describe(inputFC).extent
        if pivot_type == "CENTER":
            pivot_point = (ext.XMin + ext.width * 0.5, ext.YMin + ext.height * 0.5)
        elif pivot_type == "LOWER_LEFT":
            pivot_point = ext.XMin, ext.YMin
        elif pivot_type == "UPPER_LEFT":
            pivot_point = ext.XMin, ext.YMax
        elif pivot_type == "UPPER_RIGHT":
            pivot_point = ext.XMax, ext.YMax
        elif pivot_type == "LOWER_RIGHT":
            pivot_point = ext.XMax, ext.YMin
        elif pivot_type == "XY":
            try:
                pivot_point = tuple([float(xy) for xy in pivot_point.split()])
            except:
                raise Exception("Invalid value for pivot point: %s" % pivot_point)
        xcen, ycen = pivot_point
        # msg = "Rotating {0} degrees around {1} ({2:.1f}, {3:.1f})"
        # GPMsg(msg.format(angle, pivot_type, xcen, ycen))
        # set up environment: snapshot the current GP settings to a temp
        # XML file (presumably restored by cleanup code outside this view)
        env_file = arcpy.CreateScratchName("xxenv", ".xml", "file",
                                           arcpy.GetSystemEnvironment("TEMP"))
        arcpy.SaveSettings(env_file)
        # Disable any GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")
        # pick a workspace: current env, else the output's folder,
        # else the input's folder
        WKS = env.workspace
        if not WKS:
            if os.path.dirname(outputFC):
                WKS = os.path.dirname(outputFC)
            else:
                WKS = os.path.dirname(arcpy.Describe(inputFC).catalogPath)
        env.workspace = env.scratchWorkspace = WKS
        # get feature class properties
        dFC = arcpy.Describe(inputFC)
        shpField = dFC.shapeFieldName
        shpType = dFC.shapeType
        FID = dFC.OIDFieldName
        SR = dFC.spatialReference
        # create temp feature class
        tmpFC = arcpy.CreateScratchName("xxfc", "", "featureclass")
        arcpy.CreateFeatureclass_management(os.path.dirname(tmpFC),
                                            os.path.basename(tmpFC),
                                            shpType,
                                            spatial_reference=SR)
        # set up id field (used to join later)
        TFID = "ORIG_FID"
        arcpy.AddField_management(tmpFC, TFID, "LONG")
        arcpy.DeleteField_management(tmpFC, "ID")
        # rotate the feature class coordinates
        # only points, polylines, and polygons are supported
        ##GPMsg("t", "writing")
        # open read and write cursors
        Rows = arcpy.SearchCursor(inputFC, "", "", "%s;%s" % (shpField, FID))
        oRows = arcpy.InsertCursor(tmpFC)
        if shpType == "Point":
            # points: rotate each coordinate pair directly
            for Row in Rows:
                shp = Row.getValue(shpField)
                pnt = shp.getPart()
                pnt.X, pnt.Y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                oRow = oRows.newRow()
                oRow.setValue(shpField, pnt)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        elif shpType in ["Polyline", "Polygon"]:
            # initialize total area / length
            totarea_in, totarea_out = 0.0, 0.0
            parts = arcpy.Array()
            rings = arcpy.Array()
            ring = arcpy.Array()
            for Row in Rows:
                shp = Row.getValue(shpField)
                if shpType == "Polygon":
                    totarea_in += shp.area
                p = 0
                for part in shp:
                    for pnt in part:
                        if pnt:
                            x, y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                            ring.add(arcpy.Point(x, y, pnt.ID))
                        else:
                            # a None point separates rings within a part:
                            # if we have a ring, save it
                            if len(ring) > 0:
                                rings.add(ring)
                                ring.removeAll()
                    # we have our last ring, add it
                    rings.add(ring)
                    ring.removeAll()
                    # if only one, remove nesting
                    if len(rings) == 1:
                        rings = rings.getObject(0)
                    parts.add(rings)
                    rings.removeAll()
                    p += 1
                # if only one, remove nesting
                if len(parts) == 1:
                    parts = parts.getObject(0)
                if dFC.shapeType == "Polyline":
                    shp = arcpy.Polyline(parts)
                else:
                    shp = arcpy.Polygon(parts)
                totarea_out += shp.area
                # warn about very complex features (thresholds set above)
                if shp.pointCount > warnPts or shp.partCount > warnPrt:
                    GPMsg("w", ("Feature {0} contains {1} points, "
                                "{2} parts").format(Row.getValue(FID),
                                                    shp.pointCount,
                                                    shp.partCount))
                parts.removeAll()
                oRow = oRows.newRow()
                oRow.setValue(shpField, shp)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        else:
            raise Exception, "Shape type {0} is not supported".format(shpType)
        del Row, Rows, oRow, oRows  # close write cursor (ensure buffer written)
        Row, Rows, oRow, oRows = [None] * 4  # restore variables for cleanup
        if shpType == "Polygon":
            # check - did the area change more than 0.1 percent?
            diff = totarea_out - totarea_in
            diffpct = 100.0 * diff / totarea_in
            if abs(diffpct) > 1.0:
                GPMsg("w", "Please check output polygons")
                GPMsg(
                    "w",
                    "Input area: {0:.1f} Output area: {1:.1f}".format(
                        totarea_in, totarea_out))
                GPMsg("w", "({:.1f} percent change)".format(diffpct))
        # join attributes, and copy to output
        lyrTmp = "lyrTmp"
        arcpy.MakeFeatureLayer_management(tmpFC, lyrTmp)
        arcpy.AddJoin_management(lyrTmp, TFID, inputFC, FID)
        # unqualified names so joined fields keep their original names
        env.qualifiedFieldNames = False
        arcpy.CopyFeatures_management(lyrTmp, outputFC)
        env.qualifiedFieldNames = True
        # Fourth field [3] is a duplicate of TFID
        dropField = arcpy.ListFields(outputFC)[3].name
        arcpy.DeleteField_management(outputFC, dropField)
    except MsgError, xmsg:
        GPMsg("e", str(xmsg))
def appdata_roaming_dir():
    """Return the roaming AppData directory for the installed ArcGIS Desktop.

    Builds "%APPDATA%\\ESRI\\<ProductName><major version>" from the
    Desktop install info and the APPDATA environment variable.
    """
    install_info = arcpy.GetInstallInfo('desktop')
    roaming_root = arcpy.GetSystemEnvironment("APPDATA")
    # Folder name is the product name with the major version appended.
    product_folder = install_info['ProductName'] + major_version()
    return os.path.join(roaming_root, 'ESRI', product_folder)
# Read tool parameters from the command line.
# NOTE(review): sys.argv[1] (presumably input_raster_bands, used below) is
# assigned outside this visible chunk -- confirm against the full script.
number_of_classes = sys.argv[2]
output_classified_raster = sys.argv[3]
number_of_iterations = "20"
minimum_class_size = sys.argv[4]
# '#' is the "not supplied" marker: fall back to defaults
if minimum_class_size == '#':
    minimum_class_size = "20"
sample_interval = sys.argv[5]
if sample_interval == '#':
    sample_interval = "10"
# Fixed settings not exposed as script arguments
reject_fraction = "0.0"
a_priori_probability_weighting = "EQUAL"
input_a_priori_probability_file = "#"
output_confidence_raster = "#"
output_signature_file = sys.argv[6]
no_signature_output = 0
out_sig_path = arcpy.GetSystemEnvironment("Temp")
if output_signature_file == '#':
    # No signature file requested: use a throwaway name in TEMP and
    # remember that it should not be kept
    no_signature_output = 1
    output_signature_file = out_sig_path + os.sep + "sigtmpbb9z.gsg"
# Executing tools
# NOTE(review): the except/finally for this try lies beyond the visible chunk.
try:
    # Temporarily allow overwriting (the temp signature name is reused),
    # then restore the caller's overwriteOutput setting.
    overwrite_setting = arcpy.env.overwriteOutput
    if no_signature_output:
        arcpy.env.overwriteOutput = 1
    arcpy.gp.IsoCluster_sa(input_raster_bands, output_signature_file,
                           number_of_classes, number_of_iterations,
                           minimum_class_size, sample_interval)
    arcpy.env.overwriteOutput = overwrite_setting
# Read the remaining tool parameters (indices 0-3 are presumably read
# before this chunk -- confirm against the full script).
LiikenneElementti = arcpy.GetParameterAsText(4)
Nimi = arcpy.GetParameterAsText(5)
Impedanssi = arcpy.GetParameterAsText(6)
Breaks = arcpy.GetParameterAsText(7)
Pysakointi = arcpy.GetParameterAsText(8)
Kavely = int(arcpy.GetParameterAsText(9))
RinDisk = arcpy.GetParameterAsText(10)
Suunta = arcpy.GetParameterAsText(11)
Details = arcpy.GetParameterAsText(12)
Overlap = arcpy.GetParameterAsText(13)
Trim = arcpy.GetParameterAsText(14)
TrimCut = arcpy.GetParameterAsText(15)
#Lines = arcpy.GetParameterAsText(16)  # Can be added as a parameter if needed; it must then also be added as the last parameter asked for in the UI!

# Environment settings:
temp = arcpy.GetSystemEnvironment("TEMP")
mxd = arcpy.mapping.MapDocument("CURRENT")
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
env.workspace = temp

# Get the ArcGIS version:
for key, value in arcpy.GetInstallInfo().iteritems():
    if key == "Version":
        ArcVersio = value

# Create the progress bar
arcpy.SetProgressor("step", "PALVELUALUE LASKENTA...Tarkistukset ennen laskentaa...", 0, 100, 5)
def summarize(run_output, extent, spatialReference):
    """! Summarize assets that are within the given extent
    @param run_output Folder with simulation outputs
    @param extent Extent to find assets in
    @param spatialReference Spatial reference to use
    @return pandas dataframe with assets list (or None when nothing found)
    """
    arcpy.env.overwriteOutput = True
    arcpy.env.addOutputsToMap = True
    # zone-specific geodatabases of asset points/polygons
    zone = getZone(spatialReference)
    zone_suffix = "_{}".format(zone).replace('.', '_')
    gdb_point = os.path.join(RAMPART_BASE, "points_100m{}.gdb".format(zone_suffix))
    gdb_poly = os.path.join(RAMPART_BASE, "polygons_100m{}.gdb".format(zone_suffix))
    fire = os.path.basename(os.path.dirname(run_output))
    # work in a unique scratch folder under TEMP
    arcpy.env.workspace = arcpy.GetSystemEnvironment('TEMP')
    arcpy.env.workspace = ensure_dir(
        os.path.join(arcpy.GetSystemEnvironment('TEMP'),
                     arcpy.CreateUniqueName(fire)))
    vectors = {}
    for_points = []    # rows for the point-asset dataframe
    for_polygons = []  # rows for the polygon-asset dataframe
    print(extent.XMin, extent.XMax, extent.YMin, extent.YMax)
    # latest wxshield probability raster in the run output
    prob = [
        x for x in os.listdir(run_output)
        if x.startswith('wxshield') and x.endswith('.asc')
    ][-1]
    prob = os.path.join(run_output, prob)
    prob_raster = arcpy.sa.Raster(prob)
    #~ prob_int = arcpy.sa.Int(prob_raster * 1000)
    arcpy.env.extent = extent
    # probability raster with NoData treated as 0
    prob_zero = arcpy.sa.Con(arcpy.sa.IsNull(prob_raster), 0, prob_raster)
    fire_shape = None
    try:
        # prefer a projected perimeter shapefile for this zone
        perim = [
            x for x in os.listdir(run_output)
            if x.startswith(fire) and x.endswith('{}.shp'.format(zone_suffix))
        ][-1]
        perim = os.path.join(run_output, perim)
        print(perim)
    except:
        # HACK: projecting the point is removing it for some reason??
        perim = os.path.join(run_output, fire + '.shp')

    def do_summarize(gdb, fire_shape):
        # Summarize every '*_proj' feature class in gdb against the extent,
        # appending result rows to for_points / for_polygons.
        env_push()
        arcpy.env.workspace = gdb
        features = map(lambda x: os.path.join(gdb, x),
                       arcpy.ListFeatureClasses('*_proj'))
        env_pop()
        env_push()
        for input in features:
            added = 0
            # derive a display name from the feature class name
            name = os.path.basename(input).replace(
                '{}_proj'.format(zone_suffix), '').replace(
                    '_', ' ').replace(' 100m', '').capitalize()
            print(name)
            arcpy.Clip_analysis(input, "in_memory\\extent", "in_memory\\clipped")
            arcpy.MakeFeatureLayer_management("in_memory\\clipped", "from_lyr")
            if 0 == int(arcpy.GetCount_management('from_lyr').getOutput(0)):
                #~ print("No results")
                continue
            lyr = "from_lyr"
            join_worked = False
            # pick the first available description field
            all_fields = [x.name for x in arcpy.Describe(input).fields]
            fields = ['SHAPE@']
            for f in DESCRIPTION_FIELDS:
                if f in all_fields:
                    fields.append(f)
                    break
            is_point = u'Point' == arcpy.Describe(input).shapeType
            sr = arcpy.Describe(input).spatialReference
            if fire_shape.spatialReference != sr:
                fire_shape = fire_shape.projectAs(sr)
            if is_point:
                # points: sample probability raster directly at each point
                arcpy.sa.ExtractValuesToPoints("from_lyr", prob,
                                               "in_memory\\probpts",
                                               "INTERPOLATE", "VALUE_ONLY")
                lyr = "in_memory\\probpts"
                fields.append('RASTERVALU')
            else:
                # polygons: zonal max of probability joined back on OID
                try:
                    oid = arcpy.Describe("from_lyr").oidFieldName
                    arcpy.sa.ZonalStatisticsAsTable('from_lyr', oid, prob_zero,
                                                    "in_memory\\stats",
                                                    "NODATA", "MAXIMUM")
                    arcpy.AddJoin_management("from_lyr", oid,
                                             "in_memory\\stats", oid,
                                             "KEEP_ALL")
                    #~ if 0 < int(arcpy.GetCount_management("in_memory\\stats").getOutput(0)):
                    # qualify field names for the joined layer
                    new_fields = []
                    for f in fields:
                        if f == 'SHAPE@':
                            new_fields.append(f)
                        else:
                            new_fields.append('clipped.' + f)
                    new_fields.append('stats.MAX')
                    fields = new_fields
                    #~ print("ZonalStatisticsAsTable worked")
                    join_worked = True
                except:
                    print("FAIL")
                    pass

            def addLayer(row):
                # Convert one cursor row into an output record.
                shp = row[0]
                pt = shp.firstPoint
                desc = "" if not row[1] else row[1].replace('\n', '').replace(
                    '\r', '')
                pr = None
                had_pr = False
                try:
                    # probability as percent (row[2] present when sampling/join worked)
                    pr = max(0, row[2])
                    pr = round(pr * 100, 1)
                    had_pr = True
                except:
                    pass
                if pr is None:
                    pr = 0.0
                if not (is_point or join_worked):
                    # fall back to sampling the raster at every vertex
                    print("Searching...")
                    for b in shp:
                        for g in b:
                            if g is not None:
                                loc = "{} {}".format(g.X, g.Y)
                                s = arcpy.GetCellValue_management(
                                    prob, loc).getOutput(0)
                                if 'NoData' != s:
                                    pr = max(pr, float(s))
                if is_point:
                    lat, lon = utm_to_lat_lon(pt.X, pt.Y, zone, False)
                    z, x, y = lat_lon_to_utm(lat, lon)
                    r = [
                        name, 'Point', desc, z, int(x), int(y),
                        shp.firstPoint.X, shp.firstPoint.Y, pr, pr
                    ]
                    #~ print(r)
                    for_points.append(r)
                else:
                    # find the closest point (c4) between the polygon and the
                    # fire perimeter, plus the angle/distance between them
                    c4 = None
                    if not fire_shape.disjoint(shp):
                        print("Looking")
                        if 1 == fire_shape.pointCount:
                            c4 = fire_shape.firstPoint
                        else:
                            for b in fire_shape:
                                for g in b:
                                    if shp.contains(g):
                                        c4 = g
                    if c4 is not None:
                        # overlapping: zero angle/distance
                        ang = [0, 0]
                    else:
                        #~ print("Finding closest")
                        # iterate closest-point pairs until they converge
                        c1 = find_closest(fire_shape.centroid.X,
                                          fire_shape.centroid.Y, shp)
                        c2 = find_closest(c1.X, c1.Y, fire_shape)
                        while True:
                            c3 = find_closest(c2.X, c2.Y, shp)
                            c4 = find_closest(c3.X, c3.Y, fire_shape)
                            if c1.X == c3.X and c1.Y == c3.Y and c2.X == c4.X and c2.Y == c4.Y:
                                break
                            c1 = c3
                            c2 = c4
                        ang = arcpy.PointGeometry(
                            c3, fire_shape.spatialReference).angleAndDistanceTo(
                                arcpy.PointGeometry(
                                    c4, fire_shape.spatialReference))
                    lat, lon = utm_to_lat_lon(c4.X, c4.Y, zone, False)
                    z, x, y = lat_lon_to_utm(lat, lon)
                    r = [
                        name, 'Polygon', desc, z, int(x), int(y), c4.X, c4.Y,
                        pr, pr, ang[0] % 360, ang[1]
                    ]
                    #~ print(r)
                    for_polygons.append(r)

            for row in arcpy.da.SearchCursor(lyr, fields):
                added += 1
                if (added % 1000) == 0:
                    print('.', end='')
                addLayer(row)
            if added > 1000:
                print('')
            print("Added {} entries".format(added))
            arcpy.Delete_management("from_lyr")
            arcpy.Delete_management("in_memory\\probpts")
        env_pop()
        return fire_shape

    # build an in-memory layer holding the extent polygon (used by Clip above)
    p = polygonFromExtent(extent, arcpy.Describe(prob).spatialReference)
    arcpy.MakeFeatureLayer_management(p, "in_memory\\extent")
    cursor = arcpy.da.InsertCursor("in_memory\\extent", ['SHAPE@'])
    cursor.insertRow([p])
    del cursor
    # NOTE(review): loop nesting reconstructed -- summarize per perimeter row
    for fire_row in arcpy.SearchCursor(perim):
        fire_shape = fire_row.Shape
        fire_shape = fire_shape.projectAs(arcpy.Describe(prob).spatialReference)
        fire_shape = do_summarize(gdb_point, fire_shape)
        fire_shape = do_summarize(gdb_poly, fire_shape)
    arcpy.Delete_management("in_memory\\extent")
    columns = [
        'Layer', 'Type', 'Zone', 'Basemap', 'Easting', 'Northing', 'Angle',
        'Distance (m)', 'Avg %', 'Max %', 'Description'
    ]
    # --- point assets: group nearby duplicates (>3 per basemap key) ---
    df = pd.DataFrame(for_points,
                      columns=[
                          'Layer', 'Type', 'Description', 'Zone', 'Easting',
                          'Northing', 'X', 'Y', 'Avg %', 'Max %'
                      ])
    if len(df) > 0:
        df['Basemap'] = df.apply(
            lambda x: int(str(x['Easting'])[:2] + str(x['Northing'])[:3]),
            axis=1)
        df['KEY'] = df.apply(
            lambda x: x['Layer'] + str(x['Zone']) + str(x['Basemap']), axis=1)
        grouped = df.groupby(['Layer', 'Type', 'Zone', 'Basemap',
                              'KEY']).count()[['Description']].reset_index()
        formean = df.groupby(['Layer', 'Type', 'Zone', 'Basemap', 'KEY']).mean()
        bymean = formean[['Avg %']].reset_index()
        bymax = df.groupby(['Layer', 'Zone', 'Basemap',
                            'KEY']).max()[['Max %']].reset_index()
        byx = formean[['X']].reset_index()
        byy = formean[['Y']].reset_index()
        # keys with more than 3 matches get aggregated into one row
        agg = pd.merge(grouped[grouped['Description'] > 3], bymean)
        agg = pd.merge(agg, bymax)
        agg = pd.merge(agg, byx)
        agg = pd.merge(agg, byy)
        singles = df[~df['KEY'].isin(agg['KEY'])]
        del agg['KEY']
        del singles['KEY']
        if len(agg) > 0:
            agg['Description'] = agg.apply(
                lambda x: str(x['Description']) + ' matches', axis=1)
            agg['Easting'] = ""
            agg['Northing'] = ""
            summary = pd.concat([singles, agg])
        else:
            summary = singles
        # angle/distance from each point to the fire perimeter
        summary['Vector'] = summary.apply(
            lambda x: angleAndDistanceTo(x['X'], x['Y'], fire_shape), axis=1)
        summary['Angle'] = summary.apply(lambda x: x['Vector'][0], axis=1)
        summary['Distance (m)'] = summary.apply(lambda x: x['Vector'][1],
                                                axis=1)
        summary = summary[columns]
    else:
        summary = None
    # --- polygon assets: same grouping, angle/distance already computed ---
    df2 = pd.DataFrame(for_polygons,
                       columns=[
                           'Layer', 'Type', 'Description', 'Zone', 'Easting',
                           'Northing', 'X', 'Y', 'Avg %', 'Max %', 'Angle',
                           'Distance (m)'
                       ])
    if len(df2) > 0:
        df2['Basemap'] = df2.apply(
            lambda x: int(str(x['Easting'])[:2] + str(x['Northing'])[:3]),
            axis=1)
        df2['KEY'] = df2.apply(
            lambda x: x['Layer'] + str(x['Zone']) + str(x['Basemap']), axis=1)
        grouped = df2.groupby(['Layer', 'Type', 'Zone', 'Basemap',
                               'KEY']).count()[['Description']].reset_index()
        formean = df2.groupby(['Layer', 'Type', 'Zone', 'Basemap',
                               'KEY']).mean()
        bymean = formean[['Avg %']].reset_index()
        bymax = df2.groupby(['Layer', 'Zone', 'Basemap',
                             'KEY']).max()[['Max %']].reset_index()
        byangle = formean[['Angle']].reset_index()
        bydistance = formean[['Distance (m)']].reset_index()
        agg = pd.merge(grouped[grouped['Description'] > 3], bymean)
        agg = pd.merge(agg, bymax)
        agg = pd.merge(agg, byangle)
        agg = pd.merge(agg, bydistance)
        singles = df2[~df2['KEY'].isin(agg['KEY'])]
        del agg['KEY']
        del singles['KEY']
        if len(agg) > 0:
            agg['Description'] = agg.apply(
                lambda x: str(x['Description']) + ' matches', axis=1)
            agg['Easting'] = ""
            agg['Northing'] = ""
            agg['X'] = ""
            agg['Y'] = ""
            summary2 = pd.concat([singles, agg])
        else:
            summary2 = singles
        summary2 = summary2[columns]
    else:
        summary2 = None
    # combine whichever summaries exist
    if summary is not None and summary2 is not None:
        result = pd.concat([summary, summary2])
    elif summary is not None:
        result = summary
    elif summary2 is not None:
        result = summary2
    else:
        return None
    # replace numeric angle with a compass direction; tidy numeric columns
    result['Direction'] = result.apply(lambda x: angleToDirection(x['Angle'])
                                       if x['Distance (m)'] > 0 else 'N/A',
                                       axis=1)
    result['Distance (m)'] = result.apply(lambda x: int(x['Distance (m)']),
                                          axis=1)
    result['Avg %'] = result.apply(lambda x: round(x['Avg %'], 1), axis=1)
    result['Max %'] = result.apply(lambda x: round(x['Max %'], 1), axis=1)
    result = result[[x if x != 'Angle' else 'Direction' for x in columns]]
    # stable sort: by distance etc. first, then by Max % descending
    result = result.sort_values(
        ['Distance (m)', 'Layer', 'Type', 'Basemap', 'Easting', 'Northing'])
    result = result.sort_values(['Max %'], ascending=False, kind='mergesort')
    return result
# -*- coding: cp936 -*- import arcpy, os # 获取到的结果可能为空 print arcpy.GetSystemEnvironment("TEMP") print arcpy.GetSystemEnvironment("TMP") print arcpy.GetSystemEnvironment("MW_TMPDIR") # 获取默认的overwriteOutput环境值,并将overwriteOutput设为True print arcpy.env.overwriteOutput arcpy.env.overwriteOutput = True print arcpy.env.overwriteOutput # 保存当前环境设置,并将overwriteOutput设为False arcpy.SaveSettings(os.path.join(os.path.expanduser("~"), "Desktop", "MyCustomSettings.xml")) arcpy.env.overwriteOutput = False print arcpy.env.overwriteOutput # 重新读取导出的环境设置文件 arcpy.LoadSettings(os.path.join(os.path.expanduser("~"), "Desktop", "MyCustomSettings.xml")) print arcpy.env.overwriteOutput
def makeMaps(scenario, run_output, force_maps, hide):
    """!
    @param scenario Scenario to use settings from
    @param run_output Folder where simulation output resides
    @param force_maps Whether or not to force making maps if they already exist
    @param hide Whether or not to show perimeter closest to date on map
    @return Path to final output pdf

    Orchestrates per-day map generation by launching helper scripts as
    child processes, then assembles everything into a single PDF.
    NOTE(review): Python 2 (`xrange`, lazy `map`) -- `map` results are
    iterated/ indexed below, so this relies on Python 2 list semantics.
    """
    import pdf
    from pdf import makePDF
    perimeters = PerimeterList(scenario.year, scenario.fire)
    sim_output = readSimOutput(run_output)
    # startup indices line from the simulation log (if present)
    startup = find_lines(sim_output, 'Startup indices ')
    startup = startup[0] if (
        len(startup) > 0) else "Startup indices are not valid"
    prefix = 'actuals_' if scenario.actuals_only else 'wxshield_'
    fire_prefix = scenario.fire + "_" + ('actual_'
                                         if scenario.actuals_only else '')
    # per-day probability rasters for this scenario
    probs = [
        x for x in os.listdir(os.path.join(Settings.HOME_DIR, run_output))
        if x.startswith(prefix) and x[-3:] == "asc"
    ]
    day0 = find_day(probs[0]) - 1
    jds = map(find_day, probs)
    dates = map(find_date, probs)
    days = map(lambda x: x - day0, jds)  # day offsets relative to start
    extent = None
    perim = None
    ensure_dir(scenario.outbase)
    out_dir = os.path.join(scenario.outbase, scenario.fire[:3])
    ensure_dir(out_dir)
    for_time = os.path.basename(scenario.run_output)
    pdf_output = os.path.abspath(
        os.path.join(out_dir, fire_prefix + for_time + ".pdf"))
    copied = os.path.join(scenario.outbase, os.path.basename(pdf_output))
    # HACK: if any one map is required then make them all
    if not (force_maps or not os.path.exists(pdf_output)):
        logging.info("Maps already exist for " + scenario.fire)
        return copied
    for_time = os.path.basename(scenario.run_output)
    # flag file acting as a lock so concurrent runs don't duplicate work
    mapflag = os.path.join(out_dir,
                           scenario.fire + "_" + for_time + "_mapsinprogress")
    if os.path.exists(mapflag):
        logging.info("Maps already being made for " + scenario.fire)
        return copied
    write_file(os.path.dirname(mapflag), os.path.basename(mapflag), " ")
    map_output = getMapOutput(run_output)
    logging.info("Making maps for " + scenario.fire)
    # HACK: run in parallel but assume this works for now
    wxshield = getWxSHIELDFile(dates[0], scenario.fire, map_output)
    processes = []
    run_what = r'python.exe firestarr\getWxshield.py {} {} {} {} {} "{}"'.format(
        scenario.lat, scenario.lon, dates[0], days[-1], scenario.fire,
        map_output)
    if 'overridden' in startup:
        # parse overridden startup indices out of the log line
        startup_values = map(lambda x: x.strip(),
                             startup[startup.find('(') + 1:-1].split(','))
        logging.debug(startup_values)
        # HACK: just use known positions for now
        #~ (0.0mm, FFMC 92.0, DMC 59.0, DC 318.0)
        #~ print(startup_values[0][:-2], startup_values[1][5:].strip(), startup_values[2][4:].strip(), startup_values[0][3:].strip())
        apcp = float(startup_values[0][:-2])
        ffmc = float(startup_values[1][5:].strip())
        dmc = float(startup_values[2][4:].strip())
        dc = float(startup_values[3][3:].strip())
        run_what += ' --apcp_0800 {} --ffmc {} --dmc {} --dc {}'.format(
            apcp, ffmc, dmc, dc)
    logging.debug(run_what)
    processes.append(
        start_process(run_what, Settings.PROCESS_FLAGS, Settings.HOME_DIR))
    arcpy.env.overwriteOutput = True
    ensure_dir(os.path.dirname(out_dir))
    ensure_dir(out_dir)
    # keep these until the end so they lock the file names
    mxd_paths = []
    mxd_names = []
    risk_paths = []
    risk_names = []
    scores = []
    txtFuelRaster = find_line(sim_output, 'Fuel raster is ', 'Fuel raster is ')
    suffix = findSuffix(txtFuelRaster)
    env_push()
    png_processes = []
    arcpy.env.scratchWorkspace = ensure_dir(
        arcpy.CreateScratchName(scenario.fire + os.path.basename(run_output),
                                "", "Workspace",
                                arcpy.GetSystemEnvironment('TEMP')))
    # kick off size-plot generation for every day, newest first
    for i in reversed(xrange(len(days))):
        f = os.path.join(
            run_output, probs[i].replace(prefix, 'sizes_').replace(
                '.asc', '.csv'))
        run_what = r'python.exe firestarr\plotsize.py "{}" "{}"'.format(
            f, days[i])
        png_processes = [
            start_process(run_what, Settings.PROCESS_FLAGS, Settings.HOME_DIR)
        ] + png_processes
    # build per-day maps, last day first (it determines the shared extent)
    for i in reversed(xrange(len(days))):
        finish_process(png_processes[i])
        arcpy.env.addOutputsToMap = False
        prob_input = os.path.join(run_output, probs[i])
        # classify probability into 10% bins for polygon output
        c_prob = arcpy.sa.Int(arcpy.sa.Raster(prob_input) * 10)
        shp_class = os.path.join(
            map_output,
            probs[i].replace(".asc", "_class_poly.shp").replace("-", "_"))
        # keep getting 'WARNING: Error of opening hash table for code page.' when we save to file plan
        poly = "in_memory\poly"
        logging.debug("Converting to polygon")
        arcpy.RasterToPolygon_conversion(c_prob, poly, "SIMPLIFY")
        del c_prob
        #~ print(shp_class)
        arcpy.CopyFeatures_management(poly, shp_class)
        del poly
        perim = None if hide else perimeters.find_perim(
            scenario.fire, dates[i])
        copyMXD = None
        if len(days) - 1 == i:
            # we need to get the extent from the last map
            copyMXD, theMXD, extent = getProjectionMXD(i,
                                                       scenario.actuals_only,
                                                       scenario.run_output,
                                                       scenario.fire, extent,
                                                       perim)
            run_what = r'python.exe firestarr\saveboth.py "{}" "{}"'.format(
                copyMXD, fire_prefix + dates[i] + ".png")
            processes.append(
                start_process(run_what, Settings.PROCESS_FLAGS,
                              Settings.HOME_DIR))
            del theMXD
            run_what = r'python.exe firestarr\assets.py {} "{}" {} "{}" {}'.format(
                i, scenario.run_output, scenario.fire, extent, prefix)
        else:
            copyMXD = getProjectionMXDName(i, scenario.actuals_only,
                                           scenario.run_output, scenario.fire,
                                           extent, perim)
            run_what = r'python.exe firestarr\getProjectionMXD.py {} "{}" {} "{}"'.format(
                i, scenario.run_output, scenario.fire, extent)
            if scenario.actuals_only:
                run_what += ' --actuals'
            if perim:
                run_what += ' --perim "{}"'.format(perim)
        processes.append(
            start_process(run_what, Settings.PROCESS_FLAGS,
                          Settings.HOME_DIR))
        mxd_paths = [copyMXD] + mxd_paths
        mxd_names = [fire_prefix + dates[i] + ".png"] + mxd_names
        start_raster = os.path.join(run_output, scenario.fire + '.tif')
        fire_raster = None
        if os.path.exists(start_raster):
            fire_raster = arcpy.sa.Raster(start_raster)
        # need to make sure the extent is the same for all rasters or they don't add properly
        env_push()
        setSnapAndExtent(prob_input)

        def by_intensity(intensity):
            # probability-weighted risk raster for one intensity class
            letter = intensity.upper()[0]
            prob_i = os.path.join(
                run_output,
                prob_input.replace(prefix, 'intensity_{}_'.format(letter)))
            ra = Settings.RAMPART_MASK.format(intensity, suffix)
            logging.debug(prob_i)
            raster = arcpy.sa.Int(
                arcpy.sa.Raster(prob_i) * arcpy.sa.Raster(ra))
            if fire_raster is not None:
                # don't count anything in the starting perimeter
                # HACK: will not consider fires that start from just a size
                raster = arcpy.sa.Con(arcpy.sa.IsNull(fire_raster), raster, 0)
            raster = arcpy.sa.Con(arcpy.sa.IsNull(raster), 0, raster)
            return raster

        low_raster = by_intensity('low')
        moderate_raster = by_intensity('moderate')
        high_raster = by_intensity('high')
        total_raster = low_raster + moderate_raster + high_raster
        total_raster = arcpy.sa.SetNull(0 == total_raster, total_raster)
        total_path = os.path.join(
            map_output,
            prob_input.replace(prefix, 'RA_').replace('.asc', '.tif'))
        total_raster.save(total_path)
        del low_raster
        del moderate_raster
        del high_raster
        score = arcpy.RasterToNumPyArray(total_raster, nodata_to_value=0).sum()
        # .58 so that 10 for social & economic gives a 10 total score
        score = fixK(score / 1000000.0 / 0.58)
        env_pop()
        run_what = r'python.exe firestarr\getRiskMXD.py {} "{}" {} "{}" "{}"'.format(
            i, scenario.run_output, scenario.fire, extent, score)
        if scenario.actuals_only:
            run_what += ' --actuals'
        if perim:
            run_what += ' --perim "{}"'.format(perim)
        processes.append(
            start_process(run_what, Settings.PROCESS_FLAGS,
                          Settings.HOME_DIR))
        copyMXD = getRiskMXDName(i, scenario.actuals_only,
                                 scenario.run_output, scenario.fire, extent,
                                 perim)
        risk_paths = [copyMXD] + risk_paths
        risk_names = [
            os.path.join(os.path.dirname(copyMXD),
                         fire_prefix + dates[i] + "_risk.png")
        ] + risk_names
        scores = [score] + scores
    env_pop()
    # fuels map (one per scenario, not per day)
    copyMXD = getFuelMXDName(fire_prefix, scenario.run_output, scenario.fire,
                             extent, perim)
    run_what = r'python.exe firestarr\getFuelMXD.py {} "{}" {} "{}"'.format(
        fire_prefix, scenario.run_output, scenario.fire, extent)
    if perim:
        run_what += ' --perim "{}"'.format(perim)
    processes.append(
        start_process(run_what, Settings.PROCESS_FLAGS, Settings.HOME_DIR))
    mxd_paths = [copyMXD] + mxd_paths
    mxd_names = [fire_prefix + "_fuels.png"] + mxd_names
    mxd_names = map(lambda x: os.path.abspath(os.path.join(map_output, x)),
                    mxd_names)
    # impact map
    copyMXD = getImpactMXDName(fire_prefix, scenario.run_output,
                               scenario.fire, extent, perim)
    run_what = r'python.exe firestarr\getImpactMXD.py {} "{}" {} "{}"'.format(
        fire_prefix, scenario.run_output, scenario.fire, extent)
    if perim:
        run_what += ' --perim "{}"'.format(perim)
    processes.append(
        start_process(run_what, Settings.PROCESS_FLAGS, Settings.HOME_DIR))
    risk_paths = [copyMXD] + risk_paths
    risk_names = [
        os.path.join(os.path.dirname(copyMXD), fire_prefix + "_impact.png")
    ] + risk_names
    # wait for all child processes before assembling the PDF
    for process in processes:
        finish_process(process)
    # HACK: put in not generated images for any missing maps
    if len(mxd_names) < 6:
        mxd_names = (
            mxd_names +
            [os.path.join(Settings.HOME_DIR, 'not_generated.png')] * 6)[:6]
    if len(risk_names) < 6:
        risk_names = (
            risk_names +
            [os.path.join(Settings.HOME_DIR, 'not_generated.png')] * 6)[:6]
    logging.debug(mxd_names + [wxshield] + risk_names)
    makePDF(scenario.fire, days, dates, mxd_names, wxshield, risk_names,
            sim_output, pdf_output, scores)
    try_copy(pdf_output, copied)
    # HACK: use known file name for assets
    csv_orig = os.path.abspath(
        os.path.join(run_output, fire_prefix + for_time + "_assets.csv"))
    csv_output = os.path.abspath(
        os.path.join(out_dir, os.path.basename(csv_orig)))
    csv_copied = os.path.join(scenario.outbase, os.path.basename(csv_orig))
    try_copy(csv_orig, csv_output)
    try_copy(csv_orig, csv_copied)
    fixtime(scenario.fire, parse(for_time.replace('_', ' ')),
            [pdf_output, copied, csv_orig, csv_copied])
    # best-effort removal of the in-progress lock file
    try:
        tryForceRemove(mapflag)
    except:
        pass
    # shouldn't need any of these intermediary outputs
    shutil.rmtree(map_output, True)
    return copied