# Stratum calcs - populate Output File msg("Writing stratum area change results to output file:\n %s" % outFileStratum) try: outFile = open(outFileStratum,'w') colnames_text = ",".join(utils.swAreaChgStratumCols) outFile.write(colnames_text + "\n") outFile.write(coreString) outFile.write(flString) outFile.write(frString) outFile.write(frwString) outFile.close() except: errtext = "There was an error while opening or writing to the output file:" errtext += "%s" % outFileStratum e.call(errtext) allStrataString = output_string(year1,year2,sample_group,zmChangeAll.change_prop, zmChangeAll.area_change,zmChangeAll.area_change_se,mc_ci) # All sites calcs - populate Output File msg("Writing soundwide area change results to output file:\n %s" % outFileAll) try: outFile = open(outFileAll,'w') colnames_text = ",".join(utils.swAreaChgAllCols) outFile.write(colnames_text + "\n") outFile.write(allStrataString) outFile.close() except:
#------ Query the Sites Database Table for site id, Zm area, Zm variance with siteid as key flds_stats = (utils.siteCol,utils.est_basalcovCol,utils.estvar_basalcovCol) site_stats_data = site_stats.field_results(flds_stats, utils.siteCol) print site_stats_data #----- List of all sites in the Sites DB table meeting the criteria sites_in_stats_tbl = sorted(site_stats_data.keys()) #---Check for missing sites in the Sites DB table missing_sites = set(sites_sampled).difference(sites_in_stats_tbl) if missing_sites: err_text = "The sites database table, %s, is missing site(s)\n" % sitestats_tbl err_text += ",".join(missing_sites) err_text += "\nfor the year, %s" % survey_year e.call(err_text) #--- Determine Analysis Stratum & Extrapolation Type (using geo and sampling strata lookup) site_extrap = {} for (siteid,data) in site_characteristics_data.items(): site_extrap[siteid] = svmp.sw_Stratum4AreaCalcs[tuple(data)] #print site_extrap #--- Create a dictionary of extrapolation types that groups sites extrap_site = invert_dict(site_extrap) #print extrap_site core_dat = group_data_by_extrap(extrap_site,svmp.core_extrap,site_stats_data) pfl_dat = group_data_by_extrap(extrap_site,svmp.pfl_extrap,site_stats_data) fl_dat = group_data_by_extrap(extrap_site,svmp.fl_extrap,site_stats_data) fr_dat = group_data_by_extrap(extrap_site,svmp.fr_extrap,site_stats_data)
# Overwrite existing output data gp.OverWriteOutput = 1 # Create the custom error class # and associate it with the gp e = SvmpToolsError(gp) # Set some basic defaults for error handling e.debug = True e.full_tb = True #e.exit_clean = False #e.pdb = True msg(selStatement) rows = gp.UpdateCursor(shapefile,selStatement) row = rows.Next() while row: id = row.getValue("Id") date = row.getValue(utils.shpDateCol) msg(id) msg(date) row = rows.Next() del rows except SystemExit: pass except: e.call() del gp
import traceback import arcgisscripting from svmp_exceptions import SvmpToolsError DEBUG = True PDB_DEBUG = False # Create the geoprocessing object gp = arcgisscripting.create() # Create the custom error class # and associate it with the gp error = SvmpToolsError(gp) # Set some basic defaults for error handling error.debug = True error.pdb = True def main(): try: raise RuntimeError except Exception, E: error.call('An intentional RuntimeError occurred in the python script..') if __name__ == "__main__": try: main() except Exception, E: error.call(E)
# Validate presence of input transect file if not os.path.isfile(fullTransFile): missingTransFile.append(fullTransFile) # Compare site list to directory lists (in and out) to check for folder and file existence missingInDir = [d for d in siteList if d not in inSubDirList] missingOutDir = [d for d in siteList if d not in outSubDirList] errtext = "" if missingInDir or missingOutDir or missingTransFile: if missingInDir: errtext += "INPUT directory(ies) not found:\n%s\n%s\n" % ('/'.join((inParentDir,"*")),'\n'.join(missingInDir)) if missingOutDir: errtext += "OUTPUT directory(ies) not found:\n%s\n%s\n" % ('/'.join((outParentDir,"*",utils.ptShpSubDir)),'\n'.join(missingOutDir)) if missingTransFile: errtext += "TRANSECT file(s) not found:\n%s" % '\n'.join(missingTransFile) e.call(errtext) #--- END CHECK FOR PRESENCE OF INPUT/OUTPUT DIRECTORIES AND FILES ------ #----------------------------------------------------------------------- # Create input Spatial Reference for use in Cursor msg('Fetching Spatial Reference') inSpatialRef = utils.make_spatRef(gp,outParentDir,inCoordSys) msg("Processing %s site(s) requested in '%s'" % (len(siteList),siteFile)) # Now loop through and process sites for path in sites_to_process: fullTransFile,outDir,outFC,site = path[0],path[1],path[2],path[3] msg("-------- SITE ID: %s --------" % site) outFCFull = os.path.join(outDir,outFC) if os.path.exists(fullTransFile):
# For every requested site, verify its control file exists under
# <ctlParentDir>/<site>/<site><ctlSuffix>.
for site in siteList:
    # construct control file name and full path
    ctlFile = "".join((site,ctlSuffix))
    ctlFileFull = os.path.join(ctlParentDir,site,ctlFile)
    if not os.path.exists(ctlFileFull):
        # add to list of sites with missing control files
        missingCtlFiles.append(site)
errtext = ""
# Report all missing shapefiles and control files in one combined message,
# then abort via the project error handler.  missingPtShapes is populated
# earlier, outside this view.
if missingPtShapes or missingCtlFiles:
    if missingPtShapes:
        errtext += "The following sites are missing transect point shapefiles for %s:\n" % surveyYear
        errtext += '\n'.join(missingPtShapes)
    if missingCtlFiles:
        errtext += "\nThe following sites are missing control files for %s:\n" % surveyYear
        errtext += '\n'.join(missingCtlFiles)
    e.call(errtext)
# Loop through all sites and make a list of those without eelgrass
for site, shapefile in ptDirDict.items():
    try:
        # Create Search Cursor for Input Transect Data Table
        # Sort on Zmarina column, descending -- contains only zero or one for presence/absence
        rows = gp.SearchCursor(shapefile,"","",utils.zmCol, "%s D" % utils.zmCol)
        # First row contains max value
        row = rows.Next()
        ZmFlag = row.GetValue(utils.zmCol)
    # NOTE(review): bare except -- any failure (including an empty table,
    # where row is None) is reported as a generic access problem.
    except:
        e.call("Sorry, there was a problem accessing or querying feature class %s" % site)
    # if max value is zero, there is no Z. marina at the site.