Example #1
def execute_first_DSLD(_tiffolder, _DSLDFolder, threshold):
    sr = arcpy.SpatialReference(4326)
    print("looking at the first daily rainfall data in tif folder...")
    daily_list = create_daily_List(_tiffolder)
    first_date = min(daily_list)
    print("execute first rainy data from date " + first_date)
    first_data_name = 'chirps-v2.0.{0}.{1}.{2}.tif'.format(
        first_date[0:4], first_date[4:6], first_date[6:8])
    first_daily_data = os.path.join(_tiffolder, first_data_name)
    daily_Date = date(int(first_date[0:4]), int(first_date[4:6]),
                      int(first_date[6:8]))
    dsld_date = daily_Date + timedelta(days=1)
    print("creating dsld data " + str(dsld_date) +
          " from daily rainfall data from " + str(daily_Date))
    DSLDYear = str(dsld_date.year)
    DSLDmonth = str(dsld_date.month)
    DSLDday = str(dsld_date.day)
    print(str(dsld_date))
    DSLDFilename = 'cli_chirps_dsld_{0}{1}{2}.tif'.format(
        DSLDYear.zfill(4), DSLDmonth.zfill(2), DSLDday.zfill(2))
    print("Processing " + DSLDFilename)
    arcpy.CheckOutExtension("spatial")
    outCon = Con(Raster(first_daily_data) > int(threshold), 1, 0)
    outCon.save(os.path.join(_DSLDFolder, DSLDFilename))
    arcpy.DefineProjection_management(os.path.join(_DSLDFolder, DSLDFilename),
                                      sr)
    arcpy.CheckInExtension("spatial")
    print("file " + DSLDFilename + " is created")
Example #2
def execute_DSLR(_lastdate, _tiffolder, _DSLR_folder, threshold):
    sr = arcpy.SpatialReference(4326)
    date_formatted = date(int(_lastdate[0:4]), int(_lastdate[4:6]),
                          int(_lastdate[6:8]))
    last_dslrname = 'cli_chirps_dslr_{0}'.format(_lastdate)
    last_dslrfile = os.path.join(_DSLR_folder, last_dslrname)
    next_dailyname = 'chirps-v2.0.{0}.{1}.{2}.tif'.format(
        _lastdate[0:4], _lastdate[4:6], _lastdate[6:8])
    next_dailydata = os.path.join(_tiffolder, next_dailyname)
    if arcpy.Exists(next_dailydata):
        print("next daily data is available...")
        print("start processing next DSLR...")
        new_dslr_date = date_formatted + timedelta(days=1)
        DSLRYear1 = str(new_dslr_date.year)
        DSLRmonth1 = str(new_dslr_date.month)
        DSLRday1 = str(new_dslr_date.day)
        new_dslr_name = 'cli_chirps_dslr_{0}{1}{2}.tif'.format(
            DSLRYear1.zfill(4), DSLRmonth1.zfill(2), DSLRday1.zfill(2))
        print("Processing DSLR from " + last_dslrfile + " and " +
              next_dailydata)
        arcpy.CheckOutExtension("spatial")
        outDSLRCon = Con(
            Raster(next_dailydata) < int(threshold),
            Raster(last_dslrfile) + 1, 0)
        outDSLRCon.save(os.path.join(_DSLR_folder, new_dslr_name))
        arcpy.DefineProjection_management(
            os.path.join(_DSLR_folder, new_dslr_name), sr)
        arcpy.CheckInExtension("spatial")
        print("DSLR File " + new_dslr_name + " is created")
    else:
        print("next daily data is not available. Exit...")
Example #3
def addNDVI(inputTif, workDir, outName):

    arcpy.CheckOutExtension('spatial')

    print "\nCalculating NDVI and stacking to band 5"
    red = arcpy.sa.Raster(os.path.join(inputTif, "Band_1"))
    NIR = arcpy.sa.Raster(os.path.join(inputTif, "Band_4"))

    numerator = arcpy.sa.Float(NIR - red)
    denominator = arcpy.sa.Float(NIR + red)
    NDVI = arcpy.sa.Divide(numerator, denominator)
    NDVI_times = arcpy.sa.Times(NDVI, 100)
    NDVI_add = arcpy.sa.Plus(NDVI_times, 100)
    NDVI_int = arcpy.sa.Int(NDVI_add)

    vtab = arcpy.CreateObject("ValueTable")
    vtab.addRow(os.path.join(inputTif, "Band_1"))
    vtab.addRow(os.path.join(inputTif, "Band_2"))
    vtab.addRow(os.path.join(inputTif, "Band_3"))
    vtab.addRow(os.path.join(inputTif, "Band_4"))
    vtab.addRow(NDVI_int)
    arcpy.CompositeBands_management(
        vtab, os.path.join(workDir, outName + '_RGBNIR_NDVI.tif'))

    arcpy.CheckInExtension('spatial')

    return os.path.join(workDir, outName + '_RGBNIR_NDVI.tif')
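A hypothetical call (paths and name are placeholders), assuming the input is a 4-band R/G/B/NIR GeoTIFF as the Band_1/Band_4 references above imply:

stacked = addNDVI(r"C:\imagery\scene.tif", r"C:\imagery\work", "scene01")
# -> C:\imagery\work\scene01_RGBNIR_NDVI.tif, with NDVI rescaled to 0-200 in band 5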
Example #4
def main(argv=None):
    """Main function for Climate Linkage Mapper tool"""
    start_time = datetime.now()
    # print "Start time: %s" % start_time.strftime(TFORMAT)

    if argv is None:
        argv = sys.argv
    try:
        cc_env.configure(argv)
        cc_util.check_cc_project_dir()

        check_out_sa_license()
        arc_wksp_setup()
        config_lm()
        log_setup()

        run_analysis()

    except arcpy.ExecuteError:
        msg = arcpy.GetMessages(2)
        arcpy.AddError(arcpy.GetMessages(2))
        lm_util.write_log(msg)
        exc_traceback = sys.exc_info()[2]
        lm_util.gprint("Traceback (most recent call last):\n" +
                       "".join(traceback.format_tb(exc_traceback)[:-1]))

    except Exception:
        exc_value, exc_traceback = sys.exc_info()[1:]
        arcpy.AddError(exc_value)
        lm_util.gprint("Traceback (most recent call last):\n" +
                       "".join(traceback.format_tb(exc_traceback)))
    finally:
        arcpy.CheckInExtension("Spatial")
        print_runtime(start_time)
Example #5
def pregnanciesEstimates(birthPregMultiXlsx, iso3, outputDir):

    arcpy.CheckOutExtension("Spatial")

    try:
        # Retrieve multiplier from spreadsheet
        wb = xlrd.open_workbook(birthPregMultiXlsx)
        ws = wb.sheet_by_name("2012")
        for row in range(1, ws.nrows):
            if ws.cell_value(row, 2) == iso3:
                birthPregMulti = ws.cell_value(row, 1)

        # Multiply births estimates for each year by multiplier
        for year in ("2010", "2012", "2015", "2020", "2025", "2030", "2035"):
            rastPath = os.path.join(outputDir,
                                    "%s%sadjustedBirths.tif" % (iso3, year))
            birthsRast = arcpy.Raster(rastPath)

            pregnancies = birthsRast * birthPregMulti

            outRast = os.path.join(outputDir,
                                   "%s%spregnancies.tif" % (iso3, year))
            arcpy.CopyRaster_management(pregnancies,
                                        outRast,
                                        pixel_type="32_BIT_FLOAT")

    finally:
        arcpy.CheckInExtension("Spatial")
Example #6
def process_local_slope(dem=None,
                        slope=None,
                        max_slope=30,
                        mask=None,
                        return_type="polygon",
                        workspace=None):
    """

	:param dem: The DEM to process
	:param slope: If slope is already processed, use this instead.
	:param max_slope: The maximum slope in degrees that will be considered suitable for building
	:param mask: A polygon or raster mask to use as the processing area (arcpy.env.mask/Analysis Mask environment)
	:param return_type: whether to return a polygon feature class or a raster. Default is polygon, where raster will be processed to polygon automatically. Options are "polygon" or "raster"
	:return:
	"""

    if not dem and not slope:
        raise ValueError(
            "Must provide either a slope raster or a DEM raster. Either parameter 'dem' or parameter 'slope' must be defined."
        )

    arcpy.CheckOutExtension("Spatial")

    if not slope:
        arcpy.env.mask = mask
        logger.info("Processing raster to slope")
        mask_raster = arcpy.sa.ExtractByMask(
            dem, mask
        )  # mask environment variable hasn't been working - force extraction
        slope_raster = arcpy.sa.Slope(mask_raster, output_measurement="DEGREE")
    else:
        slope_raster = arcpy.sa.ExtractByMask(
            slope, mask
        )  # mask environment variable hasn't been working - force extraction

    logger.info("Thresholding raster")
    threshold_raster = slope_raster < max_slope

    raster_name = generate_gdb_filename("slope_raster", gdb=workspace)

    logger.info("Saving raster to disk")
    threshold_raster.save(raster_name)

    arcpy.CheckInExtension("Spatial")

    if return_type.lower() == "polygon":

        logger.info("Converting to polygons")
        new_name = convert_and_filter_by_code(raster_name, filter_value=1)

        poly_name = generate_gdb_filename("slope_polygon", gdb=workspace)
        arcpy.CopyFeatures_management(new_name, poly_name)

        return poly_name
    elif return_type.lower() == "raster":
        return raster_name
    else:
        raise ValueError(
            "Invalid parameter for return_type. Must be either \"raster\" or \"polygon\""
        )
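Two hypothetical calls (all paths are placeholders): the first derives slope from a DEM, the second reuses an existing slope raster and returns the thresholded raster instead of polygons:

suitable_polys = process_local_slope(dem=r"C:\gis\dem.tif",
                                     mask=r"C:\gis\study_area.shp",
                                     max_slope=25,
                                     return_type="polygon",
                                     workspace=r"C:\gis\scratch.gdb")

suitable_raster = process_local_slope(slope=r"C:\gis\slope_degrees.tif",
                                      mask=r"C:\gis\study_area.shp",
                                      return_type="raster",
                                      workspace=r"C:\gis\scratch.gdb")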
Example #7
def create_elevations(citywide):

    # get project area extent
    if citywide == 0:
        print("Calculating extent of project area...")
        desc = arcpy.Describe(clip)
        Xmin = desc.extent.XMin
        Xmax = desc.extent.XMax
        Ymin = desc.extent.YMin
        Ymax = desc.extent.YMax

        extent = str(Xmin) + " " + str(Ymin) + " " + str(Xmax) + " " + str(Ymax)
        print("Extents: " + extent)
    else:
        extent = " "

    # clip to processing extent
    print("Clipping dem and dsm to project area...")
    arcpy.management.Clip(originaldem, extent, dem, clip, -3.402823e+38, "ClippingGeometry", "NO_MAINTAIN_EXTENT")
    print("dem complete")
    arcpy.management.Clip(originaldsm, extent, dsm, clip, -3.402823e+38, "ClippingGeometry", "NO_MAINTAIN_EXTENT")
    print("dsm complete")

    # create ndsm from existing dem and dsm rasters
    print("Generating ndsm...")
    arcpy.CheckOutExtension("Spatial")
    outRaster = Raster(dsm) - Raster(dem)
    outRaster.save(ndsm)
    arcpy.CheckInExtension("Spatial")
    print("ndsm complete")
Example #8
def rainydays(tiffolder, threshold, rainydayFolder):
    print("start processing rainy data........ ")
    sr = arcpy.SpatialReference(4326)
    tifdata = []
    rainydata = []
    for tdata in os.listdir(tiffolder):
        if tdata.endswith(".tif") or tdata.endswith(".tiff"):
            parseString = tdata.split('.')
            parse = parseString[4]
            tifdate = parse[0:8]
            tifdata.append(tifdate)
    for rdata in os.listdir(rainydayFolder):
        if rdata.endswith(".tif") or rdata.endswith(".tiff"):
            parseStringtdata = rdata.split('.')
            rainydate = parseStringtdata[1]
            rainydata.append(rainydate)
    for i in tifdata:
        print("checking rainday data for date " + i)
        if i not in rainydata:
            print("rainday data for date " + i + " has not been calculated")
            print("calculating rainday for date " + i)
            tifname = '3B-DAY-L.MS.MRG.3IMERG.{0}-S000000-E235959.V05.nc4.tif'.format(
                i)
            rainyfilename = 'raindays.{0}.threshold_{1}mm.tif'.format(
                i, threshold)
            tiffile = os.path.join(tiffolder, tifname)
            arcpy.CheckOutExtension("spatial")
            outCon = Con(Raster(tiffile) > int(threshold), 1, 0)
            outCon.save(os.path.join(rainydayFolder, rainyfilename))
            arcpy.DefineProjection_management(
                os.path.join(rainydayFolder, rainyfilename), sr)
            print("file " + rainyfilename + " is created")
            arcpy.CheckInExtension("spatial")
    print("processing rainy days for threshold " + str(threshold) +
          " is  completed--------")
Example #9
def stats_overlap(non_overlapping_zones_list, zone_field, in_value_raster, out_table, is_thematic):
    temp_out_tables = ['in_memory/' + os.path.basename(zfc) + "_temp_table" for zfc in non_overlapping_zones_list]

    arcpy.CheckOutExtension("Spatial")
    for zones, temp_table in zip(non_overlapping_zones_list, temp_out_tables):
        cu.multi_msg('Calculating statistics for layer {0}'.format(zones))
        zonal_tabarea.stats_area_table(zones, zone_field, in_value_raster, temp_table, is_thematic)
    arcpy.CheckInExtension("Spatial")

    # Doing this append/copy method instead of Merge prevents problems with
    # differences in the field length of the zone field created by
    # Zonal Statistics As Table; Merge doesn't have a 'NO_TEST' option.
    target_table = temp_out_tables.pop(0)
    arcpy.Append_management(temp_out_tables, target_table, 'NO_TEST')
    arcpy.CopyRows_management(target_table, out_table)

    in_count = 0
    for zones in non_overlapping_zones_list:
        in_count += int(arcpy.GetCount_management(zones).getOutput(0))
    out_count = int(arcpy.GetCount_management(out_table).getOutput(0))
    if out_count < in_count:
        warn_msg = ("WARNING: {0} features are missing in the output table"
                    " because they are too small for this raster's"
                    " resolution. This may be okay depending on your"
                    " application.").format(in_count - out_count)
        arcpy.AddWarning(warn_msg)
        print(warn_msg)
    for t in temp_out_tables + [target_table]:
        arcpy.Delete_management(t)
Example #10
def updateDataCHIRPS(interval, output_dir, tif_data):
    downloadData.downloadCHIRPSData(interval, output_dir, tif_data)
    arcpy.CheckOutExtension("spatial")
    CreateRASPI.dekadRASPI()
    CreateRASPI.seasonalRASPI()
    CreateRASPI.monthlyRASPI()
    arcpy.CheckInExtension("spatial")
Example #11
    def make_covhsi(self, fish_applied, depth_raster_path):
        # habitat suitability curves from Fish.xlsx
        # fish_applied is a dictionary with fish species listed in Fish.xlsx
        arcpy.CheckOutExtension('Spatial')
        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = self.cache
        arcpy.env.extent = "MAXOF"
        self.logger.info("* * * CREATING " + str(self.cover_type).upper() + " COVER RASTER * * *")
        for species in fish_applied.keys():
            self.logger.info(" >> SPECIES  : " + str(species))
            for ls in fish_applied[species]:
                self.logger.info("         LIFESTAGE: " + str(ls))
                self.logger.info("   -> Retrieving " + self.cover_type + " curve from Fish.xlsx ...")
                curve_data = self.fish.get_hsi_curve(species, ls, self.cover_type)
                if len(depth_raster_path) > 0:
                    self.logger.info("   -> Cropping to relevant depth regions ...")
                    self.crop_input_raster(species, ls, depth_raster_path)
                else:
                    try:
                        self.cell_size = float(arcpy.GetRasterProperties_management(self.input_raster, property_type="CELLSIZEX")[0])
                    except:
                        self.cell_size = 1.0
                self.logger.info("   -> Calculating cover HSI raster ...")
                try:
                    ras_out = self.call_analysis(curve_data)
                except:
                    self.logger.info("ERROR: Cover raster calculation (check input data).")
                    arcpy.CheckInExtension('Spatial')
                    self.error = True

                self.logger.info("      - OK")
                ras_name = self.cover_type + "_hsi.tif"
                self.logger.info(
                    "   -> Saving: " + self.path_hsi + ras_name + " ...")
                try:
                    ras_out.save(self.path_hsi + ras_name)
                    self.logger.info("      - OK")
                except:
                    self.logger.info("ERROR: Could not save " + self.cover_type + " HSI raster (corrupted data?).")
                    self.error = True

            if not self.error:
                self.logger.info(" >> " + self.cover_type + " cover HSI raster creation " + str(species).upper() + " complete.")
            else:
                self.logger.info(" >> Could not create cover HSI raster. Check errors messages.")

        arcpy.CheckInExtension('Spatial')
Example #12
    def calculate_d2w(self):
        try:
            arcpy.CheckOutExtension('Spatial')  # check out license
            arcpy.gp.overwriteOutput = True
            arcpy.env.workspace = self.cache

            # check if interpolated WLE already exists
            path2wle_ras = os.path.join(self.out_dir, self.out_wle)
            if not self.check_interp_ras(path2wle_ras):
                self.interpolate_wle()
            else:
                self.logger.info("Using existing interpolated WLE raster ...")

            try:
                self.logger.info("Reading input rasters ...")
                ras_wle = arcpy.Raster(path2wle_ras)
                ras_dem = arcpy.Raster(self.path2dem_ras)
                arcpy.env.extent = ras_dem.extent
                self.logger.info("OK")
            except:
                self.logger.info(
                    "ERROR: Could not find / access input rasters.")
                return True

            try:
                self.logger.info("Calculating depth to groundwater raster ...")
                ras_d2w = Con((ras_wle > 0), (ras_dem - ras_wle))
                self.logger.info("OK")
            except arcpy.ExecuteError:
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                return True
            except Exception as e:
                self.logger.info(arcpy.GetMessages(2))
                self.logger.info(e.args[0])
                return True

            try:
                self.logger.info("Saving depth to groundwater raster to: %s" %
                                 os.path.join(self.out_dir, self.out_d2w))
                ras_d2w.save(os.path.join(self.out_dir, self.out_d2w))
                self.logger.info("OK")
                self.save_info_file(os.path.join(self.out_dir, self.out_d2w))
            except arcpy.ExecuteError:
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                return True
            except Exception as e:
                self.logger.info(arcpy.GetMessages(2))
                self.logger.info(e.args[0])
                return True

            arcpy.CheckInExtension('Spatial')
        except arcpy.ExecuteError:
            self.logger.info("ExecuteERROR: (arcpy).")
            self.logger.info(arcpy.GetMessages(2))
            return True
        except Exception as e:
            self.logger.info("ExceptionERROR: (arcpy).")
            self.logger.info(e.args[0])
            return True
Example #13
def main():
    try:
        # Input 1: Azerbaijan's extent
        input_extent = "44 37 52 43"  # Min longitude, Min latitude, Max longitude, Max latitude
        # Input 2: Nighttime light raster datasets
        satellite_years = [
            'F101992', 'F101993', 'F101994', 'F121994', 'F121995', 'F121996',
            'F121997', 'F121998', 'F121999', 'F141997', 'F141998', 'F141999',
            'F142000', 'F142001', 'F142002', 'F142003', 'F152000', 'F152001',
            'F152002', 'F152003', 'F152004', 'F152005', 'F152006', 'F152007',
            'F162004', 'F162005', 'F162006', 'F162007', 'F162008', 'F162009',
            'F182010', 'F182011', 'F182012', 'F182013'
        ]
        for satellite_year in satellite_years:

            print "Working on satellite-year " + satellite_year

            print "...Set the version number for stable_lights data"
            if satellite_year == "F182010":
                version = ".v4d"
            elif satellite_year[0:3] == 'F18':
                version = ".v4c"
            else:
                version = ".v4b"

            print "...Clipping stable_lights raster for " + satellite_year
            input_raster = "../orig/" + satellite_year + version + "_web.stable_lights.avg_vis.tif"
            output_raster = "../data/" + satellite_year + "_stable_lights_aze.tif"
            clip_raster(in_raster=input_raster,
                        clipping_extent=input_extent,
                        out_raster=output_raster)

            print "...Set the version number for pct_lights data"
            if satellite_year[0:3] == 'F18':
                version = ".v4c"
            else:
                version = ".v4b"

            print "...Clipping pct_lights raster for " + satellite_year
            if satellite_year == 'F182011':
                print "...We skip " + satellite_year + "because the pct_lights.tif is currently not available."
            else:
                input_raster = "../orig/" + satellite_year + version + ".pct_lights.tif"
                output_raster = "../temp/" + satellite_year + "_pct_lights_aze.tif"
                clip_raster(in_raster=input_raster,
                            clipping_extent=input_extent,
                            out_raster=output_raster)

        print "All done."

    # Return geoprocessing specific errors
    except arcpy.ExecuteError:
        print arcpy.GetMessages()
    # Return any other type of error
    except:
        print "There is non-geoprocessing error."
    # Check in extensions
    finally:
        arcpy.CheckInExtension("spatial")
Example #14
    def save_mu(self, *args):
        # args[0] can be an optional output directory
        try:
            self.out_dir = args[0]
        except:
            pass
        self.logger.info("")
        self.logger.info(" * SAVING ... ")
        arcpy.CheckOutExtension('Spatial')  # check out license
        arcpy.gp.overwriteOutput = True
        arcpy.env.workspace = self.cache
        arcpy.env.extent = "MAXOF"
        arcpy.CheckInExtension('Spatial')
        try:
            self.logger.info(" * Converting MU IDs to strings:")

            self.logger.info("   >> Converting raster to points ...")
            pts = arcpy.RasterToPoint_conversion(self.ras_mu, self.cache + "pts_del.shp")

            self.logger.info("   >> Converting numbers to strings ...")
            arcpy.AddField_management(pts, "MU", "TEXT")
            expression = "inverse_dict = " + fGl.dict2str(self.mu_dict, inverse_dict=True)
            arcpy.CalculateField_management(pts, "MU", "inverse_dict[!grid_code!]", "PYTHON", expression)

            self.logger.info("   >> OK")
            self.logger.info(" * Saving MU string raster as:")
            self.logger.info(str(self.out_dir) + "\\mu_str.tif")
            arcpy.PointToRaster_conversion(in_features=pts, value_field="MU",
                                           out_rasterdataset=self.out_dir + "\\mu_str.tif",
                                           cell_assignment="MOST_FREQUENT", cellsize=5)
            self.logger.info(" * OK")
        except arcpy.ExecuteError:
            self.logger.info("ExecuteERROR: (arcpy).")
            self.logger.info(arcpy.GetMessages(2))
        except Exception as e:
            self.logger.info("ExceptionERROR: (arcpy).")
            self.logger.info(e.args[0])
        except:
            self.logger.info("ERROR: Field assignment failed.")
            return True

        try:
            self.logger.info(" * Saving mu numeric raster as:")
            self.logger.info(str(self.out_dir) + "\\mu.tif")
            self.ras_mu.save(self.out_dir + "\\mu.tif")
            self.logger.info(" * OK")
        except arcpy.ExecuteError:
            self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
        except Exception as e:
            self.logger.info(arcpy.GetMessages(2))
        except:
            self.logger.info("ERROR: Saving failed.")
            return True

        try:
            self.clean_up()
        except:
            pass
        return False
Example #15
def clip_to_hu8(raster,
                nhd_gdb,
                out_dir,
                projection=arcpy.SpatialReference(102039)):
    """Outputs a series of rasters, each one clipped to a different HU8. """
    env.workspace = 'in_memory'
    env.outputCoordinateSystem = projection
    env.compression = "NONE"  # only final tifs are generated
    env.snapRaster = raster
    env.cellSize = '10'
    env.pyramids = "PYRAMIDS -1 SKIP_FIRST"
    arcpy.CheckOutExtension("Spatial")

    # HUC8 polygons each saved as separate fc inheriting albers from environ
    huc8_fc = os.path.join(nhd_gdb, "WBD_HU8")
    arcpy.MakeFeatureLayer_management(huc8_fc, "huc8_layer")
    huc4_code = re.search(r'\d{4}', os.path.basename(nhd_gdb)).group()

    clips_dir = os.path.join(out_dir, 'huc8clips{0}'.format(huc4_code))
    if not os.path.exists(clips_dir):
        os.mkdir(clips_dir)


##    # add walls
##    arcpy.PolygonToLine_management(huc8_fc, 'wall_lines')
##    arcpy.AddField_management('wall_lines', "height", "DOUBLE")
##    arcpy.CalculateField_management('wall_lines', "height", '500', "PYTHON")
##    arcpy.FeatureToRaster_conversion('wall_lines', "height", 'wall_raster')
##    wallsObject = Raster('wall_raster')
##    elevObject = Raster(raster)
##    walled_ned = Con(IsNull(wallsObject), elevObject,
##                    (wallsObject + elevObject))

    # for each HU8 feature in the fc, make a clip
    with arcpy.da.SearchCursor(huc8_fc, ["HUC_8"]) as cursor:
        for row in cursor:
            if row[0].startswith(huc4_code):
                whereClause = """"{0}" = '{1}'""".format("HUC_8", row[0])
                arcpy.SelectLayerByAttribute_management(
                    "huc8_layer", 'NEW_SELECTION', whereClause)
                arcpy.CopyFeatures_management("huc8_layer", "this_hu8")

                # clip the raster
                out_raster = os.path.join(clips_dir,
                                          'NED{0}.tif'.format(row[0]))
                cu.multi_msg('Creating output {0}'.format(out_raster))

                # use a small buffer here because otherwise the walls get
                # cut off in slivers
                arcpy.Buffer_analysis('this_hu8', 'this_hu8_buffer', 5000)
                arcpy.Clip_management(raster, '', out_raster,
                                      'this_hu8_buffer', '#',
                                      'ClippingGeometry')
                arcpy.Delete_management('this_hu8')
                arcpy.Delete_management('this_hu8_buffer')

    arcpy.Delete_management('huc8_layer')
    arcpy.ResetEnvironments()
    arcpy.CheckInExtension("Spatial")
Example #16
def extension(name):
    """ Safely use ArcGIS extensions """
    if arcpy.CheckExtension(name) == u"Available":
        status = arcpy.CheckOutExtension(name)
        yield status
    else:
        raise RuntimeError("%s license isn't available" % name)
    arcpy.CheckInExtension(name)
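As written, the generator above cannot drive a with statement on its own; a minimal usage sketch assuming it is wrapped with contextlib.contextmanager (the decorator and import are not part of the snippet):

import contextlib

safe_extension = contextlib.contextmanager(extension)

with safe_extension("Spatial"):
    # Spatial Analyst calls go here; the license is checked back in
    # when the block exits normally
    pass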
Example #17
def dekadLT():
    dictionary = {}
    for i in dir:
        for j in dekad:
            index = i + j
            content = []
            for file_dekad in os.listdir(datadir):
                if file_dekad.endswith(".tif") or file_dekad.endswith(".tiff"):
                    Moresult_dekad = Moregex_dekad.match(file_dekad)
                    Dmonth = Moresult_dekad.group('month')
                    Ddekad = Moresult_dekad.group('dekad')
                    if Ddekad == j and Dmonth == i:
                        content.append(os.path.join(datadir, file_dekad))
            dictionary[index] = content


#=========================Create STD DEKAD file=================================#

    for k in dir:
        for l in dekad:
            index = k + l
            listoffile = dictionary[index]
            ext = ".tif"
            newfilename_dekad = 'chirps-v2.0.1981-2016.{0}.{1}.dekad.36yrs.std{2}'.format(
                k, l, ext)
            newfilename_dekad_avg = 'chirps-v2.0.1981-2016.{0}.{1}.dekad.36yrs.avg{2}'.format(
                k, l, ext)
            print(newfilename_dekad)

            if arcpy.Exists(os.path.join(stddir, newfilename_dekad)):
                print(newfilename_dekad + " exists")
            else:
                arcpy.CheckOutExtension("spatial")
                outCellStatistics = CellStatistics(listoffile, "STD", "DATA")
                outCellStatistics.save(os.path.join(stddir, newfilename_dekad))
                arcpy.CheckInExtension("spatial")

            if arcpy.Exists(os.path.join(stddir, newfilename_dekad_avg)):
                print(newfilename_dekad_avg + " exists")
            else:
                arcpy.CheckOutExtension("spatial")
                outCellStatistics_avg = CellStatistics(listoffile, "MEAN",
                                                       "DATA")
                outCellStatistics_avg.save(
                    os.path.join(stddir, newfilename_dekad_avg))
                arcpy.CheckInExtension("spatial")
Example #18
    def mu_maker(self, h_raster, u_raster, full_out_ras_name,
                 full_out_shp_name, *mu):
        # h_raster: STR - full path to depth raster
        # u_raster: STR - full path to velocity raster
        # full_out_ras_name: STR - full path of the results raster name
        # full_out_shp_name: STR - full path of the result shapefile name
        # mu = LIST(STR) - (optional) - restricts analysis to a list of morphological units according to mu.xlsx

        # start with raster calculations
        self.logger.info("Raster Processing    --- --- ")
        self.license_state = arcpy.CheckOutExtension(
            'Spatial')  # check out license
        arcpy.gp.overwriteOutput = True
        arcpy.env.workspace = self.path
        arcpy.env.extent = "MAXOF"

        try:
            self.mu_names = mu[0]  # limit mu analysis to optional list, if provided
        except:
            pass

        out_ras = self.calculate_mu(h_raster, u_raster)

        try:
            self.logger.info(" > Saving Raster ...")
            out_ras.save(full_out_ras_name)
            self.logger.info("   * OK")
        except:
            self.logger.info("ERROR: Could not save MU raster.")
        arcpy.CheckInExtension('Spatial')  # release license
        self.logger.info("Raster Processing OK     --- \n")

        self.logger.info("Shapefile Processing --- --- ")
        self.logger.info(" > Converting mu raster to shapefile ...")
        temporary_shp = full_out_shp_name.split(".shp")[0] + "1.shp"
        arcpy.RasterToPolygon_conversion(arcpy.Raster(full_out_ras_name),
                                         temporary_shp, 'NO_SIMPLIFY')

        self.logger.info(" > Calculating Polygon areas ...")
        arcpy.CalculateAreas_stats(temporary_shp, full_out_shp_name)

        self.logger.info("   * OK - Removing remainders ...")
        arcpy.Delete_management(temporary_shp)

        self.logger.info(" > Adding MU field ...")
        arcpy.AddField_management(full_out_shp_name,
                                  "MorphUnit",
                                  "TEXT",
                                  field_length=50)
        expression = "the_dict[!gridcode!]"
        codeblock = "the_dict = " + str(
            dict(
                zip(self.mu_names_number.values(),
                    self.mu_names_number.keys())))
        arcpy.CalculateField_management(full_out_shp_name, "MorphUnit",
                                        expression, "PYTHON", codeblock)
        self.logger.info("Shapefile Processing OK  --- ")
Example #19
def GenContour(parameter):
    arcpy.env.overwriteOutput = True
    inputRas = parameter[0]
    interval = int(parameter[1])
    base = int(parameter[2])
    output = parameter[3]
    arcpy.CheckOutExtension("Spatial")
    arcpy.sa.Contour(inputRas, output, interval, base)
    arcpy.CheckInExtension("Spatial")
Example #20
def create_reviewer_session(reviewer_db, session):
    # Check out a Data Reviewer extension license
    arcpy.CheckOutExtension("datareviewer")

    arcpy.env.overwriteOutput = "true"

    arcpy.CreateReviewerSession_Reviewer(reviewer_db, session)

    arcpy.CheckInExtension("datareviewer")
Example #21
    def _LicenseManager(self, extension, checkout=True):
        v = None
        licAvailability = arcpy.CheckExtension(extension)
        if licAvailability == "Available":
            if checkout:
                v = arcpy.CheckOutExtension(extension)
            else:
                v = arcpy.CheckInExtension(extension)
        else:
            raise Exception("License " + extension + " " + licAvailability)

        print v
Example #22
def RemoveDEMErrantValues(strJobId):
    aa = datetime.now()
    Utility.printArguments(["WMX Job ID"], [strJobId],
                           "A05 RemoveDEMErrantValues")
    arcpy.CheckOutExtension("3D")
    arcpy.CheckOutExtension("Spatial")

    ProjectJob, project, strUID = getProjectFromWMXJobID(
        strJobId)  # @UnusedVariable

    errorMsg = processJob(ProjectJob, project, strUID)

    arcpy.CheckInExtension("3D")
    arcpy.CheckInExtension("Spatial")
    doTime(aa, "Operation Complete: A05 Remove DEM Errant Values")
    if len(errorMsg) > 0:
        raise Exception("Failed to process {} raster data correctly".format(
            " ".join(errorMsg)))
Example #23
def combineMODData(folder, outputFolder, subset):
    processedDate = []
    for filename in os.listdir(folder):
        print(filename)
        if filename.endswith(".tif") or filename.endswith(".tiff"):
            split = filename.split('.')
            filedate = split[1]
            year = int(split[1][1:5])
            date = int(split[1][5:8])
            month, jd, y = JulianDate_to_MMDDYYY(year, date)
            twodigitmonth = str(month).zfill(2)
            twodigitday = str(jd).zfill(2)
            if filedate not in processedDate:
                processedDate.append(filedate)
                combinedData = []
                combinedData.append(os.path.join(folder, filename))
                for SFilename in os.listdir(folder):
                    if SFilename.endswith(".tif") or SFilename.endswith(".tiff"):
                        if os.path.join(SFilename) not in combinedData:
                            split1 = SFilename.split('.')
                            Sfiledate = split1[1]
                            if Sfiledate == filedate:
                                combinedData.append(os.path.join(folder, SFilename))
                            else:
                                continue
                print(combinedData)
                sumofdata = len(combinedData)
                stringcombined = combinedData[0]
                x = 1
                while x > 0 and x < sumofdata:
                    stringcombined = stringcombined + ";" + combinedData[x]
                    x = x+1
                print(stringcombined)
                sr = arcpy.SpatialReference(4326)
                arcpy.env.workspace = folder
                newfilename = 'phy_MOD13A3.{0}.{1}.{2}_006.1_km_monthly_EVI.tif'.format(
                    year, twodigitmonth, twodigitday)
                idnfilename = 'idn_phy_MOD13A3.{0}.{1}.{2}_006.1_km_monthly_EVI.tif'.format(
                    year, twodigitmonth, twodigitday)
                julianname = '{0}.{1}.006.250m_16_days_NDVI.tif'.format(
                    split[0], filedate)
                arcpy.CheckOutExtension("spatial")
                arcpy.MosaicToNewRaster_management(
                    input_rasters=combinedData,
                    output_location=outputFolder,
                    raster_dataset_name_with_extension=newfilename,
                    coordinate_system_for_the_raster=sr,
                    pixel_type='16_BIT_SIGNED',
                    number_of_bands='1')
                arcpy.DefineProjection_management(
                    os.path.join(outputFolder, newfilename), sr)
                outExtractByMask = ExtractByMask(
                    os.path.join(outputFolder, newfilename), subset)

                # ---- Uncomment code when result filename is in julian date---- #
                # if arcpy.Exists(os.path.join(outputFolder, julianname)):
                #     print(julianname + " exists")
                # else:
                #     outExtractByMask.save(os.path.join(outputFolder, julianname))

                if arcpy.Exists(os.path.join(outputFolder, idnfilename)):
                    print(idnfilename + " exists")
                else:
                    outExtractByMask.save(os.path.join(outputFolder, idnfilename))

                arcpy.CheckInExtension("spatial")
Example #24
def write_to_reviewer_table(reviewer_db, session, feature, field, orig_table_name):

    # Check out a Data Reviewer extension license
    arcpy.CheckOutExtension("datareviewer")

    arcpy.env.overwriteOutput = "true"

    arcpy.WriteToReviewerTable_Reviewer(reviewer_db, session, feature, field, orig_table_name)

    arcpy.CheckInExtension("datareviewer")
Example #25
def seasonal():
    dictionary = {}
    for i in dirseasonal:
        index = i
        content = []
        for file_dekad in os.listdir(datadir):
            if file_dekad.endswith(".tif") or file_dekad.endswith(".tiff"):
                print(file_dekad)
                Moresult_dekad = Moregex_dekad.match(file_dekad)
                Dmonth = Moresult_dekad.group('season')
                if Dmonth == i:
                    content.append(os.path.join(datadir, file_dekad))
        dictionary[index] = content
    print(dictionary)

    # =========================Create STD DEKAD file=================================#
    for k in dirseasonal:
        index = k
        listoffile = dictionary[index]
        print(listoffile)
        ext = ".tif"
        newfilename_dekad = 'chirps-v2.0.1981-2016.{0}.seasonal.36yrs.std{1}'.format(
            k, ext)
        newfilename_dekad_avg = 'chirps-v2.0.1981-2016.{0}.seasonal.36yrs.avg{1}'.format(
            k, ext)
        print(newfilename_dekad)

        if arcpy.Exists(os.path.join(stddir, newfilename_dekad)):
            print(newfilename_dekad + " exists")
        else:
            arcpy.CheckOutExtension("spatial")
            outCellStatistics = CellStatistics(listoffile, "STD", "DATA")
            outCellStatistics.save(os.path.join(stddir, newfilename_dekad))
            arcpy.CheckInExtension("spatial")

        if arcpy.Exists(os.path.join(stddir, newfilename_dekad_avg)):
            print(newfilename_dekad_avg + " exists")
        else:
            arcpy.CheckOutExtension("spatial")
            outCellStatistics_avg = CellStatistics(listoffile, "MEAN", "DATA")
            outCellStatistics_avg.save(
                os.path.join(stddir, newfilename_dekad_avg))
            arcpy.CheckInExtension("spatial")
Example #26
def run_reviewer_batch_job(reviewer_db, session, checks_rbj, prod_data):

    # Check out a Data Reviewer extension license
    arcpy.CheckOutExtension("datareviewer")

    arcpy.env.overwriteOutput = "true"

    # Execute Reviewer Batch Job function
    res = arcpy.ExecuteReviewerBatchJob_Reviewer(reviewer_db, session, checks_rbj, prod_data)

    arcpy.CheckInExtension("datareviewer")
Example #27
def max_monthly_dslr(country, month, year, threshold):
    countryfolder = "D:\\IDN_GIS\\01_Data\\03_Regional\\" + country + "\\DSLR_" + str(
        threshold).zfill(2) + "mm"
    max_country_folder = "D:\\IDN_GIS\\01_Data\\03_Regional\\" + country + "\\DSLR_" + str(
        threshold).zfill(2) + "mm_monthly_max"
    result_filename = '{0}_cli_dslr_{1}mm_threshold_{2}.{3}.max.tif'.format(
        country,
        str(threshold).zfill(2), year, month)
    if not os.path.exists(os.path.join(max_country_folder, result_filename)):
        logging.debug(datelog + " : max dslr " + result_filename +
                      " is not available. Processing.....")
        file_list = []
        for dslrfile in os.listdir(countryfolder):
            if dslrfile.endswith(".tif") or dslrfile.endswith(".tiff"):
                datestring = dslrfile.split("_")
                filemonth = datestring[5][4:6]
                fileyear = datestring[5][0:4]
                #print(filemonth, fileyear)
                if month == filemonth and year == fileyear:
                    file_list.append(os.path.join(countryfolder, dslrfile))
        lastday = mosaicDataset.eomday(int(year), int(month))
        firstdate = date(int(year), int(month), 1)
        lastdate = date(int(year), int(month), lastday)
        day_count_delta = lastdate - firstdate
        day_count = day_count_delta.days + 1
        if day_count == len(file_list):
            logging.debug(
                datelog +
                " : the data required to calculate max dslr are available. Calculating max DSLR...."
            )
            arcpy.CheckOutExtension("Spatial")
            #print(result_filename)
            outCellStatistics = CellStatistics(file_list, "MAXIMUM", "DATA")
            outCellStatistics.save(
                os.path.join(max_country_folder, result_filename))
            arcpy.CheckInExtension("spatial")
        else:
            logging.debug(
                datelog +
                " : incomplete data to calculate max dslr for month = " +
                month + " and year = " + year)
            logging.debug(datelog + " : data available = " +
                          str(len(file_list)) +
                          " while the required data is " + str(day_count))
    else:
        logging.debug(datelog + " : max dslr for month = " + month +
                      " and year = " + year + " is available")


# calmonth = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']
# calyear = ['2014', '2015', '2016', '2017']
# for i in calmonth:
#     for j in calyear:
#         max_monthly_dslr('idn',i,j, 1)
Example #28
def GenerateQALasDataset(strJobId,
                         createQARasters=False,
                         createMissingRasters=True,
                         overrideBorderPath=None):
    Utility.printArguments([
        "WMXJobID", "createQARasters", "createMissingRasters",
        "overrideBorderPath"
    ], [strJobId, createQARasters, createMissingRasters, overrideBorderPath],
                           "A04 GenerateQALasDataset")

    aa = datetime.now()
    arcpy.AddMessage("Checking out licenses")
    arcpy.CheckOutExtension("3D")
    arcpy.CheckOutExtension("Spatial")

    ProjectJob, project, strUID = getProjectFromWMXJobID(
        strJobId)  # @UnusedVariable

    las_qainfo, lasd_boundary = processJob(ProjectJob, project,
                                           createQARasters,
                                           createMissingRasters,
                                           overrideBorderPath)
    try:
        if las_qainfo is not None and os.path.exists(las_qainfo.filegdb_path):
            arcpy.Compact_management(in_workspace=las_qainfo.filegdb_path)
    except:
        pass

        # @TODO: Move this to another standalone script
        # updateCMDR(ProjectJob, project, las_qainfo, updatedBoundary)

    arcpy.AddMessage("Checking in licenses")
    arcpy.CheckInExtension("3D")
    arcpy.CheckInExtension("Spatial")

    if las_qainfo.num_las_files <= 0:
        raise Exception(
            "Project has no .las files in DELIVERED LAS_CLASSIFIED or LAS_UNCLASSIFIED folders, CANNOT CONTINUE.\nERROR: {}"
            .format(project))

    doTime(aa, "Operation Complete: A04 Generate QA LASDataset")
Example #29
    def get_matrix(self, out_path=None, out_name=None):
        if out_path is not None:
            self.out_path = out_path
        if out_name is not None:
            self.out_name = out_name
        dbf = os.path.join(self.out_path, self.out_name)
        or_list = arcpy.RasterToNumPyArray(self.or_map)
        or_null = 0
        fi_list = arcpy.RasterToNumPyArray(self.fi_map)
        fi_null = 0
        or_list = list(np.unique(or_list))
        try:
            or_list.remove(or_null)
        except:
            None
        fi_list = list(np.unique(fi_list))
        try:
            fi_list.remove(fi_null)
        except:
            None
### create table and calculate
        arcpy.env.overwriteOutput = True
        if arcpy.CheckExtension("Spatial") == "Available":
            arcpy.CheckOutExtension("Spatial")
        else:
            arcpy.AddError("spatial analysi liscense is not available")
            pass
        arcpy.CreateTable_management(self.out_path,self.out_name)
        c_field = "ori_map"
        self.add_field(dbf,c_field,"LONG")
        for i in fi_list:
            self.add_field(dbf,'c'+str(i),"LONG")
        Rows = arcpy.InsertCursor(dbf)
        for j in or_list:
            row = Rows.newRow()
            row.setValue(c_field,int(j))
            for i in fi_list:
                n_field = 'c'+str(i)
                map_or = arcpy.sa.EqualTo(self.or_map,int(j))
                map_fi = arcpy.sa.EqualTo(self.fi_map,int(i))
                ff = arcpy.sa.Times(map_or,map_fi)
                ff_array = arcpy.RasterToNumPyArray(ff)
                num = 0
                for k in ff_array:
                    num += len([x for x in k if x == 1])
                row.setValue(n_field,num)
            Rows.insertRow(row)
        del row,Rows
        try:
            arcpy.CheckInExtension("Spatial")
        except:
            pass
        return "trans matrix has been successfully calculated in {0}".format(dbf)
Example #30
def dekadRainfallAnomaly():
    average_pattern_dekad = vp.get('CHIRPS_Longterm_Average',
                                   'global_lta_dekad_pattern')
    dekad_pattern = vp.get('CHIRPS', 'global_dekad_pattern')
    AVGdir_dekad = 'Z:\\Temp\\DryWetSeason\\Statistics_Month24_movingby_Dekad'
    dekaddir = 'Z:\\Temp\\DryWetSeason\\Month24_movingby_Dekad'
    ResultDir_dekad = 'Z:\\Temp\\DryWetSeason\\Output'
    AVGregex_dekad = re.compile(average_pattern_dekad)
    Moregex_dekad = re.compile(dekad_pattern)

    for Dfilename in os.listdir(dekaddir):
        if Dfilename.endswith(".tif") or Dfilename.endswith(".tiff"):
            #print(Dfilename)
            Moresult_dekad = Moregex_dekad.match(Dfilename)
            Dmonth = Moresult_dekad.group('month')
            Ddekad = Moresult_dekad.group('dekad')
            for ADfilename in os.listdir(AVGdir_dekad):
                if ADfilename.endswith(".tif"):
                    if AVGregex_dekad.match(ADfilename):
                        AVGresult_dekad = AVGregex_dekad.match(ADfilename)
                        SDmonth = AVGresult_dekad.group('month')
                        SDdekad = AVGresult_dekad.group('dekad')
                        if SDmonth == Dmonth and SDdekad == Ddekad:
                            #print(Dfilename+" match with "+ADfilename)
                            AVGFile_dekad = os.path.join(
                                AVGdir_dekad, ADfilename)
                            MoFile_dekad = os.path.join(dekaddir, Dfilename)
                            month = SDmonth
                            year = Moresult_dekad.group('year')
                            ext = ".tif"
                            newfilename_dekad = '{0}.{1}.{2}.{3}.month24_ratio_anom{4}'.format(
                                base_product_name, year, month, Ddekad, ext)

                            print(newfilename_dekad)
                            if arcpy.Exists(
                                    os.path.join(ResultDir_dekad,
                                                 newfilename_dekad)):
                                print(newfilename_dekad + " already exists")
                            else:
                                arcpy.CheckOutExtension("spatial")
                                newRaster_dekad = Int(100 *
                                                      Raster(MoFile_dekad) /
                                                      Raster(AVGFile_dekad))
                                newRaster_dekad.save(
                                    os.path.join(ResultDir_dekad,
                                                 newfilename_dekad))
                                arcpy.CheckInExtension("spatial")
                        continue
                else:
                    continue
            continue
        else:
            continue