Example #1
def execute(self, parameters, messages):
    rasteri = arcpy.Raster(parameters[0].valueAsText)
    arcpy.env.overwriteOutput = True
    lowervalue = parameters[1].value
    uppervalue = parameters[2].value

    nodatathreshold = parameters[3].value
    output_rastername = parameters[4].valueAsText

    addtomap = parameters[5].value
    ignorenegative = parameters[6].value

    raster_array = arcpy.RasterToNumPyArray(rasteri).astype(numpy.float32)  # cast to float so NoData cells can hold NaN

    nodata_indices = raster_array < nodatathreshold
    raster_array[nodata_indices] = numpy.nan

    #myprint (str(raster_array[0][0]))

    myprint("\n" + "=" * 10 + " Rescale raster " + "=" * 10)
    myprint("Starting rescale raster")

    minimi = numpy.nanmin(raster_array)
    maksimi = numpy.nanmax(raster_array)

    if ignorenegative:
        myprint("   Negatives will be changed to zero...")
        raster_array[raster_array < 0] = 0
        myprint("      ...done")
    else:
        minimi = numpy.nanmin(raster_array)
        if minimi < 0:
            myprint("   Negatives will be spread to new raster Min: %s" %
                    (str(minimi)))
            raster_array += numpy.abs(minimi)

    #myprint (str(raster_array));

    diff = uppervalue - lowervalue
    myprint("   Rescaling array[%s - %s] -> array[%s .. %s] " %
            (str(minimi), str(maksimi), str(lowervalue), str(uppervalue)))
    myprint("   max(raster_array):%s diff:%s" %
            (str(numpy.nanmax(raster_array)), str(diff)))

    raster_array = raster_array / float(numpy.nanmax(raster_array)) * diff
    raster_array = raster_array + lowervalue
    #myprint (str(raster_array[0][0]))

    myprint("Calculation done.")
    #myprint (str(raster_array));

    # Lower-left corner for the output raster (same origin as the input)
    mx = rasteri.extent.XMin
    my = rasteri.extent.YMin

    #myprint ( "Size of output raster: %s x %s"%( numpy.shape(numero4),numpy.shape(uusi)));

    #myprint (uusi[100]);

    myRasterBlock = arcpy.NumPyArrayToRaster(raster_array, arcpy.Point(mx, my),
                                             rasteri.meanCellWidth,
                                             rasteri.meanCellHeight)

    myprint("Saving new raster...\n   Output rastername: %s" %
            (output_rastername))
    #myRasterBlock.save("d:\\arcgis\\database.gdb\\tulos");
    myRasterBlock.save(output_rastername)
    desc = arcpy.Describe(output_rastername)
    name = desc.file.split('.')[0] + "_layer"
    #parameters[3].value =  myRasterBlock;
    if addtomap:
        myprint("   Adding layer to map with name: " + name)
        addToDisplay(output_rastername, name, "TOP")
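
# A minimal standalone sketch of the rescale used above (added for illustration;
# values are hypothetical). It assumes a float array with NoData already set to
# NaN and, like the tool, maps [0, nanmax] onto [lowervalue, uppervalue].
import numpy

arr = numpy.array([[numpy.nan, 2.0], [5.0, 10.0]])
lowervalue, uppervalue = 0.0, 1.0
arr = arr / numpy.nanmax(arr) * (uppervalue - lowervalue) + lowervalue
print(arr)  # [[nan 0.2] [0.5 1.0]]; NaN cells stay NaN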
Example #2
def create_GLADseamask(in_gladtile, in_mod44w, in_alositerator, in_lakes,
                       outpath, processgdb):
    """
    Identifies sea/ocean water pixels in a GLAD surface water classes tile
    :param in_gladtile: path to GLAD tile to be processed
    :param in_mod44w: path of extracted and mosaicked MOD44W C6 QA layer envelopping GLAD tile
    :param in_alositerator: list of dictionary that contains extents of intersecting ALOS DEM tiles
    :return: GLAD tile with same extent and pixel size but where sea water pixels are identified as class 9
    """
    gladtileid = 'glad{}'.format(
        re.sub(
            '(^.*class99_1[89]_)|([.]tif)', '',
            in_gladtile))  # Unique identifier for the GLAD tile based on its coordinates
    gladtile_extent = arcpy.Describe(in_gladtile).extent
    mod44w_outdir = os.path.split(in_mod44w)[0]

    print('Creating seamask for {}...'.format(gladtileid))
    # Run a 4x4 majority filter on MODIS to get rid of isolated sea pixels
    modmaj = os.path.join(mod44w_outdir, 'mod44wmaj_{}'.format(gladtileid))
    print('    1/15 - Majority filter of MODIS QA layer...')
    if not arcpy.Exists(modmaj):
        focallyr = FocalStatistics(in_raster=in_mod44w,
                                   neighborhood=NbrRectangle(4, 4, "CELL"),
                                   statistics_type='MAJORITY')
        Con(IsNull(focallyr), Raster(in_mod44w), focallyr).save(modmaj)

    # Project, adjust cell size, and snap MODIS to GLAD tile
    print('    2/15 - Project and resample MODIS to match GLAD tile...')
    arcpy.env.snapRaster = in_gladtile
    arcpy.env.extent = in_gladtile
    mod44w_gladmatch_wgs = os.path.join(
        mod44w_outdir, 'mod44wQA_glad{}_wgs'.format(gladtileid))
    if not arcpy.Exists(mod44w_gladmatch_wgs):
        # try:
        #     arcpy.Delete_management(mod44w_gladmatch_wgs)
        # except:
        #     traceback.print_exc()
        arcpy.ProjectRaster_management(
            modmaj,
            out_raster=mod44w_gladmatch_wgs,
            in_coor_system=arcpy.Describe(in_mod44w).SpatialReference,
            out_coor_system=arcpy.SpatialReference(
                4326),  #WGS84, same as  in_gladtile,
            cell_size=arcpy.Describe(in_gladtile).meanCellWidth,
            resampling_type='NEAREST')

    # ----------- Format ALOS ----------------------------------------------------------------------------------------------
    # Subset tiles to only keep those that intersect the GLAD tile
    print('    3/15 - Getting intersecting ALOS DEM tiles...')
    alos_seltiles = get_inters_tiles(ref_extent=gladtile_extent,
                                     tileiterator=in_alositerator,
                                     containsonly=False)

    # Format ALOS
    alos_mosaictile = os.path.join(processgdb,
                                   'alos_mosaic_{}'.format(gladtileid))
    print('    4/15 - Mosaicking ALOS DEM tiles...')
    if not arcpy.Exists(alos_mosaictile):
        arcpy.MosaicToNewRaster_management(
            input_rasters=alos_seltiles,
            output_location=os.path.split(alos_mosaictile)[0],  # or 'in_memory'
            raster_dataset_name_with_extension=os.path.split(alos_mosaictile)[1],
            pixel_type='16_BIT_SIGNED',
            number_of_bands=1)

    alos_rsp = os.path.join(processgdb, 'alos_rsp{}'.format(gladtileid))
    print('    5/15 - Resampling ALOS DEM...')
    if not arcpy.Exists(alos_rsp):
        arcpy.Resample_management(
            in_raster=alos_mosaictile,
            out_raster=alos_rsp,
            cell_size=arcpy.Describe(in_gladtile).meanCellWidth,
            resampling_type='NEAREST')

    # ----------- Format HydroLAKES  ------------------------------------------------------------
    lakeras = os.path.join(processgdb, '{}_lakeras'.format(gladtileid))
    print('    6/15 - Rasterizing HydroLAKES...')
    if not arcpy.Exists(lakeras):
        arcpy.PolygonToRaster_conversion(
            in_features=in_lakes,
            value_field="Lake_type",
            out_rasterdataset=lakeras,
            cell_assignment='CELL_CENTER',
            cellsize=arcpy.Describe(in_gladtile).meanCellWidth)

    # ----------- Create seamask based on MODIS, ALOS, and GLAD  ------------------------------------------------------------
    # Create GLAD land-water mask
    print(
        '    7/15 - Creating GLAD land-water mask, excluding known lake pixels from the analysis...'
    )
    gladtile_mask = os.path.join(processgdb, '{}_mask'.format(gladtileid))
    if not arcpy.Exists(gladtile_mask):
        arcpy.CopyRaster_management(in_raster=Con(
            IsNull(lakeras),
            Reclassify(
                in_gladtile, "Value",
                RemapValue([[0, "NoData"], [1, 1], [2, 2], [3, 2], [4, 2],
                            [5, 2], [6, 2], [7, 2], [8, 2], [10, 1], [11, 2],
                            [12, 0]])), 0),
                                    out_rasterdataset=gladtile_mask,
                                    pixel_type='4_BIT')

    # Prepare pixels that are water, under 0 m elevation and not already labeled as seamask by MOD44W for Nibbling
    gladtile_shore = os.path.join(processgdb, '{}_shore'.format(gladtileid))
    # If seawater in MODIS seamask and water in GLAD:
    #   9
    # else:
    #   if elevation > 0 or land in glad:
    #       glad values
    #   else (i.e. <0 & water in glad):
    #       NoData
    print(
        '    8/15 - Creating shore mask to nibble on i.e. GLAD water, '
        'under 0 m elevation and not already labeled as seamask by MOD44W for Nibbling...'
    )
    if not arcpy.Exists(gladtile_shore):
        Con((Raster(mod44w_gladmatch_wgs) == 4) & (Raster(gladtile_mask) == 2),
            3,
            Con((Raster(alos_rsp) > 0) | (Raster(gladtile_mask) < 2),
                Raster(gladtile_mask))).save(gladtile_shore)

    # Expand seamask in contiguous areas <= 0 meters in elevation and identified as water in GLAD
    print(
        '    9/15 - Expanding seamask in contiguous areas <= 0 meters in elevation and identified as water in GLAD...'
    )
    outnibble1 = os.path.join(processgdb, '{}_1nibble'.format(gladtileid))
    if not arcpy.Exists(outnibble1):
        Nibble(in_raster=Raster(gladtile_shore),
               in_mask_raster=Raster(gladtile_shore),
               nibble_values='DATA_ONLY',
               nibble_nodata='PROCESS_NODATA',
               in_zone_raster=Con(IsNull(Raster(gladtile_shore)), 1,
                                  Con(Raster(gladtile_shore) == 3, 1,
                                      0))).save(outnibble1)

    # Expand seamask by ten pixels in non-sea GLAD water pixels under 10 m in elevation
    outexpand = os.path.join(processgdb, '{}_expand'.format(gladtileid))
    print(
        '    10/15 - Expand seamask by ten pixels in non-sea GLAD water pixels...'
    )  # Another option would be to constrain the expansion to water under 10 m elevation, but that did not work reliably
    if not arcpy.Exists(outexpand):
        Con(
            (Raster(outnibble1) == 2),  # & (Raster(alos_rsp)<10),
            Expand(in_raster=Raster(outnibble1),
                   number_cells=10,
                   zone_values=3),
            Raster(outnibble1)).save(outexpand)

    # Fill in remaining water pixels under 0 m elevation and not already labeled with nearest water value, whether inland or sea
    outnibble2 = os.path.join(processgdb, '{}_2nibble'.format(gladtileid))
    print(
        '    11/15 - Fill in remaining water pixels under 0 m elevation and'
        ' not already labeled with nearest water value, whether inland or sea...'
    )
    if not arcpy.Exists(outnibble2):
        Nibble(in_raster=outexpand,
               in_mask_raster=outexpand,
               nibble_values='DATA_ONLY',
               nibble_nodata='PROCESS_NODATA',
               in_zone_raster=Raster(gladtile_mask) == 2).save(outnibble2)

    # Get regions
    outregion = os.path.join(processgdb, '{}_region'.format(gladtileid))
    print('    12/15 - Create regions out of contiguous areas...')
    if not arcpy.Exists(outregion):
        RegionGroup(in_raster=outnibble2,
                    number_neighbors='FOUR',
                    zone_connectivity='WITHIN',
                    excluded_value=1).save(outregion)

    # Remove sea mask regions under 2000 pixels. This avoids having artefact patches surrounded by land or inland water
    outregionclean = os.path.join(processgdb,
                                  '{}_regionclean'.format(gladtileid))
    print(
        '    13/15 - Removing sea mask regions under 2000 pixels. '
        'This avoids having artefact patches surrounded by land or inland water...'
    )
    if not arcpy.Exists(outregionclean):
        Con(
            IsNull(ExtractByAttributes(outregion,
                                       'LINK = 3 AND Count > 2000')),
            gladtile_mask, 3).save(outregionclean)

    # Fill in inland water zones entirely surrounded by sea water
    outeuc = os.path.join(processgdb, '{}_euc'.format(gladtileid))
    print(
        '    14/15 - Filling in inland water zones entirely surrounded by sea water...'
    )
    if not arcpy.Exists(outeuc):
        EucAllocation(
            in_source_data=InList(outregionclean, [0, 1, 3])).save(outeuc)

    outzone = os.path.join(processgdb, '{}_zone'.format(gladtileid))
    if not arcpy.Exists(outzone):
        ZonalStatistics(in_zone_data=outregion,
                        zone_field='Value',
                        in_value_raster=outeuc,
                        statistics_type='RANGE').save(outzone)

    print('    15/15 - Creating final formatted GLAD tile with sea mask...')
    # #1. To deal with remaining NoData (< 0 elevation)
    # #2. There are NoData pixels that remain following the second nibble in areas where formerly water pixels have become
    # out of contact with any other water pixel. This carries into outregion and outzone, and so must be dealt with
    # in the last step
    # #3 all other pixels that have been identified as sea water

    if not arcpy.Exists(outpath):
        Con(
            IsNull(Raster(outregionclean)),
            in_gladtile,  #1
            Con(
                IsNull(outzone),
                in_gladtile,  #2
                Con(
                    ((Raster(outzone) == 0) & (Raster(outeuc) == 3) &
                     (Raster(outregionclean) == 2)) | (  #3
                         Raster(outregionclean) == 3),
                    9,
                    in_gladtile))).save(outpath)

    arcpy.ClearEnvironment('extent')
    arcpy.ClearEnvironment('snapRaster')
    arcpy.ClearEnvironment('cellSize')
    arcpy.ClearEnvironment('mask')
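
# A hedged usage sketch for create_GLADseamask; every path below is a hypothetical
# placeholder, and alos_wgsextdict is assumed to be the {tile_path: extent} mapping
# built elsewhere with arcpy.Describe(tile).extent (see Example #25).
create_GLADseamask(
    in_gladtile=r"D:\glad\class99_19_40N_080W.tif",
    in_mod44w=r"D:\mod44w\mod44wQA_40N_080W.tif",
    in_alositerator=alos_wgsextdict,
    in_lakes=r"D:\hydrolakes\HydroLAKES_polys.shp",
    outpath=r"D:\glad_out\class99_19_40N_080W_seamask.tif",
    processgdb=r"D:\scratch\glad_process.gdb")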
Example #3

def preprocess_x(input_fc, input_type, id_field, o_j_field, input_network,
                 search_tolerance, search_criteria, search_query, travel_mode,
                 batch_size):

    # add field mappings
    if input_type == "origins_i":
        field_mappings = arcpy.FieldMappings()
        field_mappings.addFieldMap(field_map_x(input_fc, id_field, "i_id"))

    if input_type == "destinations_j":
        field_mappings = arcpy.FieldMappings()
        field_mappings.addFieldMap(field_map_x(input_fc, id_field, "j_id"))
        field_mappings.addFieldMap(field_map_x(input_fc, o_j_field, "o_j"))

    # convert to points if required
    describe_x = arcpy.Describe(input_fc)
    if describe_x.ShapeType != "Point":
        arcpy.AddMessage("Converting " + input_type + " to points...")
        arcpy.management.FeatureToPoint(input_fc,
                                        r"in_memory/" + input_type + "_point",
                                        "INSIDE")
        arcpy.conversion.FeatureClassToFeatureClass(
            r"in_memory/" + input_type + "_point",
            r"in_memory",
            input_type,
            field_mapping=field_mappings)
    else:
        arcpy.AddMessage(input_type + " is already points...")
        arcpy.conversion.FeatureClassToFeatureClass(
            input_fc, r"in_memory", input_type, field_mapping=field_mappings)

    # prepare origins/destinations output
    if input_type == "origins_i":
        arcpy.management.AddField(r"in_memory/" + input_type,
                                  "i_id_text",
                                  "TEXT",
                                  field_length=255)
        arcpy.management.CalculateField(r"in_memory/" + input_type,
                                        "i_id_text", "!i_id!", "PYTHON3")
        output_fc = batch_i_setup(r"in_memory/" + input_type, batch_size)
    else:
        arcpy.management.AddField(r"in_memory/" + input_type,
                                  "j_id_text",
                                  "TEXT",
                                  field_length=255)
        arcpy.management.CalculateField(r"in_memory/" + input_type,
                                        "j_id_text", "!j_id!", "PYTHON3")

        layer = arcpy.management.MakeFeatureLayer(r"in_memory/" + input_type,
                                                  input_type + "_view")
        arcpy.management.SelectLayerByAttribute(layer, "NEW_SELECTION",
                                                "o_j > 0")

        arcpy.conversion.FeatureClassToFeatureClass(layer, arcpy.env.workspace,
                                                    input_type)
        output_fc = os.path.join(arcpy.env.workspace, input_type)

    # calculate network locations
    calculate_nax_locations(output_fc, input_type, input_network,
                            search_tolerance, search_criteria, search_query,
                            travel_mode)
    arcpy.AddMessage("Finished pre-processing " + input_type)
    arcpy.management.Delete(r"in_memory")
    return output_fc
Ejemplo n.º 4
0
def get_field(dataset, field_name):
    """Takes a feature class and field name as input and
    returns the field object for the given field name."""
    return [f for f in arcpy.Describe(dataset).fields
            if f.name == field_name][0]
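
# A minimal usage sketch for get_field; the dataset path and field name are
# hypothetical placeholders.
width_field = get_field(r"C:\data\roads.gdb\roads", "WIDTH")
print(width_field.type, width_field.length)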
        mode = "CAR"
        mu = car_fast
        sigma = car_fast_dev
    if transport_mode == 4:
        mode = "BIKE"
        mu = bike
        sigma = bike_dev
    if transport_mode == 5:
        mode = "WALK"
        mu = walk
        sigma = walk_dev

    print mode, mu, sigma

    #Get parameters
    spatial_ref = arcpy.Describe(line).spatialReference
    mem_point = arcpy.CreateFeatureclass_management("in_memory", "mem_point", "POINT", "", "DISABLED", "DISABLED", line)
    arcpy.AddField_management(mem_point, "LineOID", "LONG")
    arcpy.AddField_management(mem_point, "Value", "FLOAT")
    arcpy.AddField_management(mem_point, "COUNTID", "DOUBLE")

    #Set fields
    search_fields = ["SHAPE@", "OID@"]
    insert_fields = ["SHAPE@", "LineOID", "Value", "COUNTID"]

    #Access line and point
    with arcpy.da.SearchCursor(line, (search_fields)) as search:
        with arcpy.da.InsertCursor(mem_point, (insert_fields)) as insert:
            for row in search:
                try:
Example #6
def Report_MODULE(PARAMS):
    """Report Generation"""
    start = time.clock()  # start the clock
    message("Generating report...")
    # Report_PARAMS = [outTbl, siteName, mxd, pdf]

    outTbl = PARAMS[0]
    siteNameFld = str(PARAMS[1])
    mxd = arcpy.mapping.MapDocument(PARAMS[2])
    # Set file name, ext, and remove file if it already exists
    pdf = PARAMS[3]
    if os.path.splitext(pdf)[1] == "":
        pdf += ".pdf"
    if os.path.exists(pdf):
        os.remove(pdf)
    # Set path for intermediate pdfs
    pdf_path = os.path.dirname(pdf) + os.sep

    # Create the file and append pages in the cursor loop
    pdfDoc = arcpy.mapping.PDFDocumentCreate(pdf)

    graphic = "GRAPHIC_ELEMENT"
    blackbox = arcpy.mapping.ListLayoutElements(mxd, graphic, "blackbox")[0]
    graybox = arcpy.mapping.ListLayoutElements(mxd, graphic, "graybox")[0]

    # dictionary for field, type, ltorgt, numDigits, allnos, & average
    fld_dct = {
        'field': [
            'FR_2_cnt', 'FR_3A_acr', 'FR_3A_boo', 'FR_3B_boo', 'FR_3B_sca',
            'FR_3D_boo', 'V_2_50', 'V_2_100', 'V_2_score', 'V_2_boo',
            'V_3A_boo', 'V_3B_scar', 'V_3C_comp', 'V_3D_boo', 'EE_2_cnt',
            'EE_3A_boo', 'EE_3B_sca', 'EE_3C_boo', 'EE_3D_boo', 'R_2_03',
            'R_2_03_tb', 'R_2_03_bb', 'R_2_05', 'R_2_6', 'R_3A_acr',
            'R_3B_sc06', 'R_3B_sc1', 'R_3B_sc12', 'R_3C_boo', 'R_3D_boo',
            'B_2_cnt', 'B_2_boo', 'B_3A_boo', 'B_3C_boo', 'B_3D_boo',
            'Vul_High', 'Conserved'
        ]
    }
    txt, dbl = 'Text', 'Double'
    fld_dct['type'] = [
        dbl, dbl, txt, txt, dbl, txt, dbl, dbl, dbl, txt, txt, dbl, dbl, txt,
        dbl, txt, dbl, txt, txt, dbl, txt, txt, dbl, dbl, dbl, dbl, dbl, dbl,
        txt, txt, dbl, txt, txt, txt, txt, dbl, dbl
    ]
    fld_dct['ltorgt'] = [
        'gt', 'gt', '', '', 'lt', '', 'gt', 'gt', 'gt', '', '', 'lt', 'gt', '',
        'gt', '', 'lt', '', '', 'gt', '', '', 'gt', 'gt', 'gt', 'lt', 'lt',
        'lt', '', '', 'gt', '', '', '', '', 'gt', 'gt'
    ]
    fld_dct['aveBool'] = [
        '', '', 'YES', 'NO', '', 'YES', '', '', '', 'YES', 'YES', '', '',
        'YES', '', 'YES', '', 'YES', 'YES', '', 'YES', 'YES', '', '', '', '',
        '', '', 'YES', 'YES', '', 'YES', 'YES', 'YES', 'YES', '', ''
    ]
    fld_dct['numDigits'] = [
        0, 2, 0, 0, 2, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
        0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 2, 2
    ]
    fld_dct['rowNum'] = [
        1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
        22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39
    ]
    fld_dct['allnos'] = [''] * 37
    fld_dct['average'] = [''] * 37

    # Make table layer from results table
    arcpy.MakeTableView_management(outTbl, "rptbview")
    desc = arcpy.Describe("rptbview")
    fieldInfo = desc.fieldInfo
    cnt_rows = str(arcpy.GetCount_management(outTbl))

    for field in fld_dct['field']:  # loop through fields
        idx = fld_dct['field'].index(field)
        # Check to see if field exists in results
        fldIndex = fieldInfo.findFieldByName(fld_dct['field'][idx])
        if fldIndex > 0:  # exists
            if fld_dct['type'][idx] == 'Text':  # narrow to yes/no
                # Copy text field to list by field index
                fld_dct[idx] = field_to_lst(outTbl, field)
                # Check if all 'NO'
                if fld_dct[idx].count("NO") == int(cnt_rows):
                    fld_dct['allnos'][idx] = 1
            else:  # type = Double
                vals = [x for x in field_to_lst(outTbl, field) if x is not None]
                if vals:  # if not all null
                    # Get average values
                    fld_dct['average'][idx] = mean(vals)

    start = exec_time(start, "loading data for report")

    i = 1
    pg_cnt = 1
    siterows = arcpy.SearchCursor(outTbl, "")  # may be slow, use "rptbview"?
    siterow = siterows.next()

    while siterow:

        oddeven = i % 2
        if oddeven == 1:
            column = 1
            siteText = "SiteLeftText"
            site_Name = "SiteLeftName"
        else:
            column = 2
            siteText = "SiteRightText"
            site_Name = "SiteRightName"
        TE = "TEXT_ELEMENT"
        siteText = arcpy.mapping.ListLayoutElements(mxd, TE, siteText)[0]
        siteText.text = "Site " + str(i)

        # Text element processing
        siteName = arcpy.mapping.ListLayoutElements(mxd, TE, site_Name)[0]
        fldNameValue = "siterow." + siteNameFld
        if fieldInfo.findFieldByName(siteNameFld) > 0:
            if eval(fldNameValue) == ' ':
                siteName.text = "No name"
            else:
                siteName.text = eval(fldNameValue)
        else:
            siteName.text = "No name"

        # loop through expected fields in fld_dct['field']
        for field in fld_dct['field']:
            idx = fld_dct['field'].index(field)
            # Check to see if field exists in results
            # if it doesn't color = black
            if fldExists(field, column, fld_dct['rowNum'][idx], fieldInfo,
                         blackbox):
                fldVal = "siterow." + field
                if fld_dct['type'][idx] == 'Double':  # is numeric
                    proctext(eval(fldVal), "Num", fld_dct['numDigits'][idx],
                             fld_dct['ltorgt'][idx], fld_dct['average'][idx],
                             column, fld_dct['rowNum'][idx],
                             fld_dct['allnos'][idx], mxd)
                else:  # is boolean
                    proctext(eval(fldVal), "Boolean", 0, "",
                             fld_dct['aveBool'][idx], column,
                             fld_dct['rowNum'][idx], fld_dct['allnos'][idx],
                             mxd)
        if oddeven == 0:
            exportReport(pdfDoc, pdf_path, pg_cnt, mxd)
            start = exec_time(start, "Page " + str(pg_cnt) + " generation")
            pg_cnt += 1

        i += 1
        siterow = siterows.next()

    # If you finish a layer with an odd number of records,
    # last record was not added to the pdf.
    if oddeven == 1:
        # Blank out right side
        siteText = arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT",
                                                    "SiteRightText")[0]
        siteText.text = " "
        # Fill right side with gray empty boxes
        for i in range(39):
            # Not set up to process the Social Equity or Reliability scores
            newBox = graybox.clone("_clone")
            boxpos(newBox, 2, i + 1)
        exportReport(pdfDoc, pdf_path, pg_cnt, mxd)

    del siterow
    del siterows

    arcpy.Delete_management("rptbview", "")

    pdfDoc.saveAndClose()

    mxd_result = os.path.splitext(pdf)[0] + ".mxd"
    if arcpy.Exists(mxd_result):
        arcpy.Delete_management(mxd_result)

    mxd.saveACopy(mxd_result)  # save last page just in case

    del mxd
    del pdfDoc
    mxd_name = os.path.basename(mxd_result)
    message("Created PDF Report: {} and {}".format(pdf, mxd_name))
Example #7
custum_trans = "dhdn3_utm_BW"

#Text file
text = os.path.join(input_dir, "zusammenfassung_shp.txt")
outFile = open(text, "w")

try:
    # Walk through the base directory and all of its subdirectories
    for (path, dirs, files) in os.walk(input_dir):

        # Loop over files
        for shp in files:

            # Only consider shapefiles that are also stored in DHDN3
            if shp.endswith(".shp") and (
                    arcpy.Describe(path + "\\" + shp).SpatialReference
            ).name == "DHDN_3_Degree_Gauss_Zone_3":

                print "Transformiere " + shp + " unter: " + path
                shp_utm = shp.replace(".", "_utm.")

                # ArcGIS projection with the transformation method specific to BW (Baden-Wuerttemberg)
                arcpy.Project_management(
                    path + "/" + shp,
                    path + "/" + shp_utm,
                    out_coor_system=
                    "PROJCS['ETRS_1989_UTM_Zone_32N',GEOGCS['GCS_ETRS_1989',DATUM['D_ETRS_1989',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',9.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]",
                    transform_method=custum_trans,
                    in_coor_system=
                    "PROJCS['DHDN_3_Degree_Gauss_Zone_3',GEOGCS['GCS_Deutsches_Hauptdreiecksnetz',DATUM['D_Deutsches_Hauptdreiecksnetz',SPHEROID['Bessel_1841',6377397.155,299.1528128]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Gauss_Kruger'],PARAMETER['False_Easting',3500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',9.0],PARAMETER['Scale_Factor',1.0],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]"
                )
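
# Valid transformation names (such as the custom "dhdn3_utm_BW" above, if it is
# installed) can be listed with arcpy.ListTransformations; the EPSG codes below
# correspond to the two WKT strings in the Project_management call.
sr_in = arcpy.SpatialReference(31467)   # DHDN / 3-degree Gauss-Kruger zone 3
sr_out = arcpy.SpatialReference(25832)  # ETRS89 / UTM zone 32N
for t in arcpy.ListTransformations(sr_in, sr_out):
    print(t)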
Example #8
import arcpy
from arcpy import env
env.workspace = r"C:\Users\Sheol\Desktop\2013-14\2015-2016\ProgramGIS\ArcPy\Lesson4"
rasterlist = arcpy.ListRasters()
for raster in rasterlist:
    desc = arcpy.Describe(raster)
    print raster + " is <<<" + desc.dataType + ">>> raster type"
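
# ListRasters also accepts an optional wildcard and raster-type filter, e.g. to
# restrict the loop above to TIFFs:
for raster in arcpy.ListRasters("*", "TIF"):
    print(raster)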
Example #9
    def source(self, s):
        """
        Validates source; can apply a fieldmap to an FC, also will copy locally if an external dataset
        :param s:
        :return:
        """
        valid = True

        if type(s) is list:
            for path in s:
                if not arcpy.Exists(path):
                    valid = False
                    break

        else:
            if not arcpy.Exists(s):
                valid = False

        if not valid:
            logging.warning("Cannot find source {0!s}".format(s))

        # If there's a field map, use it as an input to the FeatureClassToFeatureClass tool and copy the data locally
        if self.field_map:
            s = field_map.ini_fieldmap_to_fc(s, self.name, self.field_map,
                                             self.scratch_workspace)

        elif type(s) is list:
            # If we're dealing with a list (currently only GLAD and Terra-I), we can skip this validation
            pass

        elif r"projects/wri-datalab" in s:
            pass

        # If there's not a field map, need to figure out what type of data source it is, and if it's local or not
        else:
            # This could be a folder, a gdb/mdb, a featuredataset, or an SDE database
            source_dirname = os.path.dirname(s)

            # we want to simply determine if this is local/not local so we can copy the datasource
            # first determine if our source dataset is within a featuredataset

            desc = arcpy.Describe(source_dirname)
            if hasattr(desc,
                       "datasetType") and desc.datasetType == 'FeatureDataset':
                source_dirname = os.path.dirname(source_dirname)

            # Test if it's an SDE database
            try:
                server_address = arcpy.Describe(
                    source_dirname).connectionProperties.server

                # If source SDE is localhost, don't need to worry about copying anywhere
                if server_address == 'localhost':
                    pass
                else:
                    s = util.copy_to_scratch_workspace(self.source,
                                                       self.scratch_workspace)

            # Otherwise, just look at the drive letter to determine if it's local or not
            except AttributeError:

                # os.path.splitdrive returns the drive as "X:"; keep only the letter, lowercased
                drive = os.path.splitdrive(s)[0][0].lower()

                if drive in util.list_network_drives():
                    s = util.copy_to_scratch_workspace(self.source,
                                                       self.scratch_workspace)

                elif drive not in ['c', 'd']:
                    logging.info(
                        "Are you sure the source dataset is local to this machine? \
                    It's not on drives C or D . . .")

        self._source = s
Example #10
def writeDocument(sourceDataset, targetDataset, xmlFileName):

    desc = arcpy.Describe(sourceDataset)
    descT = arcpy.Describe(targetDataset)
    sourcePath = getLayerPath(desc)
    targetPath = getLayerPath(descT)

    ## Added May 2016: warn the user if capabilities are not correct; exit if this is not a valid layer
    if not dla.checkServiceCapabilities(sourcePath, False):
        dla.addMessage(
            sourceDataset +
            ' Does not appear to be a feature service layer, exiting. Check that you selected a layer not a service'
        )
        return False
    if not dla.checkServiceCapabilities(targetPath, False):
        dla.addMessage(
            targetDataset +
            ' Does not appear to be a feature service layer, exiting. Check that you selected a layer not a service'
        )
        return False

    xmlDoc = Document()
    root = xmlDoc.createElement('SourceTargetMatrix')
    xmlDoc.appendChild(root)
    root.setAttribute("version", '1.1')
    root.setAttribute("xmlns:esri", 'http://www.esri.com')

    dataset = xmlDoc.createElement("Datasets")
    root.appendChild(dataset)
    setSourceTarget(dataset, xmlDoc, "Source", sourcePath)
    setSourceTarget(dataset, xmlDoc, "Target", targetPath)

    setSpatialReference(dataset, xmlDoc, desc, "Source")
    setSpatialReference(dataset, xmlDoc, descT, "Target")

    setSourceTarget(dataset, xmlDoc, "ReplaceBy", "")

    fieldroot = xmlDoc.createElement("Fields")
    root.appendChild(fieldroot)

    fields = getFields(descT, targetDataset)
    sourceFields = getFields(desc, sourceDataset)
    sourceNames = [
        field.name[field.name.rfind(".") + 1:] for field in sourceFields
    ]
    upperNames = [nm.upper() for nm in sourceNames]

    for field in fields:

        fNode = xmlDoc.createElement("Field")
        fieldroot.appendChild(fNode)
        fieldName = field.name[field.name.rfind(".") + 1:]
        matchSourceFields(xmlDoc, fNode, field, fieldName, sourceNames,
                          upperNames)

    # write the source field values
    setSourceFields(root, xmlDoc, sourceFields)
    setTargetFields(root, xmlDoc, fields)
    # Should add a template section for value maps, maybe write domains...
    # could try to preset field mapping and domain mapping...

    # add some data to the document
    writeDataSample(xmlDoc, root, sourceNames, sourceDataset, 10)
    # write it out
    xmlDoc.writexml(open(xmlFileName, 'w'),
                    indent="  ",
                    addindent="  ",
                    newl='\n')
    xmlDoc.unlink()
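
# A hedged usage sketch for writeDocument; both layer URLs are hypothetical
# feature service layers (as required by the capability checks above).
writeDocument(r"https://services.myorg.com/arcgis/rest/services/Parcels/FeatureServer/0",
              r"https://services.myorg.com/arcgis/rest/services/ParcelsStaging/FeatureServer/0",
              r"C:\dla\SourceTarget.xml")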
Example #11
    currentOverwriteOutput = env.overwriteOutput
    env.overwriteOutput = True
    GCS_WGS_1984 = arcpy.SpatialReference(r"WGS 1984")
    webMercator = arcpy.SpatialReference(
        r"WGS 1984 Web Mercator (Auxiliary Sphere)")
    scratch = env.scratchWorkspace

    #Project doesn't like in_memory featureclasses, copy to scratch
    copyInFeatures = os.path.join(scratch, "copyInFeatures")
    arcpy.CopyFeatures_management(inFeature, copyInFeatures)
    deleteme.append(copyInFeatures)

    prjInFeature = os.path.join(scratch, "prjInFeature")
    srInputPoints = arcpy.Describe(copyInFeatures).spatialReference
    arcpy.AddMessage("Projecting input points to Web Mercator ...")
    arcpy.Project_management(copyInFeatures, prjInFeature, webMercator)
    deleteme.append(prjInFeature)
    tempFans = os.path.join(scratch, "tempFans")

    if traversal < 360:

        # put bearing into 0 - 360 range
        geoBearing = math.fmod(geoBearing, 360.0)
        if debug:
            arcpy.AddMessage("geoBearing: " + str(geoBearing))
        # convert from geographic angles (zero north, clockwise) to arithmetic (zero east, counterclockwise)
        arithmeticBearing = Geo2Arithmetic(geoBearing)
        if debug:
            arcpy.AddMessage("arithmeticBearing: " + str(arithmeticBearing))
Example #12
try:
    br = int(br)
except ValueError:
    arcpy.AddError(
        "The script could not read your balance ratio because it is not a number"
    )
    quit()
print(br)

try:
    minalt = int(minimum.split('.')[0])
except ValueError:
    minalt = int(minimum.split(',')[0])
minalt = minalt - interval

desc = arcpy.Describe(dem)  # Get information from the DEM
nombre = os.path.join(files,
                      desc.baseName + ".txt")  # Get the name from the DEM

# Create a .txt file and populate it with the data from the Surface volume
# calculation, given the thresholds and the interval
with open(nombre, "w") as f:
    try:
        for plane in range(minalt, maxalt, interval):
            arcpy.SurfaceVolume_3d(dem, "", "BELOW", plane)
            # Echo the four tool messages to the text file
            for msg_idx in range(4):
                print(arcpy.GetMessage(msg_idx), file=f)
    except Exception as e:
        print(e)
Example #13

addMsgAndPrint(versionString)

#test for valid input:
# inFc exists and has item PlotAtScale
if not arcpy.Exists(inFc):
    forceExit()
fields = arcpy.ListFields(inFc)
fieldNames = [field.name for field in fields]
if 'PlotAtScale' not in fieldNames:
    arcpy.AddField_management(inFc, 'PlotAtScale', 'FLOAT')
    addMsgAndPrint('Adding field PlotAtScale to {}'.format(inFc))

gdb = os.path.dirname(inFc)
if arcpy.Describe(gdb).dataType == 'FeatureDataset':
    gdb = os.path.dirname(gdb)

if os.path.basename(inFc) == 'OrientationPoints':
    addMsgAndPrint('Populating OrientationPointsDicts')
    makeDictsOP(inFc)
    isOP = True
else:
    isOP = False

outTable = gdb + '/xxxPlotAtScales'
testAndDelete(outTable)
mapUnits = 'meters'
minSeparationMapUnits = minSeparation_mm / 1000.0
searchRadius = minSeparationMapUnits * maxPlotAtScale
if 'meter' not in arcpy.Describe(inFc).spatialReference.linearUnitName.lower():
Example #14
def gen_cwd_back(core_list, climate_lyr, resist_lyr, core_lyr):
    """Generate CWD and back rasters using r.walk in GRASS"""
    slope_factor = "1"
    walk_coeff_flat = "1"
    walk_coeff_uphill = str(cc_env.climate_cost)
    walk_coeff_downhill = str(cc_env.climate_cost * -1)
    walk_coeff = (walk_coeff_flat + "," + walk_coeff_uphill + "," +
                  walk_coeff_downhill + "," + walk_coeff_downhill)

    focal_core_rast = "focal_core_rast"
    gcwd = "gcwd"
    gback = "gback"
    gbackrc = "gbackrc"
    core_points = "corepoints"
    no_cores = str(len(core_list))

    # Map from directional degree output from GRASS to Arc's 1 to 8 directions
    # format. See r.walk source code and ArcGIS's 'Understanding cost distance
    # analysis' help page.
    rc_rules = "180=5\n225=4\n270=3\n315=2\n360=1\n45=8\n90=7\n135=6"

    try:
        for position, core_no in enumerate(core_list):
            core_no_txt = str(core_no)
            lm_util.gprint("Generating CWD and back rasters for Core " +
                           core_no_txt + " (" + str(position + 1) + "/" +
                           no_cores + ")")

            # Pull out focal core for cwd analysis
            write_grass_cmd("r.reclass",
                            input=core_lyr,
                            output=focal_core_rast,
                            overwrite=True,
                            rules="-",
                            stdin=core_no_txt + '=' + core_no_txt)

            # Converting raster core to point feature
            run_grass_cmd("r.to.vect",
                          flags="z",
                          input=focal_core_rast,
                          output=core_points,
                          type="point")

            # Running r.walk to create CWD and back raster
            run_grass_cmd("r.walk",
                          elevation=climate_lyr,
                          friction=resist_lyr,
                          output=gcwd,
                          outdir=gback,
                          start_points=core_points,
                          walk_coeff=walk_coeff,
                          slope_factor=slope_factor)

            # Reclassify back raster directional degree output to ArcGIS format
            write_grass_cmd("r.reclass",
                            input=gback,
                            output=gbackrc,
                            rules="-",
                            stdin=rc_rules)

            # Get spatial reference for defining ARCINFO raster projections
            desc_data = arcpy.Describe(cc_env.prj_core_rast)
            spatial_ref = desc_data.spatialReference

            # Get cwd path (e.g. ..\datapass\cwd\cw\cwd_3)
            cwd_path = lm_util.get_cwd_path(core_no)

            def create_arcgrid(rtype, grass_grid):
                """Export GRASS raster to ASCII grid and then to ARCINFO grid
                """
                ascii_grid = os.path.join(cc_env.scratch_dir,
                                          rtype + core_no_txt + ".asc")
                arc_grid = cwd_path.replace("cwd_", rtype)
                run_grass_cmd("r.out.gdal",
                              input=grass_grid,
                              output=ascii_grid,
                              format="AAIGrid")
                arcpy.CopyRaster_management(ascii_grid, arc_grid)
                arcpy.DefineProjection_management(arc_grid, spatial_ref)
                cc_util.arc_delete(ascii_grid)

            create_arcgrid("cwd_", gcwd)  # Export CWD raster
            create_arcgrid("back_", gbackrc)  # Export reclassified back raster
    except Exception:
        raise
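
# A hedged usage sketch; the layer names are assumed to be rasters already
# imported into the active GRASS mapset by the surrounding toolkit code.
gen_cwd_back([1, 2, 3], "climate_lyr", "resist_lyr", "core_lyr")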
Example #15
    def execute(self, parameters, messages):
        """The source code of the tool."""
        visibility_lines = parameters[0].valueAsText
        id_observer_field = parameters[1].valueAsText
        observer_offset_field = parameters[2].valueAsText
        id_target_field = parameters[3].valueAsText
        target_offset_field = parameters[4].valueAsText
        target_x_field = parameters[5].valueAsText
        target_y_field = parameters[6].valueAsText
        horizons = parameters[7].valueAsText

        useCurvatures = parameters[8].value
        refCoeff = parameters[9].value

        workspace = fv.getPath(horizons)

        file_name = horizons.replace(workspace + "\\", "")

        arcpy.CreateFeatureclass_management(
            workspace,
            file_name,
            "POINT",
            has_z="ENABLED",
            spatial_reference=arcpy.Describe(
                visibility_lines).spatialReference)

        fieldsNew = [(id_observer_field, id_observer_field, "SHORT"),
                     (id_target_field, id_target_field, "SHORT"),
                     ("Elevation", "Elevation", "DOUBLE"),
                     ("Hide_Tar", "Hides target point", "SHORT"),
                     ("ViewAngle", "Viewing angle", "DOUBLE"),
                     ("AngleDiff_Tar", "Viewing angle difference to target",
                      "DOUBLE"),
                     ("Dist_Observ", "Distance to observer", "DOUBLE"),
                     ("Behind_Tar", "Behind target", "SHORT"),
                     ("OID_LoS", "OID_LoS", "SHORT")]

        fieldsNames = [row[0] for row in fieldsNew]

        functions_arcmap.prepareDataColumns(horizons, fieldsNew)

        arcpy.AddMessage("\t Determination of horizons started...")

        insert_cursor = arcpy.da.InsertCursor(horizons,
                                              ["SHAPE@"] + fieldsNames)

        number_of_LoS = int(
            arcpy.GetCount_management(visibility_lines).getOutput(0))
        arcpy.SetProgressor(
            "step", "Analyzing " + str(number_of_LoS) + " lines of sight...",
            0, number_of_LoS, 1)

        with arcpy.da.SearchCursor(visibility_lines, [
                "OBJECTID", "SHAPE@", id_observer_field, id_target_field,
                observer_offset_field, target_offset_field, target_x_field,
                target_y_field
        ]) as cursor:
            for row in cursor:

                target_x = row[6]
                target_y = row[7]
                target_offset = row[5]

                points = []
                poi = visibility.WKTtoPoints(row[1].WKT)

                # get coordinates of first point for distance calculation
                start_point_x = float(poi[0].split(" ")[0])
                start_point_y = float(poi[0].split(" ")[1])
                observer_elev = float(poi[0].split(" ")[2]) + float(row[4])

                target_distance = visibility.distance(target_x, target_y,
                                                      start_point_x,
                                                      start_point_y)
                sampling_distance = visibility.distance(
                    float(poi[1].split(" ")[0]), float(poi[1].split(" ")[1]),
                    start_point_x, start_point_y)
                target_index = -1

                # for every point do this
                for i in range(0, len(poi)):
                    parts = poi[i].split(" ")
                    x = float(parts[0])
                    y = float(parts[1])
                    z = float(parts[2])
                    dist = visibility.distance(x, y, start_point_x,
                                               start_point_y)

                    if useCurvatures:
                        z = visibility.curvatureCorrections(z, dist, refCoeff)

                    if i == 0:
                        points.append([x, y, 0, observer_elev, -90])
                    elif math.fabs(target_distance -
                                   dist) < sampling_distance / 2:
                        points.append([
                            x, y, dist, z + target_offset,
                            visibility.angle(dist,
                                             z + target_offset - observer_elev)
                        ])
                        target_index = i
                    else:
                        points.append([
                            x, y, dist, z,
                            visibility.angle(dist, z - observer_elev)
                        ])

                results = visibility.findGlobalHorizons(points, target_index)

                point = arcpy.Point()

                for i in range(0, len(results)):
                    hor_type = 0
                    point.X = results[i][0]
                    point.Y = results[i][1]
                    point.Z = results[i][3]
                    ptGeometry = arcpy.PointGeometry(point)

                    insert_cursor.insertRow([
                        ptGeometry, row[2], row[3], results[i][3],
                        results[i][5], results[i][4], results[i][6],
                        results[i][2], results[i][7], row[0]
                    ])

                arcpy.SetProgressorPosition()

        arcpy.ResetProgressor()
        arcpy.AddMessage("\t Determination of horizons sucessfuly ended.")

        functions_arcmap.addLayer(horizons)
        return
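
# visibility.curvatureCorrections is project code not shown here. A common
# formulation of the Earth-curvature-plus-refraction correction that it
# presumably resembles (an assumption, not the project's exact code):
def curvature_correction_sketch(z, dist, ref_coeff=0.13, earth_radius=6371000.0):
    """Lower elevation z (m) by the net curvature-minus-refraction drop at distance dist (m)."""
    return z - (1.0 - ref_coeff) * dist ** 2 / (2.0 * earth_radius)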
Example #16

            pass
    if arcpy.Exists(clu_buff_wgs):
        try:
            arcpy.Delete_management(clu_buff_wgs)
        except:
            pass

    # Re-project the AOI to WGS84 Geographic (EPSG WKID: 4326)
    arcpy.AddMessage("\nConverting CLU Buffer to WGS 1984...")
    wgs_CS = arcpy.SpatialReference(4326)
    arcpy.Project_management(clu_buffer, clu_buff_wgs, wgs_CS)
    arcpy.AddMessage("Done!\n")

    # Use the WGS 1984 AOI to clip/extract the DEM from the service
    arcpy.AddMessage("\nDownloading Data...")
    aoi_ext = arcpy.Describe(clu_buff_wgs).extent
    xMin = aoi_ext.XMin
    yMin = aoi_ext.YMin
    xMax = aoi_ext.XMax
    yMax = aoi_ext.YMax
    clip_ext = str(xMin) + " " + str(yMin) + " " + str(xMax) + " " + str(yMax)
    arcpy.Clip_management(source_Service, clip_ext, WGS84_DEM, "", "", "", "NO_MAINTAIN_EXTENT")
    arcpy.AddMessage("Done!\n")

    # Project the WGS 1984 DEM to the coordinate system of the input CLU layer
    arcpy.AddMessage("\nProjecting data to match input CLU...\n")
    final_CS = arcpy.Describe(source_clu).spatialReference.factoryCode
    cellsize = 3
    arcpy.ProjectRaster_management(WGS84_DEM, final_DEM, final_CS, "BILINEAR", cellsize)
    arcpy.AddMessage("Done!\n")
Example #17
    arcpy.CreateFeatureclass_management(
        out_path=os.path.dirname(splitedBuffer),
        out_name=os.path.basename(splitedBuffer),
        geometry_type='POLYGON',
        spatial_reference=spatialReference)
    if perplineFC:
        if arcpy.Exists(perplineFC):
            arcpy.Delete_management(perplineFC)
        arcpy.CreateFeatureclass_management(
            out_path=os.path.dirname(perplineFC),
            out_name=os.path.basename(perplineFC),
            geometry_type='POLYLINE',
            spatial_reference=spatialReference)


desc = arcpy.Describe(linefc)
sr = desc.spatialReference
arcpy.AddMessage('Spatial Reference : ' + sr.name)

# Check the coordinate system type; only a projected coordinate system will work
if sr.type == "Projected":
    arcpy.AddMessage(' Starting ...')
    createSplittedbufferfc(splitedbuffer, perplinefc, sr)
    with arcpy.da.SearchCursor(linefc, ['SHAPE@', 'OBJECTID']) as linecursor:
        for linerow in linecursor:
            Position = 0
            perplinelist = []
            bufferPolygon = linerow[0].buffer(float(buffersize))
            arcpy.CopyFeatures_management(bufferPolygon,
                                          'in_memory\\bufferpolygon')
            arcpy.AddMessage(linerow[1])
Example #18
in_should_replace_codes_with_values = arcpy.GetParameter(2)
in_should_update_field_aliases_on_output = arcpy.GetParameter(3)
in_api_has_alias_information = arcpy.GetParameter(4)
in_codelists_file = arcpy.GetParameter(5)

in_geo_table = arcpy.GetParameter(6)
in_geo_join_field = arcpy.GetParameterAsText(7)

global geom_cache
geom_cache = {}
global geo_fl
geo_fl = 'geo_fl'
arcpy.MakeFeatureLayer_management(in_geo_table, geo_fl)

in_geo_fl_desc = arcpy.Describe(geo_fl)
in_geo_field_info = in_geo_fl_desc.fieldInfo

in_output_workspace = arcpy.GetParameterAsText(8)
in_use_field_value_for_outputname = arcpy.GetParameter(9)
in_sdmx_field_for_outputname = arcpy.GetParameterAsText(10)
in_output_filename = arcpy.ValidateTableName(arcpy.GetParameterAsText(11))

# in_save_temp_files = arcpy.GetParameter(13)

arcpy.SetProgressor('default', 'Creating working directory ...')
# create working directory
wd_res = create_working_directory()
full_job_path = wd_res[0]
now_ts = wd_res[1]
    stationLyr = "stations"
    stationBuffer = watershedFD + os.sep + "stationsBuffer"
    stationElev = watershedGDB_path + os.sep + "stationElev"
    
    # --------------------------------------------------------------------- Check station interval
    # Exit if the interval is not set properly
    try:
        float(interval)
    except (ValueError, TypeError):
        AddMsgAndPrint("\nStation Interval was invalid; Cannot set interpolation interval. Exiting...\n",2)
        sys.exit()
        
    interval = float(interval)
    
    # --------------------------------------------------------------------- Check DEM Coordinate System and Linear Units
    desc = arcpy.Describe(inputDEM)
    sr = desc.SpatialReference
    units = sr.LinearUnitName
    cellSize = desc.MeanCellWidth

    try:
        if interval < float(cellSize):
            AddMsgAndPrint("\nThe interval specified is less than the DEM cell size. Please re-run with a higher interval value. Exiting...\n",2)
            sys.exit()
    except ValueError:
        # a bare except here would also have swallowed the SystemExit raised just above
        AddMsgAndPrint("\nThere may be an issue with the DEM cell size. Exiting...\n",2)
        sys.exit()
    
    if units == "Meter":
        units = "Meters"
    elif units == "Foot":
Example #20
    def featurechecks(self, input_file):
        arcpy = self._arcpy
        desc = arcpy.Describe(input_file)

        if desc.shapeType != 'Polygon':
            print 'ERROR: Input feature is not a polygon'
Example #21

#todo: import modules
import arcpy, os, string

print "***\nCalculate Raster Extents\n***"

#parameters
folder = r"d:\tmp"
fn = folder + os.path.sep + "rCatalog.csv"

#processing Variables
arcpy.env.workspace = folder

if os.path.exists(fn):
    os.remove(fn)
fs = open(fn, "w")
fs.write("IMAGE,XMIN,YMIN,XMAX,YMAX\n")

rsts = arcpy.ListRasters("*.jpg")
for rst in rsts:
    ##    newfn = string.replace(rst, ".jpg", ".tif")
    ##    arcpy.CopyRaster_management(rst, newfn)
    ext = arcpy.Describe(rst).extent
    # Order matches the CSV header: XMIN, YMIN, XMAX, YMAX
    extDetails = folder + os.path.sep + rst + "," + str(ext.XMin) + "," + str(
        ext.YMin) + "," + str(ext.XMax) + "," + str(ext.YMax) + "\n"
    fs.write(extDetails)

fs.close()

print "completed"
Example #22
buildTopology = True

#######################################################################################################################
start = datetimePrint()[3]
timeDateString = datetimePrint()[0]  # Gets time and date to add to export
print("Current Run: " + timeDateString)

# Create a new GDB
# Export names and proj
if writeToMaster:
    exportGDBName = exportGDBPrefix + "MASTER"
else:
    exportGDBName = exportGDBPrefix + timeDateString

spatialRef = arcpy.Describe(mapAreasFCOrig).spatialReference  # assumes mapAreaFC and mapDBs have the same projection

exportGDBFullPath = exportFolder + "\\" + exportGDBName + ".gdb"
if arcpy.Exists(exportGDBFullPath):
    print("GDB already exists")
    arcpy.env.workspace = exportGDBFullPath
    datasets = arcpy.ListDatasets()
    for dataset in datasets:
        checkAndDelete(dataset)
else:
    print("Creating a new GDB at: " + exportGDBFullPath)
    arcpy.CreateFileGDB_management(out_folder_path=exportFolder,
                                   out_name=exportGDBName,
                                   out_version="CURRENT")
Example #23

# Script Name: PeukerDouglas
#
# Created By:  David Tarboton
# Date:        9/29/11

# Import ArcPy site-package and os modules
import arcpy 
import os
import subprocess

# Inputs
inlyr = arcpy.GetParameterAsText(0)
desc = arcpy.Describe(inlyr)
fel = str(desc.catalogPath)
arcpy.AddMessage("\nInput Elevation file: " + fel)

centerweight = arcpy.GetParameterAsText(1)
arcpy.AddMessage("\nCenter Smoothing Weight: " + centerweight)

sideweight = arcpy.GetParameterAsText(2)
arcpy.AddMessage("\nSide Smoothing Weight: " + sideweight)

diagonalweight = arcpy.GetParameterAsText(3)
arcpy.AddMessage("\nDiagonal Smoothing Weight: " + diagonalweight)

# Input Number of Processes
inputProc = arcpy.GetParameterAsText(4)
arcpy.AddMessage("\nInput Number of Processes: " + inputProc)

# Output
ss = arcpy.GetParameterAsText(5)
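
# The snippet ends before the TauDEM call. Wrappers like this one typically
# assemble the PeukerDouglas command line and run it via subprocess (flag names
# per TauDEM's command-line documentation; treat the exact string as an assumption):
cmd = ('mpiexec -n ' + inputProc + ' PeukerDouglas -fel "' + fel + '" -ss "' + ss +
       '" -par ' + centerweight + ' ' + sideweight + ' ' + diagonalweight)
arcpy.AddMessage("\nCommand Line: " + cmd)
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
for line in process.stdout:
    arcpy.AddMessage(line)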
Example #24
def enable_copy_attachments(input_fc, output_fc):
    # Check if the output feature class has attachments table 
    output_attachment_table = output_fc + '__ATTACH'
    if arcpy.Exists(output_attachment_table):
        arcpy.AddMessage("Output feature class layer already includes attachments.")
        return

    # Locate the input feature class's attachment table; fall back to the path
    # derived from Describe if the table is not directly alongside the input
    input_attachment_table = input_fc + '__ATTACH'
    if not arcpy.Exists(input_attachment_table):
        desc = arcpy.Describe(input_fc)
        input_attachment_table = desc.Path.split('.')[0] + '.gdb\\' + desc.Name + '__ATTACH'

    # Proceed only if an attachment table was found (the original skipped the copy
    # when only the fallback path existed)
    if arcpy.Exists(input_attachment_table):

        # Enable Attachments
        arcpy.AddMessage("Enabling Attachments")
        arcpy.EnableAttachments_management(output_fc)
        arcpy.AddMessage("Enabled Attachments")

        # Copy Attachments from Input feature class to Temp feature class.
        arcpy.AddMessage("Copying Attachments..")

        outputTable = output_fc + '__ATTACH'

        try:
            # Check if the input feature class was related to the attachment tables via the ObjectID field.
            input_table_desc = arcpy.Describe(input_attachment_table)
            field_rel_objectID = [field for field in input_table_desc.fields if field.name.lower() == 'rel_objectid']

            # If the input attachment table has REL_OBJECTID field then remap GUID fields between input and output attachment table.
            if field_rel_objectID:
                field_rel_globalID = [field for field in input_table_desc.fields if field.type.lower() == 'guid']
                if field_rel_globalID:
                    output_field = field_rel_globalID[0]
                else:
                    arcpy.AddError("Can't copy attachments...")
                    return

                output_table_field_mappings = arcpy.FieldMappings()
                output_table_field_mappings.addTable(outputTable)

                input_table_field_mappings = arcpy.FieldMappings()
                input_table_field_mappings.addTable(input_attachment_table)

                output_table_globalID = [field for field in output_table_field_mappings.fields if field.type.lower() == 'guid'][0]
                field_index = output_table_field_mappings.findFieldMapIndex(output_table_globalID.name)
                fmap = output_table_field_mappings.fieldMappings[field_index]
                output_table_field_mappings.removeFieldMap(field_index)
                fmap.addInputField(input_attachment_table,output_field.name)
                output_table_field_mappings.addFieldMap(fmap)

                for input_field_map in input_table_field_mappings.fieldMappings:
                    output_table_field_mappings.addFieldMap(input_field_map)

                arcpy.Append_management(input_attachment_table, outputTable, 'NO_TEST', output_table_field_mappings)
            else:
                arcpy.Append_management(input_attachment_table, outputTable)
            arcpy.AddMessage("Copied Attachments..")
        except Exception as e:
            arcpy.AddError(e)
    else:
        arcpy.AddMessage("Unable to locate the attachment table for the input feature class.")
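
# A hedged usage sketch; both feature class paths are hypothetical placeholders.
enable_copy_attachments(r"C:\data\input.gdb\inspections",
                        r"C:\data\output.gdb\inspections_copy")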
Example #25
gladresgdb = os.path.join(resdir, 'glad.gdb')
pathcheckcreate(gladresgdb)
pathcheckcreate(alosresgdb)
pathcheckcreate(mod44w_resgdb)

#GLAD values mean the following
#0: NoData, 1: Land, 2: Permanent water, 3: Stable seasonal, 4: Water gain, 5: Water loss
#6: Dry period, 7: Wet period, 8: High frequency, 10: Probable land, 11: Probable water, 12: Sparse data - exclude

########################################################################################################################
#Get list of tiles for raw GLAD tiles, mod44w (MODIS sea mask) and ALOS (DEM)
print('Getting tile lists for GLAD, MODIS, and ALOS...')
rawtilelist = getfilelist(glad_dir, 'class99_19.*[.]tif$')
alos_tilelist = getfilelist(alos_dir,
                            'ALPSMLC30_[NS][0-9]{3}[WE][0-9]{3}_DSM.tif$')
alos_wgsextdict = {i: arcpy.Describe(i).extent for i in alos_tilelist}

mod44w_tilelist = getfilelist(mod44w_outdir, '.*[.]hdf$')
#Rename any MODIS files whose base name contains periods, replacing the periods with underscores
if any([
        re.search('[.]',
                  os.path.splitext(os.path.split(i)[1])[0])
        for i in mod44w_tilelist
]):
    for modtile in mod44w_tilelist:
        modtile_renamed = '{}.hdf'.format(
            re.sub('[.]', '_',
                   os.path.splitext(os.path.split(modtile)[1])[0]))
        os.rename(modtile, os.path.join(mod44w_outdir, modtile_renamed))

Ejemplo n.º 26
0
Archivo: MD.py Proyecto: dkav/omniscape
def topoclimate(options):
    try:
        theStart = datetime.datetime.now()  
        # TILING
        options['tileNum'] = tileNum
        options['numTiles'] = numTiles

        options = set_options_and_dirs(options)   
        copy_this_file(options)
        
        arcpy.env.scratchWorkspace = options['scratchDir']
        arcpy.env.workspace = options['scratchDir']  # env properties are case-sensitive: 'workspace', not 'Workspace'
        os.environ["TEMP"] = options['scratchDir']
        os.environ["TMP"] = options['scratchDir']    
        
        

        # Set raster paths and export to ascii if needed. FIXME: don't really need ascii, just convenient for header code for now
        c2Raster = c3Raster = c4Raster = c5Raster = None
        options['c2Var'] = options['c3Var'] = options['c4Var'] = options['c5Var'] = None
        c1Raster = path.join(options['projectDir'],options['c1RasterBase'])
        arcpy.CalculateStatistics_management(c1Raster, "1", "1", "#")
        options['c1Var'] = get_raster_var(c1Raster)

        c2BandArray = c3BandArray = c4BandArray = c5BandArray = None
        if options['c2RasterBase'] is not None:
            c2Raster = path.join(options['projectDir'],options['c2RasterBase'])
            arcpy.CalculateStatistics_management(c2Raster, "1", "1", "#")
            options['c2Var'] = get_raster_var(c2Raster)
        if options['c3RasterBase'] is not None:
            c3Raster = path.join(options['projectDir'],options['c3RasterBase'])
            arcpy.CalculateStatistics_management(c3Raster, "1", "1", "#")
            options['c3Var'] = get_raster_var(c3Raster)
        if options['c4RasterBase'] is not None:
            c4Raster = path.join(options['projectDir'],options['c4RasterBase'])
            arcpy.CalculateStatistics_management(c4Raster, "1", "1", "#")
            options['c4Var'] = get_raster_var(c4Raster)
        if options['c5RasterBase'] is not None:
            c5Raster = path.join(options['projectDir'],options['c5RasterBase']) 
            arcpy.CalculateStatistics_management(c5Raster, "1", "1", "#")
            options['c5Var'] = get_raster_var(c5Raster)
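
        # The four blocks above differ only in the slot name; a loop like the
        # commented sketch below (an alternative, not the original code) would
        # remove the repetition while keeping the same cNVar values:
        # for slot, var in (('c2RasterBase', 'c2Var'), ('c3RasterBase', 'c3Var'),
        #                   ('c4RasterBase', 'c4Var'), ('c5RasterBase', 'c5Var')):
        #     if options[slot] is not None:
        #         raster = path.join(options['projectDir'], options[slot])
        #         arcpy.CalculateStatistics_management(raster, "1", "1", "#")
        #         options[var] = get_raster_var(raster)
        #     # (c2Raster..c5Raster would also need assigning for the band() calls below)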
        
        descData = arcpy.Describe(c1Raster)
        spatialReference = descData.spatialReference
        options['cellSize'] = descData.meanCellHeight
        header = get_header(c1Raster)
        arcpy.env.extent = descData.extent
        
        cumDistRaster = arcpy.sa.Con((arcpy.Raster(c1Raster) > 0),0,0)        
      
        if options['endBand'] == 0:
            approxEndBand = str(int(header['nrows']/options['blockSize'])+1)
        else: approxEndBand = str(options['endBand'])
        if options['endStripe'] == 0:
            approxEndStripe = str(int(header['ncols']/options['blockSize'])+1)
        else: approxEndStripe = str(options['endStripe'])
        maxNumSolvesToDo = (int(approxEndBand)-options['startBand'] + 1)*(int(approxEndStripe)-options['startStripe']+1) + 1
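        # Worked example (illustration only): with header['nrows'] = 1000,
        # blockSize = 3 and endBand = 0, approxEndBand = str(int(1000/3) + 1) = '334';
        # maxNumSolvesToDo then scales as bands x stripes plus one.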
        iter = 0
        bandNum = 0
        pctDone=0
        pctDone2=0
        partialResultsArray = None
#new
        breakFlag = False
        count = 0
        calcInBand = False
        for centerRow in range((options['blockSize']-1)/2,header['nrows'],options['blockSize']):
#new
            count += 1
            bandNum += 1 
            if options['startBand'] > 0 and bandNum < options['startBand']:
                # Not capable yet because of counting/partialResults bookkeeping
                # (the intent is a 'continue' once that is fixed); fail loudly
                # instead of the original 'blarg' placeholder NameError.
                raise NotImplementedError('startBand > 0 is not supported yet')
            if options['endBand'] > 0 and bandNum >= options['endBand']+1:
                # Not capable yet because of counting/partialResults bookkeeping.
                raise NotImplementedError('endBand > 0 is not supported yet')
                breakFlag = True
                break
            print 'Starting band #',str(bandNum)+'/'+approxEndBand,' centered on row '+str(centerRow)

            c1BandArray, LLC = band(c1Raster,header,centerRow, options)
#fixme: reinstate next line when figure out partialresults etc
            # if npy.max(c1BandArray) == -9999: continue
            c2BandArray, dummy  = band(c2Raster, header, centerRow, options)
            c3BandArray, dummy  = band(c3Raster, header, centerRow, options)
            c4BandArray, dummy  = band(c4Raster, header, centerRow, options)
            c5BandArray, dummy  = band(c5Raster, header, centerRow, options)
#new
            if partialResultsArray is None:
# FIXME: create partialresultsarray that is SIZE of band, but not ALIGNED with band. OR, create array that is size of REMAINDER.            
# LLC is same
# size is 
# except for last band?
# that's when LLC equals the LLC of the input raster
#fixme: this doesn't work when using an end band!
                nResultsRows = options['radius']+1
                # if LLC 
                # nResultsRows > 
                partialResultsArray = npy.full((nResultsRows, header['ncols']), -9999, dtype='float64')
                # partialResultsCenterRow = centerRow
                partialResultsLLC = LLC 
                
                # print 'partialResultsArray.shape[0]'
                # print partialResultsArray.shape[0]

            subsetCenterRow = min(centerRow,options['radius'])
            
            # Check for all nodata in center band of C1 raster
 #Fixme: reinstate bandcenterarray check. Had to remove because continue statement screwed up partialresults etc.
            # if options['blockSize']>1:
                # bandCenterArray = c1BandArray[(subsetCenterRow-(options['blockSize']-1)/2):(subsetCenterRow+(options['blockSize']-1)/2),:]
            # else:
                # bandCenterArray = c1BandArray[subsetCenterRow,:]
             
            # if npy.max(bandCenterArray) == -9999:       
                # del bandCenterArray
                # continue
            # del bandCenterArray
            
            
            # grid = npy.indices((c1BandArray.shape))
            # rowArray = grid[0]
            # colArray = grid[1]
            # del grid
            stripeNum = 0
            for centerCol in range((options['blockSize']-1)/2, header['ncols'],options['blockSize']):
                
                start_time0 = time.clock()
                stripeNum += 1
                if options['startStripe'] > 0 and stripeNum < options['startStripe']: continue
                if options['endStripe'] > 0 and stripeNum >= options['endStripe']+1: break
                iter += 1
                # print 'Band #',bandNum,'Stripe #',stripeNum
    
                c2CircleVector = c3CircleVector = c4CircleVector = c5CircleVector = None
                subsetCenterCol = min(centerCol,options['radius']) 
                subsetCenterRow = min(centerRow,options['radius'])
                # time consuming- add smart search. 
                # if options['blockSize']>1:
                    # if npy.max(sourceCenterArraySum0[centerCol-(options['blockSize']-1)/2:centerCol+(options['blockSize']-1)/2+1]) <= 0: 
                        # print'No values in radius'
                        # continue
                # else:
                    # if sourceCenterArray[centerCol] <= 0: #BHM ADDED INDEX 10/12/15 
                        # print'No sources in radius'
                        # continue 
                # print c1BandArray
                # time consuming- add smart search. 
                if c1BandArray[subsetCenterRow, centerCol] != -9999:
                    # FIXME! Passing the band array here causes memory errors on this
                    # line. Should be able to accomplish what is needed without passing.
                    c1CircleVector = circ(c1BandArray, subsetCenterRow, centerCol, options)
                    # circ raised memory errors when called with None arrays, so the
                    # optional bands are guarded explicitly.
                    if c2BandArray is not None:
                        c2CircleVector = circ(c2BandArray, subsetCenterRow, centerCol, options)
                    if c3BandArray is not None:
                        c3CircleVector = circ(c3BandArray, subsetCenterRow, centerCol, options)
                    if c4BandArray is not None:
                        c4CircleVector = circ(c4BandArray, subsetCenterRow, centerCol, options)
                    if c5BandArray is not None:
                        c5CircleVector = circ(c5BandArray, subsetCenterRow, centerCol, options)
                    # circleHeader = get_subset_header(c1CircleArray, header, options, centerRow, centerCol)
                    # yMin = circleHeader['yllcorner'] # fixme - check. Could be something like:
                    # max(circleHeader['yllcorner'], circleHeader['yllcorner'] + ((circleHeader['nrows'] - centerRow - options['radius'] - 1) * circleHeader['cellsize']))
                    # pctDone = report_pct_done(iter, maxNumSolvesToDo, -1)

                    calcInBand = True
                    stdEucDist, options = std_euc(c1CircleVector, c2CircleVector, c3CircleVector, c4CircleVector, c5CircleVector, subsetCenterRow, subsetCenterCol, options)

                    if options['blockSize'] > 1:
                        print 'Error - cannot do blocks yet'
                        exit(1)  # exit with a failure code; the original exited with 0 despite the error
                        partialResultsArray[(subsetCenterRow-(options['blockSize']-1)/2):(subsetCenterRow+(options['blockSize']-1)/2)+1,(centerCol-(options['blockSize']-1)/2):(centerCol+(options['blockSize']-1)/2)+1] = stdEucDist
                    else:
                        # partialResultsArray[subsetCenterRow,centerCol] = stdEucDist
                        # print 'count,centercol'
                        # print count, centerCol
                        partialResultsArray[count-1,centerCol] = stdEucDist

                        # print'subcr,cc,std'
                        # print subsetCenterRow,centerCol,stdEucDist
                        # print 'bda'
                        # print partialResultsArray
                        # print 'donebda'
                        
                        
                    del c1CircleVector, c2CircleVector, c3CircleVector, c4CircleVector, c5CircleVector                              
            # print 'Band Dist array:'
            # print partialResultsArray
            print 'Done with band #', str(bandNum)+'/'+approxEndBand
            # pctDone = report_pct_done((bandNum-options['startBand']+1)*(stripeNum-options['startStripe']+1), maxNumSolvesToDo, -1)

            pctDone = report_pct_done(iter+1, maxNumSolvesToDo, -1)
            # if calcInBand: #
                # partialResultsArray = 
#new
            # print 'count, target'
            # print count, partialResultsArray.shape[0]
            if count == partialResultsArray.shape[0]:
                print 'reset'
#new            
                #if we've filled up partialResultsArray, add it to the cumulativeraster
                # yMin = max(header['yllcorner'],header['yllcorner'] + ((header['nrows'] - partialResultsCenterRow - options['radius'] - 1) * header['cellsize']))
                # LLC = arcpy.Point(header['xllcorner'],yMin)
                # print 'pra shape, llc'
                
                # print partialResultsArray, partialResultsLLC
                partialResultsRaster = arcpy.NumPyArrayToRaster(partialResultsArray, partialResultsLLC, header['cellsize'],header['cellsize'],-9999)                          

                # # SAVING BAND RASTER REMOVES OCCASIONAL HORIZONTAL STRIPING
                tempBandFile = os.path.join(options['scratchDir'], 'justBAND'+str(bandNum)+'cur_' + options['outputFileText']+'.tif')
                partialResultsRaster.save(tempBandFile)
                delete_data(tempBandFile)
                
                cumDistRaster = addData_arcpy(cumDistRaster, partialResultsRaster)
                del partialResultsArray
                partialResultsArray = None
                gc.collect()
                calcInBand=False
                count = 0
                del partialResultsRaster
    
              
                options = write_temp_maps(options,cumDistRaster,bandNum) 
                                         
            # print 'Done with band #',bandNum,',
            print 'Elapsed time so far: {0}'.format(datetime.datetime.now()-theStart)  
        print 'DONE with bands'
        # print 'partialResultsArray, partialResultsLLC'    
        # print partialResultsArray, partialResultsLLC    
        # print 'partialres data'
        # print partialResultsArray[0:count,:]
        
        # LLC = arcpy.Point(header['xllcorner'],header['yllcorner'])
        if breakFlag:  # an endBand stopped the loop, so use the whole partial-results array
            partialResultsRaster = arcpy.NumPyArrayToRaster(partialResultsArray, partialResultsLLC, header['cellsize'],header['cellsize'],-9999)                          
        else: 
            partialResultsRaster = arcpy.NumPyArrayToRaster(partialResultsArray[0:count,:], partialResultsLLC, header['cellsize'],header['cellsize'],-9999)                          
        # # SAVING BAND RASTER REMOVES OCCASIONAL HORIZONTAL STRIPING
        tempBandFile = os.path.join(options['scratchDir'], 'justBAND'+str(bandNum)+'cur_' + options['outputFileText']+'.tif')
        
        partialResultsRaster.save(tempBandFile)
        delete_data(tempBandFile)

        cumDistRaster = addData_arcpy(cumDistRaster, partialResultsRaster)
        del partialResultsRaster
        # cumDistRaster.save(r'c:\md\test.tif')


        print 'Done with solves.'  
        print_prof_data()
        write_final_maps(options,cumDistRaster) 
        #xprint locals()
        return options
    except arcpy.ExecuteError:
        print_geoproc_error()
    except:    
        print c1BandArray.shape
        print_python_error()
Ejemplo n.º 27
0
# -*- coding: utf-8 -*-
import arcpy
from arcpy.sa import *
import os, time
arcpy.env.parallelProcessingFactor = 0
try:
    t_inicio = time.clock()  # capture the process start time
    arcpy.env.overwriteOutput = True


    in_source_data = arcpy.GetParameterAsText(0)
    sp = int(arcpy.Describe(in_source_data).spatialReference.factoryCode)
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(sp)

    in_cost_raster = arcpy.GetParameterAsText(1)
    maximum_distance = arcpy.GetParameterAsText(2)
    if maximum_distance != "---":
        maximum_distance = int(maximum_distance)
    else:
        maximum_distance = ""
    out_backlink_raster = arcpy.GetParameterAsText(3)
    capa_extent = arcpy.GetParameterAsText(4)
    capa_salida = arcpy.GetParameterAsText(5)
    if "....." in capa_extent:
        capa_extent = capa_extent.replace(".....", " ")
    if "," not in capa_extent:
        if "MAXOF" in capa_extent or "MINOF" in capa_extent:
            arcpy.env.extent = capa_extent
        else:
            arcpy.env.extent = arcpy.Describe(capa_extent).extent
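
    # The example is truncated here in the source. A plausible continuation,
    # given the parameters read above, would run the cost-distance solve; this
    # sketch is an assumption, not part of the original script:
    #     out_dist = arcpy.sa.CostDistance(in_source_data, in_cost_raster,
    #                                      maximum_distance if maximum_distance != "" else None,
    #                                      out_backlink_raster)
    #     out_dist.save(capa_salida)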
Ejemplo n.º 28
0
    def execute(self, parameters, messages):
        """The source code of the tool."""
        in_features = parameters[0].value
        start_time_field = str(parameters[1].value)
        end_time_field = str(parameters[2].value)
        ovelse_gdb = parameters[3].value
        track_features_name = str(parameters[4].value)
        time_interval = int(parameters[5].value)
        fc_name = "temp_" + datetime.now().strftime('%Y%m%d%H%M%S')
        arcpy.AddMessage(
            "Created temporary feature class {0}\\{1}.".format(
                ovelse_gdb, fc_name))
        arcpy.FeatureClassToFeatureClass_conversion(in_features=in_features,
                                                    out_path=ovelse_gdb,
                                                    out_name=fc_name)
        temp_features = os.path.join(str(ovelse_gdb), fc_name)
        has_m = "DISABLED"
        has_z = "DISABLED"
        spatial_ref = arcpy.Describe(temp_features).spatialReference
        arcpy.CreateFeatureclass_management(ovelse_gdb, track_features_name,
                                            "POINT", "", has_m, has_z,
                                            spatial_ref)
        track_features = os.path.join(str(ovelse_gdb), track_features_name)
        arcpy.AddMessage("Opprettet feature klasse {0}".format(track_features))
        arcpy.AddField_management(track_features, "TRACKID", "LONG")
        arcpy.AddField_management(track_features, "DATETIME", "DATE")
        arcpy.AddField_management(track_features, "JNR", "TEXT")
        dsc = arcpy.Describe(temp_features)
        cursor = arcpy.SearchCursor(temp_features)
        track_rows = arcpy.InsertCursor(track_features)
        arcpy.AddMessage(
            "Beginning to populate feature class {0}".format(track_features))
        for row in cursor:
            shape = row.getValue(dsc.shapeFieldName)
            start = row.getValue(start_time_field)
            end = row.getValue(end_time_field)
            try:
                jnr = row.getValue("JNR")
            except:
                jnr = ""
            objectid = row.getValue("OBJECTID")
            total_dist = shape.length
            if end and start:
                total_time = end - start
                duration = 0
                while duration < total_time.total_seconds():
                    values = track_rows.newRow()
                    values.TRACKID = objectid
                    values.JNR = jnr
                    values.DATETIME = start + timedelta(seconds=duration)
                    distance = (total_dist /
                                total_time.total_seconds()) * duration
                    point = shape.positionAlongLine(distance)
                    values.Shape = point
                    track_rows.insertRow(values)
                    duration = duration + time_interval
                    del values
            else:
                arcpy.AddMessage(
                    "Feature {0} has no TID_START or TID_SLUTT.".format(
                        jnr))
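
        # Worked example of the interpolation loop above (an illustration, not
        # from the source): with total_dist = 1000 m, total_time = 600 s and
        # time_interval = 60 s, a point is inserted every 60 s at
        # distance = (1000/600) * duration, i.e. every 100 m along the line for
        # duration = 0, 60, ..., 540.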

        del cursor, row, track_rows
        arcpy.AddMessage("Finalizing processing features")
        arcpy.JoinField_management(in_data=track_features,
                                   in_field="TRACKID",
                                   join_table=temp_features,
                                   join_field="OBJECTID")
        if arcpy.Exists(temp_features):
            arcpy.AddMessage("Slette midlertidige feature klasse {0}.".format(
                temp_features))
            arcpy.DeleteFeatures_management(temp_features)
        try:
            mxd = arcpy.mapping.MapDocument("CURRENT")
            dataFrame = arcpy.mapping.ListDataFrames(mxd, "*")[0]
            addLayer = arcpy.mapping.Layer(track_features)
            arcpy.mapping.AddLayer(dataFrame, addLayer)
        except:
            pass
        return
Ejemplo n.º 29
0
#S = 0.005
#T = 10000
#t = 100
import arcpy
from arcpy import env  # the excerpt uses bare 'env' below; imports assumed missing from it

#Inputs
points = arcpy.GetParameterAsText(0)
pumpfield = arcpy.GetParameterAsText(1)
buff = float(arcpy.GetParameterAsText(2))
t = float(arcpy.GetParameterAsText(3))  # time
T = float(arcpy.GetParameterAsText(4))  # transmissivity (cf. the #T default above)
S = float(arcpy.GetParameterAsText(5))  # storativity (cf. the #S default above)
cellSize = float(arcpy.GetParameterAsText(6))
fileplace = arcpy.GetParameterAsText(7)
outraster = arcpy.GetParameterAsText(8)

outFeatureClass = outraster + "points"
wellidfield = arcpy.Describe(points).OIDFieldName

env.workspace = fileplace

x, y = [], []
wellid = []
pump = []

for row in arcpy.da.SearchCursor(points, ["SHAPE@XY", wellidfield, pumpfield]):
    # Print x,y coordinates of each point feature
    x.append(row[0][0])
    y.append(row[0][1])
    wellid.append(row[1])
    pump.append(row[2])

desc = arcpy.Describe(points)
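
# The example is truncated here in the source. Given t, T, S and the pumping
# rates collected above, the natural next step is a Theis drawdown surface;
# the sketch below is an assumption (theis_drawdown, Q and r are not names
# from the original) using scipy's exponential integral as the well function:
#     import numpy as np
#     from scipy.special import exp1
#     def theis_drawdown(Q, r, t, T, S):
#         """Drawdown s = Q/(4*pi*T) * W(u), with u = r**2 * S / (4*T*t)."""
#         u = (r ** 2) * S / (4.0 * T * t)
#         return Q / (4.0 * np.pi * T) * exp1(u)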
Ejemplo n.º 30
0
    def AddFeaturesToFeatureLayer(self,
                                  url,
                                  pathToFeatureClass,
                                  chunksize=0,
                                  lowerCaseFieldNames=False):
        """Appends local features to a hosted feature service layer.

        Args:
            url (str): The URL of the feature service layer.
            pathToFeatureClass (str): The path of the feature class on disk.
            chunksize (int): The maximum amount of features to upload at a time. Defaults to 0.
            lowerCaseFieldNames (bool): A boolean value indicating if field names should be converted
                to lowercase before uploading. Defaults to ``False``.
        Returns:
            The result from :py:func:`arcrest.agol.services.FeatureLayer.addFeatures`.
        Raises:
            ArcRestHelperError: if ``arcpy`` can't be found.
        Notes:
            If publishing to a PostgreSQL database, it is suggested to set ``lowerCaseFieldNames`` to ``True``.

        """
        if not arcpyFound:
            raise common.ArcRestHelperError({
                "function":
                "AddFeaturesToFeatureLayer",
                "line":
                inspect.currentframe().f_back.f_lineno,
                "filename":
                'featureservicetools',
                "synerror":
                "ArcPy required for this function"
            })
        fl = None
        try:
            fl = FeatureLayer(url=url, securityHandler=self._securityHandler)

            if chunksize > 0:
                fc = os.path.basename(pathToFeatureClass)
                inDesc = arcpy.Describe(pathToFeatureClass)
                oidName = arcpy.AddFieldDelimiters(pathToFeatureClass,
                                                   inDesc.oidFieldName)

                arr = arcpy.da.FeatureClassToNumPyArray(
                    pathToFeatureClass, (oidName))
                syncSoFar = 0
                messages = {'addResults': [], 'errors': []}
                total = len(arr)
                errorCount = 0
                if total == 0:  # len() returns an int; the original compared against the string '0'
                    print("0 features in %s" % pathToFeatureClass)
                    return "0 features in %s" % pathToFeatureClass
                print("%s features in layer" % (total))

                arcpy.env.overwriteOutput = True
                if total < int(chunksize):
                    return fl.addFeatures(
                        fc=pathToFeatureClass,
                        lowerCaseFieldNames=lowerCaseFieldNames)
                else:
                    newArr = chunklist(arr, chunksize)
                    exprList = [
                        "{0} >= {1} AND {0} <= {2}".format(
                            oidName, nArr[0][0], nArr[len(nArr) - 1][0])
                        for nArr in newArr
                    ]
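
                    # Illustration (not from the source): with contiguous OIDs
                    # 1..2500 and chunksize = 1000, exprList becomes
                    #   ["OBJECTID >= 1 AND OBJECTID <= 1000",
                    #    "OBJECTID >= 1001 AND OBJECTID <= 2000",
                    #    "OBJECTID >= 2001 AND OBJECTID <= 2500"]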
                    for expr in exprList:

                        UploadLayer = arcpy.MakeFeatureLayer_management(
                            pathToFeatureClass, 'TEMPCOPY', expr).getOutput(0)
                        #print(arcpy.GetCount_management(in_rows=UploadLayer).getOutput(0) + " features in the chunk")
                        results = fl.addFeatures(
                            fc=UploadLayer,
                            lowerCaseFieldNames=lowerCaseFieldNames)
                        chunkCount = int(arcpy.GetCount_management(
                            in_rows=UploadLayer).getOutput(0))
                        print("%s features in the chunk" % chunkCount)
                        if chunkCount > 0:

                            if results is not None and 'addResults' in results and results[
                                    'addResults'] is not None:
                                featSucces = 0
                                for result in results['addResults']:
                                    if 'success' in result:
                                        if result['success'] == False:
                                            if 'error' in result:
                                                errorCount = errorCount + 1
                                                print("\tError info: %s" %
                                                      (result))
                                        else:
                                            featSucces = featSucces + 1
                                syncSoFar = syncSoFar + featSucces
                                print("%s features added in this chunk" %
                                      (featSucces))
                                print("%s/%s features added, %s errors" %
                                      (syncSoFar, total, errorCount))
                                if 'addResults' in messages:
                                    messages['addResults'] = messages[
                                        'addResults'] + results['addResults']
                                else:
                                    messages['addResults'] = results[
                                        'addResults']
                            else:
                                messages['errors'] = results  # record the failed chunk's response ('result' was a stale loop variable)
                return messages
            else:
                return fl.addFeatures(fc=pathToFeatureClass,
                                      lowerCaseFieldNames=lowerCaseFieldNames)
        except arcpy.ExecuteError:
            line, filename, synerror = trace()
            raise common.ArcRestHelperError({
                "function": "AddFeaturesToFeatureLayer",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arcpyError": arcpy.GetMessages(2),
            })
        except:
            line, filename, synerror = trace()
            raise common.ArcRestHelperError({
                "function": "AddFeaturesToFeatureLayer",
                "line": line,
                "filename": filename,
                "synerror": synerror,
            })
        finally:
            del fl  # drop the FeatureLayer reference before collecting
            gc.collect()
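
# Hypothetical usage of the method above (the class instantiation, URL and
# path are assumptions, not from the source):
#     fst = featureservicetools(securityHandler=sh)
#     result = fst.AddFeaturesToFeatureLayer(
#         url="https://services.example.com/arcgis/rest/services/parcels/FeatureServer/0",
#         pathToFeatureClass=r"C:\data\parcels.gdb\parcels",
#         chunksize=1000,
#         lowerCaseFieldNames=True)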