Example #1
def output_binned_geojson(dataset, field, linspace_array):  #, basename):
    # Make a layer from the feature class
    arcpy.MakeFeatureLayer_management(dataset, "lyr")

    for i in range(len(linspace_array) - 1):
        json_name = str(linspace_array[i]) + "-" + str(
            linspace_array[i + 1]) + ".geojson"
        # define SQL where clause for the select-by-attribute process
        whereclause = """{} < {} AND {} <= {}""".format(
            linspace_array[i],
            arcpy.AddFieldDelimiters(
                dataset, field
            ),  # Correctly delimits the field name depending on the data source
            arcpy.AddFieldDelimiters(
                dataset, field
            ),  # Correctly delimits the field name depending on the data source
            linspace_array[i + 1])
        arcpy.SelectLayerByAttribute_management(
            "lyr", "CLEAR_SELECTION", whereclause
        )  # Clear selection before select by attribute helped to make the script work
        arcpy.SelectLayerByAttribute_management("lyr", "NEW_SELECTION",
                                                whereclause)
        arcpy.FeaturesToJSON_conversion(
            "lyr", os.path.join(json_folder,
                                json_name), "NOT_FORMATTED", "NO_Z_VALUES",
            "M_VALUES", "GEOJSON")  # Convert the selected features to JSON
Example #2
def file_gdb_layer_to_geojson(geodatabase, layer_name, outfile):

	geoprocessing_log.info("Converting layer to geojson")
	if os.path.exists(os.path.join(outfile)):
		geoprocessing_log.warn("Output file {0:s} exists - Deleting".format(outfile))
		os.remove(outfile)

	geoprocessing_log.info("Reprojecting to web_mercator")
	reprojected = temp.generate_gdb_filename(layer_name)
	arcpy.Project_management(in_dataset=os.path.join(geodatabase, layer_name), out_dataset=reprojected, out_coor_system=arcpy.SpatialReference(REPROJECTION_ID))

	geoprocessing_log.info("Writing out geojson file at {0:s}".format(reprojected))
	arcpy.FeaturesToJSON_conversion(reprojected, outfile, geoJSON="GEOJSON")  # export GeoJSON with ArcGIS Pro

	return  # skip the legacy code below, retained for reference for now; it can probably be deleted after August 2016. It was replaced by the FeaturesToJSON call above

	ogr.UseExceptions()

	geoprocessing_log.debug("Opening FGDB")
	file_gdb_driver = ogr.GetDriverByName("OpenFileGDB")
	new_gdb, new_layer_name = os.path.split(reprojected)
	gdb = file_gdb_driver.Open(new_gdb, 0)

	geojson_driver = ogr.GetDriverByName("GeoJSON")
	geojson = geojson_driver.CreateDataSource(outfile)

	geoprocessing_log.info("Writing out geojson file at {0:s}".format(new_layer_name))
	layer = gdb.GetLayer(new_layer_name)
	geojson.CopyLayer(layer, layer_name, options=["COORDINATE_PRECISION=4",])
Example #3
def geojsonConvert(inputFilename):
    dateTime = datetime.now()
    logger.info("GeoJSON Convertion")
    reprojecedFile = nameModify(inputFilename, "wgs84")

    outputFilename = path.dirname(inputFilename) + sep + path.basename(
        reprojecedFile).replace(reprojecedFile.split(".")[-1], "json")

    arcpy.Project_management(
        inputFilename, reprojecedFile,
        "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]",
        "",
        "PROJCS['RGF_1993_Lambert_93',GEOGCS['GCS_RGF_1993',DATUM['D_RGF_1993',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',700000.0],PARAMETER['False_Northing',6600000.0],PARAMETER['Central_Meridian',3.0],PARAMETER['Standard_Parallel_1',44.0],PARAMETER['Standard_Parallel_2',49.0],PARAMETER['Latitude_Of_Origin',46.5],UNIT['Meter',1.0]]",
        "NO_PRESERVE_SHAPE", "", "NO_VERTICAL")
    arcpyLogger(arcpyVerboseLevel)
    logger.info("The wgs84 reprojected file, generated in " +
                str(duration(dateTime)) + " is : " + reprojecedFile)

    #shpCopy(reprojecedFile, outputPath + path.basename(inputDataPath).replace("." + inputFilename.split(".")[-1], "_clean_WGS84.shp"))

    arcpy.FeaturesToJSON_conversion(reprojecedFile, outputFilename,
                                    "NOT_FORMATTED", "NO_Z_VALUES",
                                    "NO_M_VALUES", "GEOJSON")

    arcpyLogger(arcpyVerboseLevel)
    logger.info("The GeoJSON file, generated in " + str(duration(dateTime)) +
                " is : " + outputFilename)

    fileCopier(
        outputFilename, outputPath + path.basename(inputDataPath).replace(
            "." + inputFilename.split(".")[-1], "_clean_wgs84.geojson"))
    return outputFilename
Example #4
def sendResults(string):
    # json of result layer
    arcpy.FeaturesToJSON_conversion(string,
                                    requestedResultSetName + ".geojson",
                                    "NOT_FORMATTED", "NO_Z_VALUES",
                                    "NO_M_VALUES", "GEOJSON", "KEEP_INPUT_SR")
    print("PYTHON SCRIPT EXIT")
    return
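
# Usage sketch: the function reads the module-level variable
# requestedResultSetName, so it must be defined first (names hypothetical).
requestedResultSetName = "result_set"
sendResults("in_memory/result_layer")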
Example #5
def shptoGeojson(file):

    # Assumes os, uuid, json and arcpy are imported. Use a real scratch file:
    # an "in_memory" path is not a filesystem location, so the JSON written
    # there could not be re-opened with open().
    scratch_name = os.path.join(arcpy.env.scratchFolder,
                                "temp_{}.json".format(uuid.uuid4().hex))
    arcpy.FeaturesToJSON_conversion(file, scratch_name)
    # Read the conversion result back into a dictionary and return it
    with open(scratch_name, "r") as f:
        return_json = json.load(f)
    return return_json
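
# Usage sketch (hypothetical shapefile path); the returned dict follows the
# Esri JSON layout, with the records under a "features" key:
roads_json = shptoGeojson(r"C:\data\roads.shp")
print(len(roads_json.get("features", [])))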
Example #6
 def onClick(self):
     # Get the current map document and the first data frame.
     mxd = arcpy.mapping.MapDocument('current')
     #df = arcpy.mapping.ListDataFrames(mxd)[0]
     # Call the zoomToSelectedFeatures() method of the data frame class
     #df.zoomToSelectedFeatures()
     layers = arcpy.mapping.ListLayers(mxd)
     arcpy.FeaturesToJSON_conversion(
         layers[0], r"C:\bla\selection_" + str(uuid.uuid4()) + ".json")
Example #7
def ExportJSON(FeatureClass):
    try:
        # This will be the output json file
        JsonFile = os.path.dirname(arcpy.env.workspace) + '\\' + FeatureClass + '.json'
        
        # If the json file exists already then delete it 
        if os.path.exists(JsonFile):
            arcpy.AddMessage("File exists: " + JsonFile + '. Deleted')
            os.remove(JsonFile)

        # Export the FeatureClass
        arcpy.AddMessage("Exporting " + JsonFile)
        arcpy.FeaturesToJSON_conversion(FeatureClass, JsonFile, "NOT_FORMATTED")
    except Exception as e:
        arcpy.AddMessage('Export failed for FeatureClass: ' + FeatureClass + ' ' + str(e))
Example #8
def get_data_dict(gdb, layer):
    #tranform data into dictionary
    arcpy.env.workspace = gdb
    fjson = arcpy.FeaturesToJSON_conversion(layer, "features9.json")

    #open json and parse it into a dictionary (json.load instead of eval,
    #which would choke on JSON literals such as true/false/null)
    with open(fjson[0], 'r') as open_file:
        data = json.load(open_file)

    #grab features (geom/attributes)
    features = data['features']
    arcpy.AddMessage("converted {} to json".format(layer))

    return features
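
# Usage sketch (hypothetical geodatabase and layer names); each returned
# feature is a dict with 'geometry' and 'attributes' keys, following the
# Esri JSON layout:
features = get_data_dict(r"C:\data\city.gdb", "parcels")
for feature in features:
    print(feature["attributes"])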
Example #9
    def CreateLinesBuildings2Levee(accum_csv, Levee_failures_shp, temp_table, temp_point, Line_Features,
                                   Line_Features_WGS84, ConnectionLines):

        df_csv = pd.read_csv(accum_csv)

        field_names=['X_center', 'Y_center', 'LF']
        arr = arcpy.da.TableToNumPyArray(Levee_failures_shp, (field_names))
        df = pd.DataFrame(arr)

        merged_df = df_csv.merge(df, left_on='Levee_Failure', right_on="LF")
        columns_to_drop = ['Wert', 'MaxDepth', 'deg_of_loss', 'Damage', 'LF']
        for columns in columns_to_drop:
            merged_df = merged_df.drop(columns, axis=1)
        merged_df = merged_df.rename(columns={'X_centroid': 'X_Build', 'Y_centroid': 'Y_Build', 'X_center': 'X_LF', 'Y_center': 'Y_LF'})

        x = np.array(np.rec.fromrecords(merged_df.values))
        names = merged_df.dtypes.index.tolist()
        x.dtype.names = tuple(names)
        if not arcpy.Exists(temp_table):
            arcpy.da.NumPyArrayToTable(x, temp_table)
        else:
            print ("Temporal table already exists. Cannot move on. Script aborted.")

        print ("ArcGIS Event layer will now be created.")
        EventLayer='Event_Layer_Lines'
        arcpy.MakeXYEventLayer_management(temp_table, "X_Build", "Y_Build", EventLayer,
                                          spatial_reference=arcpy.SpatialReference(21781))
        arcpy.CopyFeatures_management(EventLayer, temp_point)
        print ("Start to create ArcGIS line feature class...")
        arcpy.XYToLine_management(temp_point, Line_Features, "X_Build", "Y_Build", "X_LF",
                                  "Y_LF", id_field="V25OBJECTI")
        arcpy.JoinField_management(Line_Features, "X_LF", temp_table, "X_LF", fields="Levee_Failure")
        out_coor_system = arcpy.SpatialReference(4326)
        arcpy.Project_management(Line_Features, Line_Features_WGS84, out_coor_system=out_coor_system)
        arcpy.FeaturesToJSON_conversion(Line_Features_WGS84, ConnectionLines, format_json="FORMATTED", geoJSON="GEOJSON")
        print ('Finished converting and exporting to geojson')

        print ('begin to delete temporary files')
        files_to_delete=[temp_table, temp_point, Line_Features, Line_Features_WGS84]
        for files in files_to_delete:
            if arcpy.Exists(files):
                arcpy.Delete_management(files)
                print (str(files) + (" successfully deleted."))
            else:
                print ("File " + str(files) + " does not exist and cannot be deleted")
        print ("Successfully finished function")
Example #10
 def ArcGIS2geojson(Current_lv, ArcGIS_FeatureClass,
                    ArcGIS_FeatureClass_WGS84, GeojsonFilePath):
     ##This script creates a .geojson file from an ArcGIS feature class.
     arcpy.Project_management(ArcGIS_FeatureClass,
                              ArcGIS_FeatureClass_WGS84, 4326)
     arcpy.AddField_management(ArcGIS_FeatureClass_WGS84, "Levee", "short")
     arcpy.AddField_management(ArcGIS_FeatureClass_WGS84,
                               "Time",
                               "string",
                               field_length=50)
     arcpy.CalculateField_management(ArcGIS_FeatureClass_WGS84, "Levee",
                                     Current_lv)
     Time = '"' + str("2006-03-11T09:00:00") + '"'
     arcpy.CalculateField_management(ArcGIS_FeatureClass_WGS84, "Time",
                                     Time)
     arcpy.FeaturesToJSON_conversion(ArcGIS_FeatureClass_WGS84,
                                     GeojsonFilePath,
                                     format_json="FORMATTED",
                                     geoJSON="GEOJSON")
     print("ArcGIS to Geojson conversion successfully finished")
Example #11
def generate_wgs84_data(zone_name, shapefile_path, dstring, lgr):
    """ Take feature, convert to EPSG:4326, then convert it to GeoJSON
        that CrowdFiber can use.
    """
    try:
        in_shp = "%s%s_%s.shp" % (shapefile_path, zone_name, dstring)
        out_shp = "%s%s_%s_wgs84.shp" % (shapefile_path, zone_name, dstring)
        out_geojson = "%s%s_%s_wgs84.json" % (shapefile_path, zone_name,
                                              dstring)
        out_cs = arcpy.SpatialReference(4326)

        arcpy.Project_management(in_shp, out_shp, out_cs)
        arcpy.FeaturesToJSON_conversion(in_features=out_shp,
                                        out_json_file=out_geojson,
                                        format_json="NOT_FORMATTED",
                                        include_z_values="NO_Z_VALUES",
                                        include_m_values="NO_M_VALUES",
                                        geoJSON="GEOJSON")
    except:
        lgr.exception("Couldn't generate GeoJSON for %s" % (zone_name))
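
# Usage sketch (all values hypothetical). Note that shapefile_path must end
# with a path separator, because the function concatenates it directly with
# the zone name.
import logging
lgr = logging.getLogger("crowdfiber")
generate_wgs84_data("zone_a", "C:/data/zones/", "20160101", lgr)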
Example #12
 def get_aoi_geometry(self, aoi_layer):
     if 'geometry' not in self.aoi_infos[aoi_layer.name]:
         json_file_url = os.sep.join([self.base_utils.root_dir, 'json', '{}.json'.format(aoi_layer.name)])
         arcpy.FeaturesToJSON_conversion(aoi_layer.dataSource, json_file_url)
         aoi_geometry = {}
         with open(json_file_url) as f:
             aoi_json = json.load(f)
             aoi_geometry['spatialReference'] = aoi_json['spatialReference']
             for feature in aoi_json['features']:
                 if 'rings' in feature['geometry']:
                     feature_rings = feature['geometry']['rings']
                     if 'rings' not in aoi_geometry:
                         aoi_geometry['rings'] = []
                     aoi_geometry['rings'] += feature_rings
                 if 'curveRings' in feature['geometry']:
                     feature_curve_rings = feature['geometry']['curveRings']
                     if 'curveRings' not in aoi_geometry:
                         aoi_geometry['curveRings'] = []
                     aoi_geometry['curveRings'] += feature_curve_rings
         self.aoi_infos[aoi_layer.name]['geometry'] = aoi_geometry
     return self.aoi_infos[aoi_layer.name]['geometry']
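
# The dict assembled above mirrors an Esri JSON polygon, so it can be turned
# back into an arcpy geometry when it holds only plain rings (arcpy.AsShape
# does not parse curveRings). A sketch, with 'report' as a hypothetical
# instance of this class:
# aoi_geometry = report.get_aoi_geometry(aoi_layer)
# aoi_polygon = arcpy.AsShape(aoi_geometry, True)  # esri_json=True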
Example #13
def createNeighbourhoodJSONfiles(makeCompactJson):
    neighbourhoods = "Neighbourhoods"
    sites = "Sites"

    if makeCompactJson:
        files = glob.glob(outJsonDir + '*.json')
        for f in files:
            os.remove(f)

    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference("WGS 1984")
    #arcpy.SelectLayerByAttribute_management("lyr", "NEW_SELECTION", "GUM_COUNT_ACTUAL > 0")

    arcpy.MakeFeatureLayer_management(neighbourhoods, "Neighbourhoods")
    arcpy.MakeFeatureLayer_management(sites, "Sites")

    #arcpy.management.SelectLayerByLocation("Sites", "INTERSECT", "Neighbourhoods", None, "NEW_SELECTION", "NOT_INVERT")

    with arcpy.da.SearchCursor(neighbourhoods, ["AREA_NAME"]) as cursor:
        for row in cursor:
            print(row)
            hood = row[0].replace("'", "''")
            arcpy.SelectLayerByAttribute_management(
                "Neighbourhoods", "NEW_SELECTION",
                "AREA_NAME = '" + hood + "'")
            arcpy.SelectLayerByLocation_management("Sites", "INTERSECT",
                                                   "Neighbourhoods", None,
                                                   "NEW_SELECTION",
                                                   "NOT_INVERT")

            hood = hood.replace("''", "'")
            hood = hood.replace("/", "-")
            arcpy.AddMessage(hood)

            jsonFile = jsonDir + hood + ".json"
            arcpy.FeaturesToJSON_conversion("Sites", jsonFile, "NOT_FORMATTED")

            if makeCompactJson:
                compactJSON(jsonFile, hood)

    arcpy.SelectLayerByAttribute_management("Sites", "CLEAR_SELECTION")
Example #14
def main():
    gdb_path = arcpy.GetParameterAsText(0)
    input_feature = arcpy.GetParameter(1)
    all_the_world = bool(arcpy.GetParameter(2))
    to_clip = bool(arcpy.GetParameter(3))
    osm_scheme = arcpy.GetParameterAsText(4)
    layer_config_file = arcpy.GetParameterAsText(5)
    aprx_model = arcpy.GetParameterAsText(6)
    create_vtpk = bool(arcpy.GetParameter(7))

    pythonPath = os.path.dirname(os.path.realpath(sys.argv[0]))
    settings = ConfigParser()
    settings.read(pythonPath + "/settings.ini")

    db_server = CommonFunctions.readParameter(settings, "database",
                                              'db_server')
    db_port = CommonFunctions.readParameter(settings, "database", 'db_port')
    db_database = CommonFunctions.readParameter(settings, "database",
                                                'db_database')
    db_collection = CommonFunctions.readParameter(settings, "database",
                                                  'db_collection')
    done_path = CommonFunctions.readParameter(settings, "directories",
                                              'done_path')
    tiling_scheme = CommonFunctions.readParameter(settings, "models",
                                                  'tiling_scheme')
    global element_at_time
    element_at_time = int(
        CommonFunctions.readParameter(settings, "general", 'element_at_time'))

    client = MongoClient(db_server, int(db_port))
    db = client[db_database]
    collection = db[db_collection]
    collection.create_index([("id", ASCENDING)], background=True)
    collection.create_index([("osm_type", ASCENDING)], background=True)
    collection.create_index([("geometry", GEOSPHERE)], background=True)
    collection.create_index([("geometry.type", ASCENDING)], background=True)
    collection.create_index([("nd.ref", ASCENDING)], background=True)
    collection.create_index([("member.ref", ASCENDING)], background=True)
    collection.create_index([("osm_type", ASCENDING), ("geometry", ASCENDING)],
                            background=True)
    collection.create_index([("osm_type", ASCENDING), ("id", ASCENDING)],
                            background=True)

    geometries = []
    if not all_the_world:
        if (os.path.exists(os.path.join(done_path, "workjson.geojson"))):
            os.remove(os.path.join(done_path, "workjson.geojson"))
        arcpy.FeaturesToJSON_conversion(
            input_feature,
            os.path.join(done_path, "workjson.geojson").replace("\\", "/"),
            geoJSON="GEOJSON")

        time.sleep(1)
        content = ''
        with open(
                os.path.join(done_path, "workjson.geojson").replace("\\",
                                                                    "/")) as f:
            content = f.readlines()

        resultjson = ''
        for single in content:
            resultjson = resultjson + single.replace("\n", "")
        if (os.path.exists(os.path.join(done_path, "workjson.geojson"))):
            os.remove(os.path.join(done_path, "workjson.geojson"))
        d = json.loads(resultjson)
        features = d['features']
        for feature in features:
            geometries.append(feature['geometry'])

        if to_clip:
            if (arcpy.Exists("in_memory/polygon_selection")):
                arcpy.Delete_management("in_memory/polygon_selection")
            arcpy.management.CreateFeatureclass(
                "in_memory",
                "polygon_selection",
                "POLYGON",
                "",
                "DISABLED",
                "DISABLED",
                spatial_reference=
                "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 11258999068426.2;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision"
            )

            # Open an InsertCursor and insert the new geometry
            cursor = arcpy.da.InsertCursor('in_memory/polygon_selection',
                                           ['SHAPE@'])
            for feature in features:
                if (feature['geometry']['type'] == "Polygon"):
                    geom = feature["geometry"]["coordinates"][0]
                    array = arcpy.Array()
                    for g in geom:
                        array.append(arcpy.Point(g[0], g[1]))
                    # build and insert the polygon for Polygon features only
                    polygon = arcpy.Polygon(array)
                    cursor.insertRow([polygon])
            # Delete cursor object
            del cursor

    gdbname = gdb_path.replace("\\", "/")
    gdbname = gdbname.split("/")[-1]
    database_path = gdb_path.replace(gdbname, "")
    arcpy.AddMessage("Create Geodatabase: " + gdbname + " using " +
                     osm_scheme + " in directory " + database_path)
    arcpy.CreateFileGDB_management(database_path, gdbname)
    arcpy.ImportXMLWorkspaceDocument_management(gdb_path, osm_scheme)

    arcpy.AddMessage("Read layer config file")
    with open(layer_config_file) as f:
        content = f.readlines()

    for single in content:
        single = single.replace("\n", "")
        arcpy.AddMessage("Process " + single.split(",")[1] + ": " +
                         single.split(",")[0])
        readSingleLayer(collection, single, geometries,
                        os.path.join(database_path, gdbname), all_the_world,
                        to_clip)
    client.close()

    if aprx_model != "":
        arcpy.AddMessage('Rebuild aprx file from model')
        aprx = arcpy.mp.ArcGISProject(aprx_model)

        dbs = []
        m = aprx.listMaps()[0]
        arcpy.AddMessage("Update Model databases")
        for lyr in m.listLayers():
            if (lyr.supports("connectionProperties") == True):
                if lyr.connectionProperties:
                    if lyr.connectionProperties['connection_info'][
                            'database'] not in dbs:
                        dbs.append(lyr.connectionProperties['connection_info']
                                   ['database'])

        for db in dbs:
            aprx.updateConnectionProperties(
                db, os.path.join(database_path, gdbname), True, False)

        absname = gdbname.split(".")[0]
        if (arcpy.Exists(os.path.join(database_path, absname + ".aprx"))):
            arcpy.Delete_management(
                os.path.join(database_path, absname + ".aprx"))
        aprx.saveACopy(os.path.join(database_path, absname + ".aprx"))

        if create_vtpk:
            for m in aprx.listMaps():
                arcpy.AddMessage("Tile index creation")
                if (arcpy.Exists(database_path + "/" + absname + "Index.gdb")):
                    arcpy.Delete_management(database_path + "/" + absname +
                                            "Index.gdb")
                arcpy.CreateFileGDB_management(database_path,
                                               absname + "Index.gdb")
                arcpy.management.CreateVectorTileIndex(
                    m, database_path + "/" + absname + "Index.gdb/osmIndex",
                    "EXISTING", tiling_scheme, 10000)

                arcpy.AddMessage("Vector tile map creation")
                if (arcpy.Exists(database_path + "/" + absname + ".vtpk")):
                    arcpy.Delete_management(database_path + "/" + absname +
                                            ".vtpk")
                arcpy.management.CreateVectorTilePackage(
                    m, database_path + "/" + absname + ".vtpk", "EXISTING",
                    tiling_scheme, "INDEXED", 73957190.9489637,
                    1128.49717634527,
                    database_path + "/" + absname + "Index.gdb/osmIndex",
                    "OSM", "World, Vector")
        del aprx

    arcpy.ClearWorkspaceCache_management()
Example #15
    def execute(self, parameters, messages):
        """The source code of the tool."""

        arcpy.ImportToolbox(os.path.join(os.path.dirname(__file__), "URB.pyt"))
        arcpy.gp.toolbox = os.path.join(os.path.dirname(__file__), "URB.pyt")

        def extentToPoly(extent, srid=3003):
            clist = arcpy.Array()
            clist.append(arcpy.Point(extent.XMin, extent.YMin))
            clist.append(arcpy.Point(extent.XMin, extent.YMax))
            clist.append(arcpy.Point(extent.XMax, extent.YMax))
            clist.append(arcpy.Point(extent.XMax, extent.YMin))
            return arcpy.Polygon(clist)

        def get_best_fit_scale(fc,
                               paper,
                               scales=[
                                   1000, 2000, 2500, 5000, 7500, 10000, 20000
                               ]):
            desc = arcpy.Describe(fc)
            sheet = printOutput_templates[paper]["size"]
            margin = 10
            #mapUnitsPerMillimeter = [0.5,1,2,5,10]
            cx = (desc.extent.XMin + desc.extent.XMax) / 2
            cy = (desc.extent.YMin + desc.extent.YMax) / 2
            fc_bound = extentToPoly(desc.extent)

            for scale in scales:
                scaleFactor = scale / 1000
                wb = sheet[0] * scaleFactor / 2
                hb = sheet[1] * scaleFactor / 2
                wf = (sheet[0] - margin * 2) * scaleFactor / 2
                hf = (sheet[1] - margin * 2) * scaleFactor / 2

                #bound = arcpy.Polygon([arcpy.Point(cx-wb,cy-hb), arcpy.Point(cx+wb,cy-hb), arcpy.Point(cx+wb,cy+hb), arcpy.Point(cx-wb,cy+hb)])
                #frame = arcpy.Polygon([arcpy.Point(cx-wf,cy-hf), arcpy.Point(cx+wf,cy-hf), arcpy.Point(cx+wf,cy+hf), arcpy.Point(cx-wf,cy+hf)])
                bound = extentToPoly(
                    arcpy.Extent(cx - wb, cy - hb, cx + wb, cy + hb))
                frame_extent = arcpy.Extent(cx - wf, cy - hf, cx + wf, cy + hf)
                frame = extentToPoly(frame_extent)

                #tempfcname = "in_memory/output" + uuid.uuid4().hex
                #arcpy.Intersect_analysis ([frame, fc_bound], tempfcname)
                #result = arcpy.GetCount_management(tempfcname)
                #intersections = int(result.getOutput(0))

                #if intersections > 0:
                if frame_extent.contains(desc.extent):
                    return bound, frame, scale

            return bound, frame, scale  # no scale fits: fall back to the largest one tried

        def get_esri_ring(extent):
            ring = [
                [extent.XMin, extent.YMin],
                [extent.XMax, extent.YMin],
                [extent.XMax, extent.YMax],
                [extent.XMin, extent.YMax],
                [extent.XMin, extent.YMin],
            ]
            return ring

        probe_path = parameters[0].valueAsText
        coordinate_catastali = parameters[1].valueAsText
        paper = parameters[2].valueAsText.replace("'", "")
        base = parameters[3].valueAsText.replace("'", "")

        checkboxes = []
        #for idx in range(4,12):
        #    if parameters[idx].valueAsText == "true":
        #        checkboxes.append(idx)
        for param in parameters:
            #arcpy.AddMessage("param: %s %s" % (str(param.datatype),str(param.valueAsText)))
            if param.datatype in ("Booleano",
                                  "Boolean") and param.valueAsText == "true":
                checkboxes.append(param.name)
        arcpy.AddMessage("checkboxes: %s" % str(checkboxes))

        with open(
                os.path.join(os.path.dirname(__file__),
                             "web_map_as_json.json"), "r") as jf:
            wmaj_template = jf.read()

        template_engine = Template(wmaj_template)

        decode_map = []

        if coordinate_catastali:
            CC_result = arcpy.gp.CC2FCtool(coordinate_catastali)
            probe_path = CC_result.getOutput(0)
        else:
            if not probe_path:
                arcpy.AddError(
                    "At least one context must be specified, either as a layer or as cadastral coordinates"
                )
                exit(0)

        arcpy.AddMessage("probe_path: %s paper:" % probe_path)
        probe = arcpy.mapping.Layer(probe_path)
        with arcpy.da.SearchCursor(probe_path, ['SHAPE']) as cursor:
            probe_polygon = next(cursor)[0]

        #probe_json_path = os.path.join(tempfile.mkdtemp(), "probe.json")
        probe_json_path = get_jobfile("activity", "json")
        arcpy.FeaturesToJSON_conversion(probe_path, probe_json_path,
                                        "FORMATTED")

        with open(probe_json_path, "r") as jf:
            probe_json = jf.read()

        #arcpy.AddMessage(json.dumps(json.loads(probe_json),indent=3))

        json_feats = []
        probe_json_dict = json.loads(probe_json)
        for feat in probe_json_dict["features"]:
            feat["symbol"] = {
                "color": [255, 0, 0, 0],
                "outline": {
                    "color": [255, 0, 0, 255],
                    "width": 1.75,
                    "type": "esriSLS",
                    "style": "esriSLSSolid"
                },
                "type": "esriSFS",
                "style": "esriSFSSolid"
            }
            json_feats.append(feat)

        result_pdf = []

        for tema in temi:
            if not tema["label"] in checkboxes:
                continue

            mapServices = basi[base] + tema["def"]

            bound, frame, scale = get_best_fit_scale(probe_path, paper)

            printpar = {
                "extent": [
                    frame.extent.XMin, frame.extent.YMin, frame.extent.XMax,
                    frame.extent.YMax
                ],
                "scale":
                scale,
                "srid":
                3003,
                "esri_poly":
                json.dumps(
                    json.loads(probe_json)["features"][0]["geometry"]
                    ["rings"]),
                "esri_style":
                json.dumps(
                    proto_ESRI_style),  # not implemented in the json template
                "esri_bound":
                get_esri_ring(bound.extent),
                "esri_frame":
                get_esri_ring(frame.extent),
                "title":
                tema["label"].upper().replace("_", ""),
                "dpi":
                200,
                "auth":
                "Settore urbanistica, Servizi catastali e Mobilita'",
                "copyright":
                "Comune di Padova"
            }

            web_map_as_json = template_engine.render(printpar=printpar)
            web_map_as_dict = json.loads(web_map_as_json)

            web_map_as_dict["operationalLayers"][0]["featureCollection"][
                "layers"][0]["featureSet"]["features"] = json_feats
            web_map_as_dict[
                "operationalLayers"] = mapServices + web_map_as_dict[
                    "operationalLayers"]
            web_map_as_json = json.dumps(web_map_as_dict)

            post_parameters = {
                "f": "json",
                "Web_Map_as_JSON": web_map_as_json,
                "Format": "PDF",
                "Layout_Template": printOutput_templates[paper]["label"]
            }

            #arcpy.AddMessage(json.dumps(post_parameters,indent=3))

            #pdf_file_path = os.path.join(tempfile.mkdtemp(), tema["label"]+".pdf")
            pdf_file_path = get_jobfile("output", "pdf")

            res = urllib.urlopen(
                base_url +
                "arcgis/rest/services/Utilities/PrintingTools/GPServer/Export%20Web%20Map%20Task/execute",
                urllib.urlencode(post_parameters)).read()

            if "results" in res:
                remoteFile = json.loads(res)['results'][0]["value"]["url"]
                #arcpy.AddMessage ("REMOTE: " + remoteFile)
                urllib.urlretrieve(remoteFile, pdf_file_path)
                arcpy.AddMessage("OK: %s" % tema["label"])
                result_pdf.append(pdf_file_path)
            else:
                arcpy.AddMessage("NO")

        if parameters[-1].valueAsText:
            pdf_globale = parameters[-1].valueAsText
        else:
            #pdf_globale = os.path.join(tempfile.mkdtemp(), "inquadramento.pdf")
            pdf_globale = get_jobfile("output", "pdf")

        merger = PdfFileMerger()
        for file in result_pdf:
            merger.append(PdfFileReader(file))
        merger.write(pdf_globale)

        parameters[-1].value = pdf_globale

        arcpy.AddMessage("OK: %s" % pdf_globale)
Example #16
def dissolve_into_shapefile(blocks_maz_layer, maz_or_taz):
    """
    Dissolve the blocks into final MAZ/TAZ shapefile
    """
    shapefile = MAZS_SHP if maz_or_taz == "maz" else TAZS_SHP

    # don't care if this fails, just want to head off error since arcpy gets mad if we try to overwrite
    try:
        arcpy.Delete_management("{0}_temp.shp".format(shapefile))
    except Exception as err:
        logging.debug(err.args[0])

    # don't care if this fails, just want to head off error since arcpy gets mad if we try to overwrite
    try:
        arcpy.Delete_management("{0}.shp".format(shapefile))
    except Exception as err:
        logging.debug(err.args[0])

    try:
        # create mazs shapefile -- save as temp since we'll do a bit more to it
        fields = [
            ["{0}.ALAND10".format(CENSUS_BLOCK_ROOT), "SUM"],
            ["{0}.AWATER10".format(CENSUS_BLOCK_ROOT), "SUM"],
            ["{0}.GEOID10".format(CENSUS_BLOCK_ROOT),
             "COUNT"],  # count block per maz
        ]
        if maz_or_taz == "maz":
            # list the taz for the maz
            fields.append(["{0}.taz".format(CROSSWALK_ROOT),
                           "FIRST"])  # verified taz are unique for maz above
        else:
            # count the mazs per taz
            fields.append(["{0}.maz".format(CROSSWALK_ROOT), "COUNT"])

        arcpy.Dissolve_management(blocks_maz_layer,
                                  "{0}_temp".format(shapefile),
                                  "{0}.{1}".format(CROSSWALK_ROOT, maz_or_taz),
                                  fields, "MULTI_PART", "DISSOLVE_LINES")
        logging.info("Dissolved {0}s into {1}_temp.shp".format(
            maz_or_taz, shapefile))

        # calculate partcount
        my_layer = "my_{0}_layer".format(maz_or_taz)
        arcpy.MakeFeatureLayer_management("{0}_temp.shp".format(shapefile),
                                          my_layer)
        arcpy.AddField_management(my_layer, "partcount", "SHORT", 6)
        arcpy.CalculateField_management(my_layer, "partcount",
                                        "!Shape.partCount!", "PYTHON3")
        logging.info("Calculated part count for {0}s".format(maz_or_taz))

        # add perimeter.  In meters because ALAND10 is square meters
        arcpy.AddGeometryAttributes_management(my_layer,
                                               "PERIMETER_LENGTH_GEODESIC",
                                               "METERS")
        logging.info("Calulated perimeter length for {0}s".format(maz_or_taz))

        # add perimeter squared over area
        arcpy.AddField_management(my_layer, "psq_overa", "DOUBLE", 10, 0)
        arcpy.CalculateField_management(my_layer, "psq_overa",
                                        "!PERIM_GEO!*!PERIM_GEO!/!ALAND10!",
                                        "PYTHON3")
        logging.info("Calculated perim*perim/area for {0}s".format(maz_or_taz))

        # add acres from ALAND10
        SQUARE_METERS_PER_ACRE = 4046.86
        arcpy.AddField_management(my_layer, "acres", "DOUBLE", 10, 5)
        arcpy.CalculateField_management(
            my_layer, "acres", "!ALAND10!/{}".format(SQUARE_METERS_PER_ACRE))
        logging.info("Calculated acres for {0}s".format(maz_or_taz))

        # delete maz/taz=0, that's not a real maz/taz
        arcpy.SelectLayerByAttribute_management(my_layer, "NEW_SELECTION",
                                                "{0} > 0".format(maz_or_taz))
        logging.info("Selected out water for {0}s".format(maz_or_taz))

        # Write the selected features to a new feature class and rename fields for clarity
        # todo: the alias names don't seem to be getting picked up, not sure why
        old_to_new = {
            "GEOID10": ["blockcount", "block count"],
            "PERIM_GEO": ["PERIM_GEO", "perimeter in meters"],
            "psq_overa": ["psq_overa", "perimeter squared over area"]
        }

        if maz_or_taz == "taz": old_to_new["maz"] = ["mazcount", "maz count"]

        rename_fields(my_layer, shapefile, old_to_new)
        logging.info("Saving final {0}s into {1}.shp".format(
            maz_or_taz, shapefile))

        # delete the temp
        arcpy.Delete_management("{0}_temp.shp".format(shapefile))

        # don't care if this fails, just want to head off error since arcpy gets mad if we try to overwrite
        try:
            arcpy.Delete_management("{0}.json".format(shapefile))
        except Exception as err:
            logging.debug(err.args[0])

        # create geojson
        arcpy.FeaturesToJSON_conversion("{0}.shp".format(shapefile),
                                        "{0}.json".format(shapefile),
                                        format_json="FORMATTED",
                                        geoJSON="GEOJSON")
        logging.info("Created {0}.json".format(shapefile))

    except Exception as err:
        logging.error(err.args[0])
Example #17
def generate_slice_spec_geojson_file(input_features, sharedstreetid,
                                     slice_fields_csv, output_geojson):
    """This function will create a slice specification compliant geojson file.
    :param - input_features - feature class that has all of the fields from the crosswalk file
    :param - sharedstreetid - unique street ID as defined by SharedStreets.
    :param - slice_fields_csv - the csv with fields required for the slice specification compliant geojson.
    :param - output_geojson - output slice based geojson where each line geometry has slices as properties
    :return - output_geojson -path to output geojson
    """
    try:
        arcpy.env.overwriteOutput = True
        output_temp_features = os.path.join("in_memory",
                                            "Temporary_Slice_Features")
        srl.arc_print("Reading input features...")
        pre_fields = [
            f.name for f in arcpy.ListFields(input_features)
            if f.type not in ["OID", "Geometry"]
            and f.name.lower() not in ["shape_area", "shape_length"]
        ]
        fields = ["SHAPE@"] + pre_fields
        cw_df = srl.arcgis_table_to_df(input_features, fields)
        cw_groups = cw_df.groupby(sharedstreetid)
        sr = arcpy.Describe(input_features).spatialReference
        output_path, output_name = os.path.split(output_temp_features)
        arcpy.CreateFeatureclass_management(output_path,
                                            output_name,
                                            "POLYLINE",
                                            spatial_reference=sr)
        srl.arc_print("Adding fields to intermediate features...")
        slice_fields = srl.add_fields_from_csv(output_temp_features,
                                               slice_fields_csv)
        slice_fields = ["SHAPE@"] + slice_fields
        with arcpy.da.InsertCursor(output_temp_features,
                                   slice_fields) as insertCursor:
            srl.arc_print(
                "Established insert cursor for intermediate slice file...",
                True)
            lineCounter = 0
            for street_id, street_group in cw_groups:
                lineCounter += 1
                try:
                    shape = street_group["SHAPE@"].iloc[0]
                    cw_fields = [
                        "type", "width", "height", "direction", "material",
                        "meta"
                    ]
                    slice_group = street_group[cw_fields]

                    json_slices = slice_group.to_json(orient="records")
                    slice_row = [shape, street_id, json_slices]
                    insertCursor.insertRow(slice_row)
                    if lineCounter % 500 == 0:
                        srl.arc_print(
                            "Iterated through feature " + str(lineCounter) +
                            ".", True)
                except Exception as e:
                    srl.arc_print(
                        "Failed to iterate through feature " +
                        str(lineCounter) + ".", True)
                    arcpy.AddWarning(str(e.args[0]))
            del insertCursor, fields, pre_fields, lineCounter
        srl.arc_print("Exporting intermediate feature class to geojson...")
        arcpy.FeaturesToJSON_conversion(output_temp_features,
                                        output_geojson,
                                        format_json="FORMATTED",
                                        geoJSON="GEOJSON",
                                        outputToWGS84="WGS84",
                                        use_field_alias="USE_FIELD_ALIAS")
        srl.arc_print("Script Complete!")
    except Exception as e:
        srl.arc_print("Tool Script Error!")
        import traceback, sys
        tb = sys.exc_info()[2]
        srl.arc_print("An error occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("The error occurred on line {0}...".format(
            tb.tb_lineno))
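
# Usage sketch (hypothetical paths): the input features must carry the
# crosswalk fields ("type", "width", "height", "direction", "material",
# "meta") read inside the loop above.
generate_slice_spec_geojson_file(
    r"C:\data\streets.gdb\crosswalk_streets",  # input_features
    "sharedstreetid",                          # unique SharedStreets ID field
    r"C:\data\slice_fields.csv",               # slice_fields_csv
    r"C:\data\slices.geojson")                 # output_geojson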
Example #18
    def house2geojson(sorted_csv, building_connections, building_connections_WGS84, house_geojson,
                     Path_for_levee_based_geojson, house_geojson_modified):
        ##First modify the arrays created in the class 'sort_csv' so that they are numerical and no longer strings.
        ##Then filter the dictionary containing the accumulated csv-data so that for each levee failure a geojson can
        ##be exported which contains only the buildings affected by that levee failure

        if arcpy.Exists(building_connections):
            print ("Process aborted. A feature class with the same name already exists")
        elif arcpy.Exists (building_connections_WGS84):
            print("Process aborted. A feature class with the same name already exists")
        else:
            arcpy.management.XYTableToPoint(sorted_csv, building_connections, "X_centroid", "Y_centroid",
                                        coordinate_system=21781)
            arcpy.Project_management(building_connections, building_connections_WGS84, 4326)
            arcpy.FeaturesToJSON_conversion(building_connections_WGS84, house_geojson, format_json="FORMATTED",
                                        geoJSON="GEOJSON")
        print("Geojson successfully written. Start modyfing Geojson...")

        with open(house_geojson, mode='r+') as gj:
            geojson_dictionary = json.load(gj)
            list_int = ['Levee_Failure', 'LF_Max']
            list_float = ['MaxDepth', 'Damage']

            for part in geojson_dictionary['features']:
                for key, value in part['properties'].items():
                    if key in list_float:
                        temp = value
                        temp1 = temp.strip('[ ] ,')
                        temp2 = temp1.split(",")
                        temp3 = [float(i) for i in temp2]
                        part['properties'][key] = temp3
                    elif key in list_int:
                        temp5 = value
                        temp6 = temp5.strip('[ ] ,')
                        temp7 = temp6.split(",")
                        temp8 = [int(i) for i in temp7]
                        part['properties'][key] = temp8
            print("The GeoJson file was transformed to a dictionary and modified.")
            print("Beginn to write to geojson file...")

            ##calculating the maximum number of levee failure that is affected
            temp_max = []
            temp_min = []
            for part in geojson_dictionary['features']:
                temp_max.append(max(part['properties']['Levee_Failure']))
                temp_min.append(min(part['properties']['Levee_Failure']))
            maximum = max(temp_max)
            minimum = min(temp_min)
            i = minimum

            ##select only those buildings which are affected by the levee failure i and writing a new geojson file
            # for each levee failure
            # while i <= maximum:
            #     features = []
            #     for part in geojson_dictionary['features']:
            #         for item in part['properties']['Levee_Failure']:
            #             if item == i:
            #                 features.append(part)
            #     if i <=4:
            #         new_geojson = Path_for_levee_based_geojson + '/Connection_Building_to_HasliAare_LF' + str(i)\
            #                       + '.geojson'
            #     elif i <=42:
            #         new_geojson = Path_for_levee_based_geojson + '/Connection_Building_to_lowerAare_LF' + str(i)\
            #                       + '.geojson'
            #     new_dictionary = copy.deepcopy(geojson_dictionary)
            #     new_dictionary['features'] = features
            #     file = open(new_geojson, "w+")
            #     json.dump(new_dictionary, file, indent=3)
            #     print('geojson file for levee number ' + str(
            #         i) + ' has successfully been created. Moving to the next number...')
            #     i = i + 1


            ##finally create the file with all houses affected by levees at the HasliAare or Untere Aare and close it
            mod_file = open(house_geojson_modified, "w+")
            json.dump(geojson_dictionary, mod_file, indent=3)
            print("Original geojson file successfully overwritten.")
Example #19
# modify paths as needed. Since arcGIS is windows only, transfer the locations_*.csv files over

import arcpy
from arcpy import env
import os
arcpy.env.workspace = "c:/data"

# FOR PLACE DATA
# 0. Install and run arcGIS pro: http://pro.arcgis.com/en/pro-app/get-started/install-and-sign-in-to-arcgis-pro.htm
# 1. create a new TwoRavens arcGIS project. This will create TwoRavens.gdb, where data is stored.
# 2. From the top ribbon, go to Analysis > Tools, this will open a new tab on the right.
# 3. Geocoding Tools > Geocode Addresses, provide the input table icews_filtered.csv and select the esri online geocoder, make sure the fields match up
# 4. after running, the data is located within TwoRavens.gdb under a 'File Geodatabase Feature Class' (you can't make this up)
# 5. save to json via the python console within arcGIS:
arcpy.FeaturesToJSON_conversion(
    os.path.join("TwoRavens.gdb", "locations_icews_filtered_GeocodeAddresse"),
    "icews_coded.json")
arcpy.FeaturesToJSON_conversion(
    os.path.join("TwoRavens.gdb", "locations_acled_filtered_GeocodeAddresse"),
    "acled_coded.json")

# FOR COORDINATE DATA
# use the python console within arcGIS:
location_path = r"C:/Users/mike/Documents/locations/locations_cline_filtered.csv"
x_coords = "Longitude"
y_coords = "Latitude"
out_Layer = "Cline_xy_layer"
saved_Layer = r"C:/data/Cline_xy.lyr"
spRef = arcpy.SpatialReference("WGS 1984")
arcpy.MakeXYEventLayer_management(location_path, x_coords, y_coords, out_Layer,
                                  spRef)
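
# A possible continuation (a sketch; output names are hypothetical): persist
# the event layer, copy it to a feature class, and export it to JSON with the
# same tool used above for the place data.
arcpy.SaveToLayerFile_management(out_Layer, saved_Layer)
arcpy.CopyFeatures_management(out_Layer, "Cline_xy_points")
arcpy.FeaturesToJSON_conversion("Cline_xy_points", "cline_coded.json")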
Example #20
def create_geo_jason_file(Input_Polygon_path):

    arcpy.env.overwriteOutput = True
    product_dir = os.path.dirname(Input_Polygon_path)
    Names_in = os.path.basename(Input_Polygon_path).split('_')
    n_charc = len(Names_in)
    version = Names_in[n_charc - 1][0:4]
    TOLERANCEs = [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05]

    head_name_cat = "finalcat_info"
    head_name_riv = "finalcat_info_riv"
    head_name_slake = "sl_connected_lake"
    head_name_nlake = "sl_non_connected_lake"

    Input_file_name = []
    Output_file_name = []
    if 'v' in version:
        Input_file_name = [
            head_name_cat + "_" + version + '.shp',
            head_name_riv + "_" + version + '.shp',
            head_name_slake + "_" + version + '.shp',
            head_name_nlake + "_" + version + '.shp',
        ]
        Output_file_name = [
            head_name_cat + "_" + version + '.geojson',
            head_name_riv + "_" + version + '.geojson',
            head_name_slake + "_" + version + '.geojson',
            head_name_nlake + "_" + version + '.geojson',
        ]
    else:
        Input_file_name = [
            head_name_cat + '.shp',
            head_name_riv + '.shp',
            head_name_slake + '.shp',
            head_name_nlake + '.shp',
        ]
        Output_file_name = [
            head_name_cat + '.geojson',
            head_name_riv + '.geojson',
            head_name_slake + '.geojson',
            head_name_nlake + '.geojson',
        ]
    created_jason_files = []
    created_jason_files_lake_riv = []

    for i in range(0, len(Input_file_name)):
        input_path = os.path.join(product_dir, Input_file_name[i])
        output_jason_path = os.path.join(product_dir, Output_file_name[i])
        if not os.path.exists(input_path):
            continue
        created_jason_files.append(output_jason_path)

        if 'finalcat_info_riv' in Input_file_name[
                i] or 'connected_lake' in Input_file_name[i]:
            created_jason_files_lake_riv.append(output_jason_path)

        # reproject to WGS84
        input_wgs_84 = os.path.join(tempfile.gettempdir(), "input_wgs_84.shp")
        arcpy.Project_management(input_path, input_wgs_84,
                                 arcpy.SpatialReference(int(4326)))

        if 'finalcat_info' in Input_file_name[
                i] or "finalcat_info_riv" in Input_file_name[i]:
            arcpy.AddField_management(input_wgs_84, 'rvhName', "TEXT")
            arcpy.CalculateField_management(input_wgs_84, 'rvhName',
                                            "'sub' + str(int(\"!SubId!\"))",
                                            "PYTHON3")

        arcpy.RepairGeometry_management(input_wgs_84)

        for TOLERANCE in TOLERANCEs:
            input_wgs_84_simplify = os.path.join(tempfile.gettempdir(),
                                                 "input_wgs_84_simplify.shp")
            if arcpy.Exists(input_wgs_84_simplify):
                arcpy.Delete_management(input_wgs_84_simplify)

            if "finalcat_info_riv" not in Input_file_name[i]:
                CA.SimplifyPolygon(input_wgs_84, input_wgs_84_simplify,
                                   "POINT_REMOVE", TOLERANCE)
            else:
                CA.SimplifyLine(input_wgs_84, input_wgs_84_simplify,
                                "POINT_REMOVE", TOLERANCE)

            arcpy.FeaturesToJSON_conversion(input_wgs_84_simplify,
                                            output_jason_path, "FORMATTED",
                                            "NO_Z_VALUES", "NO_M_VALUES",
                                            "GEOJSON")

            json_file_size = os.stat(
                output_jason_path).st_size / 1024 / 1024  #to MB
            if json_file_size <= 100:
                break

    if len(created_jason_files_lake_riv) > 1 and os.stat(
            os.path.join(product_dir,
                         Output_file_name[1])).st_size / 1024 / 1024 < 500:
        for i in range(0, len(created_jason_files_lake_riv)):
            injson2 = load(open(created_jason_files_lake_riv[i]))
            if 'finalcat_info_riv' in created_jason_files_lake_riv[i]:
                new_features = []
                for element in injson2["features"]:
                    if element["properties"]["Lake_Cat"] == 0:
                        new_features.append(element)
                injson2["features"] = new_features

            if i == 0:
                output_jason_lake_riv = injson2
            else:
                output_jason_lake_riv['features'] += injson2['features']

        with open(os.path.join(product_dir,
                               'routing_product_lake_river.geojson'),
                  'w',
                  encoding='utf-8') as f:
            json.dump(output_jason_lake_riv, f, ensure_ascii=False, indent=4)
    else:
        shutil.copy(
            created_jason_files_lake_riv[0],
            os.path.join(product_dir, 'routing_product_lake_river.geojson'))

    return
Example #21
  def execute(self, parameters, messages):

    records = parameters[0].valueAsText
    outJSON = parameters[1].valueAsText

    inDesc = arcpy.Describe(records)
    if (inDesc.dataType == "FeatureLayer"):
      arcpy.FeaturesToJSON_conversion(records, outJSON)
      return

    fieldTypeDict = { "SmallInteger":"esriFieldTypeSmallInteger", 
  "Integer":"esriFieldTypeInteger",
  "Single:":"esriFieldTypeSingle",
  "Double":"esriFieldTypeDouble",
  "String":"esriFieldTypeString",
  "Date":"esriFieldTypeDate",
  "OID":"esriFieldTypeOID",
  "Geometry":"esriFieldTypeGeometry",
  "Blob":"esriFieldTypeBlob",
  "Raster":"esriFieldTypeRaster",
  "Guid:":"esriFieldTypeGUID",
  "GlobalID":"esriFieldTypeGlobalID" }
    
    fields = arcpy.ListFields(records)
    fieldList = [field.name for field in fields]

    with arcpy.da.SearchCursor(records, fieldList) as cursor:
      with codecs.open(outJSON, 'w', 'utf-8') as jsonFile:

        jsonFile.write('{ "displayFieldName": "", ')
        jsonFile.write('"fieldAliases": {')
        lstString = []
        for field in fields:
          lstString.append( '"{}":"{}"'.format(field.name, field.aliasName) )
        jsonFile.write( ','.join(lstString) )

        jsonFile.write('},') # end fieldAliases

        jsonFile.write('"fields": [')
        for index in range(len(fields)):
          field = fields[index]
          
          jsonFile.write("{")
          jsonFile.write('"name":"{}",'.format(field.name) )
          jsonFile.write('"type":"{}",'.format(fieldTypeDict[field.type]) )
          jsonFile.write('"alias":"{}",'.format(field.aliasName) )
          jsonFile.write('"length":"{}"'.format(field.length) )               
          jsonFile.write("}")
          if (index != len(fields)-1):
            jsonFile.write(",")

        jsonFile.write('],') # end field

        jsonFile.write('"features": [')

        isFirstTime = True
        for row in cursor:
          if (isFirstTime):
            isFirstTime = False
          else:
            jsonFile.write( ',' )
            
          jsonFile.write('{ "attributes": {')
          #lstRow = []
          for index in range(len(row)):
            #lstRow.append(str(record))

            jsonFile.write( '"' )
            jsonFile.write( fields[index].name )
            jsonFile.write( '":' )
            if (fields[index].type == "String"):
              #messages.addMessage(row[index])
              jsonFile.write( '"' )
              jsonFile.write( row[index] )
              jsonFile.write( '"' )
              #lstRow.append( '"{}"'.format(row[index].encode('utf-8')))
            elif (fields[index].type == "Date"):
              EPOCH = datetime.datetime(1970, 1, 1, tzinfo = None) #tzinfo=pytz.timezone('utc'))
              # Esri JSON encodes dates as milliseconds since the epoch
              epochMsec = int((row[index] - EPOCH).total_seconds() * 1000)
              jsonFile.write( str(epochMsec) )
            else:
              #lstRow.append( '{}'.format(row[index]))
              jsonFile.write( '{}'.format(row[index]) )
            if (index != len(row)-1):
              jsonFile.write( ',' )
              
          #jsonFile.write( ','.join(lstRow) )
                         
          jsonFile.write('} }')
        jsonFile.write('] }') #end features
Example #22
    def ArcGISPoint2Line(Current_lv, DF_LeveeFailures, CompleteDataframe,
                         ArcGISTablepath, ArcGISPointFC, ArcGISLineFCPath,
                         ArcGISLineFC_joinPath, ArcGISLineFC_joinPath_WGS84,
                         LeveeFailureLinesGeojson):
        DF_Completed = None
        Point = None
        start_time = time.time()

        #first of all, let's create an ArcGIS table from the panda dataframe
        FilesList = [
            ArcGISTablepath, ArcGISPointFC, ArcGISLineFCPath,
            ArcGISLineFC_joinPath, ArcGISLineFC_joinPath_WGS84,
            LeveeFailureLinesGeojson
        ]
        print(psutil.virtual_memory())
        for objects in FilesList:
            if arcpy.Exists(objects):
                arcpy.Delete_management(objects)
                print("The file " + objects + " was deleted")
        DF_Completed = CompleteDataframe
        x = np.array(np.rec.fromrecords(DF_Completed.values))
        names = DF_Completed.dtypes.index.tolist()
        x.dtype.names = tuple(names)
        arcpy.da.NumPyArrayToTable(x, ArcGISTablepath)
        print("ArcGIS table was successfully written")
        print(
            "--- %s seconds have elapsed within the ArcGISPoint2Line method ---"
            % (time.time() - start_time))

        X_CentroidOfLeveeFailure = DF_LeveeFailures.at[Current_lv, 'X_center']
        Y_CentroidOfLeveeFailure = DF_LeveeFailures.at[Current_lv, 'Y_center']
        arcpy.AddField_management(ArcGISTablepath, "X_levee", "long")
        arcpy.AddField_management(ArcGISTablepath, "Y_levee", "long")
        arcpy.CalculateField_management(ArcGISTablepath, "X_levee",
                                        X_CentroidOfLeveeFailure)
        arcpy.CalculateField_management(ArcGISTablepath, "Y_levee",
                                        Y_CentroidOfLeveeFailure)
        EventLayer = "Event_Layer"
        arcpy.MakeXYEventLayer_management(
            ArcGISTablepath,
            "X_centroid",
            "Y_centroid",
            EventLayer,
            spatial_reference=arcpy.SpatialReference(21781))
        arcpy.CopyFeatures_management(EventLayer, ArcGISPointFC)
        print(
            "--- %s seconds have elapsed within the ArcGISPoint2Line method ---"
            % (time.time() - start_time))

        print(psutil.virtual_memory())
        #arcpy.env.overwriteOutput = True
        #arcpy.env.qualifiedFieldNames = False
        arcpy.XYToLine_management(ArcGISPointFC,
                                  ArcGISLineFCPath,
                                  "X_levee",
                                  "Y_levee",
                                  "X_centroid",
                                  "Y_centroid",
                                  id_field="V25OBJECTI")
        JoinedLineFC = arcpy.AddJoin_management(ArcGISLineFCPath, "V25OBJECTI",
                                                ArcGISTablepath, "V25OBJECTI")
        print("Successful 1")
        arcpy.CopyFeatures_management(JoinedLineFC, ArcGISLineFC_joinPath)
        print("Successful 2")
        arcpy.Project_management(ArcGISLineFC_joinPath,
                                 ArcGISLineFC_joinPath_WGS84, 4326)
        print("Successful 3")
        arcpy.FeaturesToJSON_conversion(ArcGISLineFC_joinPath_WGS84,
                                        LeveeFailureLinesGeojson,
                                        format_json="FORMATTED",
                                        geoJSON="GEOJSON")
        print("Successful 4")
        print(
            "Line Feature Class was successfully created, projected to WGS84 and exported as GEOJSON"
        )
        print(
            "--- %s seconds have elapsed within the ArcGISPoint2Line method ---"
            % (time.time() - start_time))
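
The DataFrame-to-table step above relies on arcpy.da.NumPyArrayToTable receiving a structured NumPy array whose dtype names match the intended field names. A minimal sketch of that conversion in isolation; the table path and column values are illustrative:

import numpy as np
import pandas as pd
import arcpy

df = pd.DataFrame({"V25OBJECTI": [1, 2], "Damage": [1200.5, 830.0]})

# np.rec.fromrecords builds a record array from the row tuples; reassigning
# dtype.names carries the DataFrame's column names over to the ArcGIS table
arr = np.array(np.rec.fromrecords(df.values))
arr.dtype.names = tuple(df.dtypes.index.tolist())

arcpy.da.NumPyArrayToTable(arr, r"C:\temp\scratch.gdb\damage_table")  # illustrative path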
Esempio n. 23
0
        arcpy.AddField_management(newShp, "name", "TEXT", fieldPrecision, "", "",
            "名称", "NULLABLE")
        # Create an insert cursor for the new shapefile
        insert_cursor = arcpy.InsertCursor(newShp)
        # Process each source feature: insert one copy per storey
        for x in range(0, _cs):
            rowX = row  # `row` comes from the enclosing cursor loop (not shown in this snippet)
            rowX.setValue("name", _name)
            rowX.setValue("cg", _cg)
            rowX.setValue("zg", (x+1) * _cg)
            rowX.setValue("cs", (x+1))
            insert_cursor.insertRow(rowX)
            print u"Overall progress {0} / {1}; generating building volumes for feature {2}, storey {3} / {4}".format(feature_index, features_cnt, _name, x+1, _cs)
        feature_index = feature_index + 1
        return

# Get the total number of input shapefiles
totalNum = len(arcpy.ListFiles("*.shp"))
# Iterate over every input shapefile
for in_features in arcpy.ListFiles("*.shp"):
    # Create the output shapefile for this input
    global newShp
    newShp = arcpy.CreateFeatureclass_management(OutputSpace, str(random.randint(1,100)), "POLYGON", "", "", "", sr)
    one2N(in_features)
    # Convert the result to JSON (join the path properly instead of concatenating strings)
    arcpy.FeaturesToJSON_conversion(newShp, os.path.join(OutputSpace, "output.json"), "FORMATTED")
 
 
endTime = datetime.datetime.now()
exeTime = (endTime - startTime).seconds
print u"Done. Total elapsed time:", exeTime, u"seconds"
Esempio n. 24
0
        # jsonfilepath = r"S:\Gis\Python\AGO\Python_Host\\" + "json_file" + fc + ".geojson"
        # jsonFilePath = r"W:\FOR\RSI\DKL\General_User_Data\gamos\GREG_JSONs_for_AGO\fc_{}.geojson".format(fc)
        # jsonFilePath = r"T:\_test\fc_{}.geojson".format(fc)
        jsonFilePath = os.path.join(outFolder, "fc_{}_{}.geojson".format(fc, count))

        # Build the feature class path for input into the geoprocessing tool
        FeatureClass = str(os.path.join(arcpy.env.workspace, fc))
        featureLayer = arcpy.MakeFeatureLayer_management(FeatureClass,"in_memory/_fl")
        
        # Ensure the JSON file you are uploading to AGO is new (to prevent a "RuntimeError: Item 'xyz' already exists" when executing fc_item.publish() )
        while os.path.exists(jsonFilePath):
            count += 1
            jsonFilePath = os.path.join(outFolder, "fc_{}_{}.geojson".format(fc, count))
        print("JSON to write: {}".format(jsonFilePath))

        # Convert FC to GeoJSON
        geojsonfile = arcpy.FeaturesToJSON_conversion(featureLayer, jsonFilePath, geoJSON="GEOJSON")

        # Connect to GovernmentofBC AGO Portal
        # Syntax: arcgis.gis.GIS(url=None, username=None, password=None, key_file=None, cert_file=None, verify_cert=True, set_active=True, client_id=None, profile=None, **kwargs)
        ago_gis = GIS('https://governmentofbc.maps.arcgis.com/',username,password)
        print("Logged in as {}".format(ago_gis.properties.user.username))
        print("Other info:\n{}\n".format(ago_gis.properties.user))

        theGroup = ago_gis.groups.search('title:Invasive Plant Program ', max_groups=10)
        if len(theGroup) == 1:
            targetGroup = theGroup[0] # you want to upload the feature layer to one specific group
            print(targetGroup, type(targetGroup))

        # Item Properties (Dictionary format). Type is Important for AGO to recognize the Data Format
        # Title must not be the same as the name of the feature class (this causes an error), so append the upload date to it.
        itemPropertiesDict = {'title':'{}_{}'.format(fc, time.strftime("%Y_%m_%d")),  
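
The item-properties dictionary is cut off above. For context, a hedged sketch of how a GeoJSON file is typically added and published with the ArcGIS API for Python; the title and tags below are illustrative, not the original's:

item_properties = {
    'title': '{}_{}'.format(fc, time.strftime("%Y_%m_%d")),
    'type': 'GeoJson',          # the AGO item type for GeoJSON uploads
    'tags': 'invasive plants',  # illustrative
}
fc_item = ago_gis.content.add(item_properties, data=jsonFilePath)
published_item = fc_item.publish()  # creates a hosted feature layer from the item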
Esempio n. 25
0
    def execute(self, parameters, messages):
        """The source code of the tool."""

        arcpy.AddMessage("default.gdb_path: %s" % arcpy.env.workspace)

        arcpy.ImportToolbox(os.path.join(os.path.dirname(__file__), "URB.pyt"))
        arcpy.gp.toolbox = os.path.join(os.path.dirname(__file__), "URB.pyt")

        extent = parameters[0].value
        srs = parameters[1].value

        arcpy.AddMessage("control: %s %s" % (extent, srs))

        ext_poly = ext2poly(extent, arcpy.SpatialReference(3003))

        sel_fc = create_fc(ws="scratch")
        ext_fc_cursor = arcpy.da.InsertCursor(sel_fc, ("SHAPE@"))
        ext_fc_cursor.insertRow([ext_poly])
        del ext_fc_cursor

        sel_lyr = arcpy.mapping.Layer(sel_fc)
        arcpy.AddMessage("sel_lyr: %s" % str(sel_lyr))

        check_layer_list = [
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.UN_VOL",
                "UN_VOL_AV", 0
            ],
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.AATT",
                "", 1
            ],
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.MN_EDI_NOVOL",
                "", 2
            ],
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.MN_UVOL",
                "MN_UVO_ALT", 3
            ],
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.AR_VRD",
                "", 4
            ],
            #[r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.AR_MARC", "", 5],
            #[r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.AC_VEI", "", 6],
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.CL_AGR",
                "", 7
            ],
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.A_PED",
                "", 8
            ],
            [
                r"Connessioni database\VISIO_R_GDBT.sde\SIT.DBTOPOGRAFICO\SIT.PS_INC",
                "", 9
            ],
        ]

        sel_fc = get_jobfile("memory")
        sel_fc_fields = (
            ("Layer", "TEXT", None, None, 10, "", "NULLABLE", "NON_REQUIRED"),
            ("Color", "SHORT", None, None, None, "", "NULLABLE",
             "NON_REQUIRED"),
            ("TxtValue", "TEXT", None, None, 10, "", "NULLABLE",
             "NON_REQUIRED"),
        )
        intersectOutput_clean = create_fc("memory", fields=sel_fc_fields)

        sel_note = get_jobfile("memory")
        sel_note_fields = (
            ("Layer", "TEXT", None, None, 50, "", "NULLABLE", "NON_REQUIRED"),
            ("Color", "SHORT", None, None, None, "", "NULLABLE",
             "NON_REQUIRED"),
            ("TxtValue", "TEXT", None, None, 255, "", "NULLABLE",
             "NON_REQUIRED"),
            ("CADType", "TEXT", None, None, 50, "", "NULLABLE",
             "NON_REQUIRED"),
        )
        intersectOutput_note = create_fc("memory",
                                         fields=sel_note_fields,
                                         geom_type="POINT")
        cursor_note = arcpy.da.InsertCursor(
            intersectOutput_note,
            ("Layer", "Color", "TxtValue", "CADType", "SHAPE@"))

        for check_layer_def in check_layer_list:
            check_layer = check_layer_def[0]
            arcpy.AddMessage("check_layer: %s" % check_layer)
            desc = arcpy.Describe(check_layer)
            inFeatures = [check_layer, sel_lyr]
            intersectOutput = get_jobfile("memory")
            clusterTolerance = 0
            arcpy.Intersect_analysis(inFeatures, intersectOutput, "",
                                     clusterTolerance, "input")

            if check_layer_def[1]:
                field_def = ("Layer", "Color", "TxtValue", "SHAPE@")
                check_def = [check_layer_def[1], "SHAPE@"]
            else:
                field_def = ("Layer", "Color", "SHAPE@")
                check_def = ["SHAPE@"]

            cursor_clean = arcpy.da.InsertCursor(intersectOutput_clean,
                                                 field_def)

            with arcpy.da.SearchCursor(intersectOutput, check_def) as cursor:
                for row in cursor:
                    if check_layer_def[1]:
                        row_def = [
                            desc.name.replace("SIT.", ""), check_layer_def[2],
                            str(row[0]), cursor[1]
                        ]
                        note_def = row_def[:-1] + [
                            "TEXT",
                            arcpy.PointGeometry(cursor[1].centroid)
                        ]
                        cursor_note.insertRow(note_def)
                    else:
                        row_def = [
                            desc.name.replace("SIT.", ""), check_layer_def[2],
                            cursor[0]
                        ]
                    cursor_clean.insertRow(row_def)

        del cursor_clean
        del cursor_note

        extraction_json_filepath = get_jobfile("output", "json")
        arcpy.FeaturesToJSON_conversion(intersectOutput_clean,
                                        extraction_json_filepath,
                                        format_json="FORMATTED",
                                        geoJSON="GEOJSON")

        arcpy.AddMessage(extraction_json_filepath)
        parameters[2].value = extraction_json_filepath

        extraction_dxf_filepath = get_jobfile("output", "dxf")
        arcpy.ExportCAD_conversion(
            [intersectOutput_clean, intersectOutput_note], "DXF_R2004",
            extraction_dxf_filepath, "USE_FILENAMES_IN_TABLES",
            "OVERWRITE_EXISTING_FILES", "")
        parameters[3].value = extraction_dxf_filepath

        lyr = arcpy.mapping.Layer(intersectOutput_clean)
        parameters[4].value = intersectOutput_clean
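
ext2poly, create_fc, and get_jobfile are project helpers not shown in this example. A minimal sketch of what an extent-to-polygon helper such as ext2poly typically looks like; the body is an assumption, only the name and call signature appear in the code above:

def ext2poly(extent, spatial_reference):
    # Walk the four corners of the extent and close the ring
    corners = arcpy.Array([
        arcpy.Point(extent.XMin, extent.YMin),
        arcpy.Point(extent.XMin, extent.YMax),
        arcpy.Point(extent.XMax, extent.YMax),
        arcpy.Point(extent.XMax, extent.YMin),
        arcpy.Point(extent.XMin, extent.YMin),
    ])
    return arcpy.Polygon(corners, spatial_reference)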
Esempio n. 26
0
    def onLine(self, line_geometry):
        root = 'C:/Users/luiza/Desktop/Plugin_clipagem/recortes'
        shape = 'C:/Users/luiza/Desktop/Plugin_clipagem/recortes/polygons.shp'
        arcpy.env.compression = "NONE"
        arr = arcpy.Array()
        shape = 'polygons'
        mxd = arcpy.mapping.MapDocument('current')
        numClips = self.countClip(mxd)
        print("numClips:")
        print(numClips)
        print("self last clip:")
        print(self.lastClip)

        # If the layer count is 0, there are no clipped layers, so every file in this folder has to be deleted
        #if(numClips == 0):
        #   for dir in os.listdir(root):
        #       shutil.rmtree(root + dir)

        part = line_geometry.getPart(0)
        for pt in part:
            print(pt)
            arr.add(pt)

        arr.add(line_geometry.firstPoint)
        arr.remove(arr.count - 2)
        print("arr[-1]")
        print arr[-1]
        polygon = arcpy.Polygon(arr)
        #currentClip = numClips + 1
        currentClip = self.getCurrentClip()

        currentClipFolder = root + "/clip" + str(currentClip)
        polygonName = 'polygons' + str(currentClip)
        polygonShp = currentClipFolder + '/' + polygonName + '.shp'

        # Check whether a folder with the same name already exists
        if arcpy.Exists(currentClipFolder):
            shutil.rmtree(currentClipFolder)

        os.mkdir(currentClipFolder)
        arcpy.CopyFeatures_management(polygon, polygonShp)

        df = arcpy.mapping.ListDataFrames(mxd)[0]
        ext = polygon.extent
        print("polygonshp:")
        print(polygonShp)
        poly_lyr = arcpy.mapping.ListLayers(mxd, polygonName)[0]

        rect = str(ext.XMin) + " " + str(ext.YMin) + " " + str(
            ext.XMax) + " " + str(ext.YMax)
        print("rect:")
        print rect

        ### SAVE THE JSON
        arcpy.env.workspace = "C:/Users/luiza/Desktop/Plugin_clipagem/recortes"
        # os.path.splitext avoids truncating at the first dot anywhere in the path
        jsonname = os.path.splitext(polygonShp)[0] + ".json"
        jsonformatted = os.path.splitext(polygonShp)[0] + "formatted" + ".json"
        arcpy.FeaturesToJSON_conversion(polygonShp, jsonname)
        arcpy.FeaturesToJSON_conversion(polygonShp, jsonformatted, "FORMATTED")
        print("Json file created!")

        arcpy.mapping.MapDocument('current').save()
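
The rect string above is in the "XMin YMin XMax YMax" form expected by the rectangle parameter of tools such as Clip (Data Management); the clipping step itself is not shown here. A hedged sketch of how that rectangle is typically consumed, with an illustrative raster path:

raster_in = "C:/data/ortho.tif"  # illustrative input, not from the original
raster_out = currentClipFolder + "/clip.tif"
# Clip to the rectangle, then trim to the drawn polygon itself
arcpy.Clip_management(raster_in, rect, raster_out,
                      in_template_dataset=polygonShp,
                      clipping_geometry="ClippingGeometry")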
Esempio n. 27
0
                    arcpy.AddField_management(args.layer, "temp", new_type)
                    # calculate the value based on the old field
                    arcpy.CalculateField_management(args.layer, "temp", 'int(round(!{}!))'.format(f_name), "PYTHON3")
                    # rename the old field out of the way (kept with an "_old" suffix rather than deleted)
                    arcpy.AlterField_management(args.layer, f_name, f_name+"_old", f_aliasName+"_old")
                    # rename the new field to take its place
                    arcpy.AlterField_management(args.layer, "temp", f_name, f_aliasName)

    if args.layer in arcpy.ListFeatureClasses():

        result = arcpy.GetCount_management(os.path.join(args.geodatabase, args.layer))
        print("Feature Class [{}] has {} rows".format(os.path.join(args.geodatabase, args.layer), result[0]))

        if args.format == "geojson":
            outfile = "{}.geojson".format(args.layer)
            arcpy.FeaturesToJSON_conversion(os.path.join(args.geodatabase, args.layer), outfile, geoJSON='GEOJSON')
            print("Wrote {}".format(outfile))

        if args.format == "shp":
            outfile = "{}.shp".format(args.layer)
            arcpy.FeatureClassToShapefile_conversion(os.path.join(args.geodatabase, args.layer), Output_Folder=".")
            print("Wrote {}".format(outfile))

        if args.format == "csv":
            outfile = os.path.join(".","{}.csv".format(args.layer))
            arcpy.CopyRows_management(os.path.join(args.geodatabase, args.layer), outfile)
            print("Wrote {}".format(outfile))

    if args.layer in arcpy.ListTables():

        result = arcpy.GetCount_management(os.path.join(args.geodatabase, args.layer))
Esempio n. 28
0
    def UniteBuildingsPerLevee (accum_csv, Levee_failures_shp, LF_shp_WGS84, geojson, Current_LF):
        out_cor_system = arcpy.SpatialReference(4326)
        if not arcpy.Exists(LF_shp_WGS84):
            arcpy.Project_management(Levee_failures_shp, LF_shp_WGS84, out_coor_system=out_cor_system)
        else:
            print ("Dataset " + str(LF_shp_WGS84) + " already exists")
        if not arcpy.Exists(geojson):
            arcpy.FeaturesToJSON_conversion(LF_shp_WGS84, geojson, format_json="FORMATTED",
                                        geoJSON="GEOJSON")
        else:
            print("Dataset " + str(geojson) + " already exists")
        with open(geojson, mode='r+') as gj:
            geojson_dictionary = json.load(gj)

        df_csv = pd.read_csv(accum_csv)
        dict_csv = df_csv.to_dict()

        fields_to_delete=['Wert', 'X_centroid', 'Y_centroid', 'MaxDepth', 'deg_of_loss']
        for items in fields_to_delete:
            if items in dict_csv:
                del dict_csv[items]

        ## Create a nested dictionary with the information per levee failure
        new_dict = {}
        for i in range(1, 43):  # levee failures are numbered 1..42
            for k in range(len(dict_csv['Levee_Failure'])):
                if dict_csv['Levee_Failure'][k] == i:
                    print("Collecting row " + str(k) + " for levee failure " + str(i))
                    entry = new_dict.setdefault(str(i), {})
                    entry.setdefault('Levee_Failures', []).append(dict_csv['Levee_Failure'][k])
                    entry.setdefault('V25OBJECTI', []).append(dict_csv['V25OBJECTI'][k])
                    entry.setdefault('Damage', []).append(dict_csv['Damage'][k])

        ## Calculate total cost and affected-building count per levee failure
        for lf_key in new_dict.keys():
            print(lf_key)
            new_dict[lf_key]['Total_Damage'] = sum(new_dict[lf_key]['Damage'])
            new_dict[lf_key]['Affected_Buildings'] = len(new_dict[lf_key]['Damage'])

        ##merge the two dictionaries
        for items in new_dict.keys():
            for parts in geojson_dictionary['features']:
                if parts['properties']['LF'] == int(items):
                    parts['properties']['Total_Damage'] = new_dict[items]['Total_Damage']
                    parts['properties']['Buildings'] = new_dict[items]['V25OBJECTI']
                    parts['properties']['Affected_Buildings'] = new_dict[items]['Affected_Buildings']

        # for parts in geojson_dictionary['features']:
        #     if 'Buildings' not in parts['properties'].keys():
        #         del geojson_dictionary['features'][parts]



        ## Keep only the features belonging to the current half of the levee failures
        if Current_LF <= 4:
            keep = [f for f in geojson_dictionary['features'] if f['properties']['LF'] < 5]
        else:
            keep = [f for f in geojson_dictionary['features'] if f['properties']['LF'] > 4]
        geojson_dictionary['features'] = keep
        print("successfully modified dictionary")

        ## Write the modified dictionary back to the geojson (a context manager ensures the file is closed)
        with open(geojson, "w") as mod_file:
            json.dump(geojson_dictionary, mod_file, indent=3)

        print ("GEOJSON successfully modified")
Esempio n. 29
0
ogrfile = arcpy.GetParameterAsText(4)
if arcpy.Exists(ogrfile):
    desc = arcpy.Describe(ogrfile)
    shfl1 = str(desc.catalogPath)
    extn = os.path.splitext(shfl1)[1]  # get extension of a file
    # if the extension is a shapefile, do not convert to JSON; otherwise convert

    if extn == ".shp":
        shfl = shfl1
    else:
        arcpy.AddMessage("Extracting json outlet file from: " + shfl1)
        basename = os.path.basename(shfl1)  # get the last part of the path
        dirname = os.path.dirname(shfl1)  # get the directory
        arcpy.env.workspace = dirname  # does not work without specifying the workspace
        arcpy.FeaturesToJSON_conversion(shfl1, basename +
                                        ".json")  # convert feature to json
        shfl = os.path.join(dirname, basename + ".json")

    arcpy.AddMessage("Using Outlets " + shfl)

delineate = arcpy.GetParameterAsText(5)
arcpy.AddMessage("Delineate Single Watershed: " + delineate)

# Input Number of Processes
inputProc = arcpy.GetParameterAsText(6)
arcpy.AddMessage("Number of Processes: " + inputProc)

# Outputs
ord = arcpy.GetParameterAsText(7)
arcpy.AddMessage("Output Stream Order Grid: " + ord)
Esempio n. 30
0
arcpy.env.workspace = r"R:\users\anagha.uppal\MapRE\MapRE_data\OUTPUTS\SAPP"
input_gdbs = arcpy.ListWorkspaces("*", "FileGDB")
print(input_gdbs)
output_folder = r"R:\users\anagha.uppal\MapRE\MapRE_data\OUTPUTS\SAPP\outputs_for_web"
outCS = arcpy.SpatialReference(4326)

for each in input_gdbs:
    print(each)
    foldername = each.rsplit("\\", 1)[1]
    foldername = foldername.split(".")[0]
    print(foldername)
    outdir = os.path.join(output_folder, foldername)
    if not os.path.exists(outdir):
        os.mkdir(outdir)  # os.mkdir returns None, so there is nothing to assign
    arcpy.env.workspace = each
    featureclasses = arcpy.ListFeatureClasses("*_attr*")
    print(featureclasses)
    for fc in featureclasses:
        print(fc)
        print(outdir)
        fcdir = os.path.join(outdir, fc)
        if not os.path.exists(fcdir):
            os.mkdir(fcdir)
        print(fcdir)
        shpout = os.path.join(fcdir, fc + ".shp")
        shp = arcpy.Project_management(fc, shpout, outCS)
        #shutil.make_archive(fc, 'zip', root_dir=outdir)
        jsonout = os.path.join(outdir, fc + ".json")
        print(jsonout)
        arcpy.FeaturesToJSON_conversion(shp, jsonout)
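
The commented-out shutil.make_archive call above would have zipped relative to the process's current directory. A hedged sketch of archiving each shapefile folder next to its outputs instead; this is an assumption about the intended behavior, not part of the original:

import shutil

# base_name is the zip path without the .zip extension; root_dir is the
# folder whose contents get archived (the per-feature-class shapefile folder)
shutil.make_archive(os.path.join(outdir, fc), "zip", root_dir=fcdir)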