Example #1
def refresh():
  #Checks for directory
  if not os.path.exists(directory):
    os.makedirs(directory)
  #Check if .gdb exists and creates one if it doesn't
  if not arcpy.Exists(outfile):
    arcpy.CreateFileGDB_management("C:/Data", "CCTV.gdb")
    arcpy.CreateFeatureDataset_management("C:/Data/CCTV.gdb", "SewerCollectionNetwork")
  #Secondary check to ensure .gdb was created
  if arcpy.Exists(outfile):
    for i in getArguments():
      copy(i)

  #Runs if update basemap option is set to true
  if baseMap:
    #Creates dataset if it doesn't exist
    if not arcpy.Exists(baseData):
      arcpy.CreateFeatureDataset_management("C:/Data/CCTV.gdb", "BaseMap")

    #Change workspace to the read-only wake connection
    arcpy.env.workspace = wake
    
    #Copies feature classes to dataset
    for b in baseLayers:
      arcpy.AddMessage('Copying %s' % b)
      outFeatureClass = os.path.join(baseData, b)
      arcpy.CopyFeatures_management(b, outFeatureClass)

    #Change workspace back to cctv
    arcpy.env.workspace = cctv


  #Points the current map document at the new workspace
  changeWorkSpace("C:/Data/CCTV.gdb")
Example #2
 def transG(gauss, utm, krrgjsh):
     name = coord["2"] + "_2_" + coord["1"] + "_7parameters"
     name2 = coord["1"] + "_2_" + coord["3"] + "_7parameters"
     arcpy.CADToGeodatabase_conversion(
         path, os.path.join(workingDir, "tempFolder\\test.gdb"), coord["2"],
         1000, gauss)
     arcpy.CreateFeatureDataset_management(
         os.path.join(workingDir, "tempFolder\\test.gdb"), coord["1"], utm)
     arcpy.CreateFeatureDataset_management(
         os.path.join(workingDir, "tempFolder\\test.gdb"), coord["3"],
         krrgjsh)
     try:
         arcpy.CreateCustomGeoTransformation_management(
             name, gauss, utm, utm_2_gauss(-1))
         arcpy.CreateCustomGeoTransformation_management(
             name2, utm, krrgjsh, utm_2_krrgjsh(-1))
     except:
         print("Transformimi ekziston")
     arcpy.env.workspace = workingDir + "\\tempFolder\\test.gdb\\" + coord[
         "2"]
     lyr = arcpy.ListFeatureClasses()
     for i in lyr:
         if i not in [
                 "Annotation", "MultiPatch", "Point", "Polygon", "Polyline"
         ]:
             continue
         arcpy.Project_management(
             i,
             os.path.join(workingDir, "tempFolder\\test.gdb", coord["1"],
                          i + "_" + coord["1"]), utm, name, gauss,
             "NO_PRESERVE_SHAPE", None, "NO_VERTICAL")
         arcpy.Project_management(
             os.path.join(workingDir, "tempFolder\\test.gdb", coord["1"],
                          i + "_" + coord["1"]),
             os.path.join(workingDir, "tempFolder\\test.gdb", coord["3"],
                          i + "_" + coord["3"]), krrgjsh, name2, utm,
             "NO_PRESERVE_SHAPE", None, "NO_VERTICAL")
     for j in range(3):
         if j == 1:
             continue
         arcpy.env.workspace = workingDir + "\\tempFolder\\test.gdb\\" + coord[
             str(j + 1)]
         lyr = arcpy.ListFeatureClasses()
         CADlayers = ""
         for i in lyr:
             CADlayers = CADlayers + arcpy.env.workspace + "\\" + i + ";"
         arcpy.ExportCAD_conversion(
             CADlayers, "DWG_R2013",
             os.path.join(workingDir,
                          dwgName[:-4] + "_" + coord[str(j + 1)] + ".dwg"),
             "Ignore_Filenames_in_Tables", "Overwrite_Existing_Files", "")
Example #3
def check_topology(input_file, workspace):
    """Create Database and check for overlapping features. This function
    is based on one previously created by Christian Kienholz, University
    of Alaska, Fairbanks, 03/2012"""
    # Create Database, add a data set and upload the features
    database = arcpy.CreateFileGDB_management(workspace, 'database.gdb')
    dataset = arcpy.CreateFeatureDataset_management(database, 'validation',
                                                    input_file)
    feature = str(dataset) + '\\feature'
    arcpy.CopyFeatures_management(input_file, feature)

    #Create topology and rules. Add feature to it
    topology = arcpy.CreateTopology_management(dataset, 'topology_rules')
    arcpy.AddFeatureClassToTopology_management(topology, feature, 1, 1)
    arcpy.AddRuleToTopology_management(topology, 'Must Not Overlap (Area)',
                                       feature)
    arcpy.ValidateTopology_management(topology)

    # Export Errors
    arcpy.ExportTopologyErrors_management(topology, database, 'Errors')
    error_count = arcpy.GetCount_management(str(database) + '\\Errors_poly')
    original_count = arcpy.GetCount_management(input_file)

    arcpy.Delete_management(database)  # Delete database

    return [str(error_count), str(original_count)]
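
A minimal usage sketch for check_topology; the shapefile path and scratch workspace below are placeholders, not values from the original project:

# Hypothetical inputs; assumes arcpy is imported and check_topology is defined as above.
errors, features = check_topology(r"C:\data\glaciers.shp", r"C:\data\scratch")
print("Overlap errors: %s out of %s features" % (errors, features))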
Example #4
    def create_dataset(self, ds_name, ds_sde, ds_sf=""):
        """
        create feature dataset of the sde database
        :param ds_name: feature dataset name
        :param ds_sde: the feature dataset sde file
        :param ds_sf: feature dataset spatial reference
        :return: Ture
        such as:
            ds_name = "test"
            ds_sde = "D:\config\SQL_SERVER_localhost_sde_source.sde"
            ds_sf = "C:\data\studyarea.prj" or ""
        sd_sf is defalut null
        """
        try:
            assert isinstance(ds_name, basestring)
            assert isinstance(ds_sf, basestring)
        except Exception as e:
            emsg = "SDEOpr create_dataset parameter type is error: %s" % e.message
            raise Exception(emsg)

        self.isexist_sde(ds_sde)
        try:
            sf = arcpy.SpatialReference(ds_sf) if os.path.exists(
                ds_sf) else None
            arcpy.CreateFeatureDataset_management(out_dataset_path=ds_sde,
                                                  out_name=ds_name,
                                                  spatial_reference=sf)
        except Exception as e:
            emsg = "SDEOpr create_dataset is failure: %s" % e.message
            raise Exception(emsg)
        else:
            ds = ds_sde + "\\" + ds_name
            return arcpy.Exists(ds)
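
A hedged usage sketch for the create_dataset method, reusing the values from its docstring; sde_opr stands in for an instance of the surrounding SDEOpr class, which is not shown here:

# Hypothetical instance of the SDEOpr class defined elsewhere in this module.
created = sde_opr.create_dataset("test",
                                 r"D:\config\SQL_SERVER_localhost_sde_source.sde",
                                 r"C:\data\studyarea.prj")
print(created)  # True if the dataset now exists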
Example #5
def Boo_TSA_Overlay_StageData(outWrk, dbUser, dbPass):
    tmpName = '{0}.gdb'.format("Boo_TSA_Overlay_StageData")
    tmpWrk = os.path.join(outWrk, tmpName)
    if not arcpy.Exists(tmpWrk):
        arcpy.CreateFileGDB_management(outWrk, tmpName)
    if not arcpy.Exists(os.path.join(outWrk, tmpName, "Data")):
        arcpy.CreateFeatureDataset_management(tmpWrk, "Data",
                                              arcpy.SpatialReference(3005))

    outWrk = os.path.join(tmpWrk, "Data")

    #--Get a BCGW Connection
    bcgwconn = CreateBCGWConn(dbUser, dbPass)
    #--Create the TSA Layer
    wc = 'TSB_NUMBER IS NULL AND RETIREMENT_DATE IS NULL'
    lyrTSA = r'in_memory\lyrTSA'
    print 'Creating layer {0}....'.format(lyrTSA)
    arcpy.MakeFeatureLayer_management(os.path.join(bcgwconn, srcTSA), lyrTSA,
                                      wc)
    fcTSAOut = os.path.join(outWrk, "TSA")
    DeleteExists(fcTSAOut)
    arcpy.CopyFeatures_management(lyrTSA, fcTSAOut)

    print 'Creating layer {0}....'.format("Caribou Buffer")
    #--Create the Boo Layer
    fcBoo25 = BufferBoo(outWrk, os.path.join(bcgwconn, srcCboo))

    print 'Data staging complete....'
    return ([outWrk, fcBoo25, fcTSAOut])
Example #6
def create_geodatabase(out_folder_path, file_gdb_name, feature_dataset_names, spatial_ref):

    """
    PARAMETERS:
    out_folder_path: string to the output folder where the geodatabase will be stored
    file_gdb_name: string of the file geodatabase name
    feature_dataset_names: an array storing the names of each feature dataset to create
    under the file geodatabase.  the names of the feature datasets will be in the form
    of strings
    USE:
    >>> out_folder_path = r"C:\Users\...\Datasbase"
    >>> file_gdb_name = r"geodatabase_name.gdb"
    >>> feature_dataset_name = ["geodataset1", "geodataset2", "geodataset3",...]
    >>> create_geodatabase(out_folder_path, file_gdb_name, feature_dataset_name)
    """

    ## Import Packages
    import arcpy
    import os
    
    ## Set Local Environment Variables
    arcpy.env.overwriteOutput = True 
    
    ## Create a Spatial Reference Object
    sr = arcpy.SpatialReference(spatial_ref)

    ## Create a File Geodatabase for the Feature Datasets
    arcpy.CreateFileGDB_management(out_folder_path, file_gdb_name)

    ## Get the Output Geodatabase Path
    out_gdb_path = os.path.join(out_folder_path, file_gdb_name)

    ## Create Feature Datasets
    for name in feature_dataset_names:
        arcpy.CreateFeatureDataset_management(out_gdb_path, name, sr)
Example #7
def main():

    logFileName = "T:/createARDdir.log"
    logFile = file(logFileName, "w")
    tsaNums = sys.argv[1]
    root  = sys.argv[2]
    year  = str(sys.argv[3])
    tsas = []
    fileListToArray(tsas,tsaNums)
    arcpy.env.overwriteOutput = False
    for tsa in tsas:
        rootTSA = root  + "\\" + tsa
        rootTSAgdb = root  + "\\" + tsa + "\\" + tsa + "_" + year + ".gdb"
        gdbName =  tsa + "_" + year + ".gdb"
        gdbDir = root  + "\\" + tsa
        cmd = r"mkdir " + rootTSA # create folder for each tsa under units folder
        os.system(cmd)
        if not arcpy.Exists(rootTSAgdb):
          arcpy.AddMessage("Creating File GDB %s/%s..." % (gdbDir,gdbName))
          arcpy.CreateFileGDB_management(gdbDir, gdbName)

          arcpy.AddMessage("Creating feature datasets...")
          for dataset in ("src", "wrk", "fin"):
            sr = arcpy.SpatialReference(3005)
            arcpy.CreateFeatureDataset_management(rootTSAgdb, dataset, sr)
    logFile.close()
Example #8
def CreateDataset(nome, spatialReference):
    try:
        arcpy.CreateFeatureDataset_management(arcpy.env.workspace, nome,
                                              spatialReference)
        print u"Dataset {} created successfully".format(nome)
    except Exception as ex:
        print u"ERROR:", nome, ex.message
Example #9
def MakeFgdb(dir, db, fd, spatRef):
    """ Create a file geodatabase and featuredataset """
    if os.path.isdir(dir) != 1:
        os.mkdir(dir)
    KillObject(os.path.join(dir, db))
    arcpy.CreateFileGDB_management(dir, db)
    arcpy.CreateFeatureDataset_management(os.path.join(dir, db), fd, spatRef)
Example #10
def CreateTopo(fc):
    ifc = fc
#     print "ifc:",ifc
    
    Coordinate_System = ifc.replace("shp", "prj")
#     print "Coordinate_System",Coordinate_System
#     os.path.join(shpFolder,Folder + "_地类更新.prj")
#     print Coordinate_System
#     File_GDB_Name = os.path.dirname(ifc),os.path.basename(ifc)    #"PLA_Topo.gdb"
    Geodatabase = ifc.replace("shp", "gdb")
    print "Geodatabase:",Geodatabase
    try:
        # Create the file geodatabase
        arcpy.CreateFileGDB_management(os.path.dirname(ifc), os.path.basename(ifc).replace("shp", "gdb"), "CURRENT")
    except:
        print "Geodatabase already exists"
        
    out_dataset_path = Geodatabase
    out_name = os.path.basename(ifc).replace(".shp", "")  # feature dataset name
#     print "out_name:",out_name
    print "Feature dataset name:",out_name
    # Create the feature dataset
    arcpy.CreateFeatureDataset_management(out_dataset_path, out_name, Coordinate_System)

    # Import the shapefile into the feature dataset
    in_features = ifc  # shapefile to import
    out_path = os.path.join(Geodatabase,out_name)   # feature dataset the shapefile is imported into
    #print u"Target feature dataset: " + str(out_path)
    out_name = os.path.basename(ifc).replace(".shp", "") + "_ToPo"    # name of the imported feature class
    #print u"Imported feature class name: " + str(out_name)
    arcpy.FeatureClassToFeatureClass_conversion (in_features, out_path, out_name)


    # Process: Create Topology
    Topo_name =os.path.basename(ifc).replace(".shp", "") + "_ToPology" # topology name
    #print u"Topology name: " + str(Topo_name)
    arcpy.CreateTopology_management(out_path, Topo_name, "")   #zhiduo_tp  topology

    # Process: Add Feature Class To Topology
    Topology = os.path.join(out_path,Topo_name)  # path to the topology
    ##print u"Topology path: " + str(Topology)
    ToPoShp = os.path.join(out_path,out_name)    # feature class in the dataset to validate
    ##print u"Feature class added to the topology: " + str(ToPoShp)
    arcpy.AddFeatureClassToTopology_management(Topology, ToPoShp, "1", "1")

    # Process: Add Rule To Topology
    arcpy.AddRuleToTopology_management(Topology, "Must Not Have Gaps (Area)", ToPoShp, "", "", "")
    #print u"Adding the Must Not Have Gaps rule"
    arcpy.AddRuleToTopology_management(Topology, "Must Not Overlap (Area)", ToPoShp, "", "", "")
    #print u"Adding the Must Not Overlap rule"

    # Process: Validate Topology
    try:
        arcpy.ValidateTopology_management(Topology, "true")
    except:
        print str(ifc) + ": topology validation failed; please rebuild the topology manually."

    print "Topology validation complete" + '\n'
    print 
Example #11
def dataset_create(_out_ds_path, _spatial_reference):
    # arcpy.AddMessage(_out_ds_path)
    _ds_name = _out_ds_path.split('\\')[-1]
    # arcpy.AddMessage(_ds_name)
    _out_gdb = _out_ds_path[0:len(_out_ds_path) - len(_ds_name) - 1]
    # arcpy.AddMessage(_out_gdb)
    arcpy.CreateFeatureDataset_management(_out_gdb, _ds_name,
                                          _spatial_reference)
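
A minimal call sketch for dataset_create; the geodatabase path and spatial reference are placeholders, and the parent gdb is assumed to exist:

# Hypothetical inputs; assumes arcpy is imported and dataset_create is defined as above.
# The function splits off "Hydrology" and creates it inside demo.gdb.
dataset_create(r"C:\data\demo.gdb\Hydrology", arcpy.SpatialReference(4326))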
Example #12
def create_fds():
    if sde_fida:
        arcpy.Delete_management(os.path.join(get_workspace(), featuredataset))
        print("FDS deleted")
    arcpy.CreateFeatureDataset_management(
        get_workspace(), featuredataset, spatial_reference=arcpy.SpatialReference(2056)
    )
    print("FDS created")
Example #13
def collectMatchingFeatures(output_dir, output_gdb_name, input_fcs, field, vals, sr=''):
    """Given an output geodatabase or folder, a list of feature classes, a target 
    fieldname, and a target value, itereate through feature classes, select
    all features within each feature class that matches the target value within
    the target field, and place all matching features into a feature dataset 
    named after the current value. 
    
    Issue:
        Losing spatial reference...
    
    Params:
        output_dir : str
            String representation of the target output directory
        output_gdb_name : str
            String representation of the desired output file geodatabase name,
            with or without the file extension
        input_fcs : list
            List of string representations of valid feature classes
        field : string
            String representation of the desired field for matching values
        vals : list
            List of strings you want to match, must be strings
        sr : str
            See arcpy.CreateFeatureDataset_management docstring for what is 
            allowed. If no spatial_reference is provided, then the 
            spatial_reference of the first feature class will be assumed"""
    validresult, invalids = validateFeatureClasses(input_fcs)
    if validresult:
        if not output_gdb_name[-3:] == 'gdb':
            output_gdb_name += '.gdb'
        arcpy.CreateFileGDB_management(output_dir, output_gdb_name)
        lyrs = []
        for fc in input_fcs:
            lyrs.append(arcpy.MakeFeatureLayer_management(fc,os.path.split(fc)[1]+'_lyr'))
        if not sr:
            sr = arcpy.Describe(lyrs[0]).spatialReference
        with open('log.txt', 'w') as log:
            for val in vals:
                print('{}'.format(val))
                log.write('{}\n'.format(val))
                where = '"{}" = \'{}\''.format(field,val)
                log.write(where + '\n')
                fd = arcpy.CreateFeatureDataset_management(os.path.join(output_dir,output_gdb_name),
                                                           val,sr)
                for lyr in lyrs:
                    fc_out = os.path.join(fd[0],os.path.split(str(lyr))[1][:-4]+'_'+val)
                    log.write(fc_out + '\n')
                    try:    
                        arcpy.SelectLayerByAttribute_management(lyr,'NEW_SELECTION',where)
                        arcpy.CopyFeatures_management(lyr,fc_out)
                    except:
                        log.write('selection or copy failed on {}\n'.format(lyr))
                        continue
                    arcpy.SelectLayerByAttribute_management(lyr,'CLEAR_SELECTION')
    else:
        print('Oops! The following feature classes may not exist:')
        for x in invalids:
            print(x)
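
A hypothetical call sketch for collectMatchingFeatures; the paths, field name, and values are placeholders rather than values from the original project:

# Placeholder inputs; assumes arcpy, validateFeatureClasses, and the function above are available.
fcs = [r"C:\data\inputs.gdb\parcels", r"C:\data\inputs.gdb\buildings"]
collectMatchingFeatures(r"C:\data\out", "by_city", fcs,
                        field="CITY", vals=["Springfield", "Shelbyville"])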
Example #14
def clip(raster, nhd_gdb, projection, outfolder):

    env.workspace = nhd_gdb
    env.outputCoordinateSystem = projection
    env.compression = "NONE"  # only final tifs are generated
    env.pyramid = "NONE"

    # Create a feature dataset in NHD file geodatabase named "HUC8_Albers" in Albers projection
    out_feature_dataset = "HUC8_Albers"
    arcpy.CreateFeatureDataset_management(env.workspace, out_feature_dataset,
                                          projection)
    arcpy.RefreshCatalog(nhd)

    # HUC8 polygons each saved as separate fc inheriting albers from environ
    huc8_fc = "WBD_HU8"
    field = "HUC_8"
    arcpy.MakeFeatureLayer_management(huc8_fc, "huc8_layer")

    with arcpy.da.SearchCursor(huc8_fc, field) as cursor:
        for row in cursor:
            if row[0].startswith(nhdsubregion):
                whereClause = ''' "%s" = '%s' ''' % (field, row[0])
                arcpy.SelectLayerByAttribute_management(
                    "huc8_layer", 'NEW_SELECTION', whereClause)
                arcpy.CopyFeatures_management(
                    "huc8_layer",
                    os.path.join(out_feature_dataset, "HUC" + row[0]))

    #retrieve only the single huc8 fcs and not the one with all of them
    fcs = arcpy.ListFeatureClasses("HUC%s*" % nhdsubregion, "Polygon",
                                   out_feature_dataset)
    fcs_buffered = [
        os.path.join(out_feature_dataset, fc + "_buffer") for fc in fcs
    ]
    out_clips = [
        os.path.join(outfolder, "huc8clips" + nhdsubregion,
                     "NED" + fc[3:] + ".tif") for fc in fcs
    ]

    # Buffer HUC8 feature classes by 5000m
    for fc, fc_buffered in zip(fcs, fcs_buffered):
        arcpy.Buffer_analysis(fc, fc_buffered, "5000 meters")

    cu.multi_msg("Created HUC8 buffers.")
    arcpy.RefreshCatalog(nhd)

    # Clips rasters
    cu.multi_msg("Starting HUC8 clips...")
    for fc_buffered, out_clip in zip(fcs_buffered, out_clips):
        arcpy.Clip_management(raster, '', out_clip, fc_buffered, "0",
                              "ClippingGeometry")

    arcpy.Compact_management(nhd)

    cu.multi_msg("Clipping complete.")
Example #15
def create_dataset(name):
    global out_database_path
    try:
        arcpy.CreateFeatureDataset_management(out_database_path, name,
                                              template_feature_set)
        print("Making dataset {}".format(name))
        arcpy.AddMessage("Making dataset {}".format(name))
    except:
        print("{} Dataset already exist".format(name))
        arcpy.AddMessage("{} Dataset already exist".format(name))
    dataset = "{}\{}".format(out_database_path, name)
    return dataset
Example #16
def calculateNetworkServiceArea(inFacilities, roads, workspace):
    #beginning workspace
    arcpy.env.workspace = workspace
    
    #checking correct fc for network analysis
    featureclasses = arcpy.ListFeatureClasses()
    fc1 = featureclasses[0]
    fc2 = featureclasses[1]
    describe_fc1 = arcpy.Describe(fc1)
    describe_fc2 = arcpy.Describe(fc2)
    if describe_fc2.shapeType != "Point":
        return "Wrong shapefile type for facilities: {}".format(describe_fc2.shapeType)

    if describe_fc1.shapeType != "Polyline":
        return "Wrong shapefile type for network: {}".format(describe_fc1.shapeType)


    # creates a gdb file for analysis
    arcpy.CreateFileGDB_management(workspace, "Net_Analysis.gdb")
    #path of gdb file
    gdb_workspace = os.path.abspath(workspace) + "\\Net_Analysis.gdb"
    #setting workspace to the new gdb file
    arcpy.env.workspace = gdb_workspace
    
    out_name = "net_analysis"
    folderPath = os.path.abspath(workspace)
    dsc_fc = arcpy.Describe(roads)
    coord_sys = dsc_fc.spatialReference

    
    arcpy.CreateFeatureDataset_management("Net_Analysis.gdb", out_name, coord_sys)
    arcpy.CopyFeatures_management(folderPath + "\\" + str(roads), gdb_workspace)
    arcpy.CopyFeatures_management(folderPath + "\\" + str(inFacilities), gdb_workspace)

    arcpy.na.CreateNetworkDataset("net_analysis", "route_ND", ["roads"])

    
    arcpy.na.MakeServiceAreaAnalysisLayer("net_analysis//roads_ND", layer_name="an_layer", travel_mode="Driving Time", travel_direction="FROM_FACILITIES",
                                          cutoffs="5;10;15", time_of_day="", time_zone="", output_type="POLYGONS", polygon_detail="STANDARD",
                                          geometry_at_overlaps="OVERLAP", geometry_at_cutoffs="RINGS", polygon_trim_distance="100 Meters", exclude_sources_from_polygon_generation="",
                                          accumulate_attributes="")

    
    arcpy.na.AddLocations("an_layer", "Facilities", "net_analysis\schools", field_mappings="Name Name #",
                          search_tolerance="5000 Meters", sort_field="", search_criteria="",
                          match_type="MATCH_TO_CLOSEST", append="APPEND", snap_to_position_along_network="NO_SNAP", snap_offset="5 Meters",
                          exclude_restricted_elements="EXCLUDE", search_query="")

    arcpy.na.Solve("an_layer", ignore_invalids="SKIP", terminate_on_solve_error="TERMINATE",
                   simplification_tolerance="", overrides="")
Example #17
    def execute(self, parameters, messages):
        """The source code of the tool."""
        arcpy.AddMessage("\nCreating File GDB...")
        logging.info("Starting createGDBStruct.py script...\n")
        # Define variables from parameters
        working_folder = parameters[0]
        gdbName = parameters[1]

        coord_sys = arcpy.SpatialReference("NAD 1983 Canada Atlas Lambert")
        logging.info("Spatial Reference set: NAD 1983 Canada Atlas Lambert")

        # Create file GDB and define path
        arcpy.CreateFileGDB_management(working_folder, gdbName, "CURRENT")
        logging.info("Create File GDB: '%s' created in %s", gdbName, working_folder)
        file_GDB = os.path.join(working_folder, gdbName) + ".gdb"

        # Create dark features dataset
        arcpy.AddMessage("Creating Dark Features Dataset...")
        dark_featDSstring = "dark_features"
        arcpy.CreateFeatureDataset_management(file_GDB, dark_featDSstring, coord_sys)
        logging.info("Create Feature Dataset: '%s' created in %s", dark_featDSstring, file_GDB)
        feat_DS = os.path.join(file_GDB, dark_featDSstring)

        # Create features union dataset
        arcpy.AddMessage("Creating Union Dataset...")
        unionDSstring = "feature_union"
        arcpy.CreateFeatureDataset_management(file_GDB, unionDSstring, coord_sys)
        logging.info("Create Feature Dataset: '%s' created in %s", unionDSstring, file_GDB)
        union_DS = os.path.join(file_GDB, unionDSstring)

        # Create features overlap dataset
        arcpy.AddMessage("Creating Overlap Dataset...")
        overlapDSstring = "feature_overlap"
        arcpy.CreateFeatureDataset_management(file_GDB, overlapDSstring, coord_sys)
        logging.info("Create Feature Dataset: '%s' created in %s", overlapDSstring, file_GDB)
        overlap_DS = os.path.join(file_GDB, overlapDSstring)
        logging.info("createGDBStruct.py script finished\n\n")

        return feat_DS, union_DS, overlap_DS, file_GDB
Example #18
def create_featdts(geodb, name, epsg_code):
    """
    Create Feature Dataset in existing GeoDataBase
    """

    from glass.web.srorg import get_wkt_esri

    arcpy.CreateFeatureDataset_management(
        geodb, name,
        get_wkt_esri(epsg_code) if epsg_code != 3857 else os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'epsg_3857.prj'))

    return os.path.join(geodb, name)
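
A hypothetical call for create_featdts; the geodatabase path and EPSG code are placeholders, and the glass.web.srorg dependency imported inside the function is assumed to be installed:

# Placeholder inputs; returns the full path to the new feature dataset.
fds_path = create_featdts(r"C:\data\demo.gdb", "roads", 4326)
print(fds_path)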
Example #19
def buildGDB(outPath,
             gdb_name,
             feature_datasets=[],
             spatial_ref=arcpy.SpatialReference('WGS 1984')):
    # Creates a gdb from inputs and adds in feature datasets to that gdb if any and returns the path to the gdb
    gdb_path = os.path.join(outPath, gdb_name + '.gdb')
    if arcpy.Exists(gdb_path):
        return gdb_path
    arcpy.CreateFileGDB_management(outPath, gdb_name)
    if isinstance(feature_datasets, list) and len(feature_datasets) > 0:
        for fds in feature_datasets:
            arcpy.CreateFeatureDataset_management(gdb_path, fds, spatial_ref)

    return gdb_path
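
A hedged usage sketch for buildGDB; the output folder, gdb name, dataset names, and spatial reference are placeholders:

# Placeholder arguments; returns the gdb path, creating it only if it does not exist.
gdb = buildGDB(r"C:\temp", "project_data",
               feature_datasets=["inputs", "outputs"],
               spatial_ref=arcpy.SpatialReference(26910))
print(gdb)  # C:\temp\project_data.gdb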
Example #20
def ReprojectHARNDataset(OutputGdb, CalcXY):
    arcpy.AddMessage("\nFinal Step: Reprojecting dataset to NAD 1983")

    # create separate objects from user supplied path
    SplitGdbPath = OutputGdb.split('\\')
    Folder = '\\'.join(SplitGdbPath[:-1])
    GdbName = '\\'.join(SplitGdbPath[-1:]).split('.')[0]
    GdbNameNAD83 = GdbName + '_NAD83.gdb'
    OutputGdbNAD83 = Folder + '\\' + GdbNameNAD83

    # create spatial reference object, new gdb, new feature dataset
    sr = arcpy.SpatialReference('NAD 1983')
    arcpy.CreateFileGDB_management(Folder, GdbNameNAD83, 'CURRENT')
    arcpy.CreateFeatureDataset_management(OutputGdbNAD83, 'CadastralReference',
                                          sr)

    # input/output: import HARN features into NAD83 dataset
    infcs = [
        """{}\\CadastralReference\\PLSSTownship""".format(OutputGdb),
        """{}\\CadastralReference\\PLSSSpecialSurvey""".format(OutputGdb),
        """{}\\CadastralReference\\PLSSSecondDivision""".format(OutputGdb),
        """{}\\CadastralReference\\PLSSPoint""".format(OutputGdb),
        """{}\\CadastralReference\\PLSSFirstDivision""".format(OutputGdb),
        """{}\\CadastralReference\\MetadataGlance""".format(OutputGdb),
        """{}\\CadastralReference\\MeanderedWater""".format(OutputGdb),
        """{}\\CadastralReference\\ConflictedAreas""".format(OutputGdb),
        """{}\\CadastralReference\\Control""".format(OutputGdb),
        """{}\\CadastralReference\\SurveySystem""".format(OutputGdb)
    ]

    outdataset = """{}\\CadastralReference""".format(OutputGdbNAD83)
    arcpy.FeatureClassToGeodatabase_conversion(infcs, outdataset)

    PLSSPointReProject = """{}\\PLSSPoint""".format(outdataset)

    if CalcXY == 'true':
        arcpy.AddMessage("Calculating XCOORD, YCOORD, COORDSYS AND HDATUM")
        arcpy.CalculateField_management(PLSSPointReProject, "XCOORD",
                                        "!Shape.Centroid.X!", "Python_9.3", "")
        arcpy.CalculateField_management(PLSSPointReProject, "YCOORD",
                                        "!Shape.Centroid.Y!", "Python_9.3", "")
        spatialRef = arcpy.Describe(PLSSPointReProject).SpatialReference
        srType = "'{}'".format(spatialRef.type)
        #srName = "'{}'".format(spatialRef.name) Possibly to be used later. Right now the HDATUM is hard coded to NAD83 to fit the CadNSDI standard.
        arcpy.CalculateField_management(PLSSPointReProject, "COORDSYS", srType,
                                        "PYTHON", "")
        arcpy.CalculateField_management(PLSSPointReProject, "HDATUM",
                                        "'NAD83'", "PYTHON", "")
Example #21
def projecting_dataset(projection):
    arcpy.CreateFeatureDataset_management(arcpy.env.workspace, "Network",
                                          projection)
    arcpy.env.outputZFlag = "Disabled"
    arcpy.env.outputCoordinateSystem = projection
    input_lines_p = arcpy.FeatureClassToFeatureClass_conversion(
        r"Placemarks\Polylines", "Network", "Lines_p")
    input_points_p = arcpy.FeatureClassToFeatureClass_conversion(
        r"Placemarks\Points", "Network", "Points_p")
    for field in [
            "Voltage", "Line_Type", "Complexity", "Trace_Version", "Status"
    ]:
        arcpy.AddField_management(input_lines_p, field, "TEXT")
    for field in ["Voltage", "Point_Type"]:
        arcpy.AddField_management(input_points_p, field, "TEXT")
    return input_lines_p, input_points_p
Example #22
def GeodataSetup(Projection, Workspace):  #Spatial Reference Setting
    try:
        arcpy.CreateFileGDB_management(
            Workspace, "GeopyGeodata.gdb"
        )  #Creates file geodatabase in user-defined workspace location.
        prjFile = os.path.join(
            arcpy.GetInstallInfo()["InstallDir"],
            "Coordinate Systems/Projected Coordinate Systems/State Plane/NAD 1983 (US Feet)/NAD 1983 StatePlane Washington North FIPS 4601 (US Feet).prj"
        )
        spatialRef = arcpy.SpatialReference(prjFile)
        FD = arcpy.CreateFeatureDataset_management(
            Workspace + "\\GeopyGeodata.gdb", "GeopyFeatureData",
            spatialRef)  #Creates feature dataset for point features.
    except:
        arcpy.AddMessage(
            "GeodataSetup Not Successful... Attempting to continue\n")
        return
Example #23
def copy_features(input_table, out_feature_class):
    print("Checking if feature(s) exist in the database...\n")
    try:
        if arcpy.Exists(out_feature_class):
            arcpy.Delete_management(out_feature_class)

        # creating a new Feature dataset, "newFD", if it doesn't exist
        if not arcpy.Exists(out_nameFD):
            arcpy.CreateFeatureDataset_management(out_dataset_path, out_nameFD,
                                                  CoordSys)

        # Methods for updating, e.g. Copy Features, Feature class conversion
        arcpy.CopyFeatures_management(input_table, out_feature_class)
        print("Copying features...\n")
        print("Copy Features Complete!\n")
    except Exception as err:
        print(err.message)
        print("Error occurred while copying feature(s)\n")
Example #24
def CreateTempDB(wrk, sType='FILE', name='SPI_DataAnalysis'):
    if sType == 'FILE':
        tmpName = '{0}.gdb'.format(name)
        tmpWrk = os.path.join(wrk, tmpName)
        if not arcpy.Exists(os.path.join(wrk, tmpName)):
            #DeleteExists(tmpWrk)
            arcpy.CreateFileGDB_management(wrk, tmpName)
        if not arcpy.Exists(os.path.join(wrk, tmpName, "Data")):
            arcpy.CreateFeatureDataset_management(tmpWrk, "Data",
                                                  arcpy.SpatialReference(3005))
        return os.path.join(tmpWrk, "Data")
    elif sType == 'PERSONAL':
        tmpName = '{0}.mdb'.format(name)
        tmpWrk = os.path.join(wrk, tmpName)
        if not arcpy.Exists(tmpWrk):
            #DeleteExists(tmpWrk)
            arcpy.CreatePersonalGDB_management(wrk, tmpName)
        return tmpWrk
Example #25
    def create(self, geodatabase_path, dataset_name, srid):

        # Retrieve the coordinate system for the datasets

        #dsc = arcpy.Describe(file_srid)
        #coord_sys = dsc.spatialReference
        coord_sys = arcpy.SpatialReference(srid)

        # Create the dataset
        #print(u"Creating dataset " + dataset_name)
        if arcpy.Exists(geodatabase_path + "/" + dataset_name):
            arcpy.env.workspace = geodatabase_path + "/" + dataset_name
            feature_list = arcpy.ListFeatureClasses()
            for feature in feature_list:
                arcpy.Delete_management(feature)
            arcpy.Delete_management(geodatabase_path + "/" + dataset_name)

        arcpy.CreateFeatureDataset_management(geodatabase_path, dataset_name,
                                              coord_sys)
Example #26
def make_trend_template(out_path, out_gdb_name=None, overwrite=False):
    """
    Helper function to generate a blank output workspace with necessary feature
    dataset categories

    Args:
        out_path (str): path where trend template gdb is written
        out_gdb_name (str): optional name of output gdb
        overwrite (bool): boolean flag to overwrite an existing copy of the out_gdb_name

    Returns (str):
        path to the newly created reporting geodatabase
    """
    out_gdb = make_reporting_gdb(out_path, out_gdb_name, overwrite)
    for fds in ["Networks", "Points", "Polygons"]:
        arcpy.CreateFeatureDataset_management(
            out_dataset_path=out_gdb, out_name=fds, spatial_reference=PMT.SR_FL_SPF
        )
    return out_gdb
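
A hypothetical call for make_trend_template; the output folder and gdb name are placeholders, and the make_reporting_gdb helper plus the PMT.SR_FL_SPF constant referenced above are assumed to be importable:

# Placeholder arguments for illustration only.
template_gdb = make_trend_template(r"C:\PMT\build", out_gdb_name="Trend_Snapshot", overwrite=True)
print(template_gdb)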
Example #27
def createNewFGDB():
    print 'Creating FGDB: ' + '"' + newFGDB + '"' + ' at: ' + currentData_file_loc + '\n'
    logging.info('Creating FGDB: ' + '"' + newFGDB + '"' + ' at: ' +
                 currentData_file_loc + '\n')
    arcpy.CreateFileGDB_management(currentData_file_loc, newFGDB, "CURRENT")

    #Create Feature Datasets:  RWATER, SEWER, WATER, GRID, SANGIS
    for fd in FGDB_fds:
        print 'Creating Feature Dataset: ' + fd
        logging.info('Creating Feature Dataset: ' + fd)
        out_dataset_path = newFGDB_loc
        out_name = fd
        spatial_reference = prjFile

        arcpy.CreateFeatureDataset_management(out_dataset_path, out_name,
                                              spatial_reference)

    print '\n'
    logging.info('\n')
Example #28
def node_generator(gtfs_folder):

    print('Building file geodatabase')
    gdb_folder = os.path.join(os.path.expanduser('~'), "OneDrive", "Documents",
                              "ArcGIS", "Projects", "MBTA_Overview", "GTFS",
                              "GTFS_gdb")
    gdb_path = os.path.join(gdb_folder, f"MBTA_GTFS_{date}.gdb")
    if arcpy.Exists(gdb_path):
        arcpy.Delete_management(gdb_path)
    gdb = arcpy.management.CreateFileGDB(gdb_folder, f"MBTA_GTFS_{date}")
    fd = arcpy.CreateFeatureDataset_management(gdb, "transit_network",
                                               '102100')
    path = arcpy.Describe(fd).catalogPath

    print('Building network nodes')
    arcpy.conversion.GTFSToNetworkDatasetTransitSources(
        gtfs_folder, fd, 'INTERPOLATE')

    return path
Example #29
def Topo(strPathFC, strPathFDS, strPathTopo, BoolValidate=None):
    ''' Create a no gaps/no overlaps topology for a feature class.
        creates requisite featuredataset if needed.
        optionally validates topology.
        Returns the location of the new featureclass in the featuredataset.
    '''
    if BoolValidate is None:
        BoolValidate = True

    strFC = os.path.basename(strPathFC)
    strGDB, strFDS = os.path.split(strPathFDS)
    strPathFDS_FC = strPathFDS + os.sep + strFC

    if not arcpy.Exists(strPathTopo):
        print('\tCreate FDS and add FC...')
        sr = arcpy.Describe(strPathFC).spatialReference
        if not arcpy.Exists(strPathFDS):
            arcpy.CreateFeatureDataset_management(strGDB, strFDS, sr)

        if not arcpy.Exists(strPathFDS_FC):
            arcpy.CopyFeatures_management(strPathFC, strPathFDS_FC)

        print('\tCreate topology and add rules...')
        arcpy.CreateTopology_management(strPathFDS,
                                        os.path.basename(strPathTopo))

        arcpy.AddFeatureClassToTopology_management(strPathTopo, strPathFDS_FC,
                                                   1)
        arcpy.AddRuleToTopology_management(strPathTopo,
                                           'Must Not Have Gaps (Area)',
                                           strPathFDS_FC)
        arcpy.AddRuleToTopology_management(strPathTopo,
                                           'Must Not Overlap (Area)',
                                           strPathFDS_FC)

    if BoolValidate:
        print('\tValidate...')
        arcpy.ValidateTopology_management(strPathTopo)

    return strPathFDS_FC
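
A hypothetical usage sketch for Topo; the feature class, feature dataset, and topology paths are placeholders inside an existing file geodatabase:

# Placeholder paths; returns the copy of the feature class inside the feature dataset.
fc_in_fds = Topo(r"C:\data\work.gdb\parcels",
                 r"C:\data\work.gdb\Cadastre",
                 r"C:\data\work.gdb\Cadastre\parcels_topology")
print(fc_in_fds)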
Example #30
 def create_mdb_DateBase(self):
     """
         Create *mdb DataBase with DateSet, and polyline layer with fields ("Name_ATE", "Name", "Name_EVA", "ID_ATE", "ID_EVA").
         Set sybtype for field "Name_ATE" and add sybtypes.
         :return: DateBase
         """
     # create mdb
     arcpy.CreatePersonalGDB_management(
         self.work_path, '{0}_streets.mdb'.format(self.name_district))
     # create dataset Streets
     arcpy.CreateFeatureDataset_management(self.nameDataBase, "Streets",
                                           wgs84)
     # create shp Streets
     arcpy.CreateFeatureclass_management(self.nameDataSet, "Streets",
                                         "POLYLINE", "", "DISABLED",
                                         "DISABLED", wgs84, "", "0", "0",
                                         "0")
     # create fields in shp Streets
     arcpy.AddField_management(self.nameStreets, "Name_ATE", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "Name", "LONG", "", "", "",
                               "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "Name_EVA", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "ID_ATE", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "ID_EVA", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     # set the subtype field - Name_ATE
     arcpy.SetSubtypeField_management(self.nameStreets, "Name_ATE")
     # create subtypes in the database
     if self.name_district != "Минск":
         for element in self.new_dict_syptypes.items():
             arcpy.AddSubtype_management(self.nameStreets, element[1][0],
                                         element[0])
     else:
         arcpy.AddSubtype_management(self.nameStreets, 17030, 'Минск')