def createGDB(Name, Path):
    """
    Create the personal geodatabase that serves as the tool's main workspace.

    All feature classes and tables (after clipping) will be housed in this
    geodatabase, which bears the same name as the project.

    :param Name: validated project name returned by setProjectName
    :param Path: working directory that will contain the geodatabase
    :return: string path to the created geodatabase
    :raises ValueError: if Path does not exist
    """
    env.workspace = Path
    # Fail fast when the target directory is missing.
    if not os.path.exists(env.workspace):
        raise ValueError('Directory Not Found: {}'.format(
            os.path.abspath(env.workspace)))
    log_text = 'Creating the project Database \n'
    gdb_result = arcpy.CreatePersonalGDB_management(Path, Name, 'CURRENT')
    log_text += arcpy.GetMessages()
    dumpToLogFile(log_text)
    return str(gdb_result)
Beispiel #2
0
def preProcess(datetime, province, infiles):
    """
    Prepare the working directories and databases for one processing run.

    Creates a temp working directory keyed by datetime, ensures a file
    geodatabase exists, writes the raw input data, builds the feature
    database, mirrors it into a personal geodatabase for SQL querying,
    and pickles the per-city area lookup table.

    :param datetime: run timestamp string, also the work-dir name
        (NOTE(review): shadows the stdlib module name — rename if the
        interface can ever change)
    :param province: province name used in paths and feature-class names
    :param infiles: input files passed through to writeOriginData
    """
    # Create the working directory for this run, keyed by datetime.
    cwd = os.getcwd()
    workpath = ''.join([cwd, u"/temp/", province, '/', datetime])
    if not os.path.exists(workpath):
        os.makedirs(workpath)
    workspace = ''.join([workpath, '/GDB.gdb'])
    if not arcpy.Exists(workspace):
        arcpy.CreateFileGDB_management(workpath, 'GDB.gdb')
    arcpy.env.overwriteOutput = True

    text_table = ''.join([workspace, "/", u"data", datetime, u".txt"])
    writeOriginData(infiles, text_table)
    database_path = generateDatabase(text_table, province)

    # Build a personal geodatabase so the data can be queried with SQL.
    if not arcpy.Exists(''.join([workpath, '/SQL.mdb'])):
        # Create the personal geodatabase at the given location.
        arcpy.CreatePersonalGDB_management(workpath, "SQL.mdb")
        # Import the feature classes from the file GDB into it.
        arcpy.FeatureClassToGeodatabase_conversion(database_path, ''.join(
            [workpath, '/SQL.mdb']))

    # Read the area of each prefecture-level city in the province.  The
    # values are pre-computed in the map data; this only reads them.
    province_area = {}
    province_feature = ''.join(
        [cwd, u'/data/LightningBulletin.gdb/', province, u'_分区'])
    with SearchCursor(province_feature, ["Region", "area"]) as cursor:
        for row in cursor:
            province_area[row[0]] = row[1]

    # BUG FIX: use a context manager so the pickle file is closed even if
    # pickle.dump raises (was open()/close() with no try/finally).
    with open(os.path.join(workspace, 'province_area.pkl'), 'wb') as f:
        pickle.dump(province_area, f, pickle.HIGHEST_PROTOCOL)
Beispiel #3
0
def clean_location(path, path_type="FGDB"):
    """
    Copied from PISCES 2.0.4 — delete and recreate a workspace.

    Supported workspace types: file geodatabases ("FGDB"), personal
    geodatabases ("PGDB"), and plain folders ("Folder").

    :param path: full path of the workspace to wipe and recreate
    :param path_type: one of "FGDB", "PGDB", "Folder"
    :raises ValueError: on an unrecognized path_type
    :return: None
    """
    processing_log.info("Recreating {0:s}".format(path))

    if path_type not in ("FGDB", "PGDB", "Folder"):
        raise ValueError("path_type must be one of (FGDB, PGDB, Folder)")

    if path_type == "Folder":
        # Plain directory: remove it if present, then remake it.
        if os.path.exists(path):
            shutil.rmtree(path)
        os.mkdir(path)
        return

    # Geodatabase: drop it via arcpy if it exists, then recreate by type.
    if arcpy.Exists(path):
        arcpy.Delete_management(path)

    parent_dir, gdb_name = os.path.split(path)
    if path_type == "FGDB":
        # Recreating is the fastest way to compact a temp GDB.
        arcpy.CreateFileGDB_management(parent_dir, gdb_name)
    else:  # "PGDB"
        arcpy.CreatePersonalGDB_management(parent_dir, gdb_name)
Beispiel #4
0
def createDatabase(outputDir,thisDB):
    """
    Create a personal (.mdb) or file (.gdb) geodatabase named thisDB.

    :param outputDir: directory that will contain the geodatabase
    :param thisDB: geodatabase name ending in '.mdb' or '.gdb'
    :return: True on success, False when creation fails
    :raises arcpy.ExecuteError: if the geodatabase already exists
    """
    addMsgAndPrint('  Creating geodatabase '+thisDB+'...')
    if arcpy.Exists(outputDir+'/'+thisDB):
        addMsgAndPrint('  Geodatabase '+thisDB+' already exists.')
        addMsgAndPrint('   forcing exit with error')
        raise arcpy.ExecuteError
    try:
        # Choose the geodatabase flavor from the file extension.
        if thisDB.endswith('.mdb'):
            arcpy.CreatePersonalGDB_management(outputDir,thisDB)
        if thisDB.endswith('.gdb'):
            arcpy.CreateFileGDB_management(outputDir,thisDB)
        # NOTE(review): an unrecognized extension creates nothing but still
        # returns True -- preserved for backward compatibility.
        return True
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed SystemExit
        # and KeyboardInterrupt; narrowed to Exception.
        addMsgAndPrint('Failed to create geodatabase '+outputDir+'/'+thisDB)
        addMsgAndPrint(arcpy.GetMessages(2))
        return False
def CreateTempDB(wrk, sType='FILE', name='SPI_DataAnalysis'):
    """
    Ensure a scratch geodatabase exists under wrk and return its workspace.

    For sType 'FILE' a file geodatabase with a 'Data' feature dataset
    (spatial reference 3005, BC Albers) is created and the dataset path is
    returned; for 'PERSONAL' a personal geodatabase is created and its
    path is returned.  Any other sType returns None.
    """
    if sType == 'FILE':
        gdb_name = '{0}.gdb'.format(name)
        gdb_path = os.path.join(wrk, gdb_name)
        if not arcpy.Exists(gdb_path):
            arcpy.CreateFileGDB_management(wrk, gdb_name)
        dataset_path = os.path.join(gdb_path, "Data")
        if not arcpy.Exists(dataset_path):
            arcpy.CreateFeatureDataset_management(gdb_path, "Data",
                                                  arcpy.SpatialReference(3005))
        return dataset_path
    elif sType == 'PERSONAL':
        mdb_name = '{0}.mdb'.format(name)
        mdb_path = os.path.join(wrk, mdb_name)
        if not arcpy.Exists(mdb_path):
            arcpy.CreatePersonalGDB_management(wrk, mdb_name)
        return mdb_path
    def createdb(self, dbtype, statename, countyname):
        """
        Create the project geodatabase and import downloaded shapefiles.

        :param dbtype: 'File Geodatabase' or 'Personal Geodatabase'
        :param statename: state name used in imported feature-class names
        :param countyname: county name used in imported feature-class names
        """
        import arcpy

        try:
            if dbtype == 'File Geodatabase':
                arcpy.CreateFileGDB_management(self.directory,
                                               str(self.database_name))
            elif dbtype == 'Personal Geodatabase':
                arcpy.CreatePersonalGDB_management(self.directory,
                                                   str(self.database_name))
        except:
            # NOTE(review): bare except silently ignores any creation
            # failure (e.g. the geodatabase already exists) -- presumably
            # deliberate best-effort; confirm before narrowing.
            pass

        def import_shapes():
            # Copy every shapefile under data/download into the geodatabase,
            # renamed to <State>_<County>_<suffix>.
            path = os.path.join(self.directory, 'data', 'download')
            arcpy.env.workspace = path
            fc = arcpy.ListFeatureClasses()

            for feature in fc:
                # Pick the extension matching the geodatabase type.
                if dbtype == 'Personal Geodatabase':
                    ext = '.mdb'
                else:
                    ext = '.gdb'

                shapefile = os.path.join(path, feature)
                try:
                    # Target name: State_County_<last-underscore-token of the
                    # shapefile name, minus its 4-char extension>.
                    arcpy.FeatureClassToFeatureClass_conversion(
                        shapefile,
                        os.path.join(self.directory,
                                     str(self.database_name) + ext),
                        statename.replace(" ", "_") + '_' +
                        countyname.replace(" ", "_") + '_' +
                        feature[feature.rfind('_') + 1:-4])
                except:
                    # Python 2 print statement; a failure here is assumed
                    # to mean the feature class already exists.
                    print "feature exists?"

        import_shapes()
Beispiel #7
0
 def create_mdb_DateBase(self):
     """
     Create the *.mdb database with a feature dataset and a polyline layer.

     The polyline layer gets the fields "Name_ATE", "Name", "Name_EVA",
     "ID_ATE" and "ID_EVA"; "Name_ATE" is made the subtype field and
     subtypes are added per district.
     :return: None (the database is created on disk)
     """
     # create mdb
     arcpy.CreatePersonalGDB_management(
         self.work_path, '{0}_streets.mdb'.format(self.name_district))
     # create dataset Streets
     arcpy.CreateFeatureDataset_management(self.nameDataBase, "Streets",
                                           wgs84)
     # create the Streets polyline feature class (WGS84, no M/Z values)
     arcpy.CreateFeatureclass_management(self.nameDataSet, "Streets",
                                         "POLYLINE", "", "DISABLED",
                                         "DISABLED", wgs84, "", "0", "0",
                                         "0")
     # create the attribute fields on the Streets feature class
     arcpy.AddField_management(self.nameStreets, "Name_ATE", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "Name", "LONG", "", "", "",
                               "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "Name_EVA", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "ID_ATE", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     arcpy.AddField_management(self.nameStreets, "ID_EVA", "LONG", "", "",
                               "", "", "NULLABLE", "REQUIRED", "")
     # make Name_ATE the subtype field
     arcpy.SetSubtypeField_management(self.nameStreets, "Name_ATE")
     # create subtypes in the database; Minsk is a special case with a
     # single fixed subtype code (17030)
     if self.name_district != "Минск":
         for element in self.new_dict_syptypes.items():
             arcpy.AddSubtype_management(self.nameStreets, element[1][0],
                                         element[0])
     else:
         arcpy.AddSubtype_management(self.nameStreets, 17030, 'Минск')
import arcpy,os

# Create a personal geodatabase in the current directory
arcpy.CreatePersonalGDB_management (os.getcwd(), "test.mdb")
# Create the MyFeatureclass1, Featureclass2 and Featureclass3 feature classes
arcpy.CreateFeatureclass_management (os.getcwd()+os.sep+"test.mdb", "MyFeatureclass1","POLYGON")
arcpy.CreateFeatureclass_management (os.getcwd()+os.sep+"test.mdb", "Featureclass2","POLYGON")
arcpy.CreateFeatureclass_management (os.getcwd()+os.sep+"test.mdb", "Featureclass3","POINT")

# List all feature classes in the personal geodatabase just created
arcpy.env.workspace = os.getcwd()+os.sep+"test.mdb"
fcs = arcpy.ListFeatureClasses()
print("All Featureclasses :")
for fc in fcs :
    print(fc)

# List feature classes using a wildcard: only polygon feature classes
# whose names start with "My"
fcs = arcpy.ListFeatureClasses("My*","Polygon")
print("Starts with My and Polygon :")
for fc in fcs :
    print(fc)


# List the point feature classes in the personal geodatabase
fcs = arcpy.ListFeatureClasses("","Point")
print("Point Featureclasses :")
for fc in fcs :
    print(fc)
arcpy.Delete_management(os.getcwd()+os.sep+"test.mdb")
def temp_mdb(eingangstabellen, sql, ausgabetabelle):
    """
    Copy the given tables into a temporary personal geodatabase (.mdb),
    run the supplied SQL on them via ODBC, and copy the result table back.

    :param eingangstabellen: list of (workspace, table_name) input tables
    :param sql: SQL statement executed against the temporary .mdb
    :param ausgabetabelle: (workspace, table_name) destination of the result
    """

    arcpy.env.overwriteOutput = True

    # Create Temp Folder
    base_path = str(sys.path[0]).split("2 Planungsprojekte analysieren")[
        0]  # path to the RPC base directory
    out_folder_path = join(base_path, '2_Tool', "Projektverwaltung",
                           "temp")  # folder that will hold the temporary mdb
    temp_mdb_path = join(out_folder_path, "PGDB_Temp.mdb")

    if not os.path.exists(out_folder_path):
        os.makedirs(out_folder_path)

    # Create Temp MDB File
    out_name = "PGDB_Temp.mdb"
    out_version = "10.0"
    try:
        arcpy.CreatePersonalGDB_management(out_folder_path, out_name,
                                           out_version)
    except:
        # NOTE(review): on any failure the temp folder is wiped and
        # recreated, but the mdb itself is NOT recreated afterwards --
        # confirm this recovery path is intended.
        shutil.rmtree(out_folder_path)
        os.makedirs(out_folder_path)

    # Copy given tables to Temporary MDB File
    for eingangstabelle in eingangstabellen:
        eingangstabelle_pfad = join(eingangstabelle[0], eingangstabelle[1])
        temptabelle_pfad = join(temp_mdb_path,
                                eingangstabelle[1].replace(' ', '_'))

        arcpy.CopyRows_management(eingangstabelle_pfad, temptabelle_pfad, "")

    # run SQL code on imported tables in PGDB
    connectionstring = 'DRIVER={Microsoft Access Driver (*.mdb)};DBQ=' + temp_mdb_path  # create connection string

    conn = pyodbc.connect(connectionstring)
    cursor = conn.cursor()

    try:
        cursor.execute(sql)
    except Exception as e:
        # NOTE(review): exits without closing conn -- tolerable only
        # because the process terminates right here.
        arcpy.AddMessage("sql-fehlerhaft \n")
        arcpy.AddMessage(sql)
        sys.exit()

    conn.commit()
    conn.close()

    # Copy resulting table to FGDB
    fgdb_eingangstabelle_pfad = join(temp_mdb_path, ausgabetabelle[1])
    fgdb_ausgabetabelle_pfad = join(ausgabetabelle[0], ausgabetabelle[1])

    arcpy.CopyRows_management(fgdb_eingangstabelle_pfad,
                              fgdb_ausgabetabelle_pfad, "")

    # Delete Temp Folder (best effort -- the mdb may still be locked)
    try:
        shutil.rmtree(out_folder_path)
    except:
        pass

    # Collect Garbage
    gc.collect()
Beispiel #10
0
import arcpy
import os

arcpy.env.overwriteOutput = True

arcpy.env.workspace = "Z:/Dropbox/data/LEAPS/data/Constructed/ethnic_info/gis/"

# Target geodatabase name -- delete if it fails up front!
Output = "ethnic_gis.mdb"
try:
    arcpy.Delete_management(Output, "Workspace")
    print "Deleted Old Database"
except:
    # Output.gdb did not exist, pass
    print "Did not delete"
    pass
arcpy.CreatePersonalGDB_management(arcpy.env.workspace, Output, "CURRENT")
print "Created New Database"

# Build a point event layer from the household GPS text file (WGS84
# geographic coordinates), then persist it as a feature class in the
# freshly created personal geodatabase.
arcpy.MakeXYEventLayer_management(
    "hhgps.txt", "hh_gps_east", "hh_gps_north", "temp",
    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision",
    "#")

arcpy.CopyFeatures_management("temp", "ethnic_gis.mdb/hhs")

print "Done!"
Beispiel #11
0
# Create a temporary database for processing errors
now = datetime.datetime.now()

# ArcGIS Desktop supports personal geodatabases (.mdb); other products
# get a file geodatabase (.gdb).  The name is a timestamp, so each run
# gets its own database.
if arcpy.GetInstallInfo()['ProductName'] == 'Desktop':
    gdbname = now.strftime("%Y%m%dT%H%M%S") + ".mdb"
else:
    gdbname = now.strftime("%Y%m%dT%H%M%S") + ".gdb"
arcpy.AddMessage("Product is  " + arcpy.GetInstallInfo()['ProductName'])

# NOTE(review): Workspace is defined elsewhere in this script; os.mkdir
# raises if the Temp directory already exists -- confirm that is intended.
TempWksp = Workspace + "\\Temp\\" + gdbname
TempDir = Workspace + "\\Temp"
os.mkdir(TempDir)
arcpy.AddMessage("Temp = " + TempWksp)

if arcpy.GetInstallInfo()['ProductName'] == 'Desktop':
    arcpy.CreatePersonalGDB_management(Workspace + "\\Temp", gdbname)
else:
    arcpy.CreateFileGDB_management(Workspace + "\\Temp", gdbname)

# Paths to intermediate feature classes inside the temp geodatabase
LineShape = TempWksp + "\\RevLine"
PolyShape = TempWksp + "\\RevPoly"
LineShapeSingle = TempWksp + "\\RevLine_single"
LineShapeDissolve = TempWksp + "\\RevLine_dissolve"
PolyShapeSingle = TempWksp + "\\RevPoly_single"
PolyShapeDissolve = TempWksp + "\\RevPoly_dissolve"
TempFC = TempWksp + "\\TempPoint"

# Paths to tables in Reviewer workspace
SessionsTable = ReviewerWorkspace + "\\REVSESSIONTABLE"
REVTABLEMAIN = ReviewerWorkspace + "\\REVTABLEMAIN"
REVTABLEPOINT = ReviewerWorkspace + "\\REVDATASET\\REVTABLEPOINT"
 def createMDB(tempdb_name):
     """Recreate the temporary personal geodatabase tempdb_name under myPath."""
     db_path = myPath + tempdb_name
     # Drop any stale copy first so creation starts clean.
     if os.path.exists(db_path):
         arcpy.Delete_management(db_path)
     arcpy.CreatePersonalGDB_management(myPath, tempdb_name)
import arcpy, os

# Create a mix of workspaces in the current directory: two personal
# geodatabases, one plain folder, and two file geodatabases.
arcpy.CreatePersonalGDB_management(os.getcwd(), "testListWorkspaces0.mdb")
arcpy.CreatePersonalGDB_management(os.getcwd(), "testListWorkspaces1.mdb")
arcpy.CreateFolder_management(os.getcwd(), "testListWorkspaces2")
arcpy.CreateFileGDB_management(os.getcwd(), "testListWorkspaces3.gdb")
arcpy.CreateFileGDB_management(os.getcwd(), "testListWorkspaces4.gdb")
arcpy.env.workspace = os.getcwd()
workSpaces = arcpy.ListWorkspaces()
# Python 2 print statements below.
print "This is all workSpaces:"
workSpaces = arcpy.ListWorkspaces()
for workSpace in workSpaces:
    print  workSpace
print "This is all gdb:"
# Narrow the listing down to file geodatabases only.
workSpaces = arcpy.ListWorkspaces("*", "FileGDB")
for workSpace in workSpaces:
    print  workSpace

# Clean up everything that was created above.
workSpaces = arcpy.ListWorkspaces()
for workSpace in workSpaces:
    arcpy.Delete_management(workSpace)
Beispiel #14
0
# load list of FC corresponding to type of network chosen
logger.log(10, '0 ; Restricting over limitation string: ' + queryArea)
logger.log(10, '0 ; Restricting over buffer area: ' + queryBufferArea)
cursor.execute('SELECT * FROM Model_' + typeNetwork.upper() + ';')
rows = cursor.fetchall()
for row in rows:
    if row[2].upper() in typeNetwork.upper():
        defFieldFC = []
        defFieldFC.append(row[0])
        defFC.append(defFieldFC)
# conn.close()

# prepare target personal MDB (gdb_target)
if arcpy.Exists(gdb_target):
    arcpy.Delete_management(gdb_target)
arcpy.CreatePersonalGDB_management(output_folder, gdb_target.split('\\')[-1])
# runCommand = 'copy '+templateDB+' '+gdb_target
# logger.log(10, '0 ; copy template personal MDB over '+gdb_target+' ('+runCommand+')')
# os.system(runCommand.replace("/","\\"))
logger.log(10, '0 ; created MASTER database in ' + gdb_target)

# start looking for GDB in given directory - some bug appearing https://geonet.esri.com/thread/72686 - coped with that
try:
    for (path, dirs, files) in os.walk(input_folder):
        if ".gdb" not in path.lower():
            arcpy.env.workspace = path
            logger.log(
                10, '0 ; Searching for file geodatabases (*.GDB) in "' +
                input_folder + '"')
            for database in arcpy.ListWorkspaces("*", "FileGDB"):
                if database.split('\\')[-1] == 'GIS.gdb':
Beispiel #15
0
# Earlier, commented-out version of the same exercise (kept as-is):
##import arcpy
##from arcpy import env
##out_path = "G:\School\Personal Drive\Fall2014\Python\Python\Data\Exercise09"
##env.workspace = out_path
##rasterlist = arcpy.ListRasters()
##arcpy.CreatePersonalGDB_management(out_path + "/Results", "allrasters.gdb")
##for rasters in rasterlist:
##    desc=arcpy.Describe(rasters)
##    rastername=desc.baseName
##    outraster = out_path + "/Results/allrasters.gdb/" + rastername
##    arcpy.CopyRaster_management(rasters, outraster)
##
import arcpy
from arcpy import env
# Copy every raster in the workspace into a new geodatabase.
# NOTE(review): CreatePersonalGDB_management with a ".gdb" name is unusual
# (personal geodatabases are normally .mdb) -- confirm this was intended.
out_path = "G:\School\Personal Drive\Fall2014\Python\Python\Data\Exercise09"
env.workspace = out_path
rasterlist = arcpy.ListRasters()
arcpy.CreatePersonalGDB_management(out_path + "/Results", "myrasters.gdb")
for raster in rasterlist:
    desc = arcpy.Describe(raster)
    rname = desc.baseName  # raster name without file extension
    outraster = out_path + "/Results/myrasters.gdb/" + rname
    arcpy.CopyRaster_management(raster, outraster)
# Create a scratch personal geodatabase, generate three random points in
# it, then buffer them by 10 units.
import arcpy
import random
arcpy.CreatePersonalGDB_management("C:/temp", "Circles.mdb")
arcpy.env.workspace = r'C:\temp\Circles.mdb'
mygeodatabase = r'C:\temp\Circles.mdb'
# BUG FIX: the call below was split across two statements -- the function
# object was evaluated on one line and its would-be arguments formed a
# discarded tuple on the next, so CreateRandomPoints never actually ran.
# Joined into one call (arguments kept exactly as written).
arcpy.CreateRandomPoints_management(arcpy.env.workspace, 'three_points', ",", 3)
arcpy.Buffer_analysis("three_points", "three_polys", "10")
Beispiel #17
0
    good_slope=((slope > 5) & (slope < 20)) #create raster objects with slope between 5 and 20
    good_aspect=((aspect > 150)&(aspect < 270))#create raster object with aspect between 150 and 270
    good_land=((land_cover == 41)|(land_cover == 42)|(land_cover == 43))#create raster object with only land cover types of 41,42 or 43
    good_area=(good_slope & good_aspect & good_land)#combine all these criteria into one raster object 

    good_area.save("challenge1")#save the raster object as "challenge1"
    arcpy.CheckInExtension("spatial")#check back in spatial analyst extension
else:
    print "Spatial Analyst Extension license is not available"

# Name: David Espinola
# Date: April 2, 2019
# Description: Challenge 2 - copy all rasters in the workspace to a new
# geodatabase (the rasters in the Exercise09 folder serve as an example).
# NOTE(review): relies on arcpy having been imported earlier in the file.
import os
from arcpy.sa import *
inpath = "C:/GEO6533/Python/Data/Exercise09"
arcpy.env.workspace = inpath  # set workspace to the raster folder
arcpy.env.overwriteOutput = True
# BUG FIX: was arcpy.CreatePersonalGDB_management(path, ...) -- 'path' is
# undefined here (NameError at runtime); the intended directory is inpath.
arcpy.CreatePersonalGDB_management(inpath, "pGDB.mdb")  # create personal geodatabase
#outpath=os.path.join(inpath,"pGDB.mdb")
outpath = "C:/GEO6533/Python/Data/Exercise09/pGDB.mdb"
Rasters = arcpy.ListRasters()  # list of rasters in the workspace

for raster in Rasters:  # iterate over the rasters
    raster_name = arcpy.Describe(raster).basename  # name without extension
    # BUG FIX: this print was over-indented (IndentationError) and used
    # Python 2 statement syntax; print() with one argument behaves the
    # same on Python 2 and 3.
    print(raster_name)  # check names are correct
    arcpy.CopyRaster_management(raster, outpath + "/" + raster_name)  # copy into the geodatabase
Beispiel #18
0
def MWCOG_BufferAndExport(point_shape, temp_prefix_short,
                          temp_prefix_long, startfresh, export):

    global my_out_fields
    global my_join_uniq_id

    #
    # Step 1: Message
    #

    mystarttime   = datetime.datetime.now()
    myendtime     = None
    myelapsedtime = None
    MWCOG_PrintWriter( "\n\tMWCOG_BufferAndExport (Started: " \
                      + str(mystarttime).split(".")[0] + ")" )
    MWCOG_PrintWriter( "\tShapefile  : " \
                      + str( ('..' + point_shape[(len(point_shape)\
                                                  -my_rightstringlength):])
                                               if len(point_shape) > my_rightstringlength
                                               else point_shape))
    MWCOG_PrintWriter( "\tShort Walk : " + str(dist_short) + ", Field = " \
                      + str(temp_prefix_short))
    MWCOG_PrintWriter( "\tLong Walk  : " + str(dist_long)  + ", Field = " \
                      + str(temp_prefix_long))

    #
    # Step 2: Create temporary file geodatabase
    #

    if (startfresh == 1):
        if (arcpy.Exists(my_GDB)) :
            MWCOG_PrintWriter( "\tdeleting geo-database ..." )
            arcpy.Delete_management(my_tmp_folder + "\\" + my_GDB_name)
        if (my_GDB_type == "PERSONAL"):
                MWCOG_PrintWriter( "\tcreating personal geo-database ...")
                arcpy.CreatePersonalGDB_management(my_tmp_folder,
                                                   my_GDB_name, "10.0")
        else:
                MWCOG_PrintWriter( "\tcreating file geo-database ...")
                arcpy.CreateFileGDB_management(my_tmp_folder,
                                               my_GDB_name, "10.0")
    else:
        if (arcpy.Exists(my_GDB)) : pass
        else:
                if (my_GDB_type == "PERSONAL"):
                        MWCOG_PrintWriter( "\tcreating personal geo-database ...")
                        arcpy.CreatePersonalGDB_management(my_tmp_folder,
                                                           my_GDB_name, "10.0")
                else:
                        MWCOG_PrintWriter( "\tcreating file geo-database ...")
                        arcpy.CreateFileGDB_management(my_tmp_folder,
                                                       my_GDB_name, "10.0")

    #
    # Step 3: Set Workspace(s)
    #

    arcpy.env.scratchWorkspace = my_GDB

    ## if (startfresh == 1):
    ##     MWCOG_PrintWriter( "\tlisting environment variables:" )
    ##     environments = arcpy.ListEnvironments()
    ##     for environment in environments:
    ##             envSetting = getattr(env, environment)
    ##             MWCOG_PrintWriter( "\t\t%-28s: %s" % (environment, envSetting) )

    #
    # Step 4: Work
    #

    try:
        #
        # Construct paths
        #
        temp_TAZ      = my_GDB        + "\\" + "TAZ_with_PercentWalkSheds"

        short_Temp1   = my_GDB        + "\\" + str(temp_prefix_short) + "Temp1"
        short_Temp2   = my_GDB        + "\\" + str(temp_prefix_short) + "Temp2"
        short_Temp3   = my_GDB        + "\\" + str(temp_prefix_short) + "Temp3"
        short_Temp4   = my_GDB        + "\\" + str(temp_prefix_short)
        short_OutFile = my_out_folder + "\\" + str(temp_prefix_short) + ".csv"

        long_Temp1    = my_GDB        + "\\" + str(temp_prefix_long)  + "Temp1"
        long_Temp2    = my_GDB        + "\\" + str(temp_prefix_long)  + "Temp2"
        long_Temp3    = my_GDB        + "\\" + str(temp_prefix_long)  + "Temp3"
        long_Temp4    = my_GDB        + "\\" + str(temp_prefix_long)
        long_OutFile  = my_out_folder + "\\" + str(temp_prefix_long)  + ".csv"

        #
        # Delete Existing Outputs
        #
        MWCOG_PrintWriter( "\tinitializing outputs ..." )

        if (startfresh == 1):

            # if starting afresh, copy TAZ layer into geodatabase and compute area
            if arcpy.Exists(temp_TAZ)  : arcpy.Delete_management(temp_TAZ)
            arcpy.CopyFeatures_management(User_Input_TAZLandArea_Shapefile, temp_TAZ)

            # check if area field exists else compute
            my_fieldList = arcpy.ListFields(temp_TAZ)

            my_fieldexists = False
            for my_field in my_fieldList:
                    if my_field.name == my_TAZ_area_name:
                            my_fieldexists = True
                            MWCOG_PrintWriter( "\t\tfound/using existing TAZ area field: " \
                                              + my_TAZ_area_name )
                            break
            if (my_fieldexists): pass
            else:
                    MWCOG_PrintWriter( "\t\tcreating TAZ area field: " \
                                      + my_TAZ_area_name )
                    arcpy.AddField_management(temp_TAZ, my_TAZ_area_name, "DOUBLE",
                                              14, "", "", my_TAZ_area_name)
                    arcpy.CalculateField_management( temp_TAZ, my_TAZ_area_name,
                                                    "!shape.area@SQUAREFEET!", "PYTHON")

        # delete old files
        if arcpy.Exists(short_Temp1)   : arcpy.Delete_management(short_Temp1)
        if arcpy.Exists(short_Temp2)   : arcpy.Delete_management(short_Temp2)
        if arcpy.Exists(short_Temp3)   : arcpy.Delete_management(short_Temp3)
        if arcpy.Exists(short_Temp4)   : arcpy.Delete_management(short_Temp4)
        if arcpy.Exists(short_OutFile) : arcpy.Delete_management(short_OutFile)

        if arcpy.Exists(long_Temp1)    : arcpy.Delete_management(long_Temp1)
        if arcpy.Exists(long_Temp2)    : arcpy.Delete_management(long_Temp2)
        if arcpy.Exists(long_Temp3)    : arcpy.Delete_management(long_Temp3)
        if arcpy.Exists(long_Temp4)    : arcpy.Delete_management(long_Temp4)
        if arcpy.Exists(long_OutFile)  : arcpy.Delete_management(long_OutFile)

        #
        # Process: Buffer & Compact database
        #
        MWCOG_PrintWriter( "\tbuffering ..." )

        arcpy.Buffer_analysis(point_shape, short_Temp1, dist_short,
                              my_buffer_sideType, my_buffer_endType,
                              my_buffer_dissolveType, my_buffer_dissolveField)
        arcpy.Compact_management( my_GDB )

        arcpy.Buffer_analysis(point_shape,  long_Temp1,  dist_long,
                              my_buffer_sideType, my_buffer_endType,
                              my_buffer_dissolveType, my_buffer_dissolveField)
        arcpy.Compact_management( my_GDB )

        #
        # Process: Add a field to dissolve on
        #
        MWCOG_PrintWriter( "\tadding a field for dissolving split buffers ..." )

        arcpy.AddField_management(short_Temp1, my_temp_buffer_dissolve_field_name,
                                  "SHORT", 1, "", "",
                                  my_temp_buffer_dissolve_field_name)
        arcpy.CalculateField_management(short_Temp1,
                                        my_temp_buffer_dissolve_field_name,
                                        "0", "PYTHON", "")

        arcpy.AddField_management( long_Temp1, my_temp_buffer_dissolve_field_name,
                                  "SHORT", 1, "", "",
                                  my_temp_buffer_dissolve_field_name)
        arcpy.CalculateField_management( long_Temp1,
                                        my_temp_buffer_dissolve_field_name,
                                        "0", "PYTHON", "")

        #
        # Process: Dissolve 1
        #
        MWCOG_PrintWriter( "\tdissolving any split buffers ..." )

        arcpy.Dissolve_management(short_Temp1, short_Temp2, "Buf_Dis", "",
                                  "MULTI_PART", "DISSOLVE_LINES")
        arcpy.Compact_management( my_GDB )

        arcpy.Dissolve_management( long_Temp1,  long_Temp2, "Buf_Dis", "",
                                  "MULTI_PART", "DISSOLVE_LINES")
        arcpy.Compact_management( my_GDB )

        #
        # Process: Intersect
        #
        MWCOG_PrintWriter( "\tintersecting ..." )

        arcpy.Intersect_analysis("'" + short_Temp2 + "'" + " #;" + \
                                 "'" + temp_TAZ + "'" + " #",
                                 short_Temp3, "NO_FID", "", "INPUT")
        arcpy.Compact_management( my_GDB )

        arcpy.Intersect_analysis("'" + long_Temp2 + "'" + " #;" + "'" \
                                 + temp_TAZ + "'" + " #",
                                 long_Temp3, "NO_FID", "", "INPUT")
        arcpy.Compact_management( my_GDB )

        #
        # Process: Dissolve 2
        #
        MWCOG_PrintWriter( "\tdissolving ..." )

        arcpy.Dissolve_management(short_Temp3, short_Temp4, my_dissolve_Fields,
                                  "", "MULTI_PART", "DISSOLVE_LINES")
        arcpy.Compact_management( my_GDB )

        arcpy.Dissolve_management( long_Temp3,  long_Temp4, my_dissolve_Fields,
                                  "", "MULTI_PART", "DISSOLVE_LINES")
        arcpy.Compact_management( my_GDB )

        #
        # Process: Join Short-Walk to Zone Layer
        #
        MWCOG_PrintWriter( "\tcomputing short-walk (" + calc_type + ") ..." )

        # join
        # Help: http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#//001700000065000000
        arcpy.JoinField_management(temp_TAZ, my_TAZ_name, short_Temp4,
                                   my_TAZ_name, my_join_Fieldlist)



        # construct shape_area name
        # Below method is quirky, but this is how ArcGIS appends fields
        if   my_join_uniq_id == 0:
            my_shape_area_field = "Shape_Area"
        elif my_join_uniq_id == 1:
            my_shape_area_field = "Shape_Area_1"
        elif my_join_uniq_id == 2:
            my_shape_area_field = "Shape_Area_12"
        elif my_join_uniq_id == 3:
            my_shape_area_field = "Shape_Area_12_13"
        elif my_join_uniq_id == 4:
            my_shape_area_field = "Shape_Area_12_13_14"
        elif my_join_uniq_id == 5:
            my_shape_area_field = "Shape_Area_12_13_14_15"
        elif my_join_uniq_id == 6:
            my_shape_area_field = "Shape_Area_12_13_14_15_16"
        else: my_shape_area_field = "UNDEFINED"

        my_join_uniq_id += 1

        # calculate percent walk
        my_calcfield_expression = "CalcAttribute( !" + my_TAZ_area_name \
        + "!, !" + my_shape_area_field + "! )"
        arcpy.AddField_management(temp_TAZ, temp_prefix_short, "DOUBLE", "",
                                  "", "", "", "NULLABLE", "REQUIRED", "")

        # select based on calculation type
        if   (calc_type == "PercentWalk") :
                arcpy.CalculateField_management(temp_TAZ, temp_prefix_short,
                                                my_calcfield_expression, "PYTHON",
                                                my_pctwlk_calcfield_codeblock)
        elif (calc_type == "AreaWalk") :
                arcpy.CalculateField_management(temp_TAZ, temp_prefix_short,
                                                my_calcfield_expression, "PYTHON",
                                                my_arawlk_calcfield_codeblock)
        else:
                arcpy.AddError("ERROR: Un-recognized calc_type specified!")

        # make a note of this field
        my_out_fields.append(temp_prefix_short)

        #
        # Process: Join Long-Walk to Zone Layer
        #
        MWCOG_PrintWriter( "\tcomputing long-walk (" + calc_type + ") ..." )

        # join
        # Help: http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#//001700000065000000
        arcpy.JoinField_management(temp_TAZ, my_TAZ_name, long_Temp4,
                                   my_TAZ_name, my_join_Fieldlist)

        # construct shape_area name
        # Below method is quirky, but this is how ArcGIS appends fields
        if   my_join_uniq_id == 0:
            my_shape_area_field = "Shape_Area"
        elif my_join_uniq_id == 1:
            my_shape_area_field = "Shape_Area_1"
        elif my_join_uniq_id == 2:
            my_shape_area_field = "Shape_Area_12"
        elif my_join_uniq_id == 3:
            my_shape_area_field = "Shape_Area_12_13"
        elif my_join_uniq_id == 4:
            my_shape_area_field = "Shape_Area_12_13_14"
        elif my_join_uniq_id == 5:
            my_shape_area_field = "Shape_Area_12_13_14_15"
        elif my_join_uniq_id == 6:
            my_shape_area_field = "Shape_Area_12_13_14_15_16"
        else: my_shape_area_field = "UNDEFINED"

        my_join_uniq_id += 1

        # calculate percent walk
        my_calcfield_expression = "CalcAttribute( !" + my_TAZ_area_name + "!, !" \
        + my_shape_area_field + "! )"
        arcpy.AddField_management(temp_TAZ, temp_prefix_long, "DOUBLE", "", "",
                                  "", "", "NULLABLE", "REQUIRED", "")

        # select based on calculation type
        if   (calc_type == "PercentWalk") :
                arcpy.CalculateField_management(temp_TAZ, temp_prefix_long,
                                                my_calcfield_expression, "PYTHON",
                                                my_pctwlk_calcfield_codeblock)
        elif (calc_type == "AreaWalk") :
                arcpy.CalculateField_management(temp_TAZ, temp_prefix_long,
                                                my_calcfield_expression, "PYTHON",
                                                my_arawlk_calcfield_codeblock)
        else:
                arcpy.AddError("ERROR: Un-recognized calc_type specified!")

        # make a note of this field
        my_out_fields.append(temp_prefix_long)

        #
        # Process: Export Feature Attribute to ASCII...
        #
        if (export==1):
            MWCOG_PrintWriter( "\twriting out " + calc_type + " Unsorted CSV file ..." )
            if arcpy.Exists(User_Output_Walkshed_CSV_File_UnSorted):
                arcpy.Delete_management(User_Output_Walkshed_CSV_File_UnSorted)
            arcpy.ExportXYv_stats(temp_TAZ, my_out_fields, "COMMA",
                                  User_Output_Walkshed_CSV_File_UnSorted,
                                  "ADD_FIELD_NAMES")

            MWCOG_PrintWriter( "\tsorting " + calc_type + " CSV file ..." )
            if arcpy.Exists(User_Output_Walkshed_CSV_File):
                arcpy.Delete_management(User_Output_Walkshed_CSV_File)

            with open(User_Output_Walkshed_CSV_File_UnSorted, "rb") as infile, \
            open(User_Output_Walkshed_CSV_File, "wb") as outfile:
               reader = csv.reader(infile)
               writer = csv.writer(outfile)
               # returns the headers or `None` if the input is empty
               headers = next(reader, None)
               if headers:
                   writer.writerow(headers)
               # 2 specifies that we want to sort according to the third column "TAZID"
               sortedlist = sorted(reader,  key = lambda x:int(x[2]))
               #now write the sorted result into new CSV file
               for row in sortedlist:
                   writer.writerow(row)

            MWCOG_PrintWriter( "\twriting out " + calc_type + " TXT file ..." )
            if arcpy.Exists(User_Output_Walkshed_TXT_File)  :
                arcpy.Delete_management(User_Output_Walkshed_TXT_File)

            # list fields
            fieldList = arcpy.ListFields(User_Output_Walkshed_CSV_File)

            # skip the fields 1,2 & 5  'XCoord, YCoord, TAZ, TAZ_AREA, MTLRTSHR,
            #                  MTLRTLNG, ALLPKSHR, ALLPKLNG, ALLOPSHR, ALLOPLNG'

            FieldsToSkip = ['XCoord', 'YCoord']

            # open input and process

            i = 1
            f = open(User_Output_Walkshed_TXT_File,'w')

            # variables for statistics

            my_num_taz                      = 0
            my_total_taz_area               = 0
            my_total_taz_walk_area          = 0
            my_total_taz_walk_pk_area       = 0
            my_total_taz_walk_op_area       = 0

            my_total_mtlrt_shr_area         = 0
            my_total_mtlrt_lng_area         = 0
            my_total_allpk_shr_area         = 0
            my_total_allpk_lng_area         = 0
            my_total_allop_shr_area         = 0
            my_total_allop_lng_area         = 0

            my_taz_list_zero_land_area      = []
            my_taz_list_zero_walk_area      = []
            my_taz_shr_list_pk_less_than_op = []
            my_taz_lng_list_pk_less_than_op = []

            # write header
            for field in fieldList:
                 if field.name in FieldsToSkip: i += 1
                 else:
                       if field.name == my_TAZ_name.upper() :
                        f.write('%6s'    % field.name)
                       elif i < len(fieldList)              :
                        f.write('%10s'   % field.name)
                       else                                 :
                        f.write('%10s\n' % field.name)
                       i += 1

            # write (copy) data
            rows = arcpy.SearchCursor(User_Output_Walkshed_CSV_File)
            for row in rows:
                i = 1
                my_mtlrt_shr = 0
                my_mtlrt_lng = 0
                my_allpk_shr = 0
                my_allpk_lng = 0
                my_allop_shr = 0
                my_allop_lng = 0

                for field in fieldList:
                     if field.name in FieldsToSkip: i += 1
                     else:
                           # write fields
                           if   field.name == my_TAZ_name.upper()      :
                               f.write('%6d' %  row.getValue(field.name) )
                           elif field.name == my_TAZ_area_name.upper() :
                               f.write('%10.4f' % round((row.getValue(field.name) / (5280*5280)), 4) )
                           elif i < len(fieldList)                     :
                               f.write('%10.4f' % round(row.getValue(field.name), 4))
                           else :
                               f.write('%10.4f\n' % round(row.getValue(field.name), 4))
                           i += 1

                           # save field value for checks
                           if( field.name == "MTLRTSHR" ) :
                               my_mtlrt_shr = round( row.getValue(field.name), 4)
                           if( field.name == "MTLRTLNG" ) :
                               my_mtlrt_lng = round( row.getValue(field.name), 4)
                           if( field.name == "ALLPKSHR" ) :
                               my_allpk_shr = round( row.getValue(field.name), 4)
                           if( field.name == "ALLPKLNG" ) :
                               my_allpk_lng = round( row.getValue(field.name), 4)
                           if( field.name == "ALLOPSHR" ) :
                               my_allop_shr = round( row.getValue(field.name), 4)
                           if( field.name == "ALLOPLNG" ) :
                               my_allop_lng = round( row.getValue(field.name), 4)

                # update stats on fields

                my_num_taz += 1
                my_total_taz_area += round( (row.getValue(my_TAZ_area_name.
                                                          upper()) / (5280*5280)), 4)

                if( row.getValue(my_TAZ_area_name.upper()) == 0 ):
                   my_taz_list_zero_land_area.append( str( row.
                                                          getValue(my_TAZ_name.upper())))

                if( (my_mtlrt_shr + my_mtlrt_lng + my_allpk_shr +
                     my_allpk_lng + my_allop_shr + my_allop_lng) == 0 ):
                   my_taz_list_zero_walk_area.append( str( row.
                                                          getValue(my_TAZ_name.upper())))

                my_total_mtlrt_shr_area +=  my_mtlrt_shr
                my_total_mtlrt_lng_area +=  my_mtlrt_lng
                my_total_allpk_shr_area +=  my_allpk_shr
                my_total_allpk_lng_area +=  my_allpk_lng
                my_total_allop_shr_area +=  my_allop_shr
                my_total_allop_lng_area +=  my_allop_lng

                if( my_allpk_shr < my_allop_shr ):
                   my_taz_shr_list_pk_less_than_op.append( str( row.getValue(my_TAZ_name.upper())))
                if( my_allpk_lng < my_allop_lng ):
                   my_taz_lng_list_pk_less_than_op.append( str( row.getValue(my_TAZ_name.upper())))

                my_total_taz_walk_area    += ( max( my_mtlrt_lng, my_allpk_lng, my_allop_lng ))
                my_total_taz_walk_pk_area += ( max( my_mtlrt_lng, my_allpk_lng               ))
                my_total_taz_walk_op_area += ( max( my_mtlrt_lng,               my_allop_lng ))

            del rows
            f.close()

            # report stats on fields
            if( calc_type == "AreaWalk" ):
                MWCOG_PrintWriter( "\nSUMMARY REPORT:"                                                                                                                                                                                                )
                MWCOG_PrintWriter( "\n\tNumber of TAZ Records in Output                                  : " \
                                  + str('{:,}'.format(my_num_taz))                                                                                                         )
                MWCOG_PrintWriter( "\tTotal TAZ LAND Area                                              : " \
                                  + str('{:9.4f} sq. mi.'.format(my_total_taz_area))                                                                                         )
                if( len(my_taz_list_zero_land_area) == 0 ) :
                        MWCOG_PrintWriter( "\tTAZs with Zero LAND Area                                         : NONE"                                                                                                                                )
                else:
                        MWCOG_PrintWriter( "\tTAZs with Zero LAND Area                                         : " \
                                          + "(Count=" + str(len(my_taz_list_zero_land_area)) + ") " \
                                          + ','.join(sorted(my_taz_list_zero_land_area, key=int)))
                MWCOG_PrintWriter( "\n\tTotal TAZ Long-Walk Area                                         : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                      .format(my_total_taz_walk_area,
                                              my_total_taz_walk_area/my_total_taz_area)))
                MWCOG_PrintWriter( "\tTotal TAZ Long-Walk (Peak Period) Area                           : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                      .format(my_total_taz_walk_pk_area,
                                              my_total_taz_walk_pk_area/my_total_taz_area)))
                MWCOG_PrintWriter( "\tTotal TAZ Long-Walk (Off-Peak Period) Area                       : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                      .format(my_total_taz_walk_op_area,
                                              my_total_taz_walk_op_area/my_total_taz_area)))
                if( len(my_taz_list_zero_walk_area) == 0 ) :
                        MWCOG_PrintWriter( "\tTAZs with Zero WALK Area                                         : NONE"                                                                                                                                )
                else:
                        MWCOG_PrintWriter( "\tTAZs with Zero WALK Area                                         : " \
                                          + "(Count=" + str(len(my_taz_list_zero_walk_area)) \
                                          + ") " + ','.join(sorted(my_taz_list_zero_walk_area,
                                                                   key=int)))
                MWCOG_PrintWriter( "\n\tTotal MTLRTSHR Area                                              : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                        .format(my_total_mtlrt_shr_area,
                                                my_total_mtlrt_shr_area/my_total_taz_area)))
                MWCOG_PrintWriter( "\tTotal MTLRTLNG Area                                              : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                        .format(my_total_mtlrt_lng_area,
                                                my_total_mtlrt_lng_area/my_total_taz_area)))
                MWCOG_PrintWriter( "\tTotal ALLPKSHR Area                                              : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                        .format(my_total_allpk_shr_area,
                                                my_total_allpk_shr_area/my_total_taz_area)))
                MWCOG_PrintWriter( "\tTotal ALLPKLNG Area                                              : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                        .format(my_total_allpk_lng_area,
                                                my_total_allpk_lng_area/my_total_taz_area)))
                MWCOG_PrintWriter( "\tTotal ALLOPSHR Area                                              : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                        .format(my_total_allop_shr_area,
                                                my_total_allop_shr_area/my_total_taz_area)))
                MWCOG_PrintWriter( "\tTotal ALLOPLNG Area                                              : " \
                                  + str('{:9.4f} sq. mi. ({:6.2%} of TAZ Land Area)'
                                        .format(my_total_allop_shr_area,
                                                my_total_allop_shr_area/my_total_taz_area)))
                if( len(my_taz_shr_list_pk_less_than_op) == 0 ) :
                        MWCOG_PrintWriter( "\n\tTAZs with Short-Walk Less in Peak Period than in Off-Peak Period : NONE" )
                else:
                        MWCOG_PrintWriter( "\n\tTAZs with Short-Walk Less in Peak Period than in Off-Peak Period : " \
                                          + "(Count=" + str(len(my_taz_shr_list_pk_less_than_op)) + ") " \
                                          + ','.join(sorted(my_taz_shr_list_pk_less_than_op, key=int))      )
                if( len(my_taz_lng_list_pk_less_than_op) == 0 ) :
                        MWCOG_PrintWriter( "\tTAZs with  Long-Walk Less in Peak Period than in Off-Peak Period : NONE\n" )
                else:
                        MWCOG_PrintWriter( "\tTAZs with  Long-Walk  Less in Peak Period than in Off-Peak Period : " \
                                          + "(Count=" + str(len(my_taz_lng_list_pk_less_than_op)) \
                                          + ") " + ','.join(sorted(my_taz_lng_list_pk_less_than_op, key=int)) + "\n" )
    except:
        #
        # Get the traceback object
        #
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        #
        # Concatenate information together concerning the error into a message string
        #
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n"\
         + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n\n\n"

        #
        # Return python error messages for use in script tool or Python Window
        #
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        return 1

    myendtime     = datetime.datetime.now()
    myelapsedtime = myendtime - mystarttime
    MWCOG_PrintWriter( "\tMWCOG_BufferAndExport (Finished: "\
                      + str(myendtime).split(".")[0] + ", Elapsed: " \
                      + str(myelapsedtime).split(".")[0] + ")" )
    return 0
Beispiel #19
0
    def create_address_layer(self):
        """Export the chosen territory's addresses to a personal geodatabase.

        Builds an attribute table in ``self.nameDateBase`` from the selected
        queryset and, when the queryset carries coordinates, also creates a
        point feature class (projected from SK-42 to WGS-84, clipped to the
        territory mask) inside the same geodatabase.

        :return: None; all output is written to disk via arcpy.
        """
        # Choose territory and select queryset
        self.choose_territory()
        if self.region:
            queryset = self.create_address_region_queryset()
        elif self.minsk:
            queryset = self.create_address_minsk_queryset()
        else:
            queryset = self.create_address_district_queryset()

        # create mdb
        try:
            arcpy.CreatePersonalGDB_management(self.work_path, self.name_file)
        except Exception:
            # Best effort: the geodatabase may already exist.  Catch
            # Exception (not a bare ``except:``) so SystemExit and
            # KeyboardInterrupt are not swallowed.
            pass
        self.nameDateBase = os.path.join(self.work_path, self.name_file)
        # Create table in DataBase
        arcpy.CreateTable_management(self.nameDateBase, self.name_table, "",
                                     "")
        name_table = os.path.join(self.nameDateBase, self.name_table)
        # Create fields in table
        list_field = []
        # Call function set_with_coord() for the first time
        if self.set_with_coord():
            select_field = self.select_fields[:-2]
        else:
            select_field = self.select_fields
        for field in select_field:
            # field[2] is the True/False flag from all_select_fields
            if field[2]:
                # FIELDS: field[1][1:-1] is the alias for the field (like
                # 'ID_ATE'), so we strip the surrounding quotes;
                # field[3] is the field type (e.g. text or double)
                arcpy.AddField_management(name_table, field[1][1:-1], field[3],
                                          "", "", "", "", "NULLABLE",
                                          "NON_REQUIRED", "")
                list_field.append(field[1][1:-1])
        # Use the cursor as a context manager so the schema lock on the
        # table is released even if an insert raises.
        if self.with_coord:
            with arcpy.da.InsertCursor(name_table, list_field) as cursor_arc:
                for element in queryset:
                    # Drop the two trailing coordinate columns from the row.
                    cursor_arc.insertRow(element[:-2])
        # this case if select doesn't contain coordinates, in other words self.with_coord = False
        else:
            with arcpy.da.InsertCursor(name_table, list_field) as cursor_arc:
                for element in queryset:
                    cursor_arc.insertRow(element)

        # if queryset contains coordinates we create shp address and convert it in wgs84
        if self.with_coord:
            # create mdb
            # Add values in SHP address42
            arcpy.CreateFeatureclass_management(self.work_path,
                                                "address42.shp", 'POINT', "",
                                                "DISABLED", "DISABLED", sk_42,
                                                "", "0", "0", "0")
            name_address42 = os.path.join(self.work_path, "address42.shp")

            name_address = os.path.join(
                self.work_path, '{0}_address.shp'.format(self.name_table))
            arcpy.AddField_management(name_address42, "ID_ADR", "LONG", "", "",
                                      "", "", "NULLABLE", "REQUIRED", "")
            with arcpy.da.InsertCursor(name_address42,
                                       ["ID_ADR", "SHAPE@XY"]) as cursor_arc:
                for element in queryset:
                    # Skip rows without coordinates; the last two elements
                    # of the row are the coordinate pair.
                    if element[-1] is not None:
                        cursor_arc.insertRow([element[0]] +
                                             [(element[-1], element[-2])])
            # convert coordinates from sk42 to wgs84
            arcpy.Project_management(name_address42, name_address, wgs84,
                                     "CK42_to_ITRF2005", sk_42)
            # add coordinates in wgs84
            arcpy.AddXY_management(name_address)
            # Delete shp address42
            arcpy.Delete_management(name_address42, "FeatureClass")

            maska_temp = os.path.join(self.path_to_maska, 'maska_temp.shp')

            # Extract the mask polygon for the chosen territory.
            if self.region:
                arcpy.Select_analysis(
                    os.path.join(self.path_to_maska,
                                 "region_maska84.shp"), maska_temp,
                    "\"uid\" = {0}".format(regions[self.name_territory]))
            elif self.minsk:
                arcpy.Select_analysis(
                    os.path.join(self.path_to_maska, "region_maska84.shp"),
                    maska_temp, "\"uid\" = 5")
            else:
                arcpy.Select_analysis(
                    os.path.join(self.path_to_maska,
                                 "district_maska84.shp"), maska_temp,
                    "\"uid\" = {0}".format(districts[self.name_territory]))

            # Clip the projected addresses to the mask, writing the result
            # directly into the personal geodatabase.
            arcpy.Clip_analysis(
                name_address, maska_temp,
                os.path.join(self.nameDateBase,
                             '{0}_address'.format(self.name_table)), "")

            # arcpy.FeatureClassToGeodatabase_conversion(name_address, self.nameDateBase)
            arcpy.Delete_management(name_address, "FeatureClass")
            arcpy.Delete_management(maska_temp, "FeatureClass")
def execute(request):
    """Copies data to an existing geodatabase or feature dataset.

    Creates the target workspace first when it does not exist (file GDB,
    personal GDB, or a feature dataset inside one), then pulls search
    results from the index in chunks and hands each chunk to
    ``add_to_geodatabase``.  Progress and final state are reported through
    the module-level ``status_writer``; final counts are written to
    ``__report.json`` in the task folder.

    :param request: json as a dict.
    """
    added = 0
    errors = 0
    skipped = 0
    global result_count
    parameters = request['params']

    # Get the target workspace location.
    out_gdb = task_utils.get_parameter_value(parameters, 'target_workspace',
                                             'value')

    # Retrieve the coordinate system code.
    out_coordinate_system = task_utils.get_parameter_value(
        parameters, 'output_projection', 'code')
    if not out_coordinate_system == '0':  # Same as Input
        arcpy.env.outputCoordinateSystem = task_utils.get_spatial_reference(
            out_coordinate_system)

    task_folder = request['folder']
    if not os.path.exists(task_folder):
        os.makedirs(task_folder)

    # Check if the geodatabase exists or if it is a feature dataset.
    is_fds = False
    if not os.path.exists(out_gdb):
        if out_gdb.endswith('.gdb'):
            arcpy.CreateFileGDB_management(os.path.dirname(out_gdb),
                                           os.path.basename(out_gdb))
            status_writer.send_status(
                _('Created output workspace: {0}').format(out_gdb))
        elif out_gdb.endswith('.mdb'):
            arcpy.CreatePersonalGDB_management(os.path.dirname(out_gdb),
                                               os.path.basename(out_gdb))
            status_writer.send_status(
                _('Created output workspace: {0}').format(out_gdb))
        elif out_gdb.endswith('.sde'):
            # An SDE connection file cannot be created here; fail the task.
            status_writer.send_state(status.STAT_FAILED,
                                     _('{0} does not exist').format(out_gdb))
            return
        else:
            # Possible feature dataset.
            is_fds = is_feature_dataset(out_gdb)
            if not is_fds:
                # Not an existing feature dataset: create the parent
                # geodatabase if needed, then the dataset inside it.
                if os.path.dirname(out_gdb).endswith('.gdb'):
                    if not os.path.exists(os.path.dirname(out_gdb)):
                        arcpy.CreateFileGDB_management(
                            os.path.dirname(os.path.dirname(out_gdb)),
                            os.path.basename(os.path.dirname(out_gdb)))
                    arcpy.CreateFeatureDataset_management(
                        os.path.dirname(out_gdb), os.path.basename(out_gdb))
                elif os.path.dirname(out_gdb).endswith('.mdb'):
                    if not os.path.exists(os.path.dirname(out_gdb)):
                        arcpy.CreatePersonalGDB_management(
                            os.path.dirname(os.path.dirname(out_gdb)),
                            os.path.basename(os.path.dirname(out_gdb)))
                    arcpy.CreateFeatureDataset_management(
                        os.path.dirname(out_gdb), os.path.basename(out_gdb))

    status_writer.send_status(_('Setting the output workspace...'))
    arcpy.env.workspace = out_gdb

    headers = {
        'x-access-token': task_utils.get_security_token(request['owner'])
    }
    result_count, response_index = task_utils.get_result_count(parameters)
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    # Base Solr query URL; the index endpoint is taken from argv[2]
    # ("name=value" form).
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json',
                               fl)
    # query = '{0}{1}{2}'.format("http://localhost:8888/solr/v0", '/select?&wt=json', fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']),
                                    task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')

    status_writer.send_percent(0.0, _('Starting to process...'),
                               'add_to_geodatabase')
    for group in groups:
        if fq:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                verify=verify_ssl,
                headers=headers)
        elif 'ids' in parameters[response_index]:
            # NOTE(review): ``fl`` is already embedded in ``query`` above;
            # formatting it in again here duplicates the field list in the
            # request URL -- confirm this is intended.
            results = requests.get(query +
                                   '{0}&ids={1}'.format(fl, ','.join(group)),
                                   verify=verify_ssl,
                                   headers=headers)
        else:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                verify=verify_ssl,
                headers=headers)

        docs = results.json()['response']['docs']
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(
                parameters[response_index]['response']['docs'])
        input_rows = collections.defaultdict(list)
        # Docs without a 'path' are treated as tabular rows grouped by title.
        for doc in docs:
            if 'path' not in doc:
                input_rows[doc['title']].append(doc)
        if input_rows:
            result = add_to_geodatabase(input_rows, out_gdb, is_fds)
            added += result[0]
            errors += result[1]
            skipped += result[2]

        if input_items:
            result = add_to_geodatabase(input_items, out_gdb, is_fds)
            added += result[0]
            errors += result[1]
            skipped += result[2]

        if not input_items and not input_rows:
            status_writer.send_state(
                status.STAT_FAILED,
                _('No items to process. Check if items exist.'))
            return

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(skipped + errors))
    task_utils.report(os.path.join(task_folder, '__report.json'), added,
                      skipped, errors, errors_reasons, skipped_reasons)
    # Create the 'Dados Brutos' (raw data) folder structure, if it does not exist.
    print '## Etapa 1: Cria a estrutura das pastas e "Geodatabase"'

    directorys = [
        'Dados Brutos', 'Geodata', 'Documentos', 'Geodata//shp', 'Geodata//kml'
    ]
    for directory in directorys:
        try:
            os.makedirs(os.path.join(Input, folder, directory))
        except OSError:
            # Ignore the error only when the directory already exists;
            # re-raise on any other failure (permissions, bad path, ...).
            if not os.path.isdir(os.path.join(Input, folder, directory)):
                raise

# Create the geodatabase.
    try:
        arcpy.CreatePersonalGDB_management(
            os.path.join(Input, folder, 'Geodata'), 'Geo_SiCAR.mdb')
    except:
        # NOTE(review): bare except silently swallows every error here;
        # presumably only an already-existing geodatabase should be
        # ignored -- confirm and narrow the handler.
        print 'Erro qualquer no geodatabase'

# -------------------------------------------------------------------------------------------------------
# Extract the CAR number, assuming the folder name contains it as digits.
    print '## Etapa 2: Pega o Numero do CAR a partir do nome das pastas.'
    numberfolder = ''
    validLetters = '1234567890'
    # Keep only the digit characters of the folder name.
    for char in folder:
        if char in validLetters:
            numberfolder += char

    print '>> Portanto: ' + folder + ' se transformou em ' + numberfolder + '.'

    # -------------------------------------------------------------------------------------------------------
def createMDB(tempdb_name):
    """Create a brand-new personal geodatabase named ``tempdb_name`` in ``myPath``.

    Any database already present at that location is removed first, so the
    result is always a fresh, empty .mdb.
    """
    target_db = myPath + tempdb_name
    if os.path.exists(target_db):
        arcpy.Delete_management(target_db)
    arcpy.CreatePersonalGDB_management(myPath, tempdb_name)
    arcpy.AddMessage('  --> Utworzono baze mdb ' + tempdb_name)
Beispiel #23
0
    
    OutputRasters = []
    temp = arcpy.env.workspace
    # Temporarily point the workspace at the WIP output database so that
    # ListRasters() sees its rasters, then restore the previous workspace.
    arcpy.env.workspace = os.path.join(hp.Workspace, "WIPoutput.mdb")
    OutputRasters = arcpy.ListRasters()
    arcpy.env.workspace = temp
    
    existing_params = hp.GetAlias(bmp_eeff_l)
    proposed_params = hp.GetAlias(bmp_peff_l)
    streamlen_params = hp.GetAlias(stream_len_perft)
    
    # NOTE(review): list.sort() sorts in place and returns None, so both
    # sides of this comparison are None and the mismatch check can never
    # fire.  The intent was probably
    # sorted(existing_params.keys()) == sorted(proposed_params.keys())
    # -- confirm before relying on this validation.
    if not existing_params.keys().sort() == proposed_params.keys().sort():
        raise Exception, "Parameters found for Existing efficiencies and Proposed efficiencies do not match"

    gdb = "CIP_%s.mdb" % ScenName.replace(" ", "_")
    arcpy.CreatePersonalGDB_management(hp.Workspace, gdb)
    
    vectmask = os.path.join(hp.SWorkspace, "vectmask.shp")
    BMPpts = os.path.join(hp.SWorkspace, "BMPpts.shp")
    arcpy.RasterToPolygon_conversion(hp.Mask, vectmask, "SIMPLIFY", "Value")
    arcpy.Clip_analysis(bmp_noclip, vectmask, BMPpts)
    
    # NOTE(review): this multiplies a *string* path by a raster; compare
    # with Streams_nd below, which wraps the path in Raster() first.
    # Presumably Raster(...) * hp.Mask was intended -- verify.
    flowdir = os.path.join(hp.Workspace + "\\WIPoutput.mdb", "flowdir")* hp.Mask 
    flowdir.save(os.path.join(hp.SWorkspace, "flowdir")) 
    
    Streams_nd = Raster(os.path.join(hp.Workspace + "\\WIPoutput.mdb", "streams"))
    streams = hp.RemoveNulls(Streams_nd)
    
    hp.log("Finding CIP projects...")
    CIPBMPpts = os.path.join(hp.SWorkspace, "CIPpts.shp")
    CIP_found = hp.GetSubset(BMPpts, CIPBMPpts, " \"%s\" = 'TRUE' " % bmp_CIPproj)
Beispiel #24
0
# NOTE(review): ``fileGDB`` and ``sr`` are defined earlier in this script
# (not shown here) -- presumably a file geodatabase path and a spatial
# reference object.
dataset = arcpy.CreateFeatureDataset_management(fileGDB,
                                                "dataset",
                                                spatial_reference=sr)
# Create a point feature class in the file geodatabase
# (note: the spatial reference must be specified here).
fileGDB_Point = arcpy.CreateFeatureclass_management(fileGDB,
                                                    "point",
                                                    "POINT",
                                                    spatial_reference=sr,
                                                    has_z='DISABLED')
# Create a line feature class inside the feature dataset
# (no spatial reference needed -- it is inherited from the dataset).
fileGDB_Line = arcpy.CreateFeatureclass_management(dataset, "line", "POLYLINE")

# 3. Create a personal geodatabase.
# "./" stands for the current folder.
# "personal.mdb" is the name of the personal geodatabase.
personalGDB = arcpy.CreatePersonalGDB_management(r"./", "personal.mdb")

# 4. Create feature classes with Z values; has_z can be DISABLED, ENABLED or SAME_AS_TEMPLATE.
# SAME_AS_TEMPLATE means the output feature class has z values only when the template does.
# Create a z-enabled feature class (a plain shapefile).
lineShp_Z = arcpy.CreateFeatureclass_management(r"./",
                                                "flow.shp",
                                                "POLYLINE",
                                                spatial_reference=sr,
                                                has_z='ENABLED')
# Create a z-enabled point feature class in the file geodatabase.
# NOTE(review): this reuses the name "point" created above and may
# collide with the earlier feature class -- confirm overwriteOutput.
fileGDB_Point_Z = arcpy.CreateFeatureclass_management(fileGDB,
                                                      "point",
                                                      "POINT",
                                                      spatial_reference=sr,
                                                      has_z='ENABLED')
Beispiel #25
0
# List every raster in the exercise workspace and build output paths
# inside a new geodatabase under /Results.
import arcpy
from arcpy import env
file_path = "P:/py3200k/Homework/Exercise_9/Exercise09"
env.workspace = file_path
rasterlist = arcpy.ListRasters()
# NOTE(review): CreatePersonalGDB_management creates an Access .mdb; the
# ".gdb" name suggests a file geodatabase, for which
# CreateFileGDB_management is the matching tool -- confirm intent.
arcpy.CreatePersonalGDB_management(file_path + "/Results", "newrastersfile.gdb")
for rasters in rasterlist:
    desc = arcpy.Describe(rasters)
    rname = desc.baseName
    # Target path for this raster inside the new geodatabase.
    # NOTE(review): ``outraster`` is computed but never used -- the copy
    # step (e.g. CopyRaster_management) appears to be missing/truncated.
    outraster = file_path + "/Results/newrastersfile.gdb/" + rname
    
Beispiel #26
0
def _agregar_campos(fc, campos):
    """Add each (name, type, size) field spec to feature class *fc*.

    For TEXT fields the size goes into AddField's field_length slot; for
    numeric fields it goes into the field_precision slot, mirroring the
    positional signature of arcpy.AddField_management.
    """
    for nombre, tipo, tam in campos:
        if tipo == "TEXT":
            arcpy.AddField_management(fc, nombre, tipo, '#', '#', tam, '#',
                                      'NULLABLE', 'NON_REQUIRED', '#')
        else:
            arcpy.AddField_management(fc, nombre, tipo, tam, '#', '#', '#',
                                      'NULLABLE', 'NON_REQUIRED', '#')


def Plantillas_TMP(carpeta):
    """Create a timestamped work folder holding a personal geodatabase and
    return the template feature classes built inside it.

    A folder "Proceso-<date>-<time>" is created under *carpeta*, containing a
    "Segmentacion Rural" personal GDB (version 10.0) with an
    "Insumos Originales" feature dataset (WGS84 / EPSG:4326). Four template
    feature classes - DIST, AER (polygons) and CCPP, VIV (points) - are
    created there with their respective field schemas.

    Returns the list of the four created feature-class results, in the order
    DIST, AER, CCPP, VIV.
    """
    list_fc = []
    fecha = time.strftime('%d%b%y')
    hora = time.strftime('%H%M%S')
    nameFile = "Proceso-{}-{}".format(fecha, hora)
    FILE = arcpy.CreateFolder_management(carpeta, nameFile)
    GDB = arcpy.CreatePersonalGDB_management(FILE, "Segmentacion Rural",
                                             "10.0")

    FcOriginales = arcpy.CreateFeatureDataset_management(
        GDB, "Insumos Originales", arcpy.SpatialReference(4326))

    FC = [["DIST", "POLYGON"], ["AER", "POLYGON"], ["CCPP", "POINT"],
          ["VIV", "POINT"]]

    Campos_AER = [("UBIGEO", "TEXT", "6"), ("IDAER", "TEXT", "12"),
                  ("AER_INI", "TEXT", "3"), ("AER_FIN", "TEXT", "3"),
                  ("CCPP_AER", "SHORT", "3"), ("VIV_AER", "SHORT", "5"),
                  ("AER_POS", "TEXT", "2")]
    Campos_VIV = [("UBIGEO", "TEXT", "6"), ("CODCCPP", "TEXT", "4"),
                  ("AREA", "SHORT", "1"), ("ID_REG_OR", "SHORT", "4"),
                  ("P29", "SHORT", "1"), ("P29M", "SHORT", "1"),
                  ("OR_VIV_RUTA", "SHORT", "5"),
                  ("OR_CCPP_DIST", "SHORT", "5"), ("IDVIV", "TEXT", "14"),
                  ("IDCCPP", "TEXT", "10"), ("IDRUTA", "TEXT", "17"),
                  ("IDSCR", "TEXT", "14"), ("IDAER", "TEXT", "12")]
    Campos_CCPP = [("UBIGEO", "TEXT", "6"), ("CODCCPP", "TEXT", "4"),
                   ("AREA", "SHORT", "1"), ("VIV_CCPP", "SHORT", "6"),
                   ("IDCCPP", "TEXT", "10"), ("IDRUTA", "TEXT", "17"),
                   ("IDSCR", "TEXT", "14"), ("IDAER", "TEXT", "12")]
    Campos_DIST = [("UBIGEO", "TEXT", "6"), ("REGION", "SHORT", "1"),
                   ("REGION_NAT", "TEXT", "10")]

    # Field schema per feature-class name; unknown names fall back to the VIV
    # schema, matching the original if/elif/else chain.
    campos_por_fc = {"DIST": Campos_DIST, "AER": Campos_AER,
                     "CCPP": Campos_CCPP, "VIV": Campos_VIV}

    for nombre, geometria in FC:
        # Build an in-memory template carrying the right schema...
        fc_tmp = arcpy.CreateFeatureclass_management(
            "in_memory", nombre, geometria, "#", "#", "#",
            arcpy.SpatialReference(4326))
        _agregar_campos(fc_tmp, campos_por_fc.get(nombre, Campos_VIV))
        # ...then materialise it inside the dataset using fc_tmp as template.
        dato = arcpy.CreateFeatureclass_management(
            FcOriginales, nombre, geometria, fc_tmp, "#", "#",
            arcpy.SpatialReference(4326))
        list_fc.append(dato)
    return list_fc
Beispiel #27
0
# Shapefile inputs exported from the model .mdb, with their WGS84 personal-GDB
# targets and the shapefile copies that will hold the reprojected data.
mw_Valve = Model_mdb + "\\mw_Valve.shp"
mw_Valve_out = Output + "\\WGS84.mdb\\WGS84\\mw_Valve"
mw_Valve_shp = Output + "\\SHP_WGS\\mw_Valve.shp"

mw_DemAlloc = Model_mdb + "\\mw_DemAlloc.shp"
mw_DemAlloc_out = Output + "\\WGS84.mdb\\WGS84\\mw_DemAlloc"
mw_DemAlloc_shp = Output + "\\SHP_WGS\\mw_DemAlloc.shp"

mw_DemConn = Model_mdb + "\\mw_DemConn.shp"
mw_DemConn_out = Output + "\\WGS84.mdb\\WGS84\\mw_DemConn"
mw_DemConn_shp = Output + "\\SHP_WGS\\mw_DemConn.shp"


# Process: create the personal geodatabase and its feature dataset only when
# they do not exist yet from a previous run.
if not os.path.exists(Output + "\\WGS84.mdb"):
    arcpy.CreatePersonalGDB_management(Output, "WGS84", "CURRENT")
    arcpy.CreateFeatureDataset_management(Output + "\\WGS84.mdb", "WGS84", "PROJCS['WGS_1984_Web_Mercator_Auxiliary_Sphere',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator_Auxiliary_Sphere'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],PARAMETER['Auxiliary_Sphere_Type',0.0],UNIT['Meter',1.0]];-20037700 -30241100 10000;-100000 10000;-100000 10000;0,001;0,001;0,001;IsHighPrecision")

# Process: stamp the S-JTSK / Krovak East-North coordinate system onto every
# model layer. The identical WKT is applied to all of them, so it is defined
# once and the layers are processed in a loop (same call order as before).
print("Defining Krovak coordinate system...")
_KROVAK_WKT = "PROJCS['S-JTSK_Krovak_East_North',GEOGCS['GCS_S_JTSK',DATUM['D_S_JTSK',SPHEROID['Bessel_1841',6377397.155,299.1528128]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Krovak'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Pseudo_Standard_Parallel_1',78.5],PARAMETER['Scale_Factor',0.9999],PARAMETER['Azimuth',30.28813975277778],PARAMETER['Longitude_Of_Center',24.83333333333333],PARAMETER['Latitude_Of_Center',49.5],PARAMETER['X_Scale',-1.0],PARAMETER['Y_Scale',1.0],PARAMETER['XY_Plane_Rotation',90.0],UNIT['Meter',1.0]]"
for _layer in (mw_Junction, mw_Pipe, mw_Tank, mw_Source,
               mw_Pump, mw_Valve, mw_DemAlloc, mw_DemConn):
    arcpy.DefineProjection_management(_layer, _KROVAK_WKT)

# Process: project the features from Krovak to WGS84.
print("Projecting to WGS84 coordinate system...")
arcpy.env.overwriteOutput = True

# -------------------------------------------------------------------------------------------------------
# Create the folder structure and the geodatabase, if they do not exist yet.
print('## Etapa 1: Cria a estrutura das pastas e "Geodatabase"')

# Create the output folder skeleton; a folder that already exists is fine.
directorys = ['Geodata', 'Documentos']
for directory in directorys:
    try:
        os.makedirs(os.path.join(Output, directory))
    except OSError:
        # Re-raise unless the path already exists as a directory.
        if not os.path.isdir(os.path.join(Output, directory)):
            raise

try:
    arcpy.CreatePersonalGDB_management(os.path.join(Output, 'Geodata'), 'Geo_SiCAR.mdb')
except Exception:
    # Was a bare "except:", which also swallowed SystemExit/KeyboardInterrupt;
    # keep the best-effort behaviour but only for real errors (e.g. the .mdb
    # already exists).
    print('Erro qualquer no geodatabase')

# -------------------------------------------------------------------------------------------------------
# List the directories sitting directly under the input root.
print('## Etapa 2: Lista Diretórios que estiverem na raiz abaixo')
folders = os.listdir(Input)

# -------------------------------------------------------------------------------------------------------
# Build the array of shapes to be merged (continued below).
print('## Etapa 3: Cria um array com os shapes a serem unidos')

shapes = ['AppsUnificadas',\
          'Nascente_Points',\
          'Nascente_Polygon',\
Beispiel #29
0
#coding = utf-8
# -*- coding: UTF-8 -*-

import os
import sys
import time

import arcpy
from arcpy import env


def Do(gdb_dir_in, mdb_dir_out):
    """Convert every file geodatabase under *gdb_dir_in* into a personal
    geodatabase (.mdb) of the same base name inside *mdb_dir_out*.

    All feature classes of each source .gdb are copied into the matching
    .mdb, which is recreated from scratch on every run.
    """
    # Collect the file-GDB workspaces to convert.
    env.workspace = gdb_dir_in
    GDBlist = arcpy.ListWorkspaces("*", "FileGDB")

    gdb_index = 1
    for gdb in GDBlist:
        gdbname = os.path.split(gdb)[1]
        mdbname = os.path.splitext(gdbname)[0] + '.mdb'
        mdb = mdb_dir_out + os.sep + mdbname
        # Remove any stale target so the copy starts clean.
        if arcpy.Exists(mdb):
            os.remove(mdb)
        arcpy.CreatePersonalGDB_management(mdb_dir_out, mdbname)
        # Copy every feature class of the source GDB into the new .mdb.
        env.workspace = gdb
        fcs_in_gdb = arcpy.ListFeatureClasses()
        arcpy.FeatureClassToGeodatabase_conversion(fcs_in_gdb, mdb)
        print(str(gdb_index) + '\t' + gdb + ' translate to mdb done!')
        gdb_index = gdb_index + 1


if __name__ == "__main__":
    print(time.strftime("start:%Y/%m/%d:%H:%M:%S"))
    # Fixed: the original line had a stray "]" -> Do(sys.argv[1],sys.argv[2]])
    Do(sys.argv[1], sys.argv[2])
    print(time.strftime("done:%Y/%m/%d:%H:%M:%S"))
Beispiel #30
0
    def SetEnvVar(self):
        '''Set the geoprocessing environment variables stored in the WIP.dat
        config file that is stored in each dataset folder.

        Side effects visible in this method: populates self.Mask, self.AppPath,
        self.valid, self.current_tool, self.models[...], self.SWorkspace,
        self.Logfilename and self.VWorkspace; creates the Scratch folder and
        the WIPoutput.mdb personal GDB when missing; and, when a mask raster
        is configured, aligns the arcpy extent/snap-raster/cell-size settings
        to it.'''
        #~ self.log("calling Set GPEnvironment")
        self.Mask = ""

        # Folder containing the running script; used for app-relative paths.
        self.AppPath = os.path.split(sys.argv[0])[0]
        thiswip = os.path.join(self.Workspace, "WIP.dat")
        if os.path.exists(thiswip):
            # WIP.dat is executed as Python 2 code (execfile); judging from the
            # usage below it is expected to assign settings such as self.Mask.
            # NOTE(review): assumption inferred from usage - confirm contents.
            execfile(thiswip)
        else:
            self.log('    Did not find this WIP.dat %s ' % thiswip)
            ######
        self.valid = True
        ######
        # NOTE(review): self.valid is unconditionally forced True just above
        # (between the ###### markers), so this early-exit guard can never
        # fire as written - confirm whether that override is intentional.
        if not self.valid:
            self.log(
                "    Skipping this tool run since a previous tool run was invalid"
            )
            sys.exit(1)

        # Find out which tool is running (script name minus .py/.pyc).
        self.current_tool = os.path.split(sys.argv[0])[1].replace(
            ".pyc", "").replace(".py", "")
        if self.current_tool == "CIP":
            # The CIP entry point dispatches sub-tools via its first argument.
            self.current_tool = "CIP_%s" % sys.argv[1].replace(" ", "_")

        # Record this run's inputs/outputs under the tool's name.
        self.models[self.current_tool] = {}
        self.models[self.current_tool]["input"] = sys.argv[1:]
        self.models[self.current_tool]["output"] = []

        # Scratch workspace for intermediate geoprocessing results.
        self.SWorkspace = os.path.join(self.Workspace, "Scratch")
        if not os.path.exists(self.SWorkspace):
            os.mkdir(self.SWorkspace)

        # NOTE(review): the canonical property is arcpy.env.workspace
        # (lower-case w) - confirm this capitalised form actually takes effect.
        arcpy.env.Workspace = self.SWorkspace

        # Per-tool log file alongside the dataset.
        self.Logfilename = os.path.join(self.Workspace,
                                        self.current_tool + ".log")

        # Personal GDB that receives the tool's vector outputs.
        self.VWorkspace = os.path.join(self.Workspace, "WIPoutput.mdb")
        if not os.path.exists(self.VWorkspace):
            arcpy.CreatePersonalGDB_management(self.Workspace, "WIPoutput.mdb",
                                               "CURRENT")

        # When WIP.dat replaced self.Mask with a raster object (i.e. it is no
        # longer the default empty string), align the geoprocessing extent,
        # snap raster and cell size to that mask.
        if type(self.Mask) != type(""):
            dsc = arcpy.Describe(self.Mask)
            arcpy.env.extent = dsc.Extent
            arcpy.env.snapRaster = self.Mask.catalogPath
            # NOTE(review): this sets an attribute on the arcpy module itself,
            # not on the geoprocessing environment - probably meant
            # arcpy.env.cellSize; confirm before changing.
            arcpy.cellSize = dsc.MeanCellHeight
            self.cellSize = dsc.MeanCellHeight
            #~ self.log(" Extent has been set to %s" % arcpy.extent)
            self.log("    Cell size has been set to %s %s" %
                     (arcpy.cellSize, self.units['type']))

        # f = open(os.path.join(self.AppPath, r'../ToolData/Metadatainfo.csv'), 'r')
        # header = f.readline().split(',')
        # tags = f.readline().split(',')
        # self.tags = {}
        # for k, v in zip(header, tags):
        # self.tags[k] = v

        # self.metadata = {}
        # for i in f.readlines():
        # d = i.split(',')
        # self.metadata[d[0].lower()] = {}
        # for j in range(1, len(d)-1):
        # self.metadata[d[0].lower()][header[j]] = d[j]
        # f.close()

        # for i in self.metadata.keys():
        # self.metadata[i]['Citation Originator'] = "%s\\%s" % (os.environ['USERDOMAIN'], os.environ['USERNAME'])

        self.log("    Finished loading env vars")