Example No. 1
def ExportReport(table, delta_date):
	env.overwriteOutput = True
	env.workspace = "Database Connections/RPUD_TRANSDB.sde"
	#env.workspace = os.path.join(os.path.dirname(sys.argv[0]), "RPUD_TESTDB - MOBILE_EDIT_VERSION.sde") #the name of the database connection may need to be changed in production

	#convert local time to UTC for query
	now = datetime.date.today().strftime('%Y%m%d')
	today = datetime.datetime(int(now[:4]), int(now[4:6]), int(now[6:]), 00, 00, 00) + datetime.timedelta(hours=4)
	yesterday = today - datetime.timedelta(days=delta_date)
	outputExcel = os.path.join("//corfile/Public_Utilities_NS/5215_Capital_Improvement_Projects/636_Geographic_Info_System/Joe/Collector App/Flushing app/Daily Report/", table + "_" + yesterday.strftime("%Y%m%d") + ".xls")
	logMessage("Input table is: " + table)
	logMessage("Output Excel file is: " + os.path.basename(outputExcel))
	print ("Exporting table to Excel...")

	#query report table for records in previous day
	whereClause = '"CREATED_DATE" < timestamp \'{0}\' AND "CREATED_DATE" > timestamp \'{1}\' AND "CREW" NOT LIKE \'_GIS\' AND "CREW" NOT LIKE \'_test\' ORDER BY REPORT_DATE'.format(str(today), str(yesterday))
	arcpy.MakeQueryTable_management(table, 'queryTable', "NO_KEY_FIELD", "", "", whereClause)
	recordNum = arcpy.GetCount_management('queryTable')
	logMessage(str(recordNum) + " " + table + " reports for " + (yesterday).strftime("%b %d, %Y"))

	#for testing, print out fields in queryTable
	# fields = arcpy.ListFields('queryTable')
	# for field in fields:
	# 	print("{0} is a type of {1}".format(field.aliasName, field.type))

	#export queried table to Excel; the ALIAS option has not worked here so far and needs a solution
	arcpy.TableToExcel_conversion('queryTable', outputExcel, 'ALIAS')
	logMessage(os.path.basename(outputExcel) + " has been exported.")
	#return yesterday date for naming
	return yesterday, recordNum
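The ALIAS complaint in the comment above has a manual fallback: skip TableToExcel and write the rows out directly, taking the header row from the field aliases. A minimal CSV sketch, assuming a Python 2 runtime like the snippet above; export_with_alias_headers and its arguments are hypothetical names:

import csv
import arcpy

def export_with_alias_headers(view, out_csv):
    # Dump a query-table view to CSV, using each field's alias as the
    # header instead of the qualified field name.
    fields = [fld for fld in arcpy.ListFields(view) if fld.type != "Geometry"]
    with open(out_csv, "wb") as out:  # use "w", newline="" on Python 3
        writer = csv.writer(out)
        writer.writerow([fld.aliasName for fld in fields])
        with arcpy.da.SearchCursor(view, [fld.name for fld in fields]) as rows:
            for row in rows:
                writer.writerow(row)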
Example No. 2
def get_precipitation(dataspace, date, idw_pow, rastercellsize,
                      parameter_safe):
    """
    Funktion zur Interpolation des Niederschlags anhand des Inverse Distance Weighting (IDW). Die Berechnung erfolgt
    über das IDW-Tool der Erweiterung "Spatial Analyst". Zunächst werden die Niederschlagswerte eines Tages je Station
    in eine Liste geschrieben. Der Index der Niederschlagswerte je Station entspricht dem Index des dazugehörigen
    Stationsnamens in einer zweiten Liste. Diese Listen werden mittels eines Searchcursors erstellt. Über einen
    Updatecursor werden die Niederschlagsmesswerte in eine Tabelle geschrieben, die Informationen zum Stationsnamen und
    deren Koordinaten enthalten. Der Interpolationsradius ist auf das EZG Schotten 2 angepasst.
    :param dataspace: Dateipfad der Basisdaten (Typ: string)
    :param date: Tages ID (Typ: integer)
    :param idw_pow: Exponent des IDW (Typ: integer)
    :param rastercellsize: Groesse der Rasterzellen (Typ: foat)
    :param parameter_safe: Werte der Parameterkombination (Typ: string)
    :return: Interpolation des Niederschlags (Typ: raster)
    """
    arcpy.MakeQueryTable_management(
        r'{}\N_Messstationen;'.format(dataspace) +
        r'{}\N_Zeitreihen'.format(dataspace), "p_temp", "USE_KEY_FIELDS",
        "N_Messstationen.Stationsnummer;N_Zeitreihen.TagesID",
        "N_Messstationen.Stationsnummer;N_Messstationen.Stationsname; N_Messstationen.Shape\
                                    ;N_Zeitreihen.Tagessumme_mm;N_Zeitreihen.TagesID",
        "N_Zeitreihen.Stationsnummer =\
                                    N_Messstationen.Stationsnummer AND N_Zeitreihen.TagesID = {}"
        .format(date))

    # Niederschlagsinterpolation mittels Inverse Distance Weighting (IDW)
    idw = Idw("p_temp", "N_Zeitreihen.Tagessumme_mm", rastercellsize, idw_pow,
              RadiusFixed(20000.00000, 5), "")
    idw.save("IDW_rp{}_c{}_{}.tif".format(parameter_safe[0], parameter_safe[1],
                                          date))
    arcpy.Delete_management("p_temp")
    print(time.strftime("%H:%M:%S: ") + "P ausgeführt.")

    return idw
Example No. 3
def get_precipitation(dataspace, date, idw_pow, rastercellsize,
                      parameter_safe):
    """
    Interpolates the precipitation by Inverse Distance Weighting (IDW). The calculation is done by the "IDW" tool of the
    "Spatial Analyst" extension of ArcGIS. The precipitation data is selected via a query table joining the timeseries
    table ("N_Zeitreihen") and the attribute table of the precipitation stations ("N_Messstationen").
    :param dataspace: directory of the base data (:type: string)
    :param date: daily ID (:type: integer)
    :param idw_pow: IDW power (:type: integer)
    :param rastercellsize: raster cellsize (:type: float)
    :param parameter_safe: values of the variable combination (:type: tuple)
    :return: precipitation interpolation (:type: raster)
    """
    arcpy.MakeQueryTable_management(
        r'{}\N_Messstationen;'.format(dataspace) +
        r'{}\N_Zeitreihen'.format(dataspace), "p_temp", "USE_KEY_FIELDS",
        "N_Messstationen.Stationsnummer;N_Zeitreihen.TagesID",
        "N_Messstationen.Stationsnummer;N_Messstationen.Stationsname; N_Messstationen.Shape\
                                    ;N_Zeitreihen.Tagessumme_mm;N_Zeitreihen.TagesID",
        "N_Zeitreihen.Stationsnummer =\
                                    N_Messstationen.Stationsnummer AND N_Zeitreihen.TagesID = {}"
        .format(date))
    idw = Idw("p_temp", "N_Zeitreihen.Tagessumme_mm", rastercellsize, idw_pow,
              RadiusFixed(20000.00000, 5), "")
    idw.save("IDW_rp{}_c{}_{}.tif".format(parameter_safe[0], parameter_safe[1],
                                          date))
    arcpy.Delete_management("p_temp")
    arcpy.AddMessage(
        time.strftime("%H:%M:%S: ") + "Niederschlag interpoliert.")

    return idw
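A call might look like the following sketch; the geodatabase path, daily ID, and parameter values are assumptions, and the Spatial Analyst extension must be licensed:

# Hypothetical usage of get_precipitation(); all values are assumptions.
arcpy.CheckOutExtension("Spatial")
idw_raster = get_precipitation(dataspace=r"C:\data\ezg.gdb",
                               date=200,            # TagesID in N_Zeitreihen
                               idw_pow=2,
                               rastercellsize=25.0,
                               parameter_safe=(2, 25))
# idw.save() inside the function writes the .tif to the current workspace.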
Example No. 4
 def AMEALL(self):
     '''Method that returns the subnetwork of the production zone'''
     # Process: Make Query Table
     variable = g_ESRI_variable_1  # module-level name emitted by the ArcGIS script export
     arcpy.MakeQueryTable_management(
         os.path.join(self.base, "Zona_Productora"), variable,
         "USE_KEY_FIELDS", "", "", "No_Zona = '{}'".format(self.subred))
     return variable
Example No. 5
def importar_tablas_trabajo(data, campos):
    arcpy.env.overwriteOutput = True

    db = 'CPV_SEGMENTACION_GDB'
    ip = '172.18.1.93'
    usuario = 'sde'
    password = '******'
    path_conexion = conx.conexion_arcgis(db, ip, usuario, password)
    arcpy.env.workspace = path_conexion

    temp_ubigeos = ""
    i = 0
    for x in data:
        i = i + 1
        if (i == 1):
            temp_ubigeos = "'{}'".format(x[0])
        else:
            temp_ubigeos = "{},'{}'".format(temp_ubigeos, x[0])

    if len(data) > 0:
        sql = expresion.Expresion_2(data, campos)
    else:
        sql = ' FLAG_NUEVO=1'

    list_capas = [
        ["{}.sde.VW_FRENTES".format(db), tb_frentes_temp, 1],
    ]

    for i, capa in enumerate(list_capas):
        if capa[2] == 1:

            print "select * from {} where {} ".format(capa[0], sql)
            x = arcpy.MakeQueryLayer_management(
                path_conexion, 'capa{}'.format(i),
                "select * from {} where {} ".format(capa[0], sql))

        else:
            x = arcpy.MakeQueryTable_management(capa[0], "capa{}".format(i),
                                                "USE_KEY_FIELDS", "objectid",
                                                "", sql)

        if capa[1] in [tb_frentes_temp]:
            temp = arcpy.CopyFeatures_management(x,
                                                 'in_memory/temp_{}'.format(i))
            arcpy.AddField_management(temp, 'MANZANA2', 'TEXT', 50)
            arcpy.CalculateField_management(temp, 'MANZANA2', '!MANZANA!',
                                            "PYTHON_9.3")
            arcpy.DeleteField_management(temp, ['MANZANA'])
            arcpy.CopyFeatures_management(temp, capa[1])
            arcpy.AddField_management(capa[1], 'MANZANA', 'TEXT', 50)
            arcpy.CalculateField_management(capa[1], 'MANZANA', '!MANZANA2!',
                                            "PYTHON_9.3")
            arcpy.DeleteField_management(capa[1], ['MANZANA2'])
        else:
            arcpy.CopyFeatures_management(x, capa[1])

    arcpy.Sort_management(tb_frentes_temp, tb_frentes,
                          ['UBIGEO', 'ZONA', 'MANZANA', 'FRENTE_ORD'])
Example No. 6
 def copy_to_memory_with_id_filter(self, input_table,
                                   in_memory_output_table_name,
                                   id_field_name, id_list):
     in_memory_table = self.workspace + "\\" + in_memory_output_table_name
     # Build "<field> in (id1,id2,...)" from the id list
     where_clause = "{} in ({})".format(
         id_field_name, ",".join(str(id_value) for id_value in id_list))
     arcpy.MakeQueryTable_management(input_table, in_memory_table, "", "",
                                     "", where_clause)
Example No. 7
def exportToCSV(field_alias, field_names, table, outFile, outDir):
	#calculate date and local timestamp 
	now = datetime.date.today().strftime('%Y%m%d')
	today = datetime.datetime(int(now[:4]), int(now[4:6]), int(now[6:]), 00, 00, 00) + datetime.timedelta(hours=4)
	yesterday = today - datetime.timedelta(days=1)
	#build query clause
	whereClause = '"CREATED_DATE" <= timestamp \'{0}\' AND "CREATED_DATE" > timestamp \'{1}\' AND "CREW" NOT LIKE \'_GIS\' AND "CREW" NOT LIKE \'_test\' ORDER BY REPORT_DATE'.format(str(today), str(yesterday))	
	arcpy.MakeQueryTable_management(table, 'queryTable' + table, "NO_KEY_FIELD", "", "", whereClause)
	recordNum = arcpy.GetCount_management('queryTable' + table)
	logMessage(str(recordNum) + " " + table + " reports for " + (yesterday).strftime("%b %d, %Y"))
	#list fields
	fields = arcpy.ListFields('queryTable' + table)

	#write to csv
	outFullFile = outFile + "_" + yesterday.strftime("%Y%m%d") + ".csv"
	with open(os.path.join(outDir, outFullFile), 'wb') as f:
		w = csv.writer(f)
		w.writerow(field_alias)
		rows = arcpy.SearchCursor("queryTable" + table)
		for row in rows:
			field_vals = []
			for field in fields:
				if field.name in field_names:
					#remove none and invalid value and convert utc time to local
					if row.getValue(field.name) == None:
						field_val = ""
					elif field.type == "Date":
						if row.getValue(field.name).year == 1899:
							field_val = ""
						else:
							field_val = (row.getValue(field.name) - datetime.timedelta(hours=4)).strftime("%Y-%m-%d %H:%M:%S")
					else:
						field_val = row.getValue(field.name)
					if isinstance(field_val, unicode):
						field_val = field_val.encode('utf-8') # handle utf characters
					# print field_val
					field_vals.append(field_val)
			w.writerow(field_vals)
		del row
		del rows

	return recordNum, outFullFile
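A call sketch for the function above; aliases, field names, table, and output directory are assumptions:

# Hypothetical call; returns the record count and the dated CSV file name.
count, csv_name = exportToCSV(
    ["Crew", "Report Date"],   # field_alias: header row written to the CSV
    ["CREW", "REPORT_DATE"],   # field_names: fields to export
    "FLUSHING_REPORT",         # table registered in the workspace
    "FLUSHING_REPORT",         # outFile: output file name prefix
    r"C:\reports")             # outDir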
Example No. 8
def _create_query_table(feature, table, input_type, geodatabase):
    workspace_path = _get_workspace(geodatabase)

    _logger.info("Building Query Table parameters")
    qf_parameters = _build_qf(feature, table, workspace_path, input_type)

    _logger.info("Generating Query Table")
    QT = arcpy.MakeQueryTable_management(qf_parameters[0], qf_parameters[1],
                                         "USE_KEY_FIELDS", qf_parameters[2],
                                         qf_parameters[3], qf_parameters[4])[0]

    _logger.info("Copying final Query Table to\n"
                 "{}".format(qf_parameters[5]))
    _logger.info("feature count: {}".format(arcpy.GetCount_management(QT)[0]))

    out__q_t = os.path.join(workspace_path, qf_parameters[5])
    out_feature = arcpy.CopyFeatures_management(QT, out__q_t)[0]
    _logger.info("OUT: {}".format(out__q_t))
    return out_feature
Example No. 9
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# shCreateFeatureClassInScracth.py
# Created on: 2014-12-19 16:26:26.00000
#   (generated by ArcGIS/ModelBuilder)
# Description: 
# ---------------------------------------------------------------------------

# Import arcpy module
import arcpy


arcpy.env.overwriteOutput = 1

# Local variables:
AucklandStravaMetro_Edges_NZTM = "D:\\TEMP\\scratch.gdb\\AucklandStravaMetro_Edges_NZTM"
CycleEvents = "D:\\TEMP\\scratch.gdb\\CycleEvents"
AllCycleEventsIn2013 = "AllCycleEventsIn2013"
AllCycleEventsIn2013FC = "D:\\TEMP\\scratch.gdb\\AllCycleEvents"

# Process: Make Query Table
arcpy.MakeQueryTable_management("D:\\TEMP\\scratch.gdb\\AucklandStravaMetro_Edges_NZTM;D:\\TEMP\\scratch.gdb\\CycleEvents", AllCycleEventsIn2013, "ADD_VIRTUAL_KEY_FIELD", "", "AucklandStravaMetro_Edges_NZTM.OBJECTID #;AucklandStravaMetro_Edges_NZTM.ID #;AucklandStravaMetro_Edges_NZTM.OSM_ID #;AucklandStravaMetro_Edges_NZTM.OSM_NAME #;AucklandStravaMetro_Edges_NZTM.OSM_META #;AucklandStravaMetro_Edges_NZTM.OSM_SOURCE #;AucklandStravaMetro_Edges_NZTM.OSM_TARGET #;AucklandStravaMetro_Edges_NZTM.CLAZZ #;AucklandStravaMetro_Edges_NZTM.FLAGS #;AucklandStravaMetro_Edges_NZTM.SOURCE #;AucklandStravaMetro_Edges_NZTM.TARGET #;AucklandStravaMetro_Edges_NZTM.KM #;AucklandStravaMetro_Edges_NZTM.KMH #;AucklandStravaMetro_Edges_NZTM.COST #;AucklandStravaMetro_Edges_NZTM.REVERSE_CO #;AucklandStravaMetro_Edges_NZTM.X1 #;AucklandStravaMetro_Edges_NZTM.Y1 #;AucklandStravaMetro_Edges_NZTM.X2 #;AucklandStravaMetro_Edges_NZTM.Y2 #;AucklandStravaMetro_Edges_NZTM.OID_1 #;AucklandStravaMetro_Edges_NZTM.EDGE_ID #;AucklandStravaMetro_Edges_NZTM.TRIDERCNT #;AucklandStravaMetro_Edges_NZTM.TRRIDERCNT #;AucklandStravaMetro_Edges_NZTM.TRIDECNT #;AucklandStravaMetro_Edges_NZTM.TRRIDECNT #;AucklandStravaMetro_Edges_NZTM.BIKECNT #;AucklandStravaMetro_Edges_NZTM.BIKETIME #;AucklandStravaMetro_Edges_NZTM.RBIKETIME #;AucklandStravaMetro_Edges_NZTM.COMMUTECNT #;AucklandStravaMetro_Edges_NZTM.AMRIDER #;AucklandStravaMetro_Edges_NZTM.AMRRIDER #;AucklandStravaMetro_Edges_NZTM.AMRIDE #;AucklandStravaMetro_Edges_NZTM.AMRRIDE #;AucklandStravaMetro_Edges_NZTM.AMBIKECNT #;AucklandStravaMetro_Edges_NZTM.AMBIKET #;AucklandStravaMetro_Edges_NZTM.AMRBIKET #;AucklandStravaMetro_Edges_NZTM.AMCOMMUTE #;AucklandStravaMetro_Edges_NZTM.PMRIDER #;AucklandStravaMetro_Edges_NZTM.PMRRIDER #;AucklandStravaMetro_Edges_NZTM.PMRIDE #;AucklandStravaMetro_Edges_NZTM.PMRRIDE #;AucklandStravaMetro_Edges_NZTM.PMBIKECNT #;AucklandStravaMetro_Edges_NZTM.PMBIKET #;AucklandStravaMetro_Edges_NZTM.PMRBIKET #;AucklandStravaMetro_Edges_NZTM.PMCOMMUTE #;AucklandStravaMetro_Edges_NZTM.IPRIDER #;AucklandStravaMetro_Edges_NZTM.IPRRIDER #;AucklandStravaMetro_Edges_NZTM.IPRIDE #;AucklandStravaMetro_Edges_NZTM.IPRRIDE #;AucklandStravaMetro_Edges_NZTM.IPBIKECNT #;AucklandStravaMetro_Edges_NZTM.IPBIKET #;AucklandStravaMetro_Edges_NZTM.IPRBIKET #;AucklandStravaMetro_Edges_NZTM.IPCOMMUTE #;AucklandStravaMetro_Edges_NZTM.Shape #;CycleEvents.OBJECTID #;CycleEvents.edge_id #;CycleEvents.year #;CycleEvents.day #;CycleEvents.hour #;CycleEvents.minute #;CycleEvents.athlete_count #;CycleEvents.rev_athlete_count #;CycleEvents.activity_count #;CycleEvents.rev_activity_count #;CycleEvents.total_activity_count #;CycleEvents.activity_time #;CycleEvents.rev_activity_time #;CycleEvents.commute_count #;CycleEvents.activity_count_norm #", "CycleEvents.edge_id = AucklandStravaMetro_Edges_NZTM.EDGE_ID")

# Process: Copy Features
arcpy.CopyFeatures_management(AllCycleEventsIn2013, AllCycleEventsIn2013FC, "", "0", "0", "0")
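The generated call above is hard to scan; a trimmed variant keeping only a few of the listed fields (same tables, same join, fewer output columns) looks like this:

arcpy.MakeQueryTable_management(
    "D:\\TEMP\\scratch.gdb\\AucklandStravaMetro_Edges_NZTM;"
    "D:\\TEMP\\scratch.gdb\\CycleEvents",
    AllCycleEventsIn2013, "ADD_VIRTUAL_KEY_FIELD", "",
    "AucklandStravaMetro_Edges_NZTM.EDGE_ID #;"
    "AucklandStravaMetro_Edges_NZTM.Shape #;"
    "CycleEvents.edge_id #;CycleEvents.athlete_count #",
    "CycleEvents.edge_id = AucklandStravaMetro_Edges_NZTM.EDGE_ID")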

Example No. 10
arcpy.JoinField_management(WIC__4_, "WORKORDERID_RINDEX", WIC_trim_xy_PARCELS,
                           "WORKORDERID_RINDEX", "FACILITYID_Text")

# Process: Alter Field (8)
arcpy.AlterField_management(WIC__5_, "FACILITYID_Text",
                            "PARCEL_FACILITYID_FromWOXY", "", "", "255",
                            "NON_NULLABLE", "false")

# Process: Add Field (PARCEL_FACILITYID)
arcpy.AddField_management(WIC__6_, "PARCEL_FACILITYID", "TEXT", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")

# Process: Calculate Field (5)
arcpy.CalculateField_management(
    WIC__7_, "PARCEL_FACILITYID",
    "SelectFACID(!PARCEL_FACILITYID_FromWO_FACID!, !PARCEL_FACILITYID_FromLOCATION!, !PARCEL_FACILITYID_FromWOXY!)",
    "PYTHON",
    "def SelectFACID(ParcelFID, LocationFID, WOXYFID):\\n  if ParcelFID != None :\\n    return ParcelFID\\n  elif LocationFID != None :\\n    return LocationFID\\n  elif WOXYFID != None :\\n    return WOXYFID\\n  return None"
)

# Process: [CREATE WIC PARCEL POLYGONS] Make Query Table
arcpy.MakeQueryTable_management(
    "C:\\Users\\william.bezts\\Documents\\ArcGIS\\scratch.gdb\\WIC;C:\\Users\\william.bezts\\Documents\\ArcGIS\\scratch.gdb\\PWD_PARCELS",
    WIC_PARCELS_Layer, "ADD_VIRTUAL_KEY_FIELD", "",
    "WIC.WORKORDERID #;WIC.WO_INITIATEDATE #;WIC.WORKORDERID_RINDEX #;PWD_PARCELS.Shape #;PWD_PARCELS.PARCELID #;PWD_PARCELS.ADDRESS #;PWD_PARCELS.BLDG_DESC #;PWD_PARCELS.BILLING_CLASS #;PWD_PARCELS.CHECKEDBY #;PWD_PARCELS.CHECKEDDATE #;PWD_PARCELS.FACILITYID #;PWD_PARCELS.FACILITYID_Text #",
    "WIC.PARCEL_FACILITYID = PWD_PARCELS.FACILITYID_Text")

# Process: Select [remove NULL NONE PARCEL_FACILITYID]
arcpy.Select_analysis(WIC_PARCELS_Layer, WIC_PARCELS,
                      "PWD_PARCELS_FACILITYID_Text IS NOT NULL")
Example No. 11
def process_store_facts(args):
    logging.basicConfig(level=logging.DEBUG)
    _log.info('Process stores and facts started')

    #Set-up inputs
    store_fc = os.path.join(os.path.dirname(__file__),
                            'RBAMRGIS03.sde/RBGIS01.DBO.s_gis_store_pnt')
    #store_facts_month = os.path.join(os.path.dirname(__file__),'RBAMRGIS03.sde/RBGIS01.dbo.f_gis_month_store')
    store_facts_year = os.path.join(
        os.path.dirname(__file__),
        'RBAMRGIS03.sde/RBGIS01.dbo.f_gis_year_store')
    store_deltas = os.path.join(
        os.path.dirname(__file__),
        'RBAMRGIS03.sde/RBGIS01.dbo.d_gis_store_delta')
    locator = args[1]

    #Set-up outputs
    geocode_result = arcpy.env.scratchWorkspace + '/geocode_result'
    geocode_projected = arcpy.env.scratchWorkspace + '/stores_projected'
    deltas_view_new = 'deltas_new'
    deltas_table_new = arcpy.env.scratchWorkspace + '/select_new_result'
    deltas_view_removed = 'deltas_removed'
    deltas_table_removed = arcpy.env.scratchWorkspace + '/select_remove_result'
    fact_view = 'fact_view'
    fact_table = arcpy.env.scratchWorkspace + '/fact_table'
    store_feature_layer = 'store_feature_layer'
    store_fact_result = arcpy.env.scratchWorkspace + '/store_fact_result'

    #Select new deltas
    deltas_where_new = "type = 'New'"
    arcpy.MakeQueryTable_management(store_deltas, deltas_view_new,
                                    'NO_KEY_FIELD', '', '', deltas_where_new)
    deltas_new_count = int(
        arcpy.GetCount_management(deltas_view_new).getOutput(0))
    arcpy.CopyRows_management(deltas_view_new, deltas_table_new)
    _log.info('Created table view of store deltas where ' + deltas_where_new +
              ' with ' + str(deltas_new_count) + ' rows')

    #Geocode new deltas
    _log.info('Starting to geocode new store deltas')
    arcpy.GeocodeAddresses_geocoding(
        deltas_table_new, locator,
        "Address store_addr1 VISIBLE NONE;City store_city VISIBLE NONE;State state_code VISIBLE NONE;Zip zip VISIBLE NONE",
        geocode_result)
    _log.info('Finished geocoding new store deltas')

    try:
        #Begin append new deltas to master FC
        #Begin remove deltas from master store FC
        workspace = os.path.dirname(store_fc)

        #Set-up fields for use in search, update, and insert cursors
        store_fields = arcpy.ListFields(store_deltas)
        store_field_names = []
        for store_field in store_fields:
            if store_field.name != 'sub_channel' and store_field.name != 'store_status' and store_field.name != 'OBJECTID':
                store_field_names.append(store_field.name)
        store_fields_string = '; '.join(store_field_names)

        #Set-up where clauses
        geocodes_where_matched = ''' "Status" = 'M' OR "Status" = 'T' '''
        deltas_where_remove = "type = 'Removed'"

        #Begin insert and remove deltas with edit session
        with arcpy.da.Editor(workspace) as edit:
            #Set-up cursors for inserts
            insert_cursor = arcpy.InsertCursor(store_fc, '')
            search_cursor_geocodes = arcpy.SearchCursor(
                geocode_result, geocodes_where_matched, '',
                store_fields_string)

            _log.info(
                'Begin to insert new store deltas into master store feature class'
            )

            #Begin inserts
            for delta_row_new in search_cursor_geocodes:
                insert_cursor.insertRow(delta_row_new)
                _log.info('Inserted new store with store id ' +
                          str(delta_row_new.store_id))

            _log.info(
                'Finished inserting new store deltas into master store feature class'
            )

            #Clean-up insert-related cursors
            del delta_row_new
            del search_cursor_geocodes
            del insert_cursor

            #Make table view to include rows that should be removed
            arcpy.MakeQueryTable_management(store_deltas, deltas_view_removed,
                                            'NO_KEY_FIELD', '', '',
                                            deltas_where_remove)
            deltas_remove_count = int(
                arcpy.GetCount_management(deltas_view_removed).getOutput(0))
            arcpy.CopyRows_management(deltas_view_removed,
                                      deltas_table_removed)

            _log.info('Created table view of remove store deltas where ' +
                      deltas_where_remove + ' with ' +
                      str(deltas_remove_count) + ' rows')

            #Get IDs to delete
            search_cursor_remove = arcpy.SearchCursor(deltas_table_removed, '',
                                                      '', u'store_id')
            remove_ids = []
            for delta_row_remove in search_cursor_remove:
                remove_ids.append(delta_row_remove.store_id)
            #Clean-up cursor
            del delta_row_remove
            del search_cursor_remove

            _log.info(
                'Begin to delete store deltas from master store feature class where '
                + deltas_where_remove)

            #Begin delete
            update_cursor = arcpy.da.UpdateCursor(store_fc, [u'store_id'])
            for row in update_cursor:
                if row[0] in remove_ids:
                    update_cursor.deleteRow()
                    _log.info('Deleted row with id ' + str(row[0]))

            #Clean-up delete-related cursor
            del row
            del update_cursor

            _log.info(
                'Finished deleting store deltas from master store feature class'
            )
            _log.info('Master store feature class now contains ' +
                      str(arcpy.GetCount_management(store_fc)))

        #Project the geocodes results
        projectGeocodeResult(arcpy.env.scratchWorkspace, store_fc,
                             geocode_projected,
                             "WGS 1984 Web Mercator Auxiliary Sphere")
    except arcpy.ExecuteError as e:
        log_msg = 'Could not insert or remove all store deltas from master store feature class - rolling back changes'
        _log.error(log_msg)
        exception_report = Exception(log_msg + '<br>' + e.message)
        send_alert_email(exception_report)
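The delete loop above scans every row of the master feature class; a tighter sketch pushes the filter into the cursor's where clause (assuming store_id is a numeric field and remove_ids is non-empty):

id_csv = ",".join(str(store_id) for store_id in remove_ids)
with arcpy.da.UpdateCursor(store_fc, ["store_id"],
                           "store_id IN ({})".format(id_csv)) as cursor:
    for row in cursor:
        cursor.deleteRow()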
Example No. 12
                ';Shape' + \
                ';Id' + \
                ';RigScheduleName' + \
                ';SubSiteName' + \
                ';EntityId' + \
                ';RigId' + \
                ';StartDate' + \
                ';EndDate' + \
                ';ActivityType' + \
                ';SegmentType'

        #Make Query Table
        #MakeQueryTable_management (in_table, out_table, in_key_field_option, {in_key_field}, {in_field}, {where_clause})
        arcpy.MakeQueryTable_management(
            '{0};{1}'.format(in1, in2), RigSched, "USE_KEY_FIELDS", keyFields,
            inFields,
            'pegistest.STAGE.EPEX_RigActivityPlans.EntityId = pegistest.GIS.INDUSTRY_WELLS.EPEX_ID AND pegistest.STAGE.EPEX_RigActivityPlans.EndDate >getdate()'
        )
        log.write(
            str(datetime.datetime.today().strftime('%H:%M:%S')) +
            ' Make Query Table Management Complete to {0}.\n'.format(
                'RigSched'))
        print str(datetime.datetime.today().strftime('%H:%M:%S')
                  ) + ' Make Query Table Management Complete to {0}.\n'.format(
                      'RigSched')

        #TEST OUTPUT
        #FeatureClassToFeatureClass_conversion (in_features, out_path, out_name, {where_clause}, {field_mapping}, {config_keyword})
        #arcpy.FeatureClassToFeatureClass_conversion ('Test1', fgdbPath, 'Test')

        #Point to Line
Example No. 13
#    them in the same geodatabase as the feature class.
# 2. MakeQueryTable generates a temporary layer. Use CopyFeatures_management to export
#    the layer and save it as a permanent feature class.
###

# Import libraries
import arcpy,os
from arcpy import env

# Set workspace
env.overwriteOutput = True
env.workspace = r"PATH TO GEODATABASE"

# List the polygon feature class and table that want to be joined
tableList = [r"PATH TO GEOMETRY FILE",
             r"PATH TO TABLE"]

# Define the query for matching
whereClause = "GEOMETRYLAYERNAME.FIELD = TABLENAME.FIELD"

# Name the temporary layer name created by MakeQueryTable
lyrName = "TEMPORARY LAYER NAME"

# Name the output fc name
outFeatureClass = "PERMANENT LAYER NAME"

arcpy.MakeQueryTable_management(tableList, lyrName,"USE_KEY_FIELDS", "", "", whereClause)

# Since lyrName created by MakeQueryTable is temporary, save it as permanent gdb file
arcpy.CopyFeatures_management(lyrName, outFeatureClass)
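A filled-in sketch of the template above; every path, layer name, and field is hypothetical:

import arcpy
from arcpy import env

env.overwriteOutput = True
env.workspace = r"C:\data\demo.gdb"

tableList = [r"C:\data\demo.gdb\Parcels", r"C:\data\demo.gdb\Owners"]
whereClause = "Parcels.ParcelID = Owners.ParcelID"
lyrName = "parcel_owner_qt"
outFeatureClass = "ParcelsWithOwners"

arcpy.MakeQueryTable_management(tableList, lyrName, "USE_KEY_FIELDS", "", "", whereClause)
arcpy.CopyFeatures_management(lyrName, outFeatureClass)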
Example No. 14
###print feature count
##print str(n) + ' Features'

#Join Scats Features with Attributes
startTask = datetime.datetime.now()
print '\n2. Join Scats Features with Attributes'
arcpy.MakeTableView_management(in_table=tbl, out_view="scatstbl")

fields = arcpy.ListFields("scatstbl")
for f in fields:
    print f.name

tables = ["scatstbl", "scatslyr"]
sql = "Scats_Attributes.Id = Scats.Id"
arcpy.MakeQueryTable_management(in_table=tables,
                                out_table="scatsFeatures",
                                in_key_field_option="ADD_VIRTUAL_KEY_FIELD",
                                where_clause=sql)
endTask = datetime.datetime.now()
print 'elapsed ' + str(endTask - startTask)

#check and delete
startTask = datetime.datetime.now()
print '\n3. Copy Features to ScatsFeatures'
if arcpy.Exists("Scats_Features"):
    arcpy.Delete_management(in_data="Scats_Features")
arcpy.CopyFeatures_management(in_features="scatsFeatures",
                              out_feature_class="Scats_Features")
print arcpy.GetCount_management("scatsFeatures")
endTask = datetime.datetime.now()
print 'elapsed ' + str(endTask - startTask)
Example No. 15
def generate_soil_report(order_obj):
    arcpy.AddMessage('  -- Start generating PSR soil report...')
    start = timeit.default_timer()
    ### set scratch folder
    arcpy.env.workspace = config.scratch_folder
    arcpy.env.overwriteOutput = True
    ### extract buffer size for soil report
    eris_id = 0

    if '9334' not in order_obj.psr.search_radius.keys():
        arcpy.AddMessage('      -- Soil search radius is not available')
        return
    config.buffer_dist_soil = str(
        order_obj.psr.search_radius['9334']) + ' MILES'
    arcpy.Buffer_analysis(config.order_geometry_pcs_shp,
                          config.order_buffer_shp, config.buffer_dist_soil)

    if order_obj.province == 'HI':
        config.data_path_soil = config.data_path_soil_HI
    elif order_obj.province == 'AK':
        config.data_path_soil = config.data_path_soil_AK
    else:
        config.data_path_soil = config.data_path_soil_CONUS

    data_soil = os.path.join(config.data_path_soil, 'MUPOLYGON')
    # select soil data using a spatial query against the order buffer layer
    config.soil_lyr = arcpy.MakeFeatureLayer_management(data_soil, 'soil_lyr')
    arcpy.SelectLayerByLocation_management(config.soil_lyr, 'intersect',
                                           config.order_buffer_shp)
    arcpy.CopyFeatures_management(config.soil_lyr,
                                  config.soil_selectedby_order_shp)

    table_muaggatt = os.path.join(config.data_path_soil, 'muaggatt')
    table_component = os.path.join(config.data_path_soil, 'component')
    table_chorizon = os.path.join(config.data_path_soil, 'chorizon')
    table_chtexturegrp = os.path.join(config.data_path_soil, 'chtexturegrp')

    stable_muaggatt = os.path.join(config.temp_gdb, "muaggatt")
    stable_component = os.path.join(config.temp_gdb, "component")
    stable_chorizon = os.path.join(config.temp_gdb, "chorizon")
    stable_chtexture_grp = os.path.join(config.temp_gdb, "chtexturegrp")
    data_array = []
    if (int(arcpy.GetCount_management('soil_lyr').getOutput(0)) == 0
        ):  # no soil polygons selected
        arcpy.AddMessage('no soil data in order geometry buffer')
        psr_obj = models.PCR()
        eris_id = eris_id + 1
        psr_obj.insert_flex_rep(order_obj.id, eris_id, '9334', 2, 'N', 1,
                                'No soil data available in the project area.',
                                '')
    else:
        soil_selectedby_order_pcs_shp = arcpy.Project_management(
            config.soil_selectedby_order_shp,
            config.soil_selectedby_order_pcs_shp, order_obj.spatial_ref_pcs)
        # create map keys
        arcpy.Statistics_analysis(
            soil_selectedby_order_pcs_shp,
            os.path.join(config.scratch_folder, "summary_soil.dbf"),
            [['mukey', 'FIRST'], ["Shape_Area", "SUM"]], 'musym')
        arcpy.Sort_management(
            os.path.join(config.scratch_folder, "summary_soil.dbf"),
            os.path.join(config.scratch_folder, "summary_sorted_soil.dbf"),
            [["musym", "ASCENDING"]])
        seq_array = arcpy.da.TableToNumPyArray(
            os.path.join(config.scratch_folder, 'summary_sorted_soil.dbf'),
            '*')  #note: it could contain 'NOTCOM' record
        # retrieve attributes
        unique_MuKeys = utility.return_unique_setstring_musym(
            soil_selectedby_order_pcs_shp)

        if len(
                unique_MuKeys
        ) > 0:  # special case: order only returns one "NOTCOM" category, filter out
            where_clause_select_table = "muaggatt.mukey in " + unique_MuKeys
            arcpy.TableSelect_analysis(table_muaggatt, stable_muaggatt,
                                       where_clause_select_table)

            where_clause_select_table = "component.mukey in " + unique_MuKeys
            arcpy.TableSelect_analysis(table_component, stable_component,
                                       where_clause_select_table)

            unique_co_keys = utility.return_unique_set_string(
                stable_component, 'cokey')
            where_clause_select_table = "chorizon.cokey in " + unique_co_keys
            arcpy.TableSelect_analysis(table_chorizon, stable_chorizon,
                                       where_clause_select_table)

            unique_achkeys = utility.return_unique_set_string(
                stable_chorizon, 'chkey')
            if len(
                    unique_achkeys
            ) > 0:  # special case: e.g. there is only one Urban Land polygon
                where_clause_select_table = "chorizon.chkey in " + unique_achkeys
                arcpy.TableSelect_analysis(table_chtexturegrp,
                                           stable_chtexture_grp,
                                           where_clause_select_table)

                table_list = [
                    stable_muaggatt, stable_component, stable_chorizon,
                    stable_chtexture_grp
                ]
                field_list = config.fc_soils_field_list  #[['muaggatt.mukey','mukey'], ['muaggatt.musym','musym'], ['muaggatt.muname','muname'],['muaggatt.drclassdcd','drclassdcd'],['muaggatt.hydgrpdcd','hydgrpdcd'],['muaggatt.hydclprs','hydclprs'], ['muaggatt.brockdepmin','brockdepmin'], ['muaggatt.wtdepannmin','wtdepannmin'], ['component.cokey','cokey'],['component.compname','compname'], ['component.comppct_r','comppct_r'], ['component.majcompflag','majcompflag'],['chorizon.chkey','chkey'],['chorizon.hzname','hzname'],['chorizon.hzdept_r','hzdept_r'],['chorizon.hzdepb_r','hzdepb_r'], ['chtexturegrp.chtgkey','chtgkey'], ['chtexturegrp.texdesc1','texdesc'], ['chtexturegrp.rvindicator','rv']]
                key_list = config.fc_soils_key_list  #['muaggatt.mukey', 'component.cokey','chorizon.chkey','chtexturegrp.chtgkey']

                where_clause_query_table = config.fc_soils_where_clause_query_table  #"muaggatt.mukey = component.mukey and component.cokey = chorizon.cokey and chorizon.chkey = chtexturegrp.chkey"
                #Query tables may only be created using data from a geodatabase or an OLE DB connection
                query_table = arcpy.MakeQueryTable_management(
                    table_list, 'query_table', 'USE_KEY_FIELDS', key_list,
                    field_list, where_clause_query_table
                )  #note: outTable is a table view and won't persist

                arcpy.TableToTable_conversion(
                    query_table, config.temp_gdb, 'soil_table'
                )  #note: 1. <null> values will be retained using .gdb, will be converted to 0 using .dbf; 2. domain values, if there are any, will be retained by using .gdb
                data_array = arcpy.da.TableToNumPyArray(os.path.join(
                    config.temp_gdb, 'soil_table'),
                                                        '*',
                                                        null_value=-99)

        for i in range(0, len(seq_array)):
            map_unit_data = {}
            mukey = seq_array['FIRST_MUKE'][
                i]  #note the column name in the .dbf output was cut off
            # arcpy.AddMessage('      - multiple pages map unit ' + str(i))
            # arcpy.AddMessage('      - musym is ' + str(seq_array['MUSYM'][i]))
            # arcpy.AddMessage('      - mukey is ' + str(mukey))
            map_unit_data['Seq'] = str(
                i +
                1)  # note i starts from 0, but we want labels to start from 1

            if (seq_array['MUSYM'][i].upper() == 'NOTCOM'):
                map_unit_data['Map Unit Name'] = 'No Digital Data Available'
                map_unit_data['Mukey'] = mukey
                map_unit_data['Musym'] = 'NOTCOM'
            else:
                if len(data_array) == 0:  #only special polygons (urban land or water); data_array was never filled
                    cursor = arcpy.SearchCursor(stable_muaggatt,
                                                "mukey = '" + str(mukey) + "'")
                    row = cursor.next()
                    map_unit_data['Map Unit Name'] = row.muname
                    # arcpy.AddMessage('      -  map unit name: ' + row.muname)
                    map_unit_data['Mukey'] = mukey  #note
                    map_unit_data['Musym'] = row.musym
                    row = None
                    cursor = None

                elif ((utility.return_map_unit_attribute(
                        data_array, mukey,
                        'muname')).upper() == '?'):  #Water or Urban Land
                    cursor = arcpy.SearchCursor(stable_muaggatt,
                                                "mukey = '" + str(mukey) + "'")
                    row = cursor.next()
                    map_unit_data['Map Unit Name'] = row.muname
                    # arcpy.AddMessage('      -  map unit name: ' + row.muname)
                    map_unit_data['Mukey'] = mukey  #note
                    map_unit_data['Musym'] = row.musym
                    row = None
                    cursor = None
                else:
                    map_unit_data['Mukey'] = utility.return_map_unit_attribute(
                        data_array, mukey, 'mukey')
                    map_unit_data['Musym'] = utility.return_map_unit_attribute(
                        data_array, mukey, 'musym')
                    map_unit_data[
                        'Map Unit Name'] = utility.return_map_unit_attribute(
                            data_array, mukey, 'muname')
                    map_unit_data[
                        'Drainage Class - Dominant'] = utility.return_map_unit_attribute(
                            data_array, mukey, 'drclassdcd')
                    map_unit_data[
                        'Hydrologic Group - Dominant'] = utility.return_map_unit_attribute(
                            data_array, mukey, 'hydgrpdcd')
                    map_unit_data[
                        'Hydric Classification - Presence'] = utility.return_map_unit_attribute(
                            data_array, mukey, 'hydclprs')
                    map_unit_data[
                        'Bedrock Depth - Min'] = utility.return_map_unit_attribute(
                            data_array, mukey, 'brockdepmin')
                    map_unit_data[
                        'Watertable Depth - Annual Min'] = utility.return_map_unit_attribute(
                            data_array, mukey, 'wtdepannmin')

                    component_data = utility.return_componen_attribute_rv_indicator_Y(
                        data_array, mukey)
                    map_unit_data['component'] = component_data
            map_unit_data["Soil_Percent"] = "%s" % round(
                seq_array['SUM_Shape_'][i] / sum(seq_array['SUM_Shape_']) *
                100, 2) + r'%'
            config.report_data.append(map_unit_data)
        # create the map
        create_map(order_obj)

    end = timeit.default_timer()
    arcpy.AddMessage(' -- End generating PSR soil report. Duration: {}'.format(
        round(end - start, 4)))
Example No. 16
# Process: Delete Field (5)
arcpy.DeleteField_management(RockTicketsSR, "Date_Year")

arcpy.AddMessage(
    "deleted Date_Year field from RockTicketsSR feature class to keep same schema for copying updated data"
)
# Process: Convert Time Field
arcpy.ConvertTimeField_management(RockTicketsSR, "DATE_R", "'Not Used'",
                                  "Date_Year", "TEXT", "yyyy;1033;;")

arcpy.AddMessage("created Date_Year field")
# Process: Make Query Table (3)
arcpy.MakeQueryTable_management(RockTicketsSR, QueryTable1, "USE_KEY_FIELDS",
                                "RockTicketsSR.PROJ_Num", "",
                                "Date_Year =" + "\'" + str(Year_Edit) + "\'")

arcpy.AddMessage(
    "Query out only the year you are calculating and dump the data into QueryTable1"
)
# Process: Copy Rows (3)
arcpy.CopyRows_management(QueryTable1, QueryTable_CopyRows, "")

arcpy.AddMessage(
    "copy rows from   QueryTable1 into QueryTable_copyRows of the year we are calculating"
)
# Process: Create Relationship Class
arcpy.CreateRelationshipClass_management(
    Rock_Shoulder_Right, QueryTable_CopyRows,
    Rock_Shoulder_Right_QueryTable_CopyRows, "SIMPLE",
Example No. 17
def mainFunction(propertyTitlesFeatureClass,memorialsTable,suburbsFeatureClass,mortgageFeatureClass,mortgageSuburbsFeatureClass): # Get parameters from ArcGIS Desktop tool by separating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    try:          
        # --------------------------------------- Start of code --------------------------------------- #

        # Set the banks
        banks = ["Auckland Savings Bank","Australia and New Zealand Banking Group","Australian Mutual Provident Society","Bank of New Zealand","Heartland Bank","Kiwibank","New Zealand Home Loans","PGG Wrightson","Rabobank","Southland Building Society","Sovereign","Taranaki Savings Bank","The Co-Operative Bank","Wairarapa Building Society","Welcome Home Loan","Westpac","Other"]

        # Copy property titles and select out current mortgage data
        arcpy.AddMessage("Extracting mortgage data...")
        arcpy.TableSelect_analysis(memorialsTable, os.path.join(arcpy.env.scratchGDB, "Mortgage"), "instrument_type = 'Mortgage' AND current = 'T'")
        arcpy.Select_analysis(propertyTitlesFeatureClass, os.path.join(arcpy.env.scratchGDB, "PropertyTitles"), "")

        arcpy.AddMessage("Creating mortgage feature class...")
        # Join property titles and mortgage data
        arcpy.MakeQueryTable_management(os.path.join(arcpy.env.scratchGDB, "PropertyTitles") + ";" + os.path.join(arcpy.env.scratchGDB, "Mortgage"), "PropertyTitlesMortgageLayer", "USE_KEY_FIELDS", "", "", "PropertyTitles.title_no = Mortgage.title_no")
        arcpy.Select_analysis("PropertyTitlesMortgageLayer", mortgageFeatureClass, "")

        # Cleaning up fields
        arcpy.AddMessage("Cleaning up fields...")
        arcpy.AddField_management(mortgageFeatureClass, "mortgage_provider", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        # Calculating mortgage provider field from memorial text
        arcpy.CalculateField_management(mortgageFeatureClass, "mortgage_provider", "changeValue(!memorial_text!)", "PYTHON_9.3", "def changeValue(var):\\n  if \"ANZ\" in var:\\n    return \"Australia and New Zealand Banking Group\"\\n  if \"National Bank of New Zealand\" in var:\\n    return \"Australia and New Zealand Banking Group\"\\n  if \"Post Office Bank\" in var:\\n    return \"Australia and New Zealand Banking Group\"\\n  if \"Westpac\" in var:\\n    return \"Westpac\"\\n  if \"Home Mortgage Company\" in var:\\n    return \"Westpac\"\\n  if \"Trust Bank New Zealand\" in var:\\n    return \"Westpac\"\\n  if \"ASB\" in var:\\n    return \"Auckland Savings Bank\"\\n  if \"Bank of New Zealand\" in var:\\n    return \"Bank of New Zealand\"\\n  if \"Kiwibank\" in var:\\n    return \"Kiwibank\"\\n  if \"TSB\" in var:\\n    return \"Taranaki Savings Bank\"\\n  if \"Southland Building Society\" in var:\\n    return \"Southland Building Society\"\\n  if \"AMP\" in var:\\n    return \"Australian Mutual Provident Society\"\\n  if \"Rabobank\" in var:\\n    return \"Rabobank\"\\n  if \"Rabo Wrightson\" in var:\\n    return \"Rabobank\"\\n  if \"Countrywide\" in var:\\n    return \"Australia and New Zealand Banking Group\"\\n  if \"Mortgage Holding Trust\" in var:\\n    return \"Sovereign\"\\n  if \"Co-operative Bank\" in var:\\n    return \"The Co-Operative Bank\"\\n  if \"Co-Operative Bank\" in var:\\n    return \"The Co-Operative Bank\"\\n  if \"PSIS\" in var:\\n    return \"The Co-Operative Bank\"\\n  if \"New Zealand Home Lending\" in var:\\n    return \"New Zealand Home Loans\"\\n  if \"Wairarapa Building Society\" in var:\\n    return \"Wairarapa Building Society\"\\n  if \"PGG Wrightson\" in var:\\n    return \"PGG Wrightson\"\\n  if \"Heartland Bank\" in var:\\n    return \"Heartland Bank\"\\n  if \"Heartland Building Society\" in var:\\n    return \"Heartland Bank\"\\n  if \"Housing New Zealand\" in var:\\n    return \"Welcome Home Loan\"\\n  if \"Housing Corporation of New Zealand\" in var:\\n    return \"Welcome Home Loan\"\\n  else:\\n    return \"Other\"")
        arcpy.DeleteField_management(mortgageFeatureClass, "id;spatial_extents_shared;OBJECTID_1;id_1;title_no_1;land_district_1")
        arcpy.AddField_management(mortgageFeatureClass, "land_area", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(mortgageFeatureClass, "land_area", "!SHAPE_Area!", "PYTHON_9.3", "")        
      
        # Copy suburbs data
        arcpy.Select_analysis(suburbsFeatureClass, os.path.join(arcpy.env.scratchGDB, "Suburb"), "")
       
        # Spatially join suburb info
        arcpy.AddMessage("Analysing mortgages by suburb...")
        arcpy.SpatialJoin_analysis(mortgageFeatureClass, os.path.join(arcpy.env.scratchGDB, "Suburb"), os.path.join(arcpy.env.scratchGDB, "MortgageSuburbs"), "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "INTERSECT", "", "")
        # Summary stats for suburbs
        arcpy.Statistics_analysis(os.path.join(arcpy.env.scratchGDB, "MortgageSuburbs"), os.path.join(arcpy.env.scratchGDB, "MortgageSuburbsStats"), "mortgage_provider COUNT", "SUBURB_4THORDER;mortgage_provider")

        # Add the banks count fields
        for bank in banks:
            arcpy.AddField_management(os.path.join(arcpy.env.scratchGDB, "MortgageSuburbsStats"), bank.replace(" ", "_").replace("-", "_"), "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.CalculateField_management(os.path.join(arcpy.env.scratchGDB, "MortgageSuburbsStats"), bank.replace(" ", "_").replace("-", "_"), "0", "PYTHON_9.3", "")

        # Get the banks fields
        for count,value in enumerate(banks):
            banks[count] = value.replace(" ", "_").replace("-", "_").replace("(", "_").replace(")", "_")
            
        fields = ["SUBURB_4THORDER","mortgage_provider","FREQUENCY"] + banks
        with arcpy.da.UpdateCursor(os.path.join(arcpy.env.scratchGDB, "MortgageSuburbsStats"), fields) as cursor:
            # For each row
            for row in cursor:
                suburb = row[0]
                mortgageProvider = row[1]
                mortgageProvider = mortgageProvider.replace(" ", "_").replace("-", "_").replace("(", "_").replace(")", "_")
                count = row[2]

                # Update the mortgage provider row with its count
                row[fields.index(mortgageProvider)] = count
                cursor.updateRow(row)

        # Dissolve the stats
        arcpy.Statistics_analysis(os.path.join(arcpy.env.scratchGDB, "MortgageSuburbsStats"), os.path.join(arcpy.env.scratchGDB, "MortgageSuburbsStatsDissolved"), "FREQUENCY SUM;Auckland_Savings_Bank SUM;Australia_and_New_Zealand_Banking_Group SUM;Australian_Mutual_Provident_Society SUM;Bank_of_New_Zealand SUM;Heartland_Bank SUM;Kiwibank SUM;New_Zealand_Home_Loans SUM;PGG_Wrightson SUM;Rabobank SUM;Southland_Building_Society SUM;Sovereign SUM;Taranaki_Savings_Bank SUM;The_Co_Operative_Bank SUM;Wairarapa_Building_Society SUM;Welcome_Home_Loan SUM;Westpac SUM;Other SUM;", "SUBURB_4THORDER")

        # Create mortgage suburbs feature class
        arcpy.AddMessage("Creating mortgage suburbs feature class...")
        arcpy.Select_analysis(suburbsFeatureClass, mortgageSuburbsFeatureClass, "")
        arcpy.AddField_management(mortgageSuburbsFeatureClass, "land_area", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(mortgageSuburbsFeatureClass, "land_area", "!SHAPE_Area!", "PYTHON_9.3", "")                
        # Join on mortgages suburb data
        arcpy.JoinField_management(mortgageSuburbsFeatureClass, "SUBURB_4THORDER", os.path.join(arcpy.env.scratchGDB, "MortgageSuburbsStatsDissolved"), "SUBURB_4THORDER", "")
        arcpy.DeleteField_management(mortgageSuburbsFeatureClass, "FREQUENCY;SUBURB_1STORDER;SUBURB_2NDORDER;SUBURB_3RDORDER")
        arcpy.AlterField_management(mortgageSuburbsFeatureClass, "SUBURB_4THORDER", "Suburb", "", "TEXT", "60", "NULLABLE", "false")
        arcpy.AlterField_management(mortgageSuburbsFeatureClass, "SUM_FREQUENCY", "SUM_ALL", "", "DOUBLE", "8", "NULLABLE", "false")
     
        # --------------------------------------- End of code --------------------------------------- #  
            
        # If called from gp tool return the arcpy parameter   
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result          
        else:
            # Return the output if there is any
            if output:
                return output      
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file            
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
    # If arcpy error
    except arcpy.ExecuteError:           
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)   
        arcpy.AddError(errorMessage)           
        # Logging
        if (enableLogging == "true"):
            # Log error          
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        for i in range(len(e.args)):
            if (i == 0):
                # Python version check
                if sys.version_info[0] >= 3:
                    # Python 3.x
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')                 
                else:
                    # Python 2.x
                    errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                # Python version check
                if sys.version_info[0] >= 3:
                    # Python 3.x
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    # Python 2.x
                    errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')               
        arcpy.AddError(errorMessage)              
        # Logging
        if (enableLogging == "true"):
            # Log error            
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)            
Example No. 18
import arcpy, os, datetime
from arcpy import env
tempData = os.path.join(os.getcwd(), "OARS Temp")
env.workspace = os.path.join(tempData, "OARS_Temp.gdb")
env.overwriteOutput = False
year = "2018"                                                                   # <-------------------------------------------------------------------------------- (1) Change Year

# Bring Shapefile into Geodatabase
shapefile = "Treatments_{0}_merge.shp".format(year)
featureClass = "Treatments_{0}_merge".format(year)
arcpy.CopyFeatures_management(os.path.join(tempData, shapefile), featureClass)
arcpy.Rename_management(featureClass, "Treatments")
fc = "Treatments"

# "Shape_Length" and "Shape_Area" are required fields, but unfortunately are not in the order that I need to match OARS template (cannot delete these and simply add them again); Work-around below.
arcpy.MakeQueryTable_management(fc, "QueryTable", "USE_KEY_FIELDS", "", "Treatments.OBJECTID; Treatments.Shape; Treatments.Orig_Name; Treatments.ProjectNam; Treatments.GrantTitle; Treatments.FundNumber; Treatments.Coop_ID; Treatments.WkplanNum; Treatments.District; Treatments.Landowner; Treatments.AgencyInv; Treatments.CWPP; Treatments.CARS; Treatments.ForestType; Treatments.Treatment; Treatments.AccompDate; Treatments.Input_Date; Treatments.FY; Treatments.Acres")
arcpy.CopyFeatures_management("QueryTable", "Treatments_Edited")
arcpy.Delete_management("QueryTable")
arcpy.Delete_management("Treatments")                                           # This step & the next allow me to keep original "Treatments" name
arcpy.Rename_management("Treatments_Edited", "Treatments")

# Another cleanup - MakeQueryTable function corrects the order of "Shape_Length" & "Shape_Area", but unfortunately changes the FC's field names & aliases.
arcpy.AlterField_management(fc, "Treatments_Orig_Name", "Orig_Name", "Original Shapefile Name")
arcpy.AlterField_management(fc, "Treatments_ProjectNam", "ProjectNam", "Project Name")
arcpy.AlterField_management(fc, "Treatments_GrantTitle", "GrantTitle", "Grant Title")
arcpy.AlterField_management(fc, "Treatments_FundNumber", "FundNumber", "Fund Number")
arcpy.AlterField_management(fc, "Treatments_Coop_ID", "Coop_ID", "Cooperator ID")
arcpy.AlterField_management(fc, "Treatments_WkplanNum", "WkplanNum", "Workplan Number")
arcpy.AlterField_management(fc, "Treatments_District", "District", "District")
arcpy.AlterField_management(fc, "Treatments_Landowner", "Landowner", "Landowner")
arcpy.AlterField_management(fc, "Treatments_AgencyInv", "AgencyInv", "Agencies Involved")
Example No. 19
ListVarSummer = ['Qcsf','Qwwf','tsc','trc']
ListVarwinter = ['Qhsf','Qwwf','tsh','trh']
for N in Season:
    if N == Namesummer:
        List = ListVarSummer
        var2 = '_SU'
    else:
        List = ListVarwinter
        var2 = '_WIN'
    for var in List:
        fieldList = [[N+'.NAME','NAME'],[N+'.TIME','TIME'],[N+'.'+var,var],
                     [CQ_name+'.Name','Name'],[CQ_name+'.Shape','Shape']]
        whereClause = CQ_name+'.Name = '+N+'.NAME'
        tableList = Database+'\\'+N
        layerCQ = CQ_name+'.lyr'
        arcpy.MakeFeatureLayer_management(Database+'\\'+CQ_name,layerCQ)

        # make query of table
        arcpy.MakeQueryTable_management([tableList,layerCQ],"QueryTable","USE_KEY_FIELDS","#",fieldList,whereClause)
        Layer = "QueryTable"

        # set temporal directories
        if not os.path.exists(Animation+'\\'+CQ_name):
            os.makedirs(Animation+'\\'+CQ_name)
        arcpy.FeatureClassToFeatureClass_conversion(Layer,Animation+'\\'+CQ_name,var+var2)

# <rawcell>

# The rest is done in ArcGIS manually. Visualization!

Example No. 20
def crs6_add_data_preparation(args):

    # parameters
    wkgFolder = args[0]
    labelGDBname = args[1]

    # Set locations, etc
    labelGDBpath = os.path.join(wkgFolder, labelGDBname)
    fcEPrclPath = os.path.join(labelGDBpath, fcEsmntPrcl)
    fcELeasePath = os.path.join(labelGDBpath, fcEsmntLease)
    fcLPrclPath = os.path.join(labelGDBpath, fcLeasePrcl)
    fcRCLPath = os.path.join(labelGDBpath, fcRoadCL)
    fcRCdsslvPath = os.path.join(labelGDBpath, fcRCLdsslv)
    fcCadP = fcCadastre + "_P"
    fcCadPPath = os.path.join(labelGDBpath, fcCadP)
    tblPLnkPath = os.path.join(labelGDBpath, tblPropLink)
    fcPrclPLPath = os.path.join(labelGDBpath, fcPrclPLink)
    tblPropPath = os.path.join(labelGDBpath, tblProperty)
    fcCnnctPPath = os.path.join(labelGDBpath, fcConnectProp)
    fcDsslvIDPath = os.path.join(labelGDBpath, dsslvIDFC)

    # Set environment
    arcpy.env.workspace = labelGDBpath
    arcpy.env.overwriteOutput = True
    arcpy.env.configkeyword = "GEOMETRY"

    # log function
    log_msg('calling {}'.format(script_name))

    # variables
    err_message = None

    try:
        ### Easement lease
        # Copy easement parcel
        log_msg('Copying easement parcel data in labels gdb ...')
        arcpy.Copy_management(fcEPrclPath, fcELeasePath)

        # Select records to append
        log_msg('Selecting lease parcels to append...')
        delete_layer("leaseplyr")
        arcpy.MakeFeatureLayer_management(fcLPrclPath, "leaseplyr")
        parcelClause = '"PARCEL_INTENT" = ' + "'LCOV'" + ' OR "PARCEL_INTENT" = ' + "'EASM'"
        arcpy.SelectLayerByAttribute_management("leaseplyr", "NEW_SELECTION",
                                                parcelClause)

        log_msg('Appending lease parcels...')
        arcpy.Append_management("leaseplyr", fcELeasePath, "NO_TEST")

        ### Road CL
        log_msg('Working on road data...')
        if field_exist(fcRoadCL, rfield) == False:
            # Add field
            arcpy.AddField_management(fcRoadCL, rfield, "TEXT", "", "",
                                      rfieldlen)

        # Calculate values
        log_msg('Calculate values: {} ...'.format(rfield))
        calcexpr = ('!{}!.upper() + ", " + !{}!.upper()').format(
            statsfields[0][0], statsfields[1][0])
        arcpy.CalculateField_management(fcRoadCL, rfield, calcexpr,
                                        "PYTHON_9.3")
        # Dissolve data, using statistics fields
        log_msg('Dissolving ...')
        arcpy.Dissolve_management(fcRCLPath, fcRCdsslvPath, rfield,
                                  statsfields)

        # Add fields and calculate values (each statistics field is added once
        # in the loop below; adding it beforehand as well would make AddField fail)
        sfields = []
        for i in range(len(statsfields)):
            sfields.append(statsfields[i][0])
            arcpy.AddField_management(fcRCLdsslv, statsfields[i][0], "TEXT",
                                      "", "", sfieldlen)
            sfield = statsfields[i][1] + "_" + statsfields[i][0]
            calcexpr = ('!{}!').format(sfield)
            arcpy.CalculateField_management(fcRCLdsslv, statsfields[i][0],
                                            calcexpr, "PYTHON_9.3")

        ### Connect_Property
        log_msg('Working on Connect_Property')

        # Make query table
        mqtblList = [fcCadPPath, tblPLnkPath]
        whereClause = tblPropLink + "." + parIDfield + " = " + fcCadP + "." + parIDfield  # NOTE: no quotes - known bug
        arcpy.MakeQueryTable_management(mqtblList, "propQueryTbl",
                                        "ADD_VIRTUAL_KEY_FIELD", "", "",
                                        whereClause)
        # Get number of rows
        numMQrows = int(arcpy.GetCount_management("propQueryTbl").getOutput(0))
        # Export
        log_msg('Exporting...')
        arcpy.CopyFeatures_management("propQueryTbl", fcPrclPLPath)
        # Check number of rows
        numPPLrows = int(arcpy.GetCount_management(fcPrclPLPath).getOutput(0))
        if numPPLrows != numMQrows:
            log_msg(
                'ERROR: Wrong number of rows exported for link FC; {} versus {}'
                .format(numMQrows, numPPLrows))
        else:
            log_msg('Correct number of rows exported for link FC.')

        # Dissolve on ID
        log_msg('Dissolving on ID...')
        dfield = tblPropLink + "_" + propIDfield
        sfield = tblPropLink + "_" + parIDfield
        statsfield = [[sfield, "COUNT"]]
        arcpy.Dissolve_management(fcPrclPLink, fcDsslvIDPath, dfield,
                                  statsfield)

        # Join the TP_Property table
        log_msg('Preparing to join property table...')
        # Create temporary layer/view
        delete_layer('dsslvlyr')
        arcpy.MakeFeatureLayer_management(fcDsslvIDPath, "dsslvlyr")
        delete_layer('proptblview')
        arcpy.MakeTableView_management(tblPropPath, "proptblview")

        # Make join
        log_msg('Adding join ...')
        arcpy.AddJoin_management("dsslvlyr", dfield, "proptblview",
                                 propIDfield, "KEEP_ALL")
        log_msg('Property table joined')

        # Output
        log_msg('Copying features...')
        arcpy.CopyFeatures_management("dsslvlyr", fcCnnctPPath)

        log_msg("Process time: %s \n" %
                str(datetime.datetime.now() - starttime))

    except Exception as e:
        err_message = "ERROR while running {0}: {1}".format(script_name, e)

    return err_message, log_messages
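# For reference, a minimal sketch of the where-clause convention used above, with
# hypothetical stand-in values (the real tblPropLink, fcCadP and parIDfield come
# from the script's configuration): MakeQueryTable expects table-qualified,
# unquoted field names, per the known bug noted in the code.
example_whereClause = "{0}.{2} = {1}.{2}".format("TP_PropertyLink", "Cadastre_P", "PAR_ID")
# example_whereClause == "TP_PropertyLink.PAR_ID = Cadastre_P.PAR_ID"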
Ejemplo n.º 21
0
    else:
        return counts[0]
        
# Expand looping
for crop in uniqueCrops:
    crop = str(int(crop))  # normalize numeric crop codes to plain strings

    # check for existing geodatabase
    if not arcpy.Exists("{}CDL{}x_cnty.gdb".format(outws, crop)):
        print "Creating CDL{}x_cnty Geodatabase".format(crop)
        arcpy.CreateFileGDB_management(outws, "CDL{}x_cnty.gdb".format(crop))

    # Return a unique set of values in the FIPS field for counties with higher NASS acreage than the CDL
    gtacres = "gtlayer"
    arcpy.MakeQueryTable_management(acres, gtacres, "USE_KEY_FIELDS", "OBJECTID", "", "NASS_{0} > CDL_{0}".format(
        crop))
    uniqueValues = {(row[0], row[1]) for row in arcpy.da.SearchCursor(gtacres, (geoid, "NASS_{}".format(crop)))}

    # Loop through counties needing expansion
    for fips, nassacres in uniqueValues:
        arcpy.env.workspace = "{}CDL{}x_cnty.gdb/".format(outws, crop)
        arcpy.env.scratchWorkspace = "{}CDL{}x_cnty.gdb/".format(outws, crop)
        fips = str(fips)
        croptime1 = datetime.datetime.now()
        print "expanding CDL{}_{}".format(crop, fips)
        # Set mask from script 5
        mask = Raster("{}Cult_2017_{}".format(maskws, fips))
        infile = Raster("{0}CDL_{1}_cnty.gdb/CDL_{1}_{2}".format(inws, crop, fips))
        # Expand flagged county by 1 pixel
        outfile = Expand(infile, 1, 1)
        # Using the environmental setting for mask does not work here. Use mask in map algebra instead.
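        # A minimal sketch (assumed continuation) of the map-algebra masking the
        # comment above describes; the output raster name is hypothetical:
        outmasked = Con(mask, outfile)  # keep expanded cells only where the cultivated mask has data
        outmasked.save("CDL{}x_{}".format(crop, fips))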
inputDB = arcpy.GetParameterAsText(
    0)  # Input database. Assuming SSURGO or gSSURGO soils database
outputTbl = arcpy.GetParameterAsText(
    1)  # Output table containing surface texture for the dominant component

try:

    # Get surface texture for all components
    PrintMsg(" \nGetting surface texture for each component...", 0)
    env.workspace = inputDB
    qTbl = "QueryTexture"
    tbls = ["chtexture", "chtexturegrp", "chorizon"]
    qFlds = "chtexture.texcl texcl;chtexture.lieutex lieutex;chtexturegrp.texture texture;chtexturegrp.texdesc texdesc;chorizon.hzname hzname;chorizon.desgnmaster desgnmaster;chorizon.hzdept_r hzdept_r;chorizon.hzdepb_r hzdepb_r;chorizon.cokey cokey"
    query = "chtexture.chtgkey = chtexturegrp.chtgkey AND chtexturegrp.chkey = chorizon.chkey AND chorizon.hzdept_r = 0 AND chtexturegrp.rvindicator = 'Yes'"

    arcpy.MakeQueryTable_management(tbls, qTbl, "ADD_VIRTUAL_KEY_FIELD", "#",
                                    qFlds, query)

    # OBJECTID
    # chtexture_texcl
    # chtexture_lieutex
    # chtexturegrp_texture
    # chtexturegrp_texdesc
    # chorizon_hzname
    # chorizon_desgnmaster
    # chorizon_hzdept_r
    # chorizon_hzdepb_r
    # chorizon_cokey

    dTexture = dict()
    tFlds = ("chorizon_cokey", "chtexture_texcl", "chtexture_lieutex",
             "chtexturegrp_texture")
Ejemplo n.º 23
0
        lyrselection = arcpy.SelectLayerByLocation_management(
            in_layer="templyr",
            overlap_type="WITHIN_A_DISTANCE",
            select_features=selfeat,
            search_distance="500 Feet",
            selection_type="NEW_SELECTION",
            invert_spatial_relationship="NOT_INVERT")
        arcpy.CopyFeatures_management(lyrselection, outfeat)

    selbyloc(WorkingGDB + "\\BCParcels",
             WorkingGDB + "\\" + "Parcels_selection")
    # GENERATE QUERY TABLE (AS LAYER - THEREFORE MUST BE COPIED)
    QueryTable123 = arcpy.MakeQueryTable_management(
        ["Account_Parcels", "Parcels_selection"],
        "QueryTable123",
        "USE_KEY_FIELDS",
        "",
        in_field=
        "Parcels_selection.Shape #;Account_Parcels.strap #;Account_Parcels.PARCEL_NO #",
        where_clause="Account_Parcels.PARCEL_NO = Parcels_selection.PARCEL_NO")
    # END PRODUCT
    QueryTable_Final = WorkingGDB + "\\" + "QueryTable_Final"
    # Process: Copy Features
    if arcpy.Exists("QueryTable_Final"):
        arcpy.Delete_management("QueryTable_Final")
    arcpy.CopyFeatures_management(QueryTable123, QueryTable_Final, "", "0",
                                  "0", "0")
    # Process: Join Field (Buildings)
    arcpy.JoinField_management(QueryTable_Final, "Account_Parcels_strap",
                               "Buildings", "strap")

    # Process: Join Field (Owner_Address)
Ejemplo n.º 24
0
def exportToCSV(field_alias, field_names, table, outFile, outDir):
    #calculate date and local timestamp
    firstDayofM = date(date.today().year,
                       date.today().month, 1).strftime('%Y%m%d')
    # step back one day from the first of this month to land in the previous
    # month, then take its first day (date.today().month - 1 fails in January)
    lastMonthEnd = date(date.today().year, date.today().month, 1) - datetime.timedelta(days=1)
    firstDayofLM = date(lastMonthEnd.year, lastMonthEnd.month, 1).strftime('%Y%m%d')
    thisMonth = datetime.datetime(int(firstDayofM[:4]), int(firstDayofM[4:6]),
                                  int(firstDayofM[6:]), 00, 00,
                                  00) + datetime.timedelta(hours=4)
    lastMonth = datetime.datetime(int(firstDayofLM[:4]), int(firstDayofLM[4:6]),
                                  int(firstDayofLM[6:]), 00, 00,
                                  00) + datetime.timedelta(hours=4)
    print thisMonth
    print lastMonth
    #build query clause
    whereClause = '"CREATED_DATE" < timestamp \'{0}\' AND "CREATED_DATE" >= timestamp \'{1}\' ORDER BY CREATED_DATE'.format(
        str(thisMonth), str(lastMonth))
    print whereClause
    arcpy.MakeQueryTable_management(table, 'queryTable' + table,
                                    "NO_KEY_FIELD", "", "", whereClause)
    recordNum = arcpy.GetCount_management('queryTable' + table)
    logMessage(
        str(recordNum) + " " + table + " reports for " +
        (lastMonth).strftime("%b, %Y"))
    #list fields
    fields = arcpy.ListFields('queryTable' + table)

    #write to csv
    outFullFile = outFile + "_" + lastMonth.strftime("%Y%m") + ".csv"
    with open(os.path.join(outDir, outFullFile), 'wb') as f:
        w = csv.writer(f)
        w.writerow(field_alias)
        rows = arcpy.SearchCursor("queryTable" + table)
        row = None
        # iterate directly; calling rows.next() first would silently skip the first record
        for row in rows:
            field_vals = []
            for field in fields:
                if field.name in field_names:
                    #remove none and invalid value and convert utc time to local
                    if row.getValue(field.name) is None:
                        field_val = ""
                    elif field.type == "Date":
                        if row.getValue(field.name).year == 1899:
                            field_val = ""
                        else:
                            field_val = (row.getValue(field.name) -
                                         datetime.timedelta(hours=4)
                                         ).strftime("%Y-%m-%d %H:%M:%S")
                        # if field.name == "REPORT_DATE":
                        # 	field_val = (row.getValue(field.name) - datetime.timedelta(hours=4)).strftime("%Y-%m-%d")
                    else:
                        field_val = row.getValue(field.name)
                    field_vals.append(field_val)
            w.writerow([
                s.encode('utf-8') if type(s) is unicode else s
                for s in field_vals
            ])
        del row
        del rows

    return recordNum, outFullFile
def importar_tablas_corregidas(data,campos):
    arcpy.env.overwriteOutput = True
    if arcpy.Exists("CPV_SEGMENTACION2.sde") == False:
        arcpy.CreateDatabaseConnection_management("Database Connections",
                                                  "CPV_SEGMENTACION2.sde",
                                                  "SQL_SERVER",
                                                  ip_server,
                                                  "DATABASE_AUTH",
                                                  "us_arcgis_seg_2",
                                                  "MBs0p0rt301",
                                                  "#",
                                                  "CPV_SEGMENTACION",
                                                  "#",
                                                  "#",
                                                  "#",
                                                  "#")
    arcpy.env.workspace = "Database Connections/CPV_SEGMENTACION2.sde"
    path_conexion="Database Connections/CPV_SEGMENTACION2.sde"
    where_expression=expresion.Expresion(data, campos)


    temp_ubigeos = ""
    i=0
    for x in data:
        i=i+1
        if (i==1):
            temp_ubigeos="'{}'".format(x[0])
        else:
            temp_ubigeos = "{},'{}'".format(temp_ubigeos,x[0])


    print temp_ubigeos
    sql=expresion.Expresion(data, campos)
    manzanas_Layer = arcpy.MakeQueryLayer_management(path_conexion, 'TB_MZS',"SELECT geom,UBIGEO,CODCCPP,ZONA,MANZANA,VIV_MZ,FALSO_COD,MZS_COND,CANT_BLOCK FROM TB_MANZANA WHERE UBIGEO IN ({}) ".format(temp_ubigeos))
    zonas_Layer = arcpy.MakeQueryLayer_management(path_conexion, 'CPV_SEGMENTACION.dbo.VW_ZONA_CENSAL',"SELECT * FROM CPV_SEGMENTACION.dbo.VW_ZONA_CENSAL WHERE {} ".format(sql))
    eje_vial_Layer = arcpy.MakeQueryLayer_management(path_conexion, 'CPV_SEGMENTACION.dbo.TB_EJE_VIAL',"SELECT * FROM CPV_SEGMENTACION.dbo.TB_EJE_VIAL where UBIGEO IN  ({}) ".format(temp_ubigeos))
    #viviendas_Layer = arcpy.MakeQueryLayer_management(path_conexion, 'CPV_SEGMENTACION.dbo.VW_VIVIENDAS_U',"SELECT * FROM CPV_SEGMENTACION.dbo.VW_VIVIENDAS_U WHERE {} ".format(sql))
    #frentes_mfl = arcpy.MakeQueryLayer_management(path_conexion, 'CPV_SEGMENTACION.dbo.TB_FRENTES',"SELECT * FROM CPV_SEGMENTACION.dbo.VW_FRENTES WHERE {} ".format(sql))
    manzanas_mfl = arcpy.MakeFeatureLayer_management(manzanas_Layer,"manzanas_mfl")
    zonas_mfl=arcpy.MakeFeatureLayer_management(zonas_Layer, "zonas_mfl")
    eje_vial_mfl = arcpy.MakeFeatureLayer_management(eje_vial_Layer, "eje_vial_mfl")



    list_mfl=[[manzanas_mfl,tb_manzanas],
              #[frentes_mfl,tb_frentes]

              ]

    for i, x in enumerate(list_mfl, 1):
        temp= arcpy.CopyFeatures_management(x[0], 'in_memory/temp_{}'.format(i))
        # AddField's 4th positional argument is field_precision, so pass
        # field_length by keyword to actually get TEXT fields of length 50
        arcpy.AddField_management(temp, 'MANZANA2', 'TEXT', field_length=50)
        arcpy.CalculateField_management(temp, 'MANZANA2', '!MANZANA!', "PYTHON_9.3")
        arcpy.DeleteField_management(temp, ['MANZANA'])
        arcpy.CopyFeatures_management(temp, x[1])
        arcpy.AddField_management(x[1], 'MANZANA', 'TEXT', field_length=50)
        arcpy.CalculateField_management(x[1], 'MANZANA', '!MANZANA2!', "PYTHON_9.3")
        # drop the helper field from the output (not from temp, which is discarded)
        arcpy.DeleteField_management(x[1], ['MANZANA2'])

    arcpy.CopyFeatures_management(zonas_mfl, tb_zonas)
    arcpy.CopyFeatures_management(eje_vial_mfl, tb_ejes_viales)
    arcpy.TableToTable_conversion(path_conexion + '/CPV_SEGMENTACION.dbo.VW_MZS_CONDOMINIOS', path_ini,"tb_mzs_condominios.dbf")
    arcpy.env.workspace = path_ini+""
    arcpy.DeleteField_management(tb_manzanas, ['AEU','IDMANZANA'])
    arcpy.AddField_management(tb_manzanas, "IDMANZANA", "TEXT")
    expression = "(!UBIGEO!)+(!ZONA!)+(!MANZANA!)"
    arcpy.CalculateField_management(tb_manzanas, "IDMANZANA", expression, "PYTHON_9.3")
    arcpy.AddField_management(tb_manzanas, "AEU", "SHORT")
    arcpy.AddField_management(tb_manzanas, "AEU_2", "SHORT")
    arcpy.AddField_management(tb_manzanas, "FLG_MZ", "SHORT")
    arcpy.Dissolve_management(tb_frentes, tb_frentes_dissolve,['UBIGEO', 'ZONA', 'MANZANA', 'FRENTE_ORD'])


    database = "CPV_SEGMENTACION_GDB"
    if arcpy.Exists("{}.sde".format(database)) == False:
        arcpy.CreateDatabaseConnection_management("Database Connections",
                                                  "{}.sde".format(database),
                                                  "SQL_SERVER",
                                                  ip_server,
                                                  "DATABASE_AUTH",
                                                  "sde",
                                                  "$deDEs4Rr0lLo",
                                                  "#",
                                                  database,
                                                  "#",
                                                  "#",
                                                  "#",
                                                  "#")
    arcpy.env.workspace = "Database Connections/{}.sde".format(database)
    path_conexion2 = "Database Connections/{}.sde".format(database)

    list_capas=[
                ["{}.sde.SEGM_AEU".format(database),tb_aeus,2],
                ["{}.sde.SEGM_RUTA".format(database), tb_rutas, 2],

                ["{}.sde.SEGM_SECCION".format(database), tb_secciones, 1],
                ["{}.sde.SEGM_SITIOS_INTERES".format(database), tb_sitios_interes, 1],
                ["{}.sde.SEGM_SUBZONA".format(database), tb_subzonas, 2],
                ] ## source layer, destination, type (1 = query layer/copy features, 2 = query table/copy rows)

    for i,capa in enumerate(list_capas):

        if capa[2]==1:
            #print 'here'

            #if capa[1]==tb_viviendas_ordenadas_temp:
            #    x = arcpy.MakeQueryLayer_management(path_conexion2, 'capa{}'.format(i),"select * from {} where {}".format(capa[0], sql))
#
            #else:
            x = arcpy.MakeQueryLayer_management(path_conexion2, 'capa{}'.format(i),"select * from {} where {} ".format(capa[0],sql))

        else:
            x = arcpy.MakeQueryTable_management(capa[0], "capa{}".format(i), "USE_KEY_FIELDS", "objectid", "", sql)


        if capa[1] in (tb_rutas,tb_rutas_puntos):
            ##### handle the MANZANA field so Ñ characters survive the copy #####

            if capa[1] == tb_rutas:
                temp = arcpy.CopyRows_management(capa[0], 'in_memory/temp_m2{}'.format(i))
            else:
                temp = arcpy.CopyFeatures_management(capa[0], 'in_memory/temp_m{}'.format(i))


            arcpy.AddField_management(temp, 'MANZANA2', 'TEXT', field_length=50)
            arcpy.CalculateField_management(temp, 'MANZANA2', '!MANZANA!', "PYTHON_9.3")
            arcpy.DeleteField_management(temp, ['MANZANA'])

            ##### copying the outputs

            if capa[1] == tb_rutas:
                print capa[1]
                temp = arcpy.CopyRows_management(temp, capa[1])
            else:
                temp = arcpy.CopyFeatures_management(temp, capa[1])

            arcpy.AddField_management(capa[1], 'MANZANA', 'TEXT', field_length=50)
            arcpy.CalculateField_management(capa[1], 'MANZANA', '!MANZANA2!', "PYTHON_9.3")
            arcpy.DeleteField_management(capa[1], ['MANZANA2'])

            # rebuild AEU as zero-padded TEXT(3); field_length passed by keyword
            arcpy.AddField_management(capa[1], 'AEU2', 'TEXT', field_length=3)
            arcpy.CalculateField_management(capa[1], 'AEU2', 'str(!AEU!).zfill(3)', "PYTHON_9.3")
            arcpy.DeleteField_management(capa[1], ['AEU'])
            arcpy.AddField_management(capa[1], 'AEU', 'TEXT', field_length=3)
            arcpy.CalculateField_management(capa[1], 'AEU', '!AEU2!', "PYTHON_9.3")
            arcpy.DeleteField_management(capa[1], ['AEU2'])
        else:
            print capa[1]
            if capa[2] > 1:
                arcpy.CopyRows_management(x, capa[1])
            else:
                arcpy.CopyFeatures_management(x, capa[1])
def importar_tablas(data,campos):
    arcpy.env.overwriteOutput = True
    database='CPV_SEGMENTACION_GDB'
    ip = '172.18.1.93'
    usuario='sde'
    password='******'
    path_conexion=conx.conexion_arcgis(database,ip,usuario,password)
    arcpy.env.workspace =path_conexion

    #temp_ubigeos = ""
    #i=0

    #for x in data:
    #    i=i+1
    #    if (i==1):
    #        temp_ubigeos="'{}'".format(x[0])
    #    else:
    #        temp_ubigeos = "{},'{}'".format(temp_ubigeos,x[0])

    sql=expresion.Expresion_2(data, campos)

    list_capas=[
                ["{}.sde.TB_EJE_VIAL".format(database), tb_ejes_viales, 1],
                ["{}.sde.VW_ZONA_CENSAL".format(database), tb_zonas, 1],
                ["{}.sde.TB_MANZANA".format(database), tb_manzanas, 1],
                ["{}.sde.SEGM_SITIOS_INTERES".format(database), tb_sitios_interes, 1],

                ]



    for i,capa in enumerate(list_capas):
        print capa
        if capa[2] == 1:
            if capa[1] in [tb_manzanas,tb_sitios_interes,tb_ejes_viales]:

                if capa[1] == tb_sitios_interes:
                    x = arcpy.MakeQueryLayer_management(path_conexion, 'capa{}'.format(i),
                                                        "select * from {} where ({}) AND (CODIGO<91 AND CODIGO<>26) ".format(
                                                            capa[0],
                                                            sql))
                else:
                    x = arcpy.MakeQueryLayer_management(path_conexion, 'capa{}'.format(i),
                                                        "select * from {} where  {}  ".format(capa[0],
                                                                                                         sql))

            else:
                x = arcpy.MakeQueryLayer_management(path_conexion, 'capa{}'.format(i),
                                                    "select * from {} where {} ".format(capa[0], sql ))

        else:
            x = arcpy.MakeQueryTable_management(capa[0], "capa{}".format(i), "USE_KEY_FIELDS", "objectid", "", sql)



        if capa[1] in [tb_manzanas]:
            if capa[2]==1:
                temp = arcpy.CopyFeatures_management(x, "in_memory\\temp_{}".format(i))
                arcpy.AddField_management(temp, 'MANZANA2', 'TEXT', field_length=50)
                arcpy.CalculateField_management(temp, 'MANZANA2', '!MANZANA!', "PYTHON_9.3")
                arcpy.DeleteField_management(temp, ['MANZANA'])
                arcpy.CopyFeatures_management(temp, capa[1])
                arcpy.AddField_management(capa[1], 'MANZANA', 'TEXT', field_length=50)
                arcpy.CalculateField_management(capa[1], 'MANZANA', '!MANZANA2!', "PYTHON_9.3")
                arcpy.DeleteField_management(capa[1], ['MANZANA2'])
            else:
                temp = arcpy.CopyRows_management(x, "in_memory\\temp_{}".format(i))
                arcpy.AddField_management(temp, 'MANZANA2', 'TEXT', field_length=50)
                arcpy.CalculateField_management(temp, 'MANZANA2', '!MANZANA!', "PYTHON_9.3")
                arcpy.DeleteField_management(temp, ['MANZANA'])
                arcpy.CopyRows_management(temp, capa[1])
                arcpy.AddField_management(capa[1], 'MANZANA', 'TEXT', field_length=50)
                arcpy.CalculateField_management(capa[1], 'MANZANA', '!MANZANA2!', "PYTHON_9.3")
                arcpy.DeleteField_management(capa[1], ['MANZANA2'])

        else:


            if capa[2] > 1:
                arcpy.CopyRows_management(x, capa[1])
            else:
                arcpy.CopyFeatures_management(x, capa[1])
    arcpy.env.workspace = path_ini + ""
 arcpy.AddField_management(fcRCLdsslv,statsfields[0][0],"TEXT","","",sfieldlen)
 arcpy.AddField_management(fcRCLdsslv,statsfields[1][0],"TEXT","","",sfieldlen)
 # Calculate values
 sfields = []  
 for i in range(len(statsfields)):
     sfields.append(statsfields[i][0])
     arcpy.AddField_management(fcRCLdsslv,statsfields[i][0],"TEXT","","",sfieldlen)
     sfield = statsfields[i][1] + "_" + statsfields[i][0]
     calcexpr = ('!{}!').format(sfield)
     arcpy.CalculateField_management(fcRCLdsslv,statsfields[i][0],calcexpr,"PYTHON_9.3")
 ### Connect_Property
 print('Working on Connect_Property')
 # Make query table
 mqtblList = [fcCadPPath,tblPLnkPath]
 whereClause = tblPropLink + "." + parIDfield + " = " + fcCadP + "." + parIDfield # NOTE: no quotes - known bug
 arcpy.MakeQueryTable_management(mqtblList,"propQueryTbl","ADD_VIRTUAL_KEY_FIELD","","",whereClause)
 # Get number of rows
 numMQrows = int(arcpy.GetCount_management("propQueryTbl").getOutput(0))
 # Export
 print('Exporting...')
 arcpy.CopyFeatures_management("propQueryTbl",fcPrclPLPath)
 # Check number of rows
 numPPLrows = int(arcpy.GetCount_management(fcPrclPLPath).getOutput(0))
 if numPPLrows != numMQrows:
     print('ERROR: Wrong number of rows exported for link FC; {} versus {}'.format(numMQrows,numPPLrows))
     errorLogic = 1
 else:
     print('Correct number of rows exported for link FC.')
 # Dissolve on ID
 print('Dissolving on ID...')
 dfield = tblPropLink + "_" + propIDfield
Ejemplo n.º 28
0
print "***\nworking With A Many To One Relationship\n***"

#todo: declare parameters here
fc = "AucklandStravaMetro_Edges_NZTM"
fckey = 'AucklandStravaMetro_Edges_NZTM.edge_id'
tbl = r'Database Connections\[email protected]\GIS.gisadmin.auckland_edges_ride_data'
tblkey = 'edge_id'
sqlClause = "(year = 2013 AND hour IN (16,17)) AND day IN (7,14,21,28,35,42,49,56,63,70,77,84,91,98,105,112,119,126,133,140,147,154,161,168,175,182,189,196,203,210,217,224,231,238,245,252,259,266,273,280,287,294,301,308,315,322,329,336,343,350,357,364)"
whereClause = "(CycleEvents.edge_id = AucklandStravaMetro_Edges_NZTM.edge_id)"

#todo: make query table
print 'make Query Table'
listTables = []
listTables.append(tbl)
arcpy.MakeQueryTable_management(in_table=listTables,
                                out_table="auckland_edges_ride_data",
                                in_key_field_option="ADD_VIRTUAL_KEY_FIELD")

#copy Query Table Locally
print 'Copy Query Tables Locally to Cycle Events'
if arcpy.Exists("CycleEvents"):
    arcpy.Delete_management("CycleEvents")
arcpy.CopyRows_management("auckland_edges_ride_data", "CycleEvents")
flds = []
flds.append("edge_id")
arcpy.AddIndex_management(in_table="CycleEvents",
                          fields=flds,
                          index_name="EdgeIdIdx")

#many To One Join
#print "start Many To One Join"