Example #1
def convertExcelToTable(tempTable):
    # 'jointable' is expected to be a module-level path for the output table
    if arcpy.Exists(jointable):
        arcpy.Delete_management(jointable)
        arcpy.AddMessage('%s deleted!' % jointable)
    arcpy.ExcelToTable_conversion(tempTable, jointable)
Example #2
def crearDominios(archExcel=""):
    #------------------------------
    if eco:
        imprimir("Procesando DOMINIOS")
    if archExcel == "":
        archExcel = archivoE
    if not os.path.exists(archExcel):
        return
    dominios = ['ESTADO', 'CAUSAL', 'EQUIPO']
    listaD = arcpy.da.ListDomains(arcpy.env.workspace)
    ll = [dn.name for dn in listaD]
    for dm in dominios:
        x = "SI_" + dm
        if eco:
            imprimir("DOMINIO =" + x)
        if x not in ll:
            try:
                # Convert the sheet named after the domain, then load it as a coded-value domain
                arcpy.ExcelToTable_conversion(archExcel, "T_" + x, x)
                arcpy.TableToDomain_management("T_" + x, "CODIGO",
                                               "DESCRIPCION",
                                               arcpy.env.workspace, x)
            except Exception:
                imprimir("===> PROBLEMA PARA PROCESAR DOMINIO:" + x)
Example #3
def YearXLSX():
    newdf2 = pd.DataFrame()
    YearList = data.Scaled_Year.unique()

    for x in YearList:
        scayea = pd.DataFrame()
        HA = pd.DataFrame()
        ha13 = pd.DataFrame()
        scayea = data[data.Scaled_Year == x]

        for y in Git_HATM_List:

            HA = scayea[scayea.Timber_Mark == y]

            if not HA.empty:
                if not ha13.empty:
                    ha13 = ha13.append(HA, ignore_index=True)
                else:
                    ha13 = HA

        if not ha13.empty:
            thisyeardata = ha13
            print(thisyeardata.head())
            print('hi rex you were here recently')
            newdf = thisyeardata.join(House_TM.set_index('WILP_TM'),
                                      on='Timber_Mark')

            ##            print newdf
            ##            try:
            newdf['TM_Vol_Av_HA'] = newdf.apply(
                lambda row: 0 if row.Total_Volume == 0 or row.TM_TOT_AREA == 0
                else row.Total_Volume / row.TM_TOT_AREA,
                axis=1)
            ##            except:
            ##                newdf['TM_Vol_Av_HA']=0
            ##            try:
            newdf['TM_Val_Av_HA'] = newdf.apply(
                lambda row: 0 if row.Total_Value == 0 or row.TM_TOT_AREA == 0
                else row.Total_Value / row.TM_TOT_AREA,
                axis=1)
            ##            except:
            ##                newdf['TM_Val_Av_HA']=0
            newdf['TM_Hz_Vol'] = newdf.apply(lambda row: 0
                                             if row.TM_Vol_Av_HA == 0 else row.
                                             TM_Vol_Av_HA * row.WILP_TM_AREA,
                                             axis=1)
            newdf['TM_Hz_Val'] = newdf.apply(lambda row: 0
                                             if row.TM_Val_Av_HA == 0 else row.
                                             TM_Val_Av_HA * row.WILP_TM_AREA,
                                             axis=1)

            newdf2 = newdf2.append(newdf, ignore_index=True)

    print(newdf2.head())
    filename = 'rexington13.xlsx'  ## str('scaleyear_%s' %x+'.xlsx')
    filefile = os.path.join(TempContainer, filename)
    newdf2.to_excel(filefile, engine='xlsxwriter')

    arcpy.env.workspace = str(TempGDB)
    arcpy.ExcelToTable_conversion(filefile, filename[:-5])
Example #4
def import_afpat_data(afpat_spreadsheet, output_gdb, logger):
    """
    Import AFPAT data from a spreadsheet into an ArcGIS table.
    """
    import arcpy

    logger.debug("start: import_afpat_data")

    if not os.path.exists(afpat_spreadsheet):
        error = "can't find afpat spreadsheet {}".format(afpat_spreadsheet)
        logger.error(error)
        raise IOError(error)

    if not os.path.exists(output_gdb):

        error = "can't find scratch gdb {}".format(output_gdb)
        logger.error(error)
        raise IOError(error)

    sheet = "2. User input & Results"

    out_table = os.path.join(output_gdb, "afpat_raw")

    if arcpy.Exists(out_table):
        arcpy.Delete_management(out_table)

    arcpy.ExcelToTable_conversion(afpat_spreadsheet, out_table, sheet)

    logger.debug("finish: import_afpat_data")
def spreadsheetToLinePolygon(dataFile, geometryType, xField, yField,
                             spreadsheetUniqueID, inputCoordinateSystemName,
                             inputCoordinateSystem, outputCoordinateSystemName,
                             outputCoordinateSystem, transformation):
    # If excel spreadsheet
    if dataFile.lower().endswith(('.xls', '.xlsx')):
        dataFile = arcpy.ExcelToTable_conversion(dataFile,
                                                 "in_memory\\DatasetExcel", "")

    # If projection needed
    if (transformation.lower() != "none"):
        printMessage("Importing CSV/Excel...", "info")
        arcpy.MakeXYEventLayer_management(dataFile, xField, yField,
                                          "InputLayer", inputCoordinateSystem,
                                          "")
        printMessage(
            "Projecting layer from " + inputCoordinateSystemName + " to " +
            outputCoordinateSystemName + "...", "info")
        arcpy.Project_management(
            "InputLayer", os.path.join(arcpy.env.scratchGDB,
                                       "Layer_Projected"),
            outputCoordinateSystem, transformation, inputCoordinateSystem,
            "NO_PRESERVE_SHAPE", "")
    else:
        printMessage("Importing CSV/Excel...", "info")
        arcpy.MakeXYEventLayer_management(dataFile, xField, yField, "Layer",
                                          inputCoordinateSystem, "")
        arcpy.CopyFeatures_management("Layer", "in_memory\\Dataset", "", "0",
                                      "0", "0")

    if (transformation.lower() != "none"):
        dataset = os.path.join(arcpy.env.scratchGDB, "Layer_Projected")
    else:
        dataset = "in_memory\\Dataset"

    printMessage("Creating layer...", "info")
    if (geometryType.lower() == "line"):
        # Convert the points to lines using the unique identifier field to create each unique line
        arcpy.PointsToLine_management(dataset, "in_memory\\DatasetLine",
                                      spreadsheetUniqueID, "", "NO_CLOSE")
        output = arcpy.MakeFeatureLayer_management("in_memory\\DatasetLine",
                                                   "Layer", "", "", "")
    if (geometryType.lower() == "polygon"):
        # Convert the points to lines using the unique identifier field to create each unique line, then close the final line
        arcpy.PointsToLine_management(dataset, "in_memory\\DatasetLine",
                                      spreadsheetUniqueID, "", "CLOSE")
        # Convert the lines to polygons and join on attribute from lines
        arcpy.FeatureToPolygon_management("in_memory\\DatasetLine",
                                          "in_memory\\DatasetPolygon", "",
                                          "ATTRIBUTES", "")
        arcpy.JoinField_management("in_memory\\DatasetPolygon", "OID",
                                   "in_memory\\DatasetLine", "OID",
                                   spreadsheetUniqueID)
        output = arcpy.MakeFeatureLayer_management("in_memory\\DatasetPolygon",
                                                   "Layer", "", "", "")
    return output
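A hedged usage sketch for spreadsheetToLinePolygon(); the workbook path, field names, and spatial references are assumptions chosen to exercise the "no transformation" branch.

# Build a line layer from an .xlsx of ordered vertices grouped by TRACK_ID
track_layer = spreadsheetToLinePolygon(
    dataFile=r"C:\data\gps_points.xlsx",
    geometryType="Line",
    xField="LON",
    yField="LAT",
    spreadsheetUniqueID="TRACK_ID",
    inputCoordinateSystemName="WGS 1984",
    inputCoordinateSystem=arcpy.SpatialReference(4326),
    outputCoordinateSystemName="WGS 1984",
    outputCoordinateSystem=arcpy.SpatialReference(4326),
    transformation="none")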
Example #6
def refresh_table(gdb, excel, table):
    '''
    Any updates made to the xlsx file are written into the overwritten table 'MapOrderTable',
    which is configured and joined within the ArcGIS Pro project.
    '''

    # Create table from Excel File (First update from map order)
    # logger.info("Refreshing table...")
    arcpy.ExcelToTable_conversion(excel,
                                  os.path.join(gdb, table))
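A minimal sketch of calling refresh_table(), assuming arcpy.env.overwriteOutput is enabled so the existing table can be replaced; all paths and names are hypothetical.

arcpy.env.overwriteOutput = True
refresh_table(
    gdb=r"C:\projects\MapBook\MapBook.gdb",
    excel=r"C:\projects\MapBook\MapOrder.xlsx",
    table="MapOrderTable")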
Example #7
def join_concelhos_contractos(tabela, folha, concelho, montante, concelhos):
    """
    Group By and Join
    """
    # Convert the spreadsheet to a dbf table in a scratch folder
    if not os.path.exists("C:\\areatrab"):
        os.mkdir("C:\\areatrab")
    arcpy.ExcelToTable_conversion(tabela, "C:\\areatrab\\tabela.dbf", folha)
    # Sum the amounts per concelho, then join the totals onto the polygons
    arcpy.Statistics_analysis(
        "C:\\areatrab\\tabela.dbf", "C:\\areatrab\\sum_concelho.dbf", [[montante, "SUM"]], concelho)
    arcpy.JoinField_management(concelhos, "FID", "C:\\areatrab\\sum_concelho.dbf", concelho, "")
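A hypothetical call; the workbook, worksheet, field names, and shapefile are placeholders.

join_concelhos_contractos(
    tabela=r"C:\dados\contratos.xlsx",    # source workbook
    folha="Contratos",                    # worksheet to convert
    concelho="CONCELHO",                  # group-by field
    montante="MONTANTE",                  # amount field to sum
    concelhos=r"C:\dados\concelhos.shp")  # polygons that receive the summed values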
Example #8
def upLoadFile(file):

    table = wksp + "\\Temp\\CSV_To_Table.gdb\\" + surveyName
    xyeventlyr = table + "XY"
    outfc = table + "FC"

    arcpy.CreateFileGDB_management(wksp + "\\Temp", "CSV_To_Table.gdb")
    if FeatureClass:
        print "Found Something"
        arcpy.ExcelToTable_conversion(CSVFile, table, "RoundIISurvey")
        #may still need to handle field type here
        arcpy.MakeXYEventLayer_management(table, "X_Coord", "Y_Coord",
                                          xyeventlyr)
        arcpy.FeatureClassToFeatureClass_conversion(
            xyeventlyr, wksp + "\\Temp\\CSV_To_Table.gdb", surveyName + "FC")
    else:
        arcpy.ExcelToTable_conversion(
            CSVFile, wksp + "\\Temp\\CSV_To_Table.gdb\\" + surveyName,
            "RoundIISurvey")
    return outfc
def projectJoin(sourcePath, wkspGdb, inSpatial, ctyAbbr,
                sourceOwnershipTable, joinInField, joinJoinField):
    '''Projects county dataset to workspace geodatabase. If there is a table to
    join in other attributes, joins table and then does projection. Will
    overwrite existing feature dataset if exists.'''
    # spatial reference object for projection
    outCoordsObj = arcpy.SpatialReference("NAD 1983 UTM Zone 15N")
    # output path for parcel feature class
    outPath = os.path.join(wkspGdb, ctyAbbr)
    tempPath = os.path.join(wkspGdb, "temp")
    print "Creating feature class {0}...".format(outPath)
    # clear in_layer if exists from previous failed run
    if arcpy.Exists("in_layer"):
        arcpy.Delete_management("in_layer")
    # clear tempFc used for join if exists from previous failed run
    if arcpy.Exists(tempPath):
        arcpy.Delete_management(tempPath)
    # set up dataset
    inLayer = arcpy.MakeFeatureLayer_management(inSpatial, "in_layer")
    # join if needed
    if sourceOwnershipTable != '':
        # if xslx, import table to wkspGdb, else set joinTable path
        if sourceOwnershipTable[-4:] == 'xlsx':
            inXlsx = os.path.join(sourcePath, ctyAbbr, sourceOwnershipTable)
            outTablePath = os.path.join(wkspGdb, ctyAbbr+"_t")
            # delete if exists
            if arcpy.Exists(outTablePath):
                arcpy.Delete_management(outTablePath)
            # excel to table
            arcpy.ExcelToTable_conversion(inXlsx, outTablePath)
            joinTable = outTablePath
        else:
            joinTable = os.path.join(sourcePath, ctyAbbr, sourceOwnershipTable)
        print "Adding join..."
        inField = joinInField
        joinField = joinJoinField
        arcpy.AddJoin_management(inLayer, inField, joinTable, joinField)
        print "Copying joined features..."
        tempFc = arcpy.CopyFeatures_management(inLayer, tempPath)
        # reset inlayer to copied features
        arcpy.Delete_management("in_layer")
        inLayer = arcpy.MakeFeatureLayer_management(tempFc, "in_layer")
    # delete if exists
    if arcpy.Exists(outPath):
        arcpy.Delete_management(outPath)
    # project
    print "Projecting..."
    outFc = arcpy.Project_management(inLayer, outPath, outCoordsObj)
    # delete layer
    arcpy.Delete_management("in_layer")
    # delete tempFc used for join if needed
    if arcpy.Exists(tempPath):
        arcpy.Delete_management(tempPath)
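A hedged example of calling projectJoin() with an ownership spreadsheet joined before projecting; every path, abbreviation, and field name here is a placeholder.

projectJoin(
    sourcePath=r"C:\source\counties",
    wkspGdb=r"C:\work\parcels.gdb",
    inSpatial=r"C:\source\counties\HENN\parcels.shp",
    ctyAbbr="HENN",
    sourceOwnershipTable="ownership.xlsx",  # pass '' to skip the join
    joinInField="PIN",
    joinJoinField="PIN")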
Example #10
def excelToTable(workspace, output_gdb, output_temp, output, input_folder):
    logger.info('CONVERSION FROM XLSX TO FILE GEODATABASE TABLE...\n' + '\n')
    input_file_list = [
        f for f in os.listdir(input_folder)
        if os.path.isfile(os.path.join(input_folder, f))
    ]
    sheet = "Operations"
    count_xlsx = 0
    total_xlsx = len(input_file_list)
    acceptedFieldList = returnAcceptedFieldList()

    logger.info(
        'There are {0} excel spreadsheets to convert\n'.format(total_xlsx))

    for xlsx in input_file_list:
        logger.info('At {0} of {1} conversions'.format(count_xlsx, total_xlsx))
        logger.info('running conversion of: %s', xlsx)
        arcpy.ExcelToTable_conversion(os.path.join(input_folder, xlsx), output_temp, sheet)
        logger.info('completed conversion...')

        ### adjust temp table's schema
        logger.info('updating temp table schema...')
        logger.info('adding new fields...')
        addFields(output_temp)

        ### list fields and their information
        fieldInfo(output_temp)

        ### update field types in preparation for the append
        fieldTypeConverter(output_temp)

        logger.info('deleting original fields that cause issue with schema...')
        fieldsToDelete(acceptedFieldList, output_temp)

        if not arcpy.Exists(output):
            logger.info('creating CCI table...')
            arcpy.CreateTable_management(output_gdb, 'CCI', output_temp)

        # if the append fails, show what the differences are in the schema
        try:
            logger.info('appending to output table... \n')
            arcpy.Append_management(output_temp, output)
            count_xlsx += 1
        except:
            logger.info(
                'APPEND FAILED due to schema differences\nSCRIPT STOPPED')
            compareTables(output, output_temp, workspace)
            exit()

        logger.info('deleting temp table... \n')
        arcpy.Delete_management(in_data=output_temp)
        logger.info('moving to next spreadsheet... \n')
Example #11
def storetivity_data_check(loc):
    input_excel = loc
    sheet_name = "data"
    memory_table = "in_memory" + "\\" + "memoryTable"
    #Makes sure memory_table is empty
    arcpy.Delete_management(memory_table)
    arcpy.ExcelToTable_conversion(input_excel, memory_table, sheet_name)
    STORE = []
    with arcpy.da.SearchCursor(memory_table, ['testS', 'WellID']) as cursor:
        for row in cursor:
            if row[0] != 0 and row[0] is not None:
                STORE.append(row)
    return STORE
def ExcelToTable(ExcelFile, SheetName, TableName):
    '''
    Function that takes an Excel file and a sheet name and
    converts that sheet into an ESRI table. All arguments
    should be strings. Note: the sheet must be set up with one
    top row of column names and the remaining rows containing
    the data.
    '''
    # Location of File GDB
    arcpy.env.workspace = r"C:\Users\jamesd26.NETID\Desktop\Domain Updates\zEquipmentInventory.gdb"
    #Remove existing table
    arcpy.env.overwriteOutput = True
    arcpy.ExcelToTable_conversion(ExcelFile, TableName, SheetName)
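A minimal sketch of calling the helper above; the workbook, sheet, and table names are hypothetical, and the output lands in the geodatabase hard-coded inside the function.

ExcelToTable(r"C:\data\EquipmentCodes.xlsx", "Codes", "EquipmentCodes")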
Example #13
def importallsheets(in_excel, outGDB):
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]

    print('{} sheets found: {}'.format(len(sheets), ','.join(sheets)))
    for sheet in sheets:
        out_table = os.path.join(
            outGDB,
            arcpy.ValidateTableName(
                '{0}_{1}'.format(os.path.basename(in_excel), sheet), outGDB))
        print('Converting {} to {}'.format(sheet, out_table))

        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
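A hypothetical call for the xlrd-based loop above. Note that xlrd 2.x dropped .xlsx support, so this pattern needs xlrd < 2.0 (or a switch to openpyxl) when the workbook is not a legacy .xls file.

importallsheets(r"C:\data\inventory.xlsx", r"C:\data\inventory.gdb")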
Example #14
def excelToTable():
    """
    Creates a geodatabase table from an Excel workbook.

    Returns: the output table path
    """

    in_excel = r'C:\Users\JBurton_AOR\Documents\ArcGIS\Projects\Oahu\Oahu_Attribute_Table.xlsx'
    out_table = r'C:\Users\JBurton_AOR\Documents\ArcGIS\Projects\Oahu\Oahu.gdb\Oahu_Attribute_Table'

    # excel to table using local variables
    arcpy.ExcelToTable_conversion(in_excel, out_table)
    print('Oahu attribute table completed')
    return out_table
Example #15
def excelTOtable():
    env.workspace = excels

    excel_list = arcpy.ListFiles()
    for filename in excel_list:
        arcpy.ExcelToTable_conversion(filename, filename)
        print "File %s converted to DBASE file." % filename

    DBASEs = arcpy.ListTables()
    for filename in DBASEs:
        arcpy.TableToTable_conversion(filename, geodatabase,
                                      filename[:-4] + "_tempTable")
        print "File %s converted to Table." % filename
        arcpy.Delete_management(filename)
        print "Redundant DBASE files deleted."
Example #16
def importallsheets(in_excel, out_gdb):
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]
    print('{} sheets found: {}'.format(len(sheets), ','.join(sheets)))
    for sheet in sheets:
        # The out_table is based on the input excel filename
        # and an underscore separator followed by the sheet name
        out_table = os.path.join(
            out_gdb,
            arcpy.ValidateTableName(
                "{0}_{1}".format(os.path.basename(in_excel), sheet), out_gdb))
        print('Converting {} to {}'.format(sheet, out_table))

        # Perform the conversion
        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
Example #17
def importallsheets(in_excel, table_prefix, out_gdb):
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]

    arcpy.AddMessage('{} sheets found: {}'.format(len(sheets),
                                                  ','.join(sheets)))
    for sheet in sheets:
        out_table = os.path.join(
            out_gdb,
            arcpy.ValidateTableName("{0}_{1}".format(table_prefix, sheet),
                                    out_gdb))

        arcpy.AddMessage('Converting {} to {}'.format(sheet, out_table))

        # Perform the conversion
        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
Example #18
def criminalityIndex(table, neighborhoods):
    inputTable = arcpy.ExcelToTable_conversion(table, "criminalityTable", "")
    crimeLayer = arcpy.MakeFeatureLayer_management(neighborhoods,
                                                   "crime_layer")

    #Joining the crime table with the CBS neighborhood dataset table
    arcpy.JoinField_management(crimeLayer, "statcode", inputTable, "STATCODE",
                               ["inwoners", "crimeTotal", "relatCrime"])
    criminalityFC = arcpy.CopyFeatures_management(neighborhoods,
                                                  "criminality2015")
    arcpy.DeleteField_management(
        criminalityFC, ["AREA", "POLY_AREA", "standStep1", "standCrime"])
    arcpy.AddGeometryAttributes_management(criminalityFC, "AREA", "",
                                           "SQUARE_KILOMETERS")

    #Calculating the population density on a neighborhood level as equal input for the standardized crime rate
    arcpy.AddField_management(criminalityFC, "standStep1", "FLOAT", 10, 4, "",
                              "", "NULLABLE")
    arcpy.CalculateField_management(criminalityFC, "standStep1",
                                    "[INWONERS] / [POLY_AREA]")

    #Calculating a standardized crime rate by dividing the total amount of crimes by the population density
    arcpy.AddField_management(criminalityFC, "standCrime", "FLOAT", 10, 4, "",
                              "", "NULLABLE")
    #To prevent overflow errors, it is avoided that there is a division by 0
    expression1 = "Calculate(!crimeTotal!, !standStep1!, !standCrime!)"
    codeblock1 = """def Calculate(x, y, z):
        if (x != 0 and y != 0):
            z = x / y
        elif (x == 0 or y == 0):
            z = 0
        return z """
    arcpy.CalculateField_management(criminalityFC, "standCrime", expression1,
                                    "PYTHON_9.3", codeblock1)
    """
    arcpy.AddField_management(criminalityFC, "crime", "FLOAT", 10, 4, "", "", "NULLABLE")    
    expression2 = "Calculate(!relatCrime!, !crime!)"
    codeblock2 = "def Calculate(x, y):
        if (x == '.'):
            y = 0
        elif (x != '.'):
            y = x
        return y"
    arcpy.CalculateField_management(criminalityFC, "crime", expression2, "PYTHON_9.3", codeblock2)
    arcpy.DeleteField_management(criminalityFC, ["INWONERS_1", "STATCODE_1", "CRIMETOT_1"])
    """
    return criminalityFC
Example #19
def prepData():
    #*********Prep Fields**********
    #Bring Excel Into File GDB
    arcpy.ExcelToTable_conversion(excelHRI, tableHRI, "")

    #Recalc MUNI
    arcpy.CalculateField_management(tableHRI, "MUNI", "!MUNI!.upper()",
                                    "PYTHON_9.3")

    #Add Full Address Field
    arcpy.AddField_management(tableHRI, "FULLADDR", "TEXT", "", "", "", "",
                              "NULLABLE", "")
    #Calculate Full Address
    address_codeblock = """def ifBlock(House_Number, Street_Direction, Street_Name, Street_Type, MUNI):
        return House_Number + " " + Street_Direction + " " + Street_Name + " " + Street_Type + " " + MUNI"""
    arcpy.CalculateField_management(
        tableHRI, "FULLADDR",
        "ifBlock(!House_Number!, !Street_Direction!, !Street_Name!, !Street_Type!, !MUNI!)",
        "PYTHON_9.3", address_codeblock)
Example #20
def importallsheets(in_excel, out_gdb):
    # Function taken from ESRI documentation http://pro.arcgis.com/en/pro-app/tool-reference/conversion/excel-to-table.htm
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]

    print('{} sheets found: {}'.format(len(sheets), ','.join(sheets)))
    for sheet in sheets:
        # The out_table is based on the input excel file name
        # an underscore (_) separator followed by the sheet name
        out_table = os.path.join(
            out_gdb,
            arcpy.ValidateTableName(
                "{0}_{1}".format(os.path.basename(in_excel), sheet),
                out_gdb))

        print('Converting {} to {}'.format(sheet, out_table))

        # Perform the conversion
        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
    return
Example #21
def store_sheet(loc):
    """Retrieves pump test data from a spreadsheet and converts it to a json
       file for future use.
    
    This function reads Justin Blum's pump test spreadsheet. The Relate ID and
    any Storativity values that are greater than zero and not null are then
    recorded. These are then dumped into a .json file for future use.
    
    Parameters
    ----------
    loc: Excel Spreadsheet
    This spreadsheet has data from every pumping test performed in Minnesota from
    the last 57 years.
    
    Notes
    -----
    This function only needs to be executed once for the .json file to be created.
    The spreadsheet is not currently being updated so the data set will not change.
    """
    input_excel = loc
    sheet_name = "data"
    memory_table = "in_memory" + "\\" + "memoryTable"
    #Makes sure memory_table is empty
    arcpy.Delete_management(memory_table)
    arcpy.ExcelToTable_conversion(input_excel, memory_table, sheet_name)
    storativity = {}
    field_indices = {}
    fields = ['Well_ID', 'Storativity']
    for i, field in enumerate(fields):
        storativity[field] = []
        field_indices[i] = field
    with arcpy.da.SearchCursor(memory_table, ['WellID', 'testS']) as cursor:
        for row in cursor:
            for i in range(len(row)):
                if row[1] != 0 and row[1] is not None:
                    storativity[field_indices[i]].append(row[i])
    store_data = json.dumps(storativity)
    with open('storativity_data.json', 'w') as outfile:
        outfile.write(store_data)
    return storativity
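The docstring notes that the .json file is written once for later use; a small sketch of reading it back (the file name and keys come from the function above):

import json

# Reload the dictionary written by store_sheet(); keys follow the 'fields' list above
with open('storativity_data.json') as f:
    storativity = json.load(f)

well_ids = storativity['Well_ID']
values = storativity['Storativity']
print(len(values), 'storativity records loaded')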
Example #22
    def onClick(self):

        # Import the Excel table into ArcGIS
        arcpy.ExcelToTable_conversion(r'path\to\matriz_impacto.xls', r'path\to\save.dbf', 'nome')

        # Select localizacoes = [1, 2, 3, 4] and split the impacts by area
        for i in range(1, 5):

            arcpy.TableSelect_analysis('nome', r'path\to\split' + str(i) + '.dbf', '"Localizaca" = ' + str(i))

            # Count the number of rows in the tables just created
            count = arcpy.GetCount_management('split' + str(i))

            # Copy the shapes for each localizacao
            for j in range(0, int(count[0])):
                arcpy.CopyFeatures_management(['ADA_SIRGAS', 'AID_SIRGAS', 'AII_SIRGAS', 'AAR_SIRGAS'][i - 1], r'path\to\copy' + str(i) + '_' + str(j), '#', '0', '0', '0')

                arcpy.TableSelect_analysis('split' + str(i), r'path\to\split' + str(i) + '_' + str(j), '"OID" = ' + str(j))

                arcpy.JoinField_management('copy' + str(i) + '_' + str(j), 'Valor', 'split' + str(i) + '_' + str(j), 'LOCALIZACA', '#')

                arcpy.PolygonToRaster_conversion('copy' + str(i) + '_' + str(j), 'SIGNIFICAN', r'path\to\impacto' + str(i) + '_' + str(j) + '.tif', 'CELL_CENTER', 'NONE', '0,0001')
Example #23
# Local variables:
bou2_4p_shp = "C:\\Users\\Hong\\Documents\\ArcGIS\\ADS-B\\省界\\bou2_4p.shp"
bou2_4p_shp__2_ = bou2_4p_shp
bou2_4p_Layer = "bou2_4p_Layer"
bou2_4p_Layer__3_ = bou2_4p_Layer
bou2_4p_Layer__2_ = bou2_4p_Layer__3_
省名_xlsx = "C:\\Users\\Hong\\Documents\\ArcGIS\\ADS-B\\省界\\省名.xlsx"
Output_Table = "C:\\Users\\Hong\\Documents\\ArcGIS\\ADS-B\\ADS-B.gdb\\省名_ExcelToTable"

# Process: Define Projection
arcpy.DefineProjection_management(
    bou2_4p_shp,
    "GEOGCS['WGS_1984_(Transit)',DATUM['World_Geodetic_System_1984_(Transit)',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
)

# Process: Make Feature Layer
arcpy.MakeFeatureLayer_management(
    bou2_4p_shp__2_, bou2_4p_Layer, "", "",
    "FID FID VISIBLE NONE;Shape Shape VISIBLE NONE;AREA AREA VISIBLE NONE;PERIMETER PERIMETER VISIBLE NONE;BOU2_4M_ BOU2_4M_ VISIBLE NONE;BOU2_4M_ID BOU2_4M_ID VISIBLE NONE;ADCODE93 ADCODE93 VISIBLE NONE;ADCODE99 ADCODE99 VISIBLE NONE;PROVINCE PROVINCE VISIBLE NONE;PROVINCE_1 PROVINCE_1 VISIBLE NONE;PROVINCE_2 PROVINCE_2 VISIBLE NONE"
)

# Process: Delete Field
arcpy.DeleteField_management(bou2_4p_Layer, "PROVINCE")

# Process: Excel To Table
arcpy.ExcelToTable_conversion(省名_xlsx, Output_Table, "Sheet1")

# Process: Join Field (2)
arcpy.JoinField_management(bou2_4p_Layer__3_, "BOU2_4M_", Output_Table,
                           "BOU2_4M", "PROVINCE")
Example #24
## ---------------------------------------------------------------------------
## 2. Generate XY Layer event
## Description: Creates a new point feature layer based on x- and y-coordinates defined in a source table.

print "\nStep 2 Generate XY Layer event starts at", datetime.datetime.now(
).strftime("%A, %B %d %Y %I:%M:%S%p")

# Set local variables
xlsx = "DWR_Water_Right_-_Net_Amounts_import_astext.xlsx"
table = "DWR_Water_Right_Net_Amounts.dbf"
x_coords = "Longitude"
y_coords = "Latitude"

# Convert excel to dbf
in_Table = os.path.join(interFolder, table)
arcpy.ExcelToTable_conversion(xlsx, in_Table)

# Make the XY event layer
nameLayer = os.path.splitext(in_Table)[0] + '_prepared'
outLayer = os.path.join(interFolder, nameLayer)
arcpy.MakeXYEventLayer_management(in_Table, x_coords, y_coords, outLayer)

# Save to a layer file
saved_Layer = outLayer + ".lyr"
arcpy.SaveToLayerFile_management(outLayer, saved_Layer)

# Copy the feature class into the geodatabase
arcpy.FeatureClassToGeodatabase_conversion(saved_Layer, out_gdb)

print "Step 2 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")
Example #25
# Get the map document
mxd = arcpy.mapping.MapDocument("CURRENT")
# Get the first data frame in the map document
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
# Create a new layer named block
arcpy.MakeFeatureLayer_management('c:\Users\student\Desktop\O\kc_block_10.shp',
                                  'block')
# Add the layer to the map at the bottom of the TOC in data frame 0
# arcpy.mapping.AddLayer(df, block,"BOTTOM")

# Import the excel file and convert it to table file, which is read by ArcMap
inputExcel = r'c:\Users\student\Desktop\O\Car2goOD.xls'
sheetName = "Sheet1"
memoryTable = "in_memory" + "\\" + "memoryTable"
arcpy.Delete_management(memoryTable)
arcpy.ExcelToTable_conversion(inputExcel, memoryTable, sheetName)
# arcpy.MakeXYEventLayer_management("memoryTable", "olon", "olat", "ocar_layer",r"Coordinate Systems\Geographic Coordinate System\North America\Nad 1983","")

# Display the x,y coordinates for the origin cars
try:
    # Set the local variables
    in_Table = "memoryTable.dbf"
    x_coords = "olon"
    y_coords = "olat"
    z_coords = ""
    out_Layer = "ocar_layer"
    saved_Layer = r"c:\Users\student\Desktop\O\ocar.lyr"

    # Set the spatial reference GCS_North_American_1983
    spRef = r"Coordinate Systems\Geographic Coordinate System\North America\Nad 1983"
Example #26
arcpy.CreateFileGDB_management(interFolder, gdb_name)

print "Step 1 Create a geodatabase completed at", datetime.datetime.now(
).strftime("%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 2. Excel To Table
## Description: Convert excel files to dbf files for US.

print "\nStep 2 Excel To Table starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# Input
in_excel = "US\\CA1_1969_2015_personalIncome.xlsx"
output = os.path.join(interFolder, "Income_US.dbf")
arcpy.ExcelToTable_conversion(in_excel, output)
arcpy.AddField_management(output, "ISO_GEOID", "TEXT", "", "", "8")
arcpy.CalculateField_management(output, "ISO_GEOID", "'840' + !GeoFIPS!",
                                "PYTHON_9.3")
dbfList_US.append(output)

print "Step 2 completed at", datetime.datetime.now().strftime("%I:%M:%S%p")

## ---------------------------------------------------------------------------
## 3. Join excel to shapefile
## Description: Join excel to shapefile for US dataset

print "\nStep 3 Join starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# Input
Example #27
def excelToTable(workspace, output_gdb, output_temp, output, input_folder,
                 start_xlsx):
    # creates a list of all the xlsx file found in the folder
    input_file_list = [
        f for f in os.listdir(input_folder) if
        os.path.isfile(os.path.join(input_folder, f)) and f.endswith('.xlsx')
    ]
    sheet = "Operations"
    # lists the current spreadsheet to be converted (eg. it is at number 15 of 118 spreadsheets to complete)
    count_xlsx = 1
    total_xlsx = len(input_file_list)
    acceptedFieldList = returnAcceptedFieldList()

    logger.info(
        'There are {0} excel spreadsheets to convert\n'.format(total_xlsx))

    # start script at file number that caused issue (input, start, end)
    for xlsx in islice(input_file_list, start_xlsx, None):
        logger.info(
            'Converting number {0} of {1} spreadsheets to convert'.format(
                count_xlsx, total_xlsx))
        logger.info('Spreadsheet name: %s', xlsx)
        arcpy.ExcelToTable_conversion(os.path.join(input_folder, xlsx), output_temp, sheet)
        logger.info('completed conversion...')

        ### adjust temp table's schema
        logger.info('updating temp table schema...')
        logger.info('adding new fields...')
        addFields(output_temp)

        ### list fields and their information
        fieldInfo(output_temp)

        ### update field types in preparation for the append
        fieldTypeConverter(output_temp)

        # if any fields from the spreadsheet template are missing, add them to the temp table
        # must be done after the excel to table conversion, otherwise fields will be added with no values
        # and when the conversion tries to take place it sees the field and does not copy the values over
        logger.info(
            'checking for missing fields from the spreadsheet template that need to be added...'
        )
        fieldsToAdd(acceptedFieldList, output_temp)

        logger.info('deleting original fields that cause issue with schema...')
        fieldsToDelete(acceptedFieldList, output_temp)

        # if CCI table does not exist, create it based off the temp folder
        if not arcpy.Exists(output):
            logger.info('creating CCI table...')
            arcpy.CreateTable_management(output_gdb, 'CCI', output_temp)

        # if the append fails, show what the differences are in the schema
        try:
            logger.info('appending to output table... \n')
            arcpy.Append_management(output_temp, output)
            count_xlsx += 1
            start_xlsx += 1
        except:
            logger.info(
                'APPEND FAILED due to schema differences\nSCRIPT STOPPED')
            compareTables(output, output_temp, workspace)
            exit()

        logger.info('deleting temp table... \n')
        arcpy.Delete_management(in_data=output_temp)
        logger.info('moving to next spreadsheet... \n')
Example #28
# -*- coding: utf-8 -*-
# User: liaochenchen, hygnic
# Date: 2020/2/19
# arcgis 10.6  pandas

import arcpy, os

workspace = r"G:\works"
excel_file = ur"G:\蓬溪\蓬溪县明月镇桂花桥村、广坝村、九块田村、宇安村、龙拱背村、火石村、碧山庙村、白庙村土地整理项目11\蓬溪县明月镇桂花桥村、广坝村、九块田村、宇安村、龙拱背村、火石村、碧山庙村、白庙村土地整理项目.xlsx"
excel_file2 = ur"G:\蓬溪\23.txt"
print os.path.isfile(excel_file)

arcpy.env.workspace = workspace
print "ok"
arcpy.ExcelToTable_conversion(excel_file, "we")
#Path to HLD Route
HLDroute = '{0}\\xxxxxx'.format(existingDataPath)

#Variable to store proper SQL expression
expressionSQL = "NF_ID = '{0}'".format(NFID)

#Select input NFID and use in SQL expression to export HLD Route
HLDexport = arcpy.Select_analysis(HLDroute, "in_memory\\HLDexport",
                                  expressionSQL)

#Make lyr file for table join
HLDlyr = arcpy.MakeFeatureLayer_management(HLDexport, "in_memory\\HLDlyr")

#Excel to Table
myTable = arcpy.ExcelToTable_conversion(excelSheet, "in_memory\\myTable")

#Join
joined = arcpy.AddJoin_management(HLDlyr, "SEGMENT_", myTable, "Segment_Name",
                                  "KEEP_COMMON")

#copy the FC stored in the joined variable and export to its own FC
newFC = arcpy.CopyFeatures_management(joined, "in_memory\\newFC")

#Apply correct projection to HLDexport
HLDproject = arcpy.Project_management(
    newFC, "HLDproject",
    "PROJCS['NAD_1983_StatePlane_California_V_FIPS_0405_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',6561666.666666666],PARAMETER['False_Northing',1640416.666666667],PARAMETER['Central_Meridian',-118.0],PARAMETER['Standard_Parallel_1',34.03333333333333],PARAMETER['Standard_Parallel_2',35.46666666666667],PARAMETER['Latitude_Of_Origin',33.5],UNIT['Foot_US',0.3048006096012192]]",
    "WGS_1984_(ITRF00)_To_NAD_1983",
    "PROJCS['WGS_1984_Web_Mercator_Auxiliary_Sphere',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator_Auxiliary_Sphere'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],PARAMETER['Auxiliary_Sphere_Type',0.0],UNIT['Meter',1.0]]",
    "NO_PRESERVE_SHAPE", "", "NO_VERTICAL")
fgdb = r"C:\Users\ytxu\Documents\ArcGIS\Projects\GE6211SDH\NodeAttributes_202101.gdb"
print("Importing excel to tables...")
arcpy.env.workspace = fgdb
arcpy.env.overwriteOutput = True

for f in node_attr_files:
    print(f)
    in_csv = os.path.join(folder, f)
    out_excel = os.path.join(folder, f.replace(".csv",".xlsx"))
    read_file = pd.read_csv(in_csv)
    if os.path.exists(out_excel):
        os.remove(out_excel)
    read_file.to_excel(out_excel, index = None, header=True)
    
    out_table = os.path.join(fgdb, f.replace(".csv",""))
    arcpy.ExcelToTable_conversion(out_excel, out_table, "Sheet1")


# %%
# Join Node Attributes to Bus Stop and Export as Feature Class
print("Creating joined feature classes...")
arcpy.env.workspace = fgdb
node_attr_tables = arcpy.ListTables()
arcpy.env.overwriteOutput = True
arcpy.env.qualifiedFieldNames = False

bus_stop = r"C:\Users\ytxu\Documents\ArcGIS\Projects\GE6211SDH\GE6211SDH.gdb\BusStop"
if arcpy.Exists("bus_stop_lyr"):
    arcpy.Delete_management("bus_stop_lyr")
arcpy.MakeFeatureLayer_management(bus_stop, "bus_stop_lyr")