Example #1
def addGDBTable2postgres_histo(currentobject, eu, eu_col):
    print 'addGDBTable2postgres_histo..................................................'
    print currentobject

    ## set up the SQLAlchemy engine
    engine = create_engine(
        'postgresql://*****:*****@144.92.235.105:5432/usxp')

    # list all field names in the table
    fields = [f.name for f in arcpy.ListFields(currentobject)]

    # convert the table to a NumPy structured array
    arr = arcpy.da.TableToNumPyArray(currentobject, fields)
    print arr

    #### convert numpy array to pandas dataframe
    df = pd.DataFrame(data=arr)
    ### drop the auto-generated OBJECTID column
    del df['OBJECTID']
    print df

    ## perform a pseudo pivot table (unpivot wide columns to long rows)
    df = pd.melt(df,
                 id_vars=["LABEL"],
                 var_name="atlas_st",
                 value_name="count")

    df.columns = map(str.lower, df.columns)

    print df

    #### format columns in df #########################
    ## remove the 'atlas_' prefix (note: str.strip removes a character set, not a prefix)
    df['atlas_st'] = df['atlas_st'].map(lambda x: x.replace('atlas_', ''))
    ## remove the thousands comma from the year label
    df['value'] = df['label'].str.replace(',', '')

    print df

    print 'pixel-to-acres factor:', gen.getPixelConversion2Acres(30)

    #### add an acres column derived from the pixel count
    df['acres'] = df['count'] * gen.getPixelConversion2Acres(30)

    tablename = currentobject.split('\\')[-1]
    print 'tablename', tablename

    print df

    df.to_sql(tablename, engine, schema='zonal_hist')
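Every example converts pixel counts to area via gen.getPixelConversion2Acres (or the hectares variant). The gen module is not part of this listing; a minimal sketch consistent with how it is called, assuming res is the pixel edge length in meters:

def getPixelConversion2Acres(res):
    # a res m x res m pixel covers res*res m^2; 1 m^2 = 0.000247105 acres
    return res * res * 0.000247105

def getPixelConversion2Hectares(res):
    # 10,000 m^2 per hectare
    return (res * res) / 10000.0

For res=30 this yields about 0.2224 acres per pixel, close to the 0.22227 constant hardcoded in Example #4.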
Example #2
def addAcresField(tablename, schema, res):
    #this is a sub function for addGDBTable2postgres()

    cur = conn.cursor()

    #### DDL: add the acres column (the original passed an extra, unused format argument)
    query1 = 'ALTER TABLE {}.{} ADD COLUMN acres bigint'.format(
        schema, tablename)
    print query1
    cur.execute(query1)
    conn.commit()

    #### DML: populate acres from the pixel count
    cur.execute('UPDATE {}.{} SET acres = count * {}'.format(
        schema, tablename, gen.getPixelConversion2Acres(res)))
    conn.commit()

    # mirror this table's acres into the consolidated sa.acres_ytc table
    cur.execute(
        'ALTER TABLE sa.acres_ytc ADD COLUMN {} bigint'.format(tablename))
    conn.commit()

    query2 = ('UPDATE sa.acres_ytc SET {0} = instance.acres '
              'FROM (SELECT value, acres FROM sa.{0}) AS instance '
              'WHERE acres_ytc.value = instance.value').format(tablename)
    cur.execute(query2)
    conn.commit()

    # drop the staging table now that its values have been merged
    query3 = 'DROP TABLE sa.{}'.format(tablename)
    cur.execute(query3)
    conn.commit()
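All of these helpers splice schema and table names into SQL with str.format, which is fine for trusted, pipeline-generated names but unsafe otherwise. A sketch of the same DDL using psycopg2's identifier composition (the function name is hypothetical):

from psycopg2 import sql

def addAcresFieldSafe(conn, schema, tablename):
    # compose identifiers safely instead of interpolating them as strings
    cur = conn.cursor()
    cur.execute(sql.SQL('ALTER TABLE {}.{} ADD COLUMN acres bigint').format(
        sql.Identifier(schema), sql.Identifier(tablename)))
    conn.commit()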
Example #3
def addRasterAttrib2postgres_specific(path, filename, database, schema):
    print path
    print filename
    # set up the SQLAlchemy engine
    engine = create_engine(
        'postgresql://*****:*****@144.92.235.105:5432/{}'.format(
            database))

    # list all field names in the table
    fields = [f.name for f in arcpy.ListFields(path)]
    print 'fields:', fields

    # convert the table to a NumPy structured array
    arr = arcpy.da.TableToNumPyArray(path, fields)
    print arr

    # convert numpy array to pandas dataframe
    df = pd.DataFrame(data=arr)

    df.columns = map(str.lower, df.columns)

    ## add acres column
    df['acres'] = df['count'] * gen.getPixelConversion2Acres(30)
    ## add hectares column
    df['hectares'] = df['count'] * gen.getPixelConversion2Hectares(30)

    print 'df-----------------------', df

    # export the dataframe to CSV; note the engine above is unused by this variant,
    # and the original formatted the full GDB path (not the bare filename) into the CSV path
    df.to_csv(
        'D:\\projects\\usxp\\deliverables\\s35\\csv\\{}.csv'.format(filename),
        index=False,
        header=True)
Example #4
def addGDBTable2postgres(currentobject, eu, eu_col):
    print currentobject

    ## set up the SQLAlchemy engine
    engine = create_engine(
        'postgresql://*****:*****@144.92.235.105:5432/usxp')

    # list all field names in the table
    fields = [f.name for f in arcpy.ListFields(currentobject)]

    # convert the table to a NumPy structured array
    arr = arcpy.da.TableToNumPyArray(currentobject, fields)
    print arr

    # convert numpy array to pandas dataframe
    df = pd.DataFrame(data=arr)

    df.columns = map(str.lower, df.columns)

    print 'pixel-to-acres factor:', gen.getPixelConversion2Acres(30)

    #### add acres column (0.22227 is a hardcoded approximation of the 30 m pixel-to-acres factor)
    df['acres'] = df['count'] * 0.22227

    print 'df-----------------------', df

    schema = 'deliverables'  # note: not used below
    tablename = currentobject.split('\\')[-1]
    print 'tablename', tablename

    tryit(df, tablename, eu, eu_col)
Example #5
def addGDBTable2postgres_histo(gdb, pgdb, schema, table):
    print 'addGDBTable2postgres_histo..................................................'
    # set the workspace environment to the local file geodatabase
    arcpy.env.workspace = gdb

    ## set up the SQLAlchemy engine
    engine = create_engine('postgresql://*****:*****@144.92.235.105:5432/{}'.format(pgdb))

    # list all field names in the table
    fields = [f.name for f in arcpy.ListFields(table)]

    # convert the table to a NumPy structured array
    arr = arcpy.da.TableToNumPyArray(table, fields)
    print arr

    #### convert numpy array to pandas dataframe
    df = pd.DataFrame(data=arr)
    ### drop the auto-generated OBJECTID column
    del df['OBJECTID']

    ## perform a pseudo pivot table (unpivot wide columns to long rows)
    # df = pd.melt(df, id_vars=["LABEL"], var_name="atlas_st", value_name="count")
    df = pd.melt(df, id_vars=["LABEL"], value_name="count")

    df.columns = map(str.lower, df.columns)

    #### add acres column derived from the pixel count
    df['acres'] = df['count'] * gen.getPixelConversion2Acres(30)

    tablename = table.split('\\')[-1]
    print 'tablename', tablename

    df.to_sql(tablename, engine, schema=schema)
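To make the pseudo-pivot step concrete, here is a toy run of the same pd.melt call used above (column names are assumptions mirroring the zonal-histogram tables):

wide = pd.DataFrame([['2008', 10, 30], ['2009', 20, 40]],
                    columns=['LABEL', 'atlas_19', 'atlas_55'])
df_long = pd.melt(wide, id_vars=['LABEL'], var_name='atlas_st', value_name='count')
print df_long
#   LABEL  atlas_st  count
# 0  2008  atlas_19     10
# 1  2009  atlas_19     20
# 2  2008  atlas_55     30
# 3  2009  atlas_55     40

Note that df.to_sql writes the DataFrame index as an extra column unless index=False is passed, and fails on rerun unless if_exists='replace' is given (as in Example #7).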
Example #6
def addGDBTable2postgres_histo(gdb, tablename, eu_col):
    print 'addGDBTable2postgres_histo..................................................'
    print tablename

    arcpy.env.workspace = "D:\\projects\\usxp\\deliverables\\maps\\{0}\\{0}.gdb".format(
        gdb)

    ## set up the SQLAlchemy engine
    engine = create_engine(
        'postgresql://*****:*****@144.92.235.105:5432/usxp_deliverables')

    # list all field names in the table
    fields = [f.name for f in arcpy.ListFields(tablename)]

    # convert the table to a NumPy structured array
    arr = arcpy.da.TableToNumPyArray(tablename, fields)
    print arr

    #### convert numpy array to pandas dataframe
    df = pd.DataFrame(data=arr)
    ### drop the auto-generated OBJECTID column
    del df['OBJECTID']
    print df

    ## perform a pseudo pivot table (unpivot wide columns to long rows)
    df = pd.melt(df, id_vars=["LABEL"], var_name=eu_col, value_name="count")

    df.columns = map(str.lower, df.columns)

    print df

    #### format columns in df #########################
    ## remove the 'atlas_' prefix (note: str.strip removes a character set, not a prefix)
    df['state'] = df[eu_col].map(lambda x: x.replace('atlas_', ''))
    ## remove comma from year (disabled in this variant)
    # df['value'] = df['label'].str.replace(',', '')

    print df

    print 'pixel-to-acres factor:', gen.getPixelConversion2Acres(30)

    #### add acres column derived from the pixel count
    df['acres'] = df['count'] * gen.getPixelConversion2Acres(30)

    print df

    df.to_sql(tablename, engine, schema=gdb)
Example #7
def addRasterAttrib2postgres_specific(graphic_type, yxc, cdl_type, database, schema):
    filename = 's35_{0}_{1}'.format(cdl_type, graphic_type)
    print filename
    path='D:\\projects\\usxp\\deliverables\\s35\\conus.gdb\\{0}'.format(filename)
    print path

    # set up the SQLAlchemy engine
    engine = create_engine('postgresql://*****:*****@144.92.235.105:5432/{}'.format(database))
    
    # list all field names in the table
    fields = [f.name for f in arcpy.ListFields(path)]
    print 'fields:', fields

    # convert the table to a NumPy structured array
    arr = arcpy.da.TableToNumPyArray(path, fields)
    print arr
    
    # convert numpy array to pandas dataframe
    df = pd.DataFrame(data=arr)

    if graphic_type == 'sm':
        df.columns = ['objectid','value','count','state','year','label']

        ## left-pad single-digit state codes with a leading zero
        df['state'] = np.where(df['state'].astype(str).str.len() == 1,
                               '0' + df['state'].astype(str),
                               df['state'].astype(str))
        ## add acres column
        df['acres'] = df['count'] * gen.getPixelConversion2Acres(30)

        print 'df-----------------------', df

        # use pandas to_sql to import the table into postgres; replace drops any prior version
        df.to_sql(filename, engine, schema=schema, if_exists='replace')


    elif graphic_type == 'conus':
        print 'processing the conus table'
        df.columns = ['objectid', 'value', 'count', 'year', 'label']

        ## add acres column
        df['acres'] = df['count'] * gen.getPixelConversion2Acres(30)

        print 'df-----------------------', df

        # use pandas to_sql to import the table into postgres; replace drops any prior version
        df.to_sql(filename, engine, schema=schema, if_exists='replace')
Example #8
def addAcresField(tablename, schema):
    #this is a sub function for addGDBTable2postgres()
    
    cur = conn.cursor()
    
    # DDL: add the acres column
    cur.execute('ALTER TABLE ' + schema + '.' + tablename + ' ADD COLUMN acres bigint')

    # DML: populate acres from the pixel count; str() avoids concatenating a number
    # onto a string, and data is a project-level config dict holding the resolution
    cur.execute('UPDATE ' + schema + '.' + tablename + ' SET acres = count * ' +
                str(gen.getPixelConversion2Acres(data['global']['res'])))

    conn.commit()
    print "Records created successfully"
    conn.close()
Example #9
def addAcresField(schema, tablename, res):
    #this is a sub function for addGDBTable2postgres()
    
    cur = conn.cursor()
    
    #### DDL: add the acres column
    query = 'ALTER TABLE {}.{} ADD COLUMN acres bigint'.format(schema, tablename)
    print query
    cur.execute(query)

    print int(tablename.split("_")[0][1:])  # series number parsed from the table name, e.g. 's35' -> 35

    #### DML: populate acres from the pixel count
    cur.execute("UPDATE {0}.{1} SET acres = count * {2}".format(
        schema, tablename, gen.getPixelConversion2Acres(res)))
    conn.commit() 
Example #10
def addAcresField_now(schema, tablename, res):
    #this is a sub function for addGDBTable2postgres()

    cur = conn.cursor()

    #### DDL: add the acres column (the original passed an extra, unused format argument)
    query1 = 'ALTER TABLE {}.{} ADD COLUMN acres bigint'.format(
        schema, tablename)
    print query1
    cur.execute(query1)
    conn.commit()

    #### DML: populate acres from the pixel count
    cur.execute('UPDATE {}.{} SET acres = count * {}'.format(
        schema, tablename, gen.getPixelConversion2Acres(res)))
    conn.commit()
Example #11
def addAcresField(schema, tablename, res, total):
    #this is a sub function for addGDBTable2postgres()

    cur = conn.cursor()

    #### DDL: add acres, percent, and series columns
    query1 = 'ALTER TABLE {}.{} ADD COLUMN acres bigint, ADD COLUMN perc numeric, ADD COLUMN series text'.format(
        schema, tablename)
    print query1
    cur.execute(query1)

    #### DML: populate the new columns (count is cast to numeric so perc is not truncated by integer division)
    cur.execute(
        "UPDATE {0}.{1} SET acres=count*{2}, perc=(count::numeric/{3})*100, series='{4}'"
        .format(schema, tablename, gen.getPixelConversion2Acres(res), total,
                tablename.split("_")[0]))
    conn.commit()
Example #12
def addAcresField(schema, tablename, res):
    # this is a sub function for addGDBTable2postgres()

    try:
        conn = psycopg2.connect("dbname='usxp_deliverables' user='******' host='144.92.235.105' password='******'")
    except psycopg2.Error:
        print "I am unable to connect to the database"
        raise  # without re-raising, the cursor below would fail with a NameError

    cur = conn.cursor()

    #### DDL: add the acres column
    query = 'ALTER TABLE {}.{} ADD COLUMN acres bigint'.format(schema, tablename)
    print query
    cur.execute(query)

    #### DML: populate acres from the pixel count
    cur.execute("UPDATE {0}.{1} SET acres = count * {2}".format(
        schema, tablename, gen.getPixelConversion2Acres(res)))
    conn.commit()
Example #13
def addAcresField(schema, tablename, yxc, res):
    #this is a sub function for addGDBTable2postgres()
    
    cur = conn.cursor()
    
    #### DDL: add acres, series, yxc, and series_order columns
    ddl_query = ('ALTER TABLE {}.{} ADD COLUMN acres bigint, ADD COLUMN series text, '
                 'ADD COLUMN yxc text, ADD COLUMN series_order integer').format(schema, tablename)
    print ddl_query
    cur.execute(ddl_query)


    #### DML: populate the new columns; series and series_order are parsed from the table name (e.g. 's35' -> 35)
    dml_query = "UPDATE {0}.{1} SET acres=count*{2}, series='{3}', yxc='{4}', series_order={5}".format(
        schema, tablename, gen.getPixelConversion2Acres(res),
        tablename.split("_")[0], yxc, int(tablename.split("_")[0][1:]))
    print dml_query
    cur.execute(dml_query)
    
    conn.commit()
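A hypothetical invocation of this last variant (the schema and table name are made up, chosen so the name parsing above resolves):

addAcresField('zonal_hist', 's35_ytc_state', 'ytc', 30)
# series='s35', yxc='ytc', series_order=35, acres = count * gen.getPixelConversion2Acres(30)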