def jbybid():

    # Initialize the FieldStorage object so we can get user input
    params = cgi.FieldStorage()

    # Run the query, quoting the user input
    con = dbconn.run_query("""
            SELECT 
            count(jack) AS number,
            split_part(jack,'-',3) AS floor,
            split_part(jack,'-',4) AS room
            FROM access_points WHERE split_part(jack,'-',2)=""" + 
            dbconn.adapt(str(params['building'].value)).getquoted() + """
            GROUP BY floor,room
            ORDER BY floor,room
    """) 

    arr = {}
    for row in con:
        if row['floor'] in arr:
            arr[row['floor']].append(row['room'])
        else:
            arr[row['floor']] = [row['room']]

    res = []
    for k in arr:
        res.append({'floor':k, 'rooms':arr[k]})

    dbconn.send_array_as_json(res)
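Aside: the concatenation above is made safe by dbconn.adapt(...).getquoted(), which is psycopg2's quoting API. For comparison, here is a minimal sketch of the same query using driver-level parameter binding; the bare psycopg2 connection is an assumption and is not part of the dbconn wrapper shown in these examples.

# Minimal sketch, assuming a psycopg2 connection is available as "conn"
# (hypothetical -- these examples only expose the dbconn wrapper).
def jacks_by_building(conn, building):
    sql = """
        SELECT count(jack) AS number,
               split_part(jack,'-',3) AS floor,
               split_part(jack,'-',4) AS room
        FROM access_points
        WHERE split_part(jack,'-',2) = %s
        GROUP BY floor, room
        ORDER BY floor, room
    """
    cur = conn.cursor()
    cur.execute(sql, (building,))  # the driver quotes the value itself
    return cur.fetchall()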
Example #2
def jbybid():

    # Initialize the FieldStorage object so we can get user input
    params = cgi.FieldStorage()

    # Run the query, quoting the user input
    con = dbconn.run_query(
        """
            SELECT 
            count(jack) AS number,
            split_part(jack,'-',3) AS floor,
            split_part(jack,'-',4) AS room
            FROM access_points WHERE split_part(jack,'-',2)=""" +
        dbconn.adapt(str(params['building'].value)).getquoted() + """
            GROUP BY floor,room
            ORDER BY floor,room
    """)

    arr = {}
    for row in con:
        if row['floor'] in arr:
            arr[row['floor']].append(row['room'])
        else:
            arr[row['floor']] = [row['room']]

    res = []
    for k in arr:
        res.append({'floor': k, 'rooms': arr[k]})

    dbconn.send_array_as_json(res)
Example #3
#!/usr/bin/env python


import glob,dbconn
from config import *

out_path = config.get('paths','solar_raster_output_dir')

print "Checking files in " + out_path

# Reset everything
q = "UPDATE sa_fishnets SET state=0"
dbconn.run_query(q)

for dirname in glob.glob(out_path + "\\*"):
    print "Processing " + dirname
    updatewhere = []
    for img in glob.glob(dirname + '\\*.img'):
        updatewhere.append(img.replace(dirname + '\\SRR_','').replace('.img',''))

    if len(updatewhere) > 0:
        print "Updating " + str(len(updatewhere))
        q = "UPDATE sa_fishnets SET state=2 WHERE state<>2 AND id IN (" + ','.join(updatewhere) + ")"
        dbconn.run_query(q)


q = "SELECT id FROM sa_fishnets"
res = dbconn.run_query(q)
ids = []
for row in res:
    ids.append(row['id'])
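Aside: the directory scan above hardcodes Windows-style '\\' separators. A minimal platform-neutral sketch of the same reconciliation pass, using os.path for joining paths and trimming names (same logic, nothing assumed beyond the os module):

import glob, os

for dirname in glob.glob(os.path.join(out_path, '*')):
    updatewhere = []
    for img in glob.glob(os.path.join(dirname, '*.img')):
        # Strip the 'SRR_' prefix and '.img' suffix to recover the fishnet id.
        updatewhere.append(os.path.basename(img).replace('SRR_', '').replace('.img', ''))
    if updatewhere:
        dbconn.run_query("UPDATE sa_fishnets SET state=2 WHERE state<>2 AND id IN (" + ','.join(updatewhere) + ")")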
Example #4
            FROM access_points
            WHERE
                split_part(jack,'-',2) ~ '^[0-9]+$'
        ) fc ON (fc.building = b.building_n)
        GROUP BY b.building_n
    ) binfo,
    (
        -- Get a jack count of each building
        SELECT
        split_part(jack,'-',2) AS building,
        count(jack) AS jackcount
        FROM access_points
        WHERE 
        split_part(jack,'-',2) ~ '^[0-9]+$'
        GROUP BY split_part(jack,'-',2)
    ) jinfo
    WHERE
    binfo.building = jinfo.building
"""

# Extract just the jacks/area and jenks it
rows = dbconn.run_query(q).fetchall()
jacksqft = []
buildings = []
for row in rows:
    if row['jacksqft'] is not None:
        jacksqft.append(row['jacksqft'])
breaks = jenks.getJenksBreaks(jacksqft, 5)

dbconn.send_array_as_json(breaks)
Example #5
def mactrac():
    query = """ SELECT 
    -- Combine metadata with building info
    buildings.cartodb_id,
    apdata.jacksqft,
    apdata.sqftjack,
    apdata.buildingarea,
    apdata.orig_area,
    apdata.jackcount,
    buildings.name,
    ST_AsGeoJSON(buildings.the_geom) AS the_geom,
    ST_AsGeoJSON(st_centroid(buildings.the_geom)) AS centroid, 
    buildings.building_n
    FROM 
    buildings 
    LEFT JOIN 
    (
        -- Get a ratio of room area to jack count
        SELECT 
        binfo.building,
        binfo.buildingarea,
        binfo.orig_area,
        jinfo.jackcount,
        (jinfo.jackcount / binfo.buildingarea) AS jacksqft,
        (binfo.buildingarea / jinfo.jackcount ) AS sqftjack
        FROM 
        (
            -- Get the area of all the floors with 
            SELECT 
                b.building_n AS building,
                CASE count(fc.floors)
                    WHEN 0 THEN 0
                    ELSE  sum(b.shape_area)
                END AS orig_area,
                CASE count(fc.floors)
                    WHEN 0 THEN 0
                    ELSE  sum(ST_Area(ST_Transform(b.the_geom,3857)))
                END AS buildingarea
            FROM
                buildings b
            LEFT JOIN (
                SELECT DISTINCT
                    split_part(jack,'-',2) AS building,
                    split_part(jack,'-',3) AS floors
                FROM access_points
                WHERE
                    split_part(jack,'-',2) ~ '^[0-9]+$'
            ) fc ON (fc.building = b.building_n)
            GROUP BY b.building_n
        ) binfo,
        (
            -- Get a jack count of each building
            SELECT
            split_part(jack,'-',2) AS building,
            count(jack) AS jackcount
            FROM access_points
            WHERE 
            split_part(jack,'-',2) ~ '^[0-9]+$'
            GROUP BY split_part(jack,'-',2)
        ) jinfo
        WHERE
        binfo.building = jinfo.building

    ) apdata ON (apdata.building = buildings.building_n)
    ORDER BY apdata.jacksqft
    """

    # Extract just the jacks/area and jenks it
    rows = dbconn.run_query(query).fetchall()
    jacksqft = []
    for row in rows:
        if row['jacksqft'] is not None:
            jacksqft.append(row['jacksqft'])
    breaks = jenks.getJenksBreaks(jacksqft, 16)

    # Add that info into the resulting data
    for row in rows:
        if row['jacksqft'] is None:
            row['jenks'] = None
        else:
            row['jenks'] = jenks.classify(row['jacksqft'], breaks)

    geojson = dbconn.array_to_geojson(rows)
    dbconn.send_array_as_json(geojson)
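Aside: jenks.getJenksBreaks and jenks.classify are project helpers that are not shown here. As a rough illustration of what classify is being asked to do (an assumption about its behavior, not its actual implementation), assigning a value to a class from a sorted list of break points could look like:

import bisect

def classify_by_breaks(value, breaks):
    # Illustrative only: return a 1-based class index, where class i means the
    # value falls at or below breaks[i-1]; anything above the last break lands
    # in the top class.
    return min(bisect.bisect_left(breaks, value) + 1, len(breaks))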
Example #6
def insertIntoDatabase(tablerows):
    queryprefix = "INSERT INTO " + config.get('postgres','schema') + "."  + tablename + " (lasfile,the_geom) VALUES "
    query = queryprefix + ",".join(tablerows) + ";\n"
    return dbconn.run_query(query)
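Aside: a hedged usage sketch for the function above. The caller is assumed to pre-render each VALUES tuple as SQL text; the file names, polygon and SRID below are purely illustrative.

# Hypothetical rows -- each entry must already be a valid SQL tuple literal.
rows = [
    "('tile_0001.las', ST_GeomFromText('POLYGON((0 0,0 1,1 1,1 0,0 0))', 26915))",
    "('tile_0002.las', ST_GeomFromText('POLYGON((1 0,1 1,2 1,2 0,1 0))', 26915))",
]
insertIntoDatabase(rows)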
Example #7
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Test if the database connection is working

import sys
from config import *
from distutils.spawn import find_executable
import dbconn

# Test 2: Do they have PostGIS?
res = dbconn.run_query("SELECT PostGIS_full_version() AS pg").fetchone()
if res is None:
    print "It looks like your database doesn't have PostGIS installed"

# Test 3: How about the specified schema?
res = dbconn.run_query(
    "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '"
    + config.get('postgres', 'schema') + "'").fetchone()
if res is None:
    print "I couldn't find the specified schema"

for path in config.items('paths'):
    if not os.path.isdir(path[1]):
        print "Path does not exist -- " + ":".join(path)

if find_executable('blast2dem') is None:
    print "blast2dem.exe was not found in your PATH"

if find_executable('lasindex') is None:
    print "lasindex.exe was not found in your PATH"
Example #8
# Enable debugging:
#import pdb; pdb.set_trace()

# Loop through unique .img files in each directory
for img in glob.glob(demdir + '\\*.img'):
    imgnumber = img.replace(demdir + '\\','').replace('.img','')
    print
    print imgnumber

    # 1) Query source lidar tile that centroid of DSM tile falls within
    q = """
        SELECT l.tile,l.q250k FROM dem_fishnets d,lidar_bbox l WHERE ST_WITHIN(ST_CENTROID(d.the_geom),l.the_geom) AND d.id=""" + imgnumber + """
    """
    
    t = dbconn.run_query(q)
    
    rec = t.fetchone()
    if rec is None:
        print "Centroid does not fall in a lidar tile"

        # 2) Centroid not in laz tile so next query first tile that intersects
        q = """
            SELECT l.tile,l.q250k FROM dem_fishnets d,lidar_bbox l WHERE ST_INTERSECTS(d.the_geom,l.the_geom) AND d.id=""" + imgnumber + """
        """
        t = dbconn.run_query(q)
        rec = t.fetchone()
        print rec

    laz = str(rec['tile']) + " - " + str(rec['q250k'])
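Aside: the two queries above implement a fallback, preferring the lidar tile that contains the DSM tile's centroid and otherwise taking the first tile that intersects it. A minimal sketch wrapping that logic in a helper (the function name is mine; the int() cast is just a light guard on the id parsed from the file name):

def lookup_lidar_tile(dem_id):
    # Prefer the tile containing the DSM tile's centroid...
    q = ("SELECT l.tile, l.q250k FROM dem_fishnets d, lidar_bbox l "
         "WHERE ST_Within(ST_Centroid(d.the_geom), l.the_geom) AND d.id = " + str(int(dem_id)))
    rec = dbconn.run_query(q).fetchone()
    if rec is None:
        # ...otherwise fall back to any tile that intersects it.
        q = ("SELECT l.tile, l.q250k FROM dem_fishnets d, lidar_bbox l "
             "WHERE ST_Intersects(d.the_geom, l.the_geom) AND d.id = " + str(int(dem_id)))
        rec = dbconn.run_query(q).fetchone()
    return rec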
Example #9
import sys,os

sys.path.insert(0, '../../steps_sql') # Add our steps_sql dir to our library path
import dbconn

print "Content-Type: text/html; charset=utf-8"
print 

q = """
SELECT state,AVG(time),COUNT(state)
FROM 
sa_fishnets 
GROUP BY state
"""

cur = dbconn.run_query(q)
obj = dbconn.cursor_to_object(cur,'state')

totalRecords = 0
for k in obj:
    totalRecords += obj[k]['count']

for x in range(-3,3):
    if x not in obj:
        obj[x] = {'count':0,'avg':0}


print """
<!DOCTYPE HTML>
<html>
<head>
Example #10
            FROM access_points
            WHERE
                split_part(jack,'-',2) ~ '^[0-9]+$'
        ) fc ON (fc.building = b.building_n)
        GROUP BY b.building_n
    ) binfo,
    (
        -- Get a jack count of each building
        SELECT
        split_part(jack,'-',2) AS building,
        count(jack) AS jackcount
        FROM access_points
        WHERE 
        split_part(jack,'-',2) ~ '^[0-9]+$'
        GROUP BY split_part(jack,'-',2)
    ) jinfo
    WHERE
    binfo.building = jinfo.building
"""

# Extract just the jacks/area and jenks it
rows = dbconn.run_query(q).fetchall()
jacksqft = []
buildings = []
for row in rows:
    if row['jacksqft'] is not None:
        jacksqft.append(row['jacksqft'])
breaks = jenks.getJenksBreaks(jacksqft, 5)

dbconn.send_array_as_json(breaks)
Example #11
                         ", but didn't")
        print "\n\t\t" + command
        return False

    # Remove empty files. Will happen where fishnet is off the map
    # 750703 -- 748kb files when they're solid black (also no results)
    if re.match('.*bounding box. skipping.*', output, re.DOTALL) or int(
            os.stat(outputdir + "\\" + filename).st_size) == 750703:
        sys.stdout.write("\t\t\tNo data found, not saving tile.")
        os.unlink(outputdir + "\\" + filename)
        return True

    return True


res = dbconn.run_query(reserveQuery).fetchall()
count = 0
average = 0
while len(res) > 0:
    for row in res:
        count += 1

        sys.stdout.write("\nRunning blast2dem for row " + str(row['id']) +
                         "\t\t\t")
        starttime = time.time()

        # The long lists of files was making the command too long for PowerShell to handle
        # so instead we write the list of file names to a temp file and delete the file
        # when we're done
        tmp = tempfile.NamedTemporaryFile(delete=False,
                                          dir=config.get('paths', 'temp_dir'))
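Aside: the comment above explains that the per-tile lidar file list goes into a temporary file so the command line stays short. Below is a hedged sketch of that hand-off, mirroring the lidarlist query visible in a later example; the -lof (list-of-files) flag and the output name are assumptions about the blast2dem invocation, not confirmed by this snippet.

# Hedged sketch -- the 'lasfile' column name comes from the lidar_bbox table
# defined below; the -lof flag and the output name are assumptions.
lidares = dbconn.run_query(lidarlist.replace("DEMID", str(row['id']))).fetchall()
for lidar in lidares:
    tmp.write(lidar['lasfile'] + "\n")
tmp.close()
command = "blast2dem -lof " + tmp.name + " -o " + outputdir + "\\" + str(row['id']) + ".img"
# ... run the command, then remove the temp file ...
os.unlink(tmp.name)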
Example #12
sqls.append("CREATE INDEX bbox_gist ON " + schema + "." +
            config.get('postgres', 'lidar_bbox_table') +
            " USING gist (the_geom)")

sqls.append("""
CREATE TABLE """ + schema + "." + config.get('postgres', 'sa_fishnet_table') +
            """ 
(
  id serial,
  the_geom geometry(Polygon,""" + config.get('projection', 'srid') + """),
  state integer DEFAULT 0,
  "time" double precision,
  CONSTRAINT sa_fish2_pkey PRIMARY KEY (id)
) WITH (OIDS=FALSE)
""")

sqls.append("CREATE INDEX sa_fish2_geom_gist ON " + schema + "." +
            config.get('postgres', 'sa_fishnet_table') +
            " USING gist (the_geom)")
sqls.append("CREATE INDEX sa_fishnet_state ON " + schema + "." +
            config.get('postgres', 'sa_fishnet_table') +
            " USING btree (state)")

for sql in sqls:
    print sql
    print ""
    if not dbconn.run_query(sql):
        print "ERROR!"
        exit()

print "Done!"
Example #13
    if not os.path.isfile(outputdir + "\\" + filename):
        sys.stdout.write("\t\t\t" + output)
        sys.stdout.write("\t\t\tExpected to find output file " + filename + ", but didn't")
        print "\n\t\t" + command 
        return False

    # Remove empty files. Will happen where fishnet is off the map
    # 750703 -- 748kb files when they're solid black (also no results)
    if re.match('.*bounding box. skipping.*',output,re.DOTALL) or int(os.stat(outputdir + "\\" + filename).st_size) == 750703:
        sys.stdout.write("\t\t\tNo data found, not saving tile.")
        os.unlink(outputdir + "\\" + filename)
        return True

    return True

res = dbconn.run_query(reserveQuery).fetchall()
count = 0
average = 0
while len(res) > 0:
    for row in res:
        count += 1

        sys.stdout.write("Running blast2dem for row " + str(row['id']) + "\t\t\t")
        starttime = time.time()

        # The long lists of files was making the command too long for PowerShell to handle 
        # so instead we write the list of file names to a temp file and delete the file
        # when we're done
        tmp = tempfile.NamedTemporaryFile(delete=False,dir=config.get('paths','temp_dir'))
        lidares = dbconn.run_query(lidarlist.replace("DEMID",str(row['id']))).fetchall()
        for lidar in lidares:
Example #14
  the_geom geometry(Polygon,""" + config.get('projection','srid') + """),
  CONSTRAINT lidar_bbox_pkey PRIMARY KEY (id),
  CONSTRAINT lidar_bbox_unique_file UNIQUE (lasfile)
) WITH (OIDS=FALSE)
""")

sqls.append("CREATE INDEX bbox_gist ON " + schema + "." + config.get('postgres','lidar_bbox_table') + " USING gist (the_geom)")

sqls.append("""
CREATE TABLE """ + schema + "." + config.get('postgres','sa_fishnet_table') + """ 
(
  id serial,
  the_geom geometry(Polygon,""" + config.get('projection','srid') + """),
  state integer DEFAULT 0,
  "time" double precision,
  CONSTRAINT sa_fish2_pkey PRIMARY KEY (id)
) WITH (OIDS=FALSE)
""")

sqls.append("CREATE INDEX sa_fish2_geom_gist ON " + schema +  "." + config.get('postgres','sa_fishnet_table') + " USING gist (the_geom)")
sqls.append("CREATE INDEX sa_fishnet_state ON " + schema + "." + config.get('postgres','sa_fishnet_table') + " USING btree (state)")

for sql in sqls:
    print sql
    print ""
    if not dbconn.run_query(sql):
        print "ERROR!"
        exit()

print "Done!"
Example #15
    ST_XMax(the_geom) as xmax,
    ST_YMax(the_geom) as ymax,
    ST_Y(ST_Transform(ST_CENTROID(the_geom),4326)) AS lat
"""

completeQuery = """
    UPDATE 
    sa_fishnets sa
    SET state=NEWSTATE,
    time=RUNTIME
    WHERE
    sa.id=DEMID
"""

# connect to database using dbconn connection script
res = dbconn.run_query(reserveQuery).fetchall()
count = 0
average = 0

while len(res) > 0:
    for row in res:
        count += 1

        sys.stdout.write("Running Area Solar Radiation for row " + str(row['id']) + "    ")
        start_time = time.clock()

        # add buffer/subtract distance to extent boundaries
        x_min = int(row['xmin']) - buff
        y_min = int(row['ymin']) - buff
        x_max = int(row['xmax']) + buff
        y_max = int(row['ymax']) + buff
        latitude = float(row['lat'])

        # set processing environment
        arcpy.env.extent = arcpy.sa.Extent(x_min, y_min, x_max, y_max)

        # lookup dsm tile id
        demno = 0
        q = """
            SELECT d.id FROM dem_fishnets d,sa_fishnets s WHERE ST_WITHIN(s.the_geom,d.the_geom) AND s.id=""" + str(
            row['id']) + """
        """
        t = dbconn.run_query(q)
        for s in t:
            demno = str(s['id'])

        #clipped_solar_raster_dir = out_path + os.sep + 'SRR_' + str(row['id'] / 1000 * 1000).zfill(4) + os.sep
        clipped_solar_raster_dir = out_path + os.sep + demno + os.sep
        clipped_solar_raster = clipped_solar_raster_dir + str(
            row['id']) + '.img'  # what raster format do we want???

        if not os.path.exists(clipped_solar_raster_dir):
            os.mkdir(clipped_solar_raster_dir)

        if os.path.isfile(clipped_solar_raster):
            print "WARNING: Overwriting existing solar raster image at " + clipped_solar_raster
            os.unlink(clipped_solar_raster)
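Aside: the step that follows this setup is not shown in the snippet. Purely as a hedged sketch of the kind of call the extent, latitude and output path are being prepared for, an ArcGIS Spatial Analyst invocation could look like the following; the DSM path is hypothetical and the argument list is trimmed to the basics.

# Hedged sketch only -- dsm_path is hypothetical; AreaSolarRadiation takes
# the input surface raster first, then an optional latitude.
dsm_path = out_path + os.sep + 'dsm' + os.sep + demno + '.img'
solar = arcpy.sa.AreaSolarRadiation(dsm_path, latitude)
solar.save(clipped_solar_raster)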
#!/usr/bin/env python


import glob,dbconn
from config import *

out_path = config.get('paths','solar_raster_output_dir')

print "Checking files in " + out_path

for dirname in glob.glob(out_path + "\\*"):
    print "Processing " + dirname
    updatewhere = []
    for img in glob.glob(dirname + '\\*.img'):
        updatewhere.append(img.replace(dirname + '\\SRR_','').replace('.img',''))

    if len(updatewhere) > 0:
        print "Updating " + str(len(updatewhere))
        q = "UPDATE sa_fishnets SET state=2 WHERE state<>2 AND id IN (" + ','.join(updatewhere) + ")"
        dbconn.run_query(q)
Example #18
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Test if the database connection is working

import sys
from config import *
from distutils.spawn import find_executable
import dbconn

# Test 2: Do they have PostGIS?
res = dbconn.run_query("SELECT PostGIS_full_version() AS pg").fetchone()
if res is None:
    print "It looks like your database doesn't have PostGIS installed"

# Test 3: How about the specified schema?
res = dbconn.run_query(
    "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '" + config.get("postgres", "schema") + "'"
).fetchone()
if res is None:
    print "I couldn't find the specified schema"


for path in config.items("paths"):
    if not os.path.isdir(path[1]):
        print "Path does not exist -- " + ":".join(path)

if find_executable("blast2dem") == None:
    print "blast2dem.exe was not found in your PATH"