Example #1
def wwa(grids, valid, iarchive):
    """An attempt at rasterizing the WWA"""
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    table = "warnings_%s" % (valid.year, )
    df = GeoDataFrame.from_postgis("""
        SELECT geom as geom, phenomena ||'.'|| significance as code, w.ugc from
        """ + table + """ w JOIN ugcs u on (w.gid = u.gid) WHERE
        issue < %s and expire > %s
        and w.wfo in ('FSD', 'ARX', 'DVN', 'DMX', 'EAX', 'FSD', 'OAX', 'MPX')
    """,
                                   pgconn,
                                   params=(valid, valid),
                                   index_col=None)
    transform = transform_from_corner(reference.IA_WEST, reference.IA_NORTH,
                                      0.01, 0.01)
    df['i'] = 1
    for vtec in df['code'].unique():
        df2 = df[df['code'] == vtec]
        shapes = ((geom, value) for geom, value in zip(df2.geometry, df2.i))
        stradd = "%s," % (vtec, )
        arr = features.rasterize(shapes=shapes,
                                 fill=0,
                                 transform=transform,
                                 out_shape=grids['wawa'].shape)
        shp = grids['wawa'].shape
        for i in range(shp[0]):
            for j in range(shp[1]):
                if arr[i, j] > 0:
                    grids['wawa'][i, j] = grids['wawa'][i, j] + stradd
Example #2
def get_df(lname):
    conn = get_connection('datastore')

    gc_df = pd.read_sql('select * from geometry_columns',
                        conn,
                        index_col='f_table_name')
    srid = gc_df.loc[lname].srid
    gtype = gc_df.loc[lname].type
    gname = gc_df.loc[lname].f_geometry_column

    if gtype in ('POLYGON', 'MULTIPOLYGON'):
        sql = "select *, st_buffer({}, 0) as the_geom_clean from {} where {} is not null".format(
            gname, lname, gname)
    else:
        sql = "select *, {} as the_geom_clean from {} where {} is not null".format(
            gname, lname, gname)
    # print sql
    gdf = GeoDataFrame.from_postgis(sql,
                                    conn,
                                    geom_col='the_geom_clean',
                                    crs={
                                        'init': 'epsg:{}'.format(srid),
                                        'no_defs': True
                                    })
    # print lname, srid
    gdf.to_crs(epsg="3035", inplace=True)
    return gdf
Example #3
    def test_from_postgis_default(self, connection_postgis, df_nybb):
        con = connection_postgis
        create_postgis(con, df_nybb)

        sql = "SELECT * FROM nybb;"
        df = GeoDataFrame.from_postgis(sql, con)

        validate_boro_df(df, case_sensitive=False)
Example #4
def q_to_obj(dbname,
             query,
             db_api='psql',
             geomCol=None,
             epsg=None,
             of='df',
             cols=None,
             dbset='default'):
    """
    Query database and convert data to Pandas Dataframe/GeoDataFrame
    
    APIs available:
    * psql;
    * sqlite;
    * mysql;

    Output format options ("of" parameter):
    * df (Pandas Dataframe);
    * dict (Python Dict);
    """

    if not query.startswith('SELECT '):
        # Assuming query is a table name
        from glass.pys import obj_to_lst
        from glass.ng.prop.sql import cols_name

        cols = cols_name(dbname, query) if not cols else \
            obj_to_lst(cols)

        query = "SELECT {} FROM {}".format(
            ", ".join(["{t}.{c} AS {c}".format(t=query, c=i) for i in cols]),
            query)

    if not geomCol:
        import pandas
        from glass.ng.sql.c import alchemy_engine

        pgengine = alchemy_engine(dbname, api=db_api, dbset=dbset)

        df = pandas.read_sql(query, pgengine, columns=None)

    else:
        from geopandas import GeoDataFrame
        from glass.ng.sql.c import sqlcon

        con = sqlcon(dbname, sqlAPI='psql', dbset=dbset)

        df = GeoDataFrame.from_postgis(
            query,
            con,
            geom_col=geomCol,
            crs="EPSG:{}".format(str(epsg)) if epsg else None)

    if of == 'dict':
        df = df.to_dict(orient="records")

    return df
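A minimal usage sketch of q_to_obj, assuming a reachable PostgreSQL database configured for the glass connection helpers; the database name, table, and geometry column below are hypothetical placeholders.

# Hypothetical call: 'gisdb', the roads table and its 'geom' column are placeholders.
roads = q_to_obj(
    'gisdb',
    "SELECT gid, name, geom FROM roads",
    db_api='psql',
    geomCol='geom',  # a non-None geomCol routes through GeoDataFrame.from_postgis
    epsg=4326,
    of='df',
)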
Example #5
    def test_from_postgis_default(self):
        if not self.run_db_test:
            raise unittest.case.SkipTest()

        with psycopg2.connect(dbname='test_geopandas') as con:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con)

        self._validate_sql(df)
Example #6
    def test_from_postgis_custom_geom_col(self, connection_postgis, df_nybb):
        con = connection_postgis
        geom_col = "the_geom"
        create_postgis(con, df_nybb, geom_col=geom_col)

        sql = "SELECT * FROM nybb;"
        df = GeoDataFrame.from_postgis(sql, con, geom_col=geom_col)

        validate_boro_df(df, case_sensitive=False)
Example #8
def _get_gdf(sql: str, uri: str = DEFAULT_DB_URI) -> GeoDataFrame:
    """
    Use geopandas to get a geodataframe from a query
    """

    connection = psycopg2.connect(uri)

    gdf = GeoDataFrame.from_postgis(sql, connection, geom_col="geom")

    connection.close()

    return gdf
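A minimal usage sketch of _get_gdf, assuming DEFAULT_DB_URI points at a reachable PostGIS database; the table and column names are placeholders.

# Hypothetical query; 'city_limits' and its 'geom' column are placeholders.
boundaries = _get_gdf("SELECT gid, name, geom FROM city_limits")
print(len(boundaries), boundaries.crs)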
Example #9
    def test_from_postgis_custom_geom_col(self):
        if not self.run_db_test:
            raise unittest.case.SkipTest()

        with psycopg2.connect(dbname='test_geopandas') as con:
            sql = """SELECT
                     borocode, boroname, shape_leng, shape_area,
                     geom AS __geometry__
                     FROM nybb;"""
            df = GeoDataFrame.from_postgis(sql, con, geom_col='__geometry__')

        self._validate_sql(df)
    def test_from_postgis_default(self):
        con = connect("test_geopandas")
        if con is None or not create_postgis(self.df):
            raise pytest.skip()

        try:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con)
        finally:
            con.close()

        validate_boro_df(df, case_sensitive=False)
Example #11
    def test_from_postgis_default(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con)
        finally:
            con.close()

        validate_boro_df(self, df)
    def test_from_postgis_default(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise pytest.skip()

        try:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con)
        finally:
            con.close()

        validate_boro_df(df, case_sensitive=False)
Example #14
    def test_from_postgis_default(self):
        con = tests.util.connect('test_geopandas')
        if con is None or not tests.util.create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con)
        finally:
            con.close()

        tests.util.validate_boro_df(self, df)
    def test_from_postgis_custom_geom_col(self):
        con = connect('test_geopandas')
        geom_col = "the_geom"
        if con is None or not create_postgis(self.df, geom_col=geom_col):
            raise pytest.skip()

        try:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con, geom_col=geom_col)
        finally:
            con.close()

        validate_boro_df(df, case_sensitive=False)
Example #18
    def test_from_postgis_default(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con)
        finally:
            if PANDAS_NEW_SQL_API:
                # It's not really a connection, it's an engine
                con = con.connect()
            con.close()

        validate_boro_df(self, df)
Example #19
    def test_from_postgis_custom_geom_col(self):
        con = tests.util.connect('test_geopandas')
        if con is None or not tests.util.create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = """SELECT
                     borocode, boroname, shape_leng, shape_area,
                     geom AS __geometry__
                     FROM nybb;"""
            df = GeoDataFrame.from_postgis(sql, con, geom_col='__geometry__')
        finally:
            con.close()

        tests.util.validate_boro_df(self, df)
Example #20
    def test_from_postgis_custom_geom_col(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = """SELECT
                     borocode, boroname, shape_leng, shape_area,
                     geom AS __geometry__
                     FROM nybb;"""
            df = GeoDataFrame.from_postgis(sql, con, geom_col='__geometry__')
        finally:
            con.close()

        validate_boro_df(self, df)
    def test_from_postgis_custom_geom_col(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise pytest.skip()

        try:
            sql = """SELECT
                     borocode, boroname, shape_leng, shape_area,
                     geom AS __geometry__
                     FROM nybb;"""
            df = GeoDataFrame.from_postgis(sql, con, geom_col='__geometry__')
        finally:
            con.close()

        validate_boro_df(df, case_sensitive=False)
Example #22
def psql_to_geodf(conParam, query, geomCol='geom', epsg=None):
    """
    Query database and convert data to Pandas GeoDataframe
    """

    from geopandas import GeoDataFrame
    from gasp.sql.c import psqlcon

    con = psqlcon(conParam)

    df = GeoDataFrame.from_postgis(
        query,
        con,
        geom_col=geomCol,
        crs="epsg:{}".format(str(epsg)) if epsg else None)

    return df
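A minimal usage sketch of psql_to_geodf; the shape of the connection-parameter dict expected by gasp.sql.c.psqlcon is an assumption here, and the table name is a placeholder.

# Hypothetical connection settings and table; adapt the keys to whatever psqlcon expects.
con_param = {
    'HOST': 'localhost', 'PORT': '5432', 'DATABASE': 'gisdb',
    'USER': 'postgres', 'PASSWORD': 'postgres'
}
parcels = psql_to_geodf(con_param, "SELECT * FROM parcels", geomCol='geom', epsg=4326)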
Example #23
    def test_from_postgis_custom_geom_col(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = """SELECT
                     borocode, boroname, shape_leng, shape_area,
                     geom AS __geometry__
                     FROM nybb;"""
            df = GeoDataFrame.from_postgis(sql, con, geom_col='__geometry__')
        finally:
            if PANDAS_NEW_SQL_API:
                # It's not really a connection, it's an engine
                con = con.connect()
            con.close()

        validate_boro_df(self, df)
Example #24
    def table_to_layer(self, table_name, schema=None, geom_type=None, bounds=None, polygon_extent=None):
        """ Convert table from database to GeoLayer instance

        :param table_name: name of table (case sensitive)
        :param schema: database schema (case sensitive)
        :param geom_type: geometry type
        :param bounds: bounding box (x_min, y_min, x_max, y_max)
        :param polygon_extent: shapely polygon
        :return:
        """
        if schema is None:
            schema = "public"

        if bounds is not None and polygon_extent is None:
            sql_string = f'SELECT * FROM "{schema}"."{table_name}" WHERE "{schema}"."{table_name}".geom && ' \
                         f'ST_MakeEnvelope({bounds[0]}, {bounds[1]}, {bounds[2]}, {bounds[3]})'
        elif polygon_extent is not None and bounds is None:
            sql_string = f'SELECT * FROM "{schema}"."{table_name}" WHERE ST_Within("{schema}"."{table_name}".geom, ' \
                         f'{polygon_extent})'
        else:
            sql_string = f'SELECT * FROM "{schema}"."{table_name}"'

        df = GeoDataFrame.from_postgis(sql_string, self.engine)

        if table_name in self.get_table_names(schema) and geom_type is None:
            try:
                layer = PolygonLayer(df, name=table_name)
            except GeoLayerError:
                try:
                    layer = LineLayer(df, name=table_name)
                except GeoLayerError:
                    layer = PointLayer(df, name=table_name)
        elif table_name in self.get_table_names(schema) and geom_type is not None:
            try:
                geom_type = check_string(geom_type, ("point", "line", "polygon"))
            except ValueError:
                raise SpatialDatabaseError("Invalid geometry type '%s'. Must be 'point', 'line' or 'polygon'." %
                                           geom_type)
            layer = self.layer_class[geom_type](df, name=table_name)
        else:
            raise SpatialDatabaseError("No table named '%s' in database '%s'" % (table_name, self.db_name))

        return layer
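A minimal usage sketch of table_to_layer, assuming db is an instance of the surrounding class and that a polygon table named 'parcels' exists in the public schema; both names and the bounding box are placeholders.

# Hypothetical call: 'parcels' and the bounding box are placeholders.
layer = db.table_to_layer(
    "parcels",
    schema="public",
    geom_type="polygon",
    bounds=(4.0, 50.0, 5.0, 51.0),  # x_min, y_min, x_max, y_max
)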
Example #25
    def havne_polygoner(self):

        host_adress = '153.44.18.151'
        db = 'postgres'
        username = '******'
        dbkey = 'postgres'
        login = [host_adress, db, username, dbkey]

        try:
            connection = pg.connect('host=' + str(login[0]) + ' dbname=' +
                                    str(login[1]) + ' user=' + str(login[2]) +
                                    ' password=' + str(login[3]))
            print(
                '\nKobling mot postgres database på SØA-server var en suksess\n'
            )
        except:
            print('Tilkobling mislyktes\n')

        schema = 'public.'
        table_name = 'havne_polygoner'

        table_coloumns = "location_name, uuid, type, google_address, street, source, offshore_name, offshore_kind, offshore_facilityfunctions, offshore_belongstoname, farm_id, farm_owner, farm_locationnumber, farm_locationname, geom "
        select_statement = "SELECT " + table_coloumns

        from_statement = "FROM " + schema + table_name

        sql_syntax = select_statement + from_statement
        print(sql_syntax)

        print('\nHenter havnepolygoner fra public.havne_polygoner...\n')
        print('Benytter Følgende SQL-Syntax: ' + sql_syntax + '\n')

        havnepolygoner = GeoDataFrame.from_postgis(sql_syntax,
                                                   connection,
                                                   geom_col='geom')

        return havnepolygoner
Example #26
def wwa(grids, valid, iarchive):
    """An attempt at rasterizing the WWA"""
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    table = "warnings_%s" % (valid.year, )
    df = GeoDataFrame.from_postgis("""
        SELECT geom as geom, phenomena ||'.'|| significance as code, w.ugc from
        """ + table + """ w JOIN ugcs u on (w.gid = u.gid) WHERE
        issue < %s and expire > %s
        and w.wfo in ('FSD', 'ARX', 'DVN', 'DMX', 'EAX', 'FSD', 'OAX', 'MPX')
    """, pgconn, params=(valid, valid), index_col=None)
    transform = transform_from_corner(reference.IA_WEST, reference.IA_NORTH,
                                      0.01, 0.01)
    df['i'] = 1
    for vtec in df['code'].unique():
        df2 = df[df['code'] == vtec]
        shapes = ((geom, value) for geom, value in zip(df2.geometry, df2.i))
        stradd = "%s," % (vtec,)
        arr = features.rasterize(shapes=shapes, fill=0, transform=transform,
                                 out_shape=grids['wawa'].shape)
        shp = grids['wawa'].shape
        for i in range(shp[0]):
            for j in range(shp[1]):
                if arr[i, j] > 0:
                    grids['wawa'][i, j] = grids['wawa'][i, j] + stradd
Example #27
    cursor = connection.cursor()
    cursor.execute(sql_all_spatial_tables)

    spatial_tables = cursor.fetchall()
    table_list = [x[0] for x in spatial_tables]

    cursor.close()
    connection.close()


# Step 4: Iterate over the list of spatial tables, save each to shapefile
# -----------------------------------------------------------------------

print(f"Saving {len(table_list)} spatial tables to shapefile")

for table in table_list:

    print("\t-> Saving", table)

    query = f"SELECT * FROM {table}"

    connection = psycopg2.connect(URI)

    gdf = GeoDataFrame.from_postgis(query, connection, geom_col="geom")

    connection.close()

    shp_path = SHP_FOLDER / f"{table}.shp"

    gdf.to_file(shp_path)
Example #28
 w.wfo, status, eventid, u.ugc, phenomena,
 ST_area( ST_transform(u.geom,2163) ) / 1000000.0 as area2d,
 to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_expire,
 to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
 to_char(product_issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_prodissue,
 to_char(init_expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_init_expire
 from %s w JOIN ugcs u on (u.gid = w.gid) WHERE
 %s %s %s
 )
 SELECT %s from stormbased UNION SELECT %s from countybased %s
""" % (table2, timelimit, wfoLimiter, limiter,
       geomcol, table1, timelimit, wfoLimiter2, limiter,
       cols, cols, sbwlimiter)


df = GeoDataFrame.from_postgis(sql, pgconn, 'geo')
if len(df.index) == 0:
    sys.stdout.write("Content-type: text/plain\n\n")
    sys.stdout.write("ERROR: No results found for query, please try again")
    sys.exit()

# Capitalize columns please
df.columns = [s.upper() if s != 'geo' else s for s in df.columns.values]
df.to_file(fp+".shp")

shutil.copyfile("/opt/iem/data/gis/meta/4326.prj",
                fp+".prj")

z = zipfile.ZipFile(fp+".zip", 'w', zipfile.ZIP_DEFLATED)
z.write(fp+".shp")
z.write(fp+".shx")
Example #29
def workflow(dt, dt2, states):
    """Generate for a given date """
    dbconn = get_dbconn('idep')
    dextra = "valid = %s"
    args = (dt, )
    if dt2 is not None:
        dextra = "valid >= %s and valid <= %s"
        args = (dt, dt2)
    statelimit = ""
    if states is not None:
        tokens = states.split(",")
        if tokens:
            _s = [" states ~* '%s' " % (a, ) for a in tokens]
            statelimit = " and (" + " or ".join(_s) + " ) "
    df = GeoDataFrame.from_postgis("""
        WITH data as (
            SELECT simple_geom, huc_12, hu_12_name
            from huc12 WHERE scenario = 0
            """ + statelimit + """),
        obs as (
            SELECT huc_12,
            sum(coalesce(avg_loss, 0)) as avg_loss,
            sum(coalesce(avg_delivery, 0)) as avg_delivery,
            sum(coalesce(qc_precip, 0)) as qc_precip,
            sum(coalesce(avg_runoff, 0)) as avg_runoff
            from results_by_huc12 WHERE
            """ + dextra + """ and scenario = 0 GROUP by huc_12)

        SELECT d.simple_geom, d.huc_12, d.hu_12_name as name,
        coalesce(o.qc_precip, 0) as prec_mm,
        coalesce(o.avg_loss, 0) as los_kgm2,
        coalesce(o.avg_runoff, 0) as runof_mm,
        coalesce(o.avg_delivery, 0) as deli_kgm
        from data d LEFT JOIN obs o ON (d.huc_12 = o.huc_12)
    """,
                                   dbconn,
                                   params=args,
                                   geom_col='simple_geom')

    os.chdir("/tmp")
    fn = "idepv2_%s" % (dt.strftime("%Y%m%d"), )
    if dt2:
        fn += dt2.strftime("_%Y%m%d")
    df.columns = [
        s.upper() if s != 'simple_geom' else s for s in df.columns.values
    ]
    df.to_file(fn + ".shp")
    shutil.copyfile("/opt/iem/data/gis/meta/5070.prj", fn + ".prj")
    zfp = zipfile.ZipFile(fn + ".zip", 'w', zipfile.ZIP_DEFLATED)
    suffixes = ['shp', 'shx', 'dbf', 'prj']
    for suffix in suffixes:
        zfp.write("%s.%s" % (fn, suffix))
    zfp.close()

    sys.stdout.write("Content-type: application/octet-stream\n")
    sys.stdout.write(("Content-Disposition: attachment; filename=%s.zip\n\n"
                      "") % (fn, ))

    sys.stdout.write(file(fn + ".zip", 'r').read())

    suffixes.append('zip')
    for suffix in suffixes:
        os.remove("%s.%s" % (fn, suffix))
Example #30
def getAllStations(conn, lastyear=date.today().year):
    """ Gets all the table names of our DB"""
    sql = F"""SELECT id, geom, nombre 
                FROM cont_estaciones
                WHERE lastyear >= {lastyear}"""
    return GeoDataFrame.from_postgis(sql, con=conn, index_col='id')
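A minimal usage sketch of getAllStations, assuming a psycopg2 connection to the database holding cont_estaciones; the connection settings are placeholders.

# Hypothetical connection; database name and credentials are placeholders.
import psycopg2
conn = psycopg2.connect(dbname='stations_db', user='postgres', password='postgres')
stations = getAllStations(conn, lastyear=2020)
print(stations.head())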
Example #31
 to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
 to_char(product_issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_prodissue,
 to_char(init_expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_init_expire
 from %s w JOIN ugcs u on (u.gid = w.gid) WHERE
 %s %s %s
 )
 SELECT %s from stormbased UNION SELECT %s from countybased %s
""" % (table2, timelimit, wfoLimiter, limiter, geomcol, table1, timelimit,
       wfoLimiter, limiter, cols, cols, sbwlimiter)

dtext = open("%s.txt" % (fp, ), 'w')
dtext.write(sql)
dtext.close()

pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
df = GeoDataFrame.from_postgis(sql, pgconn, 'geo')
# Capitalize columns please
df.columns = [s.upper() if s != 'geo' else s for s in df.columns.values]
df.to_file(fp + ".shp")

shutil.copyfile("/mesonet/www/apps/iemwebsite/data/gis/meta/4326.prj",
                fp + ".prj")

z = zipfile.ZipFile(fp + ".zip", 'w', zipfile.ZIP_DEFLATED)
z.write(fp + ".shp")
z.write(fp + ".shx")
z.write(fp + ".dbf")
z.write(fp + ".prj")
z.write(fp + ".txt")
z.close()
Example #32
def main():
    """Go Main Go"""
    form = cgi.FieldStorage()
    sts, ets = get_time_extent(form)

    location_group = form.getfirst('location_group', 'wfo')
    if location_group == 'states':
        if 'states[]' in form:
            states = form.getlist('states[]')
            states.append('XX')  # Hack for 1 length
            wfo_limiter = ''
            wfo_limiter2 = (" and substr(w.ugc, 1, 2) in %s ") % (str(
                tuple(states)), )
        else:
            send_error('No state specified')
    elif location_group == 'wfo':
        wfo_limiter = parse_wfo_location_group(form)
        wfo_limiter2 = wfo_limiter
    else:
        # Unknown location_group
        send_error('Unknown location_group (%s)' % (location_group, ))

    # Change to postgis db once we have the wfo list
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    fn = "wwa_%s_%s" % (sts.strftime("%Y%m%d%H%M"), ets.strftime("%Y%m%d%H%M"))
    timeopt = int(form.getfirst('timeopt', [1])[0])
    if timeopt == 2:
        year3 = int(form.getfirst('year3'))
        month3 = int(form.getfirst('month3'))
        day3 = int(form.getfirst('day3'))
        hour3 = int(form.getfirst('hour3'))
        minute3 = int(form.getfirst('minute3'))
        sts = datetime.datetime(year3, month3, day3, hour3, minute3)
        sts = sts.replace(tzinfo=pytz.timezone("UTC"))
        fn = "wwa_%s" % (sts.strftime("%Y%m%d%H%M"), )

    os.chdir("/tmp/")
    for suffix in ['shp', 'shx', 'dbf', 'txt', 'zip']:
        if os.path.isfile("%s.%s" % (fn, suffix)):
            os.remove("%s.%s" % (fn, suffix))

    limiter = ""
    if 'limit0' in form:
        limiter += (
            " and phenomena IN ('TO','SV','FF','MA') and significance = 'W' ")

    sbwlimiter = " WHERE gtype = 'P' " if 'limit1' in form else ""

    warnings_table = "warnings"
    sbw_table = "sbw"
    if sts.year == ets.year:
        warnings_table = "warnings_%s" % (sts.year, )
        if sts.year > 2001:
            sbw_table = "sbw_%s" % (sts.year, )
        else:
            sbw_table = 'sbw_2014'

    geomcol = "geom"
    if form.getfirst('simple', 'no') == 'yes':
        geomcol = "simple_geom"

    cols = """geo, wfo, utc_issue as issued, utc_expire as expired,
     utc_prodissue as init_iss, utc_init_expire as init_exp,
     phenomena as phenom,
     gtype, significance as sig, eventid as etn,  status, ugc as nws_ugc,
     area2d as area_km2"""

    timelimit = "issue >= '%s' and issue < '%s'" % (sts, ets)
    if timeopt == 2:
        timelimit = "issue <= '%s' and issue > '%s' and expire > '%s'" % (
            sts, sts + datetime.timedelta(days=-30), sts)

    sql = """
    WITH stormbased as (
     SELECT geom as geo, 'P'::text as gtype, significance, wfo,
     status, eventid, ''::text as ugc,
     phenomena,
     ST_area( ST_transform(w.geom,2163) ) / 1000000.0 as area2d,
     to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_expire,
     to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
     to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_prodissue,
     to_char(init_expire at time zone 'UTC',
             'YYYYMMDDHH24MI') as utc_init_expire
     from %(sbw_table)s w WHERE status = 'NEW' and %(timelimit)s
     %(wfo_limiter)s %(limiter)s
    ),
    countybased as (
     SELECT u.%(geomcol)s as geo, 'C'::text as gtype,
     significance,
     w.wfo, status, eventid, u.ugc, phenomena,
     u.area2163 as area2d,
     to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_expire,
     to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
     to_char(product_issue at time zone 'UTC',
             'YYYYMMDDHH24MI') as utc_prodissue,
     to_char(init_expire at time zone 'UTC',
             'YYYYMMDDHH24MI') as utc_init_expire
     from %(warnings_table)s w JOIN ugcs u on (u.gid = w.gid) WHERE
     %(timelimit)s %(wfo_limiter2)s %(limiter)s
     )
     SELECT %(cols)s from stormbased UNION ALL
     SELECT %(cols)s from countybased %(sbwlimiter)s
    """ % dict(sbw_table=sbw_table,
               timelimit=timelimit,
               wfo_limiter=wfo_limiter,
               limiter=limiter,
               geomcol=geomcol,
               warnings_table=warnings_table,
               wfo_limiter2=wfo_limiter2,
               cols=cols,
               sbwlimiter=sbwlimiter)
    # dump SQL to disk for further debugging
    # o = open('/tmp/daryl.txt', 'w')
    # o.write(sql)
    # o.close()

    df = GeoDataFrame.from_postgis(sql, pgconn, 'geo')
    if len(df.index) == 0:
        sys.stdout.write("Content-type: text/plain\n\n")
        sys.stdout.write("ERROR: No results found for query, please try again")
        sys.exit()

    # Capitalize columns please
    df.columns = [s.upper() if s != 'geo' else s for s in df.columns.values]
    schema = {
        'geometry':
        'Polygon',
        'properties':
        OrderedDict([(u'WFO', 'str:3'), (u'ISSUED', 'str:12'),
                     (u'EXPIRED', 'str:12'), (u'INIT_ISS', 'str:12'),
                     (u'INIT_EXP', 'str:12'), (u'PHENOM', 'str:2'),
                     (u'GTYPE', 'str:1'), (u'SIG', 'str:1'), (u'ETN', 'str:4'),
                     (u'STATUS', 'str:3'), (u'NWS_UGC', 'str:6'),
                     (u'AREA_KM2', 'float:24.15')])
    }
    df.to_file(fn + ".shp", schema=schema)

    shutil.copyfile("/opt/iem/data/gis/meta/4326.prj", fn + ".prj")

    zf = zipfile.ZipFile(fn + ".zip", 'w', zipfile.ZIP_DEFLATED)
    zf.write(fn + ".shp")
    zf.write(fn + ".shx")
    zf.write(fn + ".dbf")
    zf.write(fn + ".prj")
    zf.close()

    sys.stdout.write("Content-type: application/octet-stream\n")
    sys.stdout.write("Content-Disposition: attachment; filename=%s.zip\n\n" %
                     (fn, ))
    sys.stdout.write(file(fn + ".zip", 'r').read())

    for suffix in ['zip', 'shp', 'shx', 'dbf', 'prj']:
        os.remove("%s.%s" % (fn, suffix))