Code example #1
0
def make_atlas(extent_path, output_path, nlcd_path, org, wmd=False, subset=""):
    """
    Kicks off the Fire Atlas making process.

    For every fire row in the refuge's FIRES_PARAM/<org>.csv this:
      * exports a fire perimeter shapefile (unioned from the PostGIS
        perimeter table via a gid list, or reprojected from a
        user-supplied shapefile),
      * copies (and optionally windows) the associated Landsat scene
        subsets into the atlas package,
      * writes a per-fire boiler-plate metadata text file from meta_info,
      * appends a row to <org>_fire_list.csv.
    Afterwards it mosaics all perimeters, clips the NLCD rasters to the
    extent, exports the refuge's FMIS points and builds annual dNBRs.

    NOTE: Python 2 code (print statements; csv files opened 'rb'/'wb').
    NOTE(review): several subprocess calls shell out to Windows-only
    commands ('mkdir', 'del') -- the script appears to assume Windows.

    :param extent_path: Folder location of a finished subset
    :param output_path: Folder location to build the atlas
    :param nlcd_path: Folder location of the NLCD files
    :param org: Organizational code for the targeted refuge
    :param wmd: True if the target is a Wetland Management District, otherwise False
    :param subset: Subset of the subset region
    """
    # group = raw_input("Group: ")
    # extent = raw_input("Extent: ")
    # org = raw_input("Org Code: ")
    # group = 'g8'
    # extent = '1467525_766125_1511325_709065'
    # org = '41573'

    base = os.path.dirname(os.path.realpath(__file__))

    # Take the last path component as the extent name; the second split
    # handles an extent_path given with a trailing separator.
    _, extent = ntpath.split(extent_path)
    if not extent:
        extent = ntpath.split(_)[-1]

    work_path = os.path.join(base, "_working")
    fws_master.createpath(work_path)

    # Create subsets for each individual fire
    # wmd = False
    # xmin, ymax, xmax, ymin
    # subset = []

    # Setup the postgresql database connection
    conn = psycopg2.connect(database="PostGIS", user="******", password="******")
    cursor = conn.cursor()

    # NOTE(review): psycopg2.connect()/cursor() raise on failure, so this
    # guard is unlikely to ever fire; kept as a defensive check.
    if not cursor:
        print "unable to connect to DB"
        quit()

    # Look up the perimeter table whose pg_description comment equals this
    # extent name.  NOTE(review): built by string interpolation -- SQL
    # injection risk if `extent` is ever untrusted; prefer a parameterized
    # query like the fod_pts lookups below.
    select_str = (
        "SELECT description, relname FROM pg_description JOIN pg_class on pg_description.objoid = pg_class.oid\
                  WHERE description = '%s'"
        % extent
    )
    cursor.execute(select_str)
    # Fully-qualified perimeter table: drop the relname's 2-char suffix and
    # append "_p" inside the bnb_perims schema.
    perim_db = "bnb_perims." + cursor.fetchone()[1][:-2] + "_p"

    # Pathing used throughout the script
    # extent_path = os.path.join(os.getcwd(), os.pardir, group, extent)
    csv_path = os.path.join(extent_path, "FIRES_PARAM", (org + ".csv"))
    # sql2shpPath = os.path.join(os.getcwd(), 'tools', 'pgsql2shp')
    # work_path = os.path.join(os.getcwd(), os.pardir, '_working')
    user_perim_path = os.path.join(extent_path, "PERIMETERS", "USER")
    # output_path = 'D:\\FWS\\refuges\\%s' % org
    scene_output = os.path.join(output_path, "scene_subsets")
    perimeter_output = os.path.join(output_path, "perimeters")
    # nlcd_path = os.path.join(os.getcwd(), os.pardir, 'NLCD')
    scene_working = os.path.join(work_path, "scenes")

    # Setup the OGR shape file driver
    Driver = ogr.GetDriverByName("ESRI Shapefile")

    if not os.path.exists(output_path):
        subprocess.call("mkdir %s" % output_path, shell=True)

    # Open the refuge fire CSV and initialize the output CSV
    # NOTE(review): csv_file is never explicitly closed.
    csv_file = open(csv_path, "rb")
    csv_log = open(os.path.join(output_path, org + "_fire_list.csv"), "wb")
    csv_write = csv.writer(csv_log, delimiter=",")

    header = (
        "FMIS Unique ID",
        "Discovery Date",
        "Fire Atlas ID",
        "Pre-Fire Scene(s)",
        "Post-Fire Scene(s)",
        "One Year Scene(s)",
        "Comments",
        "Confidence",
    )
    csv_write.writerow(header)

    # Strip line endings; rows are parsed manually with str.split below.
    fire_list = [line.rstrip("\n\r") for line in csv_file]

    # Paths of every perimeter shapefile written, used for the final mosaic
    shp_path_ls = []

    # Process each row in the refuge fire CSV
    x = 1
    # NOTE(review): error_ls is never appended to anywhere in this
    # function, so the closing "Errors with" report is always empty.
    error_ls = []
    for row in itertools.islice(fire_list, 1, None):  # skip the header row

        print "Process fire %s" % str(x)
        x += 1
        # Initialize the master dictionary for use with the boiler plate metadata string
        meta_info = {
            "date": date.today().strftime("%d %B %Y"),
            "fire_id": "",
            "fire_name": "",
            "fire_date": "",
            "acres": "",
            "PR": "",
            "pre_fire": "",
            "pre_fire_nbr": "",
            "pre_fire_ndvi": "",
            "pre_fire_cf": "",
            "pre_fire_bnb": "",
            "post_fire_nbr": "",
            "post_fire_ndvi": "",
            "post_fire_cf": "",
            "post_fire_bnb": "",
            "post_fire": "",
            "one_yr": "",
            "one_yr_nbr": "",
            "one_yr_ndvi": "",
            "one_yr_cf": "",
            "one_yr_bnb": "",
            "subset_ext_ulx": "",
            "subset_ext_uly": "",
            "subset_ext_lrx": "",
            "subset_ext_lry": "",
            "rows": "",
            "cols": "",
            "perim_ext_ulx": "",
            "perim_ext_uly": "",
            "perim_ext_lrx": "",
            "perim_ext_lry": "",
            "subset_exta_ulx": "",
            "subset_exta_uly": "",
            "subset_exta_lrx": "",
            "subset_exta_lry": "",
            "state": "",
            "comments": "",
            "cntr_lat_p": "",
            "cntr_long_p": "",
        }

        # Plain comma split: fields themselves must not contain commas.
        fod_gid, fire_shp, user_shp, fire_scene, pre_fire, one_yr_post, comments, add_info, confidence = row.split(",")

        meta_info["comments"] = comments

        # Space-separated multi-value fields (perimeter gids / scene IDs)
        fire_shp_ls = fire_shp.split()
        fire_scene_ls = fire_scene.split()
        pre_fire_ls = pre_fire.split()
        one_yr_post_ls = one_yr_post.split()

        # No perimeter available for a real FOD record: log the fire as
        # "Not Mapped", copy its scenes, and continue to the next row.
        if not fire_shp and not user_shp and fod_gid != "ADD_IN":
            cursor.execute("SELECT * FROM public.fod_pts WHERE gid = %s", (fod_gid,))
            fod = cursor.fetchone()
            # fod[26] is the discovery date -- the slices below presume an
            # "MM/DD/YY" layout.  TODO confirm against the fod_pts schema.
            fod_date = fod[26]

            # Two-digit-year pivot: < 50 -> 20xx, otherwise 19xx
            if int(fod_date[6:8]) < 50:
                fod_year = "20%s" % fod_date[6:8]
            else:
                fod_year = "19%s" % fod_date[6:8]

            disc_date = date(int(fod_year), int(fod_date[:2]), int(fod_date[3:5])).strftime("%d %B %Y")
            fws_un_id = fod[6]
            meta_info["post_fire"] = ", ".join(fire_scene_ls).replace("_sr", "_sr.tif")
            csv_write.writerow(
                (
                    fws_un_id,
                    disc_date,
                    "Not Mapped",
                    meta_info["pre_fire"],
                    meta_info["post_fire"],
                    meta_info["one_yr"],
                    meta_info["comments"],
                )
            )
            # Copy the full scene subsets, or window them when a sub-extent
            # was requested
            if not subset:
                copy_scenes(extent_path, scene_output, fire_scene_ls)
            else:
                copy_scenes(extent_path, scene_working, fire_scene_ls)
                for scene in fire_scene_ls:
                    window_scene(scene_working, scene_output, scene, subset)
            continue

        # "UNK": fire with no FOD record -- derive the discovery date from
        # the post-fire scene ID (chars [9:13] = year, [13:16] = Julian day)
        if fod_gid == "UNK":
            cursor.execute(
                'SELECT DISTINCT ON ("org name") "org name" FROM public.fod_pts WHERE "org code" = %s', (org,)
            )
            org_name = cursor.fetchone()[0]
            month, month_name, day = calc_gregorian(int(fire_scene[13:16]), int(fire_scene[9:13]))
            disc_date = day + " " + month_name + " " + fire_scene_ls[0][9:13]
            fire_type = "unk"
            fws_un_id = "n/a"
            fire_name = "n/a"
            meta_info["state"] = ""
            meta_info["fire_date"] = disc_date

        # "ADD_IN": manually added fire -- id/name/type/date come from the
        # space-separated add_info column instead of the FOD table
        elif fod_gid == "ADD_IN":
            add_ls = add_info.split()
            cursor.execute(
                'SELECT DISTINCT ON ("org name") "org name" FROM public.fod_pts WHERE "org code" = %s', (org,)
            )
            org_name = cursor.fetchone()[0]
            month, month_name, day = calc_gregorian(int(fire_scene[13:16]), int(fire_scene[9:13]))
            disc_date = add_ls[3] + add_ls[4] + add_ls[5]
            fire_type = add_ls[2]
            fws_un_id = add_ls[0]
            fire_name = add_ls[1]
            meta_info["fire_name"] = fire_name
            meta_info["state"] = ""
            meta_info["fire_date"] = disc_date

        # Normal case: pull all fire attributes from the FOD record
        else:
            cursor.execute("SELECT * FROM public.fod_pts WHERE gid = %s", (fod_gid,))
            fod = cursor.fetchone()
            org_name = fod[4]
            # Same presumed "MM/DD/YY" layout as above -- TODO confirm
            fod_date = fod[26]

            if int(fod_date[6:8]) < 50:
                fod_year = "20%s" % fod_date[6:8]
            else:
                fod_year = "19%s" % fod_date[6:8]

            disc_date = date(int(fod_year), int(fod_date[:2]), int(fod_date[3:5])).strftime("%d %B %Y")

            fire_type = fod[18]
            fws_un_id = fod[6]
            fire_name = fod[7]
            meta_info["state"] = fod[39]

            meta_info["fire_name"] = fire_name
            meta_info["fire_date"] = disc_date

        # Scratch shapefile that receives the exported/reprojected perimeter
        temp_shp = os.path.join(work_path, "temp")

        # ADD_IN fire with no perimeter: same "Not Mapped" handling as above
        if not user_shp and not fire_shp and fod_gid == "ADD_IN":
            csv_write.writerow(
                (
                    fws_un_id,
                    disc_date,
                    "Not Mapped",
                    meta_info["pre_fire"],
                    meta_info["post_fire"],
                    meta_info["one_yr"],
                    meta_info["comments"],
                )
            )
            if not subset:
                copy_scenes(extent_path, scene_output, fire_scene_ls)
            else:
                copy_scenes(extent_path, scene_working, fire_scene_ls)
                for scene in fire_scene_ls:
                    window_scene(scene_working, scene_output, scene, subset)
            continue

        # Export the perimeter from PostGIS: union all listed gids into a
        # single temp shapefile via ogr2ogr
        if not user_shp and fire_shp:

            # Build the "gid = a or gid = b ..." WHERE clause
            if len(fire_shp_ls) > 1:
                db_string = "gid = %s " % fire_shp_ls[0]
                for i in itertools.islice(fire_shp_ls, 1, None):
                    db_string += "or gid = %s " % i
            else:
                db_string = "gid = %s" % fire_shp_ls[0]

            subprocess.call(
                'ogr2ogr -a_srs EPSG:5070 -overwrite %s PG:"host=localhost user=postgres dbname=PostGIS" -sql\
             "SELECT ST_Union(geom) FROM %s WHERE %s"'
                % (temp_shp + ".shp", perim_db, db_string),
                shell=True,
            )

            if not os.path.exists(temp_shp + ".shp"):
                print "error with ", fire_shp
                print "using ", db_string
                print "with table ", perim_db
                quit()

        # A user-drawn perimeter overrides the DB: reproject it to EPSG:5070
        elif user_shp:
            if not os.path.exists(os.path.join(user_perim_path, user_shp + ".shp")):
                print "error with ", os.path.join(user_perim_path, user_shp + ".shp")
                quit()
            subprocess.call(
                "ogr2ogr -overwrite -t_srs EPSG:5070 %s %s"
                % (temp_shp + ".shp", os.path.join(user_perim_path, user_shp + ".shp")),
                shell=True,
            )

        in_ds = Driver.Open((temp_shp + ".shp"))
        in_layer = in_ds.GetLayer()

        in_extent = in_layer.GetExtent()  # xmin, xmax, ymin, ymax
        # Integer centroid of the bounding box, used in the fire ID below
        cent_x = int(in_extent[0]) + (int(in_extent[1]) - int(in_extent[0])) / 2
        cent_y = int(in_extent[2]) + (int(in_extent[3]) - int(in_extent[2])) / 2

        # Fire ID: <source>_<org>_<YYYYDDD of first post-fire scene>_<centX>_<centY>
        if fod_gid == "UNK":
            out_shp = "UNK_%s_%s_%s_%s" % (org, fire_scene_ls[0][9:16], str(cent_x), str(cent_y))
        else:
            out_shp = "FWS_%s_%s_%s_%s" % (org, fire_scene_ls[0][9:16], str(cent_x), str(cent_y))

        meta_info["fire_id"] = out_shp

        # Output perimeters are grouped into per-year folders
        perim_out = os.path.join(perimeter_output, fire_scene_ls[0][9:13])
        if not os.path.exists(perim_out):
            subprocess.call("mkdir %s" % perim_out, shell=True)

        # Replace any layer left over from a previous run.
        # NOTE(review): CreateDataSource below runs unconditionally and
        # rebinds out_ds, discarding the handle opened/created here -- this
        # if/else looks redundant; verify intent.
        if os.path.exists(os.path.join(perim_out, out_shp + ".shp")):
            out_ds = Driver.Open(os.path.join(perim_out, out_shp + ".shp"), 1)
            out_ds.DeleteLayer(out_shp)
        else:
            out_ds = Driver.CreateDataSource(perim_out)

        out_ds = Driver.CreateDataSource(perim_out)
        out_layer = out_ds.CreateLayer(out_shp, in_layer.GetSpatialRef(), in_layer.GetGeomType())
        shp_path_ls.append(os.path.join(perim_out, out_shp))

        # Attribute schema of the perimeter shapefile
        out_layer.CreateField(ogr.FieldDefn("Fire_Name", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Org_Code", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Refuge", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Disc_Date", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Fire_ID", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Fire_Type", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Area", ogr.OFTReal))
        out_layer.CreateField(ogr.FieldDefn("Perimeter", ogr.OFTReal))
        out_layer.CreateField(ogr.FieldDefn("Acres", ogr.OFTReal))
        out_layer.CreateField(ogr.FieldDefn("FWS_Un_FID", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Fire_Scene", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Pre_Fire", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("One_Yr_Pst", ogr.OFTString))
        out_layer.CreateField(ogr.FieldDefn("Comments", ogr.OFTString))
        out_layer.SyncToDisk()

        # Copy each input feature into the output layer with attributes
        for feature in in_layer:
            geom = feature.GetGeometryRef()

            if not geom:
                continue

            # Buffer(0) repairs invalid geometry; Simplify trims slivers
            geom = geom.Buffer(0.0)
            geom = geom.Simplify(0.0001)

            # Square metres -> acres
            acres = geom.GetArea() / 4046.86

            out_feature_defn = out_layer.GetLayerDefn()
            out_feature = ogr.Feature(out_feature_defn)

            out_feature.SetField("Fire_Name", fire_name)
            out_feature.SetField("Org_Code", org)
            out_feature.SetField("Refuge", str(org_name))
            out_feature.SetField("Disc_Date", str(disc_date))
            out_feature.SetField("Fire_ID", out_shp)
            out_feature.SetField("Fire_Type", fire_type)
            out_feature.SetField("Area", geom.GetArea())
            out_feature.SetField("Perimeter", geom.Boundary().Length())
            out_feature.SetField("Acres", acres)
            out_feature.SetField("FWS_Un_FID", fws_un_id)
            out_feature.SetField("Comments", comments)
            out_feature.SetField("Fire_Scene", fire_scene)
            out_feature.SetField("Pre_Fire", pre_fire)
            out_feature.SetField("One_Yr_Pst", one_yr_post)
            out_feature.SetGeometry(geom)
            out_layer.CreateFeature(out_feature)
            out_layer.SyncToDisk()

        in_layer.ResetReading()

        meta_info["cntr_lat_p"], meta_info["cntr_long_p"] = calc_lat_lon(cent_y, cent_x)

        # Perimeter bounding-box corners converted to WGS84 (lat, lon)
        wgs_ul = calc_lat_lon(in_extent[3], in_extent[0])
        wgs_ur = calc_lat_lon(in_extent[3], in_extent[1])
        wgs_lr = calc_lat_lon(in_extent[2], in_extent[1])
        wgs_ll = calc_lat_lon(in_extent[2], in_extent[0])

        meta_info["perim_ext_uly"] = max(wgs_ul[0], wgs_ur[0])
        meta_info["perim_ext_ulx"] = min(wgs_ul[1], wgs_ll[1])
        meta_info["perim_ext_lry"] = min(wgs_ll[0], wgs_lr[0])
        meta_info["perim_ext_lrx"] = max(wgs_ur[1], wgs_lr[1])

        # NOTE(review): `acres` holds only the LAST feature's value, and is
        # unbound if the layer contained no usable geometry -- verify.
        meta_info["acres"] = acres

        # Scene handling: derived product names share the scene-ID stem
        if fire_scene != "":
            scene_ls = []
            meta_info["post_fire"] = ", ".join(fire_scene_ls).replace("_sr", "_sr.tif")
            meta_info["post_fire_ndvi"] = ", ".join(fire_scene_ls).replace("_sr", "_sr_ndvi.tif")
            meta_info["post_fire_nbr"] = ", ".join(fire_scene_ls).replace("_sr", "_sr_nbr.tif")
            meta_info["post_fire_cf"] = ", ".join(fire_scene_ls).replace("_sr", "_cfmask.tif")
            meta_info["post_fire_bnb"] = ", ".join(fire_scene_ls).replace("_sr", "_bnb.tif")

            scene_ls.extend(fire_scene_ls)

            if pre_fire_ls:
                scene_ls.extend(pre_fire_ls)
                meta_info["pre_fire"] = ", ".join(pre_fire_ls).replace("_sr", "_sr.tif")
                meta_info["pre_fire_ndvi"] = ", ".join(pre_fire_ls).replace("_sr", "_sr_ndvi.tif")
                meta_info["pre_fire_nbr"] = ", ".join(pre_fire_ls).replace("_sr", "_sr_nbr.tif")
                meta_info["pre_fire_cf"] = ", ".join(pre_fire_ls).replace("_sr", "_cfmask.tif")
                meta_info["pre_fire_bnb"] = ", ".join(pre_fire_ls).replace("_sr", "_bnb.tif")

            if one_yr_post_ls:
                scene_ls.extend(one_yr_post_ls)
                meta_info["one_yr"] = ", ".join(one_yr_post_ls).replace("_sr", "_sr.tif")
                meta_info["one_yr_ndvi"] = ", ".join(one_yr_post_ls).replace("_sr", "_sr_ndvi.tif")
                meta_info["one_yr_nbr"] = ", ".join(one_yr_post_ls).replace("_sr", "_sr_nbr.tif")
                meta_info["one_yr_cf"] = ", ".join(one_yr_post_ls).replace("_sr", "_cfmask.tif")
                meta_info["one_yr_bnb"] = ", ".join(one_yr_post_ls).replace("_sr", "_bnb.tif")

            if not scene_ls:
                print "No scenes !!"
                continue

            proj_box = []

            if not subset:
                copy_scenes(extent_path, scene_output, scene_ls)
            else:
                copy_scenes(extent_path, scene_working, scene_ls)
                for scene in scene_ls:
                    window_scene(scene_working, scene_output, scene, subset)

            # WMDs additionally get per-fire windows: the perimeter bbox
            # buffered by 3 km and snapped to the 15 m grid
            if wmd:
                # in_extent xmin, xmax, ymin, ymax
                proj_box.append(fws_master.fifteen_offset(in_extent[0] - 3000))
                proj_box.append(fws_master.fifteen_offset(in_extent[1] + 3000))
                proj_box.append(fws_master.fifteen_offset(in_extent[2] - 3000))
                proj_box.append(fws_master.fifteen_offset(in_extent[3] + 3000))

                fire_name_path = os.path.join(scene_output, "fire_subsets", meta_info["fire_id"])
                if not os.path.exists(fire_name_path):
                    subprocess.call("mkdir %s" % fire_name_path, shell=True)

                for scene in scene_ls:
                    window_scene(scene_output, fire_name_path, scene, proj_box)

            # Extent of the first scene's surface-reflectance raster
            r_extent = get_coords(os.path.join(scene_output, scene_ls[0][9:13], "SR", scene_ls[0][:-2] + "sr.tif"))

            # Raster dimensions at the 30 m Landsat pixel size
            meta_info["rows"] = int((r_extent[3] - r_extent[1]) / 30)
            meta_info["cols"] = int((r_extent[2] - r_extent[0]) / 30)

            # Landsat path/row from the scene ID
            meta_info["PR"] = scene_ls[0][3:9]

            meta_info["subset_exta_ulx"] = r_extent[0]
            meta_info["subset_exta_uly"] = r_extent[3]
            meta_info["subset_exta_lrx"] = r_extent[2]
            meta_info["subset_exta_lry"] = r_extent[1]

            # Scene bounding-box corners in WGS84 (lat, lon)
            wgs_ul = calc_lat_lon(r_extent[3], r_extent[0])
            wgs_ur = calc_lat_lon(r_extent[3], r_extent[2])
            wgs_lr = calc_lat_lon(r_extent[1], r_extent[2])
            wgs_ll = calc_lat_lon(r_extent[1], r_extent[0])

            meta_info["subset_ext_uly"] = max(wgs_ul[0], wgs_ur[0])
            meta_info["subset_ext_ulx"] = min(wgs_ul[1], wgs_ll[1])
            meta_info["subset_ext_lry"] = min(wgs_ll[0], wgs_lr[0])
            meta_info["subset_ext_lrx"] = max(wgs_ur[1], wgs_lr[1])

        # Boiler-plate metadata text file alongside the perimeter shapefile
        meta_file = open(os.path.join(perim_out, out_shp + ".txt"), "w")
        meta_file.write(metadata_str(meta_info))
        meta_file.close()

        csv_write.writerow(
            (
                fws_un_id,
                disc_date,
                meta_info["fire_id"],
                meta_info["pre_fire"],
                meta_info["post_fire"],
                meta_info["one_yr"],
                meta_info["comments"],
                confidence,
            )
        )

        # Rebinding to None closes/flushes the OGR datasets
        in_ds = None
        out_ds = None

    # Remove any previous mosaic (Windows 'del') before rebuilding it
    if os.path.exists(os.path.join(perimeter_output, org + "_mosaic.shp")):
        subprocess.call("del %s.*" % (os.path.join(perimeter_output, org + "_mosaic")), shell=True)

    print "Generating Mosaic"
    # Seed the mosaic with the first perimeter, then append the rest
    subprocess.call(
        'ogr2ogr -overwrite -f "esri shapefile" %s_mosaic.shp %s'
        % (os.path.join(perimeter_output, org), shp_path_ls[0] + ".shp"),
        shell=True,
    )
    for shapefile in itertools.islice(shp_path_ls, 1, None):
        subprocess.call(
            'ogr2ogr -f "esri shapefile" -update -append %s_mosaic.shp %s'
            % (os.path.join(perimeter_output, org), shapefile + ".shp"),
            shell=True,
        )

    print "Adding NLCD to Package"
    # gdal_translate -projwin expects "ulx uly lrx lry".
    # NOTE(review): with subset = (xmin, ymax, xmax, ymin) the ordering
    # below yields "xmin ymin ymax xmax", which does not look like
    # ulx uly lrx lry -- verify against the callers.
    if subset:
        nlcd_ext = "%s %s %s %s" % (subset[0], subset[3], subset[1], subset[2])
    else:
        nlcd_ext = " ".join(extent.split("_"))
    nlcd_out_path = os.path.join(output_path, "NLCD")

    # Collect every NLCD .img raster  (NOTE: `file` shadows the builtin)
    nlcd_ls = []
    for file in os.listdir(nlcd_path):
        if file[-3:] != "img":
            continue
        nlcd_ls.append(os.path.join(nlcd_path, file))

    if not os.path.exists(nlcd_out_path):
        subprocess.call("mkdir %s" % nlcd_out_path, shell=True)

    # Clip each NLCD raster to the extent as GeoTIFF.
    # NOTE(review): a string command without shell=True only works on
    # Windows; on POSIX subprocess.call would need a list or shell=True.
    for nlcd_file in nlcd_ls:
        head, tail = os.path.split(nlcd_file)
        subprocess.call(
            "gdal_translate -q -of GTiff -projwin %s %s %s"
            % (nlcd_ext, nlcd_file, os.path.join(nlcd_out_path, tail[:-3] + "tif"))
        )

    print "Adding FMIS points"
    support_path = os.path.join(output_path, "support_layers")

    if not os.path.exists(support_path):
        os.mkdir(support_path)

    # subprocess.call('ogr2ogr -overwrite -where "ORGCODE = %s" %s %s' % (org,
    #                                               os.path.join(support_path, org + '_FWS_Unit.shp'),
    #                                               os.path.join(os.getcwd(), os.pardir, 'boundaries', 'All_FWS_5070.shp')), shell=True)
    # Export this refuge's FMIS/FOD points.  NOTE(review): the doubled
    # quotes around "org code" and the unquoted org value rely on the
    # shell's quote handling -- verify the -sql string ogr2ogr receives.
    subprocess.call(
        'ogr2ogr -t_srs EPSG:5070 -overwrite %s PG:"host=localhost user=postgres dbname=PostGIS" -sql\
             "SELECT * FROM %s WHERE ""org code"" = %s"'
        % (os.path.join(support_path, org + "_FMIS.shp"), "fod_pts", org),
        shell=True,
    )

    cursor.close()
    conn.close()
    csv_log.close()
    print "Errors with the following:", error_ls
    print "Adding annual dNBRs"
    annual_dnbr(org, extent_path, output_path, subset)
Code example #2
0
def create_perims(inputPath, pg_table, filter_ag=False, nlcdpath=False,
                  terrain_shadow=False, lsatmeta=False, filter_cloud=False):
    """
    Build fire perimeters from burned/not-burned (BNB) rasters.

    Walks every *bnb.tif under inputPath in date order and, per raster:
    picks a burn threshold (Otsu plus one standard deviation, clamped to
    [min_thresh, max_thresh]), optionally masks terrain shadow,
    agriculture (NLCD epoch chosen by scene year) and clouds, removes
    small objects/islands, polygonizes the result with GDAL, and merges
    each new polygon with recent overlapping polygons buffered in
    geom_dict.  Polygons whose dates age out of the merge window are
    flushed to the PostGIS table `pg_table`.

    NOTE: Python 2 code; temp-file cleanup uses the Windows 'del' command.

    :param inputPath: Root folder of a finished subset; its last path
        component encodes the extent as "xmin_ymax_..." coordinates
    :param pg_table: PostGIS table receiving finished perimeter rows
    :param filter_ag: Truthy to mask agriculture -- passed through to
        ag_masks(); requires nlcdpath.  TODO confirm expected value type
    :param nlcdpath: Folder of NLCD rasters used by the ag mask
    :param terrain_shadow: DEM path for terrain-shadow masking, or False
    :param lsatmeta: Landsat metadata handed to terrain_mask(), or False
    :param filter_cloud: True to zero out cloud pixels via cfmask rasters
    """
    ###################################
    # Main variables used in processing
    ###################################

    min_thresh = 50  # minimum threshold to use
    max_thresh = 95
    day_diff = 48  # number of days to keep a geometry without getting merged
    max_day = 90  # max number of days to keep a geometry, independent of the last time merged
    dist_meter = 0  # distance used to union geometries that don't intersect
    min_pixel = 22  # minimum number of contiguous pixels to keep
    min_island = 9  # minimum number of contiguous pixels to keep for islands
    min_pixel_diag = 2  # value of 2 includes diagonal pixels in being contiguous, value of 1 does not include diagonals
    min_isl_diag = 2

    shade_thresh = 90  # Used for terrain shadows

    base = os.path.dirname(os.path.realpath(__file__))

    # Last path component is the extent name; the second split handles a
    # trailing path separator.
    _, exPath = ntpath.split(inputPath)
    if not exPath:
        exPath = ntpath.split(_)[-1]

    outPath = os.path.join(base, '_working')
    fws_master.createpath(outPath)

    # Database connection
    conn = psycopg2.connect(database="PostGIS", user="******", password="******")
    try:
        cursor = conn.cursor()
    except psycopg2.Error as e:
        print e.pgerror
        exit()

    # Initialize dictionaries
    geom_dict = {}  # Keeps track of geometries and dates, for use as a buffer in merging polygons
    bnb_list_dict = {}  # File list of all the BNB rasters to process

    # Parse the extent coordinates from the folder name; they anchor the
    # ag/terrain mask windows below
    if filter_ag:
        coord_ls = []
        for coord in exPath.split('_'):
            coord_ls.append(int(coord))

        ag_dict = ag_masks(filter_ag, coord_ls, nlcdpath)
    else:
        ag_dict = ''

    # Get all the BNB rasters in a sorted list
    for root, dirs, files in os.walk(inputPath):
        for name in files:
            if name[-7:] != 'bnb.tif':
                continue

            # Sort key built from scene-ID slices -- presumably
            # year+day-of-year plus path/row; yields chronological order
            date_key = name[9:16] + name[4:6] + name[7:9]
            bnb_list_dict[date_key] = name

    # Move through the list of BNB rasters for processing
    # (NOTE: loop variable `file` shadows the builtin)
    for bnb_key, file in sorted(bnb_list_dict.items()):

        # if file != 'LT40300281982347XXX01_bnb.tif':
        #     continue

        # Rasters live under <year>/BNB/ inside the subset folder
        inRaster = os.path.join(inputPath, file[9:13], 'BNB', file)
        outRaster = os.path.join(outPath, file)

        yr = int(file[9:13])  # scene year from the file name

        inDS = gdal.Open(inRaster, GA_ReadOnly)

        if inDS is None:
            print 'Unable to open %s, Please clear the DB and restart' % inRaster
            quit()

        inBand = inDS.GetRasterBand(1)
        rows = inBand.YSize
        cols = inBand.XSize
        rDriver = inDS.GetDriver()
        ingeo = inDS.GetGeoTransform()
        inData = np.array(inBand.ReadAsArray(0, 0, cols, rows), dtype=np.byte)

        # Pixel offsets of this raster within the full extent (30 m pixels).
        # NOTE(review): coord_ls is only defined when filter_ag is truthy,
        # so terrain_shadow without filter_ag raises NameError -- verify.
        if terrain_shadow or filter_ag:
            colmin = int(abs(coord_ls[0] - ingeo[0]) / 30)
            rowmin = int(abs(coord_ls[1] - ingeo[3]) / 30)
            colmax = colmin + cols
            rowmax = rowmin + rows

        # Use the Otsu method to determine a possible threshold
        thresh = threshold_otsu(inData)

        # Add the standard deviation if it is available
        if np.std(inData):
            thresh += int(math.ceil(np.std(inData)))

        # Clamp the threshold into the configured range
        if thresh < min_thresh:
            thresh = min_thresh
        elif thresh > max_thresh:
            thresh = max_thresh

        # Binarize: burned (>= thresh) -> 1, everything else -> 0
        inData[inData < thresh] = 0
        inData[inData >= thresh] = 1

        # Record the threshold used for this scene in the DB
        cursor.execute(sql_thresh_string(pg_table, file[:21], thresh))
        conn.commit()

        # Zero out pixels likely to be terrain shadow
        if terrain_shadow:
            dem = terrain_shadow
            dem_out = os.path.join(outPath, 'shade.tif')

            terr_arr = terrain_mask(dem, dem_out, file[:-8], cursor, lsatmeta)[rowmin:rowmax, colmin:colmax]
            inData[terr_arr < shade_thresh] = 0

        # Mask agriculture using the NLCD epoch nearest below the scene year
        f_yr = ''
        if filter_ag:
            if yr < 2001:
                f_yr = '1992'
                inData[ag_dict['1992'][rowmin:rowmax, colmin:colmax] == 0] = 0
            elif yr < 2006:
                f_yr = '2001'
                inData[ag_dict['2001'][rowmin:rowmax, colmin:colmax] == 0] = 0
            elif yr < 2011:
                f_yr = '2006'
                inData[ag_dict['2006'][rowmin:rowmax, colmin:colmax] == 0] = 0
            else:
                f_yr = '2011'
                inData[ag_dict['2011'][rowmin:rowmax, colmin:colmax] == 0] = 0

        # Zero out pixels flagged as value 4 in the scene's cfmask raster
        # (presumably the CFmask cloud class -- TODO confirm)
        if filter_cloud:
            cfRaster = os.path.join(inputPath, file[9:13], 'CFMASK', file[:-7] + 'cfmask.tif')
            cfmask = cloud(cfRaster)
            inData[cfmask == 4] = 0

        # Process the raster removing small objects, inside and outside of the possible shapes
        filter_raster = remove_small_objects(inData.astype(bool), min_pixel, min_pixel_diag)
        outData = np.logical_not(remove_small_objects(np.logical_not(filter_raster), min_island, min_isl_diag)).astype(int)

        # Write the cleaned binary raster, then reopen it read-only for
        # polygonizing
        out_ds = rDriver.Create(outRaster, cols, rows, 1, gdal.GDT_Byte)
        out_ds.SetGeoTransform(inDS.GetGeoTransform())
        out_ds.SetProjection(inDS.GetProjection())
        out_ds.GetRasterBand(1).WriteArray(outData)
        out_ds.FlushCache()
        out_ds = None
        out_ds = gdal.Open(outRaster)
        out_band = out_ds.GetRasterBand(1)

        # Begin polygon processing, utilizing gdal_polygonzie
        print 'Reading %s %s %s, ' % (file, f_yr, thresh),
        shape_path = os.path.join(outPath, file[:-8] + '.shp')
        drv = ogr.GetDriverByName('ESRI Shapefile')
        shp_ds = drv.CreateDataSource(shape_path)
        srs = osr.SpatialReference()
        srs.ImportFromWkt(out_ds.GetProjectionRef())
        shp_layer = shp_ds.CreateLayer(file[:-8], srs=srs)

        # DN field receives each polygon's raster value (0 or 1)
        fd = ogr.FieldDefn('DN', ogr.OFTInteger)
        shp_layer.CreateField(fd)

        gdal.Polygonize(out_band, None, shp_layer, 0, ['8CONNECTED=8'])
        shp_ds = None
        out_ds = None

        Driver = ogr.GetDriverByName("ESRI Shapefile")
        DataSource = Driver.Open(os.path.join(outPath, (file[:-8] + '.shp')))
        Layer = DataSource.GetLayer()

        # Move through the features in the created shapefile from gdal_polygonize
        num = 0  # Keep the key names in geom_dict unique
        print 'Comparing %s against %s' % (Layer.GetFeatureCount(), len(geom_dict))
        for feature in Layer:
            num += 1

            geom = feature.GetGeometryRef().Clone()

            # Filter geometries based on the raster processing
            if feature.GetFieldAsString('DN') != '1':
                continue

            # Fix any errors from gdal_polygonzie
            geom = geom.Buffer(0.0)

            file_ls = [file]  # Keep track of what scenes are used in building a polygon

            # geom_dict value tuple: (geometry, begin YYYYDDD, end YYYYDDD,
            # list of contributing BNB files)
            if len(geom_dict) == 0:  # If empty, just add it to the dictionary
                geom_dict['%s_%s' % (file[:21], str(num))] = (geom, file[9:16], file[9:16], file_ls)
            else:
                merged = False
                out_keys = []  # Keep track of what geometries need to be written and removed from the dictionary
                union_keys = []  # Keep track of what geometries need to be merged
                for key in geom_dict:
                    # Use BNB file date to determine if geometries need to be written to DB and removed
                    # Day-of-year gap beyond day_diff (upper bound handles
                    # the wrap across a year boundary): geometry is finished
                    if day_diff < abs(int(geom_dict[key][2][-3:]) - int(file[13:16])) < (365 - day_diff):
                        out_keys.append(key)
                        continue
                    # Hard cap: flush anything older than max_day since it
                    # first appeared, regardless of recent merges
                    if abs(int(geom_dict[key][1][-3:]) - int(file[13:16])) > max_day:
                        out_keys.append(key)
                        continue

                    # Use OGR to determine if the new geometry intersects or is within a certain distance of those
                    # in the dictionary
                    if dist_meter:
                        if geom.Intersects(geom_dict[key][0]) or geom.Distance(geom_dict[key][0]) < dist_meter:
                            union_keys.append(key)
                            merged = True
                    else:
                        if geom.Intersects(geom_dict[key][0]):
                            union_keys.append(key)
                            merged = True

                if not merged:
                    geom_dict['%s_%s' % (file[:21], str(num))] = (geom, file[9:16], file[9:16], file_ls)

                # Union all overlapping buffered geometries into this one,
                # keeping the earliest begin and latest end dates
                if merged:
                    begin_ls = []  # Keep track of perimeter beginning dates
                    end_ls = []  # Keep track of perimeter ending dates
                    begin_ls.append(file[9:16])
                    end_ls.append(file[9:16])
                    for union in union_keys:
                        geom = geom.Union(geom_dict[union][0])
                        file_ls += geom_dict[union][3]
                        begin_ls.append(geom_dict[union][1])
                        end_ls.append(geom_dict[union][2])
                        del geom_dict[union]
                    begin_ls.sort()
                    end_ls.sort()
                    geom_dict['%s_%s' % (file[:21], str(num))] = (geom, begin_ls[0], end_ls[-1], file_ls)

                # Flush aged-out geometries to the DB and drop them
                for dict_remove in out_keys:
                    cursor.execute(sql_string(geom_dict[dict_remove], pg_table))
                    conn.commit()
                    del geom_dict[dict_remove]

        Layer.ResetReading()

        DataSource = None

        # Clean the working folder (Windows 'del'), keeping temp* files
        for delme in os.listdir(outPath):
            if delme[:4] != 'temp':
                subprocess.call('del %s' % os.path.join(outPath, delme), shell=True)

    # Output whatever is left in the geom_dict to the DB
    for key in geom_dict:
        cursor.execute(sql_string(geom_dict[key], pg_table))
        conn.commit()

    inDS = None
    cursor.close()
    conn.close()