# NOTE: The functions below are excerpts from a larger CHaMP/Riverscapes
# toolset. The imports here cover the third-party libraries they use;
# project-internal names (Logger, APIGet, DataException, riverscapes,
# topoproject, __version__, dict_ocular_by_unit, calculate_grain_size,
# expand_polygons, get_data_polygon, rasterize_polygons) come from that
# project's own modules and are not reproduced here.
import os
from os import path
import json
import sqlite3
from datetime import datetime

import numpy as np
import pandas
import geopandas
import rasterio
from shapely.geometry import Point


def driftBiomassDensity(visitMetrics, driftInverts, driftInvertResults, sampleBiomasses):
    log = Logger("driftBiomassDensity")
    if driftInverts is None or len(driftInverts["values"]) == 0:
        visitMetrics["DriftBiomassDensity"] = None
        return

    if driftInvertResults is None or len(driftInvertResults["values"]) == 0:
        visitMetrics["DriftBiomassDensity"] = None
        return

    if sampleBiomasses is None:
        visitMetrics["DriftBiomassDensity"] = None
        return

    volumes = [s["value"]["VolumeSampled"] for s in driftInverts["values"]]

    if any(v is None for v in volumes):
        log.warning("VolumeSampled contains 'None'")

    sumVolumeSampled = np.sum([v for v in volumes if v is not None])
    # Use the first result record for the portion-of-sample-sorted fraction.
    sampleResult = driftInvertResults["values"][0]
    sumSampleBiomass = np.sum([s["value"]["DryMassGrams"] / sampleResult["value"]["PortionOfSampleSorted"]
                               for s in sampleBiomasses["values"]])

    visitMetrics["DriftBiomassDensity"] = None

    if sumVolumeSampled > 0:
        visitMetrics["DriftBiomassDensity"] = sumSampleBiomass / sumVolumeSampled
    def buildManualFile(self, layerFileName, bMandatory):
        """
        Build a file path using manual layer file naming.
        :param layerFileName: file name to look for (case-insensitive)
        :param bMandatory: whether the layer is required (unused here)
        :return: full path of the matched file, or "" if not found
        """
        path = ""
        log = Logger("buildManualFile")
        try:
            match = next(file for file in os.listdir(self.directory)
                         if file.lower() == layerFileName.lower())
            path = os.path.join(self.directory, match)
        except Exception:
            log.warning("The file called '{0}' does not exist in directory: {1}".format(
                layerFileName, self.directory))
        return path
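
    # --- Usage sketch -------------------------------------------------------
    # buildManualFile runs on the host class instance, which supplies
    # self.directory; a hypothetical call:
    #   layer_path = layer_source.buildManualFile("Thalweg.shp", True)
    # It returns the case-corrected full path, or "" (with a warning logged)
    # when no matching file exists in the directory.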
def download_file(file_dict, folder):

    log = Logger('Download')

    if not file_dict['name']:
        log.warning('Missing file name in folder {}'.format(folder))
        return

    if not file_dict['downloadUrl'] or file_dict['downloadUrl'].lower() == '?download':
        log.warning('Missing download URL in folder {}'.format(folder))
        return

    file_path = os.path.join(folder, file_dict['name'])

    if not os.path.isdir(folder):
        os.makedirs(folder)

    # Write file info as JSON
    with open(os.path.splitext(file_path)[0] + '.json', 'w') as json_file:
        json.dump(file_dict, json_file)

    # Skip files that already exist, unless they are zero bytes, in which case
    # remove the empty file and re-download it
    if os.path.isfile(file_path):
        if os.stat(file_path).st_size == 0:
            log.warning('Removing zero byte file {}'.format(file_path))
            os.remove(file_path)
        else:
            return

    # Download missing file
    with open(file_path, 'w+b') as f:
        response = APIGet(file_dict['downloadUrl'], absolute=True)
        f.write(response.content)

    log.info('Downloaded missing file {}'.format(file_path))
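
# --- Usage sketch (hypothetical name/URL) -----------------------------------
# Illustrative input for download_file; the URL is a placeholder, so the call
# itself is left commented out.
_example_file = {
    'name': 'TopoData.zip',
    'downloadUrl': 'https://api.example.org/files/123/download',
}
# download_file(_example_file, '/data/champ/VISIT_1234/Field Folders/Topo')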
def BankfullMetrics(dem, detrended_dem, shp_points):
    """Calculate bankfull volume and depth metrics from crew bankfull points.

    :param dem: path to the DEM raster
    :param detrended_dem: path to the detrended DEM raster
    :param shp_points: path to the topo points shapefile ('bf'-coded bankfull points)
    :return: dict with Volume and Depth (Max, Mean) metrics
    """

    log = Logger("Bankfull Metrics")

    # 1. Find the average elevation of crew bankfull points in the detrended DEM.
    gdf_topo_points = geopandas.GeoDataFrame.from_file(shp_points)

    # Bankfull points are coded 'bf'; the attribute field name varies by dataset.
    code_field = "Code" if "Code" in gdf_topo_points else "code"
    gdf_bf_points = gdf_topo_points[gdf_topo_points[code_field] == "bf"]

    log.info("Loaded BF points")

    with rasterio.open(detrended_dem) as rio_detrended:
        bf_elevations = [
            v[0] for v in rio_detrended.sample(
                zip([Point(p).x for p in gdf_bf_points.geometry],
                    [Point(p).y for p in gdf_bf_points.geometry]))
            if v[0] != rio_detrended.nodata
        ]  # Filter out points not within detrendedDEM data extent.
        detrended_band = rio_detrended.read(1)

    if len(bf_elevations) == 0:
        log.error("No valid bf elevation points found.")
        # Averaging an empty list below would raise ZeroDivisionError, so fail early.
        raise DataException("No valid bf elevation points found.")

    log.info("Sampled {} valid BF point elevations from the DetrendedDEM".format(
        len(bf_elevations)))

    with rasterio.open(dem) as rio_dem:
        dem_band = rio_dem.read(1)

    # Enforce orthogonal rasters: pad whichever grid is smaller so both arrays
    # share the same extent (a 0.1 cell size is assumed throughout this function).
    dem_pad_top = int(
        (rio_detrended.bounds.top - rio_dem.bounds.top) /
        0.1) if rio_detrended.bounds.top > rio_dem.bounds.top else 0
    dem_pad_bottom = int(
        (rio_dem.bounds.bottom - rio_detrended.bounds.bottom) /
        0.1) if rio_dem.bounds.bottom > rio_detrended.bounds.bottom else 0
    dem_pad_right = int(
        (rio_detrended.bounds.right - rio_dem.bounds.right) /
        0.1) if rio_detrended.bounds.right > rio_dem.bounds.right else 0
    dem_pad_left = int(
        (rio_dem.bounds.left - rio_detrended.bounds.left) /
        0.1) if rio_dem.bounds.left > rio_detrended.bounds.left else 0

    det_pad_top = int(
        (rio_dem.bounds.top - rio_detrended.bounds.top) /
        0.1) if rio_detrended.bounds.top < rio_dem.bounds.top else 0
    det_pad_bottom = int(
        (rio_detrended.bounds.bottom - rio_dem.bounds.bottom) /
        0.1) if rio_dem.bounds.bottom < rio_detrended.bounds.bottom else 0
    det_pad_right = int(
        (rio_dem.bounds.right - rio_detrended.bounds.right) /
        0.1) if rio_detrended.bounds.right < rio_dem.bounds.right else 0
    det_pad_left = int(
        (rio_detrended.bounds.left - rio_dem.bounds.left) /
        0.1) if rio_dem.bounds.left < rio_detrended.bounds.left else 0

    np_detrended_ortho = np.pad(detrended_band,
                                ((det_pad_top, det_pad_bottom),
                                 (det_pad_left, det_pad_right)),
                                mode="constant",
                                constant_values=np.nan)
    np_dem_ortho = np.pad(dem_band, ((dem_pad_top, dem_pad_bottom),
                                     (dem_pad_left, dem_pad_right)),
                          mode="constant",
                          constant_values=np.nan)

    if all(v == 0 for v in [
            dem_pad_top, dem_pad_bottom, dem_pad_right, dem_pad_left,
            det_pad_top, det_pad_bottom, det_pad_right, det_pad_left
    ]):
        log.info("DEM and DetrendedDEM have concurrent extents")
    else:
        log.warning(
            "Non-Concurrent Rasters encountered. DEM and DetrendedDEM using padded extents"
        )

    ma_detrended = np.ma.MaskedArray(
        np_detrended_ortho, np.equal(np_detrended_ortho, rio_detrended.nodata))
    ma_dem = np.ma.MaskedArray(np_dem_ortho,
                               np.equal(np_dem_ortho, rio_dem.nodata))

    # Generate Trend Grid
    np_trendgrid = np.subtract(ma_dem, ma_detrended)
    log.info("Trend surface created")

    # Average BF elev to constant raster in detrended space
    ave_bf_det_elev = sum(bf_elevations) / float(len(bf_elevations))
    ma_bf_detrended = np.full_like(ma_detrended,
                                   ave_bf_det_elev,
                                   dtype=np.float64)
    log.info("Detrended BF surface created")

    # add trend grid to BF detrended surface
    np_bf_surface = np.add(ma_bf_detrended, np_trendgrid)
    log.info("BF elevation surface created")

    # Generate depth and volume (volume = depth x 0.1 x 0.1 cell area)
    np_bf_depth_raw = np.subtract(np_bf_surface, ma_dem)
    np_bf_depth = np.multiply(np.greater(np_bf_depth_raw, 0), np_bf_depth_raw)
    np_bf_volume = np.multiply(np_bf_depth, 0.1 * 0.1)
    log.info("BF Depth surface created")

    ma_bf_depth = np.ma.MaskedArray(np_bf_depth, np.equal(
        np_bf_depth,
        -0.0))  # -0.0 values were getting included in the mean calculation

    # Equivalent of ArcGIS ZonalStatisticsAsTable metrics:
    #   BFVol       = sum of bankfull depth values x cell area
    #   DepthBF_Max = max of bankfull depth values
    #   DepthBF_Avg = mean of bankfull depth values
    bf_volume = np.nansum(np_bf_volume)
    bf_depth_max = np.nanmax(ma_bf_depth)
    bf_depth_mean = np.nanmean(ma_bf_depth)
    log.info("BF metrics calculated")

    results = {
        "Volume": bf_volume,
        "Depth": {
            "Max": bf_depth_max,
            "Mean": bf_depth_mean
        }
    }

    return results
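
# --- Usage sketch (hypothetical paths) --------------------------------------
# BankfullMetrics expects matching 0.1-cell DEM/DetrendedDEM rasters plus a
# topo-points shapefile whose 'Code'/'code' field marks bankfull points 'bf':
# metrics = BankfullMetrics('DEM.tif', 'DetrendedDEM.tif', 'Topo_Points.shp')
# print('BFVol={Volume} DepthMax={Depth[Max]} DepthMean={Depth[Mean]}'.format(**metrics))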
def generate_substrate_raster(topo_project_folder,
                              out_path,
                              di_values,
                              dict_ocular_values,
                              out_channel_value=4000.0):
    """Generate Substrate Raster from Channel units and ocular substrate estimates for each di value provided

    :param str topo_project_folder: folder source of the topo project
    :param str out_path: path for outputs
    :param list di_values: list of int percentile values for roughness calculation
    :param dict dict_ocular_values: dictionary of ocular estimates of grain size values
    :param float out_channel_value: roughness value to use for out of channel areas, default = 4000
    :return: 0 for success
    """

    # Load Topo Project
    log = Logger("SubstrateRaster")
    log.info("topo_project_folder: {}".format(str(topo_project_folder)))
    log.info("outputPath: {}".format(str(out_path)))
    log.info("D Values: {}".format(str(di_values)))
    project = topoproject.TopoProject(topo_project_folder)
    topo_rs_project = riverscapes.Project(
        os.path.join(topo_project_folder, "project.rs.xml"))
    log.info("Topo project loaded")

    # Initialize Riverscapes Project
    rsproject = riverscapes.Project()
    rsproject.create("Substrate", "Substrate", __version__)
    for tagname, tags in {
            "Site": ["Site", "SiteName"],
            "Visit": ["Visit", "VisitID"],
            "Year": ["Year", "FieldSeason"],
            "Watershed": ["Watershed", "Watershed"]
    }.items():
        if tags[0] in topo_rs_project.ProjectMetadata or tags[1] in topo_rs_project.ProjectMetadata:
            rsproject.addProjectMetadata(
                tagname,
                topo_rs_project.ProjectMetadata[tags[0]]
                if tags[0] in topo_rs_project.ProjectMetadata
                else topo_rs_project.ProjectMetadata[tags[1]])
        else:
            raise DataException("Missing project metadata")

    # 1. Calculate a roughness (grain-size) value per channel unit for each di
    dict_di_roughness_values = {}
    list_keep_units = []
    for di in di_values:
        dict_units = dict_ocular_by_unit(dict_ocular_values)
        dict_roughness_values = {}
        for unitid, dict_unit in dict_units.items():
            if all(dict_unit[key] is not None for key in [
                    "Bedrock", "Boulders", "Cobbles", "CourseGravel",
                    "FineGravel", "Fines", "Sand"
            ]):
                dict_roughness_values[int(unitid)] = calculate_grain_size(
                    dict_unit, di)
                if unitid not in list_keep_units:
                    list_keep_units.append(unitid)
            else:
                log.warning(
                    "Missing Channel Unit Substrate Values for Unit {}.".
                    format(str(unitid)))

        dict_roughness_values[0] = float(out_channel_value)  # Out of Channel "UnitNumber" == 0
        dict_di_roughness_values[di] = pandas.DataFrame(
            list(dict_roughness_values.items()),
            index=list(dict_roughness_values.keys()),
            columns=["UnitNumber", "Roughness"])
        log.info("Calculated Roughness Values for D{}".format(str(di)))

    # 2. Spread the channel Unit areas
    gdf_expanded_channel_units = expand_polygons(
        project.getpath("ChannelUnits"),
        project.getpath("BankfullExtent"),
        keep_units=list_keep_units)
    log.info("Channel Units expanded to Bankfull Area")

    # 3. Add DEM area
    gdf_demextent = geopandas.GeoDataFrame.from_features(
        geopandas.GeoSeries(get_data_polygon(project.getpath("DEM"))))
    if not all(gdf_demextent.geometry.is_valid):
        gdf_demextent.geometry = gdf_demextent.geometry.buffer(0)
        log.info("Fix invalid geoms for DEM Extent")
    gdf_demextent["UnitNumber"] = 0
    gdf_in_channel_union = geopandas.GeoDataFrame.from_features(
        geopandas.GeoSeries(gdf_expanded_channel_units.unary_union.buffer(0)))
    gdf_out_of_channel = geopandas.overlay(gdf_demextent, gdf_in_channel_union,
                                           "difference")
    gdf_full_polygons = gdf_expanded_channel_units.append(gdf_out_of_channel)
    log.info("Out of Channel Area generated")

    for di, df_roughness_values in dict_di_roughness_values.items():
        # 4. Join roughness values to the channel unit polygons
        gdf_full_polygons_merged = gdf_full_polygons.merge(df_roughness_values,
                                                           on="UnitNumber")
        gdf_final_polys = gdf_full_polygons_merged.rename(
            columns={"Roughness_y": "Roughness"})
        gdf_final_polys.drop(
            [col for col in gdf_final_polys.columns
             if col not in ["UnitNumber", "Roughness", "geometry"]],
            axis=1,
            inplace=True)
        log.info("Roughness Values added to Channel Units for D{}".format(
            str(di)))

        # 5. Rasterize Polygons
        raster_substrate = path.join(out_path,
                                     "substrate_D{}.tif".format(str(di)))
        shp_substrate = path.join(out_path,
                                  "substrate_D{}.shp".format(str(di)))
        gdf_final_polys.to_file(shp_substrate)
        log.info("Saved Substrate Shapefile: {}".format(shp_substrate))
        rasterize_polygons(shp_substrate, project.getpath("DEM"),
                           raster_substrate, "Roughness")
        log.info("Created Substrate Raster: {}".format(raster_substrate))

        # Add Realization to Riverscapes
        realization = riverscapes.Realization("Substrate")
        realization.name = "Substrate_D{}".format(str(di))
        realization.productVersion = __version__
        ds_shapefile = riverscapes.Dataset().create(
            "Substrate_Shapefile", "substrate_D{}.shp".format(str(di)))
        ds_raster = riverscapes.Dataset().create(
            "Substrate_Raster", "substrate_D{}.tif".format(str(di)))
        ds_shapefile.metadata["D_Value"] = str(di)
        ds_raster.metadata["D_Value"] = str(di)
        ds_shapefile.id = "substrate_shapefile_d{}".format(str(di))
        ds_raster.id = "substrate_shapefile_d{}".format(str(di))
        realization.outputs[ds_shapefile.name] = ds_shapefile
        realization.outputs[ds_raster.name] = ds_raster
        rsproject.addRealization(realization)

    # Write Riverscapes Project.
    rsprojectxml = os.path.join(out_path, "project.rs.xml")
    rsproject.writeProjectXML(rsprojectxml)
    log.info("Riverscapes Project file saved: {}".format(rsprojectxml))

    return 0
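
# --- Usage sketch (hypothetical inputs) -------------------------------------
# Illustrative call; dict_ocular_values would normally come from the CHaMP
# ocular-substrate data source, so only its role is hinted at here.
# generate_substrate_raster(
#     '/data/topo/VISIT_1234',     # topo project folder
#     '/data/outputs/VISIT_1234',  # output folder
#     [50, 84],                    # D percentile values
#     ocular_values,               # ocular estimates keyed by channel unit (not shown)
#     out_channel_value=4000.0)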
def champ_topo_checker(workbench, folder):

    log = Logger('Topo Checker')
    log.setup(logPath=os.path.join(
        folder,
        datetime.now().strftime("%Y%m%d-%H%M%S") + '_topo_checker.log'))

    dbCon = sqlite3.connect(workbench)
    dbCurs = dbCon.cursor()
    dbCurs.execute(
        'SELECT WatershedName, VisitYear, SiteName, VisitID' +
        ' FROM vwVisits WHERE ProgramID = 1 AND ProtocolID IN (2030, 416, 806, 1966, 2020, 1955, 1880, 10036, 9999)'
    )

    file_exists = 0
    file_zero = 0
    file_download = []
    file_errors = []

    for watershed, visit_year, site, visitID in dbCurs.fetchall():

        topo_path = os.path.join(folder, str(visit_year),
                                 watershed.replace(' ', ''), site,
                                 'VISIT_{}'.format(visitID), 'Field Folders',
                                 'Topo', 'TopoData.zip')

        download_needed = False
        if os.path.isfile(topo_path):
            file_exists += 1

            if os.stat(topo_path).st_size == 0:
                file_zero += 1
                download_needed = True
        else:
            download_needed = True

        if not download_needed:
            continue

        file_download.append(topo_path)

        try:
            topoFieldFolders = APIGet(
                'visits/{}/fieldFolders/Topo'.format(visitID))
            file = next(file for file in topoFieldFolders['files']
                        if file['componentTypeID'] == 181)
            downloadUrl = file['downloadUrl']
        except Exception:
            log.warning('No topo data for visit information {}: {}'.format(
                visitID, topo_path))
            continue

        # Download the missing file to its destination
        if not os.path.isdir(os.path.dirname(topo_path)):
            os.makedirs(os.path.dirname(topo_path))

        with open(topo_path, 'w+b') as f:
            response = APIGet(downloadUrl, absolute=True)
            f.write(response.content)
            log.info(topo_path)

        log.info('Downloaded {}'.format(topo_path))
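
# --- Usage sketch (hypothetical paths) --------------------------------------
# champ_topo_checker walks the workbench database and re-downloads any missing
# or zero-byte TopoData.zip under the destination folder:
# champ_topo_checker('/data/workbench.db', '/data/champ_topo')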