Code example #1
def populate_datamart(dbsession):
    # AdminLevelType
    for i in [
        ("COU", "Country", "Administrative division of level 0"),
        ("PRO", "Province", "Administrative division of level 1"),
        ("REG", "Region", "Administrative division of level 2"),
    ]:
        if AdminLevelType.get(dbsession, i[0]):
            continue
        r = AdminLevelType()
        r.mnemonic, r.title, r.description = i
        dbsession.add(r)

    # HazardLevel
    for i in [
        ("HIG", "High", 1),
        ("MED", "Medium", 2),
        ("LOW", "Low", 3),
        ("VLO", "Very low", 4),
    ]:
        if HazardLevel.get(dbsession, i[0]):
            continue
        r = HazardLevel()
        r.mnemonic, r.title, r.order = i
        dbsession.add(r)

    # HazardType
    for i in [
        ("FL", "River flood", 1),
        ("UF", "Urban flood", 2),
        ("CF", "Coastal flood", 3),
        ("EQ", "Earthquake", 4),
        ("LS", "Landslide", 5),
        ("TS", "Tsunami", 6),
        ("VA", "Volcano", 7),
        ("CY", "Cyclone", 8),
        ("DG", "Water scarcity", 9),
        ("EH", "Extreme heat", 10),
        ("WF", "Wildfire", 11),
        ("AP", "Air pollution", 12),
    ]:
        if HazardType.get(dbsession, i[0]):
            continue
        r = HazardType()
        r.mnemonic, r.title, r.order = i
        dbsession.add(r)

    # HazardCategory
    hazardlevels = dbsession.query(HazardLevel)
    for hazardtype in dbsession.query(HazardType):
        for hazardlevel in hazardlevels:
            if HazardCategory.get(dbsession, hazardtype, hazardlevel):
                continue
            r = HazardCategory()
            r.hazardtype = hazardtype
            r.hazardlevel = hazardlevel
            r.general_recommendation = "General recommendation for {} {}".format(
                hazardtype.mnemonic, hazardlevel.mnemonic)
            dbsession.add(r)
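
All of these snippets go through a `get` classmethod that looks a row up by its mnemonic. Its implementation is not shown on this page, so the following is only a minimal sketch of how such a helper could be written with SQLAlchemy, using the columns visible in populate_datamart above (the table name is hypothetical and the real thinkhazard models may differ):

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class HazardLevel(Base):
    __tablename__ = "enum_hazardlevel"  # hypothetical table name

    id = Column(Integer, primary_key=True)
    mnemonic = Column(String, unique=True)
    title = Column(String)
    order = Column(Integer)

    @classmethod
    def get(cls, dbsession, mnemonic):
        # Look the level up by its mnemonic ("HIG", "MED", ...); None if absent.
        return dbsession.query(cls).filter(cls.mnemonic == mnemonic).one_or_none()
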
Code example #2
def hazardcategories(request):
    hazard_types = request.dbsession.query(HazardType).order_by(
        HazardType.order)
    hazard_levels = []
    for level in ["HIG", "MED", "LOW", "VLO"]:
        hazard_levels.append(HazardLevel.get(request.dbsession, level))
    return {"hazard_types": hazard_types, "hazard_levels": hazard_levels}
Code example #3
File: test_completing.py  Project: GFDRR/thinkhazard
    def test_not_corresponding_rasters(self, open_mock):
        """Difference in origin, resolution or size must not complete"""

        hazardset_id = "notpreprocessed"
        hazardtype = HazardType.get(DBSession, "FL")

        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id=hazardset_id,
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        for level in ["HIG", "MED", "LOW"]:
            layer = Layer(
                hazardlevel=HazardLevel.get(DBSession, level),
                mask=False,
                return_period=None,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )
            hazardset.layers.append(layer)

        mask_layer = Layer(
            hazardlevel=None,
            mask=True,
            return_period=None,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(DBSession),
            download_url="test",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        )
        hazardset.layers.append(mask_layer)

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(
            hazardset.complete_error,
            "All layers should have the same origin, resolution and size",
        )
        self.assertEqual(hazardset.complete, False)
Code example #4
def technical_rec_process(request, obj):
    if request.method == "GET":
        hazard_types = request.dbsession.query(HazardType).order_by(
            HazardType.order)
        hazard_levels = []
        for level in ["HIG", "MED", "LOW", "VLO"]:
            hazard_levels.append(HazardLevel.get(request.dbsession, level))
        if obj.id is None:
            action = request.route_url("admin_technical_rec_new")
        else:
            action = request.route_url("admin_technical_rec_edit", id=obj.id)
        return {
            "obj": obj,
            "action": action,
            "hazard_types": hazard_types,
            "hazard_levels": hazard_levels,
        }

    if request.method == "POST":
        obj.text = request.POST.get("text")
        obj.detail = request.POST.get("detail")
        if inspect(obj).transient:
            request.dbsession.add(obj)

        associations = request.POST.getall("associations")
        records = obj.hazardcategory_associations

        # Remove unchecked ones
        for record in records:
            if record.hazardcategory.name() not in associations:
                request.dbsession.delete(record)

        # Add new ones
        for association in associations:
            hazardtype, hazardlevel = association.split(" - ")
            if not obj.has_association(hazardtype, hazardlevel):
                hazardcategory = HazardCategory.get(request.dbsession,
                                                    hazardtype, hazardlevel)
                order = (
                    request.dbsession.query(
                        func.coalesce(func.cast(func.max(HcTr.order), Integer), 0)
                    )
                    .select_from(HcTr)
                    .filter(HcTr.hazardcategory_id == hazardcategory.id)
                    .first()[0]
                    + 1
                )

                record = HcTr(hazardcategory=hazardcategory, order=order)
                obj.hazardcategory_associations.append(record)

        request.dbsession.flush()
        return HTTPFound(request.route_url("admin_technical_rec"))
Code example #5
def make_layers():
    layers = []
    for level in ("HIG", "MED", "LOW"):
        layer = Layer()
        layer.hazardlevel = HazardLevel.get(level)
        layer.return_period = 1
        layer.hazardunit = "m"
        layer.data_lastupdated_date = datetime.now()
        layer.metadata_lastupdated_date = datetime.now()
        layer.geonode_id = new_geonode_id()
        layer.download_url = "http://something"
        layer.calculation_method_quality = 5
        layer.scientific_quality = 1
        layer.local = False
        layer.downloaded = True
        layers.append(layer)
    return layers
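
The fixtures above (and code examples #3, #7 and #10) also call a new_geonode_id helper to obtain an unused layer identifier, here without a session argument and elsewhere with DBSession. Its implementation is not part of this page; a plausible sketch, assuming it simply increments the highest geonode_id already stored on the Layer model, would be:

from sqlalchemy import func


def new_geonode_id(dbsession):
    # Hypothetical helper: return the next unused geonode_id,
    # starting at 1 when the Layer table is empty.
    max_id = dbsession.query(func.max(Layer.geonode_id)).scalar()
    return (max_id or 0) + 1
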
Code example #6
File: processing.py  Project: GFDRR/thinkhazard
    def preprocessed_hazardlevel(self, geometry):
        hazardlevel = None
        reader = self.readers[0]

        for polygon in geometry.geoms:
            if not polygon.intersects(self.bbox):
                continue

            window = reader.window(*polygon.bounds)
            data = reader.read(1, window=window, masked=True)

            if data.shape[0] * data.shape[1] == 0:
                continue
            if data.mask.all():
                continue

            geometry_mask = features.geometry_mask(
                [polygon],
                out_shape=data.shape,
                transform=reader.window_transform(window),
                all_touched=True,
            )

            data.mask = data.mask | geometry_mask
            del geometry_mask

            if data.mask.all():
                continue

            for level in ("HIG", "MED", "LOW", "VLO"):
                level_obj = HazardLevel.get(self.dbsession, level)
                if level_obj <= hazardlevel:
                    break

                if level in self.type_settings["values"]:
                    values = self.type_settings["values"][level]
                    for value in values:
                        if value in data:
                            hazardlevel = level_obj
                            break

        return hazardlevel
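
The `if level_obj <= hazardlevel: break` test above compares a HazardLevel instance with the current result, which starts out as None, so the model presumably defines rich comparisons on top of its order column (HIG=1 ... VLO=4, lower order meaning more severe). A sketch that is consistent with this loop, though not necessarily the project's actual operators, is:

class HazardLevel(Base):
    ...  # columns as in the sketch after code example #1

    def __le__(self, other):
        # None stands for "no level found yet": nothing compares <= None,
        # so the loop keeps scanning until a level has been assigned.
        if other is None:
            return False
        # A lower "order" means a more severe level, so "no more severe than
        # the level already found" translates to a greater-or-equal order.
        return self.order >= other.order
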
Code example #7
File: test_completing.py  Project: GFDRR/thinkhazard
    def test_open_exception(self, open_mock):
        """Test handling of open exception"""

        hazardset_id = "notpreprocessed"
        hazardtype = HazardType.get(DBSession, "EQ")

        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id=hazardset_id,
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        for level in ["HIG", "MED", "LOW"]:
            layer = Layer(
                hazardlevel=HazardLevel.get(DBSession, level),
                mask=False,
                return_period=None,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )
            hazardset.layers.append(layer)

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(hazardset.complete_error,
                         "Error opening layer notpreprocessed")
        self.assertEqual(hazardset.complete, False)
Code example #8
    def select_levels(self):
        for hazardset in self.dbsession.query(HazardSet):
            type_settings = self.settings["hazard_types"][hazardset.hazardtype.mnemonic]
            preprocessed = "values" in type_settings
            if preprocessed:
                continue

            for level_mne in ('HIG', 'MED', 'LOW'):
                level = HazardLevel.get(self.dbsession, level_mne)
                self.select_layer_for_level(hazardset, level)

            if type_settings.get("mask_return_period"):
                self.select_mask_layer(hazardset)

            # Purge superseded layers
            self.dbsession.query(Layer) \
                .filter(Layer.hazardset_id == hazardset.id) \
                .filter(Layer.hazardlevel_id.is_(None)) \
                .filter(Layer.mask.is_(False)) \
                .delete()
Code example #9
    def harvest_layer(self, object):
        logger.info("Harvesting layer {id} - {title}".format(**object))
        title = object["title"]

        # we need to retrieve more information on this layer
        # since the regions array is not advertised by the main
        # layers listing from GeoNode
        o = self.fetch("api/layers/{id}/".format(**object))

        if "regions" not in list(o.keys()):
            warning(object, 'Attribute "regions" is missing')

        region_ids = []
        for r in o.get("regions", []):
            # r is like "/api/regions/1/"
            region_ids.append(r.split("/")[3])

        if len(region_ids) == 0:
            regions = []
        else:
            regions = self.dbsession.query(Region).filter(Region.id.in_(region_ids)).all()

        hazardset_id = o['hazard_set']
        if not hazardset_id:
            logger.info("  hazard_set is empty")
            return False

        # FIXME: temporary override
        if hazardset_id in excluded_hazardsets:
            logger.info("  hazard_set {} is excluded, skipping")
            return False

        hazardtype = self.check_hazard_type(o)
        if not hazardtype:
            return False

        type_settings = self.settings["hazard_types"][hazardtype.mnemonic]
        preprocessed = "values" in type_settings

        local = "GLOBAL" not in hazardset_id

        mask = False
        if preprocessed is True:
            hazardlevel = None
            # harvest hazard_unit for preprocessed layers
            hazard_unit = o['hazard_unit']
            if o['hazard_period']:
                logger.info('  return period found in preprocessed hazardset')
                return False
            hazard_period = None

        else:
            try:
                hazard_period = int(o['hazard_period'])
            except (KeyError, TypeError, ValueError):
                hazard_period = None
            if hazard_period is None:
                logger.info('  no return period found')
                return False
            hazardlevel = None
            for level in ("LOW", "MED", "HIG"):
                if between(hazard_period, type_settings["return_periods"][level]):
                    hazardlevel = HazardLevel.get(self.dbsession, level)
                    break

            if "mask_return_period" in type_settings and between(
                hazard_period, type_settings["mask_return_period"]
            ):
                mask = True

            if hazardlevel is None and not mask:
                logger.info("  No corresponding hazard_level")
                return False

            hazard_unit = o['hazard_unit']
            if hazard_unit == '':
                logger.info('  hazard_unit is empty')
                return False

        if o['srid'] != 'EPSG:4326':
            logger.info('  srid is different from "EPSG:4326"')
            return False

        data_update_date = parse_date(o['data_update_date'])
        if not data_update_date:
            warning(o, 'data_update_date is empty: set to {}'.format(datetime.fromtimestamp(0)))
            # We use a very old date for good comparison in decision tree
            data_update_date = datetime.fromtimestamp(0)

        metadata_update_date = parse_date(o['metadata_update_date'])
        if not metadata_update_date:
            warning(o, 'metadata_update_date is empty: set to {}'.format(datetime.fromtimestamp(0)))
            # We use a very old date for good comparison in decision tree
            metadata_update_date = datetime.fromtimestamp(0)

        calculation_method_quality = o['calculation_method_quality']
        if not calculation_method_quality:
            warning(o, 'calculation_method_quality is empty: skip layer')
            return False
        calculation_method_quality = int(float(calculation_method_quality))

        scientific_quality = o['scientific_quality']
        if not scientific_quality:
            warning(o, 'scientific_quality is empty')
            return False
        scientific_quality = int(float(scientific_quality))

        download_url = o['download_url']
        if not download_url:
            warning(o, 'download_url is empty')
            return False

        hazardset = self.dbsession.query(HazardSet).get(hazardset_id)

        # Create hazardset before layer
        if hazardset is None:
            logger.info("  Create new hazardset {}".format(hazardset_id))
            hazardset = HazardSet()
            hazardset.id = hazardset_id
            hazardset.hazardtype = hazardtype
            self.dbsession.add(hazardset)

        # get detail_url and owner_organization from last updated layer
        geonode = self.settings["geonode"]
        geonode_base_url = geonode["url"]

        if o['detail_url'] and not mask:
            hazardset.detail_url = geonode_base_url + o['detail_url']
        if o['owner']['organization'] and not mask:
            hazardset.owner_organization = o['owner']['organization']
        if not mask:
            hazardset.regions = regions

        layer = self.dbsession.query(Layer).get(o['id'])
        if layer is None:
            logger.info("  Create new Layer {}".format(title))
            layer = Layer()
            layer.geonode_id = o['id']
            layer.hazardset = hazardset
            layer.mask = False

        else:
            # If data has changed
            if (
                layer.data_lastupdated_date != data_update_date
                or layer.download_url != download_url
            ):
                logger.info("  Invalidate downloaded")
                layer.downloaded = False
                hazardset.complete = False
                hazardset.processed = None
                # Remove file from cache
                layer.download_url = download_url
                path = self.layer_path(layer)
                if os.path.isfile(path):
                    os.unlink(path)

            # Some hazardset fields are calculated during completing
            if (
                layer.calculation_method_quality != calculation_method_quality
                or layer.scientific_quality != scientific_quality
                or layer.metadata_lastupdated_date != metadata_update_date
            ):
                logger.info("  Invalidate complete")
                hazardset.complete = False

            # Some fields invalidate outputs
            if layer.hazardunit != hazard_unit:
                logger.info("  Invalidate processed")
                hazardset.processed = None

        typename = o.get("typename", None)
        if typename is None:
            warning(o, 'Attribute "typename" is missing')
        layer.typename = typename

        layer.return_period = hazard_period
        layer.hazardunit = hazard_unit
        layer.data_lastupdated_date = data_update_date
        layer.metadata_lastupdated_date = metadata_update_date
        layer.download_url = download_url

        # TODO: retrieve quality attributes
        layer.calculation_method_quality = calculation_method_quality
        layer.scientific_quality = scientific_quality
        layer.local = local

        layer.set_harvested(True)
        self.dbsession.flush()
        return True
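
harvest_layer relies on a between helper to decide whether a layer's return period falls into a level's configured range. That helper is not included on this page; given that the settings hold either a [min, max] pair or a single return period (see populate_notpreprocessed in code example #10), a hedged sketch could look like this:

def between(value, return_periods):
    # Hypothetical helper: `return_periods` is either a [min, max] pair
    # or a single return period, mirroring the two shapes handled in
    # code example #10.
    if isinstance(return_periods, (list, tuple)):
        low, high = return_periods
        return low <= value <= high
    return value == return_periods
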
Code example #10
def populate_notpreprocessed(type, unit):
    hazardset_id = "notpreprocessed"
    hazardtype = HazardType.get(DBSession, type)
    hazardtype_settings = settings["hazard_types"][hazardtype.mnemonic]

    regions = DBSession.query(Region).all()

    print("Populating hazardset {}".format(hazardset_id))
    hazardset = HazardSet(
        id=hazardset_id,
        hazardtype=hazardtype,
        local=False,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        regions=regions,
    )
    DBSession.add(hazardset)

    return_periods = hazardtype_settings["return_periods"]

    for level in ("HIG", "MED", "LOW"):
        hazardlevel = HazardLevel.get(DBSession, level)
        level_return_periods = return_periods[level]
        if isinstance(level_return_periods, list):
            return_period = level_return_periods[0]
        else:
            return_period = level_return_periods

        layer = Layer(
            hazardlevel=hazardlevel,
            mask=False,
            return_period=return_period,
            hazardunit=unit,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(DBSession),
            download_url="test",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        )
        hazardset.layers.append(layer)

    mask_return_periods = hazardtype_settings["mask_return_period"]
    if isinstance(mask_return_periods, list):
        mask_return_period = mask_return_periods[0]
    else:
        mask_return_period = mask_return_periods
    layer = Layer(
        hazardlevel=None,
        mask=True,
        return_period=mask_return_period,
        hazardunit=unit,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        geonode_id=new_geonode_id(DBSession),
        download_url="test",
        calculation_method_quality=5,
        scientific_quality=1,
        local=False,
        downloaded=True,
    )
    hazardset.layers.append(layer)

    hazardset.complete = True
    DBSession.flush()
Code example #11
File: processing.py  Project: GFDRR/thinkhazard
    def process_hazardset(self, hazardset_id):
        hazardset = self.dbsession.query(HazardSet).get(hazardset_id)
        if hazardset is None:
            raise ProcessException("Hazardset {} does not exist.".format(hazardset_id))

        chrono = datetime.datetime.now()

        if hazardset.processed is not None:
            if self.force:
                hazardset.processed = None
            else:
                raise ProcessException(
                    "Hazardset {} has already been processed.".format(hazardset.id)
                )

        logger.info("  Cleaning previous outputs")
        self.dbsession.query(Output).filter(Output.hazardset_id == hazardset.id).delete()
        self.dbsession.flush()

        self.type_settings = self.settings["hazard_types"][
            hazardset.hazardtype.mnemonic
        ]

        hazardset.processing_error = None

        with rasterio.Env():
            try:
                logger.info("  Opening raster files")
                # Open rasters
                self.layers = {}
                self.readers = {}
                if "values" in list(self.type_settings.keys()):
                    # preprocessed layer
                    layer = (
                        self.dbsession.query(Layer)
                        .filter(Layer.hazardset_id == hazardset.id)
                        .one()
                    )
                    reader = rasterio.open(self.layer_path(layer))

                    self.layers[0] = layer
                    self.readers[0] = reader

                else:
                    for level in ("HIG", "MED", "LOW"):
                        hazardlevel = HazardLevel.get(self.dbsession, level)
                        layer = (
                            self.dbsession.query(Layer)
                            .filter(Layer.hazardset_id == hazardset.id)
                            .filter(Layer.hazardlevel_id == hazardlevel.id)
                            .one()
                        )
                        reader = rasterio.open(self.layer_path(layer))

                        self.layers[level] = layer
                        self.readers[level] = reader
                    if "mask_return_period" in self.type_settings:
                        layer = (
                            self.dbsession.query(Layer)
                            .filter(Layer.hazardset_id == hazardset.id)
                            .filter(Layer.mask.is_(True))
                            .one()
                        )
                        reader = rasterio.open(self.layer_path(layer))
                        self.layers["mask"] = layer
                        self.readers["mask"] = reader

                outputs, error = self.create_outputs(hazardset)
                if error:
                    hazardset.processing_error = error
                if outputs:
                    self.dbsession.add_all(outputs)
                else:
                    hazardset.processing_error = 'No output generated'

            finally:
                logger.info("  Closing raster files")
                for key, reader in self.readers.items():
                    if reader and not reader.closed:
                        reader.close()

        if not hazardset.processing_error:
            hazardset.processed = datetime.datetime.now()
            logger.info(
                "  Successfully processed {},"
                " {} outputs generated in {}".format(
                    hazardset.id, len(outputs), datetime.datetime.now() - chrono
                )
            )
        else:
            logger.info('  Process of {} failed in {}, {}'
                        .format(hazardset.id,
                                datetime.datetime.now() - chrono,
                                hazardset.processing_error))

        self.dbsession.flush()
Code example #12
File: processing.py  Project: GFDRR/thinkhazard
    def notpreprocessed_hazardlevel(self, hazardtype, geometry):
        level_vlo = HazardLevel.get(self.dbsession, "VLO")

        hazardlevel = None

        # Create some optimization caches
        polygons = {}
        bboxes = {}
        geometry_masks = {}  # Storage for the geometry masks

        inverted_comparison = (
            "inverted_comparison" in self.type_settings
            and self.type_settings["inverted_comparison"]
        )

        for level in ("HIG", "MED", "LOW"):
            layer = self.layers[level]
            reader = self.readers[level]

            threshold = self.get_threshold(
                hazardtype, layer.local, layer.hazardlevel.mnemonic, layer.hazardunit
            )

            for i in range(0, len(geometry.geoms)):
                if i not in polygons:
                    polygon = geometry.geoms[i]
                    bbox = polygon.bounds
                    polygons[i] = polygon
                    bboxes[i] = bbox
                else:
                    polygon = polygons[i]
                    bbox = bboxes[i]

                if not polygon.intersects(self.bbox):
                    continue

                window = reader.window(*bbox)

                # data: MaskedArray
                data = reader.read(1, window=window, masked=True)

                # check if data is empty (cols x rows)
                if data.shape[0] * data.shape[1] == 0:
                    continue
                # all data is masked which means that all is NODATA
                if data.mask.all():
                    continue

                if inverted_comparison:
                    data = data < threshold
                else:
                    data = data > threshold

                # some hazard types have a specific mask layer with very low
                # return period which should be used as mask for other layers
                # for example River Flood
                if "mask_return_period" in self.type_settings:
                    mask_layer = self.layers["mask"]
                    mask_reader = self.readers["mask"]

                    mask_threshold = self.get_threshold(
                        hazardtype, mask_layer.local, "MASK", mask_layer.hazardunit
                    )

                    mask_window = mask_reader.window(*bbox)
                    mask = self.readers["mask"].read(1, window=mask_window, masked=True)
                    if inverted_comparison:
                        mask = mask < mask_threshold
                    else:
                        mask = mask > mask_threshold

                    # apply the specific layer mask
                    data.mask = ma.getmaskarray(data) | mask.filled(False)
                    del mask
                    if data.mask.all():
                        continue

                if i in geometry_masks:
                    geometry_mask = geometry_masks[i]
                else:
                    geometry_mask = features.geometry_mask(
                        [polygon],
                        out_shape=data.shape,
                        transform=reader.window_transform(window),
                        all_touched=True,
                    )
                    geometry_masks[i] = geometry_mask

                data.mask = ma.getmaskarray(data) | geometry_mask
                del geometry_mask

                # If at least one value is True this means that there's
                # at least one raw value > threshold
                if data.any():
                    hazardlevel = layer.hazardlevel
                    break

                # check one last time if the array is filled with NODATA
                if data.mask.all():
                    continue

                # Here we have at least one value lower than the current level
                # threshold
                if hazardlevel is None:
                    hazardlevel = level_vlo

            # we got a value for the level, no need to go further, this will be
            # the highest one
            if hazardlevel == layer.hazardlevel:
                break

        return hazardlevel
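
notpreprocessed_hazardlevel also depends on a get_threshold lookup whose code and settings layout are not shown here. Purely as a hypothetical illustration, assuming thresholds keyed by scope (global or local), hazard level mnemonic (or "MASK") and unit, such a lookup might be:

    def get_threshold(self, hazardtype, local, level, unit):
        # Hypothetical settings layout (not confirmed by these snippets):
        #   settings["hazard_types"][<type>]["thresholds"]
        #     -> {"local" | "global": {<level mnemonic or "MASK">: {<unit>: value}}}
        thresholds = self.settings["hazard_types"][hazardtype]["thresholds"]
        scope = "local" if local else "global"
        return thresholds[scope][level][unit]
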
Code example #13
def populate_processing():
    hazardtype_eq = HazardType.get("EQ")
    hazardtype_fl = HazardType.get("FL")

    hazardset_id = "hazardset1"
    print("Populating hazardset {}".format(hazardset_id))
    hazardset1 = HazardSet()
    hazardset1.id = hazardset_id
    hazardset1.hazardtype = hazardtype_eq
    hazardset1.local = False
    hazardset1.calculation_method_quality = 0
    hazardset1.data_lastupdated_date = datetime.now()
    hazardset1.metadata_lastupdated_date = datetime.now()
    hazardset1.complete = True
    hazardset1.layers.extend(make_layers())
    DBSession.add(hazardset1)

    hazardset_id = "hazardset1b"
    print("Populating hazardset {}".format(hazardset_id))
    hazardset1b = HazardSet()
    hazardset1b.id = hazardset_id
    hazardset1b.hazardtype = hazardtype_fl
    hazardset1b.local = True
    hazardset1b.calculation_method_quality = 3
    hazardset1b.data_lastupdated_date = datetime.now()
    hazardset1b.metadata_lastupdated_date = datetime.now()
    hazardset1b.complete = True
    hazardset1b.layers.extend(make_layers())
    DBSession.add(hazardset1b)

    # create hazardsets 2, 3 ... 5
    hazardset_id = "hazardset2"
    # calculation_method_quality = 1 / scientific_quality = 0
    print("Populating hazardset {}".format(hazardset_id))
    hazardset2 = HazardSet()
    hazardset2.id = hazardset_id
    hazardset2.hazardtype = hazardtype_eq
    hazardset2.local = True
    hazardset2.calculation_method_quality = 1
    hazardset2.scientific_quality = 0
    hazardset2.data_lastupdated_date = datetime.now()
    hazardset2.metadata_lastupdated_date = datetime.now()
    hazardset2.complete = True
    hazardset2.layers.extend(make_layers())
    DBSession.add(hazardset2)

    hazardset_id = "hazardset3"
    # date = 2015-01-01 / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset3 = HazardSet()
    hazardset3.id = hazardset_id
    hazardset3.hazardtype = hazardtype_eq
    hazardset3.local = False
    hazardset3.calculation_method_quality = 1
    hazardset3.scientific_quality = 2
    hazardset3.data_lastupdated_date = datetime(2015, 1, 1, 0, 0)
    hazardset3.metadata_lastupdated_date = datetime.now()
    hazardset3.complete = True
    hazardset3.layers.extend(make_layers())
    DBSession.add(hazardset3)

    hazardset_id = "hazardset4"
    # date = 2015-01-01 / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset4 = HazardSet()
    hazardset4.id = hazardset_id
    hazardset4.hazardtype = hazardtype_eq
    hazardset4.local = True
    hazardset4.calculation_method_quality = 1
    hazardset4.scientific_quality = 2
    hazardset4.data_lastupdated_date = datetime(2015, 1, 1, 0, 0)
    hazardset4.metadata_lastupdated_date = datetime.now()
    hazardset4.complete = True
    hazardset4.layers.extend(make_layers())
    DBSession.add(hazardset4)

    hazardset_id = "hazardset5"
    # date = now() / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset5 = HazardSet()
    hazardset5.id = hazardset_id
    hazardset5.hazardtype = hazardtype_eq
    hazardset5.local = True
    hazardset5.calculation_method_quality = 1
    hazardset5.scientific_quality = 2
    hazardset5.data_lastupdated_date = datetime.now()
    hazardset5.metadata_lastupdated_date = datetime.now()
    hazardset5.complete = True
    hazardset5.layers.extend(make_layers())
    DBSession.add(hazardset5)

    hazardset_id = "hazardset6"
    # date = now() / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset6 = HazardSet()
    hazardset6.id = hazardset_id
    hazardset6.hazardtype = hazardtype_eq
    hazardset6.local = False
    hazardset6.calculation_method_quality = 1
    hazardset6.scientific_quality = 2
    hazardset6.data_lastupdated_date = datetime.now()
    hazardset6.metadata_lastupdated_date = datetime.now()
    hazardset6.complete = True
    hazardset6.layers.extend(make_layers())
    DBSession.add(hazardset6)

    # populate output table

    # admin div (code 30) has only one hazardset for EQ
    admin30 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 30).one())
    # => test outcome = hazardset1
    output1 = Output()
    output1.hazardset = hazardset1
    output1.administrativedivision = admin30
    output1.hazardlevel = HazardLevel.get("HIG")
    DBSession.add(output1)

    # admin div (code 30) also has another hazardset
    # but this one is for FL
    # => test outcome = hazardset1b
    output1b = Output()
    output1b.hazardset = hazardset1b
    output1b.administrativedivision = admin30
    output1b.hazardlevel = HazardLevel.get("NPR")
    DBSession.add(output1b)

    # admin div (code 31) has 2 hazardsets,
    # one with a higher calculation_method_quality
    # => test outcome = hazardset2
    admin31 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 31).one())
    output2 = Output()
    output2.hazardset = hazardset1  # calculation_method_quality = 0
    output2.administrativedivision = admin31
    output2.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output2)
    output3 = Output()
    output3.hazardset = hazardset2  # calculation_method_quality = 1
    output3.administrativedivision = admin31
    output3.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output3)

    # admin div (code 32) has 2 hazardsets,
    # both share the same calculation_method_quality,
    # one with a higher scientific_quality
    # => test outcome = hazardset3
    admin32 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 32).one())
    output4 = Output()
    output4.hazardset = hazardset2
    # calculation_method_quality = 1 / scientific_quality = 0
    output4.administrativedivision = admin32
    output4.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output4)
    output5 = Output()
    output5.hazardset = hazardset3
    # calculation_method_quality = 1 / scientific_quality = 2
    output5.administrativedivision = admin32
    output5.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output5)

    # admin div (code 33) has 2 hazardsets,
    # both share the same ratings, one is global, one local
    # => test outcome = hazardset4
    admin33 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 33).one())
    output6 = Output()
    output6.hazardset = hazardset3
    # global / calculation_method_quality = 1 / scientific_quality = 2
    output6.administrativedivision = admin33
    output6.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output6)
    output7 = Output()
    output7.hazardset = hazardset4
    # local / calculation_method_quality = 1 / scientific_quality = 2
    output7.administrativedivision = admin33
    output7.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output7)

    # admin div (code 34) has 2 hazardsets,
    # both share the same ratings, are local, one is more recent
    # => test outcome = hazardset5
    admin34 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 34).one())
    output8 = Output()
    output8.hazardset = hazardset4
    # date = 2015-01-01 / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    output8.administrativedivision = admin34
    output8.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output8)
    output9 = Output()
    output9.hazardset = hazardset5
    # date = now() / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    output9.administrativedivision = admin34
    output9.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output9)

    # admin div (code 35) has 2 hazardsets,
    # both share the same ratings, are global, one is more recent
    # => test outcome = hazardset6
    admin35 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 35).one())
    output10 = Output()
    output10.hazardset = hazardset3
    # date = 2015-01-01 / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    output10.administrativedivision = admin35
    output10.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output10)
    output11 = Output()
    output11.hazardset = hazardset6
    # date = now() / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    output11.administrativedivision = admin35
    output11.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output11)

    DBSession.flush()
Code example #14
    HazardSet,
    HazardType,
    Layer,
    Region,
    FurtherResource,
    ClimateChangeRecommendation,
    HazardTypeFurtherResourceAssociation,
    ClimateChangeRecAdministrativeDivisionAssociation as CcrAd,
    HazardCategoryAdministrativeDivisionAssociation,
    Contact,
    ContactAdministrativeDivisionHazardTypeAssociation as CAdHt,
)
from ..analytics import GoogleAnalytics

# An object for the "no data" category type.
_hazardlevel_nodata = HazardLevel()
_hazardlevel_nodata.mnemonic = "no-data"
_hazardlevel_nodata.title = "No Data"
_hazardlevel_nodata.description = "No data for this hazard type."
_hazardlevel_nodata.order = float("inf")


@view_config(route_name="report_overview", renderer="templates/report.jinja2")
@view_config(route_name="report_overview_slash",
             renderer="templates/report.jinja2")
@view_config(route_name="report", renderer="templates/report.jinja2")
@view_config(route_name="report_print", renderer="templates/pdf_report.jinja2")
def report(request):
    try:
        division_code = request.matchdict.get("divisioncode")
    except: