Exemplo n.º 1
0
    def test_not_corresponding_rasters(self, open_mock):
        """Difference in origin, resolution or size must not complete"""

        hazardset_id = "notpreprocessed"
        hazardtype = HazardType.get(DBSession, "FL")
        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id=hazardset_id,
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        def make_layer(hazardlevel, mask):
            # All layers share the same boilerplate; only level and mask vary.
            return Layer(
                hazardlevel=hazardlevel,
                mask=mask,
                return_period=None,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )

        # One regular layer per hazard level, plus one mask layer.
        for level in ("HIG", "MED", "LOW"):
            hazardset.layers.append(
                make_layer(HazardLevel.get(DBSession, level), False)
            )
        hazardset.layers.append(make_layer(None, True))

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(
            hazardset.complete_error,
            "All layers should have the same origin, resolution and size",
        )
        self.assertEqual(hazardset.complete, False)
Exemplo n.º 2
0
def populate_preprocessed(type):
    """Create and flush a minimal "preprocessed" hazardset of the given
    hazard type, holding a single level-less layer and marked complete."""
    hazardset_id = "preprocessed"
    hazardtype = HazardType.get(DBSession, type)
    regions = DBSession.query(Region).all()

    print(f"Populating hazardset {hazardset_id}")
    hazardset = HazardSet(
        id=hazardset_id,
        hazardtype=hazardtype,
        local=False,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        regions=regions,
    )
    DBSession.add(hazardset)

    # Preprocessed hazardsets carry a single layer with no hazard level
    # and no return period.
    hazardset.layers.append(
        Layer(
            hazardlevel=None,
            mask=False,
            return_period=None,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(DBSession),
            download_url="test",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        )
    )

    hazardset.complete = True
    DBSession.flush()
Exemplo n.º 3
0
    def test_open_exception(self, open_mock):
        """Test handling of open exception"""

        hazardset_id = "notpreprocessed"
        hazardtype = HazardType.get(DBSession, "EQ")
        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id=hazardset_id,
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        # One non-mask layer per hazard level, all sharing the same metadata.
        hazardset.layers.extend(
            Layer(
                hazardlevel=HazardLevel.get(DBSession, level),
                mask=False,
                return_period=None,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )
            for level in ("HIG", "MED", "LOW")
        )

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(
            hazardset.complete_error, "Error opening layer notpreprocessed"
        )
        self.assertEqual(hazardset.complete, False)
Exemplo n.º 4
0
    def test_complete_preprocessed(self, open_mock):
        """Test complete preprocessed hazardset"""

        hazardtype = HazardType.get(DBSession, "VA")
        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id="preprocessed",
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        # A preprocessed hazardset has a single layer without hazard level
        # or return period.
        hazardset.layers.append(
            Layer(
                hazardlevel=None,
                mask=False,
                return_period=None,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )
        )

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(hazardset.complete_error, None)
        self.assertEqual(hazardset.complete, True)
Exemplo n.º 5
0
    def harvest_layer(self, object):
        """Harvest one GeoNode layer into the local database.

        Fetches the full layer record from the GeoNode API, validates its
        metadata (hazard set, hazard type, return period, unit, srid, dates,
        quality attributes, download url), then creates or updates the
        corresponding ``HazardSet`` and ``Layer`` rows, invalidating cached
        downloads/completeness/outputs when relevant fields changed.

        :param object: summary dict from the GeoNode layers listing;
            must contain at least ``id`` and ``title``.
        :returns: True when the layer was harvested, False when it was
            skipped for any reason (logged).
        """
        logger.info("Harvesting layer {id} - {title}".format(**object))
        title = object["title"]

        # we need to retrieve more information on this layer
        # since the regions array is not advertised by the main
        # regions listing from GeoNode
        o = self.fetch("api/layers/{id}/".format(**object))

        if "regions" not in list(o.keys()):
            warning(object, 'Attribute "regions" is missing')

        region_ids = []
        for r in o.get("regions", []):
            # r is like "/api/regions/1/"
            region_ids.append(r.split("/")[3])

        if len(region_ids) == 0:
            regions = []
        else:
            regions = (
                self.dbsession.query(Region).filter(Region.id.in_(region_ids)).all()
            )

        hazardset_id = o["hazard_set"]
        if not hazardset_id:
            logger.info("  hazard_set is empty")
            return False

        # FIXME: temporary override
        if hazardset_id in excluded_hazardsets:
            # Bug fix: the "{}" placeholder was never substituted before,
            # so the log line printed literal braces.
            logger.info("  hazard_set {} is excluded, skipping".format(hazardset_id))
            return False

        hazardtype = self.check_hazard_type(o)
        if not hazardtype:
            return False

        type_settings = self.settings["hazard_types"][hazardtype.mnemonic]
        preprocessed = "values" in type_settings

        # Hazardsets whose id does not contain "GLOBAL" are region-local.
        local = "GLOBAL" not in hazardset_id

        mask = False
        if preprocessed is True:
            hazardlevel = None
            # harvest hazard_unit for preprocessed layers
            hazard_unit = o["hazard_unit"]
            if o["hazard_period"]:
                logger.info("  return period found in preprocessed hazardset")
                return False
            hazard_period = None

        else:
            try:
                hazard_period = int(o["hazard_period"])
            except (KeyError, TypeError, ValueError):
                # Missing or non-numeric period is treated as absent
                # (previously a bare except which also hid real errors).
                hazard_period = None
            if hazard_period is None:
                logger.info("  no return period found")
                return False
            hazardlevel = None
            for level in ("LOW", "MED", "HIG"):
                if between(hazard_period, type_settings["return_periods"][level]):
                    hazardlevel = HazardLevel.get(self.dbsession, level)
                    break

            if "mask_return_period" in type_settings and between(
                hazard_period, type_settings["mask_return_period"]
            ):
                mask = True

            if hazardlevel is None and not mask:
                logger.info("  No corresponding hazard_level")
                return False

            hazard_unit = o["hazard_unit"]
            if hazard_unit == "":
                logger.info("  hazard_unit is empty")
                return False

        if o["srid"] != "EPSG:4326":
            logger.info('  srid is different from "EPSG:4326"')
            return False

        data_update_date = parse_date(o["data_update_date"])
        if not data_update_date:
            warning(o, "data_update_date is empty: set to {}".format(datetime.fromtimestamp(0)))
            # We use a very old date for good comparison in decision tree
            data_update_date = datetime.fromtimestamp(0)

        metadata_update_date = parse_date(o["metadata_update_date"])
        if not metadata_update_date:
            warning(o, "metadata_update_date is empty: set to {}".format(datetime.fromtimestamp(0)))
            # We use a very old date for good comparison in decision tree
            metadata_update_date = datetime.fromtimestamp(0)

        calculation_method_quality = o["calculation_method_quality"]
        if not calculation_method_quality:
            warning(o, "calculation_method_quality is empty: skip layer")
            return False
        calculation_method_quality = int(float(calculation_method_quality))

        scientific_quality = o["scientific_quality"]
        if not scientific_quality:
            warning(o, "scientific_quality is empty")
            return False
        scientific_quality = int(float(scientific_quality))

        download_url = o["download_url"]
        if not download_url:
            warning(o, "download_url is empty")
            return False

        hazardset = self.dbsession.query(HazardSet).get(hazardset_id)

        # Create hazardset before layer
        if hazardset is None:
            logger.info("  Create new hazardset {}".format(hazardset_id))
            hazardset = HazardSet()
            hazardset.id = hazardset_id
            hazardset.hazardtype = hazardtype
            self.dbsession.add(hazardset)

        # get detail_url and owner_organization from last updated layer
        geonode = self.settings["geonode"]
        geonode_base_url = geonode["url"]

        if o["detail_url"] and not mask:
            hazardset.detail_url = geonode_base_url + o["detail_url"]
        if o["owner"]["organization"] and not mask:
            hazardset.owner_organization = o["owner"]["organization"]
        if not mask:
            hazardset.regions = regions

        layer = self.dbsession.query(Layer).get(o["id"])
        if layer is None:
            logger.info("  Create new Layer {}".format(title))
            layer = Layer()
            layer.geonode_id = o["id"]
            layer.hazardset = hazardset
            layer.mask = False

        else:
            # If data has changed
            if (
                layer.data_lastupdated_date != data_update_date
                or layer.download_url != download_url
            ):
                logger.info("  Invalidate downloaded")
                layer.downloaded = False
                hazardset.complete = False
                hazardset.processed = None
                # Remove file from cache
                layer.download_url = download_url
                path = self.layer_path(layer)
                if os.path.isfile(path):
                    os.unlink(path)

            # Some hazardset fields are calculated during completing
            if (
                layer.calculation_method_quality != calculation_method_quality
                or layer.scientific_quality != scientific_quality
                or layer.metadata_lastupdated_date != metadata_update_date
            ):
                logger.info("  Invalidate complete")
                hazardset.complete = False

            # Some fields invalidate outputs
            if layer.hazardunit != hazard_unit:
                logger.info("  Invalidate processed")
                hazardset.processed = None

        typename = o.get("typename", None)
        if typename is None:
            warning(o, 'Attribute "typename" is missing')
        layer.typename = typename

        layer.return_period = hazard_period
        layer.hazardunit = hazard_unit
        layer.data_lastupdated_date = data_update_date
        layer.metadata_lastupdated_date = metadata_update_date
        layer.download_url = download_url

        # TODO: retrieve quality attributes
        layer.calculation_method_quality = calculation_method_quality
        layer.scientific_quality = scientific_quality
        layer.local = local

        layer.set_harvested(True)
        self.dbsession.flush()
        return True
Exemplo n.º 6
0
def populate_notpreprocessed(type, unit):
    """Create and flush a "notpreprocessed" hazardset of the given hazard
    type: one layer per hazard level plus one mask layer, marked complete."""
    hazardset_id = "notpreprocessed"
    hazardtype = HazardType.get(DBSession, type)
    hazardtype_settings = settings["hazard_types"][hazardtype.mnemonic]
    regions = DBSession.query(Region).all()

    print(f"Populating hazardset {hazardset_id}")
    hazardset = HazardSet(
        id=hazardset_id,
        hazardtype=hazardtype,
        local=False,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        regions=regions,
    )
    DBSession.add(hazardset)

    def first_period(periods):
        # Settings give either a scalar or a [min, max] list of periods.
        return periods[0] if isinstance(periods, list) else periods

    def add_layer(hazardlevel, mask, return_period):
        # Append a layer; only level, mask flag and return period vary.
        hazardset.layers.append(
            Layer(
                hazardlevel=hazardlevel,
                mask=mask,
                return_period=return_period,
                hazardunit=unit,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )
        )

    return_periods = hazardtype_settings["return_periods"]
    for level in ("HIG", "MED", "LOW"):
        add_layer(
            HazardLevel.get(DBSession, level),
            False,
            first_period(return_periods[level]),
        )

    add_layer(None, True, first_period(hazardtype_settings["mask_return_period"]))

    hazardset.complete = True
    DBSession.flush()
Exemplo n.º 7
0
def make_layers():
    """Return one fully populated test Layer per hazard level."""
    return [
        Layer(
            hazardlevel=HazardLevel.get(level),
            return_period=1,
            hazardunit="m",
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(),
            download_url="http://something",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        )
        for level in ("HIG", "MED", "LOW")
    ]