Example No. 1
def populate_preprocessed(type):
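    """Populate a single complete hazardset of the given hazard type
    with one preprocessed (unleveled) layer."""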
    hazardset_id = "preprocessed"
    hazardtype = HazardType.get(DBSession, type)

    regions = DBSession.query(Region).all()

    print("Populating hazardset {}".format(hazardset_id))
    hazardset = HazardSet(
        id=hazardset_id,
        hazardtype=hazardtype,
        local=False,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        regions=regions,
    )
    DBSession.add(hazardset)

    layer = Layer(
        hazardlevel=None,
        mask=False,
        return_period=None,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        geonode_id=new_geonode_id(DBSession),
        download_url="test",
        calculation_method_quality=5,
        scientific_quality=1,
        local=False,
        downloaded=True,
    )
    hazardset.layers.append(layer)

    hazardset.complete = True
    DBSession.flush()
Example No. 2
    def test_not_corresponding_rasters(self, open_mock):
        """Difference in origin, resolution or size must not complete"""

        hazardset_id = "notpreprocessed"
        hazardtype = HazardType.get(DBSession, "FL")

        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id=hazardset_id,
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        for level in ["HIG", "MED", "LOW"]:
            layer = Layer(
                hazardlevel=HazardLevel.get(DBSession, level),
                mask=False,
                return_period=None,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )
            hazardset.layers.append(layer)

        mask_layer = Layer(
            hazardlevel=None,
            mask=True,
            return_period=None,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(DBSession),
            download_url="test",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        )
        hazardset.layers.append(mask_layer)

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(
            hazardset.complete_error,
            "All layers should have the same origin, resolution and size",
        )
        self.assertEqual(hazardset.complete, False)
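The open_mock parameter in these test methods suggests the raster-opening call is stubbed out with unittest.mock.patch, which injects the mock as an extra argument. A minimal sketch of such a decorator, assuming a hypothetical patch target (the real project path may differ):

import unittest
from unittest import mock

class CompleterTest(unittest.TestCase):  # hypothetical test class name
    # Placeholder target: point this at the function the completer
    # actually uses to open raster layers.
    @mock.patch("processing.completer.open_raster")
    def test_not_corresponding_rasters(self, open_mock):
        # Configure open_mock to return rasters with mismatched
        # origin/resolution/size, or to raise, depending on the test.
        ...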
Example No. 3
    def test_open_exception(self, open_mock):
        """Test handling of open exception"""

        hazardset_id = "notpreprocessed"
        hazardtype = HazardType.get(DBSession, "EQ")

        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id=hazardset_id,
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        for level in ["HIG", "MED", "LOW"]:
            layer = Layer(
                hazardlevel=HazardLevel.get(DBSession, level),
                mask=False,
                return_period=None,
                data_lastupdated_date=datetime.now(),
                metadata_lastupdated_date=datetime.now(),
                geonode_id=new_geonode_id(DBSession),
                download_url="test",
                calculation_method_quality=5,
                scientific_quality=1,
                local=False,
                downloaded=True,
            )
            hazardset.layers.append(layer)

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(hazardset.complete_error,
                         "Error opening layer notpreprocessed")
        self.assertEqual(hazardset.complete, False)
Example No. 4
    def test_complete_preprocessed(self, open_mock):
        """Test complete preprocessed hazardset"""

        hazardset_id = "preprocessed"
        hazardtype = HazardType.get(DBSession, "VA")

        regions = DBSession.query(Region).all()

        hazardset = HazardSet(
            id=hazardset_id,
            hazardtype=hazardtype,
            local=False,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            regions=regions,
        )
        DBSession.add(hazardset)

        layer = Layer(
            hazardlevel=None,
            mask=False,
            return_period=None,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(DBSession),
            download_url="test",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        )
        hazardset.layers.append(layer)

        DBSession.flush()

        self.completer().execute()

        hazardset = DBSession.query(HazardSet).one()
        self.assertEqual(hazardset.complete_error, None)
        self.assertEqual(hazardset.complete, True)
Example No. 5
    def harvest_layer(self, object):
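        """Harvest one GeoNode layer: create or update the corresponding
        HazardSet and Layer. Return True on success, False if the layer
        is skipped."""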
        logger.info("Harvesting layer {id} - {title}".format(**object))
        title = object["title"]

        # we need to retrieve more information on this layer
        # since the regions array is not included in the main
        # layers listing from GeoNode
        o = self.fetch("api/layers/{id}/".format(**object))

        if "regions" not in list(o.keys()):
            warning(object, 'Attribute "regions" is missing')

        region_ids = []
        for r in o.get("regions", []):
            # r is like "/api/regions/1/"
            region_ids.append(r.split("/")[3])

        if len(region_ids) == 0:
            regions = []
        else:
            regions = self.dbsession.query(Region).filter(Region.id.in_(region_ids)).all()

        hazardset_id = o['hazard_set']
        if not hazardset_id:
            logger.info("  hazard_set is empty")
            return False

        # FIXME: temporary override
        if hazardset_id in excluded_hazardsets:
            logger.info("  hazard_set {} is excluded, skipping")
            return False

        hazardtype = self.check_hazard_type(o)
        if not hazardtype:
            return False

        type_settings = self.settings["hazard_types"][hazardtype.mnemonic]
        preprocessed = "values" in type_settings

        local = "GLOBAL" not in hazardset_id

        mask = False
        if preprocessed is True:
            hazardlevel = None
            # harvest hazard_unit for preprocessed layers
            hazard_unit = o['hazard_unit']
            if o['hazard_period']:
                logger.info('  return period found in preprocessed hazardset')
                return False
            hazard_period = None

        else:
            try:
                hazard_period = int(o['hazard_period'])
            except (TypeError, ValueError):
                hazard_period = None
            if hazard_period is None:
                logger.info('  no return period found')
                return False
            hazardlevel = None
            for level in ("LOW", "MED", "HIG"):
                if between(hazard_period, type_settings["return_periods"][level]):
                    hazardlevel = HazardLevel.get(self.dbsession, level)
                    break

            if "mask_return_period" in type_settings and between(
                hazard_period, type_settings["mask_return_period"]
            ):
                mask = True

            if hazardlevel is None and not mask:
                logger.info("  No corresponding hazard_level")
                return False

            hazard_unit = o['hazard_unit']
            if hazard_unit == '':
                logger.info('  hazard_unit is empty')
                return False

        if o['srid'] != 'EPSG:4326':
            logger.info('  srid is different from "EPSG:4326"')
            return False

        data_update_date = parse_date(o['data_update_date'])
        if not data_update_date:
            warning(o, 'data_update_date is empty: set to {}'.format(datetime.fromtimestamp(0)))
            # We use a very old date for good comparison in decision tree
            data_update_date = datetime.fromtimestamp(0)

        metadata_update_date = parse_date(o['metadata_update_date'])
        if not metadata_update_date:
            warning(o, 'metadata_update_date is empty: set to {}'.format(datetime.fromtimestamp(0)))
            # We use a very old date for good comparison in decision tree
            metadata_update_date = datetime.fromtimestamp(0)

        calculation_method_quality = o['calculation_method_quality']
        if not calculation_method_quality:
            warning(o, 'calculation_method_quality is empty: skip layer')
            return False
        calculation_method_quality = int(float(calculation_method_quality))

        scientific_quality = o['scientific_quality']
        if not scientific_quality:
            warning(o, 'scientific_quality is empty')
            return False
        scientific_quality = int(float(scientific_quality))

        download_url = o['download_url']
        if not download_url:
            warning(o, 'download_url is empty')
            return False

        hazardset = self.dbsession.query(HazardSet).get(hazardset_id)

        # Create hazardset before layer
        if hazardset is None:
            logger.info("  Create new hazardset {}".format(hazardset_id))
            hazardset = HazardSet()
            hazardset.id = hazardset_id
            hazardset.hazardtype = hazardtype
            self.dbsession.add(hazardset)

        # get detail_url and owner_organization from last updated layer
        geonode = self.settings["geonode"]
        geonode_base_url = geonode["url"]

        if o['detail_url'] and not mask:
            hazardset.detail_url = geonode_base_url + o['detail_url']
        if o['owner']['organization'] and not mask:
            hazardset.owner_organization = o['owner']['organization']
        if not mask:
            hazardset.regions = regions

        layer = self.dbsession.query(Layer).get(o['id'])
        if layer is None:
            logger.info("  Create new Layer {}".format(title))
            layer = Layer()
            layer.geonode_id = o['id']
            layer.hazardset = hazardset
            layer.mask = False

        else:
            # If data has changed
            if (
                layer.data_lastupdated_date != data_update_date
                or layer.download_url != download_url
            ):
                logger.info("  Invalidate downloaded")
                layer.downloaded = False
                hazardset.complete = False
                hazardset.processed = None
                # Remove file from cache
                layer.download_url = download_url
                path = self.layer_path(layer)
                if os.path.isfile(path):
                    os.unlink(path)

            # Some hazardset fields are calculated during completion
            if (
                layer.calculation_method_quality != calculation_method_quality
                or layer.scientific_quality != scientific_quality
                or layer.metadata_lastupdated_date != metadata_update_date
            ):
                logger.info("  Invalidate complete")
                hazardset.complete = False

            # Some fields invalidate outputs
            if layer.hazardunit != hazard_unit:
                logger.info("  Invalidate processed")
                hazardset.processed = None

        typename = o.get("typename", None)
        if typename is None:
            warning(o, 'Attribute "typename" is missing')
        layer.typename = typename

        layer.return_period = hazard_period
        layer.hazardunit = hazard_unit
        layer.data_lastupdated_date = data_update_date
        layer.metadata_lastupdated_date = metadata_update_date
        layer.download_url = download_url

        # TODO: retrieve quality attributes
        layer.calculation_method_quality = calculation_method_quality
        layer.scientific_quality = scientific_quality
        layer.local = local

        layer.set_harvested(True)
        self.dbsession.flush()
        return True
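The harvesting logic above matches a layer's return period against the configured ranges with a between(value, bounds) helper that is not shown in this excerpt. A minimal sketch of such a predicate, assuming bounds may be either a single value or a [lower, upper] pair as in the settings used elsewhere on this page (the project's own helper may differ):

def between(value, bounds):
    """Illustrative only: True if value equals bounds, or falls within
    an inclusive [lower, upper] pair."""
    if isinstance(bounds, (list, tuple)):
        lower, upper = bounds
        return lower <= value <= upper
    return value == bounds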
Example No. 6
def populate_notpreprocessed(type, unit):
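    """Populate a complete not-preprocessed hazardset of the given type
    and unit, with one layer per hazard level plus a mask layer."""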
    hazardset_id = "notpreprocessed"
    hazardtype = HazardType.get(DBSession, type)
    hazardtype_settings = settings["hazard_types"][hazardtype.mnemonic]

    regions = DBSession.query(Region).all()

    print("Populating hazardset {}".format(hazardset_id))
    hazardset = HazardSet(
        id=hazardset_id,
        hazardtype=hazardtype,
        local=False,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        regions=regions,
    )
    DBSession.add(hazardset)

    return_periods = hazardtype_settings["return_periods"]

    for level in ("HIG", "MED", "LOW"):
        hazardlevel = HazardLevel.get(DBSession, level)
        level_return_periods = return_periods[level]
        if isinstance(level_return_periods, list):
            return_period = level_return_periods[0]
        else:
            return_period = level_return_periods

        layer = Layer(
            hazardlevel=hazardlevel,
            mask=False,
            return_period=return_period,
            hazardunit=unit,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(DBSession),
            download_url="test",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        )
        hazardset.layers.append(layer)

    mask_return_periods = hazardtype_settings["mask_return_period"]
    if isinstance(mask_return_periods, list):
        mask_return_period = mask_return_periods[0]
    else:
        mask_return_period = mask_return_periods
    layer = Layer(
        hazardlevel=None,
        mask=True,
        return_period=mask_return_period,
        hazardunit=unit,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        geonode_id=new_geonode_id(DBSession),
        download_url="test",
        calculation_method_quality=5,
        scientific_quality=1,
        local=False,
        downloaded=True,
    )
    hazardset.layers.append(layer)

    hazardset.complete = True
    DBSession.flush()
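These populate helpers read like test fixtures; a hedged sketch of how they might be combined before running a completer or processor (the hazard type mnemonics come from the other examples on this page, and the unit string "m" is a placeholder):

def setup_sample_hazardsets():
    # Illustrative only: one preprocessed hazardset (as in Example No. 4)
    # and one not-preprocessed hazardset with an assumed unit of "m".
    populate_preprocessed("VA")
    populate_notpreprocessed("FL", "m")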
Example No. 7
def populate_processing():
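    """Populate several hazardsets and Output rows covering the cases
    compared by the decision tree (quality, locality, update date)."""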
    hazardtype_eq = HazardType.get("EQ")
    hazardtype_fl = HazardType.get("FL")

    hazardset_id = "hazardset1"
    print("Populating hazardset {}".format(hazardset_id))
    hazardset1 = HazardSet()
    hazardset1.id = hazardset_id
    hazardset1.hazardtype = hazardtype_eq
    hazardset1.local = False
    hazardset1.calculation_method_quality = 0
    hazardset1.data_lastupdated_date = datetime.now()
    hazardset1.metadata_lastupdated_date = datetime.now()
    hazardset1.complete = True
    hazardset1.layers.extend(make_layers())
    DBSession.add(hazardset1)

    hazardset_id = "hazardset1b"
    print("Populating hazardset {}".format(hazardset_id))
    hazardset1b = HazardSet()
    hazardset1b.id = hazardset_id
    hazardset1b.hazardtype = hazardtype_fl
    hazardset1b.local = True
    hazardset1b.calculation_method_quality = 3
    hazardset1b.data_lastupdated_date = datetime.now()
    hazardset1b.metadata_lastupdated_date = datetime.now()
    hazardset1b.complete = True
    hazardset1b.layers.extend(make_layers())
    DBSession.add(hazardset1b)

    # create hazardsets 2, 3 ... 6
    hazardset_id = "hazardset2"
    # calculation_method_quality = 1 / scientific_quality = 0
    print("Populating hazardset {}".format(hazardset_id))
    hazardset2 = HazardSet()
    hazardset2.id = hazardset_id
    hazardset2.hazardtype = hazardtype_eq
    hazardset2.local = True
    hazardset2.calculation_method_quality = 1
    hazardset2.scientific_quality = 0
    hazardset2.data_lastupdated_date = datetime.now()
    hazardset2.metadata_lastupdated_date = datetime.now()
    hazardset2.complete = True
    hazardset2.layers.extend(make_layers())
    DBSession.add(hazardset2)

    hazardset_id = "hazardset3"
    # date = 2015-01-01 / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset3 = HazardSet()
    hazardset3.id = hazardset_id
    hazardset3.hazardtype = hazardtype_eq
    hazardset3.local = False
    hazardset3.calculation_method_quality = 1
    hazardset3.scientific_quality = 2
    hazardset3.data_lastupdated_date = datetime(2015, 1, 1, 0, 0)
    hazardset3.metadata_lastupdated_date = datetime.now()
    hazardset3.complete = True
    hazardset3.layers.extend(make_layers())
    DBSession.add(hazardset3)

    hazardset_id = "hazardset4"
    # date = 2015-01-01 / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset4 = HazardSet()
    hazardset4.id = hazardset_id
    hazardset4.hazardtype = hazardtype_eq
    hazardset4.local = True
    hazardset4.calculation_method_quality = 1
    hazardset4.scientific_quality = 2
    hazardset4.data_lastupdated_date = datetime(2015, 1, 1, 0, 0)
    hazardset4.metadata_lastupdated_date = datetime.now()
    hazardset4.complete = True
    hazardset4.layers.extend(make_layers())
    DBSession.add(hazardset4)

    hazardset_id = "hazardset5"
    # date = now() / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset5 = HazardSet()
    hazardset5.id = hazardset_id
    hazardset5.hazardtype = hazardtype_eq
    hazardset5.local = True
    hazardset5.calculation_method_quality = 1
    hazardset5.scientific_quality = 2
    hazardset5.data_lastupdated_date = datetime.now()
    hazardset5.metadata_lastupdated_date = datetime.now()
    hazardset5.complete = True
    hazardset5.layers.extend(make_layers())
    DBSession.add(hazardset5)

    hazardset_id = "hazardset6"
    # date = now() / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    print("Populating hazardset {}".format(hazardset_id))
    hazardset6 = HazardSet()
    hazardset6.id = hazardset_id
    hazardset6.hazardtype = hazardtype_eq
    hazardset6.local = False
    hazardset6.calculation_method_quality = 1
    hazardset6.scientific_quality = 2
    hazardset6.data_lastupdated_date = datetime.now()
    hazardset6.metadata_lastupdated_date = datetime.now()
    hazardset6.complete = True
    hazardset6.layers.extend(make_layers())
    DBSession.add(hazardset6)

    # populate output table

    # admin div (code 30) has only one hazardset for EQ
    admin30 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 30).one())
    # => test outcome = hazardset1
    output1 = Output()
    output1.hazardset = hazardset1
    output1.administrativedivision = admin30
    output1.hazardlevel = HazardLevel.get("HIG")
    DBSession.add(output1)

    # admin div (code 30) also has another hazardset
    # but this one is for FL
    # => test outcome = hazardset1b
    output1b = Output()
    output1b.hazardset = hazardset1b
    output1b.administrativedivision = admin30
    output1b.hazardlevel = HazardLevel.get("NPR")
    DBSession.add(output1b)

    # admin div (code 31) has 2 hazardsets,
    # one with a higher calculation_method_quality
    # => test outcome = hazardset2
    admin31 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 31).one())
    output2 = Output()
    output2.hazardset = hazardset1  # calculation_method_quality = 0
    output2.administrativedivision = admin31
    output2.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output2)
    output3 = Output()
    output3.hazardset = hazardset2  # calculation_method_quality = 1
    output3.administrativedivision = admin31
    output3.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output3)

    # admin div (code 32) has 2 hazardsets,
    # both share the same calculation_method_quality,
    # one with a higher scientific_quality
    # => test outcome = hazardset3
    admin32 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 32).one())
    output4 = Output()
    output4.hazardset = hazardset2
    # calculation_method_quality = 1 / scientific_quality = 0
    output4.administrativedivision = admin32
    output4.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output4)
    output5 = Output()
    output5.hazardset = hazardset3
    # calculation_method_quality = 1 / scientific_quality = 2
    output5.administrativedivision = admin32
    output5.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output5)

    # admin div (code 33) has 2 hazardsets,
    # both share the same ratings, one is global, one local
    # => test outcome = hazardset4
    admin33 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 33).one())
    output6 = Output()
    output6.hazardset = hazardset3
    # global / calculation_method_quality = 1 / scientific_quality = 2
    output6.administrativedivision = admin33
    output6.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output6)
    output7 = Output()
    output7.hazardset = hazardset4
    # local / calculation_method_quality = 1 / scientific_quality = 2
    output7.administrativedivision = admin33
    output7.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output7)

    # admin div (code 34) has 2 hazardsets,
    # both share the same ratings, are local, one is more recent
    # => test outcome = hazardset5
    admin34 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 34).one())
    output8 = Output()
    output8.hazardset = hazardset4
    # date = 2015-01-01 / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    output8.administrativedivision = admin34
    output8.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output8)
    output9 = Output()
    output9.hazardset = hazardset5
    # date = now() / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    output9.administrativedivision = admin34
    output9.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output9)

    # admin div (code 35) has 2 hazardsets,
    # both share the same ratings, are global, one is more recent
    # => test outcome = hazardset6
    admin35 = (DBSession.query(AdministrativeDivision).filter(
        AdministrativeDivision.code == 35).one())
    output10 = Output()
    output10.hazardset = hazardset3
    # date = 2015-01-01 / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    output10.administrativedivision = admin35
    output10.hazardlevel = HazardLevel.get("MED")
    DBSession.add(output10)
    output11 = Output()
    output11.hazardset = hazardset6
    # date = now() / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    output11.administrativedivision = admin35
    output11.hazardlevel = HazardLevel.get("LOW")
    DBSession.add(output11)

    DBSession.flush()
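populate_processing() depends on a make_layers() helper that is not shown in this excerpt. A possible sketch, reusing the Layer fields seen in the other examples on this page (field values and the HazardLevel.get call style are illustrative, not the project's actual helper):

def make_layers():
    # Illustrative stand-in: one non-mask layer per hazard level.
    layers = []
    for level in ("HIG", "MED", "LOW"):
        layers.append(Layer(
            hazardlevel=HazardLevel.get(level),
            mask=False,
            return_period=None,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(DBSession),
            download_url="test",
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True,
        ))
    return layers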
Example No. 8
def populate_db():
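    """Reset the database and repopulate it with sample administrative
    divisions, regions, hazard categories, recommendations and contacts."""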
    DBSession.query(Output).delete()
    DBSession.query(Layer).delete()
    DBSession.query(HazardSet).delete()

    DBSession.query(HazardTypeFurtherResourceAssociation).delete()
    DBSession.query(FurtherResource).delete()
    DBSession.query(HazardCategoryTechnicalRecommendationAssociation).delete()
    DBSession.query(TechnicalRecommendation).delete()
    DBSession.query(ClimateChangeRecommendation).delete()
    DBSession.query(HazardCategoryAdministrativeDivisionAssociation).delete()
    DBSession.query(CAdHt).delete()
    DBSession.query(Contact).delete()
    DBSession.query(Region).delete()
    DBSession.query(AdministrativeDivision).delete()

    hazardtype_eq = (DBSession.query(HazardType).filter(
        HazardType.mnemonic == "EQ").one())
    hazardset1 = HazardSet()
    hazardset1.id = "hazardset1"
    hazardset1.hazardtype = hazardtype_eq
    hazardset1.data_lastupdated_date = datetime.now()
    hazardset1.metadata_lastupdated_date = datetime.now()
    hazardset1.detail_url = "http://domain.com/path/"
    hazardset1.owner_organization = "data_provider"
    DBSession.add(hazardset1)

    shape = MultiPolygon([Polygon([(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)])])
    geometry = from_shape(shape, 4326)

    # admin_div_10 is a country (division level 1)
    admin_div_10 = AdministrativeDivision(**{
        "code": 10,
        "leveltype_id": 1,
        "name": "Division level 1"
    })
    admin_div_10.geom = geometry
    DBSession.add(admin_div_10)

    # admin_div_11 is another country (division level 1)
    admin_div_11 = AdministrativeDivision(**{
        "code": 11,
        "leveltype_id": 1,
        "name": "Division level 1 2"
    })
    admin_div_11.geom = geometry
    DBSession.add(admin_div_11)

    # admin_div_12 is another country (division level 1)
    admin_div_12 = AdministrativeDivision(**{
        "code": 12,
        "leveltype_id": 1,
        "name": "Division level 1 3"
    })
    admin_div_12.geom = geometry
    DBSession.add(admin_div_12)

    # admin_div_13 is another country (division level 1)
    admin_div_13 = AdministrativeDivision(**{
        "code": 13,
        "leveltype_id": 1,
        "name": "Division level 1 4"
    })
    admin_div_13.geom = geometry
    DBSession.add(admin_div_13)

    # admin_div_20 is a province (division level 2)
    # its parent is admin_div_10
    admin_div_20 = AdministrativeDivision(**{
        "code": 20,
        "leveltype_id": 2,
        "name": "Division level 2"
    })
    admin_div_20.parent_code = admin_div_10.code
    admin_div_20.geom = geometry
    DBSession.add(admin_div_20)

    shape = MultiPolygon(
        [Polygon([(0, 0), (0, 1), (0.5, 1), (0.5, 0), (0, 0)])])
    geometry = from_shape(shape, 4326)

    # admin_div_31 is a region (division level 3)
    # its parent is admin_div_20
    admin_div_31 = AdministrativeDivision(**{
        "code": 31,
        "leveltype_id": 3,
        "name": "Division level 3 - 1"
    })
    admin_div_31.parent_code = admin_div_20.code
    admin_div_31.geom = geometry
    admin_div_31.hazardcategories = []

    shape = MultiPolygon(
        [Polygon([(0.5, 0), (0.5, 1), (1, 1), (1, 0), (0.5, 0)])])
    geometry = from_shape(shape, 4326)

    # admin_div_32 is a region (division level 3)
    # its parent is also admin_div_20
    admin_div_32 = AdministrativeDivision(**{
        "code": 32,
        "leveltype_id": 3,
        "name": "Division level 3 - 2"
    })
    admin_div_32.parent_code = admin_div_20.code
    admin_div_32.geom = geometry
    admin_div_32.hazardcategories = []

    # Here's a quick, graphical recap:
    #
    # admin_div_10 -> admin_div_20 -> admin_div_31
    #                             `-> admin_div_32
    # admin_div_11
    # admin_div_12
    # admin_div_13

    # GeoNode Regions
    # global_region contains all countries, **except admin_div_12**
    global_region = Region(**{"id": 1, "level": 0, "name": "Global region"})
    global_region.administrativedivisions.append(admin_div_10)
    global_region.administrativedivisions.append(admin_div_11)
    global_region.administrativedivisions.append(admin_div_13)

    # region_1 is a country
    # it matches GAUL's admin_div_10
    region_1 = Region(**{"id": 2, "level": 3, "name": "Country 1"})
    region_1.administrativedivisions.append(admin_div_10)

    # region_2 is another country
    # it matches GAUL's admin_div_11
    region_2 = Region(**{"id": 3, "level": 3, "name": "Country 2"})
    region_2.administrativedivisions.append(admin_div_11)

    # region_3 is another country
    # it matches GAUL's admin_div_12
    region_3 = Region(**{"id": 4, "level": 3, "name": "Country 3"})
    region_3.administrativedivisions.append(admin_div_12)

    # Here's a quick, graphical recap:
    #
    # global_region  -> admin_div_10 (region_1) -> admin_div_20 -> admin_div_31
    #            `                                             `-> admin_div_32
    #             `
    #              ` -> admin_div_11 (region_2)
    #               `-> admin_div_13
    #
    # region_3 = admin_div_12

    category_eq_hig = HazardCategory.get(DBSession, "EQ", "HIG")
    category_eq_hig.general_recommendation = "General recommendation for EQ HIG"

    category_fl_hig = HazardCategory.get(DBSession, "FL", "HIG")

    # admin_div_31 has (EQ, HIGH)
    association = HazardCategoryAdministrativeDivisionAssociation(
        **{"hazardcategory": category_eq_hig})
    association.hazardsets.append(hazardset1)
    admin_div_31.hazardcategories.append(association)

    # admin_div_32 has (FL, HIGH)
    admin_div_32.hazardcategories.append(
        HazardCategoryAdministrativeDivisionAssociation(
            **{"hazardcategory": category_fl_hig}))

    # admin_div_32 has (EQ, HIGH)
    admin_div_32.hazardcategories.append(
        HazardCategoryAdministrativeDivisionAssociation(
            **{"hazardcategory": category_eq_hig}))

    # admin_div_10 has (EQ, HIGH)
    admin_div_10.hazardcategories.append(
        HazardCategoryAdministrativeDivisionAssociation(
            **{"hazardcategory": category_eq_hig}))

    # admin_div_11 has no category (this is tested)
    # admin_div_12 has (EQ, HIGH)
    admin_div_12.hazardcategories.append(
        HazardCategoryAdministrativeDivisionAssociation(
            **{"hazardcategory": category_eq_hig}))

    # admin_div_13 has (EQ, HIGH)
    admin_div_13.hazardcategories.append(
        HazardCategoryAdministrativeDivisionAssociation(
            **{"hazardcategory": category_eq_hig}))

    climate_rec = ClimateChangeRecommendation(
        text="Climate change recommendation",
        hazardtype=HazardType.get(DBSession, "EQ"))
    climate_rec.associations.append(
        CcrAd(administrativedivision=admin_div_10,
              hazardtype=HazardType.get(DBSession, "EQ")))
    DBSession.add(climate_rec)

    climate_rec = ClimateChangeRecommendation(
        text="Climate change recommendation 2",
        hazardtype=HazardType.get(DBSession, "EQ"))
    climate_rec.associations.append(
        CcrAd(administrativedivision=admin_div_11,
              hazardtype=HazardType.get(DBSession, "EQ")))
    DBSession.add(climate_rec)

    technical_rec = TechnicalRecommendation(
        **{
            "text":
            "Recommendation #1 for earthquake, applied to"
            " hazard categories HIG, MED and LOW"
        })
    association = HazardCategoryTechnicalRecommendationAssociation(order=1)
    association.hazardcategory = category_eq_hig
    technical_rec.hazardcategory_associations.append(association)
    DBSession.add(technical_rec)

    technical_rec = TechnicalRecommendation(**{
        "text":
        "Educational web resources on earthquakes and"
        " seismic hazard"
    })
    association = HazardCategoryTechnicalRecommendationAssociation(order=1)
    association.hazardcategory = category_eq_hig
    technical_rec.hazardcategory_associations.append(association)
    DBSession.add(technical_rec)

    category_fl_med = HazardCategory.get(DBSession, "FL", "MED")
    category_fl_med.general_recommendation = "General recommendation for FL MED"

    admin_div_31.hazardcategories.append(
        HazardCategoryAdministrativeDivisionAssociation(
            **{"hazardcategory": category_fl_med}))
    DBSession.add(admin_div_31)
    admin_div_32.hazardcategories.append(
        HazardCategoryAdministrativeDivisionAssociation(
            **{"hazardcategory": category_fl_med}))
    DBSession.add(admin_div_32)

    # generic further resource for EQ:
    # it should be found on every EQ report page
    # (except admin_div_12 which is not linked with global region)
    further_resource = FurtherResource(
        **{
            "text": "Educational web resources on earthquakes and" +
            " seismic hazard",
            "id": 3,
        })
    association = HazardTypeFurtherResourceAssociation()
    association.hazardtype = hazardtype_eq
    association.region = global_region
    further_resource.hazardtype_associations.append(association)
    DBSession.add(further_resource)

    # further resource for EQ & region 1:
    # it should be found only on region 1 (and sub-divisions) page
    further_resource = FurtherResource(**{
        "text": "Further resource for earthquake for region 1",
        "id": 5
    })
    association = HazardTypeFurtherResourceAssociation()
    association.hazardtype = hazardtype_eq
    association.region = region_1
    further_resource.hazardtype_associations.append(association)
    DBSession.add(further_resource)

    # contact for EQ & admin_div_10:
    contact1 = Contact(
        **{
            "name": "Contact name",
            "url": "http://domain.com",
            "phone": "0123456789",
            "email": "*****@*****.**",
        })
    DBSession.add(contact1)
    association = CAdHt()
    association.hazardtype = hazardtype_eq
    association.administrativedivision = admin_div_10
    association.contact = contact1
    DBSession.add(association)

    # second contact for EQ & admin_div_10:
    contact2 = Contact(
        **{
            "name": "Contact name 2",
            "url": "http://domain.com",
            "phone": "0123456789",
            "email": "*****@*****.**",
        })
    DBSession.add(contact2)
    association = CAdHt()
    association.hazardtype = hazardtype_eq
    association.administrativedivision = admin_div_10
    association.contact = contact2
    DBSession.add(association)

    Publication.new(DBSession)

    DBSession.flush()
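The geometry handling in this example (MultiPolygon, Polygon, from_shape) presumably relies on Shapely and GeoAlchemy2, so the corresponding imports would look roughly like:

from datetime import datetime

from geoalchemy2.shape import from_shape  # from_shape(shape, srid) -> WKB element
from shapely.geometry import MultiPolygon, Polygon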