Code example #1
def populate_notpreprocessed(type, unit):
    hazardset_id = u'notpreprocessed'
    hazardtype = HazardType.get(type)
    hazardtype_settings = settings['hazard_types'][hazardtype.mnemonic]

    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset = HazardSet(
        id=hazardset_id,
        hazardtype=hazardtype,
        local=False,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now())
    DBSession.add(hazardset)

    return_periods = hazardtype_settings['return_periods']

    for level in (u'HIG', u'MED', u'LOW'):
        hazardlevel = HazardLevel.get(level)
        level_return_periods = return_periods[level]
        if isinstance(level_return_periods, list):
            return_period = level_return_periods[0]
        else:
            return_period = level_return_periods

        layer = Layer(
            hazardlevel=hazardlevel,
            mask=False,
            return_period=return_period,
            hazardunit=unit,
            data_lastupdated_date=datetime.now(),
            metadata_lastupdated_date=datetime.now(),
            geonode_id=new_geonode_id(),
            download_url='test',
            calculation_method_quality=5,
            scientific_quality=1,
            local=False,
            downloaded=True
        )
        hazardset.layers.append(layer)

    layer = Layer(
        hazardlevel=None,
        mask=True,
        return_period=hazardtype_settings['mask_return_period'],
        hazardunit=unit,
        data_lastupdated_date=datetime.now(),
        metadata_lastupdated_date=datetime.now(),
        geonode_id=new_geonode_id(),
        download_url='test',
        calculation_method_quality=5,
        scientific_quality=1,
        local=False,
        downloaded=True
    )
    hazardset.layers.append(layer)

    hazardset.complete = True
    DBSession.flush()
Code example #2
def make_layers():
    layers = []
    for level in (u'HIG', u'MED', u'LOW'):
        layer = Layer()
        layer.hazardlevel = HazardLevel.get(level)
        layer.return_period = 1
        layer.hazardunit = u'm'
        layer.data_lastupdated_date = datetime.now()
        layer.metadata_lastupdated_date = datetime.now()
        layer.geonode_id = new_geonode_id()
        layer.download_url = u'http://something'
        layer.calculation_method_quality = 5
        layer.scientific_quality = 1
        layer.local = False
        layer.downloaded = True
        layers.append(layer)
    return layers
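
Note: every example on this page calls HazardLevel.get() with a mnemonic such as u'HIG'. The classmethod itself is not shown here; the following is only a minimal sketch of how it could work, assuming a SQLAlchemy model with the mnemonic, title, description and order attributes that appear in the report.py example below (the table name and column types are assumptions, not the actual thinkhazard_common code).

from sqlalchemy import Column, Float, Integer, Unicode
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

DBSession = scoped_session(sessionmaker())  # stand-in for the project's session
Base = declarative_base()


class HazardLevel(Base):
    __tablename__ = 'enum_hazardlevel'  # assumed table name

    id = Column(Integer, primary_key=True)
    mnemonic = Column(Unicode)    # e.g. u'HIG', u'MED', u'LOW', u'VLO'
    title = Column(Unicode)
    description = Column(Unicode)
    order = Column(Float)         # lower value assumed to mean a more severe level

    @classmethod
    def get(cls, mnemonic):
        # Look the level up by its mnemonic; returns None if it does not exist.
        return DBSession.query(cls) \
            .filter(cls.mnemonic == mnemonic) \
            .one_or_none()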
Code example #3
def populate_processing():
    print 'populate processing'
    hazardset_id = u'test'
    hazardtype = HazardType.get(u'EQ')
    hazardtype_settings = settings['hazard_types'][hazardtype.mnemonic]

    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset = HazardSet()
    hazardset.id = hazardset_id
    hazardset.hazardtype = hazardtype
    hazardset.local = False
    hazardset.data_lastupdated_date = datetime.now()
    hazardset.metadata_lastupdated_date = datetime.now()
    DBSession.add(hazardset)

    return_periods = hazardtype_settings['global']['return_periods']
    unit = hazardtype_settings['thresholds'].keys()[0]

    for level in (u'HIG', u'MED', u'LOW'):
        hazardlevel = HazardLevel.get(level)
        return_period = return_periods[level]

        layer = Layer()
        layer.title = "{}-{}".format(hazardset_id, return_period)
        layer.hazardlevel = hazardlevel
        layer.return_period = return_period
        layer.hazardunit = unit
        layer.data_lastupdated_date = datetime.now()
        layer.metadata_lastupdated_date = datetime.now()
        layer.geonode_id = new_geonode_id()
        layer.download_url = 'test'
        layer.calculation_method_quality = 5
        layer.scientific_quality = 1
        layer.local = False
        layer.downloaded = True
        hazardset.layers.append(layer)
        DBSession.flush()

    hazardset.complete = True
    DBSession.flush()
Code example #4
def preprocessed_hazardlevel(type_settings, layer, reader, geometry):
    hazardlevel = None

    for polygon in geometry.geoms:
        window = reader.window(*polygon.bounds)
        data = reader.read(1, window=window, masked=True)

        if data.shape[0] * data.shape[1] == 0:
            continue
        if data.mask.all():
            continue

        mask = features.geometry_mask(
            [polygon],
            out_shape=data.shape,
            transform=reader.window_transform(window),
            all_touched=True)

        data.mask = data.mask | mask

        if data.mask.all():
            continue

        for level in (u'HIG', u'MED', u'LOW', u'VLO'):
            level_obj = HazardLevel.get(level)
            if level_obj <= hazardlevel:
                break

            if level in type_settings['values']:
                values = type_settings['values'][level]
                for value in values:
                    if value in data:
                        hazardlevel = level_obj
                        break

    return hazardlevel
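
Note: the comparison "level_obj <= hazardlevel" above assumes that HazardLevel instances are ordered by severity. A toy sketch of the assumed semantics, based on the "order" attribute used in the report.py example further down; this is an illustration, not the thinkhazard_common implementation.

class _Level(object):
    """Toy stand-in used only to illustrate the assumed ordering."""

    def __init__(self, mnemonic, order):
        self.mnemonic = mnemonic
        self.order = order

    def __le__(self, other):
        # "No more severe than other": a larger order value is assumed to
        # mean a less severe level. Comparing against None (no level found
        # yet) is False, so the loop above keeps searching in that case.
        if other is None:
            return False
        return self.order >= other.order


HIG, MED, LOW, VLO = [_Level(m, o) for m, o in
                      [(u'HIG', 1), (u'MED', 2), (u'LOW', 3), (u'VLO', 4)]]
assert not (HIG <= MED)  # HIG is more severe than MED
assert LOW <= MED        # LOW is not more severe than MED
assert not (HIG <= None)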
Code example #5
def process_hazardset(hazardset_id, force=False):
    hazardset = DBSession.query(HazardSet).get(hazardset_id)
    if hazardset is None:
        raise ProcessException('Hazardset {} does not exist.'
                               .format(hazardset_id))

    chrono = datetime.datetime.now()

    if hazardset.processed:
        if force:
            hazardset.processed = False
        else:
            raise ProcessException('Hazardset {} has already been processed.'
                                   .format(hazardset.id))

    logger.info("  Cleaning previous outputs")
    DBSession.query(Output) \
        .filter(Output.hazardset_id == hazardset.id) \
        .delete()
    DBSession.flush()

    type_settings = settings['hazard_types'][hazardset.hazardtype.mnemonic]

    with rasterio.drivers():
        try:
            logger.info("  Opening raster files")
            # Open rasters
            layers = {}
            readers = {}
            if 'values' in type_settings.keys():
                # preprocessed layer
                layer = DBSession.query(Layer) \
                    .filter(Layer.hazardset_id == hazardset.id) \
                    .one()
                reader = rasterio.open(layer_path(layer))

                layers[0] = layer
                readers[0] = reader

            else:
                for level in (u'HIG', u'MED', u'LOW'):
                    hazardlevel = HazardLevel.get(level)
                    layer = DBSession.query(Layer) \
                        .filter(Layer.hazardset_id == hazardset.id) \
                        .filter(Layer.hazardlevel_id == hazardlevel.id) \
                        .one()
                    reader = rasterio.open(layer_path(layer))

                    layers[level] = layer
                    readers[level] = reader
                if ('mask_return_period' in type_settings):
                    layer = DBSession.query(Layer) \
                        .filter(Layer.hazardset_id == hazardset.id) \
                        .filter(Layer.mask.is_(True)) \
                        .one()
                    reader = rasterio.open(layer_path(layer))
                    layers['mask'] = layer
                    readers['mask'] = reader

            outputs = create_outputs(hazardset, layers, readers)
            if outputs:
                DBSession.add_all(outputs)

        finally:
            logger.info("  Closing raster files")
            for key, reader in readers.iteritems():
                if reader and not reader.closed:
                    reader.close()

    hazardset.processed = True
    DBSession.flush()

    logger.info('  Successfully processed {}, {} outputs generated in {}'
                .format(hazardset.id,
                        len(outputs),
                        datetime.datetime.now() - chrono))

    return True
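
Note: a hypothetical driver loop for process_hazardset(); the process_all() helper and its query are only a sketch of how the pieces above could be wired together, not code from the project.

def process_all(force=False):
    # Process every complete hazardset, logging failures instead of
    # aborting the whole run (sketch only).
    hazardsets = DBSession.query(HazardSet) \
        .filter(HazardSet.complete.is_(True))
    for hazardset in hazardsets:
        try:
            process_hazardset(hazardset.id, force=force)
        except ProcessException as e:
            logger.warning(str(e))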
Code example #6
def notpreprocessed_hazardlevel(hazardtype, type_settings, layers, readers,
                                geometry):
    level_VLO = HazardLevel.get(u'VLO')

    hazardlevel = None

    # Create some optimization caches
    polygons = {}
    bboxes = {}
    masks = {}

    inverted_comparison = ('inverted_comparison' in type_settings and
                           type_settings['inverted_comparison'])

    for level in (u'HIG', u'MED', u'LOW'):
        layer = layers[level]
        reader = readers[level]

        threshold = get_threshold(hazardtype,
                                  layer.local,
                                  layer.hazardlevel.mnemonic,
                                  layer.hazardunit)
        if threshold is None:
            raise ProcessException(
                'No threshold found for {} {} {} {}'
                .format(hazardtype,
                        'local' if layer.local else 'global',
                        layer.hazardlevel.mnemonic,
                        layer.hazardunit))

        for i in xrange(0, len(geometry.geoms)):
            if i not in polygons:
                polygon = geometry.geoms[i]
                bbox = polygon.bounds
                polygons[i] = polygon
                bboxes[i] = bbox
            else:
                polygon = polygons[i]
                bbox = bboxes[i]

            window = reader.window(*bbox)
            data = reader.read(1, window=window, masked=True)

            if data.shape[0] * data.shape[1] == 0:
                continue
            if data.mask.all():
                continue

            if inverted_comparison:
                data = data < threshold
            else:
                data = data > threshold

            if ('mask_return_period' in type_settings):
                mask = readers['mask'].read(1, window=window, masked=True)
                if inverted_comparison:
                    mask = mask < threshold
                else:
                    mask = mask > threshold

                data.mask = ma.getmaskarray(data) | mask.filled(False)
                if data.mask.all():
                    continue

            if i in masks:
                mask = masks[i]
            else:
                mask = features.geometry_mask(
                    [polygon],
                    out_shape=data.shape,
                    transform=reader.window_transform(window),
                    all_touched=True)
                masks[i] = mask

            data.mask = ma.getmaskarray(data) | mask

            if data.any():
                hazardlevel = layer.hazardlevel
                break  # No need to go further

            if data.mask.all():
                continue

            if hazardlevel is None:
                hazardlevel = level_VLO

        if hazardlevel == layer.hazardlevel:
            break  # No need to go further

    return hazardlevel
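
Note: get_threshold() is called above but not shown on this page. A plausible sketch, under the assumption that the settings nest thresholds by scope (global/local), hazard level mnemonic and unit; the real settings layout may differ (compare the flat thresholds[unit] lookup in the last example on this page).

def get_threshold(hazardtype, local, level, unit):
    # Hypothetical lookup mirroring the call site above; returns None
    # when no threshold is configured for the given combination.
    scope = 'local' if local else 'global'
    thresholds = settings['hazard_types'][hazardtype]['thresholds']
    try:
        return thresholds[scope][level][unit]
    except KeyError:
        return None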
Code example #7
File: report.py  Project: stufraser1/thinkhazard
from thinkhazard_common.models import (
    DBSession,
    AdministrativeDivision,
    HazardLevel,
    HazardCategory,
    HazardCategoryAdministrativeDivisionAssociation,
    HazardType,
    ClimateChangeRecommendation,
    TechnicalRecommendation,
    FurtherResource,
)


# An object for the "no data" category type.
_hazardlevel_nodata = HazardLevel()
_hazardlevel_nodata.mnemonic = 'no-data'
_hazardlevel_nodata.title = 'No data available'
_hazardlevel_nodata.description = 'No data for this hazard type.'
_hazardlevel_nodata.order = float('inf')


@view_config(route_name='report_overview', renderer='templates/report.jinja2')
@view_config(route_name='report_overview_slash',
             renderer='templates/report.jinja2')
@view_config(route_name='report', renderer='templates/report.jinja2')
def report(request):
    try:
        division_code = request.matchdict.get('divisioncode')
    except:
        raise HTTPBadRequest(detail='incorrect value for parameter '
                                    '"divisioncode"')
Code example #8
def populate_processing():
    hazardtypeEQ = HazardType.get(u'EQ')
    hazardtypeFL = HazardType.get(u'FL')

    hazardset_id = u'hazardset1'
    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset1 = HazardSet()
    hazardset1.id = hazardset_id
    hazardset1.hazardtype = hazardtypeEQ
    hazardset1.local = False
    hazardset1.calculation_method_quality = 0
    hazardset1.data_lastupdated_date = datetime.now()
    hazardset1.metadata_lastupdated_date = datetime.now()
    hazardset1.complete = True
    hazardset1.layers.extend(make_layers())
    DBSession.add(hazardset1)

    hazardset_id = u'hazardset1b'
    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset1b = HazardSet()
    hazardset1b.id = hazardset_id
    hazardset1b.hazardtype = hazardtypeFL
    hazardset1b.local = True
    hazardset1b.calculation_method_quality = 3
    hazardset1b.data_lastupdated_date = datetime.now()
    hazardset1b.metadata_lastupdated_date = datetime.now()
    hazardset1b.complete = True
    hazardset1b.layers.extend(make_layers())
    DBSession.add(hazardset1b)

    # create hazardsets 2, 3 ... 6
    hazardset_id = u'hazardset2'
    # calculation_method_quality = 1 / scientific_quality = 0
    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset2 = HazardSet()
    hazardset2.id = hazardset_id
    hazardset2.hazardtype = hazardtypeEQ
    hazardset2.local = True
    hazardset2.calculation_method_quality = 1
    hazardset2.scientific_quality = 0
    hazardset2.data_lastupdated_date = datetime.now()
    hazardset2.metadata_lastupdated_date = datetime.now()
    hazardset2.complete = True
    hazardset2.layers.extend(make_layers())
    DBSession.add(hazardset2)

    hazardset_id = u'hazardset3'
    # date = 2015-01-01 / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset3 = HazardSet()
    hazardset3.id = hazardset_id
    hazardset3.hazardtype = hazardtypeEQ
    hazardset3.local = False
    hazardset3.calculation_method_quality = 1
    hazardset3.scientific_quality = 2
    hazardset3.data_lastupdated_date = datetime(2015, 1, 1, 0, 0)
    hazardset3.metadata_lastupdated_date = datetime.now()
    hazardset3.complete = True
    hazardset3.layers.extend(make_layers())
    DBSession.add(hazardset3)

    hazardset_id = u'hazardset4'
    # date = 2015-01-01 / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset4 = HazardSet()
    hazardset4.id = hazardset_id
    hazardset4.hazardtype = hazardtypeEQ
    hazardset4.local = True
    hazardset4.calculation_method_quality = 1
    hazardset4.scientific_quality = 2
    hazardset4.data_lastupdated_date = datetime(2015, 1, 1, 0, 0)
    hazardset4.metadata_lastupdated_date = datetime.now()
    hazardset4.complete = True
    hazardset4.layers.extend(make_layers())
    DBSession.add(hazardset4)

    hazardset_id = u'hazardset5'
    # date = now() / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset5 = HazardSet()
    hazardset5.id = hazardset_id
    hazardset5.hazardtype = hazardtypeEQ
    hazardset5.local = True
    hazardset5.calculation_method_quality = 1
    hazardset5.scientific_quality = 2
    hazardset5.data_lastupdated_date = datetime.now()
    hazardset5.metadata_lastupdated_date = datetime.now()
    hazardset5.complete = True
    hazardset5.layers.extend(make_layers())
    DBSession.add(hazardset5)

    hazardset_id = u'hazardset6'
    # date = now() / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    print 'Populating hazardset {}'.format(hazardset_id)
    hazardset6 = HazardSet()
    hazardset6.id = hazardset_id
    hazardset6.hazardtype = hazardtypeEQ
    hazardset6.local = False
    hazardset6.calculation_method_quality = 1
    hazardset6.scientific_quality = 2
    hazardset6.data_lastupdated_date = datetime.now()
    hazardset6.metadata_lastupdated_date = datetime.now()
    hazardset6.complete = True
    hazardset6.layers.extend(make_layers())
    DBSession.add(hazardset6)

    # populate output table

    # admin div (code 30) has only one hazardset for EQ
    admin30 = DBSession.query(AdministrativeDivision)\
        .filter(AdministrativeDivision.code == 30).one()
    # => test outcome = hazardset1
    output1 = Output()
    output1.coverage_ratio = 10
    output1.hazardset = hazardset1
    output1.administrativedivision = admin30
    output1.hazardlevel = HazardLevel.get(u'HIG')
    DBSession.add(output1)

    # admin div (code 30) also has another hazardset
    # but this one is for FL
    # => test outcome = hazardset1b
    output1b = Output()
    output1b.coverage_ratio = 11
    output1b.hazardset = hazardset1b
    output1b.administrativedivision = admin30
    output1b.hazardlevel = HazardLevel.get(u'NPR')
    DBSession.add(output1b)

    # admin div (code 31) has 2 hazardsets,
    # one with a higher calculation_method_quality
    # => test outcome = hazardset2
    admin31 = DBSession.query(AdministrativeDivision)\
        .filter(AdministrativeDivision.code == 31).one()
    output2 = Output()
    output2.coverage_ratio = 20
    output2.hazardset = hazardset1  # calculation_method_quality = 0
    output2.administrativedivision = admin31
    output2.hazardlevel = HazardLevel.get(u'MED')
    DBSession.add(output2)
    output3 = Output()
    output3.coverage_ratio = 30
    output3.hazardset = hazardset2  # calculation_method_quality = 1
    output3.administrativedivision = admin31
    output3.hazardlevel = HazardLevel.get(u'LOW')
    DBSession.add(output3)

    # admin div (code 32) has 2 hazardsets,
    # both share the same calculation_method_quality,
    # one with a higher scientific_quality
    # => test outcome = hazardset3
    admin32 = DBSession.query(AdministrativeDivision)\
        .filter(AdministrativeDivision.code == 32).one()
    output4 = Output()
    output4.coverage_ratio = 40
    output4.hazardset = hazardset2
    # calculation_method_quality = 1 / scientific_quality = 0
    output4.administrativedivision = admin32
    output4.hazardlevel = HazardLevel.get(u'MED')
    DBSession.add(output4)
    output5 = Output()
    output5.coverage_ratio = 50
    output5.hazardset = hazardset3
    # calculation_method_quality = 1 / scientific_quality = 2
    output5.administrativedivision = admin32
    output5.hazardlevel = HazardLevel.get(u'LOW')
    DBSession.add(output5)

    # admin div (code 33) has 2 hazardsets,
    # both share the same ratings, one is global, one local
    # => test outcome = hazardset4
    admin33 = DBSession.query(AdministrativeDivision)\
        .filter(AdministrativeDivision.code == 33).one()
    output6 = Output()
    output6.coverage_ratio = 60
    output6.hazardset = hazardset3
    # global / calculation_method_quality = 1 / scientific_quality = 2
    output6.administrativedivision = admin33
    output6.hazardlevel = HazardLevel.get(u'MED')
    DBSession.add(output6)
    output7 = Output()
    output7.coverage_ratio = 70
    output7.hazardset = hazardset4
    # local / calculation_method_quality = 1 / scientific_quality = 2
    output7.administrativedivision = admin33
    output7.hazardlevel = HazardLevel.get(u'LOW')
    DBSession.add(output7)

    # admin div (code 34) has 2 hazardsets,
    # both share the same ratings, are local, one is more recent
    # => test outcome = hazardset5
    admin34 = DBSession.query(AdministrativeDivision)\
        .filter(AdministrativeDivision.code == 34).one()
    output8 = Output()
    output8.coverage_ratio = 80
    output8.hazardset = hazardset4
    # date = 2015-01-01 / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    output8.administrativedivision = admin34
    output8.hazardlevel = HazardLevel.get(u'MED')
    DBSession.add(output8)
    output9 = Output()
    output9.coverage_ratio = 90
    output9.hazardset = hazardset5
    # date = now() / local /
    # calculation_method_quality = 1 / scientific_quality = 2
    output9.administrativedivision = admin34
    output9.hazardlevel = HazardLevel.get(u'LOW')
    DBSession.add(output9)

    # admin div (code 35) has 2 hazardsets,
    # both share the same ratings, are global, one is more recent
    # => test outcome = hazardset6
    admin35 = DBSession.query(AdministrativeDivision)\
        .filter(AdministrativeDivision.code == 35).one()
    output10 = Output()
    output10.coverage_ratio = 95
    output10.hazardset = hazardset3
    # date = 2015-01-01 / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    output10.administrativedivision = admin35
    output10.hazardlevel = HazardLevel.get(u'MED')
    DBSession.add(output10)
    output11 = Output()
    output11.coverage_ratio = 99
    output11.hazardset = hazardset6
    # date = now() / global /
    # calculation_method_quality = 1 / scientific_quality = 2
    output11.administrativedivision = admin35
    output11.hazardlevel = HazardLevel.get(u'LOW')
    DBSession.add(output11)

    DBSession.flush()
Code example #9
def harvest_layer(object, dry_run=False):
    title = object['title']
    '''
    typename = urllib.unquote(
        object['distribution_url'].split('/').pop())
    # print title
    '''

    hazardset_id = object['hazard_set']
    if not hazardset_id:
        logger.info('{} - hazard_set is empty'.format(title))
        return False

    hazard_type = object['hazard_type']
    if not hazard_type:
        logger.warning('{} - hazard_type is empty'.format(title))
        return False
    hazardtype = hazardtype_from_geonode(hazard_type)
    if hazardtype is None:
        logger.warning('{} - hazard_type not supported: {}'
                       .format(title, hazard_type))
        return False

    type_settings = settings['hazard_types'][hazardtype.mnemonic]
    preprocessed = 'values' in type_settings

    '''
    csw_wkt_geometry = object['csw_wkt_geometry']
    bounds = wkt_loads(csw_wkt_geometry).bounds
    # print '  bounds :', bounds

    # TODO: minimum global bbox should be defined in settings
    minimum_global_bounds = (-175, -45, 175, 45)
    from shapely.geometry import box
    local = not box(bounds).contains(box(minimum_global_bounds))
    '''
    local = 'GLOBAL' not in hazardset_id
    '''
    local = (
        bounds[0] > -175 or
        bounds[1] > -45 or
        bounds[2] < 175 or
        bounds[3] < 45)
    '''

    mask = False
    if preprocessed is True:
        hazardlevel = None
        hazard_unit = None
        if object['hazard_period']:
            logger.info('{} - Has a return period'.format(title))
            return False
        hazard_period = None
    else:
        hazard_period = int(object['hazard_period'])
        hazardlevel = None
        for level in (u'LOW', u'MED', u'HIG'):
            return_periods = type_settings['return_periods'][level]
            if isinstance(return_periods, list):
                if (hazard_period >= return_periods[0] and
                        hazard_period <= return_periods[1]):
                    hazardlevel = HazardLevel.get(level)
                    break
            else:
                if hazard_period == return_periods:
                    hazardlevel = HazardLevel.get(level)

        if ('mask_return_period' in type_settings and
                hazard_period == type_settings['mask_return_period']):
            mask = True

        if hazardlevel is None and not mask:
            logger.info('{} - No corresponding hazard_level'.format(title))
            return False

        hazard_unit = object['hazard_unit']
        if hazard_unit == '':
            logger.info('{} -  hazard_unit is empty'.format(title))
            return False

    if object['srid'] != 'EPSG:4326':
        logger.info('{} - srid is different from "EPSG:4326"'
                    .format(title))
        return False

    data_update_date = object['data_update_date']
    if not data_update_date:
        logger.warning('{} - data_update_date is empty'.format(title))
        # TODO: Restore bypassed constraint to get Volcanic data
        # return False
        data_update_date = datetime.now()

    metadata_update_date = object['metadata_update_date']
    if not metadata_update_date:
        logger.warning('{} - metadata_update_date is empty'.format(title))
        # return False
        metadata_update_date = datetime.now()

    calculation_method_quality = object['calculation_method_quality']
    if not calculation_method_quality:
        logger.warning('{} - calculation_method_quality is empty'
                       .format(title))
        return False
    calculation_method_quality = int(float(calculation_method_quality))

    scientific_quality = object['scientific_quality']
    if not scientific_quality:
        logger.warning('{} - scientific_quality is empty'.format(title))
        return False
    scientific_quality = int(float(scientific_quality))

    download_url = object['download_url']
    if not download_url:
        logger.warning('{} - download_url is empty'.format(title))
        return False

    hazardset = DBSession.query(HazardSet).get(hazardset_id)
    if hazardset is None:

        logger.info('{} - Create new hazardset {}'
                    .format(title, hazardset_id))
        hazardset = HazardSet()
        hazardset.id = hazardset_id
        hazardset.hazardtype = hazardtype
        hazardset.data_lastupdated_date = data_update_date
        hazardset.metadata_lastupdated_date = metadata_update_date
        DBSession.add(hazardset)
    else:
        # print '  Hazardset {} found'.format(hazardset_id)
        pass

    layer = DBSession.query(Layer) \
        .filter(Layer.hazardset_id == hazardset_id)
    if hazardlevel is not None:
        layer = layer.filter(Layer.hazardlevel_id == hazardlevel.id)
    if mask:
        layer = layer.filter(Layer.mask.is_(True))
    layer = layer.first()

    if layer is None:
        layer = Layer()
        logger.info('{} - Create new Layer {}'.format(title, title))
        DBSession.add(layer)

    else:
        if object['id'] == layer.geonode_id:
            # TODO: metadata change
            return False

        if hazard_period > layer.return_period:
            logger.info('{} - Use preferred return period {}'
                        .format(title, layer.return_period))
            return False

        logger.info('{} - Replace layer for level {}'
                    .format(title, hazardlevel.mnemonic))

    layer.hazardset = hazardset
    layer.hazardlevel = hazardlevel
    layer.mask = mask
    layer.return_period = hazard_period
    layer.hazardunit = hazard_unit
    layer.data_lastupdated_date = data_update_date
    layer.metadata_lastupdated_date = metadata_update_date
    layer.geonode_id = object['id']
    layer.download_url = download_url

    # TODO: retrieve quality attributes
    layer.calculation_method_quality = calculation_method_quality
    layer.scientific_quality = scientific_quality
    layer.local = local
    DBSession.flush()
    return True
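
Note: harvest_layer() handles a single GeoNode record (the "object" dict). A hypothetical calling loop; fetch_geonode_layers() is a placeholder for whatever retrieves those records from the GeoNode API and is not shown on this page.

def harvest(dry_run=False):
    harvested = 0
    for obj in fetch_geonode_layers():  # placeholder, not shown here
        if harvest_layer(obj, dry_run=dry_run):
            harvested += 1
    logger.info('{} layer(s) harvested'.format(harvested))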
Code example #10
def layerByLevel(self, level):
    hazardlevel = HazardLevel.get(level)
    return DBSession.query(Layer) \
        .filter(Layer.hazardset_id == self.id) \
        .filter(Layer.hazardlevel_id == hazardlevel.id) \
        .one_or_none()
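
Note: a minimal usage sketch for layerByLevel(); the hazardset id used here is illustrative.

hazardset = DBSession.query(HazardSet).get(u'hazardset1')  # illustrative id
layer = hazardset.layerByLevel(u'HIG')
if layer is not None:
    print layer.return_period, layer.hazardunit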
Code example #11
def process_hazardset(hazardset, force=False):
    if hazardset is None:
        # Check before dereferencing hazardset, otherwise the error path
        # itself would raise an AttributeError.
        raise ProcessException('HazardSet does not exist.')

    print hazardset.id
    chrono = datetime.datetime.now()
    last_percent = 0

    level_VLO = HazardLevel.get(u'VLO')

    if hazardset.processed:
        if force:
            hazardset.processed = False
        else:
            raise ProcessException('HazardSet {} has already been processed.'
                                   .format(hazardset.id))

    # clean previous outputs
    DBSession.query(Output) \
        .filter(Output.hazardset_id == hazardset.id) \
        .delete()
    DBSession.flush()

    hazardtype = hazardset.hazardtype
    hazardtype_settings = settings['hazard_types'][hazardtype.mnemonic]
    thresholds = hazardtype_settings['thresholds']

    project = partial(
        pyproj.transform,
        pyproj.Proj(init='epsg:3857'),
        pyproj.Proj(init='epsg:4326'))

    layers = {}
    for level in (u'HIG', u'MED', u'LOW'):
        hazardlevel = HazardLevel.get(level)
        layer = DBSession.query(Layer) \
            .filter(Layer.hazardset_id == hazardset.id) \
            .filter(Layer.hazardlevel_id == hazardlevel.id) \
            .one()
        layers[level] = layer

    with rasterio.drivers():
        with rasterio.open(layers['HIG'].path()) as src_hig, \
                rasterio.open(layers['MED'].path()) as src_med, \
                rasterio.open(layers['LOW'].path()) as src_low:
            readers = {}
            readers['HIG'] = src_hig
            readers['MED'] = src_med
            readers['LOW'] = src_low

            polygon_hig = polygonFromBounds(src_hig.bounds)
            polygon_med = polygonFromBounds(src_med.bounds)
            polygon_low = polygonFromBounds(src_low.bounds)
            polygon = cascaded_union((
                polygon_hig,
                polygon_med,
                polygon_low))

            adminlevel_REG = AdminLevelType.get(u'REG')

            admindivs = DBSession.query(AdministrativeDivision) \
                .filter(AdministrativeDivision.leveltype_id == adminlevel_REG.id)

            if hazardset.local:
                admindivs = admindivs \
                    .filter(
                        func.ST_Transform(AdministrativeDivision.geom, 4326)
                        .intersects(
                            func.ST_GeomFromText(polygon.wkt, 4326))) \
                    .filter(func.ST_Intersects(
                        func.ST_Transform(AdministrativeDivision.geom, 4326),
                        func.ST_GeomFromText(polygon.wkt, 4326)))

            current = 0
            outputs = 0
            total = admindivs.count()
            for admindiv in admindivs:
                # print ' ', admindiv.id, admindiv.code, admindiv.name

                current += 1
                if admindiv.geom is None:
                    print '   ', ('{}-{} has null geometry'
                                  .format(admindiv.code, admindiv.name))
                    continue

                reprojected = transform(
                    project,
                    to_shape(admindiv.geom))

                output = Output()
                output.hazardset = hazardset
                output.administrativedivision = admindiv
                output.hazardlevel = None

                # TODO: calculate coverage ratio
                output.coverage_ratio = 100

                for level in (u'HIG', u'MED', u'LOW'):
                    layer = layers[level]
                    src = readers[level]

                    if not reprojected.intersects(polygon):
                        continue

                    window = src.window(*reprojected.bounds)
                    data = src.read(1, window=window, masked=True)
                    if data.shape[0] * data.shape[1] == 0:
                        continue

                    threshold = thresholds[layer.hazardunit]
                    positive_data = (data > threshold).astype(rasterio.uint8)

                    division = features.rasterize(
                        ((g, 1) for g in [reprojected]),
                        out_shape=data.shape,
                        transform=src.window_transform(window),
                        all_touched=True)

                    masked = numpy.ma.masked_array(positive_data,
                                                   mask=~division.astype(bool))

                    if str(numpy.max(masked)) == str(numpy.ma.masked):
                        break
                    else:
                        if output.hazardlevel is None:
                            output.hazardlevel = level_VLO

                    if numpy.max(masked) > 0:
                        output.hazardlevel = layer.hazardlevel
                        break

                if output.hazardlevel is not None:
                    # print '    hazardlevel :', output.hazardlevel.mnemonic
                    DBSession.add(output)
                    outputs += 1

                percent = int(100.0 * current / total)
                if percent % 10 == 0 and percent != last_percent:
                    print '  ... processed {}%'.format(percent)
                    last_percent = percent
                    pass

    hazardset.processed = True

    DBSession.flush()
    transaction.commit()

    print ('Successfully processed {} divisions, {} outputs generated in {}'
           .format(total, outputs, datetime.datetime.now() - chrono))