Example #1
    def _decorated(*args, **kwargs):
        # Set up logging
        handler = get_handler(
            username=kwargs.get('username', None),
            taskname=kwargs.get('taskname', None))
        logger = logging.getLogger(kwargs.get('taskname', __name__))
        logger.addHandler(handler)
        logger.setLevel(kwargs.get('loglevel') or 20)

        try:
            with transaction.commit_on_success():
                result = the_func(*args, **kwargs)
            if result is None:  # you can provide your own result
                result = 'ok'
        except Exception:
            logger.error('Exception')
            for exception_line in traceback.format_exc().split('\n'):
                logger.error(exception_line)
            result = 'failure'

        # Record the result on the task execution and remove the logging
        # handler. Note that the handler is not removed if updating the
        # task execution fails.
        if handler.task_execution:
            handler.task_execution.result = result
            handler.task_execution.dt_finish = datetime.datetime.now()
            handler.task_execution.save()

        logger.removeHandler(handler)

        return result
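
Example #1 shows only the inner wrapper of a decorator; the enclosing function and its imports are not part of the listing. Purely as an assumption (the real decorator name is not shown), the missing outer shell could look like this, with the body of _decorated being exactly the code above:

import functools


def task_logging(the_func):
    # Assumed name for the missing outer decorator; the code from
    # Example #1 forms the body of _decorated.
    @functools.wraps(the_func)
    def _decorated(*args, **kwargs):
        pass  # Example #1 body goes here
    return _decorated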
Example #2
def sync_layers_with_wmsserver(
    synctask=None, all=False, username=None, taskname=None, loglevel=20):

    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    if all:
        tasks = SyncTask.objects.all()
        logger.info('All sync tasks')
    else:
        tasks = [SyncTask.objects.get(synctask), ]
        logger.info('Task: %s' % synctask)

    for task in tasks:
        try:
            perform_sync_task(task=task)
        except Exception as e:
            logger.error(
                'Something went wrong performing task %s: %s' % (task, e))
            continue

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #3
def validate_all(taskname='validate_all', username=None):
    """Import all currently available configurations.

    This method is a spike to see whether the import of water balance
    configurations actually works. As such, it is clearly a work in progress:

      - there are no unit tests;
      - it only supports water balance configurations;
      - dbf files are extracted to a hard-coded directory;
      - dbf files are not removed after the import;
      - zip files are not removed after the import;
      - there is no error handling.

    """
    logger = logging.getLogger(__name__)
    handler = get_handler(taskname=taskname, username=username)
    logger.addHandler(handler)
    retriever = create_configurations_retriever()
    for configuration in retriever.retrieve_configurations():
        zip_file = ZipFile(configuration.zip_file_path)
        zip_file.extract('aanafvoer_waterbalans.dbf', '/tmp')
        zip_file.extract('grondwatergebieden.dbf', '/tmp')
        zip_file.extract('pumpingstations.dbf', '/tmp')
        dbfimporter = DBFImporter()
        dbfimporter.logger = logger
        dbfimporter.fews_meta_info = configuration.meta_info
        dbfimporter.areas_filepath = '/tmp/aanafvoer_waterbalans.dbf'
        dbfimporter.buckets_filepath = '/tmp/grondwatergebieden.dbf'
        dbfimporter.structures_filepath = '/tmp/pumpingstations.dbf'
        dbfimporter.import_dbf()
    logger.removeHandler(handler)
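
The docstring above notes that the dbf files are extracted to a hard-coded directory and never removed. As a sketch only, and not code from the original project, the extraction loop could instead use a throwaway temporary directory (extract_configuration_dbfs is a hypothetical helper):

import tempfile
from zipfile import ZipFile


def extract_configuration_dbfs(zip_file_path):
    # Extract the three dbf files into a fresh temporary directory
    # instead of the hard-coded /tmp, and return that directory.
    tmpdir = tempfile.mkdtemp(prefix='waterbalans-')
    zip_file = ZipFile(zip_file_path)
    for name in ('aanafvoer_waterbalans.dbf',
                 'grondwatergebieden.dbf',
                 'pumpingstations.dbf'):
        zip_file.extract(name, tmpdir)
    return tmpdir

The caller would point the DBFImporter filepaths at the returned directory and remove it with shutil.rmtree in a finally block once import_dbf() has run.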
Example #4
def test_task(username=None, db_name=None, taskname=None):
    """
    Test task
    """
    handler = get_handler(username=username, taskname=taskname)
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.setLevel(20)

    logger.info('I did my job')
    return 'OK'
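
get_handler itself does not appear anywhere in this listing. Judging from the call sites, it takes username and taskname keyword arguments and returns a logging handler; Example #1 additionally reads handler.task_execution, so the real implementation presumably records a task-execution row in the database. The following is only a minimal stand-in under those assumptions (TaskLogHandler is a hypothetical class, not the project's):

import logging


class TaskLogHandler(logging.Handler):
    # Hypothetical minimal handler: it keeps formatted records in memory
    # and carries a task_execution slot, mirroring how Example #1 uses
    # handler.task_execution (the real handler presumably writes to the
    # database instead).
    def __init__(self, username=None, taskname=None):
        logging.Handler.__init__(self)
        self.username = username
        self.taskname = taskname
        self.task_execution = None
        self.records = []

    def emit(self, record):
        self.records.append(self.format(record))


def get_handler(username=None, taskname=None):
    # Assumed signature, matching the call sites in the examples.
    return TaskLogHandler(username=username, taskname=taskname)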
Example #5
def workspace_update_trackrecords(username=None, taskname=None, loglevel=20):
    """
    Create or replace trackrecordslayers in correct workspace
    """
    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    layerworkspace = LayerWorkspace.objects.get(slug='p_map')
    layerworkspace.layers.clear()
    LayerWorkspaceItem.objects.create(
        layer_workspace=layerworkspace,
        layer=Layer.objects.get(slug='p-totaal-in-bodem'),
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layerworkspace,
        layer=Layer.objects.get(slug='witte-waas-gebieden'),
    )

    logger.info('Replaced P-layer')

    layerworkspace = LayerWorkspace.objects.get(slug='po4_map')
    layerworkspace.layers.clear()
    LayerWorkspaceItem.objects.create(
        layer_workspace=layerworkspace,
        layer=Layer.objects.get(slug='po4-in-bodemvocht'),
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layerworkspace,
        layer=Layer.objects.get(slug='witte-waas-gebieden'),
    )
    logger.info('Replaced PO4-layer')

    layerworkspace = LayerWorkspace.objects.get(slug='aqmad_map')
    layerworkspace.layers.clear()
    LayerWorkspaceItem.objects.create(
        layer_workspace=layerworkspace,
        layer=Layer.objects.get(slug='aqmad-water-ptot'),
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layerworkspace,
        layer=Layer.objects.get(slug='witte-waas-gebieden'),
    )
    logger.info('Replaced aqmad Ptot-layer')

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #6
def sync_fewsnorm(username=None, db_name=None, taskname=None):
    """
    The fewsnorm sync function

    Option data_set_name
    """
    #logger = sync_fewsnorm.get_logger()

    handler = get_handler(username=username, taskname=taskname)
    logger = logging.getLogger(FEWSNORM_LOG_NAME)
    logger.addHandler(handler)
    logger.setLevel(20)

    sources = get_sources(db_name)
    if not sources:
        logger.info("No databases selected. Check your database "
                    "settings and db_name (if provided).")
    for source in sources:
        logger.info('Updating %s...' % source)
        logger.debug(
            'Updating ParameterCache for fewsnorm %s...', source.name)
        parameters = source.sync_parameter_cache()

        logger.debug(
            'Updating ModuleCache for fewsnorm %s...', source.name)
        modules = source.sync_module_cache()

        logger.debug(
            'Updating TimeStepCache for fewsnorm %s...', source.name)
        time_steps = source.sync_time_step_cache()

        logger.debug(
            'Updating GeoLocationCache for fewsnorm %s...', source.name)
        locations = source.sync_location_cache()
        # from lizard_fewsnorm.models import GeoLocationCache
        # locations = dict([(l.ident, l) for l in GeoLocationCache.objects.filter(fews_norm_source=source)])

        logger.debug(
            'Updating QualifierSetCache for fewsnorm %s...', source.name)
        qualifier_sets = source.sync_qualifier_set_cache()

        logger.debug(
            'Updating TimeSeriesCache for fewsnorm %s...', source.name)
        source.sync_time_series_cache(
            locations, parameters, modules, time_steps, qualifier_sets)

    logger.removeHandler(handler)

    return 'OK'
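
The commented-out sync_fewsnorm.get_logger() line suggests these functions run as Celery tasks. The listing never shows the registration itself, so the following is only an assumption in the Celery 2.x/3.x style of that era, with a hypothetical wrapper name:

from celery.task import task


@task
def sync_fewsnorm_task(username=None, db_name=None, taskname='sync_fewsnorm'):
    # Hypothetical wrapper: delegate to the plain function above so the same
    # code can run from Celery as well as from a management command.
    return sync_fewsnorm(
        username=username, db_name=db_name, taskname=taskname)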
Example #7
def rebuild_jdbc_cache_task(username=None, db_name=None,
                            taskname=None, *args, **options):
    """
    Rebuild filter cache for fewsjdbc.

    Options can be
        timeout
        deep
    """
    handler = get_handler(username=username, taskname=taskname)
    logger = logging.getLogger('rebuild_jdbc_cache')
    logger.addHandler(handler)
    logger.setLevel(20)

    rebuild_jdbc_cache(logger, *args, **options)
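
The docstring mentions timeout and deep options but no call is shown. A hypothetical invocation, assuming both options are plain keyword arguments passed through to rebuild_jdbc_cache via **options, would be:

rebuild_jdbc_cache_task(
    username='admin',  # placeholder
    taskname='rebuild_jdbc_cache',
    timeout=60,  # option names from the docstring; values assumed
    deep=True)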
Example #8
def workspace_update_minimap(username=None, taskname=None, loglevel=20):
    """
    Add an area layer with a special style.
    """
    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    MINIMAP_LAYER_SLUG_KRW = 'minimap-krw'
    MINIMAP_LAYERWORKSPACE_SLUG_KRW = 'minimap-krw'
    MINIMAP_LAYER_SLUG_AREA = 'minimap-area'
    MINIMAP_LAYERWORKSPACE_SLUG_AREA = 'minimap-area'

    # For krw minimap
    _create_single_layer_workspace(
        layerworkspace_template_slug='watersysteemkaart',
        layerworkspace_slug=MINIMAP_LAYERWORKSPACE_SLUG_KRW,
        layerworkspace_name='MiniMap KRW',
        layer_template_slug='krw_waterlichaam',
        layer_style='vss_red_on_gray_line,vss_red_on_gray',
        layer_slug=MINIMAP_LAYER_SLUG_KRW,
        layer_name='MiniMap KRW',
        defaults={
            'use_location_filter': True,
            'location_filter': {'key': 'env', 'tpl': 'ident:{id}'},
            'is_local_server': True,
            'source_ident': 'workspace-update-command',
        }
    )

    # For area minimap
    _create_single_layer_workspace(
        layerworkspace_template_slug='watersysteemkaart',
        layerworkspace_slug=MINIMAP_LAYERWORKSPACE_SLUG_AREA,
        layerworkspace_name='MiniMap gebieden',
        layer_template_slug='witte-waas-gebieden',
        layer_style='vss_red_on_gray',
        layer_slug=MINIMAP_LAYER_SLUG_AREA,
        layer_name='MiniMap gebieden',
    )

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #9
def prepare_configurations_as_task(taskname, levelno=20, username=None):
    """Prepare the configurations for validation.

    This task has the same functionality as the ``prepare_configurations``
    function.

    """
    logger = logging.getLogger(taskname)
    handler = get_handler(taskname=taskname, username=username)
    logger.addHandler(handler)
    logger.setLevel(levelno)

    logger.info('Start the preparations of configurations')

    config_store = ConfigurationStore()
    config_store.logger = logger
    config_store.supply()

    logger.info('END PREPARATION')

    logger.removeHandler(handler)
Example #10
def rebuild_unblobbed_cache(username=None, db_name=None, taskname=None):
    """
    Populate fews unblobbed cache for better user experience
    """
    handler = get_handler(username=username, taskname=taskname)
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.setLevel(20)

    logger.info('Processing filter tree...')
    fews_filters(ignore_cache=True)
    logger.info('Processing Timeserie.has_data_dict...')
    Timeserie.has_data_dict(ignore_cache=True)
    logger.info('Processing filters...')
    for f in Filter.objects.all():
        f.parameters()
    logger.info('Finished successfully.')

    return 'OK'
Example #11
def import_dbf(fews_meta_info=None,
               areas_filepath=None,
               buckets_filepath=None,
               structures_filepath=None,
               taskname="",
               username=None,
               levelno=20):
    """Import a waterbalance configuration from dbf.

    This function is provided for convenience only. It allows us to test the
    waterbalance configuration import without the need of a
    ConfigurationToValidate.

    """
    handler = get_handler(taskname=taskname, username=username)
    logger = logging.getLogger(taskname)
    logger.addHandler(handler)
    logger.setLevel(int(levelno))

    dbfimporter = DBFImporter()
    dbfimporter.fews_meta_info = fews_meta_info
    dbfimporter.areas_filepath = areas_filepath
    dbfimporter.buckets_filepath = buckets_filepath
    dbfimporter.structures_filepath = structures_filepath

    # Enable lizard_history logging by starting a fake request
    try:
        user = User.objects.get(username=username)
    except (User.DoesNotExist, User.MultipleObjectsReturned):
        user = None
    utils.start_fake_request(user=user)

    try:
        dbfimporter.import_dbf()
    finally:
        # End the fake request, so that lizard_history will log the changes
        utils.end_fake_request()

    logger.removeHandler(handler)
    return "<<import dbf>>"
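
A hypothetical call to import_dbf, reusing the dbf paths that Example #3 extracts to /tmp (the meta info, task name and username are placeholders):

import_dbf(
    fews_meta_info='manual import',
    areas_filepath='/tmp/aanafvoer_waterbalans.dbf',
    buckets_filepath='/tmp/grondwatergebieden.dbf',
    structures_filepath='/tmp/pumpingstations.dbf',
    taskname='import_dbf',
    username='admin',
    levelno=20)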
Example #12
def sync_layers_ekr(
    slug='vss_area_value', username=None, taskname=None, loglevel=20):
    """
    Despite its name, this syncs layers for ekr, esf and measures.

    The task adds Layer objects with the correct cql filters. The cql
    filter names are predefined as ValueType elsewhere, such as in
    vss.tasks.
    """

    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    source_ident = 'lizard-layers::%s' % slug
    layer = Layer.objects.get(slug=slug)
    original_tags = layer.tags.all()
    logger.info('template: %s' % layer)

    tag, _ = Tag.objects.get_or_create(slug=slug)
    logger.info('tag: %s' % tag)

    logger.debug('Invalidating existing layers...')
    existing_layers = dict(
        (l.slug, l) for l in
        Layer.objects.filter(source_ident=source_ident))
    for existing_layer in existing_layers.values():
        # Invalidate first and remove tags
        existing_layer.valid = False
        existing_layer.tags.clear()
        existing_layer.save()

    count_update, count_new = 0, 0

    group_tag_ekr = 'ekr-layers'
    group_tag_esf = 'esf-layers'
    group_tag_measure_status = 'measure-status-layers'
    name_cql_style = (
        ('EKR VIS', "name = 'EKR-VIS'", 'vss_ekr_value', group_tag_ekr),
        ('EKR FYTOPL', "name = 'EKR-FYTOPL'", 'vss_ekr_value', group_tag_ekr),
        ('EKR MAFAUNA', "name = 'EKR-MAFAUNA'", 'vss_ekr_value', group_tag_ekr),
        ('EKR OVWFLORA', "name = 'EKR-OVWFLORA'", 'vss_ekr_value', group_tag_ekr),
        ('EKR Minst gunstig', "name = 'EKR-ONGUNSTIG'", 'vss_ekr_value', group_tag_ekr),
        ('EKR Doelstelling', "name = 'EKR-DOELSCORE'", 'vss_ekr_score', group_tag_ekr),
        ('ESF 1', "name = 'ESF-1'", 'vss_esf', group_tag_esf),
        ('ESF 2', "name = 'ESF-2'", 'vss_esf', group_tag_esf),
        ('ESF 3', "name = 'ESF-3'", 'vss_esf', group_tag_esf),
        ('ESF 4', "name = 'ESF-4'", 'vss_esf', group_tag_esf),
        ('ESF 5', "name = 'ESF-5'", 'vss_esf', group_tag_esf),
        ('ESF 6', "name = 'ESF-6'", 'vss_esf', group_tag_esf),
        ('ESF 7', "name = 'ESF-7'", 'vss_esf', group_tag_esf),
        ('ESF 8', "name = 'ESF-8'", 'vss_esf', group_tag_esf),
        ('ESF 9', "name = 'ESF-9'", 'vss_esf', group_tag_esf),
        ('ESF STATUS', "name = 'ESF-STATUS'", 'vss_esf', group_tag_esf),
        ('Maatregel status', "name = 'MEASURE-STATUS'", 'vss_measure_status',
         group_tag_measure_status),
        ('Maatregel status planning', "name = 'MEASURE-STATUS-PLANNING'", 'vss_measure_status',
         group_tag_measure_status),
        ('Aantal maatregelen aan- afvoergebied', "name = 'MEASURE-COUNT'",
         'vss_measure_count', group_tag_measure_status),
        ('Aantal krw maatregelen', "name = 'MEASURE-COUNT-KRW'",
         'vss_measure_count', group_tag_measure_status),
        ('Aantal uitgevoerde maatregelen aan- afvoergebied', "name = 'MEASURE-COUNT-FINISHED'",
         'vss_measure_count', group_tag_measure_status),
        ('Aantal uitgevoerde krw maatregelen', "name = 'MEASURE-COUNT-FINISHED-KRW'",
         'vss_measure_count', group_tag_measure_status),
    )
    for name, cql, style, group_tag in name_cql_style:

        instance_slug = slugify(name)
        if instance_slug in existing_layers:
            # Update existing, the old existing tags have been
            # removed already.
            new_layer = existing_layers[instance_slug]
            logger.debug('Update: %s' % instance_slug)
            new_layer.data_set = layer.data_set
            count_update += 1
        else:
            # New
            logger.debug('New: %s' % instance_slug)
            new_layer = deepcopy(layer)
            new_layer.slug = instance_slug
            new_layer.id = None
            count_new += 1

        new_layer.filter = cql

        # Note that the same name can occur multiple times, but
        # with different mod, qua and/or stp.
        new_layer.name = name
        new_layer.source_ident = source_ident
        new_layer.valid = True
        new_layer.is_local_server = True
        new_layer.is_clickable = layer.is_local_server
        new_layer.js_popup_class = layer.js_popup_class
        new_layer.request_params = simplejson.dumps(dict(styles=style))
        new_layer.save()

        new_layer.tags.add(tag)
        for original_tag in original_tags:
            new_layer.tags.add(original_tag)
        group_tag, _ = Tag.objects.get_or_create(
            slug=group_tag,
        )
        new_layer.tags.add(group_tag)

    logger.info('new %d items' % count_new)
    logger.info('updated %d items' % count_update)

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #13
def sync_ekr(username=None, taskname=None, dataset=None, loglevel=20):
    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    logger.info('sync_ekr')

    # Actual code to do the task
    value_type_worst = ValueType.objects.get(name='EKR-ONGUNSTIG')
    value_type_score = ValueType.objects.get(name='EKR-DOELSCORE')

    parameter_types = list(ParameterType.objects.all())

    areas = Area.objects.filter(
        area_class=Area.AREA_CLASS_KRW_WATERLICHAAM,
        )
    if dataset:
        areas = areas.filter(data_set__name=dataset)
        logger.info('Data set: %s' % dataset)
    logger.info('Updating %d areas...' % areas.count())
    for area in areas:
        logger.debug('Updating area %s...' % area)
        values = []
        for parameter_type in parameter_types:
            timeseries = TimeSeriesCache.objects.filter(
                parametercache=parameter_type.parameter,
                geolocationcache__ident=area.ident,
            )
            value = None
            flag = None
            comment = None
            timestamp = None
            if timeseries:
                timeserie = timeseries[0]
                try:
                    event = timeserie.get_latest_event()
                    value = event.value
                    flag = event.flag
                    comment = event.comment
                    timestamp = event.timestamp
                except IndexError:
                    # No events at all
                    pass

            update_area_value(
                area=area,
                value=value,
                flag=flag,
                comment=comment,
                timestamp=timestamp,
                value_type=parameter_type.value_type,
            )
            values.append({
                'parameter_type': parameter_type,
                'value': value,
                'flag': flag,
                'comment': comment,
                'timestamp': timestamp,
            })

        try:
            # value_worst = min([v['value'] for v in values
            #                    if v['value'] is not None])
            value_worst = min(values, key=lambda v: v['value'])
        except ValueError:
            # All ekrs are None
            value_worst = None

        if value_worst is not None:
            update_area_value(
                area=area,
                value=value_worst['value'],
                comment=value_worst['comment'],
                value_type=value_type_worst,
                )
        else:
            update_area_value(
                area=area,
                value=None,
                comment=None,
                value_type=value_type_worst,
                )
        judgements = _judgements(values, area)
        overall_judgement = _overall_judgement(judgements)
        update_area_value(
            area=area,
            value=overall_judgement,
            comment=None,
            value_type=value_type_score,
        )
        logger.debug('worst: %s, overall: %s' % (
                str(value_worst), overall_judgement))

    logger.info('Finished')

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #14
def workspace_update_watersystem(username=None, taskname=None, loglevel=20):
    """
    Reconfigure layers for the watersystem map.
    """
    def _create_or_replace_merged_layer(name, slug, tag, layers):
        """
        Return the created layer object.

        The layer is created with the given name and slug; its layers field
        is assembled from the layers fields of the contributing layers,
        which are looked up by that same field.
        """
        Layer.objects.filter(slug=slug).delete()

        contributing_layers = Layer.objects.filter(
            layers__in=layers,
        )
        layer = Layer.objects.create(
            slug=slug,
            name=name,
            source_ident='workspace-update-command',
            server=contributing_layers[0].server,
            layers=','.join(l.layers for l in contributing_layers),
        )
        layer.tags.add(tag)
        return layer

    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    WORKSPACE_SLUG = 'watersysteemkaart'
    TAG_SLUG = 'basis'

    # Clear the layer workspace, get the tag
    layer_workspace = LayerWorkspace.objects.get(
        slug=WORKSPACE_SLUG,
    )
    LayerWorkspaceItem.objects.filter(
        layer_workspace=layer_workspace,
    ).delete()
    tag = Tag.objects.get_or_create(slug=TAG_SLUG)[0]

    # Create layers and add to workspace
    # Kunstwerken basis
    layer = _create_or_replace_merged_layer(
        slug='kunstwerken-basis',
        name='Kunstwerken Basis',
        tag=tag,
        layers=['wsh:gemaal', 'wsh:stuw', 'wsh:sluis'],
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=True,
        index=10,
    )

    # Kunstwerken extra
    layer = _create_or_replace_merged_layer(
        slug='kunstwerken-extra',
        name='Kunstwerken Extra',
        tag=tag,
        layers=[
            'wsh:vispassage', 'wsh:vaste_dam', 'wsh:sifon',
            'wsh:duiker', 'wsh:coupure', 'wsh:brug', 'wsh:aquaduct',
        ],
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=False,
        index=20,
    )

    # Peilgebied
    layer = _create_or_replace_merged_layer(
        slug='peilgebied-basis',
        name='Peilgebied',
        tag=tag,
        layers=['wsh:peilgebied'],
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=False,
        index=30,
    )

    # Waterloop
    layer = _create_or_replace_merged_layer(
        slug='waterloop-basis',
        name='Waterloop',
        tag=tag,
        layers=['wsh:waterloop'],
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=False,
        index=40,
    )

    # Oppervlakte waterdeel
    layer = _create_or_replace_merged_layer(
        slug='oppervlakte-waterdeel-basis',
        name='Oppervlakte waterdeel',
        tag=tag,
        layers=['wsh:oppervlakte_waterdeel'],
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=False,
        index=50,
    )

    # Waterlichaam
    layer = _create_or_replace_merged_layer(
        slug='krw_waterlichaam',
        name='KRW-waterlichaam',
        tag=tag,
        layers=[
            'vss:vss_krw_waterbody_polygon',
            'vss:vss_krw_waterbody_linestring',
        ],
    )
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=False,
        index=60,
    )

    layer = Layer.objects.get(slug='vss_measure')
    layer.name = 'Maatregelen'
    layer.js_popup_class = 'MeasurePopup'
    layer.save()
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=False,
        index=70,
    )

    layer = Layer.objects.get(slug='vss_annotation')
    layer.name = 'Analyse interpretaties'
    layer.js_popup_class = 'AnnotationPopup'
    layer.save()
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=False,
        index=80,
    )

    layer = Layer.objects.get(slug='witte-waas-gebieden')
    layer.name = 'Masker'
    layer.save()
    LayerWorkspaceItem.objects.create(
        layer_workspace=layer_workspace,
        layer=layer,
        visible=True,
        index=90,
    )

    logger.info('Reinstalled watersystem workspace.')

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #15
def workspace_update_baselayers(username=None, taskname=None, loglevel=20):
    """
    Reconfigure layers that have is_base_layer=True
    """
    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    TOP10NL_LAYER_SLUG = 'top10nl'
    TOP10NL_TAG_SLUG = 'server_pdok-top10'
    SEMI_TRANSPARENT_SLUG_POSTFIX = '-semi-transparent'
    SEMI_TRANSPARENT_NAME_POSTFIX = ' (semitransparant)'

    # Get open streetmap, rename if necessary
    try:
        osm = Layer.objects.get(name='openstreetmap')
        osm.name = 'OpenStreetMap'
        osm.save()
    except Layer.DoesNotExist:
        osm = Layer.objects.get(name='OpenStreetMap')

    # Remove anything except osm
    Layer.objects.filter(
        is_base_layer=True,
    ).exclude(
        pk=osm.pk,
    ).update(is_base_layer=False)

    # Remove old baselayer(s) for the top10nl if it exists
    Layer.objects.filter(slug=TOP10NL_LAYER_SLUG).delete()

    # Add a baselayer for the top10nl
    tag = Tag.objects.get(slug=TOP10NL_TAG_SLUG)
    top10_layers = tag.layer_set.all()
    new_layer = top10_layers[0]
    new_layer.name = 'Top10NL'
    new_layer.slug = TOP10NL_LAYER_SLUG
    new_layer.is_base_layer = True
    new_layer.source_ident = 'workspace-update-command'
    new_layer.layers = ','.join(l.layers for l in top10_layers)
    new_layer.source_ident = None
    new_layer.pk = None  # We want a new layer.
    new_layer.save()

    logger.info('Created default baselayers.')

    # Add or replace baselayers with 50% opacity
    base_layers = Layer.objects.filter(is_base_layer=True)
    Layer.objects.filter(
        slug__in=[b.slug + SEMI_TRANSPARENT_SLUG_POSTFIX
                  for b in base_layers],
    ).delete()

    for b in base_layers:
        options = simplejson.loads(b.options)
        options.update(opacity=0.5)
        b.pk = None  # Clone the layer
        b.source_ident = 'workspace-update-command'
        b.slug += SEMI_TRANSPARENT_SLUG_POSTFIX
        b.name += SEMI_TRANSPARENT_NAME_POSTFIX
        b.options = simplejson.dumps(options)
        b.save()

    logger.info('Added transparent variants of default baselayers.')

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #16
def sync_ekr(username=None, taskname=None, dataset=None, loglevel=20):
    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    logger.info('sync_ekr')

    # Actual code to do the task
    value_type_worst = ValueType.objects.get(name='EKR-ONGUNSTIG')
    value_type_score = ValueType.objects.get(name='EKR-DOELSCORE')

    parameter_types = list(ParameterType.objects.all())

    areas = Area.objects.filter(area_class=Area.AREA_CLASS_KRW_WATERLICHAAM, )
    if dataset:
        areas = areas.filter(data_set__name=dataset)
        logger.info('Data set: %s' % dataset)
    logger.info('Updating %d areas...' % areas.count())
    for area in areas:
        logger.debug('Updating area %s...' % area)
        values = []
        for parameter_type in parameter_types:
            timeseries = TimeSeriesCache.objects.filter(
                parametercache=parameter_type.parameter,
                geolocationcache__ident=area.ident,
            )
            value = None
            flag = None
            comment = None
            timestamp = None
            if timeseries:
                timeserie = timeseries[0]
                try:
                    event = timeserie.get_latest_event()
                    value = event.value
                    flag = event.flag
                    comment = event.comment
                    timestamp = event.timestamp
                except IndexError:
                    # No events at all
                    pass

            update_area_value(
                area=area,
                value=value,
                flag=flag,
                comment=comment,
                timestamp=timestamp,
                value_type=parameter_type.value_type,
            )
            values.append({
                'parameter_type': parameter_type,
                'value': value,
                'flag': flag,
                'comment': comment,
                'timestamp': timestamp,
            })

        try:
            # value_worst = min([v['value'] for v in values
            #                    if v['value'] is not None])
            value_worst = min(values, key=lambda v: v['value'])
        except ValueError:
            # All ekrs are None
            value_worst = None

        if value_worst is not None:
            update_area_value(
                area=area,
                value=value_worst['value'],
                comment=value_worst['comment'],
                value_type=value_type_worst,
            )
        else:
            update_area_value(
                area=area,
                value=None,
                comment=None,
                value_type=value_type_worst,
            )
        judgements = _judgements(values, area)
        overall_judgement = _overall_judgement(judgements)
        update_area_value(
            area=area,
            value=overall_judgement,
            comment=None,
            value_type=value_type_score,
        )
        logger.debug('worst: %s, overall: %s' %
                     (str(value_worst), overall_judgement))

    logger.info('Finished')

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #17
def sync_layers_track(
    slug='vss_track_records', username=None, taskname=None, loglevel=20):
    """
    Sync layers for track records.
    """

    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    source_ident = 'lizard-layers::%s' % slug
    layer = Layer.objects.get(slug=slug)
    original_tags = layer.tags.all()
    logger.info('template: %s' % layer)

    tag, _ = Tag.objects.get_or_create(slug=slug)
    logger.info('tag: %s' % tag)

    logger.debug('Invalidating existing layers...')
    existing_layers = dict(
        (l.slug, l) for l in
        Layer.objects.filter(source_ident=source_ident))
    for existing_layer in existing_layers.values():
        # Invalidate first and remove tags
        existing_layer.valid = False
        existing_layer.tags.clear()
        existing_layer.save()

    count_update, count_new = 0, 0

    group_tag = 'track_records'
    parameter_id_Ptot = ParameterCache.objects.get(ident='Ptot.bodem').id
    parameter_id_PO4 = ParameterCache.objects.get(ident='PO4.bodem').id
    parameter_id_aqmad_Ptot = ParameterCache.objects.get(
        ident='Ptot.z-score.water',
    ).id
    name_cql_style = (
        (
            'PO4 in bodemvocht',
            "parameter_id = %s" % parameter_id_PO4,
            'vss_track_record_PO4',
        ),
        (
            'P-totaal in bodem',
            "parameter_id = %s" % parameter_id_Ptot,
            'vss_track_record_Ptot',
        ),
        (
            'AqMaD water Ptot',
            "parameter_id = %s" % parameter_id_aqmad_Ptot,
            'vss_aqmad_Ptot',
        ),

    )
    for name, cql, style in name_cql_style:

        instance_slug = slugify(name)
        if instance_slug in existing_layers:
            # Update existing, the old existing tags have been
            # removed already.
            new_layer = existing_layers[instance_slug]
            logger.debug('Update: %s' % instance_slug)
            new_layer.data_set = layer.data_set
            count_update += 1
        else:
            # New
            logger.debug('New: %s' % instance_slug)
            new_layer = deepcopy(layer)
            new_layer.slug = instance_slug
            new_layer.id = None
            count_new += 1

        new_layer.filter = cql

        # Note that the same name can occur multiple times, but
        # with different mod, qua and/or stp.
        new_layer.name = name
        new_layer.source_ident = source_ident
        new_layer.valid = True
        new_layer.is_local_server = True
        new_layer.is_clickable = layer.is_local_server
        new_layer.js_popup_class = layer.js_popup_class
        new_layer.request_params = simplejson.dumps(dict(styles=style))
        new_layer.save()

        new_layer.tags.add(tag)
        for original_tag in original_tags:
            new_layer.tags.add(original_tag)
        group_tag, _ = Tag.objects.get_or_create(
            slug=group_tag,
        )
        new_layer.tags.add(group_tag)

    logger.info('new %d items' % count_new)
    logger.info('updated %d items' % count_update)

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #18
def sync_layers_measure(
    slug='vss_measure', username=None, taskname=None, loglevel=20):

    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    logger.info('start sync')
    source_ident = 'lizard-layers::%s' % slug

    layer = Layer.objects.get(slug=slug)
    original_tags = layer.tags.all()
    logger.info('template: %s' % layer)

    tag, _ = Tag.objects.get_or_create(slug=slug)
    logger.info('tag: %s' % tag)

    logger.debug('Invalidating existing layers...')
    existing_layers = dict(
        (l.slug, l) for l in
        Layer.objects.filter(source_ident=source_ident))
    for existing_layer in existing_layers.values():
        # Invalidate first and remove tags
        existing_layer.valid = False
        existing_layer.tags.clear()
        existing_layer.save()

    count_update, count_new = 0, 0

    esf_name_template = "Maatregelen ESF%s"
    esf_cql_template = ("esf = %s AND ("
                        "is_target_esf = TRUE OR "
                        "positive = TRUE OR "
                        "negative = TRUE)")
    datalist = [
        # For measure layers based on type
        {
            'group_tag': 'maatregel-type',
            'cql_and_names': (
                ("type like 'BE%'", 'Beheermaatregelen'),
                ("type like 'BR%'", 'Bronmaatregelen'),
                ("type like 'IM%'", 'Immissiemaatregelen'),
                ("type like 'IN%'", 'Inrichtingsmaatregelen'),
                (
                    "type LIKE 'G%' OR type LIKE 'S%' OR type LIKE 'RO%'",
                    'Overige maatregelen',
                ),
            )
        },
        # For measure layers based on related esf
        {
            'group_tag': 'maatregel-esf',
            'cql_and_names': [(esf_cql_template % e, esf_name_template % e)
                              for e in range(1, 10)]
        },
    ]

    for datadict in datalist:
        for cql, name in datadict['cql_and_names']:

            instance_slug = slugify(name)
            if instance_slug in existing_layers:
                # Update existing, the old existing tags have been
                # removed already.
                new_layer = existing_layers[instance_slug]
                logger.debug('Update: %s' % instance_slug)
                new_layer.data_set = layer.data_set
                count_update += 1
            else:
                # New
                logger.debug('New: %s' % instance_slug)
                new_layer = deepcopy(layer)
                new_layer.slug = instance_slug
                new_layer.id = None
                count_new += 1

            new_layer.filter = cql

            # Note that the same name can occur multiple times, but
            # with different mod, qua and/or stp.
            new_layer.name = name
            new_layer.source_ident = source_ident
            new_layer.valid = True
            new_layer.is_local_server = True
            new_layer.is_clickable = layer.is_local_server
            new_layer.js_popup_class = layer.js_popup_class
            new_layer.save()

            new_layer.tags.add(tag)
            for original_tag in original_tags:
                new_layer.tags.add(original_tag)
            group_tag, _ = Tag.objects.get_or_create(
                slug=datadict['group_tag'],
            )
            new_layer.tags.add(group_tag)

    logger.info('new %d items' % count_new)
    logger.info('updated %d items' % count_update)

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
Example #19
def sync_layers_fewsnorm(
    slug='vss_fews_locations', username=None, taskname=None, loglevel=20):

    # Set up logging
    handler = get_handler(username=username, taskname=taskname)
    logger.addHandler(handler)
    logger.setLevel(loglevel)

    # Actual code to do the task
    source_ident = 'fewsnorm::%s' % slug

    layer = Layer.objects.get(slug=slug)
    original_tags = layer.tags.all()
    logger.info('template: %s' % layer)

    tag, _ = Tag.objects.get_or_create(slug='fewsnorm_%s' % slug)
    logger.info('tag: %s' % tag)

    logger.debug('Invalidating existing layers...')
    existing_layers = dict(
        (l.slug, l) for l in
        Layer.objects.filter(source_ident=source_ident))
    for existing_layer in existing_layers.values():
        # Invalidate first.
        existing_layer.valid = False
        # Remove tags from many to many relationships, not delete
        # the tags themselves.
        for layer_tag in existing_layer.tags.all():
            existing_layer.tags.remove(layer_tag)
        existing_layer.save()

    count_update, count_new = 0, 0

    for par_mod_qua_stp in TimeSeriesCache.objects.filter(active=True).values(
        "parametercache__ident", "parametercache__name", "modulecache__ident",
        "qualifiersetcache__ident", "timestepcache__ident").distinct():
        par = par_mod_qua_stp['parametercache__ident']
        par_name = par_mod_qua_stp['parametercache__name']
        mod = par_mod_qua_stp['modulecache__ident']
        qua = par_mod_qua_stp['qualifiersetcache__ident']
        stp = par_mod_qua_stp['timestepcache__ident']

        instance_slug = '%s_%s_%s_%s_%s' % (slug, par, mod, qua, stp)
        if instance_slug in existing_layers:
            # Update existing, the old existing tags have been
            # removed already.
            new_layer = existing_layers[instance_slug]
            logger.debug('Update: %s' % instance_slug)
            new_layer.data_set = layer.data_set
            count_update += 1
        else:
            # New
            logger.debug('New: %s' % instance_slug)
            new_layer = deepcopy(layer)
            new_layer.slug = instance_slug
            new_layer.id = None
            count_new += 1

        layer_params = []
        if par:
            layer_params.append("par_ident='%s'" % par)
        if mod:
            layer_params.append("mod_ident='%s'" % mod)
        if qua:
            layer_params.append("qua_ident='%s'" % qua)
        if stp:
            layer_params.append("stp_ident='%s'" % stp)
        new_layer.filter = ' and '.join(layer_params)

        # Note that the same name can occur multiple times, but
        # with different mod, qua and/or stp.
        if qua is None:
            new_layer.name = '%s (%s)' % (par_name, stp)
        else:
            new_layer.name = '%s %s (%s)' % (par_name, qua, stp)
        new_layer.name = new_layer.name[:80]
        new_layer.source_ident = source_ident
        new_layer.valid = True
        new_layer.is_local_server = layer.is_local_server
        new_layer.is_clickable = layer.is_local_server
        new_layer.js_popup_class = layer.js_popup_class
        new_layer.save()

        new_layer.tags.add(tag)
        for original_tag in original_tags:
            new_layer.tags.add(original_tag)
        if mod is not None:
            # add tag
            mod_tag, _ = Tag.objects.get_or_create(slug='%s_%s' % (tag, mod))
            new_layer.tags.add(mod_tag)

    logger.info('new %d items' % count_new)
    logger.info('updated %d items' % count_update)

    # Remove logging handler
    logger.removeHandler(handler)

    return 'OK'
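
A recurring weakness in these examples is that logger.removeHandler(handler) only runs on the success path (the comment in Example #1 hints at the same fragility), so an exception leaves the handler attached and duplicates output on the next run. As a suggestion rather than code from the original project, a small try/finally wrapper around get_handler guarantees the cleanup (run_with_task_logging is a hypothetical name):

import logging


def run_with_task_logging(the_func, username=None, taskname=None, loglevel=20,
                          **kwargs):
    # Sketch: attach the task handler, run the task, and always detach the
    # handler again, even when the task raises.
    handler = get_handler(username=username, taskname=taskname)
    logger = logging.getLogger(taskname or __name__)
    logger.addHandler(handler)
    logger.setLevel(loglevel)
    try:
        return the_func(username=username, taskname=taskname, **kwargs)
    finally:
        logger.removeHandler(handler)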