Example No. 1
def on_layer_post_save_db_entity_process_layer(sender, **kwargs):
    """
        For db_entity create/update/clone, this creates or updates the layer
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture

    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity

    logger.info("Handler: on_layer_post_save_db_enitty_process_layer for DbEntity: key %s, id %s" % (db_entity.key, db_entity.id))
    # Return if the db_entity was not created from a user-imported feature
    # Configured layer saving is handled after saving the ConfigEntity
    if not db_entity.feature_class_configuration.generated:
        return

    layer_configuration_fixture = resolve_fixture(
        "presentation",
        "layer",
        LayerConfigurationFixture,
        config_entity.schema(),
        config_entity=config_entity)

    # Update the layer via the layer library update_or_create
    # Find LayerLibraries matching the ConfigEntity class scope
    for layer_library_configuration in layer_configuration_fixture.layer_libraries():

        logger.info("Handler: Layer Library Configuration: %s" % layer_library_configuration)
        # Update/Create the layer library and layers
        _update_or_create_layer_library_and_layers(
            config_entity,
            layer_library_configuration,
            db_entity_keys=[db_entity.key])
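The handlers in these examples are plain functions that get connected to signals elsewhere in the project. Below is a minimal, self-contained sketch of that wiring pattern using django.dispatch; the signal name and the string instance are illustrative stand-ins, not the footprint signals or models.

# Minimal sketch (assumes Django is installed): define a signal, connect a
# handler that reads its instance from kwargs, and fire the signal.
import django.dispatch

post_save_db_entity = django.dispatch.Signal()

def on_db_entity_saved(sender, **kwargs):
    # Handlers receive the saved instance and any extra context via kwargs
    instance = kwargs['instance']
    print("DbEntity saved: %s" % instance)

post_save_db_entity.connect(on_db_entity_saved)
post_save_db_entity.send(sender=None, instance="my_db_entity")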
Example No. 2
def on_config_entity_post_save_layer(sender, **kwargs):
    """
        Sync tilestache to a ConfigEntity class after the latter is saved
        :param **kwargs: optional "db_entity_keys" to limit the layers created to those DbEntities
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture
    # Disable post save publishing on individual layers. The ConfigEntity is controlling publishing
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_layer for %s" %
                config_entity.full_name)

    #    Create LayerLibrary instances based on each LayerLibrary configuration if the configuration's scope
    #    matches that of config_entity
    client_layer_fixture = resolve_fixture("presentation",
                                           "layer",
                                           LayerConfigurationFixture,
                                           config_entity.schema(),
                                           config_entity=config_entity)

    layer_library_configurations = FixtureList(
        client_layer_fixture.layer_libraries()).matching_scope(
            class_scope=config_entity.__class__)
    logger.info("Processing LayerLibrary Configurations %s" % ', '.join(
        map(
            lambda layer_library_configuration: layer_library_configuration.
            key, layer_library_configurations)))

    for layer_library_configuration in layer_library_configurations:
        _update_or_create_layer_library_and_layers(
            config_entity, layer_library_configuration, **kwargs)

    reset_queries()
Example No. 3
def on_config_entity_post_save_layer(sender, **kwargs):
    """
        Sync tilestache to a ConfigEntity class after the latter is saved
        :param **kwargs: optional "db_entity_keys" to limit the layers created to those DbEntities
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture
    # Disable post save publishing on individual layers. The ConfigEntity is controlling publishing
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_layer for %s" % config_entity.full_name)

    #    Create LayerLibrary instances based on each LayerLibrary configuration if the configuration's scope
    #    matches that of config_entity
    client_layer_fixture = resolve_fixture(
        "presentation",
        "layer",
        LayerConfigurationFixture,
        config_entity.schema(),
        config_entity=config_entity)

    layer_library_configurations = FixtureList(client_layer_fixture.layer_libraries()).matching_scope(
        class_scope=config_entity.__class__
    )
    logger.info("Processing LayerLibrary Configurations %s" % ', '.join(map(
        lambda layer_library_configuration: layer_library_configuration.key,
        layer_library_configurations)))

    for layer_library_configuration in layer_library_configurations:
        _update_or_create_layer_library_and_layers(config_entity, layer_library_configuration, **kwargs)

    reset_queries()
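matching_scope above narrows the fixture's LayerLibrary configurations to those whose class_scope applies to the saved ConfigEntity. The snippet below is a rough, standalone illustration of that idea with plain classes; it is an assumption about the behavior, not the real FixtureList implementation.

# Rough illustration: keep only configurations whose declared class_scope is
# the config entity's class or one of its base classes.
class Region(object): pass
class Scenario(Region): pass

class LibraryConfig(object):
    def __init__(self, key, class_scope):
        self.key, self.class_scope = key, class_scope

def matching_scope(configs, class_scope):
    return [c for c in configs if issubclass(class_scope, c.class_scope)]

configs = [LibraryConfig('default', Region), LibraryConfig('scenario_only', Scenario)]
print([c.key for c in matching_scope(configs, Scenario)])  # ['default', 'scenario_only']
print([c.key for c in matching_scope(configs, Region)])    # ['default']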
Example No. 4
def on_config_entity_post_save_group(sender, **kwargs):
    """
        Syncs the user, groups, and permissions for the ConfigEntity
        Some ConfigEntity classes create their own Groups and default
        Users. This makes it easy to give a client-specific user permission
        to a given ConfigEntity by joining that ConfigEntity's group
    :param sender:
    :param kwargs:
    :return:
    """

    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    user = kwargs.get('user')
    logger.info("Handler: post_save_user for config_entity {config_entity} and user {username}".format(
        config_entity=config_entity.name,
        username=user.username if user else 'undefined'))

    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(username=UserGroupKey.SUPERADMIN)
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # First update_or_create any default groups. This usually just applies to global_config
    from footprint.client.configuration.fixture import UserFixture
    from footprint.client.configuration.utils import resolve_fixture
    user_fixture = resolve_fixture("user", "user", UserFixture, config_entity.schema(),
                                   config_entity=config_entity)
    for group_fixture in user_fixture.groups():
        group = update_or_create_group(**group_fixture)
        logger.info("User Publishing. For ConfigEntity %s synced global UserGroup: %s" %
                    (config_entity.name, group.name))

    # Sync permissions for the ConfigEntity
    # Resolve the default ConfigEntity permissions for this config_entity
    # Update or Create the ConfigEntity Group(s) for this ConfigEntity
    config_entity_groups = _update_or_create_config_entity_groups(config_entity)
    # Get the mapping of groups to permission types for the config_entity's most relevant fixture
    # These group keys are generally all global groups.
    from footprint.client.configuration.fixture import ConfigEntitiesFixture
    config_entities_fixture = resolve_fixture("config_entity", "config_entities", ConfigEntitiesFixture, config_entity.schema())
    permission_lookup = config_entities_fixture.default_config_entity_permissions()
    # Set the permissions for the config_entity groups. This will also set all superior group permissions
    # to the same permissions or greater if they match something in the permission_lookup
    config_entity_group_permissions = sync_config_entity_group_permissions(
        config_entity, config_entity_groups, permission_lookup, permission_key_class=ConfigEntityPermissionKey, **kwargs)

    # Give the groups read permissions on the ancestor config_entities
    # TODO restrict this access further for the UserGroupKey.DEMO group
    groups = Group.objects.filter(name__in=config_entity_group_permissions.keys())
    for config_entity in config_entity.ancestors:
        config_entity.assign_permission_to_groups(groups, PermissionKey.VIEW)

    # TODO tell children to add themselves to all ancestors (resync)
    # This will only be needed if the parent ConfigEntity group permission configuration changes

    reset_queries()
Example No. 5
def on_config_entity_post_save_analysis_modules(sender, **kwargs):
    """
        Sync a ConfigEntity's AnalysisModules
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_analysis_module. ConfigEntity: %s" % config_entity.name)
    update_or_create_analysis_modules(config_entity, **kwargs)

    reset_queries()
Example No. 6
def on_config_entity_post_save_result(sender, **kwargs):
    """
        Sync a ConfigEntity's ResultPage presentation
        :param kwargs: 'db_entity_keys' Optional list to limit which DbEntities are processed
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)

    logger.info("Handler: on_config_entity_post_save_result for %s" % config_entity.full_name)
    update_or_create_result_libraries(config_entity)
Example No. 7
def on_config_entity_post_save_analysis_modules(sender, **kwargs):
    """
        Sync a ConfigEntity's AnalysisModules
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_analysis_module. ConfigEntity: %s" % config_entity.name)
    update_or_create_analysis_modules(config_entity, **kwargs)

    reset_queries()
Example No. 8
def on_config_entity_post_save_result(sender, **kwargs):
    """
        Sync a ConfigEntity's ResultPage presentation
        :param kwargs: 'db_entity_keys' Optional list to limit which DbEntities are processed
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)

    logger.info("Handler: on_config_entity_post_save_result for %s" %
                config_entity.full_name)
    update_or_create_result_libraries(config_entity)
Example No. 9
def on_config_entity_post_save_user(sender, **kwargs):
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(
            username=UserGroupKey.SUPERADMIN)
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # TODO these should be importable on top. Something is messed up
    user_fixture = resolve_fixture("user",
                                   "user",
                                   UserFixture,
                                   config_entity.schema(),
                                   config_entity=config_entity)

    # Get the ConfigEntityGroups of the ConfigEntity. GlobalConfig uses SuperAdmin as its Group
    config_entity_groups = config_entity.config_entity_groups() if \
        not isinstance(config_entity, GlobalConfig) else \
        [Group.objects.get(name=UserGroupKey.SUPERADMIN)]

    # Find all existing users of all ConfigEntity Groups of the ConfigEntity
    # Note that we use values() instead of all() to get dicts with just needed fields instead of model instances
    # TODO remove username from here once all users have emails. update_or_create_user() checks username for uniqueness presently
    existing_user_dicts = flat_map(
        lambda group: group.user_set.all().values('email', 'username'),
        config_entity_groups)

    # Combine the existing users with the fixtures, giving the former preference. We favor
    # what's in the database because the user might have updated their profile
    # Only accept fixture users not matching users in the db (by email)
    existing_emails = map(
        lambda existing_user_dict: existing_user_dict['email'],
        existing_user_dicts)
    logger.debug("Found existing users %s" % ', '.join(existing_emails))
    new_fixture_users = filter(
        lambda fixture_user: fixture_user['email'] not in existing_emails,
        user_fixture.users())
    if len(new_fixture_users) > 0:
        logger.debug("Found new fixture users %s" % ', '.join(
            map(lambda fixture_user: fixture_user['email'],
                new_fixture_users)))
    user_dicts = existing_user_dicts + new_fixture_users

    # Update or create each user. This will create users of new fixtures and run post-save processing
    # on both existing and new.
    for user_dict in user_dicts:
        update_or_create_user(**user_dict)

    reset_queries()
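The _no_post_save_publishing dance above (set the flag, save, clear the flag) is how these handlers persist a change without re-triggering themselves. Below is a condensed, self-contained sketch of that guard pattern, with a dummy class standing in for ConfigEntity and for Django's signal dispatch.

# Condensed sketch of the re-entrancy guard: the handler flips a flag before
# saving so that the save it performs does not publish a second time.
class DummyConfigEntity(object):
    _no_post_save_publishing = False

    def save(self):
        on_post_save(self)  # stand-in for Django firing the post_save signal

def on_post_save(instance):
    if instance._no_post_save_publishing:
        return
    # ... publishing work that needs to persist a derived field ...
    instance._no_post_save_publishing = True
    instance.save()
    instance._no_post_save_publishing = False

DummyConfigEntity().save()  # the publishing body runs exactly once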
Example No. 10
def on_db_entity_post_save_analysis_modules(sender, **kwargs):
    """
    Respond to whenever a db entity is added or updated
    kwargs:
        instance is the DbEntityInterest
        user_id is the User performing the save
        features are optionally the feature ids if this is called as the result of saving Feature instances
    :return:
    """
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.db_entity.source_db_entity_key:
        # No analysis dependent on result DbEntities, so just quit
        return
    behavior = db_entity_interest.db_entity.feature_behavior.behavior
    main_config_entity = db_entity_interest.config_entity
    # Todo children() should be all_scenario_descendents or similar to handle region
    config_entities = [main_config_entity] if isinstance(main_config_entity, Scenario) \
        else [main_config_entity] + list(main_config_entity.children())

    for config_entity in config_entities:
        logger.info(
            "Checking which analysis modules should run for config_entity %s, db_entity %s, behavior %s",
            config_entity.name, db_entity_interest.db_entity.key, behavior)
        analysis_modules = config_entity.analysis_modules
        for analysis_module in analysis_modules:
            for analysis_tool in analysis_module.analysis_tools.all().select_subclasses():
                # If the DbEntity's Behavior matches or has the tool's Behavior as a parent,
                # it means that the DbEntity Feature save should trigger the tool to update
                # We only use this for the Scenario Builder and Agriculture Builder, although
                # other DbEntity Feature updates could trigger analysis tools to run
                # For example, the Scenario Builder has behavior 'scenario_editor_tool', which
                # is a parent behavior of DbEntity 'scenario_end_state's behavior 'scenario_end_state'
                if analysis_tool.behavior and behavior.has_behavior(analysis_tool.behavior):
                    logger.info("Updating AnalysisTool %s for AnalysisModule %s for ConfigEntity %s" % \
                                (analysis_tool.key, analysis_module.key, config_entity.name))

                    # Note that even though an individual tool matched, we call start on the
                    # AnalysisModule, which runs its tools. We don't have any modules with multiple
                    # tools, so this hack works

                    # Update the updater field first
                    analysis_module.updater = get_user_model().objects.get(
                        id=kwargs['user_id'])
                    analysis_module._no_post_save_task_run = True
                    analysis_module.save()
                    analysis_module._no_post_save_task_run = False

                    # Run the module
                    analysis_module.start(ids=map(lambda feature: feature.id,
                                                  kwargs['features']))
Example No. 11
def on_analysis_module_post_save(sender, **kwargs):
    analysis_module = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Post Save for AnalysisModule %s for ConfigEntity %s" % \
                (analysis_module.key, analysis_module.config_entity.name))
    if not kwargs.get('created', None):
        # Automatically start the analysis module on update since the client simply updates the
        # analysis module to force it to run.
        # If already started, don't start again. Saves happen during the task run in order
        # to update the analysis_module timestamps
        # The instance flag is used by post_save_config_entity_publishing to turn it off
        if not analysis_module._started and not analysis_module._no_post_save_task_run \
                and not AnalysisModule._no_post_save_task_run_global:
            analysis_module.start()
Example No. 12
def on_analysis_module_post_save(sender, **kwargs):
    analysis_module = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Post Save for AnalysisModule %s for ConfigEntity %s" % \
                (analysis_module.key, analysis_module.config_entity.name))
    if not kwargs.get('created', None):
        # Automatically start the analysis module on update since the client simply updates the
        # analysis module to force it to run.
        # If already started, don't start again. Saves happen during the task run in order
        # to update the analysis_module timestamps
        # The instance flag is used by post_save_config_entity_publishing to turn it off
        if not analysis_module._started and not analysis_module._no_post_save_task_run \
                and not AnalysisModule._no_post_save_task_run_global:
            analysis_module.start()
Example No. 13
def on_db_entity_post_save_result(sender, **kwargs):
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.db_entity.source_db_entity_key:
        # No import to do for Result DbEntities (The only thing that could change is the query, which is evaluated
        # in real-time and requires no post-save processing)
        return
    # db_entity_interest = kwargs['instance']
    config_entity = db_entity_interest.config_entity
    db_entity = db_entity_interest.db_entity
    logger.info("Handler: on_db_entity_post_save_result. DbEntity: %s" % db_entity.full_name)
    update_or_create_result_libraries(config_entity, db_entity_keys=[db_entity.key])

    reset_queries()
Example No. 14
def on_config_entity_post_save_tilestache(sender, **kwargs):
    """
        Update/Create the tilestache data for the layers given DbEntityInterest
        :param: kwargs: Optional db_entity_keys to limit the layers to those of the given keys
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_tilestache. ConfigEntity: %s" % config_entity.name)
    if not isinstance(config_entity, Scenario):
        return

    _on_post_save_tilestache(config_entity, **kwargs)

    reset_queries()
Example No. 15
def on_config_entity_pre_save(sender, **kwargs):
    """
        A presave event handler. Currently this just defaults the bounds of the instance to those of its parent
    :param sender:
    :param kwargs:
    :return:
    """
    instance = InstanceBundle.extract_single_instance(**kwargs)
    if instance._no_post_save_publishing:
        return
    if not instance.pk:
        # Inherit the parent's bounds if none are defined
        if not instance.bounds:
            instance.bounds = instance.parent_config_entity.bounds
Example No. 16
def on_config_entity_db_entities_post_save_user(sender, **kwargs):
    """
        Sync all ConfigEntity DbEntities permissions
    :param sender:
    :param kwargs:
    :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    for db_entity_interest in config_entity.computed_db_entity_interests():
        on_db_entity_post_save_user(sender, instance=db_entity_interest)

    reset_queries()
Example No. 17
def on_config_entity_pre_save(sender, **kwargs):
    """
        A presave event handler. Currently this just defaults the bounds of the instance to those of its parent
    :param sender:
    :param kwargs:
    :return:
    """
    instance = InstanceBundle.extract_single_instance(**kwargs)
    if instance._no_post_save_publishing:
        return
    if not instance.pk:
        # Inherit the parent's bounds if none are defined
        if not instance.bounds:
            instance.bounds = instance.parent_config_entity.bounds
Example No. 18
def on_config_entity_post_save_db_entity(sender, **kwargs):
    """
        CRUD a ConfigEntity's DbEntities. Called when creating, cloning, updating, or syncing a scenario
        Create (kwargs['created']==True). Relies on the client configuration of its DbEntity instance
        Clone (kwargs['created']==True and kwargs['instance'].origin_instance). Relies on the origin instance clone DbEntities
        Update. For now don't do anything. We only expose primitive attributes to the user
        Sync (kwargs['sync']==True). Iterate through the client configuration and update DbEntities according to the configuration.
            For clones sync will sync to the origin instance.
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_db_entity. ConfigEntity: %s" % config_entity.name)
    crud_db_entities(config_entity, kwargs.get('crud_type'))

    reset_queries()
Example No. 19
def on_config_entity_post_save_tilestache(sender, **kwargs):
    """
        Update/Create the tilestache data for the layers given DbEntityInterest
        :param: kwargs: Optional db_entity_keys to limit the layers to those of the given keys
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_tilestache. ConfigEntity: %s" %
                config_entity.name)
    if not isinstance(config_entity, Scenario):
        return

    _on_post_save_tilestache(config_entity, **kwargs)

    reset_queries()
Example No. 20
def on_config_entity_post_save_db_entity(sender, **kwargs):
    """
        CRUD a ConfigEntity's DbEntities. Called when creating, cloning, updating, or syncing a scenario
        Create (kwargs['created']==True). Relies on the client configuration of its DbEntity instance
        Clone (kwargs['created']==True and kwargs['instance'].origin_instance). Relies on the origin instance clone DbEntities
        Update. For now don't do anything. We only expose primitive attributes to the user
        Sync (kwargs['sync']==True). Iterate through the client configuration and update DbEntities according to the configuration.
            For clones sync will sync to the origin instance.
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_db_entity. ConfigEntity: %s" % config_entity.name)
    crud_db_entities(config_entity, kwargs.get('crud_type'))

    reset_queries()
Example No. 21
def on_config_entity_db_entities_post_save_user(sender, **kwargs):
    """
        Sync all ConfigEntity DbEntities permissions
    :param sender:
    :param kwargs:
    :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    for db_entity_interest in config_entity.computed_db_entity_interests():
        on_db_entity_post_save_user(sender, instance=db_entity_interest)

    reset_queries()
Example No. 22
def on_db_entity_post_save_tilestache(sender, **kwargs):
    """
        Update/Create the tilestache data for a layer after a new db_entity is imported
    """
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity

    # Return if the db_entity was not created from a user-imported feature; tilestache will be handled elsewhere
    if not db_entity.feature_class_configuration.generated:
        return

    _on_post_save_tilestache(config_entity, db_entity_keys=[db_entity.key])

    reset_queries()
Example No. 23
def on_layer_post_save_tilestache(sender, **kwargs):
    """
        Update/Create the tilestache data for the layer(s) of the given Layer
    """

    logger.info("Handler: on_layer_post_save_tilestache")

    layer = InstanceBundle.extract_single_instance(**kwargs)
    db_entity_interest = layer.db_entity_interest
    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity

    _on_post_save_tilestache(config_entity, db_entity_keys=[db_entity.key])

    reset_queries()
Example No. 24
def on_layer_post_save_tilestache(sender, **kwargs):
    """
        Update/Create the tilestache data for the layer(s) of the given Layer
    """

    logger.info("Handler: on_layer_post_save_tilestache")

    layer = InstanceBundle.extract_single_instance(**kwargs)
    db_entity_interest = layer.db_entity_interest
    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity

    _on_post_save_tilestache(config_entity, db_entity_keys=[db_entity.key])

    reset_queries()
Example No. 25
def on_db_entity_post_save_tilestache(sender, **kwargs):
    """
        Update/Create the tilestache data for a layer after a new db_entity is imported
    """
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity

    # Return if the db_entity was not created from a user-imported feature; tilestache will be handled elsewhere
    if not db_entity.feature_class_configuration.generated:
        return

    _on_post_save_tilestache(config_entity, db_entity_keys=[db_entity.key])

    reset_queries()
Example No. 26
def on_db_entity_post_save_result(sender, **kwargs):
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.db_entity.source_db_entity_key:
        # No import to do for Result DbEntities (The only thing that could change is the query, which is evaluated
        # in real-time and requires no post-save processing)
        return
    # db_entity_interest = kwargs['instance']
    config_entity = db_entity_interest.config_entity
    db_entity = db_entity_interest.db_entity
    logger.info("Handler: on_db_entity_post_save_result. DbEntity: %s" %
                db_entity.full_name)
    update_or_create_result_libraries(config_entity,
                                      db_entity_keys=[db_entity.key])

    reset_queries()
Example No. 27
def on_db_entity_post_save_analysis_modules(sender, **kwargs):
    """
    Respond to whenever a db entity is added or updated
    kwargs:
        instance is the DbEntityInterest
        user_id is the User performing the save
        features are optionally the feature ids if this is called as the result of saving Feature instances
    :return:
    """
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.db_entity.source_db_entity_key:
        # No analysis dependent on result DbEntities, so just quit
        return
    behavior = db_entity_interest.db_entity.feature_behavior.behavior
    main_config_entity = db_entity_interest.config_entity
    # Todo children() should be all_scenario_descendents or similar to handle region
    config_entities = [main_config_entity] if isinstance(main_config_entity, Scenario) else [main_config_entity]+list(main_config_entity.children())

    for config_entity in config_entities:
        logger.info("Checking which analysis modules should run for config_entity %s, db_entity %s, behavior %s",
                    config_entity.name, db_entity_interest.db_entity.key, behavior)
        analysis_modules = config_entity.analysis_modules
        for analysis_module in analysis_modules:
            for analysis_tool in analysis_module.analysis_tools.all().select_subclasses():
                # If the DbEntity's Behavior matches or has the tool's Behavior as a parent,
                # it means that the DbEntity Feature save should trigger the tool to update
                # We only use this for the Scenario Builder and Agriculture Builder, although
                # other DbEntity Feature updates could trigger analysis tools to run
                # For example, the Scenario Builder has behavior 'scenario_editor_tool', which
                # is a parent behavior of DbEntity 'scenario_end_state's behavior 'scenario_end_state'
                if analysis_tool.behavior and behavior.has_behavior(analysis_tool.behavior):
                    logger.info("Updating AnalysisTool %s for AnalysisModule %s for ConfigEntity %s" % \
                                (analysis_tool.key, analysis_module.key, config_entity.name))

                    # Note that even though an individual tool matched, we call start on the
                    # AnalysisModule, which runs its tools. We don't have any modules with multiple
                    # tools, so this hack works

                    # Update the updater field first
                    analysis_module.updater = get_user_model().objects.get(id=kwargs['user_id'])
                    analysis_module._no_post_save_task_run = True
                    analysis_module.save()
                    analysis_module._no_post_save_task_run = False

                    # Run the module
                    analysis_module.start(
                        ids=map(lambda feature: feature.id, kwargs['features'])
                    )
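The trigger condition above is behavior.has_behavior(analysis_tool.behavior): the DbEntity's Behavior must equal the tool's Behavior or have it as an ancestor. The sketch below shows that kind of parent-chain check with a stand-in Behavior class; it is not the footprint Behavior model, only an illustration of the relationship described in the comments (scenario_end_state having scenario_editor_tool as a parent).

# Stand-in Behavior whose has_behavior() walks parent behaviors, mirroring the
# check that decides whether a Feature save triggers an analysis tool.
class Behavior(object):
    def __init__(self, key, parents=()):
        self.key, self.parents = key, list(parents)

    def has_behavior(self, other):
        if self.key == other.key:
            return True
        return any(parent.has_behavior(other) for parent in self.parents)

scenario_editor_tool = Behavior('scenario_editor_tool')
scenario_end_state = Behavior('scenario_end_state', parents=[scenario_editor_tool])

print(scenario_end_state.has_behavior(scenario_editor_tool))  # True  -> tool runs
print(scenario_editor_tool.has_behavior(scenario_end_state))  # False -> no trigger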
Example No. 28
def on_config_entity_post_save_user(sender, **kwargs):
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(username=UserGroupKey.SUPERADMIN)
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # TODO these should be importable on top. Something is messed up
    user_fixture = resolve_fixture("user", "user", UserFixture, config_entity.schema(),
                                   config_entity=config_entity)

    # Get the ConfigEntityGroups of the ConfigEntity. GlobalConfig uses SuperAdmin as its Group
    config_entity_groups = config_entity.config_entity_groups() if \
        not isinstance(config_entity, GlobalConfig) else \
        [Group.objects.get(name=UserGroupKey.SUPERADMIN)]

    # Find all existing users of all ConfigEntity Groups of the ConfigEntity
    # Note that we use values() instead of all() to get dicts with just needed fields instead of model instances
    # TODO remove username from here once all users have emails. update_or_create_user() checks username for uniqueness presently
    existing_user_dicts = flat_map(
        lambda group: group.user_set.all().values('email', 'username'),
        config_entity_groups
    )

    # Combine the existing users with the fixtures, giving the former preference. We favor
    # what's in the database because the user might have updated their profile
    # Only accept fixture users not matching users in the db (by email)
    existing_emails = map(lambda existing_user_dict: existing_user_dict['email'], existing_user_dicts)
    logger.debug("Found existing users %s" % ', '.join(existing_emails))
    new_fixture_users = filter(lambda fixture_user: fixture_user['email'] not in existing_emails, user_fixture.users())
    if len(new_fixture_users) > 0:
        logger.debug("Found new fixture users %s" % ', '.join(map(lambda fixture_user: fixture_user['email'], new_fixture_users)))
    user_dicts = existing_user_dicts + new_fixture_users

    # Update or create each user. This will create users of new fixtures and run post-save processing
    # on both existing and new.
    for user_dict in user_dicts:
        update_or_create_user(**user_dict)

    reset_queries()
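The handler above merges the users already attached to the ConfigEntity groups with the fixture users, keyed on email and preferring the database record. A minimal runnable sketch of that merge, with plain dicts standing in for the queryset values and fixture entries:

# Minimal sketch of the merge: existing DB users win over fixture users with
# the same email; only genuinely new fixture users are appended.
existing_user_dicts = [
    {'email': 'admin@example.com', 'username': 'admin'},
]
fixture_users = [
    {'email': 'admin@example.com', 'username': 'admin_fixture'},  # already in the db
    {'email': 'planner@example.com', 'username': 'planner'},      # new
]

existing_emails = set(d['email'] for d in existing_user_dicts)
new_fixture_users = [u for u in fixture_users if u['email'] not in existing_emails]
user_dicts = existing_user_dicts + new_fixture_users

for user_dict in user_dicts:
    print("update_or_create_user(**%r)" % user_dict)  # each dict would be upserted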
Example No. 29
def on_db_entity_post_save_layer(sender, **kwargs):

    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.db_entity.source_db_entity_key:
        # No layers related to DbEntity results, so just quit
        return
    config_entity = db_entity_interest.config_entity.subclassed

    scoped_config_entities = [config_entity] if isinstance(config_entity, Scenario) else config_entity.children()
    layer_exists = Layer.objects.filter(
        db_entity_interest=db_entity_interest,
        presentation__config_entity__in=scoped_config_entities
    ).exists()
    logger.info(
        "Handler: on_db_entity_post_save_layer for config_entity {config_entity}, db_entity {db_entity}, and user {username}.".format(
            config_entity=config_entity,
            db_entity=db_entity_interest.db_entity,
            username=db_entity_interest.db_entity.updater.username,
        ))
    if not layer_exists:
        return
Example No. 30
def on_layer_post_save_process_layer(sender, **kwargs):
    """
        For layer create/update/clone, this updates the saved layer
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture

    layer = InstanceBundle.extract_single_instance(**kwargs)

    logger.info("Handler: No Post Save Publising %s" %
                layer._no_post_save_publishing)
    if layer._no_post_save_publishing:
        return

    db_entity_interest = layer.db_entity_interest
    db_entity = db_entity_interest.db_entity
    config_entity = db_entity_interest.config_entity.subclassed

    logger.info(
        "Handler: Layer Publishing. on_layer_post_save_process_layer %s for config entity %s and db_entity %s"
        % (layer.full_name, config_entity.name, db_entity.key))

    layer_configuration_fixture = resolve_fixture("presentation",
                                                  "layer",
                                                  LayerConfigurationFixture,
                                                  config_entity.schema(),
                                                  config_entity=config_entity)

    # Update the layer via the layer library update_or_create
    # Find LayerLibraries matching the ConfigEntity class scope
    for layer_library_configuration in layer_configuration_fixture.layer_libraries():

        logger.info("Handler: Layer Configuration: %s" %
                    layer_library_configuration)
        # Update/Create the layer library and layers
        _update_or_create_layer_library_and_layers(
            config_entity,
            layer_library_configuration,
            db_entity_keys=[db_entity.key])
Example No. 31
    def resolve_crud(**kwargs):
        """
            Resolves the desired CRUD operation to CrudKey.CREATE|CLONE|UPDATE|SYNC|DELETE
            The default value is CrudKey.UPDATE. kwargs['created'] resolves to CrudKey.CREATE or CLONE--
            the latter only if the instance is Cloneable and has an origin_instance.
            SYNC and DELETE are returned if the 'sync' or 'deleted' kwargs are set True, respectively.
            :param kwargs: contains 'instance' and optionally 'created', 'deleted', and 'sync'
        """

        instance = InstanceBundle.extract_single_instance(**kwargs)
        if kwargs.get('sync'):
            return CrudKey.SYNC
        if kwargs.get('created'):
            if isinstance(instance, Cloneable) and instance.origin_instance:
                return CrudKey.CLONE
            else:
                return CrudKey.CREATE
        if kwargs.get('deleted'):
            return CrudKey.DELETE
        else:
            # Default to SYNC for now
            return CrudKey.SYNC
Example No. 32
    def resolve_crud(**kwargs):
        """
            Resolves the desired CRUD operation to CrudKey.CREATE|CLONE|UPDATE|SYNC|DELETE
            The default value is CrudKey.UPDATE. kwargs['created'] resolves to CrudKey.CREATE or CLONE--
            the latter only if the instance is Cloneable and has an origin_instance.
            SYNC and DELETE are returned if the 'sync' or 'deleted' kwargs are set True, respectively.
            :param kwargs: contains 'instance' and optionally 'created', 'deleted', and 'sync'
        """

        instance = InstanceBundle.extract_single_instance(**kwargs)
        if kwargs.get('sync'):
            return CrudKey.SYNC
        if kwargs.get('created'):
            if isinstance(instance, Cloneable) and instance.origin_instance:
                return CrudKey.CLONE
            else:
                return CrudKey.CREATE
        if kwargs.get('deleted'):
            return CrudKey.DELETE
        else:
            # Default to SYNC for now
            return CrudKey.SYNC
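A quick sketch of how resolve_crud maps the signal kwargs to CrudKey values. The CrudKey constants and the instance below are mocked just to make the mapping runnable; the real method also checks isinstance(instance, Cloneable).

# Mocked CrudKey and instance exercising the kwarg-to-CRUD mapping shown above.
class CrudKey(object):
    CREATE, CLONE, UPDATE, SYNC, DELETE = 'create', 'clone', 'update', 'sync', 'delete'

class FakeInstance(object):
    origin_instance = None  # set to another instance to simulate a clone

def resolve_crud_sketch(instance, **kwargs):
    if kwargs.get('sync'):
        return CrudKey.SYNC
    if kwargs.get('created'):
        return CrudKey.CLONE if instance.origin_instance else CrudKey.CREATE
    if kwargs.get('deleted'):
        return CrudKey.DELETE
    return CrudKey.SYNC  # current default, per the comment above

print(resolve_crud_sketch(FakeInstance(), created=True))  # 'create'
print(resolve_crud_sketch(FakeInstance(), deleted=True))  # 'delete'
print(resolve_crud_sketch(FakeInstance(), sync=True))     # 'sync'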
Example No. 33
def on_db_entity_post_save_layer(sender, **kwargs):

    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.db_entity.source_db_entity_key:
        # No layers related to DbEntity results, so just quit
        return
    config_entity = db_entity_interest.config_entity.subclassed

    scoped_config_entities = [config_entity] if isinstance(
        config_entity, Scenario) else config_entity.children()
    layer_exists = Layer.objects.filter(
        db_entity_interest=db_entity_interest,
        presentation__config_entity__in=scoped_config_entities).exists()
    logger.info(
        "Handler: on_db_entity_post_save_layer for config_entity {config_entity}, db_entity {db_entity}, and user {username}."
        .format(
            config_entity=config_entity,
            db_entity=db_entity_interest.db_entity,
            username=db_entity_interest.db_entity.updater.username,
        ))
    if not layer_exists:
        return
Example No. 34
def on_layer_post_save_process_layer(sender, **kwargs):
    """
        For layer create/update/clone, this updates the saved layer
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture

    layer = InstanceBundle.extract_single_instance(**kwargs)

    logger.info("Handler: No Post Save Publising %s" % layer._no_post_save_publishing)
    if layer._no_post_save_publishing:
        return

    db_entity_interest = layer.db_entity_interest
    db_entity = db_entity_interest.db_entity
    config_entity = db_entity_interest.config_entity.subclassed

    logger.info("Handler: Layer Publishing. on_layer_post_save_process_layer %s for config entity %s and db_entity %s" %
                (layer.full_name, config_entity.name, db_entity.key))

    layer_configuration_fixture = resolve_fixture(
        "presentation",
        "layer",
        LayerConfigurationFixture,
        config_entity.schema(),
        config_entity=config_entity)

    # Update the layer via the layer library update_or_create
    # Find LayerLibraries matching the ConfigEntity class scope
    for layer_library_configuration in layer_configuration_fixture.layer_libraries():

        logger.info("Handler: Layer Configuration: %s" % layer_library_configuration)
        # Update/Create the layer library and layers
        _update_or_create_layer_library_and_layers(
            config_entity,
            layer_library_configuration,
            db_entity_keys=[db_entity.key])
Example No. 35
def on_layer_post_save_db_entity_process_layer(sender, **kwargs):
    """
        For db_entity create/update/clone, this creates or updates the layer
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture

    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity

    logger.info(
        "Handler: on_layer_post_save_db_enitty_process_layer for DbEntity: key %s, id %s"
        % (db_entity.key, db_entity.id))
    # Return if the db_entity was not created from a user-imported feature
    # Configured layer saving is handled after saving the ConfigEntity
    if not db_entity.feature_class_configuration.generated:
        return

    layer_configuration_fixture = resolve_fixture("presentation",
                                                  "layer",
                                                  LayerConfigurationFixture,
                                                  config_entity.schema(),
                                                  config_entity=config_entity)

    # Update the layer via the layer library update_or_create
    # Find LayerLibraries matching the ConfigEntity class scope
    for layer_library_configuration in layer_configuration_fixture.layer_libraries():

        logger.info("Handler: Layer Library Configuration: %s" %
                    layer_library_configuration)
        # Update/Create the layer library and layers
        _update_or_create_layer_library_and_layers(
            config_entity,
            layer_library_configuration,
            db_entity_keys=[db_entity.key])
Example No. 36
def on_config_entity_post_save_built_form(sender, **kwargs):
    """
        Sync a ConfigEntity's BuiltFormSets
    """

    # Turn off BuiltForm instances' own post-save publishing with this class-scope flag
    # The ConfigEntity is managing creation and update, so we don't want to trigger publishers after every
    # BuiltForm is created/updated

    from footprint.client.configuration.fixture import ConfigEntitiesFixture
    from footprint.client.configuration.utils import resolve_fixture

    BuiltForm.no_post_save_publishing = True

    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_built_form. ConfigEntity: %s" % config_entity.name)

    if isinstance(config_entity, GlobalConfig) or isinstance(config_entity, Region):
        # For now only the GlobalConfig and Regions create the sets
        config_entity.add_built_form_sets(*(set(built_form_sets(config_entity)) - set(config_entity.computed_built_form_sets())))

    BuiltForm.no_post_save_publishing = False

    reset_queries()
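The add_built_form_sets call above only adds the sets the ConfigEntity does not already have computed, which is a plain set difference over the two collections. Illustrated with strings as stand-ins for BuiltFormSet instances:

# Set-difference pattern used above: add only the missing built form sets.
configured_sets = {'residential', 'commercial', 'agriculture'}
computed_sets = {'residential'}
missing_sets = configured_sets - computed_sets
print(sorted(missing_sets))  # ['agriculture', 'commercial'] would be added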
Example No. 37
def on_config_entity_post_save_built_form(sender, **kwargs):
    """
        Sync a ConfigEntity's BuiltFormSets
    """

    # Turn off BuiltForm instances' own post-save publishing with this class-scope flag
    # The ConfigEntity is managing creation and update, so we don't want to trigger publishers after every
    # BuiltForm is created/updated

    from footprint.client.configuration.fixture import ConfigEntitiesFixture
    from footprint.client.configuration.utils import resolve_fixture

    BuiltForm.no_post_save_publishing = True

    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_built_form. ConfigEntity: %s" % config_entity.name)

    if isinstance(config_entity, GlobalConfig) or isinstance(config_entity, Region):
        # For now only the GlobalConfig and Regions create the sets
        config_entity.add_built_form_sets(*(set(built_form_sets(config_entity)) - set(config_entity.computed_built_form_sets())))

    BuiltForm.no_post_save_publishing = False

    reset_queries()
Example No. 38
def on_config_entity_pre_delete_analysis_modules(sender, **kwargs):
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    AnalysisModule.objects.filter(config_entity=config_entity).delete()
    AnalysisTool.objects.filter(config_entity=config_entity).delete()
Example No. 39
def on_config_entity_post_save_group(sender, **kwargs):
    """
        Syncs the user, groups, and permissions for the ConfigEntity
        Some ConfigEntity classes create their own Groups and default
        Users. This makes it easy to give a client-specific user permission
        to a given ConfigEntity by joining that ConfigEntity's group
    :param sender:
    :param kwargs:
    :return:
    """

    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    user = kwargs.get('user')
    logger.info(
        "Handler: post_save_user for config_entity {config_entity} and user {username}"
        .format(config_entity=config_entity.name,
                username=user.username if user else 'undefined'))

    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(
            username=UserGroupKey.SUPERADMIN)
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # First update_or_create any default groups. This usually just applies to global_config
    from footprint.client.configuration.fixture import UserFixture
    from footprint.client.configuration.utils import resolve_fixture
    user_fixture = resolve_fixture("user",
                                   "user",
                                   UserFixture,
                                   config_entity.schema(),
                                   config_entity=config_entity)
    for group_fixture in user_fixture.groups():
        group = update_or_create_group(**group_fixture)
        logger.info(
            "User Publishing. For ConfigEntity %s synced global UserGroup: %s"
            % (config_entity.name, group.name))

    # Sync permissions for the ConfigEntity
    # Resolve the default ConfigEntity permissions for this config_entity
    # Update or Create the ConfigEntity Group(s) for this ConfigEntity
    config_entity_groups = _update_or_create_config_entity_groups(
        config_entity)
    # Get the mapping of groups to permission types for the config_entity's most relevant fixture
    # These group keys are generally all global groups.
    from footprint.client.configuration.fixture import ConfigEntitiesFixture
    config_entities_fixture = resolve_fixture("config_entity",
                                              "config_entities",
                                              ConfigEntitiesFixture,
                                              config_entity.schema())
    permission_lookup = config_entities_fixture.default_config_entity_permissions()
    # Set the permissions for the config_entity groups. This will also set all superior group permissions
    # to the same permissions or greater if they match something in the permission_lookup
    config_entity_group_permissions = sync_config_entity_group_permissions(
        config_entity,
        config_entity_groups,
        permission_lookup,
        permission_key_class=ConfigEntityPermissionKey,
        **kwargs)

    # Give the groups read permissions on the ancestor config_entities
    # TODO restrict this access further for the UserGroupKey.DEMO group
    groups = Group.objects.filter(
        name__in=config_entity_group_permissions.keys())
    for config_entity in config_entity.ancestors:
        config_entity.assign_permission_to_groups(groups, PermissionKey.VIEW)

    # TODO tell children to add themselves to all ancestors (resync)
    # This will only be needed if the parent ConfigEntity group permission configuration changes

    reset_queries()
Example No. 40
def on_config_entity_pre_delete_results(sender, **kwargs):
    """
        Sync geoserver to a ConfigEntity class after the latter is saved
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
Example No. 41
def on_config_entity_post_save(sender, **kwargs):
    """
        Sync tilestache to a ConfigEntity class after the latter is saved
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
Example No. 42
def on_config_entity_post_save(sender, **kwargs):
    """
        Sync tilestache to a ConfigEntity class after the latter is saved
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
Example No. 43
def on_db_entity_post_save_user(sender, **kwargs):
    """
        Give all ConfigEntity Groups in the hierarchy appropriate permissions
    :param sender:
    :param kwargs:
    :return:
    """
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.deleted:
        # Nothing to do for deleted instances
        return

    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity
    logger.info("Handler: on_db_entity_post_save_user. DbEntity: %s" % db_entity.full_name)

    # Get the ConfigEntity Group(s) for the DbEntity's ConfigEntity
    # The groups are configured based on the subclass of ConfigEntity
    config_entity_groups = config_entity.config_entity_groups()

    # Sync permissions for the DbEntity

    # Resolve the default DbEntity permissions for the DbEntity's ConfigEntity
    from footprint.client.configuration.fixture import ConfigEntityFixture
    config_entity = db_entity_interest.config_entity.subclassed
    config_entity_fixture = ConfigEntityFixture.resolve_config_entity_fixture(config_entity)
    permission_lookup = config_entity_fixture.default_db_entity_permissions()
    logger.info("For ConfigEntity {config_entity} will apply DbEntity permission to groups {groups} based on permission_lookup {permission_lookup}".format(
        groups=', '.join(map(lambda group: group.name, config_entity_groups)),
        config_entity=config_entity.name,
        permission_lookup=permission_lookup
    ))
    sync_config_entity_group_permissions(
            db_entity,
            config_entity_groups,
            permission_lookup,
            permission_key_class=DbEntityPermissionKey,
            **kwargs)

    # Repeat the process for all descendant ConfigEntities, just giving their Groups view permission.
    # There's no obvious use case for a ConfigEntity Group of a child ConfigEntity having edit permission
    # to the DbEntity of a parent ConfigEntity.
    for descendant in config_entity.descendants():
        descendant_config_entity_groups = descendant.config_entity_groups()
        # The permission lookup maps the global version of each group to VIEW permission
        # Example: The Manager Group of Project Foo would just map to UserGroupKey.MANAGER
        descendant_permission_lookup = map_to_dict(
            lambda config_entity_group: [config_entity_group.group_hierarchy.globalized_group().name, PermissionKey.VIEW],
            descendant_config_entity_groups
        )

        logger.info(
            "For descendant ConfigEntity {config_entity} will apply DbEntity permission \
            to groups {groups} based on permission_lookup {permission_lookup}".format(
                groups=', '.join(map(lambda group: group.name, descendant_config_entity_groups)),
                config_entity=descendant.name,
                permission_lookup=descendant_permission_lookup
            )
        )

        # Apply view permissions to the ConfigEntity groups of each descendant ConfigEntity
        sync_config_entity_group_permissions(
            db_entity,
            descendant_config_entity_groups,
            descendant_permission_lookup,
            permission_key_class=DbEntityPermissionKey,
            process_parent_config_entity_groups=False,
            **kwargs)

    reset_queries()
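For each descendant ConfigEntity, the loop above builds permission_lookup by mapping the global counterpart of every descendant group to VIEW permission; map_to_dict is a footprint helper that turns [key, value] pairs into a dict. The stand-in below only illustrates the shape of that mapping: the globalized() helper and the group names are hypothetical, not the real group_hierarchy API.

# Stand-in for the map_to_dict pattern: build {global_group_name: 'view'} from
# a list of illustrative descendant config entity group names.
def map_to_dict(pair_func, items):
    return dict(pair_func(item) for item in items)

descendant_groups = ['project_foo__manager', 'project_foo__editor']

def globalized(group_name):
    # Hypothetical: strip the config-entity prefix to get the global group key
    return group_name.split('__')[-1]

permission_lookup = map_to_dict(lambda group: [globalized(group), 'view'], descendant_groups)
print(permission_lookup)  # {'manager': 'view', 'editor': 'view'}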
Example No. 44
def post_save_publishing(signal_path, config_entity, user, **kwargs):
    """
        The initial entry point and recursive entry point for all post save publishing methods
        :signal_path - the full module path of the signal that called this
        :param kwargs:
            signal_proportion_lookup - A dictionary of signal names to the proportion complete of the overall post save.
            The signal matching signal_path will be sought in the dictionary
            config_entity - The scope of whatever is being post-saved, whether a config_entity or something within it
            dependent_signal_paths - Full module signal paths called in sequentially by this publisher
            crud_type - CrudKey.CREATE|CLONE|UPDATE|SYNC|DELETE
            instance_class - Optional. Overrides the class of the instance for use in communicating with the client.
            This is used when the client only cares about a base class, such as Feature, or for a DbEntityInterest
            to be treated as a DbEntity
            client_instance_path - Optional. Property path to resolve the instance to another instance for the client.
             (this is only used to convert DbEntityInterest to DbEntity)
    """
    api_key = ApiKey.objects.get(user=user).key

    # Gather instance ids, class, and optional instance keys
    bundle = InstanceBundle(**merge(kwargs, dict(user_id=user.id)))

    # Pass the arguments to the task and run via celery. Note that kwargs is being treated
    # as a dict here and passed along
    logger.info("Django post save: %s" % unicode(bundle))

    # Send the start event to the client if we aren't recursing.
    if not kwargs.get('recurse', False):
        event = 'postSavePublisherStarted'
        logger.info("Sending start message %s to user %s with %s" %
                    (event, user.username, unicode(bundle)))

        send_message_to_client(
            user.id,
            dict(
                event=event,
                config_entity_id=config_entity and config_entity.id,
                config_entity_class_name=config_entity
                and config_entity.__class__.__name__,
                class_name=bundle.class_name_for_client,
                # Always send 0 for initial
                proportion=0,
                ids=bundle.client_instance_ids,
                keys=bundle.keys,
                class_key=bundle.class_key))

    # Start Celery
    logger.info("Starting post save publishing with signal path %s" %
                signal_path)
    job = start_and_track_task(
        _post_save_publishing,
        api_key,
        config_entity,
        user,
        **merge(
            remove_keys(kwargs, ['instance']),
            dict(
                # If we are recursing (already in a celery worker, don't start a new celery task
                # When we get dependency order figured out, we can do this, but there's probably
                # a better way via the Task object or something
                current_job=kwargs.get('job', None),
                signal_path=signal_path,
                crud_type=kwargs.get('crud_type'),
                bundle=bundle)))

    return HttpResponse(job.hashid)
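post_save_publishing leans on two small dict helpers imported from footprint's utilities, merge() and remove_keys(). The stand-ins below show the behavior assumed by the call above (later arguments win in merge; remove_keys drops keys non-destructively); they are not the actual footprint implementations.

# Rough stand-ins for the dict helpers used above.
def merge(*dicts):
    result = {}
    for d in dicts:
        result.update(d)
    return result

def remove_keys(d, keys):
    return dict((k, v) for k, v in d.items() if k not in keys)

kwargs = {'instance': object(), 'crud_type': 'create', 'job': None}
print(merge(remove_keys(kwargs, ['instance']), {'signal_path': 'module.signal'}))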
Example No. 45
def on_config_entity_post_save(sender, **kwargs):
    """
        Create the ConfigEntity's database schema on initial save.
        Post save starts a chain of asynchronous publishers that run according to a dependency tree.
        First publishers that are wired to the post_save_config_entity_initial signal
        run, followed by publishers dependent on signals that are dependent on
        post_save_config_entity_initial (see dependent_signal_paths)
        :param sender:
        :param kwargs:
            instance - the ConfigEntity
            created - True if the instance was just created
            sync - True if the instance should be synced to the configuration
        :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if transaction.is_managed():
        transaction.commit()
    crud_type = CrudKey.resolve_crud(**kwargs)

    # Send a message to publishers to configure after creation or update of the config_entity
    # This is executed through a Celery task so that it can run asynchronously
    if config_entity._no_post_save_publishing:
        return
    if config_entity.deleted:
        # Also do nothing if the config_entity is deleted. At some point this should do some
    # processing, such as rekeying the scenario so it doesn't conflict with new scenario keys
        return

    for child_config_entity in config_entity.children():
        # Do any needed syncing of config_entity_children
        # This currently does nothing
        child_config_entity.parent_config_entity_saved()


    if CrudKey.CLONE == crud_type:
        config_entity.add_categories(*config_entity.origin_instance.categories.all())
    elif CrudKey.CREATE == crud_type:
        # Unless preconfigured, set the basic category based on type
        if config_entity.categories.count() == 0:
            category = Category.objects.update_or_create(
                key='category',
                value='Future' if isinstance(config_entity, FutureScenario) else 'Base')[0]
            config_entity.add_categories(category)

    # TODO The default user here should be the admin, and in fact all config_entity instances
    # should simply be required to have a creator
    user = config_entity.creator if config_entity.creator else get_user_model().objects.get(username=GroupKey.SUPERADMIN)
    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_config_entity_initial')

    logger.info("Handler: post_save_config_entity for config_entity {config_entity} and user {username}".format(
        config_entity=config_entity,
        username=user.username))

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=config_entity,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_config_entity',
        crud_type=crud_type)
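The signal_proportion_lookup and dependent_signal_paths passed above are defined elsewhere in the publishing module; the sketch below is a hypothetical illustration of their assumed shape (signal names and proportions are invented for the example):

# Fraction of the total progress attributed to each signal, so the client can
# render a progress bar from the proportion events each publisher sends.
signal_proportion_lookup = dict(
    post_save_config_entity_initial=0.5,
    post_save_config_entity_layers=0.3,
    post_save_config_entity_tilestache=0.2)


def dependent_signal_paths(signal_path):
    # Map each signal to the signals whose publishers should run after it
    # completes, forming the dependency tree described in the docstring above.
    return dict(
        post_save_config_entity_initial=['post_save_config_entity_layers'],
        post_save_config_entity_layers=['post_save_config_entity_tilestache'],
        post_save_config_entity_tilestache=[]).get(signal_path, [])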
Example No. 47
0
def on_config_entity_post_save_policy(sender, **kwargs):
    """
        Sync a ConfigEntity's ResultPage presentation
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    update_or_create_policy_sets(config_entity, **kwargs)
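update_or_create_policy_sets is implemented elsewhere in the codebase. As a purely hypothetical illustration of the idempotent update_or_create idiom such a sync helper typically relies on (model and field names are invented):

def update_or_create_keyed_instance(model_class, key, defaults):
    # Django's update_or_create returns (instance, created_flag); this wrapper
    # discards the flag so configuration can be re-synced idempotently on each save.
    instance, _ = model_class.objects.update_or_create(key=key, defaults=defaults)
    return instance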
Example No. 49
0
def on_config_entity_pre_delete_analysis_modules(sender, **kwargs):
    """
        Delete the ConfigEntity's AnalysisModules and AnalysisTools before the ConfigEntity itself is deleted
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    AnalysisModule.objects.filter(config_entity=config_entity).delete()
    AnalysisTool.objects.filter(config_entity=config_entity).delete()
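A hypothetical wiring of the handler above to Django's pre_delete signal; the project connects its handlers through its own publishing machinery, so this only illustrates the underlying Django mechanism, and the concrete ConfigEntity class is assumed to be supplied by the caller:

from django.db.models.signals import pre_delete

def connect_analysis_module_cleanup(config_entity_class):
    # config_entity_class is the concrete ConfigEntity model class to listen on.
    pre_delete.connect(
        on_config_entity_pre_delete_analysis_modules,
        sender=config_entity_class,
        dispatch_uid='config_entity_pre_delete_analysis_modules')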
Example No. 50
0
def on_config_entity_pre_delete_results(sender, **kwargs):
    """
        Sync geoserver to a ConfigEntity class after the latter is saved
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
Example No. 51
0
def on_db_entity_post_save_user(sender, **kwargs):
    """
        Give all ConfigEntity Groups in the hierarchy appropriate permissions on the saved DbEntity
    :param sender:
    :param kwargs:
    :return:
    """
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.deleted:
        # Nothing to do for deleted instances
        return

    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity
    logger.info("Handler: on_db_entity_post_save_user. DbEntity: %s" %
                db_entity.full_name)

    # Get the ConfigEntity Group(s) for the DbEntity's ConfigEntity
    # The groups are configured based on the subclass of ConfigEntity
    config_entity_groups = config_entity.config_entity_groups()

    # Sync permissions for the DbEntity

    # Resolve the default DbEntity permissions for the DbEntity's ConfigEntity
    from footprint.client.configuration.fixture import ConfigEntityFixture
    config_entity_fixture = ConfigEntityFixture.resolve_config_entity_fixture(
        config_entity)
    permission_lookup = config_entity_fixture.default_db_entity_permissions()
    logger.info(
        "For ConfigEntity {config_entity} will apply DbEntity permission to groups "
        "{groups} based on permission_lookup {permission_lookup}".format(
            groups=', '.join(map(lambda group: group.name, config_entity_groups)),
            config_entity=config_entity.name,
            permission_lookup=permission_lookup))
    sync_config_entity_group_permissions(
        db_entity,
        config_entity_groups,
        permission_lookup,
        permission_key_class=DbEntityPermissionKey,
        **kwargs)

    # Repeat the process for all descendant ConfigEntities, just giving their Groups view permission.
    # There's no obvious use case for a ConfigEntity Group of a child ConfigEntity having edit permission
    # to the DbEntity of a parent ConfigEntity.
    for descendant in config_entity.descendants():
        descendant_config_entity_groups = descendant.config_entity_groups()
        # The permission lookup maps the global version of each group to VIEW permission
        # Example: The Manager Group of Project Foo would just map to UserGroupKey.MANAGER
        descendant_permission_lookup = map_to_dict(
            lambda config_entity_group: [
                config_entity_group.group_hierarchy.globalized_group().name,
                PermissionKey.VIEW
            ], descendant_config_entity_groups)

        logger.info(
            "For descendant ConfigEntity {config_entity} will apply DbEntity permission "
            "to groups {groups} based on permission_lookup {permission_lookup}".format(
                groups=', '.join(map(lambda group: group.name, descendant_config_entity_groups)),
                config_entity=descendant.name,
                permission_lookup=descendant_permission_lookup))

        # Apply view permissions to the ConfigEntity groups of each descendant ConfigEntity
        sync_config_entity_group_permissions(
            db_entity,
            descendant_config_entity_groups,
            descendant_permission_lookup,
            permission_key_class=DbEntityPermissionKey,
            process_parent_config_entity_groups=False,
            **kwargs)

    reset_queries()
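A hypothetical sketch of the map_to_dict helper used above and the lookup it is expected to produce; the real helper ships with the project's utilities, and the group names below are invented for illustration:

def map_to_dict(callback, iterable):
    # Build a dict from the [key, value] pairs returned by the callback.
    return dict(callback(item) for item in iterable)


# For a descendant ConfigEntity whose groups globalize to 'manager' and 'user',
# the descendant permission lookup would map each group to view-only access, e.g.:
# {'manager': PermissionKey.VIEW, 'user': PermissionKey.VIEW}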