def on_layer_post_save_db_entity_process_layer(sender, **kwargs):
    """
    For db_entity create/update/clone, this creates or updates the layer

    :param sender: the signal sender
    :param kwargs: 'instance' is the DbEntityInterest being published
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture

    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    config_entity = db_entity_interest.config_entity.subclassed
    db_entity = db_entity_interest.db_entity
    # Fixed typo in the log message ('db_enitty' -> 'db_entity') so log searches
    # for this handler's name actually find its output
    logger.info("Handler: on_layer_post_save_db_entity_process_layer for DbEntity: key %s, id %s" % (
        db_entity.key, db_entity.id))

    # Return if the db_entity was not created from an user imported feature
    # Configured layer saving is handled by after saving the ConfigEntity
    if not db_entity.feature_class_configuration.generated:
        return

    layer_configuration_fixture = resolve_fixture(
        "presentation",
        "layer",
        LayerConfigurationFixture,
        config_entity.schema(),
        config_entity=config_entity)

    # Update the layer via the layer library update_or_create
    # Find LayerLibraries matching the ConfigEntity class scope
    for layer_library_configuration in layer_configuration_fixture.layer_libraries():
        logger.info("Handler: Layer Library Configuration: %s" % layer_library_configuration)
        # Update/Create the layer library and layers, limited to this DbEntity's key
        _update_or_create_layer_library_and_layers(
            config_entity, layer_library_configuration, db_entity_keys=[db_entity.key])
def on_config_entity_post_save_layer(sender, **kwargs):
    """
    Sync tilestache to a ConfigEntity class after the latter is saved
    :param **kwargs: optional "db_entity_keys" to limit the layers created to those DbEntities
    """
    from footprint.client.configuration.fixture import LayerConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture

    # Disable post save publishing on individual layers. The ConfigEntity is controlling publishing
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_layer for %s" % config_entity.full_name)

    # Resolve the client-specific layer fixture for this ConfigEntity's schema
    fixture = resolve_fixture(
        "presentation",
        "layer",
        LayerConfigurationFixture,
        config_entity.schema(),
        config_entity=config_entity)

    # Keep only the LayerLibrary configurations whose scope matches the ConfigEntity class
    matching_configurations = FixtureList(fixture.layer_libraries()).matching_scope(
        class_scope=config_entity.__class__)

    logger.info("Processing LayerLibrary Configurations %s" % ', '.join(
        [configuration.key for configuration in matching_configurations]))

    # Create LayerLibrary instances based on each matching LayerLibrary configuration
    for configuration in matching_configurations:
        _update_or_create_layer_library_and_layers(config_entity, configuration, **kwargs)

    reset_queries()
def on_config_entity_post_save_group(sender, **kwargs):
    """
    Syncs the user, groups, and permissions for the ConfigEntity
    Some ConfigEntity classes create their own Groups and default Users. This makes it easy to
    give a client-specific user permission to certain ConfigEntity by joining the latter's group
    :param sender:
    :param kwargs: 'instance' is the ConfigEntity; 'user' is optionally the saving User;
        'created' is True when the ConfigEntity was just created
    :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return

    user = kwargs.get('user')
    logger.info("Handler: post_save_user for config_entity {config_entity} and user {username}".format(
        config_entity=config_entity.name,
        username=user.username if user else 'undefined'))

    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(username=UserGroupKey.SUPERADMIN)
        # Save without re-triggering the post-save publishing chain
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # First update_or_create any default groups. This usually just applies to global_config
    from footprint.client.configuration.fixture import UserFixture
    from footprint.client.configuration.utils import resolve_fixture
    user_fixture = resolve_fixture(
        "user",
        "user",
        UserFixture,
        config_entity.schema(),
        config_entity=config_entity)
    for group_fixture in user_fixture.groups():
        group = update_or_create_group(**group_fixture)
        logger.info("User Publishing. For ConfigEntity %s synced global UserGroup: %s" % (
            config_entity.name, group.name))

    # Sync permissions for the ConfigEntity
    # Resolve the default ConfigEntity permissions for this config_entity
    # Update or Create the ConfigEntity Group(s) for this ConfigEntity
    config_entity_groups = _update_or_create_config_entity_groups(config_entity)
    # Get the mapping of groups to permission types for the config_entity's most relevant fixture
    # These group keys are generally all global groups.
    from footprint.client.configuration.fixture import ConfigEntitiesFixture
    config_entities_fixture = resolve_fixture(
        "config_entity",
        "config_entities",
        ConfigEntitiesFixture,
        config_entity.schema())
    permission_lookup = config_entities_fixture.default_config_entity_permissions()
    # Set the permissions for the config_entity groups. This will also set all superior group
    # permissions to the same permissions or greater if they match something in the permission_lookup
    config_entity_group_permissions = sync_config_entity_group_permissions(
        config_entity,
        config_entity_groups,
        permission_lookup,
        permission_key_class=ConfigEntityPermissionKey,
        **kwargs)

    # Give the groups read permissions on the ancestor config_entities
    # TODO restrict this access further for the UserGroupKey.DEMO group
    groups = Group.objects.filter(name__in=config_entity_group_permissions.keys())
    # BUG FIX: the loop variable previously shadowed config_entity, leaving it rebound to the
    # last ancestor after the loop -- a latent bug for any code later added below this point
    for ancestor_config_entity in config_entity.ancestors:
        ancestor_config_entity.assign_permission_to_groups(groups, PermissionKey.VIEW)

    # TODO tell children to add themselves to all ancestors (resync)
    # This will only be needed if the parent ConfigEntity group permission configuration changes
    reset_queries()
def on_config_entity_post_save_analysis_modules(sender, **kwargs):
    """
    Create or update the AnalysisModules of the ConfigEntity that was just saved
    """
    saved_config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_analysis_module. ConfigEntity: %s" %
                saved_config_entity.name)
    update_or_create_analysis_modules(saved_config_entity, **kwargs)
    reset_queries()
def on_config_entity_post_save_result(sender, **kwargs):
    """
    Sync a ConfigEntity's ResultPage presentation

    :param kwargs: 'db_entity_keys' Optional list to limit which DbEntities are processed
    """
    saved_config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_result for %s" % saved_config_entity.full_name)
    update_or_create_result_libraries(saved_config_entity)
def on_db_entity_post_save_analysis_modules(sender, **kwargs):
    """
    Respond to whenever a db entity is added or updated

    kwargs: instance is the DbEntityInterest
            user_id is the User performing the save
            features are optionally the feature ids if this is called as the result of saving
            Feature instances
    :return:
    """
    db_entity_interest = InstanceBundle.extract_single_instance(**kwargs)
    if db_entity_interest.db_entity.source_db_entity_key:
        # No analysis dependent on result DbEntities, so just quit
        return

    behavior = db_entity_interest.db_entity.feature_behavior.behavior
    main_config_entity = db_entity_interest.config_entity
    # Todo children() should be all_scenario_descendents or similar to handle region
    config_entities = [main_config_entity] if isinstance(main_config_entity, Scenario) else \
        [main_config_entity] + list(main_config_entity.children())

    # BUG FIX: the docstring declares 'features' optional, but the original code read
    # kwargs['features'] unconditionally, raising KeyError when a matching tool fired
    # without features being passed. Default to an empty list instead.
    features = kwargs.get('features') or []

    for config_entity in config_entities:
        logger.info(
            "Checking which analysis modules should run for config_entity %s, db_entity %s, behavior %s",
            config_entity.name, db_entity_interest.db_entity.key, behavior)
        analysis_modules = config_entity.analysis_modules
        for analysis_module in analysis_modules:
            for analysis_tool in analysis_module.analysis_tools.all().select_subclasses():
                # If the DbEntity's Behavior matches or has the tool's Behavior as a parent,
                # it means that the DbEntity Feature save should trigger the tool to update
                # We only use this for the Scenario Builder and Agriculture Builder, although
                # other DbEntity Feature updates could trigger analysis tools to run
                # For example, the Scenario Builder has behavior 'scenario_editor_tool', which
                # is a parent behavior of DbEntity 'scenario_end_state's behavior 'scenario_end_state'
                if analysis_tool.behavior and behavior.has_behavior(analysis_tool.behavior):
                    logger.info("Updating AnalysisTool %s for AnalysisModule %s for ConfigEntity %s" % \
                                (analysis_tool.key, analysis_module.key, config_entity.name))
                    # Note that even though an individual tool matched, we call start on the
                    # AnalysisModule, which runs its tools. We don't have any modules with multiple
                    # tools, so this hack works
                    # Update the updater field first, without re-triggering the post-save task run
                    analysis_module.updater = get_user_model().objects.get(id=kwargs['user_id'])
                    analysis_module._no_post_save_task_run = True
                    analysis_module.save()
                    analysis_module._no_post_save_task_run = False
                    # Run the module
                    analysis_module.start(ids=map(lambda feature: feature.id, features))
def on_analysis_module_post_save(sender, **kwargs):
    analysis_module = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Post Save for AnalysisModule %s for ConfigEntity %s" % \
                (analysis_module.key, analysis_module.config_entity.name))
    if kwargs.get('created', None):
        # Creation never auto-starts the module; only updates do
        return
    # Automatically start the analysis module on update since the client simply updates the
    # analysis module to force it to run.
    # If already started, don't start again. Saves happen during the task run in order
    # to update the analysis_module timestamps
    # The instance flag is used by post_save_config_entity_publishing to turn it off
    suppressed = (analysis_module._started or
                  analysis_module._no_post_save_task_run or
                  AnalysisModule._no_post_save_task_run_global)
    if not suppressed:
        analysis_module.start()
def on_config_entity_post_save_tilestache(sender, **kwargs):
    """
    Update/Create the tilestache data for the layers of the given ConfigEntity

    :param: kwargs: Optional db_entity_keys to limit the layers to those of the given keys
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    # Fixed log message: it previously read 'on_config_entity_post_tilestache', which does not
    # match this handler's name and made log correlation misleading
    logger.info("Handler: on_config_entity_post_save_tilestache. ConfigEntity: %s" % config_entity.name)

    # Only Scenario instances carry tilestache layer data; skip all other ConfigEntity classes
    if not isinstance(config_entity, Scenario):
        return

    _on_post_save_tilestache(config_entity, **kwargs)
    reset_queries()
def on_config_entity_pre_save(sender, **kwargs):
    """
    A presave event handler. Currently this just defaults the bounds of the instance to those
    of its parent
    :param sender:
    :param kwargs:
    :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    if config_entity.pk:
        # Already persisted; nothing to default
        return
    if not config_entity.bounds:
        # Inherit the parent's bounds if none are defined
        config_entity.bounds = config_entity.parent_config_entity.bounds
def on_config_entity_post_save_db_entity(sender, **kwargs):
    """
    CRUD a ConfigEntity's DbEntities. Called when creating, clone, updating, or syncing a scenario

    Create (kwargs['created']==True). Relies on the client configuration of its DbEntity instance
    Clone (kwargs['created']==True and kwargs['instance'].origin_instance). Relies on the origin
        instance clone DbEntities
    Update. For now don't do anything. We only expose primitive attributes to the user
    Sync (kwargs['sync']==True). Iterate through the client configuration and update DbEntities
        according to the configuration. For clones sync will sync to the origin instance.
    """
    saved_config_entity = InstanceBundle.extract_single_instance(**kwargs)
    logger.info("Handler: on_config_entity_post_save_db_entity. ConfigEntity: %s" %
                saved_config_entity.name)
    crud_db_entities(saved_config_entity, kwargs.get('crud_type'))
    reset_queries()
def on_config_entity_post_save(sender, **kwargs):
    """
    Create the ConfigEntity's database schema on initial save.

    Post save starts a chain of asynchronous publishers that run according to a dependency tree.
    First publishers that are wired to the post_save_config_entity_initial signal run, followed by
    publishers dependent on signals that are dependent of post_save_config_entity_initial
    (see dependent_signal_paths)
    :param sender:
    :param kwargs:
        instance - the ConfigEntity
        created - True if the instance was just created
        sync - True if the instance should be synced to the configuration
    :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    # Commit any open managed transaction before publishing begins
    # (legacy Django API; removed in Django >= 1.8)
    if transaction.is_managed():
        transaction.commit()
    crud_type = CrudKey.resolve_crud(**kwargs)
    # Send a message to publishers to configure after creation or update of the config_entity
    # This is executed through a Celery task so that it can run asynchronously
    if config_entity._no_post_save_publishing:
        return
    if config_entity.deleted:
        # Also do nothing if the config_entity is deleted. At some point this should do some
        # processings, such as rekeying the scenario so it doesn't conflict with new scenario keys
        return

    for child_config_entity in config_entity.children():
        # Do any needed syncing of config_entity_children
        # This currently does nothing
        child_config_entity.parent_config_entity_saved()

    if CrudKey.CLONE == crud_type:
        # Clones inherit the origin ConfigEntity's categories
        config_entity.add_categories(*config_entity.origin_instance.categories.all())
    elif CrudKey.CREATE == crud_type:
        # Unless preconfigured, set the basic category based on type
        if config_entity.categories.count() == 0:
            category = Category.objects.update_or_create(
                key='category',
                value='Future' if isinstance(config_entity, FutureScenario) else 'Base')[0]
            config_entity.add_categories(category)

    # TODO The default user here should be the admin, and in fact all config_entity instances
    # should simply have to have a creator
    # NOTE(review): other handlers in this module look up the superadmin via
    # UserGroupKey.SUPERADMIN; confirm GroupKey.SUPERADMIN resolves to the same username
    user = config_entity.creator if config_entity.creator else get_user_model(
    ).objects.get(username=GroupKey.SUPERADMIN)

    # Resolve the dotted path of the first signal in the publishing chain
    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_config_entity_initial')

    logger.info(
        "Handler: post_save_config_entity for config_entity {config_entity} and user {username}"
        .format(config_entity=config_entity, username=user.username))

    # Kick off the asynchronous publishing chain and return its result to the signal dispatcher
    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=config_entity,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_config_entity',
        crud_type=crud_type)