def on_config_entity_post_save_group(sender, **kwargs):
    """
    Syncs the user, groups, and permissions for the ConfigEntity.

    Some ConfigEntity classes create their own Groups and default Users.
    This makes it easy to give a client-specific user permission to certain
    ConfigEntity by joining the latter's group.

    :param sender: the signal sender (unused)
    :param kwargs: expects 'instance' (the ConfigEntity); optionally 'user'
        and 'created'
    :return: None
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    # Guard against recursive post-save publishing
    if config_entity._no_post_save_publishing:
        return
    user = kwargs.get('user')
    logger.info("Handler: post_save_user for config_entity {config_entity} and user {username}".format(
        config_entity=config_entity.name,
        username=user.username if user else 'undefined'))

    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(username=UserGroupKey.SUPERADMIN)
        # Suppress publishers while saving the creator to avoid re-entering this handler
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # First update_or_create any default groups. This usually just applies to global_config
    from footprint.client.configuration.fixture import UserFixture
    from footprint.client.configuration.utils import resolve_fixture
    user_fixture = resolve_fixture("user", "user", UserFixture, config_entity.schema(),
                                   config_entity=config_entity)
    for group_fixture in user_fixture.groups():
        group = update_or_create_group(**group_fixture)
        logger.info("User Publishing. For ConfigEntity %s synced global UserGroup: %s" %
                    (config_entity.name, group.name))

    # Sync permissions for the ConfigEntity
    # Resolve the default ConfigEntity permissions for this config_entity
    # Update or Create the ConfigEntity Group(s) for this ConfigEntity
    config_entity_groups = _update_or_create_config_entity_groups(config_entity)

    # Get the mapping of groups to permission types for the config_entity's most relevant fixture
    # These group keys are generally all global groups.
    from footprint.client.configuration.fixture import ConfigEntitiesFixture
    config_entities_fixture = resolve_fixture("config_entity", "config_entities", ConfigEntitiesFixture,
                                              config_entity.schema())
    permission_lookup = config_entities_fixture.default_config_entity_permissions()

    # Set the permissions for the config_entity groups. This will also set all superior group permissions
    # to the same permissions or greater if they match something in the permission_lookup
    config_entity_group_permissions = sync_config_entity_group_permissions(
        config_entity, config_entity_groups, permission_lookup,
        permission_key_class=ConfigEntityPermissionKey, **kwargs)

    # Give the groups read permissions on the ancestor config_entities
    # TODO restrict this access further for the UserGroupKey.DEMO group
    groups = Group.objects.filter(name__in=config_entity_group_permissions.keys())
    # BUGFIX: use a distinct loop variable; the original reused config_entity
    # as the loop variable, clobbering the outer binding for the rest of the scope
    for ancestor_config_entity in config_entity.ancestors:
        ancestor_config_entity.assign_permission_to_groups(groups, PermissionKey.VIEW)

    # TODO tell children to add themselves to all ancestors (resync)
    # This will only be needed if the parent ConfigEntity group permission configuration changes
    reset_queries()
def minimum_initialization(**kwargs):
    """
    A minimum initialization for unit tests.
    :param kwargs: 'limit_to_classes' as an array of ConfigEntity classes to limit processing to those
    :return:
    """
    from footprint.main.publishing.built_form_publishing import on_config_entity_post_save_built_form
    # Disable built_forms
    post_save_config_entity_initial.disconnect(
        on_config_entity_post_save_built_form,
        GlobalConfig,
        True,
        "built_form_publishing_on_config_entity_post_save")

    application_initialization(**kwargs)

    # Get access to the ConfigEntity fixtures for the configured client
    config_entities_fixture = resolve_fixture(
        "config_entity", "config_entities", ConfigEntitiesFixture, settings.CLIENT)

    # Extract the 'key' of each fixture config and use the first one of each scope
    region_key = [fixture_config['key'] for fixture_config in config_entities_fixture.regions()][0]
    # NOTE(review): 'update_of_create_regions' looks like a typo of
    # update_or_create_regions -- confirm the helper's actual name
    update_of_create_regions(region_keys=[region_key])

    project_key = [fixture_config['key'] for fixture_config in config_entities_fixture.projects()][0]
    project = update_or_create_projects(region_keys=[region_key], project_keys=[project_key])[0]

    project_fixture = resolve_fixture(
        "config_entity", "config_entities", ConfigEntitiesFixture, project.schema())
    scenario_key = [fixture_config['key'] for fixture_config in project_fixture.scenarios(project=project)][0]
    update_or_create_scenarios(
        region_keys=[region_key], project_keys=[project_key], scenario_keys=[scenario_key])
def minimum_initialization(**kwargs):
    """
    A minimum initialization for unit tests.
    :param kwargs: 'limit_to_classes' as an array of ConfigEntity classes to limit processing to those
    :return:
    """
    from footprint.main.publishing.built_form_publishing import on_config_entity_post_save_built_form
    # Disable built_forms
    post_save_config_entity_initial.disconnect(
        on_config_entity_post_save_built_form,
        GlobalConfig,
        True,
        "built_form_publishing_on_config_entity_post_save")

    application_initialization(**kwargs)

    # Get access to the ConfigEntity fixtures for the configured client
    client_fixture = resolve_fixture(
        "config_entity", "config_entities", ConfigEntitiesFixture, settings.CLIENT)

    # Take the key of the first region/project/scenario fixture at each scope
    region_key = [config['key'] for config in client_fixture.regions()][0]
    # NOTE(review): 'update_of_create_regions' looks like a typo of
    # update_or_create_regions -- confirm the helper's actual name
    update_of_create_regions(region_keys=[region_key])

    project_key = [config['key'] for config in client_fixture.projects()][0]
    project = update_or_create_projects(region_keys=[region_key], project_keys=[project_key])[0]

    project_fixture = resolve_fixture(
        "config_entity", "config_entities", ConfigEntitiesFixture, project.schema())
    scenario_key = [config['key'] for config in project_fixture.scenarios(project=project)][0]
    update_or_create_scenarios(
        region_keys=[region_key], project_keys=[project_key], scenario_keys=[scenario_key])
def application_initialization(**kwargs): """ Initialize or sync the application :param kwargs: 'limit_to_classes' as an array of ConfigEntity classes to limit processing to those 'no_post_save_publishing' set True to prevent the GlobalConfig save from starting publishers """ # Initialize lookup table data if SouthMigrationHistory.objects.filter(app_name='main').exists(): initialize_table_definitions() initialize_client_data() # Bootstrap the GlobalConfig. We'll fill it out later GlobalConfig._no_post_save_publishing = True global_config = GlobalConfig.objects.update_or_create( key=Keys.GLOBAL_CONFIG_KEY, defaults=dict(bounds=GEOSGeometry('MULTIPOLYGON EMPTY')))[0] GlobalConfig._no_post_save_publishing = False # Bootstrap the admin group and user so we can use them beforehand update_or_create_group(name=UserGroupKey.SUPERADMIN, config_entity=global_config) # These users have the same name as their group update_or_create_user(username=UserGroupKey.SUPERADMIN, password='******', groups=[UserGroupKey.SUPERADMIN], is_super_user=True) # Bootstrap the global config Behaviors behavior_fixture = resolve_fixture("behavior", "behavior", BehaviorFixture, 'global', **kwargs) for behavior in behavior_fixture.behaviors(): update_or_create_behavior(behavior) # Boot strap the global config AttributeGroups attribute_group_fixture = resolve_fixture("behavior", "attribute_group", AttributeGroupFixture, 'global', **kwargs) for attribute_group in attribute_group_fixture.attribute_groups(): update_or_create_attribute_group(attribute_group) # Cartocss template storage create_media_subdir('styles') create_media_subdir('cartocss') # Sync the DBEntities to tables in the global schema global_config = initialize_global_config(**kwargs) for region_fixture in region_fixtures(): # Create the Behavior instances. 
# These can be defined at the Region scope, but most # are simply defined at default_behavior.py behavior_fixture = resolve_fixture("behavior", "behavior", BehaviorFixture, region_fixture.schema, **kwargs) return map(lambda behavior: update_or_create_behavior(behavior), behavior_fixture.behaviors())
def application_initialization(**kwargs): """ Initialize or sync the application :param kwargs: 'limit_to_classes' as an array of ConfigEntity classes to limit processing to those 'no_post_save_publishing' set True to prevent the GlobalConfig save from starting publishers """ # Initialize lookup table data if SouthMigrationHistory.objects.filter(app_name='main').exists(): initialize_table_definitions() initialize_client_data() # Bootstrap the GlobalConfig. We'll fill it out later GlobalConfig._no_post_save_publishing = True global_config = GlobalConfig.objects.update_or_create( key=Keys.GLOBAL_CONFIG_KEY, defaults=dict(bounds=GEOSGeometry('MULTIPOLYGON EMPTY')) )[0] GlobalConfig._no_post_save_publishing = False # Bootstrap the admin group and user so we can use them beforehand update_or_create_group(name=UserGroupKey.SUPERADMIN, config_entity=global_config) # These users have the same name as their group update_or_create_user(username=UserGroupKey.SUPERADMIN, password='******', groups=[UserGroupKey.SUPERADMIN], is_super_user=True) # Bootstrap the global config Behaviors behavior_fixture = resolve_fixture("behavior", "behavior", BehaviorFixture, 'global', **kwargs) for behavior in behavior_fixture.behaviors(): update_or_create_behavior(behavior) # Boot strap the global config AttributeGroups attribute_group_fixture = resolve_fixture("behavior", "attribute_group", AttributeGroupFixture, 'global', **kwargs) for attribute_group in attribute_group_fixture.attribute_groups(): update_or_create_attribute_group(attribute_group) # Cartocss template storage create_media_subdir('styles') create_media_subdir('cartocss') # Sync the DBEntities to tables in the global schema global_config = initialize_global_config(**kwargs) for region_fixture in region_fixtures(): # Create the Behavior instances. 
# These can be defined at the Region scope, but most # are simply defined at default_behavior.py behavior_fixture = resolve_fixture("behavior", "behavior", BehaviorFixture, region_fixture.schema, **kwargs) return map( lambda behavior: update_or_create_behavior(behavior), behavior_fixture.behaviors())
def built_form_sets(config_entity):
    """
    Constructs and persists buildings, buildingtypes, and placetypes and their
    associates and then returns them all as a persisted BuiltFormSet. One
    BuiltFormSet is returned in an array.

    :param config_entity: the ConfigEntity used to scope the BuiltFormFixture.
        (The original docstring documented a nonexistent 'test' parameter.)
    :return: list of BuiltFormSets when importing from CSV, {} after loading
        the JSON fixture, or None when neither branch applies
    """
    from footprint.client.configuration.fixture import BuiltFormFixture
    from footprint.client.configuration.utils import resolve_fixture

    json_fixture = os.path.join(settings.ROOT_PATH, 'built_form_fixture.json')
    built_form_fixture = resolve_fixture("built_form", "built_form", BuiltFormFixture, settings.CLIENT,
                                         config_entity=config_entity)

    # CSV import wins when configured, or when there is no JSON fixture to fall back on
    if settings.IMPORT_BUILT_FORMS == 'CSV' or (not os.path.exists(json_fixture)):
        logger.info('Importing built forms from csv source')
        # Get the fixture scoped for the config_entity
        # Create any built_form class sets that are configured for the client at the config_entity's class scope
        built_forms_dict = built_form_fixture.built_forms()
        built_form_fixture.tag_built_forms(built_forms_dict)
        built_forms = flatten(built_forms_dict.values())
        return map(
            lambda built_form_set_config: update_or_create_built_form_set(built_form_set_config, built_forms),
            built_form_fixture.built_form_sets())
    elif settings.IMPORT_BUILT_FORMS == 'JSON' and not BuiltForm.objects.count():
        # Only load the JSON fixture when no BuiltForms exist yet
        logger.info('Importing built forms from json fixture at ' + json_fixture)
        call_command('loaddata', json_fixture)
        return {}
def projects_of_region(region):
    """
    Return the project fixture configs of the given region.

    Resolves the ConfigEntitiesFixture at the narrowest scope available:
    the enclosing local_class_scope when set, otherwise the region's schema.
    """
    # local_class_scope is a free variable from the enclosing scope
    fixture_scope = local_class_scope if local_class_scope else region.schema()
    region_fixture = resolve_fixture(
        "config_entity", "config_entities", ConfigEntitiesFixture, fixture_scope)
    return region_fixture.projects(region)
def feature_class_lookup(self):
    """
    Merge the client region fixture's feature class lookup with the dynamic
    model class lookup built from this fixture's default DbEntities.
    """
    # Get the client region fixture (or the default region if the former doesn't exist)
    region_fixture = resolve_fixture("config_entity", "region", RegionFixture)
    base_lookup = region_fixture.feature_class_lookup()
    creator = FeatureClassCreator(self.config_entity)
    dynamic_lookup = creator.key_to_dynamic_model_class_lookup(self.default_db_entities())
    return merge(base_lookup, dynamic_lookup)
def feature_class_lookup(self):
    """
    Return the feature class lookup of the client global_config fixture
    (or the default global_config if the client one doesn't exist).
    """
    global_config_fixture = resolve_fixture(
        "config_entity", "global_config", GlobalConfigFixture)
    lookup = global_config_fixture.feature_class_lookup()
    # Merging with an empty dict returns a shallow copy of the lookup
    return merge(lookup, {})
def region_fixtures(**kwargs):
    """
    Convenience method to fetch all region fixtures of a client.
    Normally there is only one.
    """
    fixtures = [
        resolve_fixture("config_entity", "region", RegionFixture, schema, **kwargs)
        for schema in region_schemas_of_client()]
    # Several schemas may resolve to the same fixture class; de-dupe by class
    return unique(fixtures, lambda fixture: fixture.__class__)
def scenarios_of_project(project):
    # Resolve the ConfigEntitiesFixture as specific to the project's scope as
    # available. local_class_scope, region_keys, project_keys, scenario_keys
    # and class_scope are free variables captured from the enclosing scope.
    project_config_entities = resolve_fixture(
        "config_entity",
        "config_entities",
        ConfigEntitiesFixture,
        local_class_scope or project.schema())
    # Delegate scenario filtering to the fixture itself
    return project_config_entities.scenarios(
        project,
        region_keys=region_keys,
        project_keys=project_keys,
        scenario_keys=scenario_keys,
        class_scope=class_scope)
def db_entity_defaults(db_entity, config_entity=None):
    """
    Construct the default attribute dict used to update or create a DbEntity.

    :param db_entity: the DbEntity supplying any explicitly-set values
    :param config_entity: optional ConfigEntity used to resolve the client's
        import database connection; None for the abstract DbEntity case
    :return: dict of DbEntity attribute defaults
    """
    # Instantiate a FeatureClassCreator to make the FeatureClassConfiguration
    feature_class_creator = FeatureClassCreator(config_entity, db_entity, no_ensure=True)
    if config_entity:
        # Find the database of the configured client
        connection = resolve_fixture(None, "init", InitFixture, config_entity.schema()).import_database()
        # Strip the test_ prefix when running unit tests. Until we can auto-populate the test_ version
        # of the database, we want to simply rely on the manually configured source database
        if connection:
            connection['database'] = connection['database'].replace('test_', '')
    else:
        # No config_entity abstract DbEntity case
        connection = None

    return dict(
        # The name is passed in or the titleized version of key
        name=db_entity.name or titleize(db_entity.key),
        # Postgres URL for local sources, or possibly a remote url (e.g. for background layer sources)
        # Unless overridden, create the url according to the postgres URI scheme.
        # BUGFIX: credentials are separated from the host by '@' (was '/',
        # which produced a malformed URI)
        url=db_entity.url or \
            ('postgres://{user}:{password}@{host}:{port}/{database}'.format(
                **merge(dict(port=5432), connection)) if connection else None),
        # Normally equals the key, except for views of the table, like a Result DbEntity
        # Views leave this null and rely on query
        table=db_entity.table or (db_entity.key if not db_entity.query else None),
        # Query to create a "view" of the underlying data. Used by Result DbEntity instances
        query=db_entity.query,
        # How to group the features or query results. Not yet well hashed out
        group_by=db_entity.group_by,
        # The source DbEntity key if this DbEntity resulted from cloning a peer DbEntity
        source_db_entity_key=db_entity.source_db_entity_key,
        # Array used by remote data sources whose URLs have different host names
        # If so then the url will have a string variable for the host
        hosts=db_entity.hosts,
        # The User who created the DbEntity. TODO. Default should be an admin
        creator=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The User who updated the DbEntity. TODO. Default should be an admin
        # NOTE(review): this reads db_entity.creator, not db_entity.updater -- confirm intended
        updater=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The SRID of the Feature table
        srid=db_entity.srid,
        # This is a non-model object. So it is saved as a PickledObjectField
        # Whether the same instance is returned or not does not matter
        # If db_entity.feature_class_configuration is None, it will return None
        feature_class_configuration=feature_class_creator.complete_or_create_feature_class_configuration(
            db_entity.feature_class_configuration),
        no_feature_class_configuration=db_entity.no_feature_class_configuration
        # feature_behavior is handled internally by DbEntity
    )
def project_specific_scenario_fixtures(**kwargs):
    """
    Convenience method to find ScenarioFixture instances for all projects of the client.
    :kwargs: Optional arguments for result_fixture, such as the config_entity
    :return: the fixtures, de-duplicated by fixture class
    """
    fixtures = [
        resolve_fixture("config_entity", "scenario", ScenarioFixture, schema, **kwargs)
        for schema in project_schemas_of_client()]
    return unique(fixtures, lambda fixture: fixture.__class__)
def on_config_entity_post_save_user(sender, **kwargs):
    """
    Syncs the Users of a ConfigEntity's Groups with the client user fixtures.

    :param sender: the signal sender (unused)
    :param kwargs: expects 'instance' (the ConfigEntity); optionally 'created'
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    # Guard against recursive post-save publishing
    if config_entity._no_post_save_publishing:
        return

    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(
            username=UserGroupKey.SUPERADMIN)
        # Suppress publishers while saving the creator
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # TODO these should be importable on top. Something is messed up
    user_fixture = resolve_fixture("user", "user", UserFixture, config_entity.schema(),
                                   config_entity=config_entity)

    # Get the ConfigEntityGroups of the ConfigEntity. GlobalConfig uses SuperAdmin as its Group
    config_entity_groups = config_entity.config_entity_groups() if \
        not isinstance(config_entity, GlobalConfig) else \
        [Group.objects.get(name=UserGroupKey.SUPERADMIN)]

    # Find all existing users of all ConfigEntity Groups of the ConfigEntity
    # Note that we use values() instead of all() to get dicts with just needed fields instead of model instances
    # TODO remove username from here once all users have emails. update_or_create_user() checks username for uniqueness presently
    existing_user_dicts = flat_map(
        lambda group: group.user_set.all().values('email', 'username'),
        config_entity_groups)

    # Combine the existing users with the fixtures, giving the former preference. We favor
    # what's in the database because the user might have updated their profile
    # Only accept fixture users not matching users in the db (by email)
    existing_emails = map(
        lambda existing_user_dict: existing_user_dict['email'],
        existing_user_dicts)
    logger.debug("Found existing users %s" % ', '.join(existing_emails))
    new_fixture_users = filter(
        lambda fixture_user: fixture_user['email'] not in existing_emails,
        user_fixture.users())
    if len(new_fixture_users) > 0:
        logger.debug("Found new fixture users %s" % ', '.join(
            map(lambda fixture_user: fixture_user['email'], new_fixture_users)))
    user_dicts = existing_user_dicts + new_fixture_users

    # Update or create each user. This will create users of new fixtures and run post-save processing
    # on both existing and new.
    for user_dict in user_dicts:
        update_or_create_user(**user_dict)
    reset_queries()
def region_schemas_of_client(**kwargs):
    """
    All region schemas of the client. Normally there is only one.
    """
    client_fixture = resolve_fixture(
        "config_entity", "config_entities", ConfigEntitiesFixture, settings.CLIENT, **kwargs)
    # Qualify each region key with the client fixture's schema
    return ['%s__%s' % (client_fixture.schema, region_config['key'])
            for region_config in client_fixture.regions()]
def db_entity_defaults(db_entity, config_entity=None):
    """
    Construct the default attribute dict used to update or create a DbEntity.

    :param db_entity: the DbEntity supplying any explicitly-set values
    :param config_entity: optional ConfigEntity used to resolve the client's
        import database connection; None for the abstract DbEntity case
    :return: dict of DbEntity attribute defaults
    """
    # Instantiate a FeatureClassCreator to make the FeatureClassConfiguration
    feature_class_creator = FeatureClassCreator(config_entity, db_entity, no_ensure=True)
    if config_entity:
        # Find the database of the configured client
        connection = resolve_fixture(None, "init", InitFixture, config_entity.schema()).import_database()
        # Strip the test_ prefix when running unit tests. Until we can auto-populate the test_ version
        # of the database, we want to simply rely on the manually configured source database
        if connection:
            connection['database'] = connection['database'].replace('test_', '')
    else:
        # No config_entity abstract DbEntity case
        connection = None

    return dict(
        # The name is passed in or the titleized version of key
        name=db_entity.name or titleize(db_entity.key),
        # Postgres URL for local sources, or possibly a remote url (e.g. for background layer sources)
        # Unless overridden, create the url according to the postgres URI scheme.
        # BUGFIX: credentials are separated from the host by '@' (was '/',
        # which produced a malformed URI)
        url=db_entity.url or \
            ('postgres://{user}:{password}@{host}:{port}/{database}'.format(
                **merge(dict(port=5432), connection)) if connection else None),
        # Normally equals the key, except for views of the table, like a Result DbEntity
        # Views leave this null and rely on query
        table=db_entity.table or (db_entity.key if not db_entity.query else None),
        # Query to create a "view" of the underlying data. Used by Result DbEntity instances
        query=db_entity.query,
        # How to group the features or query results. Not yet well hashed out
        group_by=db_entity.group_by,
        # The source DbEntity key if this DbEntity resulted from cloning a peer DbEntity
        source_db_entity_key=db_entity.source_db_entity_key,
        # Array used by remote data sources whose URLs have different host names
        # If so then the url will have a string variable for the host
        hosts=db_entity.hosts,
        # The User who created the DbEntity. TODO. Default should be an admin
        creator=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The User who updated the DbEntity. TODO. Default should be an admin
        # NOTE(review): this reads db_entity.creator, not db_entity.updater -- confirm intended
        updater=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The SRID of the Feature table
        srid=db_entity.srid,
        # This is a non-model object. So it is saved as a PickledObjectField
        # Whether the same instance is returned or not does not matter
        # If db_entity.feature_class_configuration is None, it will return None
        feature_class_configuration=feature_class_creator.complete_or_create_feature_class_configuration(
            db_entity.feature_class_configuration),
        no_feature_class_configuration=db_entity.no_feature_class_configuration
        # feature_behavior is handled internally by DbEntity
    )
def project_schemas_of_client(**kwargs):
    """
    Extract all the schemas under the settings.CLIENT in [client]_config_entities.project
    :kwargs: Optional arguments for result_fixture, such as the config_entity
    :return: list of fully-qualified project schema strings
    """
    client_fixture = resolve_fixture(
        "config_entity", "config_entities", ConfigEntitiesFixture, settings.CLIENT, **kwargs)
    # Qualify each project key with the client fixture's schema
    return ['%s__%s' % (client_fixture.schema, project_config['key'])
            for project_config in client_fixture.projects()]
def get_client_source_data_connection():
    """
    Register and return a Django connection to the client's import database.

    Resolves the client InitFixture's import database settings, adapts them to
    Django's database-settings keys, and exposes them under the 'import' alias.
    """
    from footprint.client.configuration.utils import resolve_fixture
    from footprint.client.configuration.fixture import InitFixture

    source_db = resolve_fixture(None, 'init', InitFixture).import_database()
    # Translate the fixture's connection dict into Django settings keys,
    # keeping the original lowercase keys alongside
    source_db.update(
        ENGINE='django.contrib.gis.db.backends.postgis',
        OPTIONS={'autocommit': True},
        NAME=source_db['database'],
        PASSWORD=source_db['password'],
        USER=source_db['user'],
        HOST=source_db['host'])

    logger.info("Connecting to database {db} on {host}".format(
        db=source_db['NAME'], host=source_db['HOST']))

    connections.databases['import'] = source_db
    return connections['import']
def primary_base_layer_style(client_land_use_definition_class, visible=False):
    """
    Build the default LayerStyle for the client's primary base layer, or None
    when the client defines no land-use symbology fixture.
    """
    # Resolve the client's specific color lookup for the PrimaryParcelFeature land_use_definition
    symbology_fixture = resolve_fixture(
        "presentation", "land_use_symbology", LandUseSymbologyFixture, settings.CLIENT)
    if not symbology_fixture:
        return None
    # Style the foreign key attribute of the LandUseDefinition
    return create_layer_style_for_related_field(
        'land_use_definition__id',
        client_land_use_definition_class,
        symbology_fixture.land_use_color_lookup(),
        'land_use',
        visible)
def on_config_entity_post_save_user(sender, **kwargs):
    """
    Syncs the Users of a ConfigEntity's Groups with the client user fixtures.

    :param sender: the signal sender (unused)
    :param kwargs: expects 'instance' (the ConfigEntity); optionally 'created'
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    # Guard against recursive post-save publishing
    if config_entity._no_post_save_publishing:
        return

    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(username=UserGroupKey.SUPERADMIN)
        # Suppress publishers while saving the creator
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # TODO these should be importable on top. Something is messed up
    user_fixture = resolve_fixture("user", "user", UserFixture, config_entity.schema(),
                                   config_entity=config_entity)

    # Get the ConfigEntityGroups of the ConfigEntity. GlobalConfig uses SuperAdmin as its Group
    config_entity_groups = config_entity.config_entity_groups() if \
        not isinstance(config_entity, GlobalConfig) else \
        [Group.objects.get(name=UserGroupKey.SUPERADMIN)]

    # Find all existing users of all ConfigEntity Groups of the ConfigEntity
    # Note that we use values() instead of all() to get dicts with just needed fields instead of model instances
    # TODO remove username from here once all users have emails. update_or_create_user() checks username for uniqueness presently
    existing_user_dicts = flat_map(
        lambda group: group.user_set.all().values('email', 'username'),
        config_entity_groups
    )

    # Combine the existing users with the fixtures, giving the former preference. We favor
    # what's in the database because the user might have updated their profile
    # Only accept fixture users not matching users in the db (by email)
    existing_emails = map(lambda existing_user_dict: existing_user_dict['email'], existing_user_dicts)
    logger.debug("Found existing users %s" % ', '.join(existing_emails))
    new_fixture_users = filter(lambda fixture_user: fixture_user['email'] not in existing_emails,
                               user_fixture.users())
    if len(new_fixture_users) > 0:
        logger.debug("Found new fixture users %s" % ', '.join(map(lambda fixture_user: fixture_user['email'], new_fixture_users)))
    user_dicts = existing_user_dicts + new_fixture_users

    # Update or create each user. This will create users of new fixtures and run post-save processing
    # on both existing and new.
    for user_dict in user_dicts:
        update_or_create_user(**user_dict)
    reset_queries()
def update_or_create_policy_sets(config_entity, **kwargs):
    """
    Creates the global parent Policy and, for a GlobalConfig, its PolicySets,
    upon saving a config_entity if they do not yet exist.
    :param config_entity
    :param kwargs
    :return:
    """
    # Create top-level policy if needed.
    global_policy, _ = Policy.objects.update_or_create(
        key='global',
        schema=None,
        defaults=dict(
            name='Global',
            description='The parent policy of all',
            values={}))

    # Only GlobalConfig carries PolicySets
    if not isinstance(config_entity, GlobalConfig):
        return

    from footprint.client.configuration.utils import resolve_fixture
    from footprint.client.configuration.fixture import PolicyConfigurationFixture
    client_policy = resolve_fixture(
        "policy", "policy", PolicyConfigurationFixture,
        config_entity.schema(), config_entity=config_entity)

    # Create each policy set and attach it to the config_entity
    for policy_set_config in client_policy.policy_sets():
        policy_set, _ = PolicySet.objects.update_or_create(
            key=policy_set_config['key'],
            defaults=dict(
                name=policy_set_config['name'],
                description=policy_set_config.get('description', None)))
        policies = [global_policy.update_or_create_policy(policy_config)
                    for policy_config in policy_set_config.get('policies', [])]
        policy_set.policies.add(*policies)
        config_entity.add_policy_sets(policy_set)
def resolve_config_entity_fixture(cls, config_entity):
    """
    Resolve the fixture matching the given ConfigEntity's concrete type.

    :raises Exception: if config_entity matches none of the known types
    """
    from footprint.main.models.config.scenario import Scenario
    from footprint.main.models.config.project import Project
    from footprint.main.models.config.region import Region
    from footprint.main.models.config.global_config import GlobalConfig

    # Ordered most-specific-first; isinstance checks run in this order
    dispatch = [
        (Scenario, "scenario", ScenarioFixture),
        (Project, "project", ProjectFixture),
        (Region, "region", RegionFixture),
        (GlobalConfig, "global_config", GlobalConfigFixture),
    ]
    for config_entity_class, module_fragment, subclass in dispatch:
        if isinstance(config_entity, config_entity_class):
            return resolve_fixture(
                "config_entity", module_fragment, subclass,
                config_entity.schema(), config_entity=config_entity)
    raise Exception("config_entity %s doesn't match an expected type" % config_entity)
def create_scenario_clone(test_layer=False):
    """
    Clone the first non-clone FutureScenario via the client's import_scenarios
    configurations and return the resulting scenarios.

    :param test_layer: if True, run test_upload_layers on the source scenario first
    :return: the scenarios produced by scenarios_per_project
    """
    # Use the first FutureScenario that is not itself a clone as the origin
    scenario = FutureScenario.objects.filter(origin_instance__isnull=True)[0]
    if test_layer:
        # NOTE(review): the result is unused -- presumably run for side effects; confirm
        cloned_layers = test_upload_layers(scenario)

    config_entities_fixture = resolve_fixture("config_entity", "config_entities", ConfigEntitiesFixture)
    import_scenario_configurations = config_entities_fixture.import_scenarios(scenario)

    for new_scenario_configuration in import_scenario_configurations:
        # Wipe out data and instance if it already exists
        matches = scenario.__class__.objects.filter(key=new_scenario_configuration['key'])
        if matches:
            on_config_entity_pre_delete_data_import(
                None, instance=matches[0])
            matches.delete()

    # Save the scenario to simulate cloning
    # Cloning happens because future_scenario is the clone's origin_instance
    scenarios = scenarios_per_project(scenario.project, import_scenario_configurations)
    for s in scenarios:
        # Python 2 print statement; debug output of each clone
        print s, s.__dict__
    return scenarios
def update_or_create_policy_sets(config_entity, **kwargs):
    """
    Creates the global parent Policy and, for a GlobalConfig, its PolicySets,
    upon saving a config_entity if they do not yet exist.
    :param config_entity
    :param kwargs
    :return:
    """
    # Create top-level policy if needed.
    global_policy, _ = Policy.objects.update_or_create(
        key='global',
        schema=None,
        defaults=dict(
            name='Global',
            description='The parent policy of all',
            values={}))

    # Only GlobalConfig carries PolicySets
    if not isinstance(config_entity, GlobalConfig):
        return

    from footprint.client.configuration.utils import resolve_fixture
    from footprint.client.configuration.fixture import PolicyConfigurationFixture
    client_policy = resolve_fixture(
        "policy", "policy", PolicyConfigurationFixture,
        config_entity.schema(), config_entity=config_entity)

    # Create each policy set and attach it to the config_entity
    for policy_set_config in client_policy.policy_sets():
        policy_set, _ = PolicySet.objects.update_or_create(
            key=policy_set_config['key'],
            defaults=dict(
                name=policy_set_config['name'],
                description=policy_set_config.get('description', None)))
        policies = [global_policy.update_or_create_policy(policy_config)
                    for policy_config in policy_set_config.get('policies', [])]
        policy_set.policies.add(*policies)
        config_entity.add_policy_sets(policy_set)
def update_or_create_scenarios(projects=None, **kwargs):
    """
    Initializes scenarios using fixture data. The fixture data is expected in
    the form dict(BaseScenario=[dict(),...], FutureScenario=[dict()....])
    where the dicts in the former are used to create BaseScenario instances and
    those in the latter to create FutureScenario instances. Use kwargs to limit
    class processing to one model class with e.g. class=FutureScenario

    :param projects: optional list of Project instances; defaults to
        update_or_create_projects(**kwargs). (Was declared with a mutable
        default []; changed to None to avoid the shared-mutable-default
        pitfall -- behavior is unchanged since [] is falsy.)
    :return: the flattened list of scenarios
    """
    projects = projects or update_or_create_projects(**kwargs)

    # Get the scenario fixtures for each Project instance and build the Scenario instances.
    # Flatten the results and return them
    # scenario_fixtures may be a function that accepts the current project in order to filter the fixtures
    return flat_map(
        lambda project: scenarios_per_project(
            project,
            # Resolve the scenarios as specific to the project scope as available
            resolve_fixture("config_entity", "config_entities", ConfigEntitiesFixture,
                            project.schema()).scenarios(project),
            **kwargs),
        projects)
def construct_client_land_uses(client_built_form_class, prefix):
    """
    Update or create a client BuiltForm (with a styled medium) for each object
    of client_built_form_class, keyed by its land_use value.

    :param client_built_form_class: the client-specific BuiltForm model class
    :param prefix: string prefixed to each generated BuiltForm key
    :return: list of results from the per-object template creation
    """
    land_use_symbology_fixture = resolve_fixture("presentation", "land_use_symbology",
                                                 LandUseSymbologyFixture, settings.CLIENT)
    land_use_color_lookup = land_use_symbology_fixture.land_use_color_lookup()

    def update_or_create_template(land_use_definition):
        # Build a stable key: integer land_use values are used verbatim,
        # everything else is slugified with dashes converted to underscores
        logger.debug("creating template for landuse: {lu}".format(
            lu=land_use_definition.land_use))
        key = '%s__%s' % (
            prefix,
            land_use_definition.land_use if isinstance(
                land_use_definition.land_use, int) else slugify(
                str(land_use_definition.land_use)).replace('-', '_'))
        logger.debug("Land Use Key: {key}".format(key=key))
        built_form = client_built_form_class.objects.update_or_create(
            key=key,
            defaults=dict(
                creator=get_user_model().objects.get(
                    username=UserGroupKey.SUPERADMIN),
                updater=get_user_model().objects.get(
                    username=UserGroupKey.SUPERADMIN),
                name=land_use_definition.land_use,
                land_use_definition=land_use_definition,
            ))[0]
        # Attach the medium styled from the client's color lookup (None when unmapped)
        built_form.medium = client_built_form_class.update_or_create_built_form_layer_style(
            key,
            land_use_color_lookup.get(land_use_definition.land_use, None))
        built_form.save()

    # NOTE(review): iterates client_built_form_class.objects.all() while the
    # callback's parameter is named land_use_definition -- confirm the
    # iterated objects expose .land_use as expected
    return map(update_or_create_template, client_built_form_class.objects.all())
def update_or_create_scenarios(projects=None, **kwargs):
    """
    Initializes scenarios using fixture data. The fixture data is expected in the form
    dict(BaseScenario=[dict(),...], FutureScenario=[dict()....]) where the dicts in the former
    are used to create BaseScenario instances and those in the latter to create FutureScenario
    instances. Use kwargs to limit class processing to one model class with e.g.
    class=FutureScenario

    :param projects: optional list of Project instances; when falsy, all projects are
        created/updated via update_or_create_projects(**kwargs)
    :param kwargs: passed through to update_or_create_projects and scenarios_per_project
    :return: the flattened list of Scenario instances across all projects
    """
    # None (not a mutable [] default) as the sentinel; both are falsy so behavior is unchanged
    projects = projects or update_or_create_projects(**kwargs)

    # Get the scenario fixtures for each Project instance and build the Scenario instances.
    # Flatten the results and return them
    # scenario_fixtures may be a function that accepts the current project in order to filter the fixtures
    return flat_map(
        lambda project: scenarios_per_project(
            project,
            # Resolve the scenarios as specific to the project scope as available
            resolve_fixture(
                "config_entity",
                "config_entities",
                ConfigEntitiesFixture,
                project.schema()).scenarios(project),
            **kwargs),
        projects)
def update_or_create_users():
    """Create or update the default users declared by the client's user fixture."""
    from footprint.client.configuration.utils import resolve_fixture
    fixture = resolve_fixture("user", "user", UserFixture, settings.CLIENT)
    # Each fixture entry is a dict of keyword arguments for update_or_create_user
    for user_config in fixture.users():
        update_or_create_user(**user_config)
def initialize_client_data():
    """Populate the client-specific models via the client's init fixture."""
    logger.info("importing client fixtures for {client}".format(client=settings.CLIENT))
    init_fixture = resolve_fixture(None, "init", InitFixture, settings.CLIENT)
    init_fixture.populate_models()
def initialize_client_data():
    """Load the configured client's init fixture and populate its models."""
    logger.info("importing client fixtures for {client}".format(
        client=settings.CLIENT))
    fixture = resolve_fixture(None, "init", InitFixture, settings.CLIENT)
    fixture.populate_models()
# UrbanFootprint v1.5 # Copyright (C) 2017 Calthorpe Analytics # # This file is part of UrbanFootprint version 1.5 # # UrbanFootprint is distributed under the terms of the GNU General # Public License version 3, as published by the Free Software Foundation. This # code is distributed WITHOUT ANY WARRANTY, without implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License v3 for more details; see <http://www.gnu.org/licenses/>. __author__ = 'calthorpe_analytics' # Import all client models that have static tables so that we have a single migration path from footprint.client.configuration.fixture import InitFixture from footprint.client.configuration.utils import resolve_fixture, resolve_client_module from django.conf import settings # Load all client modules into the system, even though we only will configure one CLIENT # This forces South to create all client specific table definitions for client in settings.ALL_CLIENTS: client_init = resolve_fixture(None, "init", InitFixture, client) #client_init.import_database() for module_tuple in client_init.model_class_modules(): # Load the module so that Django and South find the classes resolve_client_module(module_tuple[0], module_tuple[1], client)
def update_or_create_result_libraries(config_entity, **kwargs):
    """
    Creates a ResultLibrary and its Result instances upon saving a config_entity
    if they do not yet exist.

    :param config_entity: the ConfigEntity whose result libraries/results are synced
    :param kwargs: 'db_entity_keys' Optional list to limit the Results processed. Any result
        whose result_db_entity_key or source_db_entity_key is in db_entity_keys will pass through.
    :return: None
    """
    # Force adoption. This is primarily so scenarios get the result_library from their parent project.
    for presentation in config_entity.presentation_set.all():
        presentation_class = type(presentation)
        # Downcast to the concrete subclass instance to reach subclass-specific fields
        presentation_subclass = presentation_class.objects.get_subclass(id=presentation.id)
        if presentation_subclass.presentation_media_alias:
            presentation_subclass._adopt_from_donor(presentation_subclass.presentation_media_alias)

    # Default the filter to every DbEntity key owned by the config_entity
    db_entity_keys = kwargs.get(
        'db_entity_keys',
        map(lambda db_entity: db_entity.key, config_entity.owned_db_entities())
    )

    from footprint.client.configuration.fixture import ResultConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture
    # Resolve the most schema-specific result fixture available for this config_entity
    client_result = resolve_fixture(
        "presentation",
        "result",
        ResultConfigurationFixture,
        config_entity.schema(),
        config_entity=config_entity)
    client_result.update_or_create_media(config_entity, db_entity_keys=kwargs.get('db_entity_keys'))

    # Create each ResultLibrary and store them as a dict keyed by their key
    result_library_lookup = map_to_dict(lambda result_library_config: [
        result_library_config.key,
        ResultLibrary.objects.update_or_create(
            key=result_library_config.key,
            config_entity=config_entity,
            scope=config_entity.schema(),
            defaults=dict(
                name=result_library_config.name.format(titleize(config_entity.key)),
                description=result_library_config.description.format(config_entity.name)
            )
        )[0]],
        client_result.result_libraries())

    # Create each configured Result whose result or source DbEntity key passes the filter
    for result_config in filter(
            lambda result: result.result_db_entity_key in db_entity_keys or result.source_db_entity_key in db_entity_keys,
            client_result.results()):

        logger.info("Result Publishing Result DbEntity Key: %s from Source DbEntity Key %s" %
                    (result_config.result_db_entity_key, result_config.source_db_entity_key))

        # Make the db_entity the default selected one for its key
        # Save without re-triggering post-save publishing, restoring the prior flag after
        previous = config_entity._no_post_save_publishing
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = previous

        # Get the Results DbEntity if it already exists
        existing_db_entity_interest = one_or_none(DbEntityInterest.objects.filter(
            config_entity=config_entity,
            db_entity__key=result_config.result_db_entity_key
        ))
        existing_result = one_or_none(Result.objects.filter(db_entity_interest=existing_db_entity_interest))

        # Create the db_entity and db_entity_interest for the result if it doesn't exist
        db_entity_interest = result_config.update_or_create_db_entity_interest(
            config_entity, existing_result and existing_result.db_entity_interest)
        db_entity = db_entity_interest.db_entity

        # Test the query. This will raise an error if the query was configured wrong
        db_entity.parse_query(config_entity)

        # Attribute creation/updates to the superadmin user
        dummy_user = get_user_model().objects.get(username=UserGroupKey.SUPERADMIN)

        # Create a result for each result key given.
        result, created, updated = Result.objects.update_or_create(
            db_entity_interest=db_entity_interest,
            name=result_config.name,
            defaults=dict(
                # Use the Result's custom Medium, keyed by the Result key
                medium=result_config.resolve_result_medium(),
                configuration=result_config.get_presentation_medium_configuration(),
                creator=dummy_user,
                updater=dummy_user)
        )

        # If created, add the result to the matching result libraries, always including the Default Library
        # Use the related_collection_adoption to make sure donor results are adopted prior
        # to adding the result if they haven't yet been
        if created:
            for library_key in [ResultLibraryKey.DEFAULT] + result_config.library_keys:
                result_library_lookup[library_key]._add('presentation_media', result)
def update_or_create_result_libraries(config_entity, **kwargs):
    """
    Creates a ResultLibrary and its Result instances upon saving a config_entity
    if they do not yet exist.

    :param config_entity: the ConfigEntity whose result libraries/results are synced
    :param kwargs: 'db_entity_keys' Optional list to limit the Results processed. Any result
        whose result_db_entity_key or source_db_entity_key is in db_entity_keys will pass through.
    :return: None
    """
    # Force adoption. This is primarily so scenarios get the result_library from their parent project.
    for presentation in config_entity.presentation_set.all():
        presentation_class = type(presentation)
        # Downcast to the concrete subclass instance to reach subclass-specific fields
        presentation_subclass = presentation_class.objects.get_subclass(
            id=presentation.id)
        if presentation_subclass.presentation_media_alias:
            presentation_subclass._adopt_from_donor(
                presentation_subclass.presentation_media_alias)

    # Default the filter to every DbEntity key owned by the config_entity
    db_entity_keys = kwargs.get(
        'db_entity_keys',
        map(lambda db_entity: db_entity.key, config_entity.owned_db_entities()))

    from footprint.client.configuration.fixture import ResultConfigurationFixture
    from footprint.client.configuration.utils import resolve_fixture
    # Resolve the most schema-specific result fixture available for this config_entity
    client_result = resolve_fixture("presentation",
                                    "result",
                                    ResultConfigurationFixture,
                                    config_entity.schema(),
                                    config_entity=config_entity)
    client_result.update_or_create_media(
        config_entity, db_entity_keys=kwargs.get('db_entity_keys'))

    # Create each ResultLibrary and store them as a dict keyed by their key
    result_library_lookup = map_to_dict(
        lambda result_library_config: [
            result_library_config.key,
            ResultLibrary.objects.update_or_create(
                key=result_library_config.key,
                config_entity=config_entity,
                scope=config_entity.schema(),
                defaults=dict(name=result_library_config.name.format(
                    titleize(config_entity.key)),
                    description=result_library_config.description.format(config_entity.name)))[0]
        ], client_result.result_libraries())

    # Create each configured Result whose result or source DbEntity key passes the filter
    for result_config in filter(
            lambda result: result.result_db_entity_key in db_entity_keys or
            result.source_db_entity_key in db_entity_keys,
            client_result.results()):

        logger.info(
            "Result Publishing Result DbEntity Key: %s from Source DbEntity Key %s"
            % (result_config.result_db_entity_key,
               result_config.source_db_entity_key))

        # Make the db_entity the default selected one for its key
        # Save without re-triggering post-save publishing, restoring the prior flag after
        previous = config_entity._no_post_save_publishing
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = previous

        # Get the Results DbEntity if it already exists
        existing_db_entity_interest = one_or_none(
            DbEntityInterest.objects.filter(
                config_entity=config_entity,
                db_entity__key=result_config.result_db_entity_key))
        existing_result = one_or_none(
            Result.objects.filter(
                db_entity_interest=existing_db_entity_interest))

        # Create the db_entity and db_entity_interest for the result if it doesn't exist
        db_entity_interest = result_config.update_or_create_db_entity_interest(
            config_entity,
            existing_result and existing_result.db_entity_interest)
        db_entity = db_entity_interest.db_entity

        # Test the query. This will raise an error if the query was configured wrong
        db_entity.parse_query(config_entity)

        # Attribute creation/updates to the superadmin user
        dummy_user = get_user_model().objects.get(
            username=UserGroupKey.SUPERADMIN)

        # Create a result for each result key given.
        result, created, updated = Result.objects.update_or_create(
            db_entity_interest=db_entity_interest,
            name=result_config.name,
            defaults=dict(
                # Use the Result's custom Medium, keyed by the Result key
                medium=result_config.resolve_result_medium(),
                configuration=result_config.get_presentation_medium_configuration(),
                creator=dummy_user,
                updater=dummy_user))

        # If created, add the result to the matching result libraries, always including the Default Library
        # Use the related_collection_adoption to make sure donor results are adopted prior
        # to adding the result if they haven't yet been
        if created:
            for library_key in [ResultLibraryKey.DEFAULT] + result_config.library_keys:
                result_library_lookup[library_key]._add(
                    'presentation_media', result)
from footprint.client.configuration.fixture import ConfigEntitiesFixture, PolicyConfigurationFixture, InitFixture
from footprint.client.configuration.utils import resolve_fixture
from footprint.main.lib.functions import remove_keys, merge, flat_map, map_to_dict
from footprint.main.models.geospatial.behavior import BehaviorKey, Behavior
from footprint.main.models.keys.user_group_key import UserGroupKey
from footprint.main.models.presentation.medium import Medium
from footprint.main.models.category import Category
from footprint.main.models.config.project import Project
from footprint.main.models.config.region import Region
from footprint.main.models.config.global_config import global_config_singleton
from django.conf import settings

__author__ = 'calthorpe_analytics'

client_name = settings.CLIENT
# Fixtures resolved once at import time for the configured client
config_entities_fixture = resolve_fixture("config_entity", "config_entities", ConfigEntitiesFixture, client_name)
policy_fixture = resolve_fixture("policy", "policy", PolicyConfigurationFixture, settings.CLIENT)
init_fixture = resolve_fixture(None, "init", InitFixture, settings.CLIENT)

import logging
logger = logging.getLogger(__name__)


class SQLImportError(Exception):
    # Signals a failure while importing SQL data
    def __init__(self, value):
        super(SQLImportError, self).__init__(value)


def update_of_create_regions(region_fixtures=config_entities_fixture.regions(), **kwargs):
    """ Create test regions according to the sample
from footprint.client.configuration.fixture import ConfigEntitiesFixture, PolicyConfigurationFixture, InitFixture
from footprint.client.configuration.utils import resolve_fixture
from footprint.main.lib.functions import remove_keys, merge, flat_map, map_to_dict
from footprint.main.models.geospatial.behavior import BehaviorKey, Behavior
from footprint.main.models.keys.user_group_key import UserGroupKey
from footprint.main.models.presentation.medium import Medium
from footprint.main.models.category import Category
from footprint.main.models.config.project import Project
from footprint.main.models.config.region import Region
from footprint.main.models.config.global_config import global_config_singleton
from django.conf import settings

__author__ = 'calthorpe_analytics'

client_name = settings.CLIENT
# Fixtures resolved once at import time for the configured client
config_entities_fixture = resolve_fixture("config_entity", "config_entities", ConfigEntitiesFixture, client_name)
policy_fixture = resolve_fixture("policy", "policy", PolicyConfigurationFixture, settings.CLIENT)
init_fixture = resolve_fixture(None, "init", InitFixture, settings.CLIENT)

import logging
logger = logging.getLogger(__name__)


class SQLImportError(Exception):
    # Signals a failure while importing SQL data
    def __init__(self, value):
        super(SQLImportError, self).__init__(value)


def update_of_create_regions(
        region_fixtures=config_entities_fixture.regions(), **kwargs):
def on_config_entity_post_save_group(sender, **kwargs):
    """
    Syncs the user, groups, and permissions for the ConfigEntity.

    Some ConfigEntity classes create their own Groups and default Users. This
    makes it easy to give a client-specific user permission to certain
    ConfigEntity by joining the latter's group.

    :param sender: the signal sender
    :param kwargs: signal kwargs; 'instance' holds the ConfigEntity, optional
        'user' is the acting user, 'created' indicates a fresh instance
    :return: None
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    user = kwargs.get('user')
    logger.info(
        "Handler: post_save_user for config_entity {config_entity} and user {username}"
        .format(config_entity=config_entity.name,
                username=user.username if user else 'undefined'))

    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(
            username=UserGroupKey.SUPERADMIN)
        # Save without re-triggering post-save publishing
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # First update_or_create any default groups. This usually just applies to global_config
    from footprint.client.configuration.fixture import UserFixture
    from footprint.client.configuration.utils import resolve_fixture
    user_fixture = resolve_fixture("user",
                                   "user",
                                   UserFixture,
                                   config_entity.schema(),
                                   config_entity=config_entity)
    for group_fixture in user_fixture.groups():
        group = update_or_create_group(**group_fixture)
        logger.info(
            "User Publishing. For ConfigEntity %s synced global UserGroup: %s"
            % (config_entity.name, group.name))

    # Sync permissions for the ConfigEntity
    # Resolve the default ConfigEntity permissions for this config_entity
    # Update or Create the ConfigEntity Group(s) for this ConfigEntity
    config_entity_groups = _update_or_create_config_entity_groups(
        config_entity)
    # Get the mapping of groups to permission types for the config_entity's most relevant fixture
    # These group keys are generally all global groups.
    from footprint.client.configuration.fixture import ConfigEntitiesFixture
    config_entities_fixture = resolve_fixture("config_entity",
                                              "config_entities",
                                              ConfigEntitiesFixture,
                                              config_entity.schema())
    permission_lookup = config_entities_fixture.default_config_entity_permissions()
    # Set the permissions for the config_entity groups. This will also set all superior group permissions
    # to the same permissions or greater if they match something in the permission_lookup
    config_entity_group_permissions = sync_config_entity_group_permissions(
        config_entity,
        config_entity_groups,
        permission_lookup,
        permission_key_class=ConfigEntityPermissionKey,
        **kwargs)

    # Give the groups read permissions on the ancestor config_entities
    # TODO restrict this access further for the UserGroupKey.DEMO group
    groups = Group.objects.filter(
        name__in=config_entity_group_permissions.keys())
    # Use a distinct loop variable: the original rebound config_entity here,
    # shadowing the function's main variable for all subsequent code
    for ancestor in config_entity.ancestors:
        ancestor.assign_permission_to_groups(groups, PermissionKey.VIEW)

    # TODO tell children to add themselves to all ancestors (resync)
    # This will only be needed if the parent ConfigEntity group permission configuration changes
    reset_queries()