def config_entity_subclassed(self):
    """
    Return self.config_entity resolved to its concrete ConfigEntity subclass.

    This resolution should ideally be handled by Django itself (perhaps in a
    newer version); otherwise refactor to generalize.
    """
    resolved = ConfigEntity._subclassed(self.config_entity)
    return resolved
def config_entity_subclassed(self):
    """
    Resolves the config_entity to its subclass version. This garbage should all be done
    elegantly by Django, maybe in the newest version. Otherwise refactor to generalize
    :return: the subclassed ConfigEntity instance
    """
    # Delegate to the ConfigEntity helper; the stored self.config_entity may be
    # a base-class instance that must be downcast to its concrete subclass.
    return ConfigEntity._subclassed(self.config_entity)
def lineage(cls):
    """
    Return the classes that can act as a parent: a PresentationConfig can have
    any ConfigEntity subclass for a parent.

    :param cls: the receiving class
    :return: list of the direct subclasses of ConfigEntity
    """
    subclasses = ConfigEntity.__subclasses__()
    return subclasses
def lineage(cls):
    """
    A PresentationConfig can have any ConfigEntity for a parent
    :param cls:
    :return: list of direct ConfigEntity subclasses
    """
    # __subclasses__() only yields direct subclasses of ConfigEntity,
    # not deeper descendants.
    return ConfigEntity.__subclasses__()
def create_related_field(self, field_name, related_field_configuration):
    """
    Creates a ForeignKey or ManyToMany field based on related_field_configuration.

    :param field_name: name of the relation field to create
    :param related_field_configuration: A dict containing related_key or related_class_name:
        related_key: the key of the sibling of the config_entity whose dynamic_model_class
            is the relation type. For DbEntity/Features this would be the db_entity_key and
            the dynamic_model_class would be its FeatureClass. For AnalysisModule this would
            be the analysis module key and its dynamic class.
        related_class_name: rather than related_key, any model class, such as BuiltForm,
            to relate to.
        single: if True this is a ForeignKey (toOne) relationship. Otherwise a ManyToMany
            is created.
    :return: A two-item list. First item is the field name and the second is the field.
    :raise Exception: if neither related_key nor related_class_name is configured.
    """
    config_entity = ConfigEntity._subclassed_by_id(self.configuration.scope)

    # TODO temp coverage of a key name change: accept the legacy 'related_db_entity_key'
    # as an alias for 'related_key'. Fixed to resolve into a local variable instead of
    # writing the alias back into the caller's configuration dict (the original mutated
    # related_field_configuration in place, a surprising side effect).
    related_key = related_field_configuration.get(
        'related_key',
        related_field_configuration.get('related_db_entity_key'))

    if related_key:
        # The field name matches the key of a peer db_entity--resolve its feature class
        related_db_entity = get_single_value_or_none(
            config_entity.computed_db_entities(key=related_key))
        # Always call with no_ensure=True since the config_entity is the same.
        # Otherwise we'd get infinite recursion
        related_class_name_or_model = self.__class__(
            self.config_entity, related_db_entity, no_ensure=True).dynamic_model_class()
    elif related_field_configuration.get('related_class_name'):
        # A model class such as BuiltForm
        related_class_name_or_model = resolve_module_attr(
            related_field_configuration['related_class_name'])
    else:
        raise Exception(
            "No related_key or related_class_name found on feature_class_configuration "
            "for self.configuration.key %s" % self.configuration.key)

    logger.info('Creating %r related field for %s using %s',
                'single' if related_field_configuration.get('single') else 'm2m',
                field_name,
                related_field_configuration)

    if related_field_configuration.get('single'):
        # toOne relationship
        return [field_name, models.ForeignKey(related_class_name_or_model, null=True)]
    # toMany relationship
    return [field_name, models.ManyToManyField(
        related_class_name_or_model,
        # Pass a custom, readable table name for the through class for ManyToMany relations
        db_table='"{schema}"."{table}_{field_name}"'.format(
            schema=config_entity.schema(),
            table=self.configuration.key,
            field_name=field_name))]
def create_related_field(self, field_name, related_field_configuration):
    """
    Creates a ForeignKey or ManyToMany field based on related_field_configuration
    :param field_name:
    :param related_field_configuration: A dict containing related_key or related_class_name
        related_key: the instance of the sibling key of the config_entity whose
            dynamic_model_class is the relation type. For DbEntity/Features this would
            be the db_entity_key and the dynamic_model_class would be its FeatureClass
            For AnalysisModule this would be the analysis module key and its dynamic class
        related_class_name: rather than related_key, any model class, such as BuiltForm,
            to relate to.
        single: if True this is a ForeignKey (toOne) relationship.
            Otherwise a ManyToMany is created
    :return: A two-item tuple. First item is the field name and the second is the field.
    """
    # Resolve the owning ConfigEntity from the configuration's scope id
    config_entity = ConfigEntity._subclassed_by_id(self.configuration.scope)
    # TODO temp coverage of a key name name change
    # NOTE(review): this writes the normalized key back into the caller's dict --
    # a side-effecting mutation of the input argument; confirm no caller depends on it.
    related_field_configuration['related_key'] = related_field_configuration.get('related_key', related_field_configuration.get('related_db_entity_key'))
    if related_field_configuration.get('related_key', False):
        # field name matches name of peer self.db_entity_key--get it's feature class name
        related_db_entity = get_single_value_or_none(config_entity.computed_db_entities(key=related_field_configuration['related_key']))
        # Always call with no_ensure=True since the config_entity is the same.
        # Otherwise we'd get infinite recursion
        related_class_name_or_model = self.__class__(self.config_entity, related_db_entity, no_ensure=True).dynamic_model_class()
    elif related_field_configuration.get('related_class_name', None):
        # A model class such as BuiltForm
        related_class_name_or_model = resolve_module_attr(related_field_configuration['related_class_name'])
    else:
        raise Exception("No related_key or related_class_name found on feature_class_configuration for self.configuration.key %s" % self.configuration.key)
    logger.info('Creating %r related field for %s using %s',
                related_field_configuration.get('single', None) and 'single' or 'm2m',
                field_name,
                related_field_configuration)
    if related_field_configuration.get('single', None):
        # toOne: nullable ForeignKey
        return [field_name, models.ForeignKey(related_class_name_or_model, null=True)]
    else:
        # toMany: ManyToMany with an explicit schema-qualified through-table name
        return [field_name, models.ManyToManyField(related_class_name_or_model,
            # Pass a custom, readable table name for the through class for ManyToMany relations
            db_table='"{schema}"."{table}_{field_name}"'.format(
                schema=config_entity.schema(),
                table=self.configuration.key,
                field_name=field_name))]
def geography_scope(self):
    """
    Resolve the ConfigEntity acting as the geography scope.

    When a configuration with a key is present, its geography_scope id is used;
    otherwise this falls back to the id of self.config_entity.
    """
    # NOTE(review): the guard tests self.configuration.key but the value read is
    # self.configuration.geography_scope -- confirm this asymmetry is intentional.
    if self.configuration and self.configuration.key:
        scope_id = self.configuration.geography_scope
    else:
        scope_id = self.config_entity.id
    return ConfigEntity._subclassed_by_id(scope_id)
def run_footprint_init(self, *args, **options):
    """
    Management-command entry point that (re)initializes Footprint data.

    Normalizes comma-separated option strings into lists, then, depending on
    options, deletes and recreates ConfigEntity instances ('recreate'),
    purges soft-deleted ones ('recycle'), and removes cloned/uploaded
    DbEntities, Layers and (disabled) BuiltForms ('delete_clones',
    'delete_scenario_clones'). Destructive: drops schemas and tables.
    """
    if not settings.CELERY_ALWAYS_EAGER:
        # NOTE(review): the message says CELERY_ALWAYS_EQUAL but the setting
        # checked is CELERY_ALWAYS_EAGER -- looks like a typo in the message.
        raise Exception('This command must run with settings.CELERY_ALWAYS_EQUAL = True. '
                        'Add --settings=footprint.settings_init to the command line.')
    # Comma-separated DbEntity keys to limit processing, or None for all
    db_entity_keys = options.get('db_entity_keys').split(',') if options.get('db_entity_keys') else None
    # Replace so we can use options as kwargs
    options['db_entity_keys'] = db_entity_keys
    config_entity_keys = options.get('config_entity_keys').split(',') if options.get('config_entity_keys') else None
    # Replace so we can use options as kwargs
    options['config_entity_keys'] = config_entity_keys
    if not options.get('run_analysis'):
        # Globally suppress AnalysisModule post-save task runs unless requested
        AnalysisModule._no_post_save_task_run_global = True
    # Resolve the optional comma-separated 'class' option into model classes.
    # Python 2 map/filter return lists, so limit_to_classes is a plain list.
    limit_to_classes = map(
        lambda cls: resolve_model('main.%s' % cls),
        (options.get('class').split(',') if options.get('class') else [])
    )
    options['limit_to_classes'] = limit_to_classes

    # Perforance testing
    if options.get('memory'):
        ConfigEntity.init_heapy()
        ConfigEntity.start_heapy_diagnosis()

    # Delete all ConfigEntity intances so they can be recreated.
    # This will cascade delete related models, but it doesn't delete
    # BuiltForms and other independent models
    if options.get('recreate'):
        for cls in filter_classes(limit_to_classes):
            cls.objects.all().delete()

    # Delete deleted config_entities
    if options.get('recycle'):
        for cls in filter_classes(limit_to_classes):
            cls.objects.filter(deleted=True).delete()

    if options.get('delete_clones') or options.get('delete_scenario_clones'):
        # Delete clones and uploads
        for cls in filter_classes(limit_to_classes):
            all_config_entities = cls.objects.all()
            for config_entity in all_config_entities:
                if options.get('delete_clones'):
                    # DbEntities that are clones (have an origin_instance) or whose
                    # feature_class_configuration was generated (e.g. from uploads)
                    db_entities = map(
                        lambda db_entity_interest: db_entity_interest.db_entity,
                        DbEntityInterest.objects.filter(
                            config_entity=config_entity,
                            db_entity__origin_instance__isnull=False)
                    ) +\
                    filter(
                        lambda db_entity: db_entity.feature_class_configuration and \
                            (isinstance(db_entity.feature_class_configuration, dict) or db_entity.feature_class_configuration.generated),
                        config_entity.computed_db_entities())
                    # Layers in this config_entity that reference the doomed db_entities
                    layers_to_remove = Layer.objects.filter(layer_libraries__config_entity__in=[config_entity],
                                                            db_entity_interest__db_entity__key__in=map(lambda db_entity: db_entity.key, db_entities))
                    for layer in layers_to_remove:
                        # Drop the layer_selection classes
                        layer_selection_class = get_or_create_layer_selection_class_for_layer(layer)
                        drop_tables_for_dynamic_classes(
                            layer_selection_class,
                            layer_selection_class.features.field.rel.through
                        )
                    layers_to_remove.delete()
                    # Also clean up layers whose db_entity_interest no longer resolves
                    for layer in Layer.objects.all():
                        try:
                            layer.db_entity_interest.db_entity
                        except:
                            # orphan
                            try:
                                # Drop the layer_selection classes
                                layer_selection_class = get_or_create_layer_selection_class_for_layer(layer)
                                drop_tables_for_dynamic_classes(
                                    layer_selection_class,
                                    layer_selection_class.features.field.rel.through
                                )
                                layer.delete()
                            except:
                                # Best-effort orphan cleanup; failures are silently ignored
                                pass
                    # DbEntities
                    for db_entity in db_entities:
                        feature_class = None
                        try:
                            feature_class = FeatureClassCreator(config_entity, db_entity).dynamic_model_class()
                        except Exception, e:
                            logger.warn("No feature class for db_entity %s could be created. Exception: %s" % (db_entity.name, e.message))
                        # Drop the feature data tables, then the DbEntity row itself
                        DeleteImportProcessor().drop_data(config_entity, db_entity)
                        db_entity.delete()
            if issubclass(cls, Scenario):
                cloned_config_entities = cls.objects.filter(origin_instance__isnull=False)
                # ConfigEntities and their schemas
                if options.get('delete_clones') or options.get('delete_scenario_clones'):
                    for config_entity in cloned_config_entities:
                        # Drop the clone's whole postgres schema, then its owned DbEntities
                        PGNamespace.objects.drop_schema(config_entity.schema())
                        for db_entity in config_entity.owned_db_entities():
                            db_entity.delete()
                    cloned_config_entities.delete()
        if options.get('delete_clones') and False:
            # NOTE(review): 'and False' permanently disables this BuiltForm cleanup branch
            for built_form_set in BuiltFormSet.objects.all():
                built_form_set.built_forms.remove(*built_form_set.built_forms.filter(origin_instance__isnull=False))
            # BuiltForms
            BuiltForm.objects.filter(origin_instance__isnull=False).delete()
            # Orphaned BuiltForm assets (only an issue when corrupt saves have happened)
            # NOTE(review): num_placetypes is Count('building'), not Count('placetype') --
            # possible copy/paste typo; confirm before relying on this filter.
            BuildingAttributeSet.objects.annotate(
                num_buildings=Count('building'),
                num_buildingtypes=Count('buildingtype'),
                num_placetypes=Count('building')).filter(
                num_buildings=0, num_buildingtypes=0, num_placetypes=0).delete()
            Medium.objects.annotate(num_built_form_sets=Count('builtform')).filter(num_built_form_sets=0, key__startswith='built_form').delete()
            BuiltFormExample.objects.annotate(num_built_form_sets=Count('builtform')).filter(num_built_form_sets=0).delete()
def on_db_entity_interest_post_save(sender, **kwargs):
    """
    Called after a DbEntityInterest saves, but not when a config_entity is running post_save publishers
    In other words, this is only called after a direct DbEntityInterest save/update.
    This does the same as post_save_config_entity, but starts with the 'post_save_config_entity_db_entities'
    signal to do only DbEntity dependent presentation.
    """
    db_entity_interest = kwargs['instance']
    config_entity = ConfigEntity._subclassed(db_entity_interest.config_entity)
    db_entity = db_entity_interest.db_entity
    # TODO The default user should be the admin
    user = db_entity.updater if db_entity.updater else get_user_model().objects.get(username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity presentation should always be disabled if we are saving a ConfigEntity
    logger.info(
        "Handler: post_save_db_entity_interest for config_entity {config_entity}, db_entity {db_entity}, "
        "and user {username}.".format(config_entity=config_entity,
                                      db_entity=db_entity_interest.db_entity,
                                      username=user.username))
    if kwargs.get('created', None):
        db_entity = db_entity_interest.db_entity
        # TODO
        # While we test upload, just delete the previous DbEntitys with the same key name
        # in the ConfigEntity.
        db_entity_interest.config_entity.db_entities.filter(
            key=db_entity.key).exclude(id=db_entity.id).delete()
        # Make sure the db_entity's schema matches the config_entity's if not set
        # TODO we assume that the schema should match the config_entity, rather than
        # an ancestor or the config_entity (like the project or a scenario). There
        # are many cases where the schema should not be that of the config_entity, so
        # we might want to remove this default and force the saver to set it
        if not db_entity.schema or not db_entity.table:
            db_entity.schema = db_entity.schema or db_entity_interest.config_entity.schema()
            # Always base the table name on the key
            db_entity.table = db_entity.key
            db_entity_interest.db_entity.save()

    if db_entity_interest.config_entity.deleted:
        # Do nothing for deleted config_entities
        return

    # Define the data_importer if not already set
    if not (db_entity.feature_class_configuration and db_entity.feature_class_configuration.data_importer):
        feature_class_configuration = db_entity.feature_class_configuration = db_entity.feature_class_configuration or FeatureClassConfiguration()
        # Choose the correct importer, if any, to set up the feature_class_configuration and features
        if db_entity.origin_instance:
            # Import from the origin_instance. This could be a full copy or from the current layer selection features
            feature_class_configuration.data_importer = full_module_path(OriginDbEntityProcessor)
        elif '.json' in db_entity.url.lower():
            # Import it using the geojson importer
            feature_class_configuration.data_importer = full_module_path(GeoJsonProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif '.zip' in db_entity.url.lower():
            feature_class_configuration.data_importer = full_module_path(ZippedSqlFileProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif not db_entity.no_feature_class_configuration:
            feature_class_configuration.data_importer = full_module_path(DefaultImportProcessor)
        # Persist the configuration while publishing is suppressed, so this save
        # does not re-enter this handler's publishing section
        previous = DbEntityInterest._no_post_save_publishing
        DbEntityInterest._no_post_save_publishing = True
        db_entity.feature_class_configuration = feature_class_configuration
        db_entity.save()
        DbEntityInterest._no_post_save_publishing = previous

    # Post save presentation section
    # Quit if the publishers were turned off outside this method
    if DbEntityInterest._no_post_save_publishing or db_entity_interest._no_post_save_publishing:
        return

    # Use this to initialize the FeatureBehavior and other stuff that might not be set
    update_or_create_db_entity(config_entity, db_entity)

    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_db_entity_initial')
    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=db_entity_interest,
        instance_class=DbEntity,
        client_instance_path='db_entity',
        instance_key=db_entity_interest.db_entity.key,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_db_entity',
        # Update the setup_percent_complete instance attribute for new instances
        # of classes with this attribute (currently only DbEntity)
        update_setup_percent_complete=db_entity_interest.db_entity.setup_percent_complete == 0,
        **filter_keys(kwargs, ['created']))
def on_db_entity_interest_post_save(sender, **kwargs):
    """
    Called after a DbEntityInterest saves, but not when a config_entity is running post_save publishers
    In other words, this is only called after a direct DbEntityInterest save/update.
    This does the same as post_save_config_entity, but starts with the 'post_save_config_entity_db_entities'
    signal to do only DbEntity dependent presentation.
    """
    db_entity_interest = kwargs['instance']
    # Downcast the stored config_entity to its concrete subclass
    config_entity = ConfigEntity._subclassed(db_entity_interest.config_entity)
    db_entity = db_entity_interest.db_entity
    # TODO The default user should be the admin
    user = db_entity.updater if db_entity.updater else get_user_model().objects.get(username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity presentation should always be disabled if we are saving a ConfigEntity
    logger.info("Handler: post_save_db_entity_interest for config_entity {config_entity}, db_entity {db_entity}, "
                "and user {username}.".format(
                    config_entity=config_entity,
                    db_entity=db_entity_interest.db_entity,
                    username=user.username
                ))
    if kwargs.get('created', None):
        db_entity = db_entity_interest.db_entity
        # TODO
        # While we test upload, just delete the previous DbEntitys with the same key name
        # in the ConfigEntity.
        db_entity_interest.config_entity.db_entities.filter(key=db_entity.key).exclude(id=db_entity.id).delete()
        # Make sure the db_entity's schema matches the config_entity's if not set
        # TODO we assume that the schema should match the config_entity, rather than
        # an ancestor or the config_entity (like the project or a scenario). There
        # are many cases where the schema should not be that of the config_entity, so
        # we might want to remove this default and force the saver to set it
        if not db_entity.schema or not db_entity.table:
            db_entity.schema = db_entity.schema or db_entity_interest.config_entity.schema()
            # Always base the table name on the key
            db_entity.table = db_entity.key
            db_entity_interest.db_entity.save()

    if db_entity_interest.config_entity.deleted:
        # Do nothing for deleted config_entities
        return

    # Define the data_importer if not already set
    if not (db_entity.feature_class_configuration and db_entity.feature_class_configuration.data_importer):
        feature_class_configuration = db_entity.feature_class_configuration = db_entity.feature_class_configuration or FeatureClassConfiguration()
        # Choose the correct importer, if any, to set up the feature_class_configuration and features
        if db_entity.origin_instance:
            # Import from the origin_instance. This could be a full copy or from the current layer selection features
            feature_class_configuration.data_importer = full_module_path(OriginDbEntityProcessor)
        elif '.json' in db_entity.url.lower():
            # Import it using the geojson importer
            feature_class_configuration.data_importer = full_module_path(GeoJsonProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif '.zip' in db_entity.url.lower():
            feature_class_configuration.data_importer = full_module_path(ZippedSqlFileProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif not db_entity.no_feature_class_configuration:
            feature_class_configuration.data_importer = full_module_path(DefaultImportProcessor)
        # Save with publishing suppressed so this nested save does not re-trigger
        # the post-save publishing performed below; restore the previous flag after
        previous = DbEntityInterest._no_post_save_publishing
        DbEntityInterest._no_post_save_publishing = True
        db_entity.feature_class_configuration = feature_class_configuration
        db_entity.save()
        DbEntityInterest._no_post_save_publishing = previous

    # Post save presentation section
    # Quit if the publishers were turned off outside this method
    if DbEntityInterest._no_post_save_publishing or db_entity_interest._no_post_save_publishing:
        return

    # Use this to initialize the FeatureBehavior and other stuff that might not be set
    update_or_create_db_entity(config_entity, db_entity)

    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_db_entity_initial')
    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=db_entity_interest,
        instance_class=DbEntity,
        client_instance_path='db_entity',
        instance_key=db_entity_interest.db_entity.key,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_db_entity',
        # Update the setup_percent_complete instance attribute for new instances
        # of classes with this attribute (currently only DbEntity)
        update_setup_percent_complete=db_entity_interest.db_entity.setup_percent_complete == 0,
        **filter_keys(kwargs, ['created'])
    )