Example 1
 def parent_field_names(self, with_id_fields=True):
     """
         The names of the fields that reference the parent(s)
         :param with_id_fields: Default True, if False exclude the '*_id' fields from the results
     :return:
     """
     return flat_map(lambda field: [field.name] + ([field.attname] if with_id_fields else []), self.model._meta.parents.values())
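All of these examples rely on a flat_map helper that is not shown here. Judging from the call sites (a function that returns a list, applied over an iterable, with the per-item results concatenated), a minimal sketch might look like the following; this is an assumption inferred from usage, not the project's actual implementation:

from itertools import chain

def flat_map(fn, items):
    # Apply fn to each item and flatten the resulting iterables by one level (sketch)
    return list(chain.from_iterable(fn(item) for item in items))

For instance, flat_map(lambda x: [x, x * 10], [1, 2]) would return [1, 10, 2, 20].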
Example 2
    def projects(self,
                 region=None,
                 region_keys=None,
                 project_keys=None,
                 class_scope=None):
        """
            Looks for the projects of the specified region or all regions of self.regions()
            Regions must already be saved to the database at this point.
        :param region: Optional Region instance; if omitted, all regions from self.regions() are used
        :return:
        """
        regions = [region] if region else map(
            lambda region: Region.objects.get(key=region['key']),
            self.regions())
        local_class_scope = class_scope

        def projects_of_region(region):
            class_scope = local_class_scope or region.schema()
            region_config_entities = resolve_fixture("config_entity",
                                                     "config_entities",
                                                     ConfigEntitiesFixture,
                                                     class_scope)
            return region_config_entities.projects(region)

        return flat_map(lambda region: projects_of_region(region), regions)
Example 3
    def scenarios(self,
                  project=None,
                  region_keys=None,
                  project_keys=None,
                  scenario_keys=None,
                  class_scope=None):
        """
            Looks for the scenarios of the specified project or all projects of self.projects()
            Projects must already be saved to the database at this point.
            :param project: Optional Project used to limit the scenarios returned to those whose project_key
            matches project.key and to those that have no project_key
            :param class_scope: Optional Scenario subclass by which to filter the fixtures according to the
                fixture's class_scope attribute
        :return: A list of scenario fixtures
        """
        local_class_scope = class_scope
        projects = [project] if project else map(
            lambda project: Project.objects.get(key=project['key']),
            self.projects())

        def scenarios_of_project(project):
            project_config_entities = resolve_fixture(
                "config_entity", "config_entities", ConfigEntitiesFixture,
                local_class_scope or project.schema())
            return project_config_entities.scenarios(
                project,
                region_keys=region_keys,
                project_keys=project_keys,
                scenario_keys=scenario_keys,
                class_scope=class_scope)

        return flat_map(lambda project: scenarios_of_project(project),
                        projects)
Example 4
 def descendants_by_type(self, subclass):
     """
         Find all descendants of this ConfigEntity that match the given ConfigEntity subclass
         :param subclass: A ConfigEntity subclass
     """
     return flat_map(lambda child: child.descendants_by_type(subclass) if not isinstance(child, subclass) else [child],
                     self.subclassed_children())
Example 5
    def save(self, force_insert=False, force_update=False, using=None):
        """
            Overrides the default save to merge the self.config_entities properties after doing an initial save
        :param force_insert:
        :param force_update:
        :param using:
        :return:
        """

        # First save to create a pk
        super(CombinedConfigEntity, self).save(force_insert, force_update,
                                               using)
        # Add unique instances to each collection from the config_entities. References to the parent_config_entity's
        # instances will automatically be adopted first.
        for method in ConfigEntity.INHERITABLE_COLLECTIONS:
            # get the add_method or add_method_through method name
            getattr(self, '_add_{0}'.format(method))(*unique(
                flat_map(
                    lambda config_entity: getattr(
                        config_entity, 'computed_{0}'.format(method))(),
                    self.config_entities), lambda instance: instance.pk))

        # Combine the bounds of the config_entities to make this instance's bounds
        self.bounds = MultiPolygon(
            map(lambda config_entity: config_entity.bounds.cascaded_union,
                self.config_entities))
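Several of these examples (this one, and the lineage and all_superiors examples below) also use a unique helper, sometimes with a key function. A plausible sketch, inferred from the call sites and assuming the first occurrence wins, is:

def unique(items, key=lambda item: item):
    # Keep only the first occurrence of each item, where identity is decided by key (sketch)
    seen = set()
    result = []
    for item in items:
        marker = key(item)
        if marker not in seen:
            seen.add(marker)
            result.append(item)
    return result

In Example 5 the key is the instance's pk, so two instances representing the same database row are only added once.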
Example 6
 def descendants_by_type(self, subclass):
     """
         Find all descendants of this ConfigEntity that match the given ConfigEntity subclass
         :param subclass: A ConfigEntity subclass
     """
     return flat_map(lambda child: child.descendants_by_type(subclass) if not isinstance(child, subclass) else [child],
                     self.subclassed_children())
Example 7
    def save(self, force_insert=False, force_update=False, using=None):
        """
            Overrides the default save to merge the self.config_entities properties after doing an initial save
        :param force_insert:
        :param force_update:
        :param using:
        :return:
        """

        # First save to create a pk
        super(CombinedConfigEntity, self).save(force_insert, force_update, using)
        # Add unique instances to each collection from the config_entities. References to the parent_config_entity's
        # instances will automatically be adopted first.
        for method in ConfigEntity.INHERITABLE_COLLECTIONS:
            # get the add_method or add_method_through method name
            getattr(self, "_add_{0}".format(method))(
                *unique(
                    flat_map(
                        lambda config_entity: getattr(config_entity, "computed_{0}".format(method))(),
                        self.config_entities,
                    ),
                    lambda instance: instance.pk,
                )
            )

        # Combine the bounds of the config_entities to make this instance's bounds
        self.bounds = MultiPolygon(map(lambda config_entity: config_entity.bounds.cascaded_union, self.config_entities))
Example 8
 def feature_class_configuration_from_geojson_introspection(self, data):
     """
         Creates a dynamic Feature class configuration by introspecting the db_entity's Feature table.
     :return: The Feature class configuration
     """
     properties = unique(flat_map(lambda feature: feature.properties.keys(), data.features))
     fields = map(lambda property: models.CharField(property), properties)
     return self.generate_configuration(fields)
Example 9
def layer_selections_of_config_entity(config_entity):
    """
        Returns all LayerSelection instances of the ConfigEntity
    :param config_entity:
    :return:
    """
    return flat_map(
        lambda layer: list(get_or_create_layer_selection_class_for_layer(layer, config_entity, no_table_creation=True)),
        Layer.objects.filter(config_entity=config_entity))
Example 10
 def lineage(cls, discovered=[]):
     """
         Returns the hierarchy of parent classes of this class. Duplicates are ignored. Order is from this class up
         until GlobalConfig
     :param discovered: Parent classes already visited, used to avoid revisiting them
     :return:
     """
     return unique([cls] + flat_map(lambda parent_class: parent_class.lineage(discovered + cls.parent_classes()),
                                    filter(lambda parent_class: parent_class not in discovered,
                                           cls.parent_classes())))
Example 11
 def lineage(cls, discovered=[]):
     """
         Returns the hierarchy of parent classes of this class. Duplicates are ignored. Order is from this class up
         until GlobalConfig
     :param discovered: Parent classes already visited, used to avoid revisiting them
     :return:
     """
     return unique([cls] + flat_map(lambda parent_class: parent_class.lineage(discovered + cls.parent_classes()),
                                    filter(lambda parent_class: parent_class not in discovered,
                                           cls.parent_classes())))
Example 12
def on_config_entity_post_save_user(sender, **kwargs):
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(
            username=UserGroupKey.SUPERADMIN)
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # TODO these should be importable on top. Something is messed up
    user_fixture = resolve_fixture("user",
                                   "user",
                                   UserFixture,
                                   config_entity.schema(),
                                   config_entity=config_entity)

    # Get the ConfigEntityGroups of the ConfigEntity. GlobalConfig uses SuperAdmin as its Group
    config_entity_groups = config_entity.config_entity_groups() if \
        not isinstance(config_entity, GlobalConfig) else \
        [Group.objects.get(name=UserGroupKey.SUPERADMIN)]

    # Find all existing users of all ConfigEntity Groups of the ConfigEntity
    # Note that we use values() instead of all() to get dicts with just needed fields instead of model instances
    # TODO remove username from here once all users have emails. update_or_create_user() checks username for uniqueness presently
    existing_user_dicts = flat_map(
        lambda group: group.user_set.all().values('email', 'username'),
        config_entity_groups)

    # Combine the existing users with the fixtures, giving the former preference. We favor
    # what's in the database because the user might have updated their profile
    # Only accept fixture users not matching users in the db (by email)
    existing_emails = map(
        lambda existing_user_dict: existing_user_dict['email'],
        existing_user_dicts)
    logger.debug("Found existing users %s" % ', '.join(existing_emails))
    new_fixture_users = filter(
        lambda fixture_user: fixture_user['email'] not in existing_emails,
        user_fixture.users())
    if len(new_fixture_users) > 0:
        logger.debug("Found new fixture users %s" % ', '.join(
            map(lambda fixture_user: fixture_user['email'],
                new_fixture_users)))
    user_dicts = existing_user_dicts + new_fixture_users

    # Update or create each user. This will create users of new fixtures and run post-save processing
    # on both existing and new.
    for user_dict in user_dicts:
        update_or_create_user(**user_dict)

    reset_queries()
Example 13
    def donees(self):
        """
            Used by the RelatedCollectionAdoption mixin. The donees of this Presentation are the corresponding
            Presentations of each child ConfigEntity of this presentation's ConfigEntity
        :return:
        """

        # We use filter here since the presence of a Presentation in the child is optional. We expect 0 or 1 for
        # each child
        return flat_map(
            lambda child_config_entity: child_config_entity.presentation_set.filter(key=self.key).select_subclasses(),
            self.config_entity.children())
Example 14
    def donees(self):
        """
            Used by the RelatedCollectionAdoption mixin. The donees of this Presentation are the corresponding
            Presentations of each child ConfigEntity of this presentation's ConfigEntity
        :return:
        """

        # We use filter here since the presence of a Presentation in the child is optional. We expect 0 or 1 for
        # each child
        return flat_map(
            lambda child_config_entity: child_config_entity.presentation_set.filter(key=self.key).select_subclasses(),
            self.config_entity.children())
Example 15
 def field_names_to_omit_from_query_values(self, related_models=[]):
     """
         Returns all field names related to table inheritance. We don't want these to show up
         in the query results
     :param related_models:
     :return:
     """
     from footprint.main.models.geospatial.feature import Feature
     return \
         Feature.API_EXCLUDED_FIELD_NAMES + \
         self.parent_field_names() + \
         map(lambda field: field.name,
             flat_map(
                lambda related_model: related_model._meta.parents.values(),
                related_models)
         )
Example 16
 def all_superiors(self):
     """
         Recursively returns all unique superiors. For a ConfigEntity Group the superiors are the corresponding
         global Group (e.g. Foo_Manager Group of Project Foo's global superior is the global Manager) as well
         as all ConfigEntity Groups of the parent ConfigEntity. For example, if Project Foo has a Foo_Manager
         and a Foo_User, and its child Scenario Bar has a Bar_User, then Foo_Manager and Foo_User are
         the superiors of Bar_User. If a Bar_Manager existed its superiors would also be Foo_Manager and Foo_User,
         although the case of Bar_Manager having a Foo_User superior should probably be avoided (We could
         prevent this with code if such a configuration were ever desired)
     :return:
     """
     superiors = self.superiors.all()
     return unique(
         flat_map(
             lambda superior: [superior] + superior.group_hierarchy.
             all_superiors(), superiors), lambda obj: obj.id)
Example 17
 def all_superiors(self):
     """
         Recursively returns all unique superiors. For a ConfigEntity Group the superiors are the corresponding
         global Group (e.g. Foo_Manager Group of Project Foo's global superior is the global Manager) as well
         as all ConfigEntity Groups of the parent ConfigEntity. For example, if Project Foo has a Foo_Manager
         and a Foo_User, and its child Scenario Bar has a Bar_User, then Foo_Manager and Foo_User are
         the superiors of Bar_User. If a Bar_Manager existed its superiors would also be Foo_Manager and Foo_User,
         although the case of Bar_Manager having a Foo_User superior should probably be avoided (We could
         prevent this with code if such a configuration were ever desired)
     :return:
     """
     superiors = self.superiors.all()
     return unique(
         flat_map(
             lambda superior: [superior]+superior.group_hierarchy.all_superiors(), superiors),
         lambda obj: obj.id)
Example 18
def on_config_entity_post_save_user(sender, **kwargs):
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if config_entity._no_post_save_publishing:
        return
    if kwargs.get('created') and not config_entity.creator:
        # Set the ConfigEntity.creator to the default admin group user if it wasn't set by the API
        config_entity.creator = User.objects.get(username=UserGroupKey.SUPERADMIN)
        config_entity._no_post_save_publishing = True
        config_entity.save()
        config_entity._no_post_save_publishing = False

    # TODO these should be importable on top. Something is messed up
    user_fixture = resolve_fixture("user", "user", UserFixture, config_entity.schema(),
                                   config_entity=config_entity)

    # Get the ConfigEntityGroups of the ConfigEntity. GlobalConfig uses SuperAdmin as its Group
    config_entity_groups = config_entity.config_entity_groups() if \
        not isinstance(config_entity, GlobalConfig) else \
        [Group.objects.get(name=UserGroupKey.SUPERADMIN)]

    # Find all existing users of all ConfigEntity Groups of the ConfigEntity
    # Note that we use values() instead of all() to get dicts with just needed fields instead of model instances
    # TODO remove username from here once all users have emails. update_or_create_user() checks username for uniqueness presently
    existing_user_dicts = flat_map(
        lambda group: group.user_set.all().values('email', 'username'),
        config_entity_groups
    )

    # Combine the existing users with the fixtures, giving the former preference. We favor
    # what's in the database because the user might have updated their profile
    # Only accept fixture users not matching users in the db (by email)
    existing_emails = map(lambda existing_user_dict: existing_user_dict['email'], existing_user_dicts)
    logger.debug("Found existing users %s" % ', '.join(existing_emails))
    new_fixture_users = filter(lambda fixture_user: fixture_user['email'] not in existing_emails, user_fixture.users())
    if len(new_fixture_users) > 0:
        logger.debug("Found new fixture users %s" % ', '.join(map(lambda fixture_user: fixture_user['email'], new_fixture_users)))
    user_dicts = existing_user_dicts + new_fixture_users

    # Update or create each user. This will create users of new fixtures and run post-save processing
    # on both existing and new.
    for user_dict in user_dicts:
        update_or_create_user(**user_dict)

    reset_queries()
Example 19
    def permitted_ids(cls, groups, objects, permission_key=PermissionKey.VIEW):
        """
            Given one or more groups and list of instances return the ids of the objects
            to which the groups have permission.
        :param groups: Typically user.groups
        :param objects: The instances of the cls to test
        :param permission_key: The key to check permission for, defaults to View
        :return:
        """
        # Find the content_type_ids of the resource model and subclass models
        # Then find the corresponding view ids
        # This stuff almost never changes, so cache it at the model class level for speed

        # Get all classes
        content_type_dict = ContentType.objects.get_for_models(
            cls, *_all_subclasses(cls))
        # split keys/values into two matching lists
        models_with_content_types, models_content_types = zip(
            *content_type_dict.iteritems())

        content_type_ids = [
            content_type.id for content_type in models_content_types
        ]

        perm_ids = Permission.objects.filter(codename__in=flat_map(
            lambda model: PermissionKey.permission_keys(permission_key, model),
            models_with_content_types)).values_list('id', flat=True)

        group_ids = [group.id for group in groups]
        obj_ids = [unicode(obj.id) for obj in objects]

        from guardian.models import GroupObjectPermission
        # Find all the objects of this type that this group has permission to access.
        group_objects = GroupObjectPermission.objects.filter(
            content_type_id__in=content_type_ids,
            group__in=group_ids,
            permission_id__in=perm_ids,
            object_pk__in=obj_ids)

        return group_objects.values_list('object_pk', flat=True)
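Example 19 also calls an _all_subclasses helper that is not shown. A minimal recursive sketch of what such a helper presumably does (the name and behavior are assumptions inferred from the call site) is:

def _all_subclasses(cls):
    # Recursively gather every direct and indirect subclass of cls (sketch)
    subclasses = []
    for subclass in cls.__subclasses__():
        subclasses.append(subclass)
        subclasses.extend(_all_subclasses(subclass))
    return subclasses

ContentType.objects.get_for_models(cls, *_all_subclasses(cls)) then resolves ContentTypes for the class and all of its subclasses at once.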
Example 20
    def permitted_ids(cls, groups, objects, permission_key=PermissionKey.VIEW):
        """
            Given one or more groups and list of instances return the ids of the objects
            to which the groups have permission.
        :param groups: Typically user.groups
        :param objects: The instances of the cls to test
        :param permission_key: The key to check permission for, defaults to View
        :return:
        """
        # Find the content_type_ids of the resource model and subclass models
        # Then find the corresponding view ids
        # This stuff almost never changes, so cache it at the model class level for speed

        # Get all classes
        content_type_dict = ContentType.objects.get_for_models(cls, *_all_subclasses(cls))
        # split keys/values into two matching lists
        models_with_content_types, models_content_types = zip(*content_type_dict.iteritems())

        content_type_ids = [content_type.id for content_type in models_content_types]

        perm_ids = Permission.objects.filter(
            codename__in=flat_map(
                lambda model: PermissionKey.permission_keys(permission_key,
                                                            model),
                models_with_content_types
            )).values_list('id', flat=True)

        group_ids = [group.id for group in groups]
        obj_ids = [unicode(obj.id) for obj in objects]

        from guardian.models import GroupObjectPermission
        # Find all the objects of this type that this group has permission to access.
        group_objects = GroupObjectPermission.objects.filter(
            content_type_id__in=content_type_ids,
            group__in=group_ids,
            permission_id__in=perm_ids,
            object_pk__in=obj_ids)

        return group_objects.values_list('object_pk', flat=True)
Example 21
def update_or_create_scenarios(projects=[], **kwargs):
    """
        Initializes scenarios using fixture data. The fixture data is expected in the form
        dict(BaseScenario=[dict(),...], FutureScenario=[dict()....]) where the dicts in the former are used
        to create BaseScenario instances and those in the latter to create FutureScenario instances.
        Use kwargs to limit class processing to one model class with e.g. class=FutureScenario
    :param projects: Optional list of Project instances; defaults to update_or_create_projects(**kwargs)
    :return:
    """
    projects = projects or update_or_create_projects(**kwargs)

    # Get the scenario fixtures for each Project instance and build the Scenario instances.
    # Flatten the results and return them
    # scenario_fixtures may be a function that accepts the current project in order to filter the fixtures
    return flat_map(
        lambda project: scenarios_per_project(
            project,
            # Resolve the scenario fixtures at the most specific scope available for the project
            resolve_fixture("config_entity",
                            "config_entities", ConfigEntitiesFixture,
                            project.schema()).scenarios(project),
            **kwargs),
        projects)
Example 22
def update_or_create_scenarios(projects=[], **kwargs):
    """
        Initializes scenarios using fixture data. The fixture data is expected in the form
        dict(BaseScenario=[dict(),...], FutureScenario=[dict()....]) where the dicts in the former are used
        to create BaseScenario instances and those in the latter to create FutureScenario instances.
        Use kwargs to limit class processing to one model class with e.g. class=FutureScenario
    :param projects: Optional list of Project instances; defaults to update_or_create_projects(**kwargs)
    :return:
    """
    projects = projects or update_or_create_projects(**kwargs)

    # Get the scenario fixtures for each Project instance and build the Scenario instances.
    # Flatten the results and return them
    # scenario_fixtures may be a function that accepts the current project in order to filter the fixtures
    return flat_map(
        lambda project: scenarios_per_project(
            project,
            # Resolve the scenario fixtures at the most specific scope available for the project
            resolve_fixture("config_entity",
                            "config_entities",
                            ConfigEntitiesFixture,
                            project.schema()).scenarios(project), **kwargs),
        projects
    )
Example 23
    def values_query_set(self, query_set=None):
        """
            Returns a ValuesQuerySet based on the query_set and the related_models.
            The given query_set returns Feature class instances. We want the dictionaries with the related models
            joined in
            :param query_set: Optional QuerySet of instances to use as the basis for producing the ValuesQuerySet.
            If omitted, create_query_set is used to generate it
        """

        feature_class = self.feature_class
        query_set = query_set or self.create_query_set(feature_class.objects)

        # Combine the fields of the join models
        join_models = self.resolve_join_models()
        filtered_fields = limited_api_fields(feature_class)
        is_join_queryset = self.joins and len(self.joins) > 0

        # Limit the returned fields for the main model and related models
        # based on the api_include property on the model. If that property
        # is null, return everything except the wkb_geometry

        def related_field_paths_filter(join_model):
            related_filtered_fields = limited_api_fields(join_model)
            return related_field_paths(
                feature_class.objects,
                join_model,
                for_value_queryset=is_join_queryset,
                field_names=related_filtered_fields,
                excludes=Feature.API_EXCLUDED_FIELD_NAMES if not filtered_fields else [])

        all_field_paths = \
            model_field_paths(
                feature_class,
                for_value_queryset=is_join_queryset,
                field_names=filtered_fields,
                excludes=Feature.API_EXCLUDED_FIELD_NAMES if not filtered_fields else []) + \
            flat_map(
                related_field_paths_filter,
                join_models)

        # Convert the queryset to values with all main and related field paths
        # This makes us lose the model instance
        indistinct_query_set = query_set.values(*all_field_paths)

        # We need the result_map at this point so create it and save the results
        # This is a side effect but all callers need it immediately afterward anyway
        self.result_map = self.create_result_map(indistinct_query_set)

        # Prevent duplicates that occur by joining two feature tables via a primary geography table,
        # since there can be many primary geography features per main feature

        # To limit duplicate checks find the foreign key to each related model in the geographies join table
        # Remove the _id portion since the actual Django field always omits it
        # TODO We can use the following line instead once someone verifies that the only possible values here are
        # _id for non primary geographies and __id for primary geographies
        # lambda attr[:-len('_id')] if not attr.endswith('__id') else attr
        # This corrects the problem that the replace might not replace only at the end of the string
        related_foreign_key_attributes = map(
            lambda attr: attr.replace('_id', '') if not attr.endswith('__id') else attr,
            self.result_map.django_join_model_attributes
        )
        # If we don't have joins, no need for distinct.
        if not is_join_queryset:
            return indistinct_query_set

        # Limit the distinct clause to the pk and the join foreign keys.
        key_attributes = ['pk'] + related_foreign_key_attributes
        return indistinct_query_set.distinct(*key_attributes)
Example 24
def _model_ancestry(model):
    return list(model.__bases__) + flat_map(lambda base: _model_ancestry(base), model.__bases__)
Example 25
def _model_ancestry(model):
    return list(model.__bases__) + flat_map(lambda base: _model_ancestry(base),
                                            model.__bases__)
Example 26
 def computed_behaviors(self, level=0):
     """
         Returns the Behavior and all of its ancestors
     """
     return [self] + flat_map(lambda parent: parent.computed_behaviors(), self.parents.all())