def primary_geography_feature_class(self):
    """
        Finds the DbEntity marked as the primary_geography and creates its feature class
    :return: The dynamic feature class of the primary geography DbEntity
    """
    db_entity = first(
        lambda db_entity: db_entity.feature_class_configuration.primary_geography,
        self.dynamic_model_configurations())
    if not db_entity:
        raise Exception("No primary geography feature class found for ConfigEntity %s" % self.config_entity)
    return self.__class__(self.config_entity, db_entity).dynamic_model_class()
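All of the snippets on this page lean on a first helper imported elsewhere in the module. A minimal sketch of its assumed behavior, inferred from the call sites here (return the first element satisfying the predicate, or None if nothing matches):

def first(predicate, sequence):
    # Assumed semantics: the first element for which predicate(element) is truthy, else None
    for element in sequence:
        if predicate(element):
            return element
    return None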
Example #2
def primary_geography_config_entity(self):
    """
        Walks up the ancestry to find the closest primary_geography DbEntity and returns the ConfigEntity that owns it
    :return: self if this ConfigEntity owns a primary_geography DbEntity, otherwise the closest owning ancestor, or None
    """
    primary_geography_db_entity = first(
        lambda db_entity: db_entity.feature_class_configuration and db_entity.feature_class_configuration.primary_geography,
        self.owned_db_entities())
    if primary_geography_db_entity:
        return self
    return self.parent_config_entity_subclassed.primary_geography_config_entity if self.parent_config_entity else None
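The ancestry walk above is an ascend-until-found pattern; here is a standalone sketch of the same idea using a hypothetical Node class (not part of the Footprint codebase):

class Node(object):
    def __init__(self, parent=None, owns_primary_geography=False):
        self.parent = parent
        self.owns_primary_geography = owns_primary_geography

    def closest_primary_geography_owner(self):
        # Return self if it owns the primary geography, otherwise defer to the parent chain, else None
        if self.owns_primary_geography:
            return self
        return self.parent.closest_primary_geography_owner() if self.parent else None

# region = Node(owns_primary_geography=True)
# scenario = Node(parent=region)
# scenario.closest_primary_geography_owner()  # -> region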
Example #4
def match_subclasses(c, matching_lambda=lambda x: True, first_only=False):
    """
        Recursively finds the subclasses of c that match matching_lambda
    :param c: starting class
    :param matching_lambda: filter function that takes each subclass and returns True or False. Unmatched classes
    are still recursed into beforehand. Defaults to matching all subclasses.
    :param first_only: Default False. Stop after the first match and return a single result
    :return: A single result or None if first_only is True. Otherwise 0 or more results in a list
    """
    # Collect all descendants: iterate over a copy of the immediate subclasses
    # and extend the list with each one's descendants (via get_subclasses)
    subclasses = c.__subclasses__()
    for d in list(subclasses):
        subclasses.extend(get_subclasses(d))
    unique_subclasses = unique(subclasses)
    return first(matching_lambda, unique_subclasses) if first_only else \
        filter(matching_lambda, unique_subclasses)
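A minimal usage sketch with hypothetical classes, assuming match_subclasses and the helpers it relies on (first, unique, get_subclasses) are importable from this module:

# Hypothetical hierarchy, for illustration only
class Base(object):
    pass

class Middle(Base):
    pass

class Leaf(Middle):
    pass

# All descendants of Base (order not guaranteed):
# match_subclasses(Base)                                                  -> [Middle, Leaf]
# Only the subclasses the lambda matches:
# match_subclasses(Base, lambda c: c.__name__ == 'Leaf')                  -> [Leaf]
# Stop at the first match and return a single class:
# match_subclasses(Base, lambda c: c.__name__ == 'Leaf', first_only=True) -> Leaf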
Example #6
def best_matching_subclass(cls,
                           matching_lambda=lambda x: True,
                           limited_subclasses=None,
                           return_base_unless_match=False,
                           ascend_cls_until=None):
    """
        Find the best matching subclass according to the matching_lambda.
        This works by breadth-first. It searches immediate sublasses,
        and upon finding a match recurses on that subclass. If no match
        is found at a certain level it returns the match at the level above
    :param cls:
    :param matching_lambda: filter function that takes each subclass and returns true or false.
    :param limited_subclasses: If specified limits the search to the given subclasses.
    These subclasses are used recursively as well
    :param return_base_unless_match: Default false. Return the c if no match is found.
    :param ascend_cls_until: Default None. If set to a class, if no match is found for cls,
    recurse on cls.__base__ until a match or cls.__base__ equals the class specified here.
    Example. If cls is GlobalConfig and ascend_cls_until=FootprintModel, the method looks
    for a subclass with cls==GlobalConfig. If that fails as cls==ConfigEntity. If that fails
    then cls==FootprintModel so we give up and return None or if return_base_unless_match is True
    then we return ConfigEntity (which is probably useless)
    :return: The first match or None (or c if return_base_unless_match is True)
    """
    subclasses = filter(lambda limited_subclass: issubclass(limited_subclass, cls), limited_subclasses) if \
        limited_subclasses else \
        cls.__subclasses__()
    match = first(matching_lambda, unique(subclasses))
    if match:
        # If we have a match, recurse on the match to try to find an even more specific match.
        # Specify return_base_unless_match since we already succeeded at matching for this iteration
        return best_matching_subclass(match,
                                      matching_lambda,
                                      limited_subclasses,
                                      return_base_unless_match=True)
    elif ascend_cls_until and cls.__base__ != ascend_cls_until:
        # No match, so recurse on cls.__base__ to find a more general match, unless we've reached the 'until' class
        return best_matching_subclass(cls.__base__, matching_lambda,
                                      limited_subclasses,
                                      return_base_unless_match,
                                      ascend_cls_until)
    # Give up and return the current cls or None
    return cls if return_base_unless_match else None
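A minimal usage sketch with a hypothetical hierarchy, showing how the search descends to the most specific match and how return_base_unless_match behaves (assumes the module's first and unique helpers are available):

# Hypothetical hierarchy, for illustration only
class FeatureBase(object):
    pass

class ParcelFeature(FeatureBase):
    pass

class DetailedParcelFeature(ParcelFeature):
    pass

# Descends to the most specific matching subclass:
# best_matching_subclass(FeatureBase, lambda c: 'Parcel' in c.__name__)                -> DetailedParcelFeature
# No match returns None unless return_base_unless_match is set:
# best_matching_subclass(FeatureBase, lambda c: False)                                 -> None
# best_matching_subclass(FeatureBase, lambda c: False, return_base_unless_match=True)  -> FeatureBase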
Example #8
def parse_query(config_entity, manager, filters=None, joins=None, aggregates=None, group_bys=None):
    queryset = manager
    group_by_values = None
    annotation_tuples = None

    # Make sure all related models have been created before querying
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator

    FeatureClassCreator(config_entity).ensure_dynamic_models()

    # Any joins are db_entity_keys and resolve to feature classes of the config_entity
    related_models = map(lambda join: config_entity.db_entity_feature_class(join), joins or [])

    # Use group_by_values to group by and then attach the aggregates to each unique result via annotation
    # If aggregates are specified but group by is not, we use aggregate to just get a single result
    # For now we assume any filtering should be applied BEFORE aggregation
    if filters:
        queryset = queryset.filter(parse_token(filters, manager, related_models))

    # We only need to join explicitly if the join is not included in one of the group by fields
    manual_joins = (
        joins or []
        if not group_bys
        else set(joins or [])
        - set(
            map(
                lambda group_by: resolve_db_entity_key_of_field_path(
                    parse_group_by(group_by, manager, related_models), manager, related_models
                ),
                group_bys,
            )
        )
    )

    if manual_joins:
        # If there are joins, filter the queryset by inner join on the related_model pk through geography
        for related_model in related_models:
            related_field_pk_path = resolve_field_path_via_geographies("pk", manager, [related_model])
            queryset = queryset.filter(**{"{0}__isnull".format(related_field_pk_path): False})

    # If there are aggregates, they are either part of the main table or a join table
    if aggregates:
        # Resolve the field path using available joins via geographies or on the main model
        # Then send the resolved field
        annotation_tuples = map(lambda aggregate: parse_annotation(aggregate, manager, related_models), aggregates)

    if group_bys:
        group_by_values = map(lambda group_by: parse_group_by(group_by, manager, related_models), to_list(group_bys))

        annotation_tuples = annotation_tuples or []
        # Add a Count to the selection if one isn't present
        if not first(lambda annotation_tuple: Count == annotation_tuple[0], annotation_tuples):
            annotation_tuples.insert(0, (Count, group_by_values[0], "count"))

        # TODO. We might have to do rounding of floats here using an extra clause:
        # extra(select={'custom_field': 'round(field, 2)'})
        queryset = queryset.values(*group_by_values).order_by(*group_by_values)
    elif annotation_tuples:
        # If there are annotations but no group_bys, we need to fake a group by: annotate
        # the count of the pk on each row and then group by it. Since every row
        # has exactly one pk, all the rows group together.
        # Otherwise we'd have to use the aggregate function, which doesn't give us
        # a queryset back
        queryset = queryset.annotate(count=Count("pk")).values("count")

    if annotation_tuples:

        for annotation_tuple in annotation_tuples:
            # The annotation is either a custom method of the queryset looked up by name (e.g. geo functions)
            # or a built-in aggregate function such as Count
            annotate_method = (
                getattr(queryset, annotation_tuple[0])
                if isinstance(annotation_tuple[0], basestring)
                else annotation_tuple[0]
            )

            if len(annotation_tuple) == 3:
                # Apply alias if specified
                queryset = queryset.annotate(**{annotation_tuple[2]: annotate_method(annotation_tuple[1])})
            else:
                # Otherwise default the name to the field
                queryset = queryset.annotate(annotate_method(annotation_tuple[1]))

    elif group_by_values:
        # If no annotations are specified, add a count annotation to make the group by take effect.
        # Annotating a count of the first group_by field gives the correct grouping, since Django
        # receives values(group_by1, group_by2, ...).annotate(Count(group_by1))
        queryset = queryset.annotate(count=Count(group_by_values[0]))

    return queryset
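A hedged sketch of how a call might look; the db_entity key, field names, and the exact shapes of filters and aggregates below are assumptions, since they depend on parse_token and parse_annotation:

# feature_class = config_entity.db_entity_feature_class('scenario_end_state')  # hypothetical db_entity key
# queryset = parse_query(
#     config_entity,
#     feature_class.objects,
#     filters=...,                    # whatever parse_token expects; applied before aggregation
#     joins=['census_blocks'],        # db_entity keys, resolved to related feature classes
#     aggregates=...,                 # whatever parse_annotation expects, e.g. sums over joined fields
#     group_bys=['land_use'])         # field names, resolved via parse_group_by
# The result is still a Django queryset (values + annotations), so the caller can
# evaluate it lazily, slice it, or serialize it.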