def update_or_create_region(region_dict):
        if kwargs.get('limit_to_classes') and Region not in kwargs['limit_to_classes']:
            if Region.objects.filter(key=region_dict['key']).count() != 1:
                raise Exception("Trying to get Region %s, which hasn't been created" % region_dict['key'])
            region_tuple = Region.objects.get(key=region_dict['key']), False, False
        else:
            region_tuple = Region.objects.update_or_create(
                key=region_dict['key'],
                defaults=merge(
                    dict(
                        behavior=get_behavior('default_config_entity'),
                    ),
                    remove_keys(region_dict, ['key', 'media']),
                    dict(
                        creator=User.objects.get(username=UserGroupKey.SUPERADMIN),
                        parent_config_entity=global_config_singleton() if \
                            region_dict['key'] == settings.CLIENT else \
                            update_or_create_region(dict(key=settings.CLIENT, name=settings.CLIENT_NAME))[0]
            )))

        logger.info("{update_or_create} Region {config_entity}".format(update_or_create='Created' if region_tuple[1] else 'Updated', config_entity=region_tuple[0]))


        media = map(lambda medium_config:
                    Medium.objects.update_or_create(
                        key=medium_config.key,
                        defaults=remove_keys(medium_config.__dict__['kwargs'], 'key'))[0],
                    region_dict.get('media', []))

        existing_media = region_tuple[0].media.filter(id__in=map(lambda medium: medium.id, media))
        media_to_add = set(media) - set(existing_media)
        if len(media_to_add) > 0:
            region_tuple[0].media.add(*media_to_add)
        return region_tuple
    def update_or_create_project(project_dict):
        if kwargs.get('limit_to_classes') and Project not in kwargs['limit_to_classes']:
            if Project.objects.filter(key=project_dict['key']).count() != 1:
                raise Exception("Trying to get Project %s, which hasn't been created" % project_dict['key'])
            project_tuple = Project.objects.get(key=project_dict['key']), False, False
        else:
            project_tuple = Project.objects.update_or_create(
                key=project_dict['key'],
                defaults=merge(
                    dict(
                        behavior=get_behavior('default_config_entity'),
                    ),
                    remove_keys(project_dict, ['key', 'base_table', 'region_key', 'media']),
                    dict(
                        parent_config_entity=regions_by_key[project_dict['region_key']],
                        creator=User.objects.get(username=UserGroupKey.SUPERADMIN)
                    )
            ))

        logger.info("{update_or_create} Project {config_entity}".format(update_or_create='Created' if project_tuple[1] else 'Updated', config_entity=project_tuple[0]))

        media = map(lambda medium_config:
                    Medium.objects.update_or_create(
                        key=medium_config.key,
                        defaults=remove_keys(medium_config.__dict__['kwargs'], 'key'))[0],
                    project_dict.get('media', []))

        existing_media = project_tuple[0].media.filter(id__in=map(lambda medium: medium.id, media))
        media_to_add = set(media) - set(existing_media)
        if len(media_to_add) > 0:
            project_tuple[0].media.add(*media_to_add)
        return project_tuple
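
The two helpers above build their update_or_create defaults by layering a base behavior dict, the fixture dict stripped of identity/media keys, and creator/parent overrides. As a hedged illustration only, merge is assumed to behave like a left-to-right dict union in which later arguments win; the project's actual helper may differ.

# Sketch only, assuming merge unions dicts left to right with later keys overriding earlier ones,
# mirroring how the defaults above are assembled.
def merge_sketch(*dicts):
    result = {}
    for d in dicts:
        result.update(d)
    return result

# merge_sketch(dict(behavior='b'), dict(name='Region A'), dict(creator='superadmin'))
# -> {'behavior': 'b', 'name': 'Region A', 'creator': 'superadmin'}
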
Example #3
    def subclass_resource_if_needed(self, view, request):
        """
            Overrides the FootprintResource method to perform subclassing of the resource based on the request params
        :param view:
        :param request:
        :return:
        """
        params = request.GET
        # TODO cache dynamic class creation results
        # Create the dynamic resource class
        dynamic_resource_class = self.create_subclass(params, method=request.method)
        # Dynamic model classes always have a config_entity. In the case
        # where the model class is not dynamic (e.g. ClientLandUseDefinitions subclasses),
        # we expect the config_entity__id to be sent with the request, so we thusly resolve the config_entity
        config_entity = dynamic_resource_class._meta.queryset.model.config_entity if\
            hasattr(dynamic_resource_class._meta.queryset.model, 'config_entity') else\
            self.resolve_config_entity(request.GET)

        # This might not be needed anymore, but it indicates what other dynamic classes were created so that
        # permissions can be added for them
        additional_classes_used = []
        # We add permissions to the current user so they can access these dynamic classes if this is the user's first access
        # TODO permissions would ideally be done ahead of time, or if we could automatically give the user full access to all
        # subclasses of a certain type, but I don't see how to do that in the Django docs. This might be fixed in the latest Django version
        user = self.resolve_user(params)
        #logger.info("Adding permissions for user %s to dynamic_resource_class %s" % (user.username, dynamic_resource_class.__name__))
        self.add_permissions_to_user(user, self.get_or_create_permissions_for_class(dynamic_resource_class, additional_classes_used, config_entity))

        # Extract and add GET parameters
        request._config_entity = config_entity
        request._filters = remove_keys(
            merge(request.GET, self.search_params(params)),
            self.remove_params(params))

        return dynamic_resource_class().wrap_view(view)
    def _computed(self, attribute, **query_kwargs):
        """
            Returns this instance's attribute's related values or through values (for attributes with an explicit through class)
             or the donor's if this instance hasn't overridden its values
        :param attribute: 'db_entities', etc. Not the through attribute name (e.g. dbentities_set)
        :param **query_kwargs: optionally specify query arguments to use with filter() on the results.
            One special param is with_deleted, which enables deleted objects to be returned; they are normally omitted
        :return: this attribute's collection or its parents, with optional filtering applied after
        """
        resolved_attribute = self.through_attribute(self.many_field(attribute)) if has_explicit_through_class(self, attribute) else attribute
        params = (dict(deleted=query_kwargs.get('deleted', False)) if not query_kwargs.get('with_deleted') else dict())
        q_kwargs = remove_keys(query_kwargs, ['with_deleted'])

        if self.donor():
            results = get_list_or_if_empty(
                # Get instance's own Many items.
                # The instance will either have the donor's items added to it or not. If they are added, this will
                # have items, obviously. If the instance has its own items, the donor's items will already be here
                # as well.
                self._filter_computed(
                    getattr(self, resolved_attribute).filter(**params),
                    **q_kwargs),
                # If none exist get donor's
                lambda: self.donor()._computed(attribute, **q_kwargs)
            )
            #if self.donor().key=='layer_library__default':
                #name = self.donor().name
                #print '1 %s: %s' % (self.name, ', '.join(map(lambda x: x.db_entity_key, self._filter_computed(getattr(self, resolved_attribute).filter(**params), **q_kwargs))))
                #print '2 %s: %s' % (name, ', '.join(map(lambda x: x.db_entity_key, self.donor()._computed(attribute, **q_kwargs))))
                #print '3 %s: %s' % (self.name, ', '.join(map(lambda x: x.db_entity_key, results)))
            return results
        else:
            # No donor is defined so just consider this instance's items
            return self._filter_computed(getattr(self, resolved_attribute).filter(**params), **q_kwargs)
 def resolve_field(meta):
     type = meta['type']
     rest = merge(
         filter_dict(
             # Don't allow default='SEQUENCE'
             lambda key, value: not (key == 'default' and value ==
                                     'SEQUENCE'),
             # Ignore these keys
             remove_keys(meta, [
                 'type', 'auto_populate', 'visible', 'geometry_type',
                 'nullable'
             ])),
         dict(null=True))
     if type == 'string':
         return models.CharField(**rest)
     elif type == 'integer':
         return models.IntegerField(**rest)
     elif type == 'float':
         return models.FloatField(**rest)
     elif type == 'biginteger':
         return models.BigIntegerField(**rest)
     elif type == 'geometry':
         return models.GeometryField(geography=False, **rest)
     elif type == 'date':
         return models.DateField(**rest)
     elif type == 'datetime':
         return models.DateTimeField(**rest)
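
A hedged usage sketch for resolve_field: assuming the meta keys that survive the stripping above are ordinary Django field kwargs, a 'string' column description becomes a nullable CharField. The meta dict below is invented for illustration.

# Invented input; only 'type' and the stripped bookkeeping keys come from the code above.
meta = {'type': 'string', 'max_length': 64, 'nullable': True, 'visible': False}
# resolve_field(meta) would return roughly models.CharField(max_length=64, null=True),
# since 'type', 'visible' and 'nullable' are removed and null=True is merged in.
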
Example #6
 def dehydrate(self, bundle):
     # Use Resource's innate capability to add the schema of each field
     # to the results. Filter out meta fields and other things that the front-end doesn't need
     bundle.data['schemas'] = remove_keys(self.build_schema()['fields'], [
         'feature_field_title_lookup', 'feature_fields', 'resource_uri',
         'config_entity', 'db_entity', 'the_unique_id'
     ])
     return super(TemplateFeatureResource, self).dehydrate(bundle)
Example #7
    def analysis_module_configuration(cls, config_entity, **kwargs):
        if not config_entity:
            return cls.abstract_analysis_module_configuration(**kwargs)

        configuration = merge(remove_keys(kwargs, ['class_scope']),
                              dict(generated=False))

        return AnalysisModuleConfiguration(configuration)
Example #8
 def __init__(self, *args, **kwargs):
     super(Behavior, self).__init__(*args, **remove_keys(kwargs, ['parents', 'tags', 'template_feature_behavior']))
     # Put toManys in the holding tank until save time
     self._parents = kwargs.get('parents', [])
     self._tags = kwargs.get('tags', [])
     # This is saved in the other direction
     self._template_feature_behavior = kwargs.get('template_feature_behavior', None)
     self._intersection = kwargs.get('intersection', None)
Example #9
 def dehydrate(self, bundle):
     # Use Resource's innate capability to add the schema of each field
     # to the results. Filter out meta fields and other things that the front-end doesn't need
     bundle.data['schemas'] = remove_keys(
         self.build_schema()['fields'],
         ['feature_field_title_lookup', 'feature_fields', 'resource_uri', 'config_entity', 'db_entity', 'the_unique_id']
     )
     return super(TemplateFeatureResource, self).dehydrate(bundle)
Example #10
def to_raw_data(dct):
    model = dct['model']
    pk = dct['pk']
    raw_data = { }
    raw_data['model'] = model
    raw_data['pk'] = pk
    # The original line recursed and passed the built-in `dict` rather than `dct`, which would fail;
    # the remaining keys are presumably meant to become the 'fields' dict directly.
    raw_data['fields'] = remove_keys(dct, ['model', 'pk'])
    return raw_data
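
With the fix above, a hedged usage sketch on an invented Django-style serialized row:

# Invented input for illustration
row = {'model': 'myapp.region', 'pk': 3, 'key': 'r1', 'name': 'Region One'}
# to_raw_data(row)
# -> {'model': 'myapp.region', 'pk': 3, 'fields': {'key': 'r1', 'name': 'Region One'}}
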
Example #11
    def construct_primary_components(self, client='default'):
        """
        :return: Dictionary keyed by Building name and valued by Building objects (UrbanFootprint v0.1 Built
        Form default set)
        """
        primary_components = {}
        for import_primary_component in self.load_crops_csv(client):
            fields = AgricultureAttributeSet._meta.fields
            agriculture_attribute_set = remove_keys(
                map_to_dict(
                    lambda field: [
                        field.attname,
                        getattr(import_primary_component, field.attname)
                    ], fields), ['id'])
            agriculture_attribute_set['name'] = import_primary_component.name
            if import_primary_component.name in primary_components:
                raise Exception("Duplicate entry for primary component: " +
                                import_primary_component.name)
            primary_components[import_primary_component.name] = dict(
                agriculture_attribute_set=agriculture_attribute_set)

        for import_primary_component in self.load_buildings_csv(client):
            building_attribute_set = dict(
                name=import_primary_component.name,
                address=import_primary_component.address,
                website=import_primary_component.website,
                lot_size_square_feet=import_primary_component.lot_size_square_feet,
                floors=import_primary_component.floors,
                total_far=import_primary_component.total_far,
                average_parking_space_square_feet=import_primary_component.average_parking_space_square_feet,
                surface_parking_spaces=import_primary_component.surface_parking_spaces,
                below_ground_structured_parking_spaces=import_primary_component.below_ground_parking_spaces,
                above_ground_structured_parking_spaces=import_primary_component.above_ground_parking_spaces,
                building_footprint_square_feet=import_primary_component.building_footprint_square_feet,
                surface_parking_square_feet=import_primary_component.surface_parking_square_feet,
                hardscape_other_square_feet=import_primary_component.hardscape_other_square_feet,
                irrigated_softscape_square_feet=import_primary_component.irrigated_softscape_square_feet,
                nonirrigated_softscape_square_feet=import_primary_component.nonirrigated_softscape_square_feet,
                irrigated_percent=import_primary_component.irrigated_percent,
                vacancy_rate=import_primary_component.vacancy_rate,
                household_size=import_primary_component.household_size)
            if import_primary_component.name in primary_components:
                raise Exception("Duplicate entry for primary component: " +
                                import_primary_component.name)
            primary_components[import_primary_component.name] = dict(
                building_attribute_set=building_attribute_set)

        return primary_components
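
In the crops loop above, map_to_dict appears to build a dict from the [key, value] pairs returned by the mapping function. A minimal sketch under that assumption (not the project's implementation):

# Assumed behavior only: build a dict from [key, value] pairs produced by func.
def map_to_dict_sketch(func, items):
    return dict(func(item) for item in items)

# map_to_dict_sketch(lambda field: [field.attname, getattr(record, field.attname)], fields)
# -> {'field_a': value_a, 'field_b': value_b, ...}, which remove_keys then strips of 'id'.
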
Example #12
 def update_layer_of_scenario(scenario):
     logger.info("Updating layer of db_entity_key %s, Scenario %s" % (db_entity_key, scenario.name))
     db_entity_interest = DbEntityInterest.objects.get(config_entity=scenario, db_entity__key=db_entity_key)
     return Layer.objects.update_or_create(
         db_entity_interest=db_entity_interest,
         defaults=merge(
             remove_keys(model_dict(template_layer), ['db_entity_key']),
         )
     )[0]
Example #13
    def lookup_kwargs_with_identifiers(self, bundle, kwargs):
        """
            Override to remove feature_behavior from the lookup_kwargs,
            since it is actually defined in reverse--feature_behavior has a db_entity
        """

        return remove_keys(
            super(DbEntityResource, self).lookup_kwargs_with_identifiers(bundle, kwargs),
            ['feature_behavior'])
Example #14
 def __init__(self, *args, **kwargs):
     super(FeatureBehavior, self).__init__(
         *args,
         **remove_keys(kwargs, ['tags', 'attribute_group_configurations']))
     self._tags = self._tags or []
     self._tags.extend(kwargs.get('tags', []))
     self._attribute_group_configurations = kwargs.get(
         'attribute_group_configurations'
     ) or self._attribute_group_configurations or []
Example #15
    def lookup_kwargs_with_identifiers(self, bundle, kwargs):
        """
            Override to remove feature_behavior from the lookup_kwargs,
            since it is actually defined in reverse--feature_behavior has a db_entity
        """

        return remove_keys(
            super(DbEntityResource, self).lookup_kwargs_with_identifiers(bundle, kwargs),
            ['feature_behavior'])
Example #16
    def analysis_module_configuration(cls, config_entity, **kwargs):
        if not config_entity:
            return cls.abstract_analysis_module_configuration(**kwargs)

        configuration = merge(
            remove_keys(kwargs, ['class_scope']),
            dict(generated=False))

        return AnalysisModuleConfiguration(configuration)
Example #17
 def __init__(self, **kwargs):
     """
         kwargs are the attributes of the class. None are required
     """
     abstract_class_name = full_module_path(kwargs['abstract_class']) if \
                             kwargs.get('abstract_class') else \
                             kwargs.get('abstract_class_name', full_module_path(Feature))
     updated_kwargs = merge(remove_keys(kwargs, ['abstract_class']),
                            dict(abstract_class_name=abstract_class_name))
     self.__dict__.update(**updated_kwargs)
Example #18
 def update_layer_of_scenario(scenario):
     logger.info("Updating layer of db_entity_key %s, Scenario %s" %
                 (db_entity_key, scenario.name))
     db_entity_interest = DbEntityInterest.objects.get(
         config_entity=scenario, db_entity__key=db_entity_key)
     return Layer.objects.update_or_create(
         db_entity_interest=db_entity_interest,
         defaults=merge(
             remove_keys(model_dict(template_layer),
                         ['db_entity_key']), ))[0]
Example #19
    def abstract_analysis_module_configuration(cls, **kwargs):
        """
            Abstract version of the configuration for use when no ConfigEntity is specified
        """

        configuration = merge(
            remove_keys(kwargs, ['class_scope']),
            dict(class_attrs={'key': kwargs['key']}, generated=False))

        return AnalysisModuleConfiguration(configuration)
Example #20
 def __init__(self, **kwargs):
     """
         kwargs are the attributes of the class. None are required
     """
     abstract_class_name = (
         full_module_path(kwargs["abstract_class"])
         if kwargs.get("abstract_class")
         else kwargs.get("abstract_class_name", full_module_path(Feature))
     )
     updated_kwargs = merge(remove_keys(kwargs, ["abstract_class"]), dict(abstract_class_name=abstract_class_name))
     self.__dict__.update(**updated_kwargs)
Example #21
    def update_or_create_region(region_dict):
        if kwargs.get('limit_to_classes') and Region not in kwargs['limit_to_classes']:
            if Region.objects.filter(key=region_dict['key']).count() != 1:
                raise Exception(
                    "Trying to get Region %s, which hasn't been created" %
                    region_dict['key'])
            region_tuple = Region.objects.get(
                key=region_dict['key']), False, False
        else:
            region_tuple = Region.objects.update_or_create(
                key=region_dict['key'],
                defaults=merge(
                    dict(
                        behavior=get_behavior('default_config_entity'),
                    ),
                    remove_keys(region_dict, ['key', 'media']),
                    dict(
                        creator=User.objects.get(username=UserGroupKey.SUPERADMIN),
                        parent_config_entity=global_config_singleton() if \
                            region_dict['key'] == settings.CLIENT else \
                            update_or_create_region(dict(key=settings.CLIENT, name=settings.CLIENT_NAME))[0]
            )))

        logger.info("{update_or_create} Region {config_entity}".format(
            update_or_create='Created' if region_tuple[1] else 'Updated',
            config_entity=region_tuple[0]))

        media = map(
            lambda medium_config: Medium.objects.update_or_create(
                key=medium_config.key,
                defaults=remove_keys(medium_config.__dict__['kwargs'], 'key'))[0],
            region_dict.get('media', []))

        existing_media = region_tuple[0].media.filter(
            id__in=map(lambda medium: medium.id, media))
        media_to_add = set(media) - set(existing_media)
        if len(media_to_add) > 0:
            region_tuple[0].media.add(*media_to_add)
        return region_tuple
Example #22
 def dehydrate(self, bundle):
     #Filter out meta fields and other things that the front-end doesn't need
     bundle.data = remove_keys(
         merge(
             bundle.data,
             FeatureQuantitativeAttribute(bundle.obj.db_entity,
                                          bundle.obj.attribute).__dict__),
         [
             'db_entity', 'feature_class', 'config_entity', 'updated',
             'updater', 'year'
         ])
     return super(FeatureAttributeResource, self).dehydrate(bundle)
Example #23
    def abstract_analysis_module_configuration(cls, **kwargs):
        """
            Abstract version of the configuration for use when no ConfigEntity is specified
        """

        configuration = merge(
            remove_keys(kwargs, ['class_scope']),
            dict(
                class_attrs={'key': kwargs['key']},
                generated=False))

        return AnalysisModuleConfiguration(configuration)
Example #24
    def __init__(self, configuration):

        self.abstract_class_name = full_module_path(configuration['abstract_class']) if \
                                configuration.get('abstract_class') else \
                                configuration.get('abstract_class_name', full_module_path(AnalysisModule))
        self.configuration = remove_keys(configuration, ['key', 'name', 'description'])
        self.name = configuration.get('name')
        self.description = configuration.get('description')
        self.partner_description = configuration.get('partner_description')
        self.key = configuration.get('key')
        self.analysis_tools = configuration.get('analysis_tools', [])
        super(AnalysisModuleConfiguration, self).__init__()
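
A hedged sketch of the configuration dict this constructor expects; the key names come from the attribute accesses above, while the values are invented for illustration.

# Illustrative values only; everything except key/name/description stays on .configuration.
configuration = dict(
    key='energy',
    name='Energy Module',
    description='Computes energy use',
    abstract_class_name=full_module_path(AnalysisModule),  # the default used above
    analysis_tools=[])
# AnalysisModuleConfiguration(configuration).configuration
# -> the dict above minus 'key', 'name' and 'description'
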
Example #25
def cond_deep_flat_map_iterable(iterable):
    if (type(iterable) == dict):
        # Flatten the dictionary of the fields key if it exists
        flattened_dict = remove_keys(
            merge(iterable, iterable.get('fields', {})),
            ['fields']
        )
        # Map each key value, recursing into the value if it's a dict or iterable
        return dict([(key, cond_deep_flat_map_iterable(value) if is_list_tuple_or_dict(value) else value)
            for key, value in flattened_dict.iteritems()])
    else:
        return map(lambda item: cond_deep_flat_map_iterable(item) if is_list_tuple_or_dict(item) else item, iterable)
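
A hedged trace of cond_deep_flat_map_iterable on an invented record, showing the 'fields' dict being folded into the top level at each nesting depth:

# Invented input for illustration
record = {'pk': 1, 'fields': {'name': 'Region A', 'tags': [{'fields': {'key': 't1'}}]}}
# cond_deep_flat_map_iterable(record)
# -> {'pk': 1, 'name': 'Region A', 'tags': [{'key': 't1'}]}
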
Example #26
def resolve_fixture(module,
                    module_fragment,
                    fixture_class,
                    schema=settings.CLIENT,
                    *args,
                    **kwargs):
    """
        Resolves an optional client-specific class located by the module_string relative to the module
        "client.configuration.[schema]" where schema is the matching ConfigEntity.schema().
        As of now the settings.CLIENT is equivalent to a region schema, so this is the default value of schema
    :param module: The directory name under the [schema] directory. Example: module='built_form' and
     module_fragment='built_form' will resolve to the module built_form.[schema].built_form. If module is set
     to None, then a top-level module will be returned. Example: module=None and module_fragment='init' will resolve
     to module '[schema]_init'.
    :param module_fragment: The non-schema fragment part of the module name (e.g. [schema]_config_entity => 'config_entity')
    :param fixture_class: The class to look up in the module. Any subclass will match as well
    :param client: Optional client. Defaults to settings.CLIENT
    :param schema: Optional config_entity schema string to narrow in on a more specific fixture class
    :param args: Optional args to pass to the fixture class constructor
    :param kwargs: Optional kwargs to pass to the fixture class constructor. Two are consumed here:
     log_traceroute=True can be used to log the fixture resolution for debugging, and
     no_parent_search (default False), which, if True, skips the parent fixture search and returns None
     if no exact match is found for the given schema
    :return: Returns a matching subclass if one exists, otherwise the default version
    """
    if schema:
        try:
            if kwargs.get('log_traceroute'):
                logger.warn("Searching for module %s, module_fragment %s, fixture class %s for schema %s" %\
                            (module, module_fragment, fixture_class, schema))
            client_fixture_module = resolve_client_module(
                module, module_fragment, schema)
            class_members = inspect.getmembers(
                sys.modules[client_fixture_module.__name__], inspect.isclass)
            for name, cls in class_members:
                if issubclass(cls, fixture_class) and cls != fixture_class:
                    if kwargs.get('log_traceroute'):
                        logger.warn("Found matching class %s" % cls)
                    return cls(
                        schema, *args,
                        **remove_keys(kwargs,
                                      ['log_traceroute', 'no_parent_search']))
        except ImportError, e:
            if kwargs.get('log_traceroute'):
                logger.warn("%s" % e.message)
            # If nothing is found the default is returned below. The second two clauses allow module packages to be absent
            if not e.message == 'No module named %s' % form_module_name(module, module_fragment, schema) and \
               not e.message == 'No module named %s' % '.'.join(form_module_name(module, module_fragment, schema).split('.')[1:]) and \
               not e.message == 'No module named %s' % form_module_name(module, module_fragment, schema).split('.')[-1]:
                raise e
            if kwargs.get('no_parent_search'):
                # Give up, only an exact schema match is desired
                return None
Example #27
 def test_remove_keys(self):
     x = dict(foo=dict(bar=dict(car=1)))
     assert_equal(len(remove_keys(x, [])), 1)
     assert_equal(len(remove_keys(x, ['foo'])), 0)
     # Use a two-segment string
     assert_equal(len(remove_keys(x, ['foo.bar'])), 1) # foo remains
     assert_equal(len(remove_keys(x, ['foo.bar'])['foo']), 0) # bar does not
     # Use a *
     assert_equal(len(remove_keys(x, ['foo.*.car'])), 1) # foo remains
     assert_equal(len(remove_keys(x, ['foo.*.car'])['foo']), 1) # bar remains
     assert_equal(len(remove_keys(x, ['foo.*.car'])['foo']['bar']), 0) # car does not
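
The assertions above pin down the dotted-path and wildcard semantics of remove_keys. The following is only a minimal sketch consistent with those assertions, not the project's implementation.

# Sketch only: recursive key removal supporting 'a.b' and 'a.*.b' paths,
# inferred from the test assertions above.
def remove_keys_sketch(dct, keys):
    keys = [keys] if isinstance(keys, basestring) else keys  # some callers pass a single string
    result = {}
    for key, value in dct.items():
        subpaths, removed = [], False
        for path in keys:
            head, _, rest = path.partition('.')
            if head in (key, '*'):
                if rest:
                    subpaths.append(rest)   # descend into the nested dict
                else:
                    removed = True          # exact match: drop this key entirely
        if removed:
            continue
        if subpaths and isinstance(value, dict):
            result[key] = remove_keys_sketch(value, subpaths)
        else:
            result[key] = value
    return result
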
Example #28
 def init(self):
     for analysis_tool_configuration in self.analysis_module_configuration.analysis_tools:
         analysis_tool_class = resolve_module_attr(analysis_tool_configuration.get('class_name'))
         analysis_tool_class._no_post_save_publishing = True
         analysis_tool, created, updated = analysis_tool_class.objects.update_or_create(
             config_entity=self.config_entity,
             key=analysis_tool_configuration['key'],
             defaults=remove_keys(analysis_tool_configuration, ['key', 'class_name'])
         )
         analysis_tool.initialize(created)
         analysis_tool_class._no_post_save_publishing = False
         if not analysis_tool in self.analysis_tools.all():
             self.analysis_tools.add(analysis_tool)
Example #29
    def __init__(self, configuration):

        self.abstract_class_name = full_module_path(configuration['abstract_class']) if \
                                configuration.get('abstract_class') else \
                                configuration.get('abstract_class_name', full_module_path(AnalysisModule))
        self.configuration = remove_keys(configuration,
                                         ['key', 'name', 'description'])
        self.name = configuration.get('name')
        self.description = configuration.get('description')
        self.partner_description = configuration.get('partner_description')
        self.key = configuration.get('key')
        self.analysis_tools = configuration.get('analysis_tools', [])
        super(AnalysisModuleConfiguration, self).__init__()
Example #30
    def update_or_create_project(project_dict):
        if kwargs.get('limit_to_classes') and Project not in kwargs['limit_to_classes']:
            if Project.objects.filter(key=project_dict['key']).count() != 1:
                raise Exception(
                    "Trying to get Project %s, which hasn't been created" %
                    project_dict['key'])
            project_tuple = Project.objects.get(
                key=project_dict['key']), False, False
        else:
            project_tuple = Project.objects.update_or_create(
                key=project_dict['key'],
                defaults=merge(
                    dict(behavior=get_behavior('default_config_entity'), ),
                    remove_keys(project_dict,
                                ['key', 'base_table', 'region_key', 'media']),
                    dict(parent_config_entity=regions_by_key[
                        project_dict['region_key']],
                         creator=User.objects.get(
                             username=UserGroupKey.SUPERADMIN))))

        logger.info("{update_or_create} Project {config_entity}".format(
            update_or_create='Created' if project_tuple[1] else 'Updated',
            config_entity=project_tuple[0]))

        media = map(
            lambda medium_config: Medium.objects.update_or_create(
                key=medium_config.key,
                defaults=remove_keys(medium_config.__dict__['kwargs'], 'key'))[0],
            project_dict.get('media', []))

        existing_media = project_tuple[0].media.filter(
            id__in=map(lambda medium: medium.id, media))
        media_to_add = set(media) - set(existing_media)
        if len(media_to_add) > 0:
            project_tuple[0].media.add(*media_to_add)
        return project_tuple
Example #31
 def init(self):
     for analysis_tool_configuration in self.analysis_module_configuration.analysis_tools:
         analysis_tool_class = resolve_module_attr(
             analysis_tool_configuration.get('class_name'))
         analysis_tool_class._no_post_save_publishing = True
         analysis_tool, created, updated = analysis_tool_class.objects.update_or_create(
             config_entity=self.config_entity,
             key=analysis_tool_configuration['key'],
             defaults=remove_keys(analysis_tool_configuration,
                                  ['key', 'class_name']))
         analysis_tool.initialize(created)
         analysis_tool_class._no_post_save_publishing = False
         if not analysis_tool in self.analysis_tools.all():
             self.analysis_tools.add(analysis_tool)
Example #32
 def matching_scope(self, delete_scope_keys=False, **kwargs):
     """
         Filters fixtures by given matching_dict. All keys specified in the matching_dict that are present
         in the fixture must match using isSubclass, where the matching_dict value must be a subclass of the
         fixture's value.
     :param delete_scope_keys: False by default, if true deletes any key specified in kwargs from the fixtures
     after testing the kwarg value against the fixture
     :param kwargs key/values whose values are classes.
     :return:
     """
     kwarg_keys = kwargs.keys()
     return self.__class__(map(lambda fixture: remove_keys(fixture, kwarg_keys) if delete_scope_keys else fixture,
                               filter(lambda fixtures: all_existing_classes_subclass(fixtures, **kwargs),
                                      self.fixtures)))
Example #33
 def __init__(self, *args, **kwargs):
     # Remove any kwargs specific to this class before calling super
     new_kwargs = merge(
         remove_keys(kwargs, ["config_entities", "parent_config_entity"]),
         {"parent_config_entity": kwargs.get("parent_config_entity", global_config_singleton())},
     )
     super(CombinedConfigEntity, self).__init__(*args, **new_kwargs)
     # Set the parent_config_entity, which will function as the parent, if needed. Also reference the config
     # entities whose combined data will make up this instance's ConfigEntity properties
     if kwargs["parent_config_entity"]:
         self.config_entities = kwargs["parent_config_entity"].children()
     elif "config_entities" in kwargs:
         # Just default to the GlobalConfig singleton. This could be smarter and traverse up all the config_entity
         # parents to find the common one.
         self.config_entities = kwargs["config_entities"]
Example #34
 def test_remove_keys(self):
     x = dict(foo=dict(bar=dict(car=1)))
     assert_equal(len(remove_keys(x, [])), 1)
     assert_equal(len(remove_keys(x, ['foo'])), 0)
     # Use a two-segment string
     assert_equal(len(remove_keys(x, ['foo.bar'])), 1)  # foo remains
     assert_equal(len(remove_keys(x, ['foo.bar'])['foo']),
                  0)  # bar does not
     # Use a *
     assert_equal(len(remove_keys(x, ['foo.*.car'])), 1)  # foo remains
     assert_equal(len(remove_keys(x, ['foo.*.car'])['foo']),
                  1)  # bar remains
     assert_equal(len(remove_keys(x, ['foo.*.car'])['foo']['bar']),
                  0)  # car does not
Example #35
 def __init__(self, *args, **kwargs):
     # Remove any kwargs specific to this class before calling super
     new_kwargs = merge(
         remove_keys(
             kwargs,
             ['config_entities', 'parent_config_entity']),
         {'parent_config_entity': kwargs.get('parent_config_entity', global_config_singleton())})
     super(CombinedConfigEntity, self).__init__(*args, **new_kwargs)
     # Set the parent_config_entity, which will function as the parent, if needed. Also reference the config
     # entities whose combined data will make up this instance's ConfigEntity properties
     if kwargs['parent_config_entity']:
         self.config_entities = kwargs['parent_config_entity'].children()
     elif 'config_entities' in kwargs:
         # Just default to the GlobalConfig singleton. This could be smarter and traverse up all the config_entity
         # parents to find the common one.
         self.config_entities = kwargs['config_entities']
Example #36
def on_feature_post_save_analysis_modules(sender, **kwargs):
    """
        Delegate to the DbEntity post save
    """
    logger.info("Handler: on_feature_post_save_analysis_modules")
    features = kwargs['instance']
    config_entity = features[0].config_entity
    db_entity_interest = config_entity.computed_db_entity_interests(db_entity__key=features[0].db_entity_key)[0]
    # Pretend that we are a DbEntityInterest calling one of its post-save publishing methods
    # features are a parameter used only by this caller
    on_db_entity_post_save_analysis_modules(
        sender,
        instance=db_entity_interest,
        features=features,
        **remove_keys(kwargs, ['instance'])
    )
Example #39
def update_or_create_built_form_set(built_form_set_config, built_forms):
    filtered_built_form_set_dict = remove_keys(built_form_set_config, ['clazz', 'keys', 'client', 'scope', 'attribute'])
    built_form_set, created, updated = BuiltFormSet.objects.update_or_create(
        **dict(
            key=built_form_set_config['key'],
            defaults=dict(**filtered_built_form_set_dict)
        )
    )
    if not created:
        for key, value in filtered_built_form_set_dict.items():
            setattr(built_form_set, key, value)
        built_form_set.save()

    existing_built_forms = built_form_set.built_forms.all()

    # Filters for narrowing which built_forms belong in this set (by class, attribute, or client)
    class_filter = lambda built_form: \
        built_form not in existing_built_forms and isinstance(built_form, built_form_set_config['clazz'])

    attribute_filter = lambda built_form: \
        built_form not in existing_built_forms and getattr(built_form, built_form_set_config['attribute'], None)

    importer = BuiltFormImporter()

    built_forms_for_set = built_forms

    if built_form_set_config['clazz']:
        built_forms_for_set = filter(class_filter, built_forms_for_set)

    if built_form_set_config['attribute']:
        built_forms_for_set = filter(attribute_filter, built_forms_for_set)

    if built_form_set_config['client']:
        client = built_form_set_config['client']
        client_built_form_names = [bf.name for bf in importer.load_buildings_csv(client)] + \
            [bf.name for bf in importer.load_buildingtype_csv(client)] + \
            [bf.name for bf in importer.load_placetype_csv(client)]

        client_filter = lambda built_form: \
            built_form not in existing_built_forms and \
            (not client_built_form_names or getattr(built_form, 'name', None) in client_built_form_names)
        built_forms_for_set = filter(client_filter, built_forms_for_set)

    built_form_set.built_forms.add(*built_forms_for_set)
    return built_form_set
Example #40
def update_or_create_analysis_modules(config_entity, **kwargs):
    """
        Creates a results library and Result instances upon saving a config_entity if they do not yet exist.
    :param config_entity
    :return:
    """
    from footprint.client.configuration.fixture import AnalysisModuleFixture
    from footprint.client.configuration import resolve_fixture
    analysis_module_fixture = resolve_fixture("analysis_module",
                                              "analysis_module",
                                              AnalysisModuleFixture,
                                              config_entity.schema(),
                                              config_entity=config_entity)

    for analysis_module_configuration in analysis_module_fixture.default_analysis_module_configurations():
        # Create the table the first time
        analysis_module, created, updated = AnalysisModule.objects.update_or_create(
            config_entity=config_entity,
            key=analysis_module_configuration.key,
            defaults=dict(
                name=analysis_module_configuration.name,
                description=analysis_module_configuration.description,
                partner_description=analysis_module_configuration.partner_description,
                configuration=remove_keys(
                    analysis_module_configuration.configuration,
                    ['key', 'name', 'description'])))

        # Update the updater field to the user calling the module, or default to superadmin
        if not analysis_module.updater:
            analysis_module.updater = kwargs.get(
                'user',
                get_user_model().objects.get(username=UserGroupKey.SUPERADMIN))
        # For the first run make the creator the updater
        if not analysis_module.creator:
            analysis_module.creator = analysis_module.updater
        # update_or_create will kick off the run for updates.
        # don't let it run here
        previous = analysis_module._no_post_save_task_run
        analysis_module._no_post_save_task_run = True
        analysis_module.save()
        analysis_module._no_post_save_task_run = previous
        analysis_module.init()
Example #41
    def construct_primary_components(self, client='default'):
        """
        :return: Dictionary keyed by Building name and valued by Building objects (UrbanFootprint v0.1 Built
        Form default set)
        """
        primary_components = {}
        for import_primary_component in self.load_crops_csv(client):
            fields = AgricultureAttributeSet._meta.fields
            agriculture_attribute_set = remove_keys(map_to_dict(
                lambda field: [field.attname, getattr(import_primary_component, field.attname)],
                fields), ['id'])
            agriculture_attribute_set['name'] = import_primary_component.name
            if import_primary_component.name in primary_components:
                raise Exception("Duplicate entry for primary component: " + import_primary_component.name)
            primary_components[import_primary_component.name] = dict(agriculture_attribute_set=agriculture_attribute_set)

        for import_primary_component in self.load_buildings_csv(client):
            building_attribute_set = dict(
                name=import_primary_component.name,
                address=import_primary_component.address,
                website=import_primary_component.website,
                lot_size_square_feet=import_primary_component.lot_size_square_feet,
                floors=import_primary_component.floors,
                total_far=import_primary_component.total_far,
                average_parking_space_square_feet=import_primary_component.average_parking_space_square_feet,
                surface_parking_spaces=import_primary_component.surface_parking_spaces,
                below_ground_structured_parking_spaces=import_primary_component.below_ground_parking_spaces,
                above_ground_structured_parking_spaces=import_primary_component.above_ground_parking_spaces,
                building_footprint_square_feet=import_primary_component.building_footprint_square_feet,
                surface_parking_square_feet=import_primary_component.surface_parking_square_feet,
                hardscape_other_square_feet=import_primary_component.hardscape_other_square_feet,
                irrigated_softscape_square_feet=import_primary_component.irrigated_softscape_square_feet,
                nonirrigated_softscape_square_feet=import_primary_component.nonirrigated_softscape_square_feet,
                irrigated_percent=import_primary_component.irrigated_percent,
                vacancy_rate=import_primary_component.vacancy_rate,
                household_size=import_primary_component.household_size
            )
            if import_primary_component.name in primary_components:
                raise Exception("Duplicate entry for primary component: " + import_primary_component.name)
            primary_components[import_primary_component.name] = dict(building_attribute_set=building_attribute_set)

        return primary_components
Example #42
def scenarios_per_project(project, scenario_fixtures, **kwargs):

    # Create the Scenarios from the fixtures
    # The fixtures are dict keyed by the Scenario subclass (BaseScenario and FutureScenario) with a list of
    # Scenario fixtures for each
    scenarios_created_updated = map(
        lambda scenario_fixture: scenario_fixture['class_scope'].objects.update_or_create(
            key=scenario_fixture['key'],
            defaults=merge(
                dict(behavior=get_behavior('default_config_entity')),
                remove_keys(scenario_fixture, [
                    'class_scope', 'key', 'project_key', 'categories', 'year'
                ]),
                dict(parent_config_entity=project,
                     year=scenario_fixture.get('year', project.base_year),
                     creator=User.objects.get(username=UserGroupKey.SUPERADMIN)
                     ))),
        # If kwargs['limit_to_classes'] is specified, only do Scenario subclasses that match it, if any
        filter(
            lambda scenario_fixture: scenario_fixture['class_scope'] in kwargs.
            get('limit_to_classes', [scenario_fixture['class_scope']]
                ) or [scenario_fixture['class_scope']], scenario_fixtures))

    for scenario_tuple in scenarios_created_updated:
        logger.info("{update_or_create} Scenario {config_entity}".format(
            update_or_create='Created' if scenario_tuple[1] else 'Updated',
            config_entity=scenario_tuple[0]))

    # Apply the categories, and other simple many-to-many attributes as needed
    for i, scenario_dict in enumerate(scenario_fixtures):
        for category in scenario_dict.get('categories', []):
            category, created, updated = Category.objects.update_or_create(
                key=category.key, value=category.value)
            scenario = scenarios_created_updated[i][0]
            scenario.add_categories(category)
            scenario._no_post_save_publishing = True
            scenario.save()
            scenario._no_post_save_publishing = False

    return map(lambda scenario_created_updated: scenario_created_updated[0],
               scenarios_created_updated)
def update_or_create_analysis_modules(config_entity, **kwargs):
    """
        Creates a results library and Result instances upon saving a config_entity if they do not yet exist.
    :param config_entity
    :return:
    """
    from footprint.client.configuration.fixture import AnalysisModuleFixture
    from footprint.client.configuration import resolve_fixture
    analysis_module_fixture = resolve_fixture(
        "analysis_module",
        "analysis_module",
        AnalysisModuleFixture,
        config_entity.schema(),
        config_entity=config_entity)

    for analysis_module_configuration in analysis_module_fixture.default_analysis_module_configurations():
        # Create the table the first time
        analysis_module, created, updated = AnalysisModule.objects.update_or_create(
            config_entity=config_entity,
            key=analysis_module_configuration.key,
            defaults=dict(
                name=analysis_module_configuration.name,
                description=analysis_module_configuration.description,
                partner_description=analysis_module_configuration.partner_description,
                configuration=remove_keys(analysis_module_configuration.configuration, ['key', 'name', 'description']))
        )

        # Update the updater field to the user calling the module, or default to superadmin
        if not analysis_module.updater:
            analysis_module.updater = kwargs.get('user', get_user_model().objects.get(username=UserGroupKey.SUPERADMIN))
        # For the first run make the creator the updater
        if not analysis_module.creator:
            analysis_module.creator = analysis_module.updater
        # update_or_create will kick off the run for updates.
        # don't let it run here
        previous = analysis_module._no_post_save_task_run
        analysis_module._no_post_save_task_run = True
        analysis_module.save()
        analysis_module._no_post_save_task_run = previous
        analysis_module.init()
Example #44
    def subclass_resource_if_needed(self, view, request):
        """
            Overrides the FootprintResource method to perform subclassing of the resource based on the request params
        :param view:
        :param request:
        :return:
        """
        params = request.GET
        # TODO cache dynamic class creation results
        # Create the dynamic resource class
        dynamic_resource_class = self.create_subclass(params,
                                                      method=request.method)
        # Dynamic model classes always have a config_entity. In the case
        # where the model class is not dynamic (e.g. ClientLandUseDefinitions subclasses),
        # we expect the config_entity__id to be sent with the request, so we thusly resolve the config_entity
        config_entity = dynamic_resource_class._meta.queryset.model.config_entity if\
            hasattr(dynamic_resource_class._meta.queryset.model, 'config_entity') else\
            self.resolve_config_entity(request.GET)

        # This might not be needed anymore, but it indicates what other dynamic classes were created so that
        # permissions can be added for them
        additional_classes_used = []
        # We add permissions to the current user so they can access these dynamic classes if this is the user's first access
        # TODO permissions would ideally be done ahead of time, or if we could automatically give the user full access to all
        # subclasses of a certain type, but I don't see how to do that in the Django docs. This might be fixed in the latest Django version
        user = self.resolve_user(params)
        #logger.info("Adding permissions for user %s to dynamic_resource_class %s" % (user.username, dynamic_resource_class.__name__))
        self.add_permissions_to_user(
            user,
            self.get_or_create_permissions_for_class(dynamic_resource_class,
                                                     additional_classes_used,
                                                     config_entity))

        # Extract and add GET parameters
        request._config_entity = config_entity
        request._filters = remove_keys(
            merge(request.GET, self.search_params(params)),
            self.remove_params(params))

        return dynamic_resource_class().wrap_view(view)
def scenarios_per_project(project, scenario_fixtures, **kwargs):

    # Create the Scenarios from the fixtures
    # The fixtures are dict keyed by the Scenario subclass (BaseScenario and FutureScenario) with a list of
    # Scenario fixtures for each
    scenarios_created_updated = map(
        lambda scenario_fixture:
        scenario_fixture['class_scope'].objects.update_or_create(
            key=scenario_fixture['key'],
            defaults=merge(
                dict(behavior=get_behavior('default_config_entity')),
                remove_keys(scenario_fixture,
                            ['class_scope', 'key', 'project_key', 'categories', 'year']),
                dict(
                    parent_config_entity=project,
                    year=scenario_fixture.get('year', project.base_year),
                    creator=User.objects.get(username=UserGroupKey.SUPERADMIN))
           )),
        # If kwargs['limit_to_classes'] is specified, only do Scenario subclasses that match it, if any
        filter(lambda scenario_fixture:
               scenario_fixture['class_scope'] in kwargs.get('limit_to_classes', [scenario_fixture['class_scope']])
               or [scenario_fixture['class_scope']],
               scenario_fixtures))


    for scenario_tuple in scenarios_created_updated:
        logger.info("{update_or_create} Scenario {config_entity}".format(update_or_create='Created' if scenario_tuple[1] else 'Updated', config_entity=scenario_tuple[0]))

    # Apply the categories, and other simple many-to-many attributes as needed
    for i, scenario_dict in enumerate(scenario_fixtures):
        for category in scenario_dict.get('categories', []):
            category, created, updated = Category.objects.update_or_create(key=category.key, value=category.value)
            scenario = scenarios_created_updated[i][0]
            scenario.add_categories(category)
            scenario._no_post_save_publishing=True
            scenario.save()
            scenario._no_post_save_publishing=False

    return map(lambda scenario_created_updated: scenario_created_updated[0], scenarios_created_updated)
Example #46
 def resolve_field(meta):
     type = meta['type']
     rest = merge(filter_dict(
         # Don't allow default='SEQUENCE'
         lambda key, value: not (key=='default' and value=='SEQUENCE'),
         # Ignore these keys
         remove_keys(meta, ['type', 'auto_populate', 'visible', 'geometry_type', 'nullable'])
     ), dict(null=True))
     if type=='string':
         return models.CharField(**rest)
     elif type=='integer':
         return models.IntegerField(**rest)
     elif type=='float':
         return models.FloatField(**rest)
     elif type=='biginteger':
         return models.BigIntegerField(**rest)
     elif type=='geometry':
         return models.GeometryField(geography=False, **rest)
     elif type=='date':
         return models.DateField(**rest)
     elif type=='datetime':
         return models.DateTimeField(**rest)
 def _computed_related(self, attribute, **query_kwargs):
     """
         Like _computed, but returns the related item of the through class instances for attributes having a through class. Attributes without an explicit through class behave just like _computed()
     :param attribute: 'db_entities', etc. Not the through attribute name (e.g. dbentityinterest_set)
     :param query_kwargs: optional args to filter the results
     :return: The related class instances
     """
     modified_query_kwargs = remove_keys(query_kwargs, ['with_deleted'])
     deleted_kwargs = (dict(deleted=False) if not query_kwargs.get('with_deleted') else dict())
     if self.donor():
         return get_list_or_if_empty(
             # Get instance's own Many items
             self._filter_computed(
                 getattr(self, attribute).filter(**deleted_kwargs),
                 **modified_query_kwargs),
             # If none exist get donor's
             lambda: self.donor()._computed_related(attribute, **modified_query_kwargs))
     else:
         # No donor is defined so just consider this instance's items
         return self._filter_computed(
             getattr(self, attribute).filter(**deleted_kwargs),
             **modified_query_kwargs)
Example #48
def update_or_create_db_entity_and_interest(config_entity, config_db_entity):
    """
        Sync a single db_entity_configuration or db_entity and its db_entity_interest
        :return A tuple of the DbEntityInterest and the created flag
    """
    unique_key_combo = ['key', 'schema']

    db_entity, created, updated = DbEntity.objects.update_or_create(
        # key and schema uniquely identify the DbEntity
        key=config_db_entity.key,
        schema=config_db_entity.schema,
        defaults=remove_keys(model_dict(config_db_entity), unique_key_combo))

    db_entity.feature_behavior = config_db_entity.feature_behavior
    db_entity.save()

    logger.info("ConfigEntity/DbEntity Publishing. DbEntity: %s" %
                db_entity.full_name)

    # Create the DbEntityInterest through class instance which associates the ConfigEntity instance
    # to the DbEntity instance. For now the interest attribute is hard-coded to OWNER. This might
    # be used in the future to indicate other levels of interest
    interest = Interest.objects.get(key=Keys.INTEREST_OWNER)
    db_entity_interest, created, updated = DbEntityInterest.objects.update_or_create(
        config_entity=config_entity, db_entity=db_entity, interest=interest)

    # Update the geography scope after the db_entity_interest saves, since this is required to find 'owned' db_entities in a config_entity
    if not db_entity.no_feature_class_configuration:
        feature_class_creator = FeatureClassCreator(config_entity,
                                                    db_entity,
                                                    no_ensure=True)
        db_entity.feature_class_configuration.geography_scope = config_entity.id if db_entity.feature_class_configuration.primary_geography \
            else feature_class_creator.resolved_geography_scope.id

        db_entity.save()

    return db_entity_interest, created
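Note that stock Django's update_or_create returns a two-tuple (obj, created); the three-value unpacking in these examples implies a project-specific manager that also reports whether an existing row was updated. With the stock manager, the same lookup-plus-defaults pattern would read:

# Sketch only: the lookup fields identify the row, 'defaults' are applied whether it is created or updated.
db_entity, created = DbEntity.objects.update_or_create(
    key=config_db_entity.key,
    schema=config_db_entity.schema,
    defaults=remove_keys(model_dict(config_db_entity), unique_key_combo))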
Example No. 49
def update_or_create_db_entity_and_interest(config_entity, config_db_entity):
    """
        Sync a single db_entity_configuration or db_entity and its db_entity_interest
        :return: A tuple of the DbEntityInterest and the created flag
    """
    unique_key_combo = ['key', 'schema']

    db_entity, created, updated = DbEntity.objects.update_or_create(
        # key and schema uniquely identify the DbEntity
        key=config_db_entity.key,
        schema=config_db_entity.schema,
        defaults=remove_keys(model_dict(config_db_entity), unique_key_combo))

    db_entity.feature_behavior = config_db_entity.feature_behavior
    db_entity.save()

    logger.info("ConfigEntity/DbEntity Publishing. DbEntity: %s" % db_entity.full_name)

    # Create the DbEntityInterest through class instance which associates the ConfigEntity instance
    # to the DbEntity instance. For now the interest attribute is hard-coded to OWNER. This might
    # be used in the future to indicate other levels of interest
    interest = Interest.objects.get(key=Keys.INTEREST_OWNER)
    db_entity_interest, created, updated = DbEntityInterest.objects.update_or_create(
        config_entity=config_entity,
        db_entity=db_entity,
        interest=interest)

    # Update the geography scope after the db_entity_interest saves, since this is required to find 'owned' db_entities in a config_entity
    if not db_entity.no_feature_class_configuration:
        feature_class_creator = FeatureClassCreator(config_entity, db_entity, no_ensure=True)
        db_entity.feature_class_configuration.geography_scope = config_entity.id if db_entity.feature_class_configuration.primary_geography \
            else feature_class_creator.resolved_geography_scope.id

        db_entity.save()

    return db_entity_interest, created
Example No. 50
 def dehydrate_medium_context(self, bundle):
     # Remove data that isn't needed by the API. The original call was missing its dict
     # argument; the already-dehydrated medium_context in bundle.data is assumed to be the target.
     return remove_keys(bundle.data['medium_context'], ['attributes'])
Example No. 51
            logger.debug("Existing Intersection: %s" % model_dict(existing_intersection, include_primary_key=True))
            # TODO this seems problematic. The existing FeatureBehavior's tags should take precedence over the
            # Behavior's unless the former has no tags
            updated_existing_feature_behavior._tags = template_feature_behavior._tags
        else:
            # Get a new instance from the Behavior
            updated_existing_feature_behavior = behavior.feature_behavior_from_behavior_template()
        updated_existing_feature_behavior.set_defaults()

        # Intersection properties are defined on the Behavior and possibly extended or overridden on the FeatureBehavior
        # Every FeatureBehavior has its own Intersection instance so that we can customize the intersection for
        # the DbEntity. We always remove the is_template property that might have come from the Behavior's Intersection
        intersection_dict = remove_keys(
            merge(
                model_dict(updated_existing_feature_behavior.intersection_subclassed),
                model_dict(configured_feature_behavior.intersection_subclassed),
            ),
            ['is_template']
        )
        logger.debug("Intersection after merge %s" % intersection_dict)
        # Get or create the intersection instance based on the subclass of the source(s)
        intersection_class = \
            (configured_feature_behavior and configured_feature_behavior.intersection_subclassed and configured_feature_behavior.intersection_subclassed.__class__) or\
            (updated_existing_feature_behavior and updated_existing_feature_behavior.intersection and updated_existing_feature_behavior.intersection_subclassed.__class__)
        intersection = intersection_class()
        # Update to match the settings
        intersection.__dict__.update(
            intersection_dict
        )
        if intersection.__class__ == Intersection:
            raise Exception("Expected subclass: %s %s %s" % (intersection_dict, configured_feature_behavior.intersection_subclassed, updated_existing_feature_behavior.intersection_subclassed))
Example No. 52
def post_save_publishing(signal_path, config_entity, user, **kwargs):
    """
        The initial entry point and recursive entry point for all post save publishing methods
        :signal_path - the full module path of the signal that called this
        :param kwargs:
            signal_proportion_lookup - A dictionary of signal names to the proportion complete of the overall post save.
            The signal matching signal_path will be sought in the dictionary
            config_entity - The scope of whatever being post-saved, whether a config_entity or something within it
            dependent_signal_paths - Full module signal paths called in sequentially by this publisher
            crud_type - CrudKey.CREATE|CLONE|UPDATE|SYNC|DELETE
            instance_class - Optional. Overrides the class of the instance for use in communicating with the client.
            This is used when the client only cares about a base class, such as Feature or to for DbEntityInterest
            to be a DbEntity
            client_instance_path - Optional. Property path to resolve the instance to another instance for the client.
             (this is only used to convert DbEntityInterest to DbEntity)
    """
    api_key = ApiKey.objects.get(user=user).key

    # Gather instance ids, class, and optional instance keys
    bundle = InstanceBundle(**merge(kwargs, dict(user_id=user.id)))

    # Pass the arguments to the task and run via celery. Note that kwargs is being treated
    # as a dict here and passed along
    logger.info("Django post save: %s" % unicode(bundle))

    # Send the start event to the client if we aren't recursing.
    if not kwargs.get('recurse', False):
        event = 'postSavePublisherStarted'
        logger.info("Sending start message %s to user %s with %s" % (event, user.username, unicode(bundle)))

        send_message_to_client(
            user.id,
            dict(
                event=event,
                config_entity_id=config_entity and config_entity.id,
                config_entity_class_name=config_entity and config_entity.__class__.__name__,
                class_name=bundle.class_name_for_client,
                # Always send 0 for initial
                proportion=0,
                ids=bundle.client_instance_ids,
                keys=bundle.keys,
                class_key=bundle.class_key
            )
        )

    # Start Celery
    logger.info("Starting post save publishing with signal path %s" % signal_path)
    job = start_and_track_task(_post_save_publishing,
                               api_key,
                               config_entity,
                               user,
                               **merge(
                                     remove_keys(kwargs, ['instance']),
                                     dict(
                                         # If we are recursing (already in a celery worker), don't start a new celery task.
                                         # Once dependency ordering is figured out we can do this, but there's probably
                                         # a better way via the Task object or something.
                                         current_job=kwargs.get('job', None),
                                         signal_path=signal_path,
                                         crud_type=kwargs.get('crud_type'),
                                         bundle=bundle
                               )))

    return HttpResponse(job.hashid)
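A hypothetical invocation, with illustrative values for the kwargs the docstring lists (the signal path and proportion lookup below are placeholders, not the project's real names):

response = post_save_publishing(
    'some_app.publishing.post_save_config_entity',   # placeholder signal path
    config_entity,
    user,
    instance=config_entity,
    crud_type=CrudKey.UPDATE,
    signal_proportion_lookup=dict(post_save_config_entity=0.5),   # illustrative proportion
    dependent_signal_paths=[])
# Returns an HttpResponse whose body is the tracked celery job's hashid.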
Example No. 53
        FeatureClassCreator(config_entity)

    # Get the publisher_name, proportion, and signal_path
    publishing_info = get_publishing_info(**kwargs)

    try:
        # Make sure no transactions are outstanding
        # This shouldn't be needed once Django is upgraded
        transaction.commit()
    except Exception, e:
        pass

    # Update the kwargs to include the resolved instance. This will be sent when we recurse on post_save_publishing
    # Also use first=False to indicate recursion so we don't resend the start signal to the client
    updated_kwargs = merge(
        remove_keys(kwargs, ['signal_path', 'current_job', 'bundle']),
        dict(instance=bundle.instances, user_id=bundle.user_id, recurse=True, current_job=job))
    logger.warn("kwargs %s" % updated_kwargs)

    logger.info("Running handlers for signal {signal_path} for {bundle}".format(
        config_entity=config_entity,
        username=user.username,
        signal_path=publishing_info['signal_path'],
        bundle=unicode(bundle)))

    # Send the signal. The listening publishers will run in sequence
    # We always send the signal in the context of the underlying config_entity class if one exists
    resolve_module_attr(publishing_info['signal_path']).send(
        sender=config_entity.__class__ if config_entity else bundle.clazz, **updated_kwargs
    )
    try:
Example No. 54
 def __init__(self, *args, **kwargs):
     super(FeatureBehavior, self).__init__(*args, **remove_keys(kwargs, ['tags', 'attribute_group_configurations']))
     self._tags = self._tags or []
     self._tags.extend(kwargs.get('tags', []))
     self._attribute_group_configurations = kwargs.get('attribute_group_configurations') or self._attribute_group_configurations or []
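A hypothetical construction showing why tags and attribute_group_configurations are stripped from kwargs before the parent __init__ runs: they are accumulated on private attributes rather than passed to the model's fields (the tag value below is a placeholder):

feature_behavior = FeatureBehavior(
    tags=['placeholder_tag'],           # extended onto _tags rather than handled by the model
    attribute_group_configurations=[])  # stored on _attribute_group_configurations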