Example 1
    def clone_db_entity_and_interest_for_result(self, config_entity, existing_db_entity_interest, reference_db_entity_key, **kwargs):
        """
            Clone the selected db_entity of key reference_db_entity_key and replace any of its attributes with those
            specified in **kwargs. **kwargs should contain a unique key property
        :param config_entity: The ConfigEntity containing the DbEntity to clone
        :param existing_db_entity_interest: The existing DbEntityInterest if one exists
        :param reference_db_entity_key: key of the DbEntity to clone
        :param kwargs: replacement values containing at the very least 'key'
        :return: The DbEntityInterest which references the cloned db_entity
        """
        source_db_entity = config_entity.computed_db_entities().get(key=reference_db_entity_key)
        # Avoid circular reference
        from footprint.main.publishing.db_entity_publishing import clone_or_update_db_entity_and_interest

        db_entity_interest = clone_or_update_db_entity_and_interest(
            config_entity,
            source_db_entity,
            DbEntity(**merge(
                kwargs,
                dict(
                    feature_class_configuration=FeatureClassConfiguration(
                        **merge(source_db_entity.feature_class_configuration.__dict__,
                                dict(feature_class_owner=reference_db_entity_key))),
                )
            )),
            existing_db_entity_interest=existing_db_entity_interest,
            override_on_update=True
        )
        # Run this manually here. It should be triggered by saving the DbEntity, but something
        # is disabling the publisher
        # TODO the DbEntity publisher should be turned on here so this shouldn't be needed
        from footprint.main.publishing.user_publishing import on_db_entity_post_save_user
        on_db_entity_post_save_user(None, instance=db_entity_interest)
        return db_entity_interest
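A note on the helper these examples revolve around: `merge` comes from the footprint codebase, and its import isn't shown in these snippets. Judging from every call site, it combines any number of dicts from left to right, with keys in later dicts overriding earlier ones. A minimal sketch under that assumption:

    def merge(*dicts):
        """Combine dicts left to right; keys in later dicts win.

        Sketch only -- the real helper lives in the footprint library and may
        handle more cases (e.g. None arguments).
        """
        result = {}
        for dct in dicts:
            result.update(dct)
        return result

    # merge(dict(generated=False), dict(generated=True), dict(key='foo'))
    # -> {'generated': True, 'key': 'foo'}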
Example 2
    def create_subclass(self, params, **kwargs):
        """
            Subclass this class to create a resource class specific to the config_entity.
        :param params.layer__id or db_entity__id: The Layer id or DbEntity id. Optional. Used to resolve the Feature/FeatureResource subclasses if we are in FeatureResource (not in a subclass)
        :return: The subclassed resource class
        """

        if params.get('file_dataset__id'):
            # A FileDataset corresponds to an UploadDatasetTask
            # We receive this parameter for unsaved DbEntities that are requesting feature metadata
            uploadDatasetTask = UploadDatasetTask.objects.get(id=int(params.get('file_dataset__id')))
            metadata = uploadDatasetTask.metadata
            return self.dynamic_resource_subclass(
                **merge(
                    dict(config_entity=uploadDatasetTask.upload_task.config_entity.subclassed, metadata=metadata, params=params),
                    kwargs
                )
            )
        else:
            # Resolve the LayerSelection of the Layer in the context of the DbEntity if one exists. This gives us a predefined
            # queryset of the features. If no LayerSelection exists (because no Layer exists) we resolve the subclass
            # with the db_entity. The latter implies that we want the full queryset.
            config_entity = self.resolve_config_entity(params)
            layer_selection = self.resolve_layer_selection(params)
            logger.debug("For FeatureResource resolved config_entity %s and layer %s" % (
                config_entity.key, layer_selection.layer.db_entity.key if layer_selection else 'None'
            ))
            return self.dynamic_resource_subclass(
                **merge(
                    dict(config_entity=config_entity, layer_selection=layer_selection) if layer_selection else dict(db_entity=self.resolve_db_entity(params)),
                    dict(params=params),
                    kwargs
                )
            )
Example 3
    def complete_or_create_feature_class_configuration(self, feature_class_configuration, **overrides):
        """
            Clones the given feature_class_configuration to make it specific to the ConfigEntity
            If the feature_class_configuration is null a simple one is created
            :param feature_class_configuration: Used for the basis of cloning from another DbEntity, or
            as the preconfigured instance that was defined in an initializer, such as default_project.py
            :param overrides: Override anything in the feature_class_configuration. This is used for cloning
            when we need to specify generated=True, source_from_origin_layer_selection=True, etc
        """
        if self.db_entity and self.db_entity.no_feature_class_configuration:
            # If nothing is passed it means that the DbEntity doesn't represent a feature table, e.g. background imagery
            return None

        key = self.db_entity.key if self.db_entity else self.DETACHED_FROM_DB_ENTITY

        return FeatureClassConfiguration(**merge(
            dict(
                # Indicates that the configuration was generated by introspecting a table, rather than by deliberate configuration
                generated=False,
                # Indicates that the features should be created from an origin LayerSelection features.
                source_from_origin_layer_selection=False,
                # The origin Layer id
                origin_layer_id=None,
                # The user of the origin layer
                origin_user=None,
                # The default data_importer for features
                # The default imports based on the url of the db_entity--usually a special database url
                data_importer=None,
            ),
            # Override the above with any configured attributes
            merge(feature_class_configuration.__dict__, overrides) if feature_class_configuration else {},
            # Override or define ConfigEntity specific attributes
            dict(
                key=key,
                # The scope is the id of the config_entity
                scope=self.config_entity and self.config_entity.id,
                # The config_entity id scopes the geography table, except for scenarios which
                # always use their Project config_entity id as the scope
                geography_scope=self.config_entity.id,
                schema=self.config_entity.schema(),
                class_attrs=merge(
                    feature_class_configuration.class_attrs if feature_class_configuration else {},
                    {'config_entity__id': self.config_entity.id, 'override_db': self.config_entity.db, 'db_entity_key': key}),
            ) if self.config_entity else dict(
                # Abstract case
                key=key,
                class_attrs=merge(
                    feature_class_configuration.class_attrs if feature_class_configuration else {},
                    {'db_entity_key': key})
            )
        ))
Example 4
    def dynamic_resource_subclass(self, instance, **kwargs):
        """
            Creates a dynamic resource using reversion to get the queryset of revisions for the instance
        :param instance:
        :param kwargs:
        :return:
        """

        feature_class = kwargs['feature_class']

        def get_versioned_version(feature_version, feature_class, key):
            """
                Return the related object for the related attribute indicated by this key for the given feature_version
            :param feature_version:
            :param key:
            :return:
            """
            related_descriptor = getattr(feature_class, key)
            return related_descriptor.field.rel.to.objects.get(
                id=feature_version.field_dict[key])

        @using_bundle_cache
        def feature(bundle):
            # Get the current version of the Feature instance
            feature_instance = feature_class.objects.get(
                id=bundle.obj.field_dict['id'])
            # Update it (without saving). This doesn't take care of our related fields
            feature_instance.__dict__.update(**bundle.obj.field_dict)
            # Take care of our related fields by setting the Revisionable mixin's _version_field_dict
            # We instruct the related Resource field to check this _version_field_dict
            # to grab the versioned value
            feature_instance._version_field_dict = map_dict_to_dict(
                lambda key, value:
                [key,
                 get_versioned_version(bundle.obj, feature_class, key)],
                feature_class.dynamic_model_class_creator.related_descriptors(
                ))

            # Set the magic version property so that we grab the right meta data
            feature_instance._version = bundle.obj
            return feature_instance

        return self.__class__.resolve_resource_class(
            FeatureVersionProxy,
            related_descriptors=merge(
                dict(
                    # Instruct the dynamic resource class to create a dynamic FeatureResource class
                    # for the related field.
                    feature=dict(
                        field=models.ForeignKey(feature_class),
                        callable_attribute=feature,
                        full=True,
                        null=True,
                        # Instruct the dynamic resource to dehydrate related fields
                        # by replacing the default model_class field value with the _* version
                        # of the field if defined. This allows us to set the _* on our in-memory
                        # versioned Feature and read those _* fields instead of the database
                        # (current version) field values
                        use_version_fields=True))),
            queryset=feature_revision_manager.get_for_object(instance))
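`map_dict_to_dict` above turns each key/value pair of `related_descriptors()` into a new pair via the lambda. Its import isn't shown either; a sketch assuming it builds a dict from the `[key, value]` results:

    def map_dict_to_dict(func, dct):
        """Build a new dict where func(key, value) returns the [key, value]
        pair of each resulting item. Sketch of the assumed helper."""
        return dict(func(key, value) for key, value in dct.items())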
Example 5
    def feature_behavior_from_behavior_template(self):
        """
            The method to create a FeatureBehavior from the template_feature_behavior.
            This might be a subclass instance of FeatureBehavior and/or have preconfigured values
            Override FeatureBehavior.set_defaults to set preconfigured values

        """
        template_feature_behavior = self.subclassed_template_feature_behavior
        # Clone it, making a new intersection instance by removing the id
        # If the FeatureBehavior has no Intersection get it from the Behavior
        # Some behaviors, like background_imagery and result also have no Intersection
        intersection = template_feature_behavior.intersection_subclassed or self.intersection_subclassed
        if not intersection:
            raise Exception(
                "No intersection resolved for the template FeatureBehavior")
        intersection.id = 0
        logger.debug(
            "Intersection from template is %s of subclass %s" %
            (model_dict(intersection, include_primary_key=True) if intersection
             else 'None', intersection.__class__ if intersection else 'None'))
        feature_behavior = template_feature_behavior.__class__(**merge(
            model_dict(template_feature_behavior),
            dict(behavior=self, is_template=False, intersection=intersection)))
        # Set any defaults defined by the base or subclass
        feature_behavior.set_defaults()
        return feature_behavior
Example 6
    def subclass_resource_if_needed(self, view, request):
        """
            Overrides the FootprintResource method to perform subclassing of the resource based on the request params
        :param view:
        :param request:
        :return:
        """
        params = request.GET
        # TODO cache dynamic class creation results
        # Create the dynamic resource class
        dynamic_resource_class = self.create_subclass(params, method=request.method)
        # Dynamic model classes always have a config_entity. In the case
        # where the model class is not dynamic (e.g. ClientLandUseDefinitions subclasses),
        # we expect the config_entity__id to be sent with the request, so we resolve the config_entity from it
        config_entity = dynamic_resource_class._meta.queryset.model.config_entity if\
            hasattr(dynamic_resource_class._meta.queryset.model, 'config_entity') else\
            self.resolve_config_entity(request.GET)

        # This might not be needed anymore, but it indicates what other dynamic classes were created so that
        # permissions can be added for them
        additional_classes_used = []
        # We add permissions to the current user so they can access these dynamic classes if it's the first access by the user
        # TODO permissions would ideally be done ahead of time, or if we could automatically give the user full
        # access to all subclasses of a certain type, but I don't see how to do that in the Django docs.
        # This might be fixed in the latest Django version
        user = self.resolve_user(params)
        #logger.info("Adding permissions for user %s to dynamic_resource_class %s" % (user.username, dynamic_resource_class.__name__))
        self.add_permissions_to_user(user, self.get_or_create_permissions_for_class(dynamic_resource_class, additional_classes_used, config_entity))

        # Extract and add GET parameters
        request._config_entity = config_entity
        request._filters = remove_keys(
            merge(request.GET, self.search_params(params)),
            self.remove_params(params))

        return dynamic_resource_class().wrap_view(view)
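`remove_keys`, used here to strip search parameters out of the filters, appears to return a copy of a dict without the named keys; some call sites in later examples pass a single key string rather than a list. A hedged sketch covering both calling styles:

    def remove_keys(dct, keys):
        """Return a copy of dct without the given keys.

        Sketch only: accepts a list of keys or a single key string, matching
        the two calling styles seen in these examples (Python 2 syntax, like
        the surrounding code).
        """
        if isinstance(keys, basestring):
            keys = [keys]
        return dict((key, value) for key, value in dct.items() if key not in keys)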
Example 7
    def import_db(self, database_name, local_dump_file):
        # Try to connect
        main_db = pg_connection_parameters(settings.DATABASES['default'])
        db = merge(main_db, dict(database=database_name))

        db_conn_string = "--host={host} --port={port} --user={user}".format(**db)
        self.run_as_pg('createdb {db_conn_string} {name}'.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('psql {db_conn_string} -c "CREATE EXTENSION IF NOT EXISTS POSTGIS" {name}'.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('psql {db_conn_string} -c "CREATE EXTENSION IF NOT EXISTS DBLINK" {name}'.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('''psql {db_conn_string} -c 'ALTER DATABASE {name} SET search_path = "$user",public,postgis;' postgres'''.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('psql {db_conn_string} -f {local_dump_file} {name}'.format(
            db_conn_string=db_conn_string,
            local_dump_file=local_dump_file,
            name=database_name), **db)
Example 8
    def update_or_create_project(project_dict):
        if kwargs.get('limit_to_classes') and Project not in kwargs['limit_to_classes']:
            if Project.objects.filter(key=project_dict['key']).count() != 1:
                raise Exception("Trying to get Project %s, which hasn't been created" % project_dict['key'])
            project_tuple = Project.objects.get(key=project_dict['key']), False, False
        else:
            project_tuple = Project.objects.update_or_create(
                key=project_dict['key'],
                defaults=merge(
                    dict(
                        behavior=get_behavior('default_config_entity'),
                    ),
                    remove_keys(project_dict, ['key', 'base_table', 'region_key', 'media']),
                    dict(
                        parent_config_entity=regions_by_key[project_dict['region_key']],
                        creator=User.objects.get(username=UserGroupKey.SUPERADMIN)
                    )
            ))

        logger.info("{update_or_create} Project {config_entity}".format(update_or_create='Created' if project_tuple[1] else 'Updated', config_entity=project_tuple[0]))

        media = map(lambda medium_config:
                    Medium.objects.update_or_create(
                        key=medium_config.key,
                        defaults=remove_keys(medium_config.__dict__['kwargs'], 'key'))[0],
                    project_dict.get('media', []))

        existing_media = project_tuple[0].media.filter(id__in=map(lambda medium: medium.id, media))
        media_to_add = set(media) - set(existing_media)
        if len(media_to_add) > 0:
            project_tuple[0].media.add(*media_to_add)
        return project_tuple
Example 9
        def __new__(meta, name, bases, attrs):
            # Register any additional model fields specified in fields
            def add_field(sender, **kwargs):
                if sender.__name__ == computed_name:
                    for field_name, field in fields.items():
                        field.contribute_to_class(sender, field_name)
            class_prepared.connect(add_field)

            def create_class_property(class_attr):
                related_attr = class_attr.split('__')[0]
                related_class_name = related_class_lookup.get(related_attr, None)
                if not related_class_name:
                    raise Exception("Expected related_class_lookup to contain %s, since class_attrs contain %s" % (related_attr, class_attr) )
                related_class = resolve_module_attr(related_class_name)
                # Create the getter property that uses the class manager to look up the related model by id
                def getter(cls):
                    return related_class.objects.get(id=getattr(cls, class_attr))
                return ClassProperty(classmethod(getter))

            # Create class-level getter properties to resolve things like the config_entity since we only store the id
            class_properties = map_to_dict(
                lambda class_attr: [class_attr.split('__')[0], create_class_property(class_attr)],
                filter(lambda class_attr: class_attr.endswith('__id'), class_attrs))

            return models.base.ModelBase.__new__(
                meta,
                computed_name,
                (base_class,),
                # Merge the manager objects (if the abstract_class has one) with attrs and class_attrs
                merge(dict(objects=base_class.objects.__class__()) if hasattr(base_class, 'objects') else {},
                      attrs,
                      class_attrs,
                      class_properties))
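`map_to_dict`, which builds the class-property dict above, evidently maps each item of an iterable to a `[key, value]` pair and assembles the pairs into a dict. A minimal sketch under that assumption:

    def map_to_dict(func, iterable):
        """Build a dict from an iterable where func maps each item to a
        [key, value] pair. Sketch of the helper as used in these examples."""
        return dict(func(item) for item in iterable)

    # map_to_dict(lambda n: [str(n), n * n], [1, 2, 3])
    # -> {'1': 1, '2': 4, '3': 9}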
Example 10
def resolve_parent_fixture(module, module_fragment, fixture_class, schema,
                           *args, **kwargs):
    # Resolve the parent schema. Client or region schemas become global. global has no schema
    # TODO use kwargs['config_entity'].parent_schema to resolve if we get to a point where
    # config_entity is required
    parent_schema = '__'.join(
        schema.split('__')[0:-1]) or ('global' if schema != 'global' else None)
    if parent_schema and (kwargs.get('schema_config_entity')
                          or kwargs.get('config_entity')):
        parent_config_entity = kwargs.get(
            'schema_config_entity',
            kwargs['config_entity']).parent_config_entity_subclassed
        new_kwargs = merge(kwargs,
                           dict(schema_config_entity=parent_config_entity))
    else:
        new_kwargs = kwargs

    if parent_schema:
        fixture = resolve_fixture(module, module_fragment, fixture_class,
                                  parent_schema, *args, **new_kwargs)
    else:
        fixture = resolve_default_fixture(module, module_fragment,
                                          fixture_class, parent_schema, *args,
                                          **new_kwargs)
    if not fixture:
        raise Exception(
            "Fixture in null. This should never happen: module:%s, module_fragment:%s, fixture_class:%s, schema:%s"
            % (module, module_fragment, fixture_class, schema))
    return fixture
Example 11
def create_layer_selection(config_entity, layer, attribute_id):
    db_entity = layer.db_entity_interest.db_entity
    connection = connection_dict(layer.config_entity.db)

    tilestache_layers = []

    users = set(get_users_with_perms(config_entity)) | set(
        get_users_with_perms(layer.db_entity_interest.db_entity))

    # Make sure layer_selection instances exist for the users
    from footprint.main.publishing.layer_publishing import update_or_create_layer_selections_for_layer
    update_or_create_layer_selections_for_layer(layer, users=users)

    logger.info("Get/Create layer_selection for config_entity %s, layer %s, users %s" %\
                (config_entity.key, layer.db_entity_key, ','.join(map(lambda user: user.username, users))))
    # Each layer has a dynamic class representing its SelectedFeature table
    get_or_create_layer_selection_class_for_layer(layer)
    if not users:
        return tilestache_layers

    config_entity.db_entity_feature_class(key=layer.db_entity_key)
    layer_selection_class = get_or_create_layer_selection_class_for_layer(
        layer, config_entity)
    # Take the first user to create a template query
    user = list(users)[0]
    # Each LayerSelection instance is per user
    layer_selection = layer_selection_class.objects.get_or_create(user=user)[0]
    # Extract the query from the QuerySet
    query = re.sub(
        r'"layer_selection_id" = \d+', r'"layer_selection_id" = {user_id}',
        str(
            layer_selection.selected_features.values('wkb_geometry',
                                                     'id').query))
    logger.info(
        "Creating tilestache layer_selection for layer %s, user %s, query: %s"
        % (layer.full_name, user.username, query))
    user_id_lookup = map_to_dict(
        lambda layer_selection: [layer_selection.user.id, layer_selection.id],
        layer_selection_class.objects.all())

    # Embed the id in the Geojson for each feature.
    # Nothing else is needed, since all other attributes can be looked up based on the id
    id_field = map(lambda field: field.name + '_id',
                   layer_selection.feature_class._meta.parents.values())[0]

    vector_selection_layer = build_vector_layer_config(
        parameters=merge(
            connection,
            dict(query=query,
                 column="wkb_geometry",
                 user_id_lookup=user_id_lookup)),
        provider_id_property=id_field,
        client_id_property=db_entity._meta.pk.name)

    layer_key = "layer:{layer},attr_id:{attribute},type:{type}".format(
        layer=layer.id, attribute=attribute_id, type='selection')
    logger.info("Creating layer %s" % layer_key)
    tilestache_layers.append(
        TSLayer(key=layer_key, value=vector_selection_layer))
    return tilestache_layers
Example 12
 def feature_class_lookup(self):
     # Get the client project fixture (or the default project fixture if the former doesn't exist)
     project_class_lookup = merge(*map(
         lambda project_fixture: project_fixture.feature_class_lookup(),
         project_specific_project_fixtures(
             config_entity=self.config_entity)))
     return project_class_lookup
Example 13
    def import_db(self, database_name, local_dump_file):
        # Try to connect
        main_db = pg_connection_parameters(settings.DATABASES['default'])
        db = merge(main_db, dict(database=database_name))

        db_conn_string = "--host={host} --port={port} --user={user}".format(
            **db)
        self.run_as_pg(
            'createdb {db_conn_string} {name}'.format(
                db_conn_string=db_conn_string, name=database_name), **db)

        self.run_as_pg(
            'psql {db_conn_string} -c "CREATE EXTENSION IF NOT EXISTS POSTGIS" {name}'
            .format(db_conn_string=db_conn_string, name=database_name), **db)

        self.run_as_pg(
            'psql {db_conn_string} -c "CREATE EXTENSION IF NOT EXISTS DBLINK" {name}'
            .format(db_conn_string=db_conn_string, name=database_name), **db)

        self.run_as_pg(
            '''psql {db_conn_string} -c 'ALTER DATABASE {name} SET search_path = "$user",public,postgis;' postgres'''
            .format(db_conn_string=db_conn_string, name=database_name), **db)

        self.run_as_pg(
            'psql {db_conn_string} -f {local_dump_file} {name}'.format(
                db_conn_string=db_conn_string,
                local_dump_file=local_dump_file,
                name=database_name), **db)
Example 14
    def update_or_create_region(region_dict):
        if kwargs.get('limit_to_classes') and Region not in kwargs['limit_to_classes']:
            if Region.objects.filter(key=region_dict['key']).count() != 1:
                raise Exception("Trying to get Region %s, which hasn't been created" % region_dict['key'])
            region_tuple = Region.objects.get(key=region_dict['key']), False, False
        else:
            region_tuple = Region.objects.update_or_create(
                key=region_dict['key'],
                defaults=merge(
                    dict(
                        behavior=get_behavior('default_config_entity'),
                    ),
                    remove_keys(region_dict, ['key', 'media']),
                    dict(
                        creator=User.objects.get(username=UserGroupKey.SUPERADMIN),
                        parent_config_entity=global_config_singleton() if \
                            region_dict['key'] == settings.CLIENT else \
                            update_or_create_region(dict(key=settings.CLIENT, name=settings.CLIENT_NAME))[0]
            )))

        logger.info("{update_or_create} Region {config_entity}".format(update_or_create='Created' if region_tuple[1] else 'Updated', config_entity=region_tuple[0]))


        media = map(lambda medium_config:
                    Medium.objects.update_or_create(
                        key=medium_config.key,
                        defaults=remove_keys(medium_config.__dict__['kwargs'], 'key'))[0],
                    region_dict.get('media', []))

        existing_media = region_tuple[0].media.filter(id__in=map(lambda medium: medium.id, media))
        media_to_add = set(media) - set(existing_media)
        if len(media_to_add) > 0:
            region_tuple[0].media.add(*media_to_add)
        return region_tuple
Example 15
def get_dynamic_resource_class(super_class,
                               model_class,
                               fields={},
                               meta_fields={}):
    """
        Subclass the super_class, changing the queryset to the model_class's and abstract to False
    :param super_class: The Resource class to subclass
    :param model_class: The concrete subclassed model class which we want the subclassed Resource class to query
    :param fields: Additional fields to give the class.
    :param meta_fields: Additional meta fields, such as fields or excludes. You can also pass
    a queryset here to use something more specific than the model_class's default QuerySet
    :return:
    """
    class_name = get_dynamic_resource_class_name(super_class, model_class)
    if not meta_fields.get('queryset'):
        # Find the matching resource in the cache if there are no meta_fields that would mutate it
        # Return the class if it was already created
        modname = globals()['__name__']
        existing_class = resolve_module_attr('%s.%s' % (modname, class_name))
        if existing_class:
            return existing_class

    logger.info(
        "Creating Resource Class %s for model class %s with the super class %s"
        % (class_name, model_class.__name__, super_class.__name__))
    queryset = model_class.objects.all()
    meta_attributes = merge(dict(queryset=queryset, abstract=False),
                            meta_fields)
    meta_class = type('Meta', (super_class.Meta, ), meta_attributes)
    resource_attributes = dict(Meta=meta_class, **fields)
    resource_class = ModelDeclarativeMetaclass(class_name, (super_class, ),
                                               resource_attributes)
    return resource_class
Example 16
 def resolve_field(meta):
     type = meta['type']
     rest = merge(
         filter_dict(
             # Don't allow default='SEQUENCE'
             lambda key, value: not (key == 'default' and value ==
                                     'SEQUENCE'),
             # Ignore these keys
             remove_keys(meta, [
                 'type', 'auto_populate', 'visible', 'geometry_type',
                 'nullable'
             ])),
         dict(null=True))
     if type == 'string':
         return models.CharField(**rest)
     elif type == 'integer':
         return models.IntegerField(**rest)
     elif type == 'float':
         return models.FloatField(**rest)
     elif type == 'biginteger':
         return models.BigIntegerField(**rest)
     elif type == 'geometry':
         return models.GeometryField(geography=False, **rest)
     elif type == 'date':
         return models.DateField(**rest)
     elif type == 'datetime':
         return models.DateTimeField(**rest)
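`filter_dict` above whitelists the field metadata before it reaches the Django field constructor. From its use it keeps only the items for which the predicate returns true; a sketch of the assumed helper:

    def filter_dict(predicate, dct):
        """Return a copy of dct with only the items for which
        predicate(key, value) is truthy. Sketch of the assumed helper."""
        return dict((key, value) for key, value in dct.items()
                    if predicate(key, value))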
Example 17
    def importer(self, config_entity, db_entity, **kwargs):
        """
            Replaces the normal ImportProcessor importer with one to import a shapefile from disk
        """
        user = db_entity.creator

        if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
            # The table already exists. Skip the import and log a warning
            logger.warn("The target table for the layer selection import already exists. Skipping table import.")
        else:
            feature_class_creator = FeatureClassCreator(config_entity, db_entity)
            origin_feature_class_configuration = db_entity.origin_instance.feature_class_configuration
            # Create the new DbEntity FeatureClassConfiguration from the origin's. Pass in what has already been
            # created for the new feature_class_configuration. This should have things like generated=True
            feature_class_configuration = feature_class_creator.complete_or_create_feature_class_configuration(
                origin_feature_class_configuration,
                **merge(db_entity.feature_class_configuration.__dict__, dict(generated=True)))
            # Update the DbEntity
            feature_class_creator.update_db_entity(feature_class_configuration)

            if feature_class_configuration.source_from_origin_layer_selection and \
               feature_class_configuration.origin_layer_id:
                # If desired, limit the layer clone to that of the source layer's current LayerSelection for the
                # User doing the update
                layer_selection_class = get_or_create_layer_selection_class_for_layer(
                    Layer.objects.get(id=feature_class_configuration.origin_layer_id), True)
                layer_selection = layer_selection_class.objects.get(user=user)
                features = layer_selection.selected_features
            else:
                # Leave blank to copy all features by default
                features = None

            DefaultImportProcessor().peer_importer(config_entity, db_entity, import_from_origin=True, source_queryset=features)
Example 18
 def create_result_map(self, values_query_set):
     related_models = self.resolve_join_models()
     logger.debug(
         "Creating result map for related models %s feature class %s" %
         (', '.join(map(lambda r: str(r),
                        related_models)), self.feature_class))
     feature_class_creator = FeatureClassCreator.from_dynamic_model_class(
         self.feature_class)
     geography_scopes = feature_class_creator.geography_scopes()
     # Get the related model path's final segment. We want to map these to the db_entity_key names
     related_model_path_to_name = map_to_dict(
         lambda related_model: [
             resolve_related_model_path_via_geographies(
                 self.feature_class.objects, related_model).split('__')[1],
             related_model.db_entity_key
         ], related_models)
     return values_query_set.create_result_map(
         related_models=related_models,
         # map_path_segments maps related object paths to their model name,
         # and removes the geographies segment of the path
         map_path_segments=merge(
             # Map each geography scope to its corresponding field on the feature class
             map_to_dict(
                 lambda geography_scope: [
                     feature_class_creator.geographies_field(geography_scope
                                                             ).name, None
                 ], geography_scopes),
             related_model_path_to_name))
Example 19
    def dump_permissions(cls, instance=None):
        """
            Dump all the instance permissions for a class, or if instance is specified, just for that instance
        :param cls:
        :return:
        """

        # This import causes problems at the top of the file
        from guardian.models import GroupObjectPermission
        content_type_id = ContentType.objects.get_for_model(cls)
        return map(lambda dct: merge(dct,
                                     dict(instance=cls.objects.get(id=dct['object_pk']))),
                   GroupObjectPermission.objects.filter(
                       **merge(dict(content_type_id=content_type_id),
                               dict(object_pk=instance.id) if instance else dict())
        ).values('group__name', 'permission__name', 'object_pk'))
Example 20
 def create_result_map(self, values_query_set):
     related_models = self.resolve_join_models()
     logger.debug("Creating result map for related models %s feature class %s" % (', '.join(map(lambda r: str(r), related_models)), self.feature_class))
     feature_class_creator = FeatureClassCreator.from_dynamic_model_class(self.feature_class)
     geography_scopes = feature_class_creator.geography_scopes()
     # Get the related model path's final segment. We want to map these to the db_entity_key names
     related_model_path_to_name = map_to_dict(
         lambda related_model:
         [resolve_related_model_path_via_geographies(
             self.feature_class.objects,
             related_model).split('__')[1],
          related_model.db_entity_key],
         related_models
     )
     return values_query_set.create_result_map(
         related_models=related_models,
         # map_path_segments maps related object paths to their model name,
         # and removes the geographies segment of the path
         map_path_segments=merge(
             # Map each geography scope to its corresponding field on the feature class
             map_to_dict(
                 lambda geography_scope: [
                     feature_class_creator.geographies_field(geography_scope).name,
                     None
                 ],
                 geography_scopes),
             related_model_path_to_name)
     )
Example 21
def get_dynamic_resource_class(super_class, model_class, fields={}, meta_fields={}):
    """
        Subclass the super_class, changing the queryset to the model_class's and abstract to False
    :param super_class: The Resource class to subclass
    :param model_class: The concrete subclassed model class which we want the subclassed Resource class to query
    :param fields: Additional fields to give the class.
    :param meta_fields: Additional meta fields, such as fields or excludes. You can also pass
    a queryset here to use something more specific than the model_class's default QuerySet
    :return:
    """
    class_name = get_dynamic_resource_class_name(super_class, model_class)
    if not meta_fields.get('queryset'):
        # Find the matching resource in the cache if there are no meta_fields that would mutate it
        # Return the class if it was already created
        modname = globals()['__name__']
        existing_class = resolve_module_attr('%s.%s' % (modname, class_name))
        if existing_class:
            return existing_class

    logger.info("Creating Resource Class %s for model class %s with the super class %s" % (class_name, model_class.__name__, super_class.__name__))
    queryset = model_class.objects.all()
    meta_attributes = merge(dict(queryset=queryset, abstract=False), meta_fields)
    meta_class = type('Meta', (super_class.Meta,), meta_attributes)
    resource_attributes = dict(Meta=meta_class, **fields)
    resource_class = ModelDeclarativeMetaclass(class_name, (super_class,), resource_attributes)
    return resource_class
Example 22
    def dump_permissions(cls, instance=None):
        """
            Dump all the instance permissions for a class, or if instance is specified, just for that instance
        :param cls:
        :return:
        """

        # This import causes problems at the top of the file
        from guardian.models import GroupObjectPermission
        content_type_id = ContentType.objects.get_for_model(cls)
        return map(
            lambda dct: merge(
                dct, dict(instance=cls.objects.get(id=dct['object_pk']))),
            GroupObjectPermission.objects.filter(**merge(
                dict(content_type_id=content_type_id),
                dict(object_pk=instance.id) if instance else dict())).values(
                    'group__name', 'permission__name', 'object_pk'))
Example 23
 def feature_class_lookup(self):
     # Get the client global_config fixture (or the default global_config fixture if the former doesn't exist)
     client_global_config = resolve_fixture("config_entity",
                                            "global_config",
                                            GlobalConfigFixture)
     global_config_feature_class_lookup = client_global_config.feature_class_lookup()
     return merge(global_config_feature_class_lookup, {})
Example 24
 def feature_class_lookup(self):
     # Get the client region fixture (or the default region if the former doesn't exist)
     client_region = resolve_fixture("config_entity", "region", RegionFixture)
     region_class_lookup = client_region.feature_class_lookup()
     return merge(
         region_class_lookup,
         FeatureClassCreator(self.config_entity).key_to_dynamic_model_class_lookup(self.default_db_entities())
     )
Example 25
    def analysis_module_configuration(cls, config_entity, **kwargs):
        if not config_entity:
            return cls.abstract_analysis_module_configuration(**kwargs)

        configuration = merge(remove_keys(kwargs, ['class_scope']),
                              dict(generated=False))

        return AnalysisModuleConfiguration(configuration)
Example 26
    def dynamic_join_model_class(self, join_models, related_field_names):
        """
            Creates an unmanaged subclass of the feature class with extra fields to represent
            the fields of the join_models. This also adds fields for any fields specified in
            related_field_names. These are not join models but ForeignKeys such as BuiltForm.
            These latter fields must be specified explicitly because the main model and join models
            can't populate their foreign keys from the query because the query has to be
            a ValuesQuerySet in order to do the join. So we create id versions of the fields here
            (e.g. built_form_id) which the query can fill and then use that to manually
            set the foreign key reference in the Tastypie resource.
            :param join_models: Other feature models whose attributes should be added to the subclass
            :param related_field_names: List of field names of foreign key id fields (AutoFields)

        """
        main_model_class = self.dynamic_model_class()
        manager = main_model_class.objects
        # Exclude the following field types. Since the base Feature defines an id we'll still get that, which we want
        exclude_field_types = (ForeignKey, ToManyField, OneToOneField, GeometryField)
        all_field_paths_to_fields = merge(
            # Create fields to represent foreign key id fields
            # Our query fetches these ids since it can't fetch related objects (since it's a values() query)
            map_to_dict(
                lambda field_name: [field_name.replace('__', '_x_'),
                                    IntegerField(field_name.replace('__', '_x_'), null=True)],
                related_field_names
            ),
            # The join fields for each joined related model
            *map(
                lambda related_model: related_field_paths_to_fields(
                    manager,
                    related_model,
                    exclude_field_types=exclude_field_types,
                    fields=limited_api_fields(related_model),
                    separator='_x_'),
                join_models)
        )

        abstract_feature_class = resolve_module_attr(self.configuration.abstract_class_name)
        # Make sure the class name is unique to the related models and the given ConfigEntity
        related_models_unique_id = '_'.join(sorted(map(lambda related_model: related_model.__name__, join_models), ))
        dynamic_model_clazz = dynamic_model_class(
            main_model_class,
            self.db_entity.schema,
            self.db_entity.table,
            class_name="{0}{1}{2}{3}Join".format(
                abstract_feature_class.__name__,
                self.db_entity.id,
                self.config_entity.id,
                related_models_unique_id),
            fields=all_field_paths_to_fields,
            class_attrs=self.configuration.class_attrs or {},
            related_class_lookup=self.configuration.related_class_lookup or {},
            is_managed=False,
            cacheable=False)
        logger.info("Created dynamic join model class %s" % dynamic_model_clazz)
        logger.debug("Created with model fields %s" % map(lambda field: field.name, dynamic_model_clazz._meta.fields))
        logger.debug("Created with related and join fields %s" % all_field_paths_to_fields)
        return dynamic_model_clazz
Example 27
    def dynamic_resource_subclass(self, instance, **kwargs):
        """
            Creates a dynamic resource using reversion to get the queryset of revisions for the instance
        :param instance:
        :param kwargs:
        :return:
        """

        feature_class = kwargs['feature_class']

        def get_versioned_version(feature_version, feature_class, key):
            """
                Return the related object for the related attribute indicated by this key for the given feature_version
            :param feature_version:
            :param key:
            :return:
            """
            related_descriptor = getattr(feature_class, key)
            return related_descriptor.field.rel.to.objects.get(id=feature_version.field_dict[key])

        @using_bundle_cache
        def feature(bundle):
            # Get the current version of the Feature instance
            feature_instance = feature_class.objects.get(id=bundle.obj.field_dict['id'])
            # Update it (without saving). This doesn't take care of our related fields
            feature_instance.__dict__.update(**bundle.obj.field_dict)
            # Take care of our related fields by setting the Revisionable mixin's _version_field_dict
            # We instruct the related Resource field to check this _version_field_dict
            # to grab the versioned value
            feature_instance._version_field_dict = map_dict_to_dict(
                lambda key, value: [key, get_versioned_version(bundle.obj, feature_class, key)],
                feature_class.dynamic_model_class_creator.related_descriptors())

            # Set the magic version property so that we grab the right meta data
            feature_instance._version = bundle.obj
            return feature_instance

        return self.__class__.resolve_resource_class(
            FeatureVersionProxy,
            related_descriptors=merge(
                dict(
                    # Instruct the dynamic resource class to create a dynamic FeatureResource class
                    # for the related field.
                    feature=dict(
                        field=models.ForeignKey(feature_class),
                        callable_attribute=feature,
                        full=True,
                        null=True,
                        # Instruct the dynamic resource to dehydrate related fields
                        # by replacing the default model_class field value with the _* version
                        # of the field if defined. This allows us to set the _* on our in-memory
                        # versioned Feature and read those _* fields instead of the database
                        # (current version) field values
                        use_version_fields=True
                    )
                )
            ),
            queryset=feature_revision_manager.get_for_object(instance))
Example 28
    def analysis_module_configuration(cls, config_entity, **kwargs):
        if not config_entity:
            return cls.abstract_analysis_module_configuration(**kwargs)

        configuration = merge(
            remove_keys(kwargs, ['class_scope']),
            dict(generated=False))

        return AnalysisModuleConfiguration(configuration)
Example 29
 def tables_with_geometry(self, schema=None, table=None):
     """
         Returns tables with a column data type of 'geometry'
     :param schema: Optional schema to search
     :param table: Optional table to which to limit search. This guarantees 0 or 1 result
     :return:
     """
     return self.filter(**merge(dict(udt_name='geometry'),
                                compact_dict(dict(table_schema=schema, table_name=table))))
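`compact_dict` is what lets the optional schema/table parameters act as no-ops: entries that weren't supplied (and are therefore None) drop out before the filter runs. A sketch assuming exactly that behavior:

    def compact_dict(dct):
        """Return a copy of dct without None-valued entries.
        Sketch of the assumed helper."""
        return dict((key, value) for key, value in dct.items()
                    if value is not None)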
Example 30
 def update_layer_of_scenario(scenario):
     logger.info("Updating layer of db_entity_key %s, Scenario %s" % (db_entity_key, scenario.name))
     db_entity_interest = DbEntityInterest.objects.get(config_entity=scenario, db_entity__key=db_entity_key)
     return Layer.objects.update_or_create(
         db_entity_interest=db_entity_interest,
         defaults=merge(
             remove_keys(model_dict(template_layer), ['db_entity_key']),
         )
     )[0]
Example 31
 def analysis_module_configuration(self):
     """
         Restores the AnalysisModuleConfiguration that created this AnalysisModule
     """
     return AnalysisModuleConfiguration(
         merge(
             self.configuration,
             dict(name=self.name,
                  description=self.description,
                  key=self.key)))
Example 32
 def __init__(self, **kwargs):
     """
         kwargs are the attributes of the class. None are required
     """
     abstract_class_name = full_module_path(kwargs['abstract_class']) if \
                             kwargs.get('abstract_class') else \
                             kwargs.get('abstract_class_name', full_module_path(Feature))
     updated_kwargs = merge(remove_keys(kwargs, ['abstract_class']),
                            dict(abstract_class_name=abstract_class_name))
     self.__dict__.update(**updated_kwargs)
Example 33
 def update_layer_of_scenario(scenario):
     logger.info("Updating layer of db_entity_key %s, Scenario %s" %
                 (db_entity_key, scenario.name))
     db_entity_interest = DbEntityInterest.objects.get(
         config_entity=scenario, db_entity__key=db_entity_key)
     return Layer.objects.update_or_create(
         db_entity_interest=db_entity_interest,
         defaults=merge(
             remove_keys(model_dict(template_layer),
                         ['db_entity_key']), ))[0]
Example 34
    def abstract_analysis_module_configuration(cls, **kwargs):
        """
            Abstract version of the configuration for use when no ConfigEntity is specified
        """

        configuration = merge(
            remove_keys(kwargs, ['class_scope']),
            dict(class_attrs={'key': kwargs['key']}, generated=False))

        return AnalysisModuleConfiguration(configuration)
Example 35
    def sync_permissions(self, additional_permissions=None, permission_key_class=PermissionKey, superior_permission_lookup={}):
        """
            Syncs the instance permissions using the current values of group_permission_configuration.
            The superior groups of the object's groups and additional_permissions are also synced to have
            equal or higher permissions. They only get higher permissions if they match an entry
            in superior_permission_lookup
        :param additional_permissions: A dict representing permissions to use in addition
            to any stored in self.group_permission_configuration. This is used for groups that
            are dynamically created for the ConfigEntity
        :param permission_key_class: Default PermissionKey; pass a subclass if a Permissions implementer
            implements extra permissions
        :param superior_permission_lookup: When the superior Group permissions are set, this is checked
        to see if the group should receive higher permissions than those of the subordinates. If a key match
        is found, the permission value is used. Otherwise the subordinate value is used
        :return:
        """

        # Add the permissions given in group_permission_configuration
        configuration = merge(self.group_permission_configuration or {}, additional_permissions or {})

        if configuration:
            # Get all GroupHierarchy instances to save making extra queries
            group_hierarchy_lookup = map_to_dict(
                lambda group_hierarchy: [group_hierarchy.group.name, group_hierarchy],
                GroupHierarchy.objects.filter(group__name__in=configuration.keys()))
            for group_name, permission_key in configuration.items():
                # Assign the permission to the group for this instance
                # We can have multiple permission_keys if the permission_key is PermissionKey.ALL
                try:
                    subordinate_group_hierarchy = group_hierarchy_lookup[group_name]
                except KeyError:
                    raise Exception("Couldn't find group %s among group_hierarchy_lookup, which has keys %s" %
                                    (group_name, ', '.join(group_hierarchy_lookup.keys())))
                subordinate_group = subordinate_group_hierarchy.group
                # Remove any permissions this group has on the object, in case we changed
                # the configuration
                for class_permission_key in permission_key_class.permission_keys(permission_key_class.ALL, self.__class__):
                    remove_perm(class_permission_key, subordinate_group, self)
                logger.info("Setting permissions for %s and its superiors that don't have their own configurations", subordinate_group.name)
                # Find superior groups that aren't explicitly listed in the configuration.
                # These get the same permissions as the subordinate group, unless a higher
                # permission is specified in superior_permission_lookup
                groups = set([subordinate_group]) | set(subordinate_group_hierarchy.all_superiors())
                report = {}
                for group in filter(lambda group: group == subordinate_group or group.name not in configuration.keys(), groups):
                    permission = self.best_matching_permission(group, superior_permission_lookup)
                    self.assign_permission_to_groups([group], permission, permission_key_class=permission_key_class)
                    if permission not in report:
                        report[permission] = []
                    report[permission].append(group.name)
                # Log the results
                for permission, group_names in report.iteritems():
                    logger.info("For class %s, instance %s assigned permission key %s to groups %s",
                                self.__class__.__name__, self.key, permission, ', '.join(group_names))
Example 36
def db_entity_defaults(db_entity, config_entity=None):

    # Instantiate a FeatureClassCreator to make the FeatureClassConfiguration
    feature_class_creator = FeatureClassCreator(config_entity,
                                                db_entity,
                                                no_ensure=True)
    if config_entity:
        # Find the database of the configured client
        connection = resolve_fixture(None, "init", InitFixture,
                                     config_entity.schema()).import_database()
        # Remove the test_ prefix when running unit tests. Until we can auto-populate the test_ version
        # of the database, we want to simply rely on the manually configured source database
        if connection:
            connection['database'] = connection['database'].replace(
                'test_', '')
    else:
        # No config_entity: the abstract DbEntity case
        connection = None

    return dict(
        # The name is passed in or the titleized version of key
        name=db_entity.name or titleize(db_entity.key),
        # Postgres URL for local sources, or possibly a remote url (e.g. for background layer sources)
        # Unless overridden, create the url according to this postgres url scheme
        url=db_entity.url or \
            ('postgres://{user}:{password}/{host}:{port}/{database}'.format(
                **merge(dict(port=5432), connection)) if connection else None),
        # Normally equals the key, except for views of the table, like a Result DbEntity
        # Views leave this null and rely on query
        table=db_entity.table or (db_entity.key if not db_entity.query else None),
        # Query to create a "view" of the underlying data. Used by Result DbEntity instances
        query=db_entity.query,
        # How to group the features or query results. Not yet well hashed out
        group_by=db_entity.group_by,
        # The source DbEntity key if this DbEntity resulted from cloning a peer DbEntity
        source_db_entity_key=db_entity.source_db_entity_key,
        # Array used by remote data sources whose URLs have different host names
        # If so then the url will have a string variable for the host
        hosts=db_entity.hosts,
        # The User who created the DbEntity. TODO. Default should be an admin
        creator=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The User who updated the DbEntity. TODO. Default should be an admin
        updater=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],

        # The SRID of the Feature table
        srid=db_entity.srid,
        # This is a non-model object. So it is saved as a PickledObjectField
        # Whether the same instance is returned or not does not matter
        # If db_entity.feature_class_configuration is None, it will return None
        feature_class_configuration=feature_class_creator.complete_or_create_feature_class_configuration(
            db_entity.feature_class_configuration
        ),
        no_feature_class_configuration=db_entity.no_feature_class_configuration
        # feature_behavior is handled internally by DbEntity
    )
Example 37
 def __init__(self, **kwargs):
     """
         kwargs are the attributes of the class. None are required
     """
     abstract_class_name = (
         full_module_path(kwargs["abstract_class"])
         if kwargs.get("abstract_class")
         else kwargs.get("abstract_class_name", full_module_path(Feature))
     )
     updated_kwargs = merge(remove_keys(kwargs, ["abstract_class"]), dict(abstract_class_name=abstract_class_name))
     self.__dict__.update(**updated_kwargs)
Example 38
def create_vector_layer(config_entity, layer, attribute):
    # If the db_entity doesn't have an explicit query, create a query from the table and schema that joins
    # in the geography column.
    db_entity = layer.db_entity_interest.db_entity
    query = create_query(attribute, config_entity, layer)
    connection = connection_dict(layer.config_entity.db)

    vector_layer = build_vector_layer_config(
        merge(connection, dict(query=query, column="wkb_geometry")),
        client_id_property=db_entity._meta.pk.name)

    return TSLayer(key="layer_{0}_{1}_vector".format(layer.id, attribute), value=vector_layer)
Example No. 39
0
    def create_subclass(self, params, **kwargs):
        """
            Subclass this class to create a resource class specific to the Feature
        :param params.layer__id or db_entity__id: The Layer id or DbEntity id. Optional. Used to resolve the Feature/FeatureResource subclasses if we are in FeatureResource (not in a subclass)
        :return: The subclassed resource class
        """

        db_entity = self.resolve_db_entity(params)
        config_entity = db_entity.config_entity.subclassed
        feature_class = self.model_subclass(db_entity, config_entity)
        instance = self.resolve_instance(params, feature_class)
        return self.dynamic_resource_subclass(instance, **merge(kwargs, dict(feature_class=feature_class)))
Example No. 40
0
 def dehydrate(self, bundle):
     # Filter out meta fields and other things that the front-end doesn't need
     bundle.data = remove_keys(
         merge(
             bundle.data,
             FeatureQuantitativeAttribute(bundle.obj.db_entity,
                                          bundle.obj.attribute).__dict__),
         [
             'db_entity', 'feature_class', 'config_entity', 'updated',
             'updater', 'year'
         ])
     return super(FeatureAttributeResource, self).dehydrate(bundle)
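dehydrate above follows a merge-then-prune idiom: fold extra attributes into the bundle data, then strip the fields the client doesn't need. The same idiom with plain dicts and stdlib equivalents of footprint's merge/remove_keys (values hypothetical):

bundle_data = dict(attribute='du_occupancy', updater='admin', year=2014)
quantitative = dict(min=0.0, max=1.0)
data = dict(bundle_data, **quantitative)  # merge, right side wins
for key in ['updater', 'year']:
    data.pop(key, None)                   # remove_keys
# data == {'attribute': 'du_occupancy', 'min': 0.0, 'max': 1.0}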
Example No. 41
0
    def abstract_analysis_module_configuration(cls, **kwargs):
        """
            Abstract version of the configuration for use when no ConfigEntity is specified
        """

        configuration = merge(
            remove_keys(kwargs, ['class_scope']),
            dict(
                class_attrs={'key': kwargs['key']},
                generated=False))

        return AnalysisModuleConfiguration(configuration)
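The configuration is plain dict surgery: class_scope is stripped and key is echoed into class_attrs. A sketch with hypothetical kwargs:

kwargs = dict(key='vmt', name='Vehicle Miles Traveled', class_scope=object)
configuration = dict(
    dict((k, v) for k, v in kwargs.items() if k != 'class_scope'),  # remove_keys
    class_attrs={'key': kwargs['key']},
    generated=False)
# configuration == {'key': 'vmt', 'name': 'Vehicle Miles Traveled',
#                   'class_attrs': {'key': 'vmt'}, 'generated': False}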
Example No. 42
0
def cond_deep_flat_map_iterable(iterable):
    if isinstance(iterable, dict):
        # Flatten the dictionary of the fields key if it exists
        flattened_dict = remove_keys(
            merge(iterable, iterable.get('fields', {})),
            ['fields']
        )
        # Map each key value, recursing into the value if it's a dict or iterable
        return dict([(key, cond_deep_flat_map_iterable(value) if is_list_tuple_or_dict(value) else value)
            for key, value in flattened_dict.iteritems()])
    else:
        return map(lambda item: cond_deep_flat_map_iterable(item) if is_list_tuple_or_dict(item) else item, iterable)
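A worked example of the flattening (hypothetical config): keys inside a nested 'fields' dict are hoisted to the top level and 'fields' itself is dropped, recursively through lists and nested dicts:

config = dict(
    key='parcel',
    fields=dict(srid=4326, geometry_type='POLYGON'),
    tags=[dict(fields=dict(name='base'))])
flattened = cond_deep_flat_map_iterable(config)
# flattened == {'key': 'parcel', 'srid': 4326, 'geometry_type': 'POLYGON',
#               'tags': [{'name': 'base'}]}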
Example No. 43
0
 def feature_class_lookup(self):
     """
         Adds mappings of custom Feature classes
     :return:
     """
     parent_fixture = self.parent_fixture
     feature_class_lookup = parent_fixture.feature_class_lookup()
     return merge(
         feature_class_lookup,
         FeatureClassCreator(
             self.config_entity).key_to_dynamic_model_class_lookup(
                 self.default_db_entities()))
Example No. 45
0
def create_join(source_class, source_field, related_class, related_field,
                **kwargs):
    """
        Creates a join between two classes without using a modeled Django association. We do this in order to populate a Through table manually during import, or to simply create
        a join query that Django does not support through its related field mechanisms.
        Always try to use Django's implicit joining with filter() + extra(select=...) to create a join before resorting to this. Also try filter() with F() expressions (see Django docs)
    :param source_class:
    :param source_field:
    :param related_class:
    :param related_field:
    :param **kwargs: Optional arguments for join
        'extra' dict of extras to select
        'join_on_base' if True use the base class for the join
        'join_related_on_base' if True use the base class of the join class for the join
    :return:
    """

    # We either want to join to the related_class or its base, the latter case being when the related_class
    # inherits the field that we want to join
    resolved_related_class = related_class.__base__ if kwargs.get(
        'join_related_on_base', False) else related_class

    selections = source_class.objects.extra(**merge(
        dict(
            select={
                'related_pk':
                '{join_class_table}.{join_class_pk}'.format(
                    join_class_table=resolved_related_class._meta.db_table,
                    join_class_pk=resolved_related_class._meta.pk.column)
            }),
        kwargs.get('extra', {}) or {}))
    # setup initial FROM clause
    selections.query.join((None, source_class._meta.db_table, None, None))

    if kwargs.get('join_on_base', False):
        # Manually join in the base model so that it joins before the join below
        parent_field = source_class._meta.parents.values()[0]
        connection = (source_class._meta.db_table,
                      source_class.__base__._meta.db_table,
                      parent_field.column, 'id')
        selections.query.join(connection)

    # join to join class
    connection = (
        (source_class.__base__ if kwargs.get('join_on_base', False) else
         source_class)._meta.db_table,
        resolved_related_class._meta.db_table,
        source_field.column,
        related_field.column,
    )
    selections.query.join(connection)

    return selections
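A hypothetical usage sketch, assuming Building and Parcel are Django models that share a geography_id column but have no modeled relation between them:

source_field = Building._meta.get_field('geography_id')
related_field = Parcel._meta.get_field('geography_id')
selections = create_join(Building, source_field, Parcel, related_field)
# Each row exposes related_pk, the Parcel primary key picked up by the manual join
building_to_parcel = [(building.pk, building.related_pk) for building in selections]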
Example No. 46
0
def update_or_create_layer_style(layer_style_configuration,
                                 style_key,
                                 existing_layer_style=None):
    """
        Creates a LayerStyle and the StyleAttributes for CartoCSS styling.
        A Template is not specific to a Layer instance, but rather to the underlying Feature class of the Layer
    :param layer_style_configuration: A dict() containing values specific to the model (i.e. the Feature subclass).
    This will be merged with layer_style that matches the subclass and 0 or more attributes of the Feature subclass
    that the Layer represents.
    :param style_key: The unique key of the template. Use the same key to share the template among instances
    :param existing_layer_style: The optional layer style class upon which the template key is named. If this is omitted,
    a generic template is created that doesn't load any predefined style info from the system.
        TODO There is no particular purpose for a Template based on only a db_entity_key at this point.
        The one based on a styled_class (Feature class) can load the template matching the class and attributes to
        provide default styling for the layer. We might have a case for having various generic default styling for a
        layer that is not based on a feature_class.
    :return:
    """
    logger.info("existing_layer_style %s" % model_dict(existing_layer_style))

    layer_style, created, updated = LayerStyle.objects.update_or_create(
        key=style_key,
        defaults=merge(
            # look first for whether the layer style exists and update it, otherwise create it
            model_dict(existing_layer_style)
            if existing_layer_style else dict(),
            dict(
                name=style_key,
                content_type=ContentTypeKey.CSS,
                # This represents the master version of the LayerStyle and will not change
                # unless the backend configuration is updated
                geometry_type=layer_style_configuration.get('geometry_type'),
                html_class=style_key)))

    # iterate over the configured style attributes and update or create new instances and cartocss
    for style_attribute_config in layer_style_configuration.get(
            'style_attributes') or []:

        style_attribute, created, updated = StyleAttribute.objects.update_or_create(
            key=style_attribute_config.get('key')
            if style_attribute_config.get('key') else style_key + "__default",
            defaults=dict(name=style_attribute_config.get('name')
                          if style_attribute_config.get('name') else 'Default',
                          attribute=style_attribute_config.get('attribute'),
                          style_type=style_attribute_config.get('style_type'),
                          opacity=style_attribute_config.get('opacity') or 1,
                          style_value_contexts=style_attribute_config.get(
                              'style_value_contexts')))
        layer_style.style_attributes.add(style_attribute)
        layer_style.save()

    return layer_style
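A hypothetical layer_style_configuration for a polygon layer with one style attribute; the keys mirror the .get() calls made above and the values are invented:

layer_style_configuration = dict(
    geometry_type='polygon',
    style_attributes=[dict(
        key='built_form__default',
        name='Built Form',
        attribute='built_form_id',
        style_type='categorical',
        opacity=0.8,
        style_value_contexts=[])])
layer_style = update_or_create_layer_style(layer_style_configuration, 'built_form__style')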
Example No. 47
0
    def create_subclass(self, params, **kwargs):
        """
            Subclass this class to create a resource class specific to the Feature
        :param params.layer__id: The layer id. Optional. Used to resolve the Feature/FeatureResource subclasses if we are in FeatureResource (not in a subclass)
        :return: The subclassed resource class
        """

        layer = self.resolve_layer(params)
        config_entity = layer.config_entity.subclassed
        # Use the abstract resource class queryset model or given db_entity_key to fetch the feature subclass
        feature_class = self.resolve_model_class(config_entity=config_entity, layer=layer)
        instance = self.resolve_instance(params, feature_class)
        return self.dynamic_resource_subclass(instance, params=params, **merge(kwargs, dict(feature_class=feature_class)))
Example No. 48
0
def update_or_create_layer_style(layer_style_configuration, style_key, existing_layer_style=None):
    """
        Creates a LayerStyle and the StyleAttributes for CartoCSS styling.
        A Template is not specific to a Layer instance, but rather to the underlying Feature class of the Layer
    :param layer_style_configuration: A dict() containing values specific to the model (i.e. the Feature subclass).
    This will be merged with layer_style that matches the subclass and 0 or more attributes of the Feature subclass
    that the Layer represents.
    :param style_key: The unique key of the template. Use the same key to share the template among instances
    :param existing_layer_style: The optional layer style class upon which the template key is named. If this is omitted,
    a generic template is created that doesn't load any predefined style info from the system.
        TODO There is no particular purpose for a Template based on only a db_entity_key at this point.
        The one based on a styled_class (Feature class) can load the template matching the class and attributes to
        provide default styling for the layer. We might have a case for having various generic default styling for a
        layer that is not based on a feature_class.
    :return:
    """
    logger.info("existing_layer_style %s" % model_dict(existing_layer_style))

    layer_style, created, updated = LayerStyle.objects.update_or_create(
        key=style_key,
        defaults=merge(
            # look first for whether the layer style exists and update it, otherwise create it
            model_dict(existing_layer_style) if existing_layer_style else dict(),
            dict(
                name=style_key,
                content_type=ContentTypeKey.CSS,
                # This represents the master version of the LayerStyle and will not change
                # unless the backend configuration is updated
                geometry_type=layer_style_configuration.get('geometry_type'),
                html_class=style_key
            )
        )
    )

    # iterate over the configured style attributes and update or create new instances and cartocss
    for style_attribute_config in layer_style_configuration.get('style_attributes') or []:

        style_attribute, created, updated = StyleAttribute.objects.update_or_create(
            key=style_attribute_config.get('key') if style_attribute_config.get('key') else style_key + "__default",
            defaults=dict(
                name=style_attribute_config.get('name') if style_attribute_config.get('name') else 'Default',
                attribute=style_attribute_config.get('attribute'),
                style_type=style_attribute_config.get('style_type'),
                opacity=style_attribute_config.get('opacity') or 1,
                style_value_contexts=style_attribute_config.get('style_value_contexts')
            )
        )
        layer_style.style_attributes.add(style_attribute)
        layer_style.save()

    return layer_style
Example No. 49
0
def db_entity_defaults(db_entity, config_entity=None):

    # Instantiate a FeatureClassCreator to make the FeatureClassConfiguration
    feature_class_creator = FeatureClassCreator(config_entity, db_entity, no_ensure=True)
    if config_entity:
        # Find the database of the configured client
        connection = resolve_fixture(None, "init", InitFixture, config_entity.schema()).import_database()
        # Remove the _test suffix when running unit tests. Until we can auto-populate the _test version
        # of the database, we want to simply rely on the manually configured source database
        if connection:
            connection['database'] = connection['database'].replace('test_', '')
    else:
        # No config_entity: this is the abstract DbEntity case
        connection = None

    return dict(
        # The name is passed in or the titleized version of key
        name=db_entity.name or titleize(db_entity.key),
        # Postgres URL for local sources, or possibly a remote url (e.g. for background layer sources)
        # Unless overridden, create the url according to this postgres url scheme
        url=db_entity.url or \
            ('postgres://{user}:{password}@{host}:{port}/{database}'.format(
                **merge(dict(port=5432), connection)) if connection else None),
        # Normally equals the key, except for views of the table, like a Result DbEntity
        # Views leave this null and rely on query
        table=db_entity.table or (db_entity.key if not db_entity.query else None),
        # Query to create a "view" of the underlying data. Used by Result DbEntity instances
        query=db_entity.query,
        # How to group the features or query results. Not yet well hashed out
        group_by=db_entity.group_by,
        # The source DbEntity key if this DbEntity resulted from cloning a peer DbEntity
        source_db_entity_key=db_entity.source_db_entity_key,
        # Array used by remote data sources whose URLs have different host names
        # If so, the url will contain a string variable for the host
        hosts=db_entity.hosts,
        # The User who created the DbEntity. TODO: default should be an admin
        creator=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The User who updated the DbEntity. TODO: default should be an admin
        updater=db_entity.updater if hasattr(db_entity, 'updater') else get_user_model().objects.filter()[0],

        # The SRID of the Feature table
        srid=db_entity.srid,
        # This is a non-model object. So it is saved as a PickledObjectField
        # Whether the same instance is returned or not does not matter
        # If db_entity.feature_class_configuration is None, it will return None
        feature_class_configuration=feature_class_creator.complete_or_create_feature_class_configuration(
            db_entity.feature_class_configuration
        ),
        no_feature_class_configuration=db_entity.no_feature_class_configuration
        # feature_behavior is handled internally by DbEntity
    )
Example No. 50
0
def add_categories(bundle, *submitted_categories):
    """
        When the user updates the values of one or more categories, we assume that they want to delete the current
        Category instances with the same keys and replace them with the selected Category value. For instance, if a
        scenario has the Category key:'category' value:'smart' and the user chooses 'dumb' for the new value, we want
        to delete the Category instance valued by 'smart' and insert the one valued by 'dumb'. But we don't want to
        mess with Category instances that have different keys
    """
    logger = logging.getLogger(__name__)
    try:
        submitted_categories_by_key = map_to_keyed_collections(lambda category: category.key, submitted_categories)
        existing_categories_by_key = map_to_keyed_collections(lambda category: category.key, bundle.obj.categories.all())
        categories_to_add_or_maintain = flat_map_values(lambda key, categories: unique(categories, lambda category: category.value),
                                                        merge(existing_categories_by_key, submitted_categories_by_key))
        bundle.obj.categories.clear()
        bundle.obj.categories.add(*categories_to_add_or_maintain)
    except Exception, e:
        logger.critical(e.message)
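A sketch of the replace-by-key behavior (hypothetical categories), using a stdlib dict merge in place of footprint's merge: a submitted group replaces the existing group with the same key wholesale, while other keys are untouched:

from collections import namedtuple
Category = namedtuple('Category', ['key', 'value'])

existing_by_key = {'category': [Category('category', 'smart')],
                   'stage': [Category('stage', 'draft')]}
submitted_by_key = {'category': [Category('category', 'dumb')]}
merged = dict(existing_by_key, **submitted_by_key)
# merged == {'category': [Category(key='category', value='dumb')],
#            'stage': [Category(key='stage', value='draft')]}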
Example No. 52
0
def create_join(source_class, source_field, related_class, related_field, **kwargs):
    """
        Creates a join between two classes without using a modeled Django association. We do this in order to populate a Through table manually during import, or to simply create
        a join query that Django does not support through its related field mechanisms.
        Always try to use Django's implicit joining with filter() + extra(select=...) to create a join before resorting to this. Also try filter() with F() expressions (see Django docs)
    :param source_class:
    :param source_field:
    :param related_class:
    :param related_field:
    :param **kwargs: Optional arguments for join
        'extra' dict of extras to select
        'join_on_base' if True use the base class for the join
        'join_related_on_base' if True use the base class of the join class for the join
    :return:
    """

    # We either want to join to the related_class or its base, the latter case being when the related_class
    # inherits the field that we want to join
    resolved_related_class = related_class.__base__ if kwargs.get('join_related_on_base', False) else related_class

    selections = source_class.objects.extra(**merge(
        dict(select={'related_pk': '{join_class_table}.{join_class_pk}'.format(
            join_class_table=resolved_related_class._meta.db_table,
            join_class_pk=resolved_related_class._meta.pk.column)}),
        kwargs.get('extra', {}) or {}))
    # setup initial FROM clause
    selections.query.join((None, source_class._meta.db_table, None, None))

    if kwargs.get('join_on_base', False):
        # Manually join in the base model so that it joins before the join below
        parent_field = source_class._meta.parents.values()[0]
        connection = (
            source_class._meta.db_table,
            source_class.__base__._meta.db_table,
            parent_field.column,
            'id'
        )
        selections.query.join(connection)

    # join to join class
    connection = (
        (source_class.__base__ if kwargs.get('join_on_base', False) else source_class)._meta.db_table,
        resolved_related_class._meta.db_table,
        source_field.column,
        related_field.column,
    )
    selections.query.join(connection)

    return selections
Example No. 53
0
 def __init__(self, *args, **kwargs):
     # Remove any kwargs specific to this class before calling super
     new_kwargs = merge(
         remove_keys(kwargs, ["config_entities", "parent_config_entity"]),
         {"parent_config_entity": kwargs.get("parent_config_entity", global_config_singleton())},
     )
     super(CombinedConfigEntity, self).__init__(*args, **new_kwargs)
     # Set the parent_config_entity, which will function as the parent, if needed. Also reference the config
     # entities whose combined data will make up this instance's ConfigEntity properties
     if kwargs.get("parent_config_entity"):
         self.config_entities = kwargs["parent_config_entity"].children()
     elif "config_entities" in kwargs:
         # Just default to the GlobalConfig singleton. This could be smarter and traverse up all the config_entity
         # parents to find the common one.
         self.config_entities = kwargs["config_entities"]
Example No. 54
0
    def clone_db_entity_and_interest_for_result(self, config_entity,
                                                existing_db_entity_interest,
                                                reference_db_entity_key,
                                                **kwargs):
        """
            Clone the selected db_entity of key reference_db_entity and replace any of its attributes with those
            specified in **kwargs. **kwargs should contain a unique key property
        :param config_entity
        :param existing_db_entity_interest: The existing DbEntityInterest if one exists
        :param reference_db_entity_key: key of the DbEntity to clone
        :param kwargs: replacement values containing at the very least 'key'
        :return: The DbEntityInterest which references the cloned db_entity
        """
        source_db_entity = config_entity.computed_db_entities().get(
            key=reference_db_entity_key)
        # Avoid circular reference
        from footprint.main.publishing.db_entity_publishing import clone_or_update_db_entity_and_interest

        db_entity_interest = clone_or_update_db_entity_and_interest(
            config_entity,
            source_db_entity,
            DbEntity(**merge(
                kwargs,
                dict(feature_class_configuration=FeatureClassConfiguration(
                    **merge(
                        source_db_entity.feature_class_configuration.__dict__,
                        dict(feature_class_owner=reference_db_entity_key))),
                     ))),
            existing_db_entity_interest=existing_db_entity_interest,
            override_on_update=True)
        # Run this manually here. It should be triggered by saving the DbEntity, but something
        # is disabling the publisher
        # TODO: the DbEntity publisher should be turned on here so that this call isn't needed
        from footprint.main.publishing.user_publishing import on_db_entity_post_save_user
        on_db_entity_post_save_user(None, instance=db_entity_interest)
        return db_entity_interest
Example No. 55
0
 def __init__(self, *args, **kwargs):
     # Remove any kwargs specific to this class before calling super
     new_kwargs = merge(
         remove_keys(
             kwargs,
             ['config_entities', 'parent_config_entity']),
         {'parent_config_entity': kwargs.get('parent_config_entity', global_config_singleton())})
     super(CombinedConfigEntity, self).__init__(*args, **new_kwargs)
     # Set the parent_config_entity, which will function as the parent, if needed. Also reference the config
     # entities whose combined data will make up this instance's ConfigEntity properties
     if kwargs.get('parent_config_entity'):
         self.config_entities = kwargs['parent_config_entity'].children()
     elif 'config_entities' in kwargs:
         # Just default to the GlobalConfig singleton. This could be smarter and traverse up all the config_entity
         # parents to find the common one.
         self.config_entities = kwargs['config_entities']
Example No. 56
0
def create_layer_selection(config_entity, layer, attribute_id):
    db_entity = layer.db_entity_interest.db_entity
    connection = connection_dict(layer.config_entity.db)

    tilestache_layers = []

    users = set(get_users_with_perms(config_entity)) | set(get_users_with_perms(layer.db_entity_interest.db_entity))

    # Make sure layer_selection instances exist for the users
    from footprint.main.publishing.layer_publishing import update_or_create_layer_selections_for_layer
    update_or_create_layer_selections_for_layer(layer, users=users)

    logger.info("Get/Create layer_selection for config_entity %s, layer %s, users %s" %\
                (config_entity.key, layer.db_entity_key, ','.join(map(lambda user: user.username, users))))
    # Each layer has a dynamic class representing its SelectedFeature table
    get_or_create_layer_selection_class_for_layer(layer)
    if not users:
        return tilestache_layers

    # Resolve the Feature subclass for the DbEntity (called for its side effect, presumably ensuring the class exists)
    config_entity.db_entity_feature_class(key=layer.db_entity_key)
    layer_selection_class = get_or_create_layer_selection_class_for_layer(layer, config_entity)
    # Take the first user to create a template query
    user = list(users)[0]
    # Each LayerSelection instance is per user
    layer_selection = layer_selection_class.objects.get_or_create(user=user)[0]
    # Extract the query from the QuerySet
    query = re.sub(
        r'"layer_selection_id" = \d+',
        r'"layer_selection_id" = {user_id}',
        str(layer_selection.selected_features.values('wkb_geometry', 'id').query))
    logger.info("Creating tilestache layer_selection for layer %s, user %s, query: %s" % (layer.full_name, user.username, query))
    user_id_lookup = map_to_dict(lambda layer_selection: [layer_selection.user.id, layer_selection.id], layer_selection_class.objects.all())

    # Embed the id in the Geojson for each feature.
    # Nothing else is needed, since all other attributes can be looked up based on the id
    id_field = map(lambda field: field.name + '_id', layer_selection.feature_class._meta.parents.values())[0]

    vector_selection_layer = build_vector_layer_config(
        parameters=merge(connection, dict(query=query, column="wkb_geometry", user_id_lookup=user_id_lookup)),
        provider_id_property=id_field,
        client_id_property=db_entity._meta.pk.name
    )

    layer_key = "layer:{layer},attr_id:{attribute},type:{type}".format(layer=layer.id, attribute=attribute_id, type='selection')
    logger.info("Creating layer %s" % layer_key)
    tilestache_layers.append(TSLayer(key=layer_key, value=vector_selection_layer))
    return tilestache_layers
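An isolated illustration of the query templating above: re.sub swaps the concrete layer_selection id in the compiled SQL for a {user_id} placeholder that can be filled in per user (the SQL shown is hypothetical):

import re

sql = 'SELECT "wkb_geometry", "id" FROM selection WHERE "layer_selection_id" = 42'
template = re.sub(
    r'"layer_selection_id" = \d+',
    r'"layer_selection_id" = {user_id}',
    sql)
# template == 'SELECT "wkb_geometry", "id" FROM selection WHERE "layer_selection_id" = {user_id}'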