Example #1
    def importer(self, config_entity, db_entity, **kwargs):
        """
            Replaces the normal ImportProcessor importer with one to import a shapefile from disk
        """
        user = db_entity.creator

        if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
            # The table already exists. Skip the import and log a warning
            logger.warn("The target table for the layer selection import already exists. Skipping table import.")
        else:
            feature_class_creator = FeatureClassCreator(config_entity, db_entity)
            origin_feature_class_configuration = db_entity.origin_instance.feature_class_configuration
            # Create the new DbEntity FeatureClassConfiguration from the origin's. Pass in what has already been
            # created for the new feature_class_configuration. This should have things like generated=True
            feature_class_configuration = feature_class_creator.complete_or_create_feature_class_configuration(
                origin_feature_class_configuration,
                **merge(db_entity.feature_class_configuration.__dict__, dict(generated=True)))
            # Update the DbEntity
            feature_class_creator.update_db_entity(feature_class_configuration)

            if feature_class_configuration.source_from_origin_layer_selection and \
               feature_class_configuration.origin_layer_id:
                # If desired, limit the layer clone to that of the source layer's current LayerSelection for the
                # User doing the update
                layer_selection_class = get_or_create_layer_selection_class_for_layer(
                    Layer.objects.get(id=feature_class_configuration.origin_layer_id), True)
                layer_selection = layer_selection_class.objects.get(user=user)
                features = layer_selection.selected_features
            else:
                # Leave blank to copy all features by default
                features = None

            DefaultImportProcessor().peer_importer(config_entity, db_entity, import_from_origin=True, source_queryset=features)
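A quick note on a helper these examples lean on: merge appears throughout (here overlaying generated=True onto the configuration dict, later overlaying a connection dict onto a default port). Judging from those call sites it behaves like a right-biased dict union. A rough stand-in for readers, not the library's actual implementation:

# Rough stand-in for footprint's merge() helper, inferred from its call sites:
# later dicts win on key collisions, so merge(dict(port=5432), connection)
# lets the connection's settings override the port default.
def merge(*dicts):
    result = {}
    for d in dicts:
        result.update(d)
    return result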
Example #2
def db_entity_defaults(db_entity, config_entity=None):

    # Instantiate a FeatureClassCreator to make the FeatureClassConfiguration
    feature_class_creator = FeatureClassCreator(config_entity,
                                                db_entity,
                                                no_ensure=True)
    if config_entity:
        # Find the database of the configured client
        connection = resolve_fixture(None, "init", InitFixture,
                                     config_entity.schema()).import_database()
        # Remove the test_ prefix when running unit tests. Until we can auto-populate the test_ version
        # of the database, we want to simply rely on the manually configured source database
        if connection:
            connection['database'] = connection['database'].replace(
                'test_', '')
    else:
        # No config_entity abstract DbEntity case
        connection = None

    return dict(
        # The name is passed in or the titleized version of key
        name=db_entity.name or titleize(db_entity.key),
        # Postgres URL for local sources, or possibly a remote url (e.g. for background layer sources)
        # Unless overridden, create the url according to this postgres url scheme
        url=db_entity.url or \
            ('postgres://{user}:{password}@{host}:{port}/{database}'.format(
                **merge(dict(port=5432), connection)) if connection else None),
        # Normally equals the key, except for views of the table, like a Result DbEntity
        # Views leave this null and rely on query
        table=db_entity.table or (db_entity.key if not db_entity.query else None),
        # Query to create a "view" of the underlying data. Used by Result DbEntity instances
        query=db_entity.query,
        # How to group the features or query results. Not yet well hashed out
        group_by=db_entity.group_by,
        # The source DbEntity key if this DbEntity resulted from cloning a peer DbEntity
        source_db_entity_key=db_entity.source_db_entity_key,
        # Array used by remote data sources whose URLs have different host names
        # If so then the url will have a string variable for the host
        hosts=db_entity.hosts,
        # The User who created the DbEntity. TODO. Default should be an admin
        creator=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The User who updated the DbEntity. TODO. Default should be an admin
        updater=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],

        # The SRID of the Feature table
        srid=db_entity.srid,
        # This is a non-model object. So it is saved as a PickledObjectField
        # Whether the same instance is returned or not does not matter
        # If db_entity.feature_class_configuration is None, it will return None
        feature_class_configuration=feature_class_creator.complete_or_create_feature_class_configuration(
            db_entity.feature_class_configuration
        ),
        no_feature_class_configuration=db_entity.no_feature_class_configuration
        # feature_behavior is handled internally by DbEntity
    )
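A hedged sketch of how these defaults might feed a DbEntity upsert, in the spirit of update_or_create_db_entity_and_interest in Example #24; the call site below is an assumption for illustration:

# Hypothetical call site: key and schema uniquely identify the DbEntity
# (see Example #24); everything else comes from the computed defaults.
defaults = db_entity_defaults(db_entity, config_entity)
db_entity, created, updated = DbEntity.objects.update_or_create(
    key=db_entity.key,
    schema=db_entity.schema,
    defaults=defaults)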
Example #3
def db_entity_defaults(db_entity, config_entity=None):

    # Instantiate a FeatureClassCreator to make the FeatureClassConfiguration
    feature_class_creator = FeatureClassCreator(config_entity, db_entity, no_ensure=True)
    if config_entity:
        # Find the database of the configured client
        connection = resolve_fixture(None, "init", InitFixture, config_entity.schema()).import_database()
        # Remove the test_ prefix when running unit tests. Until we can auto-populate the test_ version
        # of the database, we want to simply rely on the manually configured source database
        if connection:
            connection['database'] = connection['database'].replace('test_', '')
    else:
        # No config_entity abstract DbEntity case
        connection = None

    return dict(
        # The name is passed in or the titleized version of key
        name=db_entity.name or titleize(db_entity.key),
        # Postgres URL for local sources, or possibly a remote url (e.g. for background layer sources)
        # Unless overridden, create the url according to this postgres url scheme
        url=db_entity.url or \
            ('postgres://{user}:{password}@{host}:{port}/{database}'.format(
                **merge(dict(port=5432), connection)) if connection else None),
        # Normally equals the key, except for views of the table, like a Result DbEntity
        # Views leave this null and rely on query
        table=db_entity.table or (db_entity.key if not db_entity.query else None),
        # Query to create a "view" of the underlying data. Used by Result DbEntity instances
        query=db_entity.query,
        # How to group the features or query results. Not yet well hashed out
        group_by=db_entity.group_by,
        # The source DbEntity key if this DbEntity resulted from cloning a peer DbEntity
        source_db_entity_key=db_entity.source_db_entity_key,
        # Array used by remote data sources whose URLs have different host names
        # If so then the url will have a string variable for the host
        hosts=db_entity.hosts,
        # The User who created the DbEntity. TODO. Default should be an admin
        creator=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],
        # The User who updated the DbEntity. TODO. Default should be an admin
        updater=db_entity.creator if hasattr(db_entity, 'creator') else get_user_model().objects.filter()[0],

        # The SRID of the Feature table
        srid=db_entity.srid,
        # This is a non-model object. So it is saved as a PickledObjectField
        # Whether the same instance is returned or not does not matter
        # If db_entity.feature_class_configuration is None, it will return None
        feature_class_configuration=feature_class_creator.complete_or_create_feature_class_configuration(
            db_entity.feature_class_configuration
        ),
        no_feature_class_configuration=db_entity.no_feature_class_configuration
        # feature_behavior is handled internally by DbEntity
    )
Example #4
def annotated_related_feature_class_pk_via_geographies(manager, config_entity, db_entity_keys):
    """
        To join a related model by geographic join
    """
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator
    feature_class_creator = FeatureClassCreator.from_dynamic_model_class(manager.model)

    def resolve_related_model_pk(db_entity_key):
        related_model = config_entity.db_entity_feature_class(db_entity_key)
        # The common Geography class
        geography_class = feature_class_creator.common_geography_class(related_model)
        geography_scope = feature_class_creator.common_geography_scope(related_model)
        logger.warn("Resolved geography scope %s", geography_scope)
        # Find the geographies ManyToMany field that relates this model to the related_model
        # via a Geography class. Which Geography class depends on their common geography scope
        geographies_field = feature_class_creator.geographies_field(geography_scope)
        try:
            # Find the queryable field name from the geography class to the related model
            related_model_geographies_field_name = resolve_queryable_name_of_type(geography_class, related_model)
        except:
            # Sometimes the geography class hasn't had its fields cached properly. Fix here
            clear_many_cache(geography_class)
            related_model_geographies_field_name = resolve_queryable_name_of_type(geography_class, related_model)

        return '%s__%s__pk' % (geographies_field.name, related_model_geographies_field_name)

    pk_paths = map_to_dict(lambda db_entity_key:
        [db_entity_key, Min(resolve_related_model_pk(db_entity_key))],
        db_entity_keys)

    return manager.annotate(**pk_paths)
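A hedged usage sketch; the db_entity_keys are hypothetical placeholders:

# Annotate each feature with the pk of a geographically joined related feature.
# 'my_features' and 'census_blocks' are hypothetical db_entity_keys.
feature_class = config_entity.db_entity_feature_class('my_features')
annotated = annotated_related_feature_class_pk_via_geographies(
    feature_class.objects, config_entity, ['census_blocks'])
# Each result row now carries a 'census_blocks' attribute holding the Min pk
# of the related features that share its geography.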
Example #5
 def create_result_map(self, values_query_set):
     related_models = self.resolve_join_models()
     logger.debug(
         "Creating result map for related models %s feature class %s" %
         (', '.join(map(lambda r: str(r),
                        related_models)), self.feature_class))
     feature_class_creator = FeatureClassCreator.from_dynamic_model_class(
         self.feature_class)
     geography_scopes = feature_class_creator.geography_scopes()
     # Get the final segment of each related model path. We want to map these to the db_entity_key names
     related_model_path_to_name = map_to_dict(
         lambda related_model: [
             resolve_related_model_path_via_geographies(
                 self.feature_class.objects, related_model).split('__')[1],
             related_model.db_entity_key
         ], related_models)
     return values_query_set.create_result_map(
         related_models=related_models,
         # map_path_segments maps related object paths to their model name,
         # and removes the geographies segment of the path
         map_path_segments=merge(
             # Map each geography scope to its corresponding field on the feature class
             map_to_dict(
                 lambda geography_scope: [
                     feature_class_creator.geographies_field(geography_scope
                                                             ).name, None
                 ], geography_scopes),
             related_model_path_to_name))
Example #6
 def create_layer_from_layer_selection(self, params):
     """
         Used to create a new Layer from the current LayerSelection features
     :param params:
     :return:
     """
     # Resolve the source layer from the layer_selection__id
     source_layer = self.resolve_layer(params)
     config_entity = source_layer.config_entity
     db_entity = source_layer.db_entity_interest.db_entity
     feature_class = FeatureClassCreator(config_entity,
                                         db_entity).dynamic_model_class()
     layer = Layer.objects.get(presentation__config_entity=config_entity,
                               db_entity_key=db_entity.key)
     layer_selection = get_or_create_layer_selection_class_for_layer(
         layer, config_entity, False).objects.all()[0]
     # TODO no need to do geojson here
     feature_dict = dict(type="Feature")
     feature_dicts = map(
         lambda feature:
         deep_merge(feature_dict,
                    {"geometry": geojson.loads(feature.wkb_geometry.json)}),
         layer_selection.selected_features or feature_class.objects.all())
     json = dict({"type": "FeatureCollection", "features": feature_dicts})
     db_entity_configuration = update_or_create_db_entity(
         config_entity,
         **dict(class_scope=FutureScenario,
                name='Import From Selection Test',
                key='import_selection_test',
                url='file://notusingthis'))
     self.make_geojson_db_entity(config_entity,
                                 db_entity_configuration,
                                 data=json)
Example #7
def get_sample_feature(config_entity_key, db_entity_key):
    config_entity = ConfigEntity.objects.get(key=config_entity_key).subclassed
    db_entity = config_entity.computed_db_entity(key=db_entity_key)
    feature_class = FeatureClassCreator(config_entity,
                                        db_entity).dynamic_model_class()
    feature = feature_class.objects.filter()[0]
    return feature
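A hedged usage sketch; both keys are hypothetical placeholders for a real ConfigEntity key and DbEntity key:

# Pull an arbitrary feature to smoke-test the dynamically built model class.
feature = get_sample_feature('my_project__my_scenario', 'census_blocks')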
Example #8
 def feature_class(self):
     """
         Resolves the concrete feature class by finding the db_entity owner
     :return:
     """
     return FeatureClassCreator(self.db_entity_owner,
                                self).dynamic_model_class()
Example #9
 def create_result_map(self, values_query_set):
     related_models = self.resolve_join_models()
     logger.debug("Creating result map for related models %s feature class %s" % (', '.join(map(lambda r: str(r), related_models)), self.feature_class))
     feature_class_creator = FeatureClassCreator.from_dynamic_model_class(self.feature_class)
     geography_scopes = feature_class_creator.geography_scopes()
     # Get the final segment of each related model path. We want to map these to the db_entity_key names
     related_model_path_to_name = map_to_dict(
         lambda related_model:
         [resolve_related_model_path_via_geographies(
             self.feature_class.objects,
             related_model).split('__')[1],
          related_model.db_entity_key],
         related_models
     )
     return values_query_set.create_result_map(
         related_models=related_models,
         # map_path_segments maps related object paths to their model name,
         # and removes the geographies segment of the path
         map_path_segments=merge(
             # Map each geography scope to its corresponding field on the feature class
             map_to_dict(
                 lambda geography_scope: [
                     feature_class_creator.geographies_field(geography_scope).name,
                     None
                 ],
                 geography_scopes),
             related_model_path_to_name)
     )
Example #10
def annotated_related_feature_class_pk_via_geographies(manager, config_entity, db_entity_keys):
    """
        To join a related model by geographic join
    """
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator

    feature_class_creator = FeatureClassCreator.from_dynamic_model_class(manager.model)

    def resolve_related_model_pk(db_entity_key):
        related_model = config_entity.db_entity_feature_class(db_entity_key)
        # The common Geography class
        geography_class = feature_class_creator.common_geography_class(related_model)
        geography_scope = feature_class_creator.common_geography_scope(related_model)
        logger.warn("Resolved geography scope %s", geography_scope)
        # Find the geographies ManyToMany field that relates this model to the related_model
        # via a Geography class. Which Geography class depends on their common geography scope
        geographies_field = feature_class_creator.geographies_field(geography_scope)
        try:
            # Find the queryable field name from the geography class to the related model
            related_model_geographies_field_name = resolve_queryable_name_of_type(geography_class, related_model)
        except:
            # Sometimes the geography class hasn't had its fields cached properly. Fix here
            clear_many_cache(geography_class)
            related_model_geographies_field_name = resolve_queryable_name_of_type(geography_class, related_model)

        return "%s__%s__pk" % (geographies_field.name, related_model_geographies_field_name)

    pk_paths = map_to_dict(
        lambda db_entity_key: [db_entity_key, Min(resolve_related_model_pk(db_entity_key))], db_entity_keys
    )

    return manager.annotate(**pk_paths)
Example #11
 def feature_class_lookup(self):
     # Get the client region fixture (or the default region if the former doesn't exist)
     client_region = resolve_fixture("config_entity", "region", RegionFixture)
     region_class_lookup = client_region.feature_class_lookup()
     return merge(
         region_class_lookup,
         FeatureClassCreator(self.config_entity).key_to_dynamic_model_class_lookup(self.default_db_entities())
     )
Example #12
def resolve_field_path_via_geographies(field_path, manager, related_models):
    """
        Resolve the given field path in case it's not absolute.
        For instance, if it is 'block' and one of our related models accessible via geographies__relatedmodel has that property,
        return 'geographies_[scope_id]__relatedmodel__block'
        It will also be tested against the main manager after all related models fail,
        e.g. manager.values(field_path) if successful would simply return field_path
    :param field_path: django field path. e.g. du or built_form__name
    :param manager: The main manager by which the related models are resolved and by which the full path is computed
    :param related_models: models joined to the manager. For instance, manager.model is CanvasFeature, a related_model could be
        CensusBlock, which might be related to the former via 'geographies_[scope_id]__censusblock9rel'. The relationship is computed
        by assuming that the related model is related by geographies and looking for a field matching its type
    :return:
    """
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator

    feature_class_creator = FeatureClassCreator.from_dynamic_model_class(manager.model)
    for related_model in related_models:
        try:
            # See if the field_name resolves
            # There's probably a more efficient way to do this
            related_model.objects.values(field_path)
            resolved_field_path = field_path
        except:
            # See if the first segment matches the related_model db_entity_key
            first_segment = field_path.split("__")[0]
            if first_segment != related_model.db_entity_key:
                # If not, move on
                continue
            # Take all but the first segment
            resolved_field_path = "__".join(field_path.split("__")[1:])
        # Success, find the path to this model from geographies
        geography_class = feature_class_creator.common_geography_class(related_model)
        geographies_field = feature_class_creator.geographies_field(
            feature_class_creator.common_geography_scope(related_model)
        )
        geography_related_field_name = resolve_queryable_name_of_type(geography_class, related_model)
        return "%s__%s__%s" % (geographies_field.name, geography_related_field_name, resolved_field_path)
    # See if it matches the main model
    try:
        if field_path.split("__")[0] == manager.model.db_entity_key:
            # If the manager model db_entity_key was used in the path, just strip it out
            updated_field_path = "__".join(field_path.split("__")[1:])
            manager.values(updated_field_path)
        else:
            # Otherwise test query with the full path
            updated_field_path = field_path
            manager.values(updated_field_path)
        # Success, return the field_path
        return updated_field_path
    except:
        logger.exception(
            "Cannot resolve field path %s to the main model %s or any joined models %s",
            field_path,
            manager.model,
            related_models,
        )
        raise
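A hedged sketch of the resolution behavior the docstring describes; 'block', 'du', 'census_blocks' and 'canvas_feature' are hypothetical names, and the returned paths vary by geography scope:

# A bare related-model field resolves through the geographies join:
related = [config_entity.db_entity_feature_class('census_blocks')]
resolve_field_path_via_geographies('block', feature_class.objects, related)
# -> e.g. 'geographies_12__censusblock9rel__block'

# A field prefixed with the main model's own db_entity_key has the prefix stripped:
resolve_field_path_via_geographies('canvas_feature__du', feature_class.objects, [])
# -> 'du' (assuming manager.model.db_entity_key == 'canvas_feature')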
Example #13
def resolve_field_path_via_geographies(field_path, manager, related_models):
    """
        Resolve the given field path in case it's not absolute.
        For instance, if it is 'block' and one of our related models accessible via geographies__relatedmodel has that property,
        return 'geographies_[scope_id]__relatedmodel__block'
        It will also be tested against the main manager after all related models fail,
        e.g. manager.values(field_path) if successful would simply return field_path
    :param field_path: django field path. e.g. du or built_form__name
    :param manager: The main manager by which the related models are resolved and by which the full path is computed
    :param related_models: models joined to the manager. For instance, manager.model is CanvasFeature, a related_model could be
        CensusBlock, which might be related to the former via 'geographies_[scope_id]__censusblock9rel'. The relationship is computed
        by assuming that the related model is related by geographies and looking for a field matching its type
    :return:
    """
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator
    feature_class_creator = FeatureClassCreator.from_dynamic_model_class(
        manager.model)
    for related_model in related_models:
        try:
            # See if the field_name resolves
            # There's probably a more efficient way to do this
            related_model.objects.values(field_path)
            resolved_field_path = field_path
        except:
            # See if the first segment matches the related_model db_entity_key
            first_segment = field_path.split('__')[0]
            if first_segment != related_model.db_entity_key:
                # If not, move on
                continue
            # Take all but the first segment
            resolved_field_path = '__'.join(field_path.split('__')[1:])
        # Success, find the path to this model from geographies
        geography_class = feature_class_creator.common_geography_class(
            related_model)
        geographies_field = feature_class_creator.geographies_field(
            feature_class_creator.common_geography_scope(related_model))
        geography_related_field_name = resolve_queryable_name_of_type(
            geography_class, related_model)
        return '%s__%s__%s' % (geographies_field.name,
                               geography_related_field_name,
                               resolved_field_path)
    # See if it matches the main model
    try:
        if field_path.split('__')[0] == manager.model.db_entity_key:
            # If the manager model db_entity_key was used in the path, just strip it out
            updated_field_path = '__'.join(field_path.split('__')[1:])
            manager.values(updated_field_path)
        else:
            # Otherwise test query with the full path
            updated_field_path = field_path
            manager.values(updated_field_path)
        # Success, return the field_path
        return updated_field_path
    except:
        logger.exception(
            'Cannot resolve field path %s to the main model %s or any joined models %s',
            field_path, manager.model, related_models)
        raise
Example #14
    def importer(self, config_entity, db_entity, **kwargs):
        """
            Creates various GeojsonFeature classes by importing geojson and saving it to the database via a dynamic subclass of GeojsonFeature
        :param schema: The optional schema to use for the dynamic subclass's meta db_table attribute, which will allow the class's table to be saved in the specified schema. Defaults to public
        :param data: Optional python dict data to use instead of loading from the db_entity.url
        :return: a list of lists. Each list is a list of features of a distinct subclass of GeoJsonFeature that is created dynamically. To persist these features, you must first create the subclass's table in the database using create_table_for_dynamic_class(). You should also register the table as a DbEntity.
        """
        if self.seed_data:
            data = geojson.loads(jsonify(self.seed_data), object_hook=geojson.GeoJSON.to_instance)
        else:
            fp = open(db_entity.url.replace('file://', ''))
            data = geojson.load(fp, object_hook=geojson.GeoJSON.to_instance)
        feature_class_creator = FeatureClassCreator(config_entity, db_entity)
        # find all unique properties
        feature_class_configuration = feature_class_creator.feature_class_configuration_from_geojson_introspection(data)
        feature_class_creator.update_db_entity(feature_class_configuration)
        feature_class = feature_class_creator.dynamic_model_class(base_only=True)
        # Create our base table. Normally this is done by the import, but we're just importing into memory
        create_tables_for_dynamic_classes(feature_class)
        # Now write each feature to our newly created table
        for feature in map(lambda feature: self.instantiate_sub_class(feature_class, feature), data.features):
            feature.save()
        # Create the rel table too
        rel_feature_class = feature_class_creator.dynamic_model_class()
        create_tables_for_dynamic_classes(rel_feature_class)

        # PostGIS 2 handles this for us now
        # if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
        #     # Tell PostGIS about the new geometry column or the table
        #     sync_geometry_columns(db_entity.schema, db_entity.table)

        # Create association classes and tables and populate them with data
        create_and_populate_relations(config_entity, db_entity)
Example #15
 def create_join_feature_class(self):
     """
         Make an unmanaged Django model based on the joined fields
     """
     return FeatureClassCreator(
         self.config_entity,
         self.layer.db_entity_interest.db_entity).dynamic_join_model_class(
             self.resolve_join_models(),
             self.result_map.related_model_lookup.keys())
Example #16
 def feature_class_lookup(self):
     # Get the client project fixture (or the default project fixture if the former doesn't exist)
     project = merge(*map(
         lambda project_fixture: project_fixture.feature_class_lookup(),
         project_specific_project_fixtures(
             config_entity=self.config_entity)))
     return merge(
         project,
         FeatureClassCreator(
             self.config_entity).key_to_dynamic_model_class_lookup(
                 self.default_db_entities()))
Example #17
 def feature_class_lookup(self):
     """
         Adds mappings of custom Feature classes
     :return:
     """
     parent_fixture = self.parent_fixture
     feature_class_lookup = parent_fixture.feature_class_lookup()
     return merge(
         feature_class_lookup,
         FeatureClassCreator(
             self.config_entity).key_to_dynamic_model_class_lookup(
                 self.default_db_entities()))
Example #18
def create_layer_selections(layers):
    """
        Create LayerSelection classes and instances for the given Scenario subclasses among the
        classes in limit_to_classes. Also filters by db_entity_key if specified
    :return:
    """
    for config_entity in unique(map(lambda layer: layer.config_entity,
                                    layers)):
        FeatureClassCreator(config_entity).ensure_dynamic_models()
    for selection_layer in layers:
        # Recreate
        get_or_create_layer_selection_class_for_layer(selection_layer)
    update_or_create_layer_selections(config_entity=None)
Example #19
def resolve_related_model_path_via_geographies(manager, related_model):
    """
        Returns the query string path 'geographies_[scope_id]__[field name of the related model from the main model]'
        The scope_id is the id of the ConfigEntity that both models share in common by ascending the ConfigEntity
        hierarchy starting at each model's geography_scope
    """
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator
    feature_class_creator = FeatureClassCreator.from_dynamic_model_class(manager.model)
    geography_scope = feature_class_creator.common_geography_scope(related_model)
    geographies_field = feature_class_creator.geographies_field(geography_scope)
    geography_class = feature_class_creator.common_geography_class(related_model)
    geography_related_field_name = resolve_queryable_name_of_type(geography_class, related_model)
    return '%s__%s' % (geographies_field.name, geography_related_field_name)
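A hedged usage sketch; the db_entity_key is hypothetical:

related_model = config_entity.db_entity_feature_class('census_blocks')
path = resolve_related_model_path_via_geographies(feature_class.objects, related_model)
# e.g. 'geographies_12__censusblock9rel'. Example #4 appends '__pk' to such a
# path to annotate the related model's primary key.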
Example #20
def resolve_related_model_path_via_geographies(manager, related_model):
    """
        Returns the query string path 'geographies_[scope_id]__[field name of the related model from the main model]'
        The scope_id is the id of the ConfigEntity that both models share in common by ascending the ConfigEntity
        hierarchy starting at each model's geography_scope
    """
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator

    feature_class_creator = FeatureClassCreator.from_dynamic_model_class(manager.model)
    geography_scope = feature_class_creator.common_geography_scope(related_model)
    geographies_field = feature_class_creator.geographies_field(geography_scope)
    geography_class = feature_class_creator.common_geography_class(related_model)
    geography_related_field_name = resolve_queryable_name_of_type(geography_class, related_model)
    return "%s__%s" % (geographies_field.name, geography_related_field_name)
Example #21
 def result_map(cls):
     """
         Creates and caches a result map for the Feature class. The result_map has useful metadata about
         the class
     :param cls:
     :return:
     """
     if cls._result_map:
         return cls._result_map
     from footprint.main.models.feature.feature_class_creator import FeatureClassCreator
     feature_class_creator = FeatureClassCreator.from_dynamic_model_class(cls)
     if not feature_class_creator.dynamic_model_class_is_ready:
         return None
     cls._result_map = feature_class_creator.dynamic_model_class().objects.all().create_result_map()
     return cls._result_map
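A hedged usage sketch, assuming result_map is exposed as a classmethod on the dynamic Feature subclass:

# Memoized per class; returns None until the dynamic model machinery is ready.
result_map = feature_class.result_map()
if result_map:
    result_fields = result_map['result_fields']  # key also used in Example #28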
Example #22
 def result_map(cls):
     """
         Creates and caches a result map for the Feature class. The result_map has useful metadata about
         the class
     :param cls:
     :return:
     """
     if cls._result_map:
         return cls._result_map
     from footprint.main.models.feature.feature_class_creator import FeatureClassCreator
     feature_class_creator = FeatureClassCreator.from_dynamic_model_class(
         cls)
     if not feature_class_creator.dynamic_model_class_is_ready:
         return None
     cls._result_map = feature_class_creator.dynamic_model_class(
     ).objects.all().create_result_map()
     return cls._result_map
Example #23
def delete_layer_selections(layers):
    for config_entity in unique(map(lambda layer: layer.config_entity,
                                    layers)):
        FeatureClassCreator(config_entity).ensure_dynamic_models()
    for selection_layer in layers:
        try:
            # Drop the table
            layer_selection_class = get_or_create_layer_selection_class_for_layer(
                selection_layer, no_table_creation=True)

            if layer_selection_class:
                if hasattr(layer_selection_class.features, 'through'):
                    layer_selection_features_class = layer_selection_class.features.through
                    drop_layer_selection_table(layer_selection_features_class)
                drop_layer_selection_table(layer_selection_class)

        except DatabaseError, e:
            logger.warning(
                "Couldn't destroy LayerSelection tables. Maybe the public.layer table no longer exists: %s"
                % e.message)
Example #24
def update_or_create_db_entity_and_interest(config_entity, config_db_entity):
    """
        Sync a single db_entity_configuration or db_entity and its db_entity_interest
        :return: A tuple of the DbEntityInterest and the created flag
    """
    unique_key_combo = ['key', 'schema']

    db_entity, created, updated = DbEntity.objects.update_or_create(
        # key and schema uniquely identify the DbEntity
        key=config_db_entity.key,
        schema=config_db_entity.schema,
        defaults=remove_keys(model_dict(config_db_entity), unique_key_combo))

    db_entity.feature_behavior = config_db_entity.feature_behavior
    db_entity.save()

    logger.info("ConfigEntity/DbEntity Publishing. DbEntity: %s" %
                db_entity.full_name)

    # Create the DbEntityInterest through class instance which associates the ConfigEntity instance
    # to the DbEntity instance. For now the interest attribute is hard-coded to OWNER. This might
    # be used in the future to indicate other levels of interest
    interest = Interest.objects.get(key=Keys.INTEREST_OWNER)
    db_entity_interest, created, updated = DbEntityInterest.objects.update_or_create(
        config_entity=config_entity, db_entity=db_entity, interest=interest)

    # Update the geography scope after the db_entity_interest saves, as this is required to find 'owned' db_entities in a config_entity
    if not db_entity.no_feature_class_configuration:
        feature_class_creator = FeatureClassCreator(config_entity,
                                                    db_entity,
                                                    no_ensure=True)
        db_entity.feature_class_configuration.geography_scope = config_entity.id if db_entity.feature_class_configuration.primary_geography \
            else feature_class_creator.resolved_geography_scope.id

        db_entity.save()

    return db_entity_interest, created
Example #25
def _post_save_publishing(job, config_entity, user, **kwargs):
    """
        Runs all configured publishers via the Django signals they depend upon.
        This is done in Celery in order to support long running tasks launched from a client.
        Peer tasks are run in parallel. Dependent tasks are called after the tasks they depend on complete

    :param hash_id: Job hash id
    :param config_entity:
    :param user: The current user or None if no user is in scope
    :return:
    """

    if not settings.FOOTPRINT_INIT:
        # There is a poorly-understood issue related to uploading .gdb
        # files with multiple layers (possibly caused by some race
        # condition) that, without sleeping here, causes the celery
        # task to run without an `instance` (the object that triggered
        # post save processing). Testing had shown 10 seconds to be the
        # shortest amount of time to wait here that permits the post-save
        # processing to complete successfully.
        time.sleep(10)

    bundle = kwargs['bundle']
    # Make sure all the Feature subclasses that the celery worker might need are created
    if config_entity:
        config_entity._feature_classes_created = False
        FeatureClassCreator(config_entity)

    # Get the publisher_name, proportion, and signal_path
    publishing_info = get_publishing_info(**kwargs)

    try:
        # Make sure no transactions are outstanding
        # This shouldn't be needed once Django is upgraded
        transaction.commit()
    except Exception, e:
        pass
Example #26
    def create_empty_source_table(self,
                                  clazz,
                                  source_table_name,
                                  source_db_connection,
                                  extra_fields={}):
        project = Project(key='sutter_county', id=0)
        db_entity = DbEntity(key=Keys.DB_ABSTRACT_BASE_AGRICULTURE_FEATURE,
                             feature_class_configuration=dict(
                                 abstract_class=full_module_path(clazz)))
        SourceClass = FeatureClassCreator(project, db_entity,
                                          no_ensure=True).dynamic_model_class(
                                              base_only=True,
                                              schema='public',
                                              table=source_table_name)
        # class SourceClass(clazz):
        #     class Meta(clazz.Meta):
        #         db_table = source_table_name
        create_tables_for_dynamic_classes(SourceClass)
        for field in SourceClass._meta.fields[1:]:
            setattr(field, 'null', True)

        drop_table = "DROP TABLE IF EXISTS {final_table} CASCADE;".format(
            final_table=source_table_name)

        sql, refs = source_db_connection.creation.sql_create_model(
            SourceClass, color_style())
        add_geometry_fields = '''
            ALTER TABLE {final_table} ADD COLUMN geography_id VARCHAR;
            ALTER TABLE {final_table} ADD COLUMN wkb_geometry GEOMETRY;'''.format(
            final_table=source_table_name)

        sql = drop_table + sql[0] + add_geometry_fields
        for dbfield, fieldtype in extra_fields.items():
            sql += 'ALTER TABLE {final_table} ADD COLUMN {field} {type};'.format(
                final_table=source_table_name, field=dbfield, type=fieldtype)
        source_db_connection.cursor().execute(sql)
Example #27
def parse_query(config_entity, manager, filters=None, joins=None, aggregates=None, group_bys=None):
    queryset = manager
    group_by_values = None
    annotation_tuples = None

    # Make sure all related models have been created before querying
    from footprint.main.models.feature.feature_class_creator import FeatureClassCreator
    FeatureClassCreator(config_entity).ensure_dynamic_models()

    # Any joins are db_entity_keys and resolve to feature classes of the config_entity
    related_models = map(lambda join: config_entity.db_entity_feature_class(join), joins or [])

    # Use group_by_values to group by and then attach the aggregates to each unique result via annotation
    # If aggregates are specified but group by is not, we use aggregate to just get a single result
    # For now we assume any filtering should be applied BEFORE aggregation
    if filters:
        queryset = queryset.filter(parse_token(filters, manager, related_models))

    # We only need to join explicitly if the join is not included in one of the group by fields
    manual_joins = joins or [] if not group_bys else \
        set(joins or [])-\
        set(map(lambda group_by: resolve_db_entity_key_of_field_path(parse_group_by(group_by, manager, related_models), manager, related_models), group_bys))

    if manual_joins:
        # If there are joins, filter the queryset by inner join on the related_model pk through geography
        for related_model in related_models:
            related_field_pk_path = resolve_field_path_via_geographies('pk', manager, [related_model])
            queryset = queryset.filter(**{'{0}__isnull'.format(related_field_pk_path):False})

    # If there are aggregates, they are either part of the main table or join table
    if aggregates:
        # Resolve the field path using available joins via geographies or on the main model
        # Then send the resolved field
        annotation_tuples = map(
            lambda aggregate: parse_annotation(aggregate, manager, related_models),
            aggregates)

    if group_bys:
        group_by_values = map(
            lambda group_by: parse_group_by(group_by, manager, related_models),
            to_list(group_bys))

        annotation_tuples = annotation_tuples or []
        # Add a Count to the selection if one isn't present
        if not first(lambda annotation_tuple: Count==annotation_tuple[0], annotation_tuples):
            annotation_tuples.insert(0, (Count, group_by_values[0], 'count'))

        # TODO. We might have to do rounding of floats here using an extra clause:
        # extra(select={'custom_field': 'round(field, 2)'})
        queryset = queryset.values(*group_by_values).order_by(*group_by_values)
    elif annotation_tuples:
        # If there are annotations but no group_bys, we need to fake a group by annotating
        # the count of the pk to each row and then grouping by it. Since every row
        # has one pk all the rows group together
        # Otherwise we'd have to use the aggregate function which doesn't give us
        # a query back
        queryset = queryset.annotate(count=Count('pk')).values('count')

    if annotation_tuples:

        for annotation_tuple in annotation_tuples:
            # Annotation built-in functions or custom functions of the queryset (like geo stuff)
            annotate_method = getattr(queryset, annotation_tuple[0]) if\
                isinstance(annotation_tuple[0], basestring) else\
                annotation_tuple[0]

            if len(annotation_tuple)==3:
                # Apply alias if specified
                queryset = queryset.annotate(**{annotation_tuple[2]:annotate_method(annotation_tuple[1])})
            else:
                # Otherwise default the name to the field
                queryset = queryset.annotate(annotate_method(annotation_tuple[1]))

    elif group_by_values:
        # If no annotations are specified, add in a count annotation to make the group by take effect
        # As long as we annotate a count of one we'll get the correct group_by, since django receives values(group_by1, group_by2, etc).annotate(count(group_by1))
        queryset = queryset.annotate(count=Count(group_by_values[0]))

    return queryset
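A hedged sketch of a grouped query; the join key and group-by field path are hypothetical:

# Group features by a field and count them, joining a related feature class
# through geographies.
queryset = parse_query(
    config_entity,
    feature_class.objects,
    joins=['census_blocks'],
    group_bys=['built_form__name'])
# With group_bys and no aggregates, parse_query adds a Count annotation so
# the GROUP BY takes effect (see the group_by_values branch above).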
Example #28
    def dynamic_resource_subclass(self,
                                  layer_selection=None,
                                  db_entity=None,
                                  feature_class=None,
                                  config_entity=None,
                                  metadata=None,
                                  params=None,
                                  **kwargs):
        """
            Creates the dynamic Feature Resource class by passing in a layer_selection, db_entity, or feature_class
        :param layer_selection: Required if db_entity or metadata aren't present
        :param db_entity: Required if layer_selection or metadata aren't present
        :param metadata: Required along with config_entity if layer_selection or db_entity aren't present
        :param kwargs:
        :return:
        """
        feature_class_configuration = None
        if layer_selection:
            # Coming in relative to a LayerSelection, which puts us in the context of the LayerSelection's
            # feature query for this Feature subclass
            layer = layer_selection.layer
            # If we pass in a ConfigEntity it means we want to scope the Feature class to its scope.
            # The ConfigEntity defaults to that of the Layer, but we can override it to be a lower
            # scope to make sure that we have access to lower DbEntities for performing joins
            config_entity = config_entity.subclassed if config_entity else layer.config_entity.subclassed
            logger.debug(
                "Resolving FeatureResource subclass for layer_selection: {0}, config_entity: {1}"
                .format(layer_selection.unique_id, config_entity.id))
            # Resolve the dynamic Feature class with the given config_entity so that we can access all DbEntities
            # of the ConfigEntity for joins
            feature_class = config_entity.db_entity_feature_class(
                layer.db_entity.key)
        elif db_entity:
            # Coming in relative to a DbEntity, meaning we don't care about a particular LayerSelection's
            # feature query for this Feature subclass
            config_entity = db_entity.config_entity
            logger.debug(
                "Resolving FeatureResource subclass for db_entity: {0}, config_entity: {1}"
                .format(db_entity.id, config_entity.id))
            # Resolve the dynamic Feature class with the given config_entity so that we can access all DbEntities
            # of the ConfigEntity for joins
            feature_class = config_entity.db_entity_feature_class(
                db_entity.key)
        elif metadata:
            # Coming in with metadata, meaning this is an uploaded or ArcGIS table with no DbEntity yet
            # We need to construct a FeatureClass from the metadata
            logger.debug(
                "Resolving FeatureResource subclass for metadata: {0}, config_entity: {1}"
                .format(metadata, config_entity.id))
            feature_class_creator = FeatureClassCreator(config_entity)
            feature_class_configuration = feature_class_creator.feature_class_configuration_from_metadata(
                metadata['schema'])
            feature_class = FeatureClassCreator(
                config_entity,
                feature_class_configuration).dynamic_model_class()

        if not feature_class_configuration:
            # If we didn't already, ensure all dynamic model classes have been created.
            # This only needs to run once to get all dynamic feature subclasses into memory,
            # in case they are needed by an association, join, or something similar
            feature_class_creator = FeatureClassCreator.from_dynamic_model_class(
                feature_class)
            feature_class_creator.ensure_dynamic_models()

        logger.debug("Resolving resource for Feature subclass: {0}".format(
            feature_class))

        # Resolve the FeatureResource subclass based on the given Feature subclass
        # If self is already a subclass, just return self
        # Else, return a preconfigured subclass or one dynamically created. The latter will probably be the only way in the future.
        # If not already subclassed
        is_singleton_feature = issubclass(self.__class__,
                                          SingletonFeatureResourceMixin)
        is_template_feature = self.__class__ == TemplateFeatureResource
        if self.__class__ in [
                FeatureResource, TemplateFeatureResource,
                FeatureCategoryAttributeResource,
                FeatureQuantitativeAttributeResource
        ]:
            if is_singleton_feature or params.get('is_feature_attribute'):
                queryset = feature_class.objects.none()
            elif kwargs.get('method', None) == 'PATCH':
                # It's possible to PATCH with an active join query.
                # But we don't want to use a join query when patching
                queryset = feature_class.objects.all()
            else:
                # Get the queryset stored by the layer_selection or an empty query if we don't have a layer_selection
                queryset = layer_selection.selected_features_or_values if\
                    layer_selection else \
                    feature_class.objects.none()

                if layer_selection and not (is_singleton_feature or kwargs.get(
                        'query_may_be_empty')) and queryset.count() == 0:
                    raise Exception(
                        "Unexpected empty queryset for layer_selection features: %s"
                        % queryset.query)
            is_values_queryset = isinstance(queryset, ValuesQuerySet)

            # Order the queryset by the table id
            queryset = queryset.order_by('id')

            if is_values_queryset:
                join_feature_class = layer_selection.create_join_feature_class(
                ) if is_values_queryset else feature_class
                logger.info("Created join_feature_class: %s" %
                            join_feature_class)
                # Force the queryset to our new class so that Tastypie can map the dict results to it
                queryset.model = join_feature_class

                return self.__class__.resolve_resource_class(
                    join_feature_class,
                    queryset=queryset,
                    base_resource_class=self.join_feature_resource_class(
                        join_feature_class),
                    additional_fields_dict=dict(
                        # Pass these to the feature resource to help it resolve
                        # field mappings and add related fields (just needed for join_feature_class)
                        # Use the layer_selection if it exists since it might have filtered or extra query fields
                        result_field_lookup=(layer_selection
                                             or db_entity).result_field_lookup
                        if not metadata else {},
                        related_field_lookup=(
                            layer_selection or db_entity).related_field_lookup
                        if not metadata else {},
                        # We use these in the FeatureResource to create a unique id for each join Feature
                        join_model_attributes=layer_selection
                        and layer_selection.result_map.join_model_attributes),
                    is_join_query=True,
                    limit_fields=layer_selection.result_map['result_fields'])
            else:
                abstract_feature_resource_class = self.__class__
                resource_class = abstract_feature_resource_class.resolve_resource_class(
                    feature_class,
                    queryset=queryset,
                    # Give FeatureResource a reference to the layer_selection
                    additional_fields_dict=merge(
                        dict(
                            # Pass this to the feature resource to help it resolve field mappings
                            result_field_lookup=(layer_selection or db_entity).
                            result_field_lookup if not metadata else {}),
                        dict(
                            # Not sure why it doesn't work to just stick this on the TemplateFeatureResource
                            feature_fields=ListField(
                                attribute='feature_fields',
                                null=True,
                                blank=True,
                                readonly=True),
                            feature_field_title_lookup=PickledDictField(
                                attribute='feature_field_title_lookup',
                                null=True,
                                blank=True,
                                readonly=True),
                        ) if is_template_feature else dict()),
                    for_template=is_template_feature)
                return resource_class
        return self
Example #29
    def dynamic_resource_subclass(self, layer_selection=None, db_entity=None, feature_class=None, config_entity=None, metadata=None, params=None, **kwargs):
        """
            Creates the dynamic Feature Resource class by passing in a layer_selection, db_entity, or feature_class
        :param layer_selection: Required if db_entity or metadata aren't present
        :param db_entity: Required if layer_selection or metadata aren't present
        :param metadata: Required along with config_entity if layer_selection or db_entity aren't present
        :param kwargs:
        :return:
        """
        feature_class_configuration = None
        if layer_selection:
            # Coming in relative to a LayerSelection, which puts us in the context of the LayerSelection's
            # feature query for this Feature subclass
            layer = layer_selection.layer
            # If we pass in a ConfigEntity it means we want to scope the Feature class to its scope.
            # The ConfigEntity defaults to that of the Layer, but we can override it to be a lower
            # scope to make sure that we have access to lower DbEntities for performing joins
            config_entity = config_entity.subclassed if config_entity else layer.config_entity.subclassed
            logger.debug("Resolving FeatureResource subclass for layer_selection: {0}, config_entity: {1}".format(layer_selection.unique_id, config_entity.id))
            # Resolve the dynamic Feature class with the given config_entity so that we can access all DbEntities
            # of the ConfigEntity for joins
            feature_class = config_entity.db_entity_feature_class(layer.db_entity.key)
        elif db_entity:
            # Coming in relative to a DbEntity, meaning we don't care about a particular LayerSelection's
            # feature query for this Feature subclass
            config_entity = db_entity.config_entity
            logger.debug("Resolving FeatureResource subclass for db_entity: {0}, config_entity: {1}".format(db_entity.id, config_entity.id))
            # Resolve the dynamic Feature class with the given config_entity so that we can access all DbEntities
            # of the ConfigEntity for joins
            feature_class = config_entity.db_entity_feature_class(db_entity.key)
        elif metadata:
            # Coming in with metadata, meaning this is an uploaded or ArcGIS table with no DbEntity yet
            # We need to construct a FeatureClass from the metadata
            logger.debug("Resolving FeatureResource subclass for metadata: {0}, config_entity: {1}".format(metadata, config_entity.id))
            feature_class_creator = FeatureClassCreator(
                config_entity
            )
            feature_class_configuration = feature_class_creator.feature_class_configuration_from_metadata(metadata['schema'])
            feature_class = FeatureClassCreator(
                config_entity,
                feature_class_configuration
            ).dynamic_model_class()

        if not feature_class_configuration:
            # If we didn't already, ensure all dynamic model classes have been created.
            # This only needs to run once to get all dynamic feature subclasses into memory,
            # in case they are needed by an association, join, or something similar
            feature_class_creator = FeatureClassCreator.from_dynamic_model_class(feature_class)
            feature_class_creator.ensure_dynamic_models()

        logger.debug("Resolving resource for Feature subclass: {0}".format(feature_class))

        # Resolve the FeatureResource subclass based on the given Feature subclass
        # If self is already a subclass, just return self
        # Else, return a preconfigured subclass or one dynamically created. The latter will probably be the only way in the future.
        # If not already subclassed
        is_singleton_feature = issubclass(self.__class__, SingletonFeatureResourceMixin)
        is_template_feature = self.__class__ == TemplateFeatureResource
        if self.__class__ in [FeatureResource, TemplateFeatureResource, FeatureCategoryAttributeResource,
                              FeatureQuantitativeAttributeResource]:
            if is_singleton_feature or params.get('is_feature_attribute'):
                queryset = feature_class.objects.none()
            elif kwargs.get('method', None) == 'PATCH':
                # It's possible to PATCH with an active join query.
                # But we don't want to use a join query when patching
                queryset = feature_class.objects.all()
            else:
                # Get the queryset stored by the layer_selection or an empty query if we don't have a layer_selection
                queryset = layer_selection.selected_features_or_values if\
                    layer_selection else \
                    feature_class.objects.none()

                if layer_selection and not (is_singleton_feature or kwargs.get('query_may_be_empty')) and queryset.count()==0:
                    raise Exception(
                        "Unexpected empty queryset for layer_selection features: %s" %
                        queryset.query)
            is_values_queryset = isinstance(queryset, ValuesQuerySet)

            # Order the queryset by the table id
            queryset = queryset.order_by('id')

            if is_values_queryset:
                join_feature_class = layer_selection.create_join_feature_class()
                logger.info("Created join_feature_class: %s" % join_feature_class)
                # Force the queryset to our new class so that Tastypie can map the dict results to it
                queryset.model = join_feature_class

                return self.__class__.resolve_resource_class(
                    join_feature_class,
                    queryset=queryset,
                    base_resource_class=self.join_feature_resource_class(join_feature_class),
                    additional_fields_dict=dict(
                        # Pass these to the feature resource to help it resolve
                        # field mappings and add related fields (just needed for the join_feature_class)
                        # Use the layer_selection if it exists since it might have filtered or extra query fields
                        result_field_lookup=(layer_selection or db_entity).result_field_lookup if not metadata else {},
                        related_field_lookup=(layer_selection or db_entity).related_field_lookup if not metadata else {},
                        # We use these in the FeatureResource to create a unique id for each join Feature
                        join_model_attributes=layer_selection and layer_selection.result_map.join_model_attributes
                    ),
                    is_join_query=True,
                    limit_fields=layer_selection.result_map['result_fields']
                )
            else:
                abstract_feature_resource_class = self.__class__
                resource_class = abstract_feature_resource_class.resolve_resource_class(
                    feature_class,
                    queryset=queryset,
                    # Give FeatureResource a reference to the layer_selection
                    additional_fields_dict=merge(
                        dict(
                            # Pass this to the feature resource to help it resolve field mappings
                            result_field_lookup=(layer_selection or db_entity).result_field_lookup if not metadata else {}
                        ),
                        dict(
                            # Not sure why it doesn't work to just stick this on the TemplateFeatureResource
                            feature_fields=ListField(attribute='feature_fields', null=True, blank=True, readonly=True),
                            feature_field_title_lookup=PickledDictField(attribute='feature_field_title_lookup', null=True, blank=True, readonly=True),
                        ) if is_template_feature else dict()
                    ),
                    for_template=is_template_feature
                )
                return resource_class
        return self
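    # Note: the ValuesQuerySet branch above hinges on reassigning queryset.model to the
    # dynamically created join class so that Tastypie, which introspects queryset.model,
    # can map each result dict onto the join class's fields. A hedged sketch of the
    # pattern (names follow the code above; this is an illustration, not the full call):
    #
    #     join_feature_class = layer_selection.create_join_feature_class()
    #     queryset.model = join_feature_class
    #     return self.__class__.resolve_resource_class(join_feature_class, queryset=queryset, is_join_query=True)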
    def importer(self, config_entity, db_entity, **kwargs):
        """
            Replaces the normal ImportProcessor importer with one to import a SQL file from disk
        """
        if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
            # The table already exists. Skip the import an log a warning
            logger.warn("The target table for the feature table import already exists. Skipping table import.")
        else:
            # We don't store the upload_id alone, so pull it off the url
            upload_id = db_entity.url.replace('file:///tmp/', '').replace('.sql.zip', '')
            # Strip the file:// scheme to get the local path of the zipfile
            if db_entity.url.startswith('file://'):
                file_path = db_entity.url[len('file://'):]

            logger.warn("Unpacking SQL zipfile at %s" % file_path)
            # Unpack the zipfile and return the path the sql file was placed at
            path = unpack_zipfile(file_path, upload_id)
            # The file name is always the name of the table defined therein
            table_name = path.split('/')[-1].split('.')[0].lower()
            # Update the db_entity.url from the zip file url to the file path.
            # This lets ImportData find it.
            db_entity.url = 'file://%s' % path
            logger.info("Url of DbEntity is %s" % db_entity.url)
            db_entity.save()

            # Perform some sed updates to get the sql file ready for import
            regex_substitutions = []
            sql_file_path = file_url_to_path(db_entity.url)
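            # Each substitution below is (pattern, replacement) or
            # (pattern, replacement, (first_line, last_line)); the optional tuple
            # presumably restricts the substitution to those 1-based, inclusive lines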

            # Add IF EXISTS to the DROP TABLE statement to prevent an error if the table doesn't exist yet
            regex_substitutions.append((r'DROP TABLE (?!IF EXISTS)', r'DROP TABLE IF EXISTS'))

            # TODO temp, fix an AC bug. It seems that using a capitalized column name is problematic (?)
            # The suggested solution is to double quote it, but quotes cause other problems, so we simply lowercase
            regex_substitutions.append((r' OGC_FID ', ' ogc_fid ', (4, 4)))  # only line 4
            regex_substitutions.append((r'PRIMARY KEY \(ogc_fid\)', 'PRIMARY KEY (ogc_fid)', (4, 4)))  # only line 4
            # TODO end temp fix

            # Update the index name to include the schema. This format matches that created for preconfigured feature
            # tables (see import_data.py)
            spatial_index_name = '{schema}_{key}_geom_idx'.format(schema=db_entity.schema, key=db_entity.key)
            regex_substitutions.append((r'CREATE INDEX ".*" ON', 'CREATE INDEX "%s" ON' % spatial_index_name, (6, 6)))  # only line 6

            # Remove the reference to the geometry_columns, since we use a materialized view
            regex_substitutions.append((r'^DELETE FROM geometry_columns', '--DELETE FROM geometry_columns', (2, 2)))

            # Update the sql to have a unique table name which matches the DbEntity key
            # Also change public to our import schema to keep it from causing trouble in the public schema
            # Otherwise we run into all kinds of trouble trying to get the SQL into the system
            regex_substitutions.append((r'"public"."%s"' % table_name, '"import"."%s"' % db_entity.key))

            regex_substitutions.append((r"'%s'" % table_name, "'%s'" % db_entity.key, (2, 5)))

            regex_substitutions.append((r'"%s_pk"' % table_name, '"%s_pk"' % db_entity.key, (4, 4)))

            # Update public to the import schema
            regex_substitutions.append((r"AddGeometryColumn\('public'", "AddGeometryColumn('%s'" % settings.IMPORT_SCHEMA, (5, 5)))

            regex_substitutions.append((r'"%s_wkb_geometry_geom_idx"' % table_name, '"%s_wkb_geometry_geom_idx"' % db_entity.key, (6, 6)))

            for command in regex_substitutions:
                logger.info("Applying the following substitution %s" % ', '.join(command[0:2]))
            apply_regexes_to_file(sql_file_path, regex_substitutions)

            ImportData(config_entity=config_entity, db_entity_key=db_entity.key).run()

        # Add our normal primary key in the id column if needed
        add_primary_key_if_needed(db_entity)

        feature_class_creator = FeatureClassCreator(config_entity, db_entity)
        # Inspect the imported table to create the feature_class_configuration
        feature_class_configuration = feature_class_creator.feature_class_configuration_from_introspection()

        # Merge the created feature_class_configuration with the one already defined for the db_entity
        feature_class_creator.update_db_entity(feature_class_configuration)
        logger.info("Finished import for DbEntity: %s, feature_class_configuration: %s" % (db_entity, db_entity.feature_class_configuration))

        # Create association classes and tables and populate them with data
        create_and_populate_relations(config_entity, feature_class_creator.db_entity)
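The importer above delegates its sed-style edits to apply_regexes_to_file. The real helper lives in footprint's utilities and may differ; the following is only a hedged sketch, inferred from the call sites above, of how such a helper could apply (pattern, replacement[, (first_line, last_line)]) tuples in place:

import re

def apply_regexes_to_file(file_path, regex_substitutions):
    """
        Illustrative reimplementation: applies each (pattern, replacement) or
        (pattern, replacement, (first_line, last_line)) substitution to the file
        in place. Line ranges are assumed to be 1-based and inclusive.
    """
    with open(file_path) as f:
        lines = f.readlines()
    for substitution in regex_substitutions:
        pattern, replacement = substitution[0], substitution[1]
        # An optional third element restricts the substitution to a line range
        first_line, last_line = substitution[2] if len(substitution) > 2 else (1, len(lines))
        for i in range(first_line - 1, min(last_line, len(lines))):
            lines[i] = re.sub(pattern, replacement, lines[i])
    with open(file_path, 'w') as f:
        f.writelines(lines)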
Beispiel #31
0
class DbEntities(models.Model):

    db_entities = models.ManyToManyField('DbEntity',
                                         through='DbEntityInterest')

    class Meta:
        abstract = True

    def add_db_entity_interests(self, *db_entity_interests):
        """
            Adds one or more unsaved DbEntityInterests to the instance's collection.
            If the instance has not yet overridden its parents' db_entities by adding at least one DbEntityInterest,
            the parent's DbEntityInterests will be adopted and then the db_entities given here will be added
        :return:
        """
        # This check exists to avoid infinite recursion, since db_entities are sometimes added in the post_config_entity_save handler
        if len(db_entity_interests) > 0:
            # Even though the field name is db_entities, we need to pass the DbEntityInterests
            self._add('db_entities', *db_entity_interests)

    def remove_db_entity_interests(self, *db_entity_interests):
        self._remove('db_entities', *db_entity_interests)

    def feature_class_of_base_class(self, base_feature_class):
        """
            Finds the feature_class of this config_entity that is derived from the given base class
        :param base_feature_class:
        :return:
        """
        db_entities = filter(
            # Check the configured abstract class.
            # The source_db_entity_key test is a hack to prevent Result db_entities from being chosen
            lambda db_entity: issubclass(
                resolve_module_attr(db_entity.feature_class_configuration.abstract_class_name, object),
                base_feature_class
            ) and not db_entity.source_db_entity_key,
            self.computed_db_entities())
        if len(db_entities) != 1:
            raise Exception(
                "Expected exactly one db_entity matching the base_class {0} but got {1}"
                .format(base_feature_class,
                        db_entities if len(db_entities) > 0 else 'none'))
        return self.db_entity_feature_class(db_entities[0].key)
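    # Hypothetical usage (BaseParcelFeature is an illustrative class name, not
    # necessarily a real footprint base class):
    #     parcel_feature_class = config_entity.feature_class_of_base_class(BaseParcelFeature)
    # Raises if zero or multiple owned DbEntities derive from the given base class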

    def db_entity_feature_class(self,
                                key,
                                abstract_class=False,
                                base_feature_class=False):
        """
            Resolves the Feature class subclass of this config_entity for the given key, or the base class version
            if base_feature_class=True. Note that this is slightly different from asking a DbEntity for its corresponding
            Feature class. A DbEntity will always use its owning ConfigEntity for the config_entity property
            of the Feature class. This method uses self as the ConfigEntity, which is useful when self
            is a lower ConfigEntity in the hierarchy (such as a Scenario) and the Feature class needs
            access to related Feature classes that are also at a low level. This works because a lower ConfigEntity
            has access to DbEntities that the higher one does not, which allows queries from a higher
            DbEntity to join a lower one.
        :param key: The DbEntity key
        :param abstract_class: Default False. If True, returns the abstract base class instead of the subclass
        :param base_feature_class: Default False. If True, returns the base class instead of the default rel class
        :return:
        """
        try:
            original_db_entity = self.computed_db_entities().get(key=key)
        except Exception:
            raise Exception(
                "The DbEntity key %s could not be found in the computed DbEntities of the ConfigEntity %s, which contains %s"
                % (key, self.name, ', '.join(
                    map(lambda db_entity: db_entity.key,
                        self.computed_db_entities()))))

        # Resolve the source_db_entity_key of the DbEntity or just use the given key
        source_db_entity_key = original_db_entity.source_db_entity_key if original_db_entity.source_db_entity_key else key
        db_entity = self.computed_db_entities().get(key=source_db_entity_key)
        from footprint.main.models.feature.feature_class_creator import FeatureClassCreator
        if abstract_class:
            subclass = resolve_module_attr(
                db_entity.feature_class_configuration.abstract_class_name)
        elif base_feature_class or db_entity.feature_class_configuration.no_table_associations:
            subclass = FeatureClassCreator(
                self, db_entity).dynamic_model_class(base_only=True)
        else:
            subclass = FeatureClassCreator(self,
                                           db_entity).dynamic_model_class()
        if not subclass:
            raise Exception(
                "For config_entity {0} no class associated with db_entity_key {1}{2}. "
                "Register a table in default_db_entities() of the owning config_entity."
                .format(
                    unicode(self), key,
                    ", even after resolving source_db_entity_key to {0}".
                    format(source_db_entity_key)
                    if source_db_entity_key != key else ''))
        return subclass
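For context, a hedged usage sketch of db_entity_feature_class; 'base_feature' and the scenario instance are illustrative, not necessarily real fixture values:

# Resolve the concrete Feature subclass for one of the scenario's DbEntity keys
feature_class = scenario.db_entity_feature_class('base_feature')
# Resolve the configured abstract class instead
abstract_class = scenario.db_entity_feature_class('base_feature', abstract_class=True)
# Resolve the base class, without the table-association rel fields
base_class = scenario.db_entity_feature_class('base_feature', base_feature_class=True)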
Beispiel #32
0
def crud_db_entities(config_entity, crud, db_entity_keys=None):
    """
        Creates or updates the db_entities of the ConfigEntity
    :param config_entity
    :param crud: CrudKey.CREATE, CrudKey.CLONE, CrudKey.UPDATE, CrudKey.SYNC, or CrudKey.DELETE (unimplemented)
    :return:
    """
    from footprint.client.configuration.fixture import ConfigEntityFixture
    # If not present, create the database schema for this ConfigEntity's feature table data
    PGNamespace.objects.create_schema(config_entity.schema())
    client_fixture = ConfigEntityFixture.resolve_config_entity_fixture(
        config_entity)
    db_entity_filter = dict(key__in=db_entity_keys) if db_entity_keys else {}

    # Process the DbEntities from the origin_instance or the db_entity_configuration from the fixtures,
    # but only the first time this scenario is saved
    # We only get those scoped to (owned by) the class of our config_entity. Those scoped above will be adopted automatically
    # and need not be created. This means a Scenario creates DbEntities scoped to Scenario and adopts those scoped
    # to Project or Region. It does not clone the latter.
    if CrudKey.CLONE == crud:
        # CRUD the DbEntities to match the origin instance
        origin_instance = config_entity.origin_instance
        # Clone the DbEntities from the origin ConfigEntity.
        db_entities = map(
            lambda source_db_entity: clone_or_update_db_entity_and_interest(
                config_entity, source_db_entity,
                DbEntity(schema=config_entity.schema(),
                         feature_class_configuration=FeatureClassConfiguration(
                             geography_scope=FeatureClassCreator(
                                 config_entity).resolved_geography_scope.id,
                             class_attrs={
                                 'config_entity__id': config_entity.id,
                                 'override_db': config_entity.db,
                                 'db_entity_key': source_db_entity.key
                             }))).db_entity,
            origin_instance.owned_db_entities(**db_entity_filter))
    elif crud in [CrudKey.SYNC, CrudKey.CREATE]:
        # TODO examine the two conditions below more carefully. We want syncing to be the same for clones and non-clones
        if config_entity.origin_instance:
            # Syncing previously cloned instance
            db_entities = config_entity.owned_db_entities(**db_entity_filter)
            update_or_create_db_entities_and_interests(config_entity,
                                                       *db_entities)
        else:
            # Create or Sync new instance
            # Get the default DbEntity configurations from the fixture
            default_db_entities = filter(
                lambda db_entity: db_entity.key in db_entity_keys
                if db_entity_keys else True,
                client_fixture.default_db_entities())
            # Find additional owned (not adopted) db_entities that aren't defaults, namely those that were created by the user
            additional_db_entities = filter(
                lambda db_entity: db_entity.key in db_entity_keys
                if db_entity_keys else True,
                client_fixture.non_default_owned_db_entities())
            # Combine the defaults with the additions
            db_entities = default_db_entities + list(additional_db_entities)
            update_or_create_db_entities_and_interests(config_entity,
                                                       *db_entities)
    elif CrudKey.UPDATE == crud:
        # No complex updates are enabled for scenarios, so no post-save processing is needed
        return
    elif CrudKey.DELETE == crud:
        raise NotImplementedError("DELETE is not implemented")

    # Disable post-save publishing while saving to prevent an infinite loop
    previous = config_entity._no_post_save_publishing
    config_entity._no_post_save_publishing = True
    # Save post_create changes. This is just to store selected DbEntities
    config_entity.save()
    config_entity._no_post_save_publishing = previous

    reset_queries()
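A hedged usage sketch of crud_db_entities; the scenario instance and the DbEntity key are illustrative:

# Create the default and user-added DbEntities for a newly created scenario
crud_db_entities(scenario, CrudKey.CREATE)
# Re-sync only specific DbEntities by key
crud_db_entities(scenario, CrudKey.SYNC, db_entity_keys=['base_feature'])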