def importer(self, config_entity, db_entity, **kwargs):
        """
            Creates various GeojsonFeature classes by importing geojson and saving it to the database via a dynamic subclass of GeojsonFeature
        :schema: The optional schema to use for the dynamic subclass's meta db_table attribute, which allows the class's table to be saved in the specified schema. Defaults to public
        :data: Optional python dict data to use instead of loading from the db_entity.url
        :return: a list of lists. Each inner list contains the features of a distinct, dynamically created subclass of GeoJsonFeature. To persist these features, you must first create the subclass's table in the database using create_tables_for_dynamic_classes(). You should also register the table as a DbEntity.
        """
        if self.seed_data:
            data = geojson.loads(jsonify(self.seed_data), object_hook=geojson.GeoJSON.to_instance)
        else:
            fp = open(db_entity.url.replace('file://', ''))
            data = geojson.load(fp, object_hook=geojson.GeoJSON.to_instance)
        feature_class_creator = FeatureClassCreator(config_entity, db_entity)
        # find all unique properties
        feature_class_configuration = feature_class_creator.feature_class_configuration_from_geojson_introspection(data)
        feature_class_creator.update_db_entity(feature_class_configuration)
        feature_class = feature_class_creator.dynamic_model_class(base_only=True)
        # Create our base table. Normally this is done by the import, but we're just importing into memory
        create_tables_for_dynamic_classes(feature_class)
        # Now write each feature to our newly created table
        for feature in data.features:
            self.instantiate_sub_class(feature_class, feature).save()
        # Create the rel table too
        rel_feature_class = feature_class_creator.dynamic_model_class()
        create_tables_for_dynamic_classes(rel_feature_class)

        # PostGIS 2 handles this for us now
        # if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
        #     # Tell PostGIS about the new geometry column or the table
        #     sync_geometry_columns(db_entity.schema, db_entity.table)

        # Create association classes and tables and populate them with data
        create_and_populate_relations(config_entity, db_entity)
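
        # Minimal usage sketch (illustrative, not from the original source): assuming this
        # importer is defined on a GeoJsonImportProcessor class and that db_entity.url
        # points at a .geojson file on disk. The processor name and log line are hypothetical.
        #
        #   GeoJsonImportProcessor().importer(config_entity, db_entity)
        #   rel_feature_class = FeatureClassCreator(config_entity, db_entity).dynamic_model_class()
        #   logger.info("Imported %s features" % rel_feature_class.objects.count())
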
    def importer(self, config_entity, db_entity, **kwargs):
        """
            Replaces the normal ImportProcessor importer with one to import a shapefile from disk
        """
        user = db_entity.creator

        if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
            # The table already exists. Skip the import and log a warning
            logger.warn("The target table for the layer selection import already exists. Skipping table import.")
        else:
            feature_class_creator = FeatureClassCreator(config_entity, db_entity)
            origin_feature_class_configuration = db_entity.origin_instance.feature_class_configuration
            # Create the new DbEntity FeatureClassConfiguration from the origin's. Pass in what has already been
            # created for the new feature_class_configuration. This should have things like generated=True
            feature_class_configuration = feature_class_creator.complete_or_create_feature_class_configuration(
                origin_feature_class_configuration,
                **merge(db_entity.feature_class_configuration.__dict__, dict(generated=True)))
            # Update the DbEntity
            feature_class_creator.update_db_entity(feature_class_configuration)

            if feature_class_configuration.source_from_origin_layer_selection and \
               feature_class_configuration.origin_layer_id:
                # If desired, limit the layer clone to that of the source layer's current LayerSelection for the
                # User doing the update
                layer_selection_class = get_or_create_layer_selection_class_for_layer(
                    Layer.objects.get(id=feature_class_configuration.origin_layer_id), True)
                layer_selection = layer_selection_class.objects.get(user=user)
                features = layer_selection.selected_features
            else:
                # Leave blank to copy all features by default
                features = None

            DefaultImportProcessor().peer_importer(config_entity, db_entity, import_from_origin=True, source_queryset=features)
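
    # Sketch of the `merge` helper used above (an assumption about its behavior, not
    # the project's actual implementation): shallow-merge dicts left to right, with
    # later values overriding earlier ones.
    def merge_sketch(*dicts):
        result = {}
        for d in dicts:
            result.update(d)
        return result

    # merge_sketch(dict(generated=False, key='parcels'), dict(generated=True))
    # => {'generated': True, 'key': 'parcels'}
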
    def importer(self, config_entity, db_entity, **kwargs):
        """
            Replaces the normal ImportProcessor importer with one to import a sql file from disk
        """
        if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
            # The table already exists. Skip the import and log a warning
            logger.warn("The target table for the feature table import already exists. Skipping table import.")
        else:
            # We don't store the upload_id alone, so pull it off the url
            upload_id = db_entity.url.replace('file:///tmp/', '').replace('.sql.zip', '')
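            # e.g. 'file:///tmp/a1b2c3d4.sql.zip' -> 'a1b2c3d4' (illustrative value)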
            # Unpack the zipfile and return the path the sql file was placed at
            if db_entity.url.startswith('file://'):
                file_path = db_entity.url[len('file://'):]
            else:
                raise Exception("Expected a file:// url for the sql import, got %s" % db_entity.url)

            logger.info("Unpacking sql zipfile at %s" % file_path)
            path = unpack_zipfile(file_path, upload_id)
            # The file is always the name of the table defined therein
            table_name = path.split('/')[-1].split('.')[0].lower()
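            # e.g. path '/tmp/a1b2c3d4/Parcels.sql' -> table_name 'parcels' (illustrative)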
            # Update the db_entity.url from the zip file url to the unpacked file path
            # so that ImportData can find it
            db_entity.url = 'file://%s' % path
            logger.info("Url of DbEntity is %s" % db_entity.url)
            db_entity.save()

            # Perform some sed updates to get the sql file ready for import
            regex_substitutions = []
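            # Each entry below is (pattern, replacement) or (pattern, replacement, (start_line, end_line)),
            # where the optional line range (apparently 1-based and inclusive, per the comments below)
            # restricts the substitution to those lines of the file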
            sql_file_path = file_url_to_path(db_entity.url)

            # Add IF EXISTS to the drop table to prevent an error if the table doesn't exist yet
            regex_substitutions.append((r'DROP TABLE (?!IF EXISTS)', r'DROP TABLE IF EXISTS'))
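            # e.g. 'DROP TABLE "public"."parcels";' becomes 'DROP TABLE IF EXISTS "public"."parcels";' (illustrative)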

            # TODO temporary fix for an AC bug. It seems that using a capitalized column name is problematic (?)
            # The suggested solution is to double-quote it, but quotes cause other problems, so we simply lowercase it
            regex_substitutions.append((r' OGC_FID ', ' ogc_fid ', (4, 4)))  # only line 4
            regex_substitutions.append((r'PRIMARY KEY \(ogc_fid\)', 'PRIMARY KEY (ogc_fid)', (4, 4)))  # only line 4
            # TODO end temp fix

            # Update the index name to include the schema. This format matches that created for preconfigured feature
            # tables (see import_data.py)
            spatial_index_name = '{schema}_{key}_geom_idx'.format(schema=db_entity.schema, key=db_entity.key)
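            # e.g. schema 'scenario_1' and key 'parcels' yield 'scenario_1_parcels_geom_idx' (illustrative values)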
            regex_substitutions.append((r'CREATE INDEX ".*" ON', 'CREATE INDEX "%s" ON' % spatial_index_name, (6, 6)))  # only line 6

            # Remove the reference to the geometry_columns, since we use a materialized view
            regex_substitutions.append((r'^DELETE FROM geometry_columns', '--DELETE FROM geometry_columns', (2, 2)))

            # Update the sql to have a unique table name which matches the DbEntity key
            # Also change public to our import schema to keep it from causing trouble in the public schema
            # Otherwise we run into all kinds of trouble trying to get the SQL into the system
            regex_substitutions.append((r'"public"."%s"' % table_name, '"import"."%s"' % db_entity.key))

            regex_substitutions.append((r"'%s'" % table_name, "'%s'" % db_entity.key, (2, 5)))

            regex_substitutions.append((r'"%s_pk"' % table_name, '"%s_pk"' % db_entity.key, (4, 4)))

            # Update public to the import schema
            regex_substitutions.append((r"AddGeometryColumn\('public'", "AddGeometryColumn('%s'" % settings.IMPORT_SCHEMA, (5, 5)))

            regex_substitutions.append((r'"%s_wkb_geometry_geom_idx"' % table_name, '"%s_wkb_geometry_geom_idx"' % db_entity.key, (6, 6)))

            for command in regex_substitutions:
                logger.info("Applying the following substitution %s" % ', '.join(command[0:2]))
            apply_regexes_to_file(sql_file_path, regex_substitutions)

            ImportData(config_entity=config_entity, db_entity_key=db_entity.key).run()

        # Add our normal primary key in the id column if needed
        add_primary_key_if_needed(db_entity)

        feature_class_creator = FeatureClassCreator(config_entity, db_entity)
        # Inspect the imported table to create the feature_class_configuration
        feature_class_configuration = feature_class_creator.feature_class_configuration_from_introspection()

        # Merge the created feature_class_configuration with the one already defined for the db_entity
        feature_class_creator.update_db_entity(feature_class_configuration)
        logger.info("Finished import for DbEntity: %s, feature_class_configuration: %s" % (db_entity, db_entity.feature_class_configuration))

        # Create association classes and tables and populate them with data
        create_and_populate_relations(config_entity, feature_class_creator.db_entity)
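
    # Sketch of the behavior assumed for apply_regexes_to_file above (illustrative, not
    # the project's actual implementation): apply each (pattern, replacement[, (start, end)])
    # substitution to the file in place, restricted to the 1-based inclusive line range
    # when one is given.
    def apply_regexes_to_file_sketch(path, substitutions):
        import re
        with open(path) as f:
            lines = f.readlines()
        for substitution in substitutions:
            pattern, replacement = substitution[0], substitution[1]
            start, end = substitution[2] if len(substitution) > 2 else (1, len(lines))
            for i in range(start - 1, min(end, len(lines))):
                lines[i] = re.sub(pattern, replacement, lines[i])
        with open(path, 'w') as f:
            f.writelines(lines)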