def get_shapefile_info(shapefile_path):
    """Open a shapefile with OGR and return basic layer metadata.

    Args:
        shapefile_path: Path to a .shp file readable by GDAL/OGR.

    Returns:
        A DynamicObject carrying the layer ``name``, geometry ``type`` name
        and detected ``srid`` (None when it cannot be determined).
    """
    from django.contrib.gis import gdal
    ds = gdal.DataSource(shapefile_path)
    layer = ds[0]
    # Per-field metadata, built by zipping the parallel layer attribute
    # lists instead of indexing by position.  (Currently commented out of
    # the returned object; kept for callers that may re-enable it.)
    fields = [
        dict(name=name, type=ftype.__name__, width=width, precision=precision)
        for name, ftype, width, precision in zip(
            layer.fields, layer.field_types,
            layer.field_widths, layer.field_precisions)
    ]
    srid = None
    if layer.srs is not None:
        if layer.srs.srid is None:
            # Fall back to resolving the SRID from the proj4 string.
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # are not swallowed; lookup failures still leave srid = None.
            try:
                srid = get_srid(layer.srs.proj4)
            except Exception:
                pass
        else:
            srid = layer.srs.srid

    return DynamicObject(
        dict(
            name=layer.name,
            # num_features=layer.num_feat,
            type=layer.geom_type.name,
            # fields=fields,
            # extent=layer.extent.tuple,
            srid=srid))
Пример #2
0
 def to_python(self, value):
     """Convert an uploaded file into a single unioned geometry.

     Accepts either a plain vector file or a zip archive containing a
     shapefile.  Returns None for empty input or when GDAL cannot read
     the data / determine an SRS; otherwise returns the union of all
     geometries in the first layer.
     """
     value = super(GeometryFileField, self).to_python(value)
     if value is None:
         return value
     filename = value.name
     tmpdir = None
     if zipfile.is_zipfile(value):
         tmpdir = tempfile.mkdtemp()
         zf = zipfile.ZipFile(value)
         # Extract all files from the temporary directory using only the
         # base file name, avoids security issues with relative paths in the
         # zip.
         for item in zf.namelist():
             filename = os.path.join(tmpdir, os.path.basename(item))
             with open(filename, 'wb') as f:
                 f.write(zf.read(item))
             if filename.endswith('.shp'):
                 # Stop once the .shp member is extracted; members listed
                 # after it in the archive are not extracted.
                 break
     # Attempt to union all geometries from GDAL data source.
     try:
         geoms = gdal.DataSource(filename)[0].get_geoms()
         # Fold the geometries pairwise into a single union.
         geom = reduce(lambda g1, g2: g1.union(g2), geoms)
         if not geom.srs:
             raise gdal.OGRException('Cannot determine SRS')
     except (gdal.OGRException, gdal.OGRIndexError):
         # Unreadable data or an empty layer yields no geometry.
         geom = None
     finally:
         # Always remove the extraction directory, even on failure.
         if tmpdir and os.path.isdir(tmpdir):
             shutil.rmtree(tmpdir)
     return geom
Пример #3
0
    def handle(self, *args, **options):
        """Inspect an OGR data source and print a GeoDjango model definition.

        Pops ``data_source`` and ``model_name`` from the parsed options,
        runs ``_ogrinspect`` with the remaining applicable options and,
        when the ``mapping`` option is set, appends an auto-generated
        LayerMapping dictionary.

        Raises:
            CommandError: when GDAL is unavailable or the source is unreadable.
        """
        data_source, model_name = options.pop('data_source'), options.pop('model_name')
        if not gdal.HAS_GDAL:
            raise CommandError('GDAL is required to inspect geospatial data sources.')

        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.OGRException as msg:
            raise CommandError(msg)

        # Returning the output of ogrinspect with the given arguments
        # and options.
        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
        # Filter options to params accepted by `_ogrinspect`.
        # inspect.getfullargspec replaces getargspec, which was deprecated
        # since Python 3.0 and removed in Python 3.11.
        ogr_options = {k: v for k, v in options.items()
                       if k in inspect.getfullargspec(_ogrinspect).args and v is not None}
        output = list(_ogrinspect(ds, model_name, **ogr_options))

        if options['mapping']:
            # Constructing the keyword arguments for `mapping`, and
            # calling it on the data source.
            kwargs = {'geom_name': options['geom_name'],
                      'layer_key': options['layer_key'],
                      'multi_geom': options['multi_geom'],
                      }
            mapping_dict = mapping(ds, **kwargs)
            # This extra legwork is so that the dictionary definition comes
            # out in the same order as the fields in the model definition.
            rev_mapping = {v: k for k, v in mapping_dict.items()}
            output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
                           '%s_mapping = {' % model_name.lower()])
            output.extend("    '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields)
            output.extend(["    '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
        return '\n'.join(output) + '\n'
Пример #4
0
    def _get_datasource(self):
        """Validate the compressed shapefile contents and open it with OGR.

        Checks that all required extensions are present and belong to a
        single shapefile base name, extracts the archive into the temp
        directory and returns a gdal.DataSource over the extracted .shp.

        Raises:
            CompressedShapeError: when members belong to different
                shapefiles or a required extension is missing.
        """
        handler = HandlerFactory.get(self.file)

        ext_set = set(ShapefileReader.REQUIRED_EXTENSIONS)
        shapefile_name = None
        for name in handler.namelist:
            fname, ext = os.path.splitext(name)

            if ext not in ShapefileReader.REQUIRED_EXTENSIONS:
                continue

            # BUG FIX: the original tested a `files_name` sentinel that was
            # never assigned, so the base name was overwritten on every
            # iteration and the mismatch check below could never fire.
            if shapefile_name is None:
                shapefile_name = fname
            elif fname != shapefile_name:
                raise CompressedShapeError(_(u'The files are not all from '
                                             u'the same shapefile.'))

            ext_set.discard(ext)

        if ext_set:
            raise CompressedShapeError(_(u'The following files are missing '
                                         u'in the shapefile: {}')
                                       .format(list(ext_set)))

        handler.extract(self._tmp_dir)
        shapefile_path = os.path.join(self._tmp_dir, shapefile_name + '.shp')

        return gdal.DataSource(shapefile_path)
Пример #5
0
def upload(request):
    """Accept a shapefile upload and render its geometries for Google Maps v3.

    On a valid POST the uploaded shapefile is opened with OGR, every
    feature geometry is reprojected from EPSG:4326 to EPSG:900913 and the
    results are rendered; otherwise an empty upload form is shown.
    """
    csrf_ctx = {}
    csrf_ctx.update(csrf(request))
    if request.method == 'POST':
        form = UploadForm(request.POST, request.FILES)
        if form.is_valid():
            shapefile_path = form.handle(request.FILES['file_obj'])
            source = gdal.DataSource(shapefile_path)
            to_mercator = CoordTransform(SpatialReference('EPSG:4326'),
                                         SpatialReference('EPSG:900913'))

            geometries = []
            for feature in source[0]:
                geometry = feature.geom
                geometry.transform(to_mercator)
                geometries.append(geometry)

            return render_to_response(
                'googlev3_upload.html',
                RequestContext(request, {
                    'form': UploadForm(),
                    'dados': geometries,
                    'token': csrf_ctx
                }))
    else:
        form = UploadForm()
    return render_to_response('googlev3_upload.html',
                              RequestContext(request, {'form': form}))
Пример #6
0
    def handle(self, *args, **options):
        """Inspect an OGR data source and build ogrinspect model output.

        NOTE(review): this block appears truncated in the source —
        ``rev_mapping`` is computed but the code that appends the mapping
        lines and returns the output is not visible here.
        """
        data_source, model_name = options.pop('data_source'), options.pop('model_name')

        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.GDALException as msg:
            raise CommandError(msg)

        # Returning the output of ogrinspect with the given arguments
        # and options.
        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
        # Filter options to params accepted by `_ogrinspect`
        ogr_options = {k: v for k, v in options.items()
                       if k in get_func_args(_ogrinspect) and v is not None}
        output = [s for s in _ogrinspect(ds, model_name, **ogr_options)]

        if options['mapping']:
            # Constructing the keyword arguments for `mapping`, and
            # calling it on the data source.
            kwargs = {
                'geom_name': options['geom_name'],
                'layer_key': options['layer_key'],
                'multi_geom': options['multi_geom'],
            }
            mapping_dict = mapping(ds, **kwargs)
            # This extra legwork is so that the dictionary definition comes
            # out in the same order as the fields in the model definition.
            rev_mapping = {v: k for k, v in mapping_dict.items()}
Пример #7
0
 def _from_file(self, fileobj, tmpdir):
     """Load and union all geometries from an uploaded file.

     Handles both zip archives containing a shapefile and plain vector
     files.  Raises forms.ValidationError when GDAL cannot produce a
     geometry with a known SRS.
     """
     if zipfile.is_zipfile(fileobj):
         with zipfile.ZipFile(fileobj) as zf:
             extracted = []
             for item in zf.infolist():
                 fname = os.path.abspath(os.path.join(
                     tmpdir, item.filename))
                 # Only extract members that resolve inside tmpdir
                 # (zip-slip protection).
                 if fname.startswith(tmpdir):
                     zf.extract(item, tmpdir)
                     extracted.append(fname)
             # Point fname at the .shp member for the DataSource below.
             for path in extracted:
                 if path.endswith('.shp'):
                     fname = path
     else:
         # NOTE: is_zipfile() seeks to end of file or at least 110 bytes.
         fileobj.seek(0)
         with tempfile.NamedTemporaryFile(dir=tmpdir, delete=False) as fp:
             shutil.copyfileobj(fileobj, fp)
         fname = fp.name
     # Attempt to union all geometries from GDAL data source.
     try:
         geoms = gdal.DataSource(fname)[0].get_geoms()
         # Fold the layer's geometries pairwise into a single union.
         geom = reduce(lambda g1, g2: g1.union(g2), geoms)
         if not geom.srs:
             raise gdal.OGRException('Cannot determine SRS')
     except (gdal.OGRException, gdal.OGRIndexError):
         raise forms.ValidationError(
             GeometryField.default_error_messages['invalid_geom'],
             code='invalid_geom')
     return geom
    def handle(self, *args, **options):
        """Load or refresh Philadelphia neighborhood Regions from a bundled shapefile.

        Python 2 code (uses print statements).  For each feature, an
        existing Region is reused when present, otherwise a new one is
        created; its shape is then reprojected and saved.
        """
        path = pathjoin(dirname(__file__), pardir, pardir, 'azavea-geo-data',
                        'Neighborhoods_Philadelphia',
                        'Neighborhoods_Philadelphia.shp')

        shapefile = gdal.DataSource(path)
        for layer in shapefile:
            for feature in layer:
                # EAFP: fetch the Region keyed on the 'listname' attribute,
                # building a new unsaved one when it does not exist yet.
                try:
                    region = Region.objects.get(layer='neighborhoods',
                                                name=feature.get('listname'))
                    print 'Updating "%s"' % region.name
                except Region.DoesNotExist:
                    region = Region(layer='neighborhoods',
                                    name=feature.get('listname'))
                    print 'Loading "%s"' % region.name

        # Transform the projection from 102729 (NAD 1983 StatePlane
        # Pennsylvania South FIPS 3702 Feet) to 4326 (WSG 1984).
        #
        # Source: http://svn.osgeo.org/postgis/tags/0.7.4/spatial_ref_sys.sql
                region.shape = feature.geom.geos
                region.shape.srid = 102729
                region.shape.transform(4326)

                region.save()
Пример #9
0
    def handle(self, *args, **options):
        """Inspect an OGR data source and print a GeoDjango model for it.

        Expects two positional arguments (data source path and model name);
        when the ``mapping`` option is set, an auto-generated LayerMapping
        dictionary is appended to the output.

        Raises:
            CommandError: on bad arguments, missing GDAL support, or an
                unreadable data source.
        """
        try:
            data_source, model_name = args
        except ValueError:
            raise CommandError('Invalid arguments, must provide: %s' %
                               self.args)

        if not gdal.HAS_GDAL:
            raise CommandError(
                'GDAL is required to inspect geospatial data sources.')

        # Removing options with `None` values (dict comprehension and
        # `is not None` replace the dated dict([...]) / `not v is None`).
        options = {k: v for k, v in options.items() if v is not None}

        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.OGRException as msg:
            raise CommandError(msg)

        # Whether the user wants to generate the LayerMapping dictionary as well.
        show_mapping = options.pop('mapping', False)

        # Getting rid of settings that `_ogrinspect` doesn't like.
        verbosity = options.pop('verbosity', False)
        settings = options.pop('settings', False)

        # Returning the output of ogrinspect with the given arguments
        # and options.
        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
        output = list(_ogrinspect(ds, model_name, **options))
        if show_mapping:
            # Constructing the keyword arguments for `mapping`, and
            # calling it on the data source.
            kwargs = {
                'geom_name': options['geom_name'],
                'layer_key': options['layer_key'],
                'multi_geom': options['multi_geom'],
            }
            mapping_dict = mapping(ds, **kwargs)
            # Invert the mapping so the dictionary definition comes out in
            # the same order as the fields in the model definition.
            rev_mapping = {v: k for k, v in mapping_dict.items()}
            output.extend([
                '',
                '# Auto-generated `LayerMapping` dictionary for %s model' %
                model_name,
                '%s_mapping = {' % model_name.lower()
            ])
            output.extend([
                "    '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld)
                for ogr_fld in ds[options['layer_key']].fields
            ])
            output.extend([
                "    '%s' : '%s'," %
                (options['geom_name'], mapping_dict[options['geom_name']]), '}'
            ])
        return '\n'.join(output) + '\n'
Пример #10
0
def geoms_from_shapefile(filename, id=None):
    """Load geometries from a shapefile, keyed by a field value or feature id.

    Args:
        filename: Path to the shapefile.
        id: Optional attribute field name used as the dictionary key;
            when omitted the OGR feature id (``fid``) is used.

    Returns:
        dict mapping key -> geos.GEOSGeometry.
    """
    ds = gdal.DataSource(filename)
    layer = ds[0]
    id_column = lambda f: f.get(id) if id else f.fid
    geoms = {}
    for f in layer:
        # Best effort: skip features whose geometry cannot be parsed.
        # (Python 3 `except ... as` syntax replaces the old `except ..., e`.)
        try:
            geoms[id_column(f)] = geos.GEOSGeometry(f.geom.wkt)
        except Exception as e:
            print(e)
    # BUG FIX: the original built the dict but never returned it, so the
    # function always yielded None.
    return geoms
Пример #11
0
    def handle(self, *args, **options):
        """Run ogrinspect on a data source and emit a model definition.

        Pops the positional-style options, filters the rest to the
        parameters `_ogrinspect` accepts, and optionally appends an
        auto-generated LayerMapping dictionary.
        """
        data_source = options.pop("data_source")
        model_name = options.pop("model_name")

        # Open the OGR data source named by the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.GDALException as msg:
            raise CommandError(msg)

        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping

        # Keep only options `_ogrinspect` accepts and that were provided.
        accepted = get_func_args(_ogrinspect)
        ogr_options = {
            name: value
            for name, value in options.items()
            if name in accepted and value is not None
        }
        output = list(_ogrinspect(ds, model_name, **ogr_options))

        if options["mapping"]:
            # Build the mapping over the data source.
            mapping_dict = mapping(
                ds,
                geom_name=options["geom_name"],
                layer_key=options["layer_key"],
                multi_geom=options["multi_geom"],
            )
            # Invert the mapping so the dictionary entries come out in the
            # same order as the fields in the model definition.
            rev_mapping = {ogr: model for model, ogr in mapping_dict.items()}
            output.append("")
            output.append("")
            output.append(
                "# Auto-generated `LayerMapping` dictionary for %s model"
                % model_name
            )
            output.append("%s_mapping = {" % model_name.lower())
            for ogr_fld in ds[options["layer_key"]].fields:
                output.append("    '%s': '%s'," % (rev_mapping[ogr_fld], ogr_fld))
            geom_name = options["geom_name"]
            output.append("    '%s': '%s'," % (geom_name, mapping_dict[geom_name]))
            output.append("}")
        return "\n".join(output) + "\n"
Пример #12
0
 def getShapefileLayers(self):
     """Return the first layer of every shapefile in the serializer directory.

     Returns:
         list of OGR Layer objects, one per ``.shp`` file found.
     """
     directory = self.serializer.path_directory
     # BUG FIX: the old suffix test (`name[-3:] == "shp"`) also matched
     # names merely ending in "shp" without the dot; endswith('.shp')
     # matches the real extension only.
     shapefiles = [
         name for name in os.listdir(directory)
         if name.endswith(".shp")
     ]
     datasources = [
         gdal.DataSource(os.path.join(directory, name))
         for name in shapefiles
     ]
     layers = [ds[0] for ds in datasources]
     return layers
Пример #13
0
    def validate(self, zip_path):
        """ Validate the uploaded, zipped shapefile by unpacking to a temporary sandbox.

        Returns a (bool, message) pair: (True, None) on success, otherwise
        (False, <human-readable reason>).
        """
        # create a temporary file to write the zip archive to
        # NOTE(review): mode='w' opens the temp file in text mode while the
        # payload is binary zip data — confirm write_file handles this.
        tmp = tempfile.NamedTemporaryFile(suffix='.zip', mode='w')
        # write zip to tmp sandbox
        self.write_file(zip_path, tmp.name)

        if not zipfile.is_zipfile(tmp.name):
            return False, 'That file is not a valid Zip Archive'

        # create zip object
        zfile = zipfile.ZipFile(tmp.name)

        # ensure proper file contents by extensions inside
        if not self.check_zip_contents('shp', zfile):
            return False, 'Found Zip Archive but no file with a .shp extension found inside.'
        # The .prj requirement is deliberately disabled in this variant.
        #elif not self.check_zip_contents('prj', zfile):
        #    return False, 'You must supply a .prj file with the Shapefile to indicate the projection.'
        elif not self.check_zip_contents('dbf', zfile):
            return False, 'You must supply a .dbf file with the Shapefile to supply attribute data.'
        elif not self.check_zip_contents('shx', zfile):
            return False, 'You must supply a .shx file for the Shapefile to have a valid index.'

        # unpack contents into tmp directory
        tmp_dir = tempfile.gettempdir()
        zfile.extractall(tmp_dir)
        files_list = zfile.namelist()

        # Looking for the shape: we must have 3 files with the same name.
        shape_candidate = self.get_shape_candidate(files_list)

        if not shape_candidate:
            return False, "No suitable shapefile found"

        ds_name = shape_candidate

        # ogr needs the full path to the unpacked 'file.shp'
        full_path = '%s%s%s.shp' % (tmp_dir, os.path.sep, ds_name)
        ds = gdal.DataSource(full_path)

        # shapefiles have just one layer, so grab the first...
        layer = ds[0]

        # one way of testing a sane shapefile...
        # further tests should be able to be plugged in here...
        if layer.test_capability('RandomRead'):
            if str(ds.driver) == 'ESRI Shapefile':
                return True, None
            else:
                return False, "Sorry, we've experienced a problem on our server. Please try again later."
        else:
            return False, 'Cannot read the shapefile, data is corrupted inside the zip, please try to upload again'
Пример #14
0
    def validate(self, filefield_data):
        """ Validate the uploaded, zipped shapefile by unpacking to a temporary sandbox.

        Returns a (bool, message) pair: (True, None) on success, otherwise
        (False, <human-readable reason>).
        """
        # create a temporary file to write the zip archive to
        # NOTE(review): mode='w' opens the temp file in text mode while the
        # payload is binary zip data — confirm write_file handles this.
        tmp = tempfile.NamedTemporaryFile(suffix='.zip', mode='w')

        # write zip to tmp sandbox
        self.write_file(tmp.name, filefield_data)

        if not zipfile.is_zipfile(tmp.name):
            return False, 'That file is not a valid Zip Archive'

        # create zip object
        zfile = zipfile.ZipFile(tmp.name)

        # ensure proper file contents by extensions inside
        if not self.check_zip_contents('shp', zfile):
            return False, 'Found Zip Archive but no file with a .shp extension found inside.'
        elif not self.check_zip_contents('prj', zfile):
            return False, 'You must supply a .prj file with the Shapefile to indicate the projection.'
        elif not self.check_zip_contents('dbf', zfile):
            return False, 'You must supply a .dbf file with the Shapefile to supply attribute data.'
        elif not self.check_zip_contents('shx', zfile):
            return False, 'You must supply a .shx file for the Shapefile to have a valid index.'

        # unpack contents into tmp directory
        # NOTE(review): info.filename is used unsanitized below; a crafted
        # archive with path components could write outside tmp_dir
        # (zip-slip) — consider os.path.basename(info.filename).
        tmp_dir = tempfile.gettempdir()
        for info in zfile.infolist():
            data = zfile.read(info.filename)
            shp_part = '%s%s%s' % (tmp_dir, os.path.sep, info.filename)
            fout = open(shp_part, "wb")
            fout.write(data)
            fout.close()

        # get the datasource name without extension
        ds_name = os.path.splitext(zfile.namelist()[0])[0]

        # ogr needs the full path to the unpacked 'file.shp'
        ds = gdal.DataSource('%s%s%s.shp' % (tmp_dir, os.path.sep, ds_name))

        # shapefiles have just one layer, so grab the first...
        layer = ds[0]

        # one way of testing a sane shapefile...
        # further tests should be able to be plugged in here...
        if layer.test_capability('RandomRead'):
            if str(ds.driver) == 'ESRI Shapefile':
                return True, None
            else:
                return False, "Sorry, we've experienced a problem on our server. Please try again later."
        else:
            return False, 'Cannot read the shapefile, data is corrupted inside the zip, please try to upload again'
Пример #15
0
    def save(self, commit=True):
        """Create Shape objects from an optional uploaded zipped shapefile.

        Extracts the archive to a temporary directory, collects every
        Polygon feature of the first .shp found into one MultiPolygon and
        stores it via Shape.objects.get_or_create.

        Returns:
            The model instance built by the parent form's save().
        """
        modal = super(ShapeForm, self).save(commit=False)
        print(self.cleaned_data['shape_file'])
        if self.cleaned_data['shape_file'] is not None:
            shape_file = self.cleaned_data['shape_file']
            working_dir = mkdtemp()
            # Narrowed from bare excepts so KeyboardInterrupt/SystemExit
            # are not swallowed; on failure shapes_list stays empty and the
            # conversion step below reports the error.
            try:
                shape_zip = ZipFile(shape_file)
                shape_zip.extractall(working_dir)
            except Exception:
                shutil.rmtree(working_dir)
                print("Could not extract zip file.")

            shapes_list = glob.glob(working_dir + '/*.shp')

            try:
                ds = gdal.DataSource(shapes_list[0])
                layer = ds[0]
                polygons = []
                for feature in layer:
                    state = self.cleaned_data['state']
                    geom = feature.geom.geos
                    source = self.cleaned_data['source']
                    start_date = self.cleaned_data['start_date'] or date(
                        1945, 1, 1)
                    end_date = self.cleaned_data['end_date'] or date(
                        2018, 1, 1)

                    if isinstance(geom, geos.Polygon):
                        polygons.append(geom)

                if polygons:
                    multipoly = geos.MultiPolygon(polygons)
                    modal, created = Shape.objects.get_or_create(
                        state=state,
                        shape=multipoly,
                        source=source,
                        start_date=start_date,
                        end_date=end_date)

            except Exception:
                shutil.rmtree(working_dir)
                print("Error when converting shape file.")

        if commit:
            # BUG FIX: the original called m.save() on an undefined name,
            # raising NameError whenever commit=True.
            modal.save()
        return modal
    def handle(self, *args, **options):
        """Import country polygons from a GeoJSON file into State/Shape rows."""
        paths = options['geojson']
        layer = gdal.DataSource(paths[0])[0]
        source = 'https://github.com/johan/world.geo.json'
        for feature in layer:
            name = feature.get('name')
            geom = feature.geom.geos
            start_date = feature.get('start_date') or date(1945, 1, 1)
            # Promote bare polygons so every stored shape is a MultiPolygon.
            if type(geom) == geos.Polygon:
                geom = geos.MultiPolygon(geom)
            state, created = State.objects.get_or_create(name=name, color="test")
            print(state, start_date, created)
            Shape.objects.get_or_create(state=state, shape=geom, source=source,
                                        start_date=start_date,
                                        end_date=date(2018, 1, 1))

        print(State.objects.count())
        print(Shape.objects.count())
Пример #17
0
    def handle(self, *args, **options):
        """Validate CLI arguments and open the OGR data source.

        Python 2 code (`except ..., msg` syntax).  NOTE(review): the block
        appears truncated here; `ds` is opened but the inspection/output
        step that normally follows is not visible.
        """
        try:
            data_source, model_name = args
        except ValueError:
            raise CommandError('Invalid arguments, must provide: %s' %
                               self.args)

        if not gdal.HAS_GDAL:
            raise CommandError(
                'GDAL is required to inspect geospatial data sources.')

        # Removing options with `None` values.
        options = dict([(k, v) for k, v in options.items() if not v is None])

        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.OGRException, msg:
            raise CommandError(msg)
Пример #18
0
 def _from_file(self, fileobj, tmpdir):
     """Read geometries from an uploaded file (zip shapefile or plain vector).

     Returns the union of all geometries in the first layer, or None when
     GDAL cannot read the data or no SRS is present.
     """
     if zipfile.is_zipfile(fileobj):
         with zipfile.ZipFile(fileobj) as zf:
             for item in zf.infolist():
                 fname = os.path.abspath(os.path.join(
                     tmpdir, item.filename))
                 # Only extract members resolving inside tmpdir (zip-slip
                 # protection).
                 if fname.startswith(tmpdir):
                     zf.extract(item, tmpdir)
     else:
         # NOTE: is_zipfile() seeks to end of file or at least 110 bytes.
         fileobj.seek(0)
         with tempfile.NamedTemporaryFile(dir=tmpdir, delete=False) as fp:
             shutil.copyfileobj(fileobj, fp)
         fname = fp.name
     # Attempt to union all geometries from GDAL data source.
     # NOTE(review): in the zip branch, fname ends up as the LAST member's
     # path (not necessarily the .shp); an empty archive would leave fname
     # unbound — confirm upstream validation guarantees members exist.
     try:
         geoms = gdal.DataSource(fname)[0].get_geoms()
         geom = reduce(lambda g1, g2: g1.union(g2), geoms)
         if not geom.srs:
             raise gdal.OGRException('Cannot determine SRS')
     except (gdal.OGRException, gdal.OGRIndexError):
         geom = None
     return geom
Пример #19
0
    def handle(self, filefield_data, user):
        """ Upload the file data, in chunks, to the media/upload/loadshp.
            Then unpack it, read the features and return them.

        Returns:
            The first OGR layer of the extracted shapefile.
        """
        # ensure the upload directory exists
        upload_dir = os.path.join(settings.MEDIA_ROOT, 'upload', 'loadshp',
                                  user.username)
        if not os.path.exists(upload_dir):
            os.makedirs(upload_dir)

        # contruct the full filepath and filename
        downloaded_file = os.path.normpath(
            os.path.join(upload_dir, filefield_data.name))

        # if we've already got an upload with the same name, append the daymonthyear_minute
        if os.path.exists(downloaded_file):
            name, ext = os.path.splitext(downloaded_file)
            append = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
            downloaded_file = '%s_%s%s' % (name, append, ext)

        # write the zip archive to final location
        self.write_file(downloaded_file, filefield_data)

        # Unpack the archive into the temp directory.  SECURITY FIX: only
        # the base name of each member is used so a crafted zip with path
        # components cannot write outside tmp_dir (zip-slip); file handles
        # are closed via context managers instead of manual close().
        zfile = zipfile.ZipFile(downloaded_file)
        tmp_dir = tempfile.gettempdir()
        for info in zfile.infolist():
            member_name = os.path.basename(info.filename)
            if not member_name:
                # Skip directory entries (their basename is empty).
                continue
            shp_part = os.path.join(tmp_dir, member_name)
            with open(shp_part, "wb") as fout:
                fout.write(zfile.read(info.filename))

        # get the datasource name without extension (base name only, to
        # match the sanitized extraction above)
        ds_name = os.path.splitext(os.path.basename(zfile.namelist()[0]))[0]
        ds = gdal.DataSource(os.path.join(tmp_dir, '%s.shp' % ds_name))
        return ds[0]
Пример #20
0
    def save(self, commit=True):
        """Build the Territory geometry from an optional uploaded zip shapefile.

        Extracts the archive, unions all Polygon features of the first
        shapefile found into a MultiPolygon and stores its GeoJSON on the
        model's ``geo`` attribute.

        Raises:
            ValidationError: when the zip cannot be extracted or the
                shapefile cannot be converted.

        Returns:
            The (optionally saved) model instance.
        """
        model = super(TerritoryForm, self).save(commit=False)
        if self.cleaned_data["shape_file"] is not None:
            shape_file = self.cleaned_data["shape_file"]
            working_dir = mkdtemp()
            # Narrowed from bare excepts so KeyboardInterrupt/SystemExit
            # are not swallowed; failures still clean up and re-raise as
            # ValidationError.
            try:
                shape_zip = ZipFile(shape_file)
                shape_zip.extractall(working_dir)
            except Exception:
                shutil.rmtree(working_dir)
                raise ValidationError("Could not extract zipfile.")

            basename = os.path.splitext(os.path.basename(shape_file.name))[0]
            shapes_list = glob.glob(working_dir + "/" + basename + "/*.shp")

            try:
                ds = gdal.DataSource(shapes_list[0])
                layer = ds[0]
                polygons = []
                for feature in layer:
                    geom = feature.geom.geos
                    if isinstance(geom, geos.Polygon):
                        polygons.append(geom)

                if polygons:
                    multipoly = geos.MultiPolygon(polygons)
                    model.geo = multipoly.geojson
            except Exception:
                shutil.rmtree(working_dir)
                raise ValidationError("Error converting shapefile")

        if commit:
            model.save()

        return model
Пример #21
0
    def handle_args(self, *args, **options):
        """Validate CLI arguments and open a file-based OGR data source.

        Python 2 code (`except ..., msg` syntax).  NOTE(review): the block
        appears truncated; `ds` is opened but the code that consumes it is
        not visible here.
        """
        try:
            data_source, model_name = args
        except ValueError:
            raise CommandError('Invalid arguments, must provide: %s' %
                               self.args)

        if not gdal.HAS_GDAL:
            raise CommandError(
                'GDAL is required to inspect geospatial data sources.')

        # TODO: Support non file-based OGR datasources.
        if not os.path.isfile(data_source):
            raise CommandError('The given data source cannot be found: "%s"' %
                               data_source)

        # Removing options with `None` values.
        options = dict([(k, v) for k, v in options.items() if not v is None])

        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.OGRException, msg:
            raise CommandError(msg)
Пример #22
0
    def test_shape_mixed(self):
        """
        Test that a project made of intervention of different geom create multiple files.
        Check that those files are each of a different type (Point/LineString) and that
        the project and the intervention are correctly referenced in it.
        """

        # Create topology line
        topo_line = TopologyFactory.create(no_path=True)
        line = PathFactory.create(
            geom=LineString(Point(10, 10), Point(11, 10)))
        PathAggregationFactory.create(topo_object=topo_line, path=line)

        # Create a topology point
        lng, lat = tuple(
            Point(1, 1, srid=settings.SRID).transform(settings.API_SRID,
                                                      clone=True))

        closest_path = PathFactory(
            geom=LineString(Point(0, 0), Point(1, 0), srid=settings.SRID))
        topo_point = TopologyHelper._topologypoint(lng, lat, None).reload()

        self.assertEquals(topo_point.paths.get(), closest_path)

        # Create one intervention by geometry (point/linestring)
        it_point = InterventionFactory.create(topology=topo_point)
        it_line = InterventionFactory.create(topology=topo_line)
        # reload
        it_point = type(it_point).objects.get(pk=it_point.pk)
        it_line = type(it_line).objects.get(pk=it_line.pk)

        proj = ProjectFactory.create()
        proj.interventions.add(it_point)
        proj.interventions.add(it_line)

        # instanciate the class based view 'abnormally' to use create_shape directly
        # to avoid making http request, authent and reading from a zip
        pfl = ZipShapeSerializer()
        devnull = open(os.devnull, "wb")
        pfl.serialize(Project.objects.all(),
                      stream=devnull,
                      delete=False,
                      fields=ProjectFormatList.columns)
        # One shapefile per geometry type is expected (point + line).
        self.assertEquals(len(pfl.layers), 2)

        ds_point = gdal.DataSource(pfl.layers.values()[0])
        layer_point = ds_point[0]
        ds_line = gdal.DataSource(pfl.layers.values()[1])
        layer_line = ds_line[0]

        self.assertEquals(layer_point.geom_type.name, 'MultiPoint')
        self.assertEquals(layer_line.geom_type.name, 'LineString')

        # Both layers share the projection and the truncated (10-char
        # shapefile DBF limit) attribute names.
        for layer in [layer_point, layer_line]:
            self.assertEquals(layer.srs.name, 'RGF93_Lambert_93')
            self.assertItemsEqual(layer.fields, [
                u'id', u'name', u'period', u'type', u'domain', u'constraint',
                u'global_cos', u'interventi', u'interven_1', u'comments',
                u'contractor', u'project_ow', u'project_ma', u'founders',
                u'related_st', u'insertion_', u'update_dat', u'cities',
                u'districts', u'restricted'
            ])

        self.assertEquals(len(layer_point), 1)
        self.assertEquals(len(layer_line), 1)

        # Each feature must reference the project and carry the matching
        # intervention geometry.
        for feature in layer_point:
            self.assertEquals(str(feature['id']), str(proj.pk))
            self.assertTrue(feature.geom.geos.equals(it_point.geom))

        for feature in layer_line:
            self.assertEquals(str(feature['id']), str(proj.pk))
            self.assertTrue(feature.geom.geos.equals(it_line.geom))

        # Clean-up temporary shapefiles
        for layer_file in pfl.layers.values():
            for subfile in shapefile_files(layer_file):
                os.remove(subfile)
Пример #23
0
 def getShapefileLayers(self):
     """Open each shapefile produced by the serializer and return its first layer."""
     paths = self.serializer.layers.values()
     sources = [gdal.DataSource(path) for path in paths]
     return [source[0] for source in sources]
Пример #24
0
    def test_shape_mixed(self):
        """
        Test that a project made of interventions of different geometries
        creates multiple files. Check that those files are each of a
        different type (Point/LineString) and that the project and the
        interventions are correctly referenced in them.
        """

        # Create topology line
        line = PathFactory.create(
            geom=LineString(Point(10, 10), Point(11, 10)))
        topo_line = TopologyFactory.create(paths=[line])

        # Create a topology point
        lng, lat = tuple(
            Point(1, 1, srid=settings.SRID).transform(settings.API_SRID,
                                                      clone=True))

        closest_path = PathFactory(
            geom=LineString(Point(0, 0), Point(1, 0), srid=settings.SRID))
        topo_point = TopologyHelper._topologypoint(lng, lat, None).reload()

        self.assertEqual(topo_point.paths.get(), closest_path)

        # Create one intervention by geometry (point/linestring)
        it_point = InterventionFactory.create(target=topo_point)
        it_line = InterventionFactory.create(target=topo_line)
        # Reload from the DB so geometries reflect the saved topologies
        it_point = type(it_point).objects.get(pk=it_point.pk)
        it_line = type(it_line).objects.get(pk=it_line.pk)

        proj = ProjectFactory.create()
        proj.interventions.add(it_point)
        proj.interventions.add(it_line)

        # instanciate the class based view 'abnormally' to use create_shape directly
        # to avoid making http request, authent and reading from a zip
        pfl = ZipShapeSerializer()
        shape_dir = pfl.path_directory
        # Serialize to /dev/null: we only care about the shapefiles written
        # to shape_dir as a side effect. Use a context manager so the stream
        # is closed instead of leaking a file descriptor.
        with open(os.devnull, "wb") as devnull:
            pfl.serialize(Project.objects.all(),
                          stream=devnull,
                          delete=False,
                          fields=ProjectFormatList.columns)
        # endswith(".shp") is stricter than the previous [-3:] == "shp"
        # slice, which also matched extension-less names ending in "shp".
        shapefiles = [
            shapefile for shapefile in os.listdir(shape_dir)
            if shapefile.endswith(".shp")
        ]
        datasources = [
            gdal.DataSource(os.path.join(shape_dir, s))
            for s in shapefiles
        ]
        layers = [ds[0] for ds in datasources]

        self.assertEqual(len(datasources), 2)
        geom_type_layer = {layer.name: layer for layer in layers}
        geom_types = geom_type_layer.keys()
        self.assertIn('MultiPoint', geom_types)
        self.assertIn('MultiLineString', geom_types)

        for layer in layers:
            self.assertEqual(layer.srs.name, 'RGF93_Lambert_93')
            self.assertCountEqual(layer.fields, [
                'id', 'name', 'period', 'type', 'domain', 'constraint',
                'global_cos', 'interventi', 'comments', 'contractor',
                'project_ow', 'project_ma', 'founders', 'related_st',
                'insertion_', 'update_dat', 'cities', 'districts', 'restricted'
            ])

        self.assertEqual(len(layers[0]), 1)
        self.assertEqual(len(layers[1]), 1)

        for feature in geom_type_layer['MultiPoint']:
            self.assertEqual(str(feature['id']), str(proj.pk))
            self.assertEqual(len(feature.geom.geos), 1)
            self.assertAlmostEqual(feature.geom.geos[0].x, it_point.geom.x)
            self.assertAlmostEqual(feature.geom.geos[0].y, it_point.geom.y)

        for feature in geom_type_layer['MultiLineString']:
            self.assertEqual(str(feature['id']), str(proj.pk))
            self.assertTrue(feature.geom.geos.equals(it_line.geom))
Пример #25
0
    def validate(self, filefield_data):
        """ Validate the uploaded, zipped shapefile by unpacking to a temporary sandbox.

        Returns an ``(is_valid, message)`` tuple.
        """
        # Create a named temporary .zip; close the handle immediately since
        # write_file() reopens the path itself (delete=False keeps the file
        # on disk after the handle is closed).
        tmp = tempfile.NamedTemporaryFile(suffix='.zip',
                                          mode='w',
                                          delete=False)
        tmp.close()

        # write zip to tmp sandbox
        self.write_file(tmp.name, filefield_data)

        if not zipfile.is_zipfile(tmp.name):
            return False, 'That file is not a valid Zip Archive'

        # Context manager guarantees the archive handle is released on every
        # return path.
        with zipfile.ZipFile(tmp.name) as zfile:
            # ensure proper file contents by extensions inside
            if not self.check_zip_contents('shp', zfile):
                return False, 'Found Zip Archive but no file with a .shp extension found inside.'
            elif not self.check_zip_contents('dbf', zfile):
                return False, 'You must supply a .dbf file with the Shapefile to supply attribute data.'
            elif not self.check_zip_contents('shx', zfile):
                return False, 'You must supply a .shx file for the Shapefile to have a valid index.'

            # Unpack members into the tmp directory using only the base file
            # name: relative paths embedded in the archive must not be able to
            # escape the sandbox (zip-slip).
            tmp_dir = tempfile.gettempdir()
            shp_path = None
            for info in zfile.infolist():
                base_name = os.path.basename(info.filename)
                if not base_name:
                    continue  # directory entry, nothing to write
                shp_part = os.path.join(tmp_dir, base_name)
                with open(shp_part, "wb") as fout:
                    fout.write(zfile.read(info.filename))
                # Remember the actual .shp member instead of assuming the
                # first archive entry is the shapefile.
                if base_name.lower().endswith('.shp'):
                    shp_path = shp_part

            # ogr needs the full path to the unpacked 'file.shp'
            ds = gdal.DataSource(shp_path)

            # shapefiles have just one layer, so grab the first...
            layer = ds[0]

            # one way of testing a sane shapefile...
            # further tests should be able to be plugged in here...
            if layer.test_capability('RandomRead'):
                if str(ds.driver) != 'ESRI Shapefile':
                    return False, "Sorry, we've experienced a problem on our server. Please try again later."
            else:
                return False, 'Cannot read the shapefile, data is corrupted inside the zip, please try to upload again'

            # Must have a prj or have data in the -180,-90,180,90 window
            # (assumed to be latlong)
            if not self.check_zip_contents('prj', zfile) and (
                    layer.extent.min_x < -180.0 or layer.extent.max_x > 180.0
                    or layer.extent.min_y < -90.0 or layer.extent.max_y > 90.0):
                return False, 'You must supply a .prj file with the Shapefile to indicate the projection.'
            else:
                g = layer[0].geom
                if g.srs:
                    g.transform_to(4326)
                ext = g.envelope
                if ext.min_x < -180.0 or ext.max_x > 180.0 or ext.min_y < -90.0 or ext.max_y > 90.0:
                    return False, 'There was an error reprojecting your geometry. You must supply a .prj file or reproject to WGS84.'

            if layer.geom_type.name not in self.supported_geomtypes:
                return False, "Sorry, %s geometries are not supported. Try uploading a zipped shapefile with %s geometries" % \
                        (layer.geom_type.name, ', '.join(self.supported_geomtypes))

            if not self.multi_feature and layer.num_feat != 1:
                return False, "We can only support shapefiles with a single feature"

            return True, "Shapefile is good to go"