Example 1
    def load_zip(self, filename):
        path = os.path.abspath(filename)
        csv_path = os.path.join(os.path.dirname(path), 'plz_einwohner.csv')

        with open(csv_path) as f:
            reader = csv.DictReader(f)
            population = {x['plz']: int(x['einwohner']) for x in reader}

        ds = DataSource(path)
        mapping = LayerMapping(GeoRegion, ds, {'geom': 'geometry'})
        layer = ds[0]
        count = float(len(layer))
        for i, feature in enumerate(layer):
            self.stdout.write('%.2f%%\r' % (i / count * 100), ending='')
            name = feature['plz'].as_string()
            slug = name
            geom = mapping.feature_kwargs(feature)['geom']
            GeoRegion.objects.update_or_create(
                slug=slug, kind='zipcode', defaults={
                    'name': name,
                    'geom': geom,
                    'description': feature['note'].as_string(),
                    'region_identifier': name,
                    'global_identifier': 'DE-%s' % name,
                    'area': feature.geom.area,
                    'population': population.get(name, None),
                    'level': 3,
                    'valid_on': timezone.now()
                }
            )
Example 2
    def load_boroughs(self, path, parent=None, name_col=None, ident_col=None):
        ds = DataSource(path)
        mapping = LayerMapping(GeoRegion, ds, {'geom': 'POLYGON'})
        layer = ds[0]
        parent = GeoRegion.objects.get(kind='municipality', name=parent)
        for i, feature in enumerate(layer):
            name = feature[name_col].as_string()
            identifier = feature[ident_col].as_string()[:2]
            kind = 'borough'
            kind_detail = 'Bezirk'
            slug = slugify(name)
            geom = mapping.feature_kwargs(feature)['geom']

            region_identifier = parent.region_identifier + identifier

            GeoRegion.objects.update_or_create(
                slug=slug, kind=kind,
                defaults={
                    'name': name,
                    'kind': kind,
                    'kind_detail': kind_detail,
                    'level': 6,
                    'region_identifier': region_identifier,
                    'global_identifier': '',
                    'population': None,
                    'geom': geom,
                    'area': feature.geom.area,
                    'valid_on': None,
                    'part_of': parent
                }
            )
Example 3
    def handle(self, **options):
        shapefile = os.path.join(settings.HERE, os.pardir, 'data',
                                 'TM_WORLD_BORDERS-0.3.shp')

        ds = DataSource(shapefile)
        print(len(ds), "layer")
        layer = ds[0]
        print('%s contains %s geometries (type: %s)' % (layer, len(layer),
                                                         layer.geom_type))
        print(layer.srs)

        # for feature in layer:
        #     print(feature.get('NAME'), feature.geom.num_points)

        mapping = {
            'fips': 'FIPS',
            'iso2': 'ISO2',
            'iso3': 'ISO3',
            'un': 'UN',
            'name': 'NAME',
            'area': 'AREA',
            'pop2005': 'POP2005',
            'region': 'REGION',
            'subregion': 'SUBREGION',
            'lon': 'LON',
            'lat': 'LAT',
            'mpoly': 'MULTIPOLYGON',
        }

        lm = LayerMapping(Country, shapefile, mapping,
                          transform=True, encoding='iso-8859-1')
        # Antarctica doesn't convert to Spherical Mercator
        lm.save(fid_range=(1, 144), verbose=True)
        lm.save(fid_range=(146, 246), verbose=True)
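Because one feature (Antarctica) is excluded via the two fid_range calls above, it can help to confirm first which fid is the problem. A small sketch, reusing the shapefile path from this command; the 'NAME' field comes from the commented-out loop above:

    from django.contrib.gis.gdal import DataSource

    ds = DataSource(shapefile)
    for feature in ds[0]:
        # note the fid of the feature to exclude from fid_range
        print(feature.fid, feature.get('NAME'))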
Example 4
    def handle(self, *args, **options):

        datadir = args[0]
        
            # Loop over FIPS codes 1-56, once for the shapefile of each state
        for i in range(1,57):
            # files are named with zero padded FIPS codes
            if i < 10:
                padded_i = "0"+str(i)
            else:
                padded_i = str(i)
            
            shpfile = os.path.abspath(os.path.join(os.path.dirname(__file__), datadir+'/state_tract_shapefiles/tl_2010_'+padded_i+'_tract10/tl_2010_'+padded_i+'_tract10.shp'))

            print "Attempting import of shapefile "+shpfile

            try:
                lm = LayerMapping(CensusTract, shpfile, censustracts_mapping)
            except OGRException:
                print("Could not open datasource", shpfile)
            else:
                try:
                    lm.save(strict=True, verbose=False, progress=True)
                except IntegrityError:
                    print("Already imported", shpfile)
                    from django.db import transaction
                    transaction.rollback()
                else:
                    print("Imported shapefile " + shpfile)
                    print()
Example 5
    def handle(self, *args, **options):
        self.stdout.write('Import USA State')
        state_shp = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../shpData/southStates.shp'))
        state_mapping = {'statefp': 'STATEFP', 'name': 'NAME', 'geom': 'POLYGON25D'}
        lm = LayerMapping(State, state_shp, state_mapping, transform=False, encoding='iso-8859-1')
        lm.save(strict=True, verbose=True)
        self.stdout.write('Import OK')
Example 6
def populate_GADM(
    model,
    shp_file,
    mapping_dict,
    verbose=False
):
    """
    Populates a model of GADM family
    Parameters
    ----------
    model : <model>
    shp_file : str
    mapping_dict : dict
    verbose : bool
    """
    try:
        lm = LayerMapping(
            model,
            shp_file,
            mapping_dict,
            transform=False,
        )
        with transaction.atomic():
            lm.save(verbose=verbose)
    except IntegrityError as ie:
        print "Integrity error:", ie.message
    except Exception as e:
        print 'Error loading GADM data!'
        print e.message
Example 7
File: load.py Project: oddbird/mlt
def load_parcels(shapefile_path,
                 verbose=False, progress=True, silent=False, stream=None):
    """
    Load parcels from shapefile at given path.

    Raises IntegrityError on a duplicate PL, and rolls back the load.

    Returns number of parcels loaded.

    """
    # monkeypatch no-op transaction handling into LayerMapping, as we
    # wrap it in a transaction including more operations.
    LayerMapping.TRANSACTION_MODES['none'] = lambda func: func

    # first delete existing parcels
    Parcel.objects.all().delete()

    lm = LayerMapping(
        get_parcel_proxy(datetime.datetime.now()),
        shapefile_path, parcel_mapping, transform=True,
        transaction_mode='none')

    lm.save(
        strict=True,
        verbose=verbose,
        progress=progress,
        silent=silent,
        stream=stream or sys.stdout)

    return Parcel.objects.count()
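A minimal alternative sketch (not part of the project above; it assumes a current Django version, where transaction_mode='autocommit' simply skips LayerMapping's own transaction wrapping): the whole load, including the initial delete, runs inside one transaction.atomic() block and rolls back together on IntegrityError:

    from django.db import transaction

    def load_parcels_atomic(shapefile_path):
        with transaction.atomic():
            Parcel.objects.all().delete()
            lm = LayerMapping(
                get_parcel_proxy(datetime.datetime.now()),
                shapefile_path, parcel_mapping, transform=True,
                transaction_mode='autocommit')
            lm.save(strict=True)
        return Parcel.objects.count()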
Example 8
    def handle(self, *args, **options):
        sys.path.append(os.path.join(settings.SITE_DIR, SHAPEFILES_DIR))
        from definitions import SHAPEFILES

        for name, options in SHAPEFILES.items():
            self.stdout.write('Processing \'{}\' now...'.format(name))

            self.stdout.write('Attempting to create collection...')
            collection, created = Collection.objects.get_or_create(
                name=name,
                authority=options['authority'],
                last_updated=options['last_updated'],
                count=0,
                slug=options['slug'],
                source_url=options['source_url']
            )

            if created:
                self.stdout.write('\'{}\' collection created!'.format(name))
            else:
                self.stdout.write('\'{}\' collection already exists!'
                                  ' Not a problem.'.format(name))

            lm = LayerMapping(Shape,
                              os.path.join(SHAPEFILES_DIR, options['file']),
                              options['layer_mapping'],
                              encoding='latin-1')

            lm.save(verbose=True, strict=True)
Example 9
    def handle(self, *args, **options):
        """
        Load community shapefile using LayerMapping; automatically checks projection,
        if necessary, transforms to WGS 1984
        """
        neighborhood_mapping = {
            'boundary': 'MULTIPOLYGON',
            'primary_name': 'COMMUNITY',
            'secondary_name': 'AREA_NUM_1',
        }

        path_to_shp = 'data/chicago_communities/CommAreas.shp'
        lm = LayerMapping(Neighborhood, path_to_shp, neighborhood_mapping)
        self.check_neighborhood_table()
        lm.save(strict=True)

        self.stdout.write('Successfully loaded %s communities from %s layer(s) into database\n'
                          % (len(lm.ds[0]), lm.ds.layer_count))

        # Change case of imported name strings from UPPER to Caps Case
        communities = Neighborhood.objects.all()
        for community in communities:
            names = [name.capitalize() for name in community.primary_name.split()]
            primary_name_caps = " ".join(names) 
            self.stdout.write('Changing name %s ==> %s\n' % (community.primary_name, primary_name_caps))
            community.primary_name = primary_name_caps
            community.save()

        # Export topojson
        export_topojson.Command().handle()
Example 10
def from_shapefile(strict=False, progress=True, verbose=False, **kwargs):
    """
    Load parcel data into the database from the processed shapefile.
    """
    parcel_shp = get_processed_data_file(os.path.join('parcels', 'parcels.shp'))
    mapping = LayerMapping(Parcel, parcel_shp, parcel_mapping, transform=False)
    mapping.save(strict=strict, progress=progress, verbose=verbose, **kwargs)
Example 11
def shp():
    """
    Load the ESRI shapefile from the Census in the County model.
    
    Example usage:
    
        >> from us_counties import load; load.shp();
    
    """
    # Import the database model where we want to store the data
    from models import County
    
    # A crosswalk between the fields in our database and the fields in our
    # source shapefile
    shp2db = {
        'polygon_4269': 'Polygon',
        'state_fips_code': 'STATEFP',
        'county_fips_code': 'COUNTYFP',
        'fips_code': 'CNTYIDFP',
        'county_ansi_code': 'COUNTYNS',
        'short_name': 'NAME',
        'full_name': 'NAMELSAD',
        'csa_code': 'CSAFP',
        'msa_code': 'CBSAFP',
        'mda_code': 'METDIVFP',
        'functional_status': 'FUNCSTAT',
    }
    # Load our model, shape, and the map between them into GeoDjango's magic
    # shape loading function (I also slipped the source coordinate system in
    # there. The Census says they put everything in NAD 83, which translates
    # to 4269 in the SRID id system.)
    lm = LayerMapping(County, shp_file, shp2db, source_srs=4269, encoding='latin-1')
    # Fire away!
    lm.save(verbose=False)
Example 12
def run(verbose=True):
    lm = LayerMapping(Voie, tunisia_voie, voie_mapping, transform=True, encoding='utf8')
    lm.save(strict=True, verbose=verbose)
    for i in Voie.objects.all():
        i.label = slugify(i.label)
        i.nom_usage = slugify(i.nom_usage)
        i.save()
Example 13
    def handle(self, shapefile, manipulator, *args, **options):
        try:
            manip_model = get_class(manipulator)
        except:
            raise Exception("The %s model could not be found.  \nBe sure and provide the complete description: <module name>.models.<manipulator model name>" %manipulator)
            
        ds = DataSource(shapefile)
        if len(ds) != 1:
            raise Exception("Data source should only contain a single layer. Aborting.")
        
        layer = ds[0]
        if len(layer) != 1: 
            raise Exception("Layer should containing ONLY a single feature")

        if not 'polygon' in layer.geom_type.name.lower():
            print(layer.geom_type.name)
            raise Exception("This geometry must be a polygon")

        mapping = {'geometry': 'MULTIPOLYGON'}
            
        lm = LayerMapping(manip_model, shapefile, mapping)
        lm.save()
        manip_geom = manip_model.objects.order_by('-creation_date')[0]
        if options.get('region_name'):
            manip_geom.name = options.get('region_name')
            manip_geom.save()
        else:
            manip_geom.name = layer.name
            manip_geom.save()
        
        print ""
        print "The manipulaotr geometry, %s, has been added to the %s model with primary key = %s" % (manip_geom.name, manipulator, manip_geom.pk)
        
        print "To switch to this geometry, you will need to run 'manage.py change_manipulator_geom %s %s'" % (manip_geom.pk, manipulator)
        print ""
Example 14
def county_import(county_shp, year):
    if year == "2010":
        county_mapping = {
            'state_fips_code': 'STATEFP10',
            'fips_code': 'COUNTYFP10',
            'county_identifier': 'GEOID10',
            'name': 'NAME10',
            'name_and_description': 'NAMELSAD10',
            'legal_statistical_description': 'LSAD10',
            'fips_55_class_code': 'CLASSFP10',
            'feature_class_code': 'MTFCC10',
            'functional_status': 'FUNCSTAT10',
            'mpoly': 'POLYGON',
        }
    else:
        county_mapping = {
            'state_fips_code': 'STATEFP',
            'fips_code': 'COUNTYFP',
            'county_identifier': 'GEOID',
            'name': 'NAME',
            'name_and_description': 'NAMELSAD',
            'legal_statistical_description': 'LSAD',
            'fips_55_class_code': 'CLASSFP',
            'feature_class_code': 'MTFCC',
            'functional_status': 'FUNCSTAT',
            'mpoly': 'POLYGON',
        }
    lm = LayerMapping(County, county_shp, county_mapping, encoding='LATIN1')
    lm.save(verbose=True)
Example 15
 def handle(self, *args, **options):
     mapping = {
         'perimeter': 'PERIMETER',
         'ward': 'WARD',
         'alderman': 'ALDERMAN',
         'ward_phone': 'WARD_PHONE',
         'hall_phone': 'HALL_PHONE',
         'hall_office': 'HALL_OFFIC',
         'address': 'ADDRESS',
         'edit_date1': 'EDIT_DATE1',
         'shape_area': 'SHAPE_AREA',
         'shape_len': 'SHAPE_LEN',
         'geom': 'MULTIPOLYGON'
     }
     shp_data = requests.get(SHP)
     if shp_data.status_code != 200:
         raise CommandError('City data portal returned a %s status when downloading Wards shapefile: %s' % (shp_data.status_code, shp_data.content))
     else:
         s = StringIO(shp_data.content)
         z = zipfile.ZipFile(s)
         data_path = os.path.join(os.path.curdir,'data/shp/wards')
         fname = [f for f in z.namelist() if f.endswith('shp')][0]
         z.extractall(data_path)
         datafile = os.path.join(data_path, fname)
         lm = LayerMapping(Ward, datafile, mapping)
         lm.save(strict=True,progress=True)
Example 16
    def test05_geography_layermapping(self):
        "Testing LayerMapping support on models with geography fields."
        # There is a similar test in `layermap` that uses the same data set,
        # but the County model here is a bit different.
        if not gdal.HAS_GDAL:
            return
        from django.contrib.gis.utils import LayerMapping

        # Getting the shapefile and mapping dictionary.
        shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), "../", "data"))
        co_shp = os.path.join(shp_path, "counties", "counties.shp")
        co_mapping = {"name": "Name", "state": "State", "mpoly": "MULTIPOLYGON"}

        # Reference county names, number of polygons, and state names.
        names = ["Bexar", "Galveston", "Harris", "Honolulu", "Pueblo"]
        num_polys = [1, 2, 1, 19, 1]  # Number of polygons for each.
        st_names = ["Texas", "Texas", "Texas", "Hawaii", "Colorado"]

        lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique="name")
        lm.save(silent=True, strict=True)

        for c, name, num_poly, state in zip(County.objects.order_by("name"), names, num_polys, st_names):
            self.assertEqual(4326, c.mpoly.srid)
            self.assertEqual(num_poly, len(c.mpoly))
            self.assertEqual(name, c.name)
            self.assertEqual(state, c.state)
Example 17
    def test05_geography_layermapping(self):
        "Testing LayerMapping support on models with geography fields."
        # There is a similar test in `layermap` that uses the same data set,
        # but the County model here is a bit different.
        if not gdal.HAS_GDAL: return
        from django.contrib.gis.utils import LayerMapping

        # Getting the shapefile and mapping dictionary.
        shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
        co_shp = os.path.join(shp_path, 'counties', 'counties.shp')
        co_mapping = {'name' : 'Name',
                      'state' : 'State',
                      'mpoly' : 'MULTIPOLYGON',
                      }

        # Reference county names, number of polygons, and state names.
        names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo']
        num_polys = [1, 2, 1, 19, 1] # Number of polygons for each.
        st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']

        lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name')
        lm.save(silent=True, strict=True)

        for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names):
            self.assertEqual(4326, c.mpoly.srid)
            self.assertEqual(num_poly, len(c.mpoly))
            self.assertEqual(name, c.name)
            self.assertEqual(state, c.state)
Example 18
    def handle(self, shapefile, *args, **options):
        ds = DataSource(shapefile)
        if len(ds) != 1:
            raise Exception("Data source should only contain a single layer. Aborting.")

        layer = ds[0]
        if len(layer) != 1: 
            raise Exception("Layer should containing ONLY a single feature")

        if not 'polygon' in layer.geom_type.name.lower():
            print(layer.geom_type.name)
            raise Exception("Study region must be a multigeometry")

        if options.get('region_name'):
            mapping = {
                'geometry': 'MULTIPOLYGON',
            }
        else:
            mapping = {
                'geometry': 'MULTIPOLYGON',
                'name': 'name',
            }

        lm = LayerMapping(StudyRegion, shapefile, mapping, transform=False)
        lm.save()
        study_region = StudyRegion.objects.order_by('-creation_date')[0]
        if options.get('region_name'):
            study_region.name = options.get('region_name')
            study_region.save()
        print ""
        print "Study region created: %s, primary key = %s" % (study_region.name, study_region.pk)
        
        print "To switch to this study region, you will need to run 'python manage.py change_study_region %s'" % (study_region.pk, )
        print ""
Example 19
    def run(self):
        if self.overwrite:
            self.print('Removing existing neighborhoods...', end='')
            if self.real_run:
                Neighborhood.objects.all().delete()
            self.print('Done')

        # Disconnect this temporarily so that the ``unique='name'`` option
        # passed to LayerMapping will work correctly (it collects all
        # records from the RLIS neighborhoods shapefile with the same name
        # into a single database record; if we normalize the names on save,
        # this feature won't work).
        pre_save.disconnect(normalize_name, sender=Neighborhood)

        self.print('Adding neighborhoods...', end='')
        if self.real_run:
            mapping = LayerMapping(
                Neighborhood, self.path, Neighborhood.layer_mapping, source_srs=self.from_srid,
                unique='name')
            mapping.save(strict=True)
        self.print('Done')

        self.print('Normalizing neighborhood names...', end='')
        neighborhoods = Neighborhood.objects.all()
        if self.real_run:
            with transaction.atomic():
                for neighborhood in neighborhoods:
                    neighborhood.name = Neighborhood.normalize_name(neighborhood.name)
                    neighborhood.save()
        self.print('Done')

        pre_save.connect(normalize_name, sender=Neighborhood)
Example 20
 def handle(self, *args, **options):
     world_mapping = {
         'fips' : 'FIPS',
         'iso2' : 'ISO2',
         'iso3' : 'ISO3',
         'un' : 'UN',
         'name' : 'NAME',
         'area' : 'AREA',
         'pop2005' : 'POP2005',
         'region' : 'REGION',
         'subregion' : 'SUBREGION',
         'lon' : 'LON',
         'lat' : 'LAT',
         'mpoly' : 'MULTIPOLYGON',
     }
     user = User.objects.get(id=1)
     world_shp = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                 'data', 'TM_WORLD_BORDERS-0.3.shp'))
     with transaction.commit_on_success():
         lm = LayerMapping(WorldBorder, world_shp, world_mapping,
                           transform=False, encoding='iso-8859-1')
         lm.save(strict=True, verbose=True)
         for country in WorldBorder.objects.all():
             gr = GeospatialReference(
                 title=country.name,
                 address=country.name,
                 geometry=country.mpoly,
                 point=Point(country.lon, country.lat),
                 description='http://thematicmapping.org/',
                 user=user,
             )
             gr.save()
Example 21
def run(verbose=True):
    ds = Dataset(name = '2010 Census Tracts',
        cached = datetime.utcnow().replace(tzinfo=utc),
        cache_max_age = 1000,
        remote_id_field = 'GEOID10',
        name_field = 'NAMELSAD10',
        lat_field = 'INTPTLAT10',
        lon_field = 'INTPTLON10',
        field1_en = 'Land Area',
        field1_name = 'ALAND10',
        field2_en = 'Water Area',
        field2_name = 'AWATER10')


    tract_mapping = {
        'remote_id' : ds.remote_id_field,
        'name' : ds.name_field,
        'lat' : ds.lat_field,
        'lon' : ds.lon_field,
        'field1' : ds.field1_name,
        'field2' : ds.field2_name,
        'mpoly' : 'MULTIPOLYGON',
    }

    tract_shp = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data/tl_2010_36_tract10.shp'))

    lm = LayerMapping(MapPolygon, tract_shp, tract_mapping, transform=False, encoding='iso-8859-1')

    lm.save(strict=True, verbose=verbose)

    ds.save()

    MapPolygon.objects.filter(dataset = None).update(dataset = ds)
Example 22
    def handle(self, *args, **options):
        verbose = options.get('verbose')
        source = options.get('source')

        if verbose:
            console = self.stdout
        else:
            console = None

        if not source:
            print "Source must be specified"
            return 0

        from django.contrib.gis.gdal import DataSource
        ds = DataSource("PG:"
            "host=localhost port=5432 dbname=ex1 user=ex1 password=ex1")

        lyr_ind = -1
        for i in range(len(ds)):
            if str(ds[i])  == source:
                lyr_ind = i
                break

        if lyr_ind == -1:
            print "Could not find table in data source"
            return 0

        lm = LayerMapping(
            Country,
            ds,
            COUNTRY_MAPPING,
            layer=lyr_ind,
            encoding='iso-8859-1')
        lm.save(strict=True, verbose=verbose)
Example 23
def run(verbose=True):
	# args are model name, source file, and mapping dict
	# Transform is false because the database format is SRID 4326, which is WGS84,
	# and the shape file is already in this format
	lm = LayerMapping(WorldBorder, world_shp, world_mapping,
					  transform=False, encoding='iso-8859-1')
	lm.save(strict=True, verbose=verbose)
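A small check (sketch) that backs up the comment above before relying on transform=False: inspect the layer's spatial reference, using the same module-level world_shp path that run() uses:

    from django.contrib.gis.gdal import DataSource

    layer = DataSource(world_shp)[0]
    print(layer.srs)       # WKT of the source SRS read from the .prj file
    print(layer.srs.srid)  # 4326 if an EPSG authority code is present (may be None)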
Example 24
    def handle(self, *args, **options):
        for shp in args:
            lm = LayerMapping(self.config[shp]['model'], self.config[shp]['file'], self.config[shp]['mapping'], 
                encoding='iso-8859-1')
            lm.save(strict=True, verbose=True)

            self.stdout.write('Successfully imported "%s"\n' % shp)
Example 25
    def fill_up_db(shapefile, verbose=False):
        from django.contrib.gis.utils import LayerMapping

        mapping = {
            'name': 'NAME',
            'area_code': 'AREA_CODE',
            'descriptio': 'DESCRIPTIO',
            'file_name': 'FILE_NAME',
            'number': 'NUMBER',
            'number0': 'NUMBER0',
            'polygon_id': 'POLYGON_ID',
            'unit_id': 'UNIT_ID',
            'code': 'CODE',
            'hectares': 'HECTARES',
            'area': 'AREA',
            'type_code': 'TYPE_CODE',
            'descript0': 'DESCRIPT0',
            'type_cod0': 'TYPE_COD0',
            'descript1': 'DESCRIPT1',
            'geom': 'POLYGON',
        }
        lm = LayerMapping(Region, shapefile, mapping,
                          transform=True, encoding='iso-8859-1')
        lm.save(strict=True, verbose=verbose)
        Region.objects.filter(descriptio__icontains='Welsh Assembly').delete()
        print "Regions imported"
Example 26
def shp():
    """
    Load the ESRI shapefile from the Census in the District model.
    
    Example usage:
    
        >> from congressional_districts import load; load.shp();
    
    """
    # Import the database model where we want to store the data
    from models import District
    
    # A crosswalk between the fields in our database and the fields in our
    # source shapefile
    shp2db = {
        'state_fips_code' : 'STATE',
        'district_number' : 'CD',
        'lsad' : 'LSAD',
        'name' : 'NAME',
        'lsad_trans' : 'LSAD_TRANS',
        'polygon_4269' : 'POLYGON',
    }
    # Load our model, shape, and the map between them into GeoDjango's magic
    # shape loading function (I also slipped the source coordinate system in
    # there. The Census says they put everything in NAD 83, which translates
    # to 4269 in the SRID id system.)
    lm = LayerMapping(District, shp_file, shp2db, source_srs=4269, encoding='latin-1')
    # Fire away!
    lm.save(verbose=False)
Example 27
def run(verbose=True):
    lm = LayerMapping(Noeud, tunisia_nodes, node_mapping, transform=True, encoding='utf8')
    lm.save(strict=True, verbose=verbose)
    for i in Noeud.objects.all():
        i.type = slugify(i.type)
        i.label = slugify(i.label)
        i.save()
Example 28
def county_import(county_shp):
    if (
        "2015" in county_shp
        or "2014" in county_shp
        or "2013" in county_shp
        or "2012" in county_shp
        or "2011" in county_shp
    ):
        county_mapping = {
            "state_fips_code": "STATEFP",
            "fips_code": "COUNTYFP",
            "county_identifier": "GEOID",
            "name": "NAME",
            "name_and_description": "NAMELSAD",
            "legal_statistical_description": "LSAD",
            "fips_55_class_code": "CLASSFP",
            "feature_class_code": "MTFCC",
            "functional_status": "FUNCSTAT",
            "mpoly": "POLYGON",
        }
    else:
        county_mapping = {
            "state_fips_code": "STATEFP10",
            "fips_code": "COUNTYFP10",
            "county_identifier": "GEOID10",
            "name": "NAME10",
            "name_and_description": "NAMELSAD10",
            "legal_statistical_description": "LSAD10",
            "fips_55_class_code": "CLASSFP10",
            "feature_class_code": "MTFCC10",
            "functional_status": "FUNCSTAT10",
            "mpoly": "POLYGON",
        }
    lm = LayerMapping(County, county_shp, county_mapping, encoding="LATIN1")
    lm.save(verbose=True)
Example 29
def pre_save_layer(instance, sender, **kwargs):
    """Save to postgis if there is a datastore.
    """
    # Abort if a postgis DATABASE is not configured.
    if DYNAMIC_DATASTORE not in settings.DATABASES:
        return

    # Do not process if there is no table.
    base_file = instance.get_base_file()[0]
    if base_file is None or base_file.name != 'shp':
        return

    filename = base_file.file.path

    # Load the table in postgis and get a mapping from fields in the database
    # and fields in the Shapefile.
    mapping = file2pgtable(filename, instance.name)

    # Get a dynamic model with the same name as the layer.
    model_description, __ = ModelDescription.objects.get_or_create(
        name=instance.name)

    # Set up the fields with the postgis table
    generate_model(model_description, mapping, db_key=DYNAMIC_DATASTORE)

    # Get the new actual Django model.
    TheModel = model_description.get_django_model()

    # Use layermapping to load the layer with geodjango
    lm = LayerMapping(TheModel, filename, mapping,
                      encoding=instance.charset,
                      using=DYNAMIC_DATASTORE,
                      transform=None
                      )
    lm.save()
Example 30
def run(verbose=True):
    if len(Neighborhood.objects.all()) == 0:
        lm = LayerMapping(Neighborhood, neighborhood_shape, mapping)
        lm.save(strict=True, verbose=verbose)
        Neighborhood.objects.exclude(city='Seattle').delete()
        print "There are %d neighborhoods in Seattle" % len(Neighborhood.objects.all())
    else:
        print "Neighborhood shapefile already loaded (at least one neighborhood exists). Delete existing records via admin tool"
Example 31
    def handle(self, *args, **options):
        wb_count = WorldBorder.objects.count()
        if wb_count:
            self.stdout.write(
                '{} countries already exist'.format(wb_count))
            return

        lm = LayerMapping(
            WorldBorder,
            WORLD_SHAPEFILE,
            WORLD_SHAPEFILE_MODEL_MAPPING,
            transform=False,
            encoding='iso-8859-1'
        )
        lm.save(strict=True, verbose=False)
        self.stdout.write("Loaded world borders")
Example 32
    def handle(self, *args, **kwargs):
        wc_count = Constituency.objects.count()
        if wc_count:
            self.stdout.write(
                self.style.WARNING(
                    '{} constituencies already exist'.format(wc_count)))
            return

        lm = LayerMapping(Constituency,
                          constituency_shp,
                          constituency_mapping,
                          transform=False,
                          encoding='iso-8859-1')
        lm.save(strict=True, verbose=False)
        self.stdout.write(
            self.style.SUCCESS("Loaded constituencies shapefile"))
Example 33
def run(verbose=True):
    numfeatures_veredas = 32305

    # lm = LayerMapping(WorldBorder, ds, world_mapping, transform=False)
    # lm.save(strict=True, verbose=verbose)
    lm2 = LayerMapping(veredas, ds2, veredas_mapping, transaction_mode='autocommit', transform=False)
    lm2.save(strict=False, fid_range=(0, 5000), verbose=False, progress=True, step=500)
    lm2.save(strict=False, fid_range=(5001, 15000), verbose=False, progress=True, step=500)
    lm2.save(strict=False, fid_range=(15001, 20000), verbose=False, progress=True, step=500)
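The three manual calls above can be generalized. A sketch (same lm2 object; the batch size is an assumption) that walks the layer in fixed-size fid_range windows, using the feature count already defined in numfeatures_veredas:

    batch = 5000
    for start in range(0, numfeatures_veredas, batch):
        # fid_range acts as a slice over the layer, so half-open windows avoid gaps
        lm2.save(strict=False, fid_range=(start, min(start + batch, numfeatures_veredas)),
                 progress=True, step=500)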
Example 34
def border_data(apps, schema_editor):
    # This border data sourced from iMapBC, specifically the "Province of BC - ABMS - Outlined" layer.
    # Shapefile inspected using:
    # ogrinfo -so gwells/migrations/ABMS_PROVINCE_SP/ABMS_PROV_polygon.shp ABMS_PROV_polygon
    # Model and mapping generated using:
    # python manage.py ogrinspect gwells/migrations/ABMS_PROVINCE_SP/ABMS_PROV_polygon.shp gwells.Border \
    #   --srid=4269 --mapping --multi
    Border = apps.get_model('gwells', 'Border')

    tmp_path = '/tmp/BCGW_ABMS_PROV-migrations/'

    zip_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            'migrations/BCGW_ABMS_PROV.zip')
    with ZipFile(zip_path, 'r') as zipObj:
        zipObj.extractall(tmp_path)

    border_shp = os.path.join(tmp_path,
                              'ABMS_PROVINCE_SP/ABMS_PROV_polygon.shp')
    if not os.path.exists(border_shp):
        raise FileNotFoundError('file not found: {}'.format(border_shp))

    border_mapping = {
        'se_a_c_flg': 'SE_A_C_FLG',
        'obejctid': 'OBEJCTID',
        'shape': 'SHAPE',
        'length_m': 'LENGTH_M',
        'oic_number': 'OIC_NUMBER',
        'area_sqm': 'AREA_SQM',
        'upt_date': 'UPT_DATE',
        'upt_type': 'UPT_TYPE',
        'chng_org': 'CHNG_ORG',
        'aa_parent': 'AA_PARENT',
        'aa_type': 'AA_TYPE',
        'aa_id': 'AA_ID',
        'aa_name': 'AA_NAME',
        'abrvn': 'ABRVN',
        'bdy_type': 'BDY_TYPE',
        'oic_year': 'OIC_YEAR',
        'afctd_area': 'AFCTD_AREA',
        'geom': 'MULTIPOLYGON25D',
    }

    lm = LayerMapping(Border, border_shp, border_mapping, transform=False)
    lm.save(strict=True, verbose=True)

    # Now clean up
    shutil.rmtree(tmp_path)
Example 35
 def handle(self, *args, **options):
     self.stdout.write('Import USA State')
     state_shp = os.path.abspath(
         os.path.join(os.path.dirname(__file__),
                      '../../../shpData/southStates.shp'))
     state_mapping = {
         'statefp': 'STATEFP',
         'name': 'NAME',
         'geom': 'POLYGON25D',
     }
     lm = LayerMapping(State,
                       state_shp,
                       state_mapping,
                       transform=False,
                       encoding='iso-8859-1')
     lm.save(strict=True, verbose=True)
     self.stdout.write('Import OK')
Example 36
def run(verbose=True):
    """
    load data from shp files to AlderDistrict table
    to reload data, rename shp files in format
    district.dbf
    district.prj
    district.shp
    district.shx
    """
    lm = LayerMapping(
        AlderDistrict,
        DISTRICT_SHP,
        DISTRICT_MAPPING,
        transform=False,
        encoding='iso-8859-1',
    )
    lm.save(strict=True, verbose=verbose)
Example 37
def load_gpx_lm(verbose=True):
    """Walks the datacentre/data/gpx folder and loads the GPX tracks + associated dtours into the DB.

    Uses LayerMapping --> docs at https://docs.djangoproject.com/en/3.1/ref/contrib/gis/layermapping/
    """

    dir_gpx_data = Path(
        "/home/eguiwow/github/BIC/bic/datacentre/data/gpx"
    )  # Directory where the GPX files are currently stored
    for filepath in dir_gpx_data.glob('*.gpx'):
        print(filepath)
        lm = LayerMapping(
            Track, str(filepath), track_mapping, layer=2,
            transform=False)  # layer=2 -> the tracks layer of a GPX file
        lm.save(
            strict=False, verbose=verbose
        )  # strict was True before; strict=True aborts the run on any failed feature
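To double-check which layer index holds the tracks, a short sketch that lists the layers OGR exposes for one GPX file (the file name is hypothetical):

    from django.contrib.gis.gdal import DataSource

    ds = DataSource("/home/eguiwow/github/BIC/bic/datacentre/data/gpx/example.gpx")  # hypothetical file
    for idx, layer in enumerate(ds):
        print(idx, layer.name, layer.geom_type)  # the tracks layer is the one named 'tracks'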
Example 38
def run(verbose=True):
    "Making stuff happen but this line is here because some/all stuff below here might get commented out."

    # from .models import TsunamiZone
    # lm2 = LayerMapping(TsunamiZone, tsunami_shp, tsunamizone_mapping,
    #                transform=True, encoding='iso-8859-1',
    #                unique=['typeid'])
    # lm2.save(strict=True, verbose=verbose)

    from .models import ImpactZoneData
    lm3 = LayerMapping(ImpactZoneData,
                       impact_shp,
                       impactzone_mapping,
                       transform=True,
                       encoding='iso-8859-1',
                       unique=['zoneid'])
    lm3.save(strict=True, verbose=verbose)
Example 39
    def handle(self, *args, **options):
        if options['truncate']:
            self.w('Truncate zones table...', ending='')
            FirefightersZone.objects.all().delete()
            self.w(self.style.SUCCESS('Done.'))

        mapping = {
            'name': 'BWZone',  # The 'name' model field maps to the 'BWZone' layer field.
            'mpolygon': 'geometry',
        }

        lm = LayerMapping(FirefightersZone,
                          options['path_to_datasource'],
                          mapping,
                          transaction_mode='autocommit')
        lm.save(stream=self.stdout, verbose=True)
Example 40
def run(verbose=True):
    for entry in data:
        file_name = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'data',
                         f'{entry["data_file"]}.shp'), )

        old_data = entry["data_class"].objects.all()
        for item in old_data:
            item.delete()

        print("-" * 80)
        print(f"{file_name}\n{entry['data_class']}\n{entry['mapping']}")
        lm = LayerMapping(entry["data_class"],
                          file_name,
                          entry["mapping"],
                          transform=False)
        lm.save(strict=True, verbose=verbose)
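run() above expects a module-level data list; a hypothetical entry (model, file name, and mapping invented for illustration) would look like:

    data = [
        {
            "data_file": "world_borders",  # loads data/world_borders.shp next to this module
            "data_class": WorldBorder,     # target model for LayerMapping
            "mapping": {"name": "NAME", "mpoly": "MULTIPOLYGON"},
        },
    ]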
Example 41
def import_shapes(shapefile_path, logger):
    srs = SpatialReference(srs_wkt)
    lm = LayerMapping(Parcel,
                      shapefile_path, {
                          "shape_leng": "SHAPE_Leng",
                          "shape_area": "SHAPE_Area",
                          "map_par_id": "MAP_PAR_ID",
                          "loc_id": "LOC_ID",
                          "poly_type": "POLY_TYPE",
                          "map_no": "MAP_NO",
                          "source": "SOURCE",
                          "plan_id": "PLAN_ID",
                          "last_edit": "LAST_EDIT",
                          "town_id": "TOWN_ID",
                          "shape": "POLYGON"
                      },
                      source_srs=srs)
    lm.save(strict=True)
Example 42
    def do_import(self):
        """ Imports objects into database """
        self.stdout.write(self.style.MIGRATE_HEADING('\nBeginning import:'))

        region_mapping = {'name': 'NAME', 'polygon': 'POLYGON'}

        region_shp = '../data/kx-nz-regional-councils-2012-yearly-pattern-SHP/nz-regional-councils-2012-yearly-pattern.shp'

        lm = LayerMapping(
            Region,
            region_shp,
            region_mapping,
            transform=False,
            encoding='utf-8',
        )
        lm.save(strict=True, verbose=False)

        self.stdout.write(self.style.SUCCESS('\nImport complete'))
Example 43
def run(verbose=True):
    BUCKET_NAME = 'hacko-data-archive'
    KEY = '2018-housing-affordability/data/permits/'
    s3 = boto3.resource('s3')

    f = 'Residential_Building_Permits.geojson.json'
    file_path = '/data/permits/{}'.format(f)
    if not os.path.isfile(file_path):
        key = KEY + f
        s3.Bucket(BUCKET_NAME).download_file(key, file_path)

    PermitData.objects.all().delete()
    lm = LayerMapping(PermitData,
                      file_path,
                      mapping,
                      transform=False,
                      encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
Example 44
def prep_layer_mapping(shpfile_name, model, mapping):
    shpfile = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'data', shpfile_name))
    lm = LayerMapping(model,
                      shpfile,
                      mapping,
                      transform=False,
                      encoding='iso-8859-1')
    return lm
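Since prep_layer_mapping() only builds the LayerMapping and returns it, the caller decides when to save. A usage sketch with hypothetical model and mapping names:

    lm = prep_layer_mapping('world_borders.shp', WorldBorder, world_mapping)  # hypothetical arguments
    lm.save(strict=True, verbose=True)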
Example 45
def load():
    #     # OB_districtshp = '/home/mohab/Main Folder/Airport/nash/Airport/Obied Airport/Tubra/Shapefiles/Ob_districts_fin.shp'
    #     # lm1 = LayerMapping(Obeid_districts,OB_districtshp,OB_Districts_mapping,transform=True,encoding='utf-8')
    #     # lm1.save()
    OB_streets = '/home/mohab/Main Folder/Airport/nash/Airport/Obied Airport/Tubra/Shapefiles/Ob_streets.shp'
    lm2 = LayerMapping(Obeid_streets,
                       OB_streets,
                       Obeid_streets_mapping,
                       transform=True,
                       encoding='utf-8')
    lm2.save()


#     OB_urban_area = '/home/mohab/Main Folder/Airport/nash/Airport/Obied Airport/Tubra/Shapefiles/Ob_urban_area.shp'
#     lm3 = LayerMapping(Ob_urban_area,OB_urban_area,ob_urban_area_mapping,transform=True,encoding='utf-8')
#     lm3.save()

# import this file and run the load function in the shell
Example 46
    def handle(self, *args, **options):
        if options['geolevel']:
            # swap keys and values in TERRITORIO
            # use --geolevel=regione
            geolevels = dict(
                (value.lower(), key)
                for key, value in dict(Territorio.TERRITORIO).items())
            geolevel = geolevels[options['geolevel'].lower()]

            def _set_geolevel(sender, instance, **kwargs):
                """
                Signal callback to set the geolevel for a
                place
                """
                instance.territorio = geolevel

            pre_save.connect(_set_geolevel, sender=Territorio)

            # based, on geolevel, define source, mapping and reference system ID
            if geolevel == 'C':
                shapefile = self.comune_shp
                mapping = self.comune_mapping
                source_srs = 23032
            elif geolevel == 'P':
                shapefile = self.provincia_shp
                mapping = self.provincia_mapping
                source_srs = 23032
            elif geolevel == 'R':
                shapefile = self.regione_shp
                mapping = self.regione_mapping
                source_srs = 900913
            else:
                raise CommandError(
                    "supported geolevels: regione, provincia or comune")

            lm = LayerMapping(Territorio,
                              shapefile,
                              mapping,
                              encoding=options['encoding'],
                              source_srs=source_srs)
            lm.save(verbose=True)

        else:
            raise CommandError("geolevel must be specified")
Example 47
def import_country(shp='evaluations/data/countries.shp'):
    """
    Import the countries from a shapefile.
    """
    # first delete all features (if any)
    Country.objects.all().delete()
    # mapping model-shp
    country_mapping = {
        'name': 'NAME',
        'geometry': 'MULTIPOLYGON',
    }
    # import features from shapefile to model
    countries = LayerMapping(Country,
                             shp,
                             country_mapping,
                             transform=False,
                             encoding='iso-8859-1')
    countries.save(verbose=True, progress=True)
    print('Import completed.')
Example 48
    def handle(self, *args, **options):
        global znacka_id, status_id, author_id, import_fields, table_name
        znacka_id = options['znacka_id']
        status_id = options['status_id']
        author_id = options['author_id']
        data_source = DataSource(args[0])
        self.print_layer_info(data_source)
        fields = data_source[0].fields
        geom_type = data_source[0].geom_type
        table_name = data_source[0].name

        if geom_type == 'Point':
            mapping = {
                       'geom' : 'POINT',
                      }
    
        if geom_type == "LineString":
            mapping = {
                       'geom' : 'LINESTRING',
                      }
        
        if geom_type == "Polygon":
            mapping = {
                       'geom' : 'POLYGON',
                      }

        for field in fields:
            if field == 'popis':
                mapping['desc'] = 'popis'
                continue
            if field == 'nazev':
                mapping['nazev'] = 'nazev'
                continue
            mapping['sit_' + field] = field

        import_fields = fields

        source_srs = "+proj=krovak +lat_0=49.5 +lon_0=24.83333333333333 +alpha=30.28813972222222 \
                +k=0.9999 +x_0=0 +y_0=0 +ellps=bessel +pm=greenwich +units=m +no_defs \
                +towgs84=570.8,85.7,462.8,4.998,1.587,5.261,3.56"
        encoding = "WINDOWS-1250"
        lm = LayerMapping(ImportPoi, args[0], mapping, source_srs = source_srs, encoding = encoding)
        lm.save(verbose=True)
Example 49
def load_gpkg_lyr_to_model(model,
                           model_mapping,
                           path_gpkg,
                           nom_lyr,
                           verbose=True,
                           progress=True):
    """

    Args:
        model:
        model_mapping:
        path_gpkg:
        nom_lyr:
        verbose:
        progress:
    """
    ds = DataSource(path_gpkg)
    lm = LayerMapping(model, ds, model_mapping, layer=nom_lyr, transform=False)
    lm.save(strict=True, verbose=verbose, progress=progress)
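A usage sketch for the helper above (model, mapping, path, and layer name are all hypothetical):

    load_gpkg_lyr_to_model(
        model=Commune,                                          # hypothetical model
        model_mapping={"name": "nom", "geom": "MULTIPOLYGON"},  # hypothetical mapping
        path_gpkg="data/admin.gpkg",                            # hypothetical GeoPackage
        nom_lyr="communes",                                     # layer name inside the GeoPackage
    )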
Example 50
def airspace_import(path=''):
    from django.contrib.gis.gdal import DataSource

    airspace_mapping = {
        'mpoly': 'POLYGON',
        'ident': u'IDENT',
        'name': u'NAME',
        'upper_desc': u'UPPER_DESC',
        'upper_val': u'UPPER_VAL',
        'upper_uom': u'UPPER_UOM',
        'upper_code': u'UPPER_CODE',
        'lower_desc': u'LOWER_DESC',
        'lower_val': u'LOWER_VAL',
        'lower_uom': u'LOWER_UOM',
        'lower_code': u'LOWER_CODE',
        'type_code': u'TYPE_CODE',
        'local_type': u'LOCAL_TYPE',
        'airspace_class': u'CLASS',
        'mil_code': u'MIL_CODE',
        'comm_name': u'COMM_NAME',
        'level': u'LEVEL',
        'sector': u'SECTOR',
        'onshore': u'ONSHORE',
        'exclusion': u'EXCLUSION',
        'wkhr_code': u'WKHR_CODE',
        'wkhr_remark': u'WKHR_RMK',
        # None: u'DST',  # no data currently
        # None: u'GMTOFFSET',  # no data currently
        # None: u'SHAPE_Leng',  # no data currently
        # None: u'SHAPE_Area',  # no data currently
    }
    if os.path.exists(path):
        # spelunk the source
        ds = DataSource(path)
        layer = ds[0]
        print('Fields: %s' % layer.fields)
        print('Geom Type: %s' % layer.geom_type)
        print('SRS: %s' % layer.srs)

        lm = LayerMapping(Airspace, path, airspace_mapping)
        lm.save(verbose=True)
    else:
        raise NoShapeFileException
Example 51
def read_shp(request):

    ds = DataSource('/opt/terra_indigena/tis_sirgas2000.shp')
    print(ds[0].fields)

    print(ds[0][0][0])
    print(ds[0][0].get('superficie'))
    print(ds[0][0].geom)

    mapping = {'nome': 'terrai_nom', 'geom': 'POLYGON'}
    lm = LayerMapping(Indigena, '/opt/terra_indigena/tis_sirgas2000.shp',
                      mapping)
    lm.save(verbose=True)

    parcelas = Indigena.objects.filter(nome__icontains='k')
    #print len(parcelas)

    return render_to_response('terra_indigena.html', {"geo": parcelas},
                              context_instance=RequestContext(request))
Example 52
def run(verbose=True):
    lm = LayerMapping(WorldBorder,
                      f"{world_shp}",
                      world_mapping,
                      transform=False)
    lm.save(strict=True, verbose=verbose)

    lm2 = LayerMapping(Campsite,
                       f"{campsite_shp}",
                       campsite_mapping,
                       transform=False)
    lm2.save(strict=True, verbose=verbose)
Example 53
    def handle(self, *args, **options):

        dataSet = DataSource(
            os.path.join(os.path.dirname(__file__), 'datafiles',
                         'Streets_Permit_Jobs_Line.geojson'))

        fieldMap = {
            'objectid': 'OBJECTID',
            'linkpath': 'LinkPath',
            'projectid': 'ProjectID',
            'projectname': 'ProjectName',
            'status': 'Status',
            'contactname': 'ContactName',
            'shape_length': 'Shape_Length',
            'geom': 'UNKNOWN',
        }

        lm = LayerMapping(StPJline, dataSet, fieldMap)
        lm.save()
Example 54
    def setUp(self):
        # met regions
        dt = models.DivisionType(name='met_region')
        dt.save()

        def pre_save_callback(sender, instance, *args, **kwargs):
            instance.type = dt

        mapping = {
            'name': 'NAME',
            'mpoly': 'MULTIPOLYGON',
        }

        lm = LayerMapping(models.Division, METREGIONS, mapping, transform=False)
        pre_save.connect(pre_save_callback, sender=models.Division)
        try:
            lm.save(strict=True, verbose=False)
        except Exception as exc:
            print(repr(exc))
            raise
        finally:
            pre_save.disconnect(pre_save_callback, sender=models.Division)

        # toy regions
        dt = models.DivisionType(name='toy')
        dt.save()

        coords = [
            [(1, 1), (2, 1), (2, 2), (1, 2), (1, 1)],
            [(2, 1), (3, 1), (3, 2), (2, 2), (2, 1)],
            [(3, 1), (4, 1), (4, 2), (3, 2), (3, 1)],
            [(1, 2), (2, 2), (2, 3), (1, 3), (1, 2)],
            [(2, 2), (3, 2), (3, 3), (2, 3), (2, 2)],
            [(3, 2), (4, 2), (4, 3), (3, 3), (3, 2)],
            [(1, 3), (2, 3), (2, 4), (1, 4), (1, 3)],
            [(2, 3), (3, 3), (3, 4), (2, 4), (2, 3)],
            [(3, 3), (4, 3), (4, 4), (3, 4), (3, 3)],
            [(4, 3), (5, 3), (5, 4), (4, 4), (4, 3)],
        ]
        for (i, c) in enumerate(coords):
            mpoly = MultiPolygon(Polygon(c))
            models.Division(name="%02u" % (i+1), code=str(i), mpoly=mpoly, type=dt).save()
Example 55
def run(verbose=True):
    lm_sp = LayerMapping(SaoPaulo,
                         saopaulo_shp,
                         saopaulo_mapping,
                         transform=False,
                         encoding='iso-8859-1')
    lm_mn = LayerMapping(Municipios,
                         municipios_shp,
                         municipios_mapping,
                         transform=False,
                         encoding='iso-8859-1')
    lm_sp.save(strict=True, verbose=True)
    lm_mn.save(strict=True, verbose=True)
Example 56
def update_data(verbose=True):
    overpass_url = "http://overpass-api.de/api/interpreter"

    print("Sending request to ", overpass_url)
    response = requests.get(overpass_url, params={'data': OVERPASS_QUERY})

    print("Converting OSM json to geojson")
    geojson = osmjson2geojson.convert(json.loads(response.text))

    osm_id_list = []
    for feature in geojson['features']:
        osm_id_list.append(feature['id'])

    # set active the ids in the list
    queryset = Places.objects.filter(id__in=osm_id_list)
    queryset.update(active=True)

    print("Inactivate removed entries")
    # set inactive the ids not in the list
    queryset = Places.objects.all().exclude(id__in=osm_id_list)
    queryset.update(active=False)

    print("Writing temporary geojson file")
    temp_path = Path(__file__).resolve().parent / 'data' / "temp.json"

    try:
        os.remove(temp_path)
    except FileNotFoundError:
        print("no temp file at: ", temp_path)
        print("but that is ok")
    geojson_file = open(temp_path, "w")
    geojson_file.write(json.dumps(geojson))
    geojson_file.close()

    print("Execute LayerMapping")
    lm = LayerMapping(Places,
                      temp_path.as_posix(),
                      places_mapping,
                      transform=False)
    print("Save entries to database")
    lm.save(strict=True, verbose=verbose)
Example 57
    def import_state(self, num):
        for chamber, mapping in {'su': upper_district_mapping,
                                 'sl': lower_district_mapping}.items():
            path = os.path.abspath(os.path.join(
                    os.path.dirname(os.path.dirname(os.path.dirname(
                                __file__))), 'data/'))

            path = os.path.join(path, "%s%s_d11.shp" % (chamber, num))

            try:
                LayerMapping(District, path, mapping,
                             source_srs=SOURCE_SRS,
                             transform=True).save(strict=True)
            except:
                try:
                    self.download_state_shapefiles(path, num)
                    LayerMapping(District, path, mapping,
                                 source_srs=SOURCE_SRS,
                                 transform=True).save(strict=True)
                except:
                    print('error importing %s' % path)
Example 58
def building_footprints():
    fps = SocrataCatalogItem.objects.get(title='Building Footprint')
    url, extension = fps.get_distribution_type_url('Shapefile')
    fps.download_distribution(url, extension)
    fps.extract_zip(fps.orig_file_loc)
    fps_shp = fps.get_staged_file_path(extension='shp')

    fps_mapping = {
        'num_stories': 'numstories',
        'shape_length': 'shape_leng',
        'shape_area': 'shape_area',
        'feature_code': 'featurecod',
        'geopin': 'geopin',
        'object_id': 'objectid',
        'building_height': 'bldgheight',
        'the_geom': 'MULTIPOLYGON'
        }
    lm = LayerMapping(
            BuildingFootprint, fps_shp, fps_mapping, transform=False, encoding='iso-8859-1'
            )
    lm.save(strict=True, verbose=True)
Example 59
def configure_models(instance, sender, **kwargs):
    """Save to postgis if there is a datastore.
    """
    # Abort if a postgis DATABASE is not configured.
    if DYNAMIC_DATASTORE not in settings.DATABASES:
        return

    # Do not process if there is no table.
    base_file = instance.get_base_file()[0]
    if base_file is None or base_file.name != 'shp':
        return

    filename = base_file.file.path

    # Load the table in postgis and get a mapping from fields in the database
    # and fields in the Shapefile.
    mapping = file2pgtable(filename, instance.name)

    # Get a dynamic model with the same name as the layer.
    model_description, __ = ModelDescription.objects.get_or_create(
        name=instance.name)

    # Set up the fields with the postgis table
    generate_model(model_description, mapping, db_key=DYNAMIC_DATASTORE)

    # Get the new actual Django model.
    TheModel = model_description.get_django_model()

    # Use layermapping to load the layer with geodjango
    if not has_datastore:
        lm = LayerMapping(TheModel,
                          filename,
                          mapping,
                          encoding=instance.charset,
                          using=DYNAMIC_DATASTORE,
                          transform=None)
        lm.save()

    else:
        post_save_layer(instance, sender, **kwargs)
Example 60
    def handle(self, shapefile, manipulator, *args, **options):
        try:
            manip_model = get_class(manipulator)
        except:
            raise Exception(
                "The %s model could not be found.\nBe sure to provide the complete description: <module name>.models.<manipulator model name>"
                % manipulator)

        ds = DataSource(shapefile)
        if len(ds) != 1:
            raise Exception(
                "Data source should only contain a single layer. Aborting.")

        layer = ds[0]
        if len(layer) != 1:
            raise Exception("Layer should containing ONLY a single feature")

        if not 'polygon' in layer.geom_type.name.lower():
            print(layer.geom_type.name)
            raise Exception("This geometry must be a polygon")

        mapping = {'geometry': 'MULTIPOLYGON'}

        lm = LayerMapping(manip_model, shapefile, mapping)
        lm.save()
        manip_geom = manip_model.objects.order_by('-creation_date')[0]
        if options.get('region_name'):
            manip_geom.name = options.get('region_name')
            manip_geom.save()
        else:
            manip_geom.name = layer.name
            manip_geom.save()

        print ""
        print "The manipulaotr geometry, %s, has been added to the %s model with primary key = %s" % (
            manip_geom.name, manipulator, manip_geom.pk)

        print "To switch to this geometry, you will need to run 'manage.py change_manipulator_geom %s %s'" % (
            manip_geom.pk, manipulator)
        print ""