def handle(self, *args, **options):
    sys.path.append(os.path.join(settings.SITE_DIR, SHAPEFILES_DIR))
    from definitions import SHAPEFILES
    # `opts` instead of `options` so the command's own kwargs aren't shadowed.
    for name, opts in SHAPEFILES.items():
        self.stdout.write("Processing '{}' now...".format(name))
        self.stdout.write('Attempting to create collection...')
        collection, created = Collection.objects.get_or_create(
            name=name,
            authority=opts['authority'],
            last_updated=opts['last_updated'],
            count=0,
            slug=opts['slug'],
            source_url=opts['source_url']
        )
        if created:
            self.stdout.write("'{}' collection created!".format(name))
        else:
            self.stdout.write("'{}' collection already exists!"
                              " Not a problem.".format(name))
        lm = LayerMapping(Shape, os.path.join(SHAPEFILES_DIR, opts['file']),
                          opts['layer_mapping'], encoding='latin-1')
        lm.save(verbose=True, strict=True)
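# A hedged sketch of the `definitions` module the command above imports; the
# structure is inferred from the keys it reads, and every value here (names,
# dates, URLs, file names) is a hypothetical placeholder.
SHAPEFILES = {
    'Census Tracts': {
        'authority': 'U.S. Census Bureau',        # assumption
        'last_updated': '2010-01-01',             # assumption
        'slug': 'census-tracts',
        'source_url': 'https://www.census.gov/',  # assumption
        'file': 'tracts.shp',                     # relative to SHAPEFILES_DIR
        'layer_mapping': {'name': 'NAME', 'geom': 'MULTIPOLYGON'},
    },
}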
def run(verbose=True):
    lm = LayerMapping(mun98nic, mun98nic_shp, mun98nic_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)

    lm = LayerMapping(dep50nic, dep50nic_shp, dep50nic_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
def handle(self, *args, **options): """ Load community shapefile using LayerMapping; automatically checks projection, if necessary transforms to WSG 1984 """ neighborhood_mapping = { 'boundary': 'MULTIPOLYGON', 'primary_name': 'COMMUNITY', 'secondary_name': 'AREA_NUM_1', } path_to_shp = 'data/chicago_communities/CommAreas.shp' lm = LayerMapping(Neighborhood, path_to_shp, neighborhood_mapping) self.check_neighborhood_table() lm.save(strict=True) self.stdout.write('Successfully loaded %s communities from %s layer(s) into database\n' % (len(lm.ds[0]), lm.ds.layer_count)) # Change case of imported name strings from UPPER to Caps Case communities = Neighborhood.objects.all() for community in communities: names = [name.capitalize() for name in community.primary_name.split()] primary_name_caps = " ".join(names) self.stdout.write('Changing name %s ==> %s\n' % (community.primary_name, primary_name_caps)) community.primary_name = primary_name_caps community.save() # Export topojson export_topojson.Command().handle()
def run(verbose=True):
    # Args are the model, the source file, and the mapping dict.
    # transform is False because the database uses SRID 4326 (WGS 84)
    # and the shapefile is already in this format.
    lm = LayerMapping(WorldBorder, world_shp, world_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
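# Usage sketch: scripts shaped like run() above are typically invoked from
# the Django shell, as in the GeoDjango tutorial (the world/load.py module
# path is an assumption):
#
#   $ python manage.py shell
#   >>> from world import load
#   >>> load.run()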
def run(self):
    if self.overwrite:
        self.print('Removing existing neighborhoods...', end='')
        if self.real_run:
            Neighborhood.objects.all().delete()
        self.print('Done')

    # Disconnect this temporarily so that the ``unique='name'`` option
    # passed to LayerMapping will work correctly (it collects all
    # records from the RLIS neighborhoods shapefile with the same name
    # into a single database record; if we normalize the names on save,
    # this feature won't work).
    pre_save.disconnect(normalize_name, sender=Neighborhood)

    self.print('Adding neighborhoods...', end='')
    if self.real_run:
        mapping = LayerMapping(
            Neighborhood, self.path, Neighborhood.layer_mapping,
            source_srs=self.from_srid, unique='name')
        mapping.save(strict=True)
    self.print('Done')

    self.print('Normalizing neighborhood names...', end='')
    neighborhoods = Neighborhood.objects.all()
    if self.real_run:
        with transaction.atomic():
            for neighborhood in neighborhoods:
                neighborhood.name = Neighborhood.normalize_name(neighborhood.name)
                neighborhood.save()
    self.print('Done')

    pre_save.connect(normalize_name, sender=Neighborhood)
def handle(self, *args, **options):
    world_mapping = {
        'fips': 'FIPS',
        'iso2': 'ISO2',
        'iso3': 'ISO3',
        'un': 'UN',
        'name': 'NAME',
        'area': 'AREA',
        'pop2005': 'POP2005',
        'region': 'REGION',
        'subregion': 'SUBREGION',
        'lon': 'LON',
        'lat': 'LAT',
        'mpoly': 'MULTIPOLYGON',
    }
    user = User.objects.get(id=1)
    world_shp = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                             'data', 'TM_WORLD_BORDERS-0.3.shp'))
    with transaction.commit_on_success():
        lm = LayerMapping(WorldBorder, world_shp, world_mapping,
                          transform=False, encoding='iso-8859-1')
        lm.save(strict=True, verbose=True)
        for country in WorldBorder.objects.all():
            gr = GeospatialReference(
                title=country.name,
                address=country.name,
                geometry=country.mpoly,
                point=Point(country.lon, country.lat),
                description='http://thematicmapping.org/',
                user=user,
            )
            gr.save()
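# Note: transaction.commit_on_success() above is the pre-Django-1.6 API
# (deprecated in 1.6, removed in 1.8). On modern Django the equivalent
# wrapper is transaction.atomic(); a minimal sketch of the same block:
from django.db import transaction

with transaction.atomic():
    lm = LayerMapping(WorldBorder, world_shp, world_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=True)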
def fill_up_db(shapefile, verbose=False):
    from django.contrib.gis.utils import LayerMapping
    mapping = {
        'name': 'NAME',
        'area_code': 'AREA_CODE',
        'descriptio': 'DESCRIPTIO',
        'file_name': 'FILE_NAME',
        'number': 'NUMBER',
        'number0': 'NUMBER0',
        'polygon_id': 'POLYGON_ID',
        'unit_id': 'UNIT_ID',
        'code': 'CODE',
        'hectares': 'HECTARES',
        'area': 'AREA',
        'type_code': 'TYPE_CODE',
        'descript0': 'DESCRIPT0',
        'type_cod0': 'TYPE_COD0',
        'descript1': 'DESCRIPT1',
        'geom': 'POLYGON',
    }
    lm = LayerMapping(Region, shapefile, mapping,
                      transform=True, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
    Region.objects.filter(descriptio__icontains='Welsh Assembly').delete()
    print("Regions imported")
def test05_geography_layermapping(self):
    "Testing LayerMapping support on models with geography fields."
    # There is a similar test in `layermap` that uses the same data set,
    # but the County model here is a bit different.
    if not gdal.HAS_GDAL:
        return
    from django.contrib.gis.utils import LayerMapping

    # Getting the shapefile and mapping dictionary.
    shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), "../", "data"))
    co_shp = os.path.join(shp_path, "counties", "counties.shp")
    co_mapping = {"name": "Name", "state": "State", "mpoly": "MULTIPOLYGON"}

    # Reference county names, number of polygons, and state names.
    names = ["Bexar", "Galveston", "Harris", "Honolulu", "Pueblo"]
    num_polys = [1, 2, 1, 19, 1]  # Number of polygons for each.
    st_names = ["Texas", "Texas", "Texas", "Hawaii", "Colorado"]

    lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique="name")
    lm.save(silent=True, strict=True)

    for c, name, num_poly, state in zip(County.objects.order_by("name"),
                                        names, num_polys, st_names):
        self.assertEqual(4326, c.mpoly.srid)
        self.assertEqual(num_poly, len(c.mpoly))
        self.assertEqual(name, c.name)
        self.assertEqual(state, c.state)
def shp(): """ Load the ESRI shapefile from the Census in the District model. Example usage: >> from congressional_districts import load; load.shp(); """ # Import the database model where we want to store the data from models import District # A crosswalk between the fields in our database and the fields in our # source shapefile shp2db = { 'state_fips_code' : 'STATE', 'district_number' : 'CD', 'lsad' : 'LSAD', 'name' : 'NAME', 'lsad_trans' : 'LSAD_TRANS', 'polygon_4269' : 'POLYGON', } # Load our model, shape, and the map between them into GeoDjango's magic # shape loading function (I also slipped the source coordinate system in # there. The Census says they put everything in NAD 83, which translates # to 4269 in the SRID id system.) lm = LayerMapping(District, shp_file, shp2db, source_srs=4269, encoding='latin-1') # Fire away! lm.save(verbose=False)
def load_parcels(shapefile_path, verbose=False, progress=True,
                 silent=False, stream=None):
    """
    Load parcels from the shapefile at the given path.

    Raises IntegrityError on a duplicate PL, and rolls back the load.
    Returns the number of parcels loaded.
    """
    # Monkeypatch no-op transaction handling into LayerMapping, as we
    # wrap it in a transaction including more operations.
    LayerMapping.TRANSACTION_MODES['none'] = lambda func: func

    # First delete existing parcels
    Parcel.objects.all().delete()

    lm = LayerMapping(
        get_parcel_proxy(datetime.datetime.now()),
        shapefile_path,
        parcel_mapping,
        transform=True,
        transaction_mode='none')
    lm.save(
        strict=True,
        verbose=verbose,
        progress=progress,
        silent=silent,
        stream=stream or sys.stdout)

    return Parcel.objects.count()
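# Hedged usage sketch for load_parcels(): because LayerMapping's own
# transaction handling is disabled above, the caller can wrap the delete and
# the import in one transaction so a duplicate parcel rolls everything back.
# The shapefile path below is a placeholder.
from django.db import IntegrityError, transaction

try:
    with transaction.atomic():
        count = load_parcels('/path/to/parcels.shp')
except IntegrityError:
    count = 0  # nothing was committed; the previous parcels are intact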
def handle(self, *args, **options):
    mapping = {
        'perimeter': 'PERIMETER',
        'ward': 'WARD',
        'alderman': 'ALDERMAN',
        'ward_phone': 'WARD_PHONE',
        'hall_phone': 'HALL_PHONE',
        'hall_office': 'HALL_OFFIC',
        'address': 'ADDRESS',
        'edit_date1': 'EDIT_DATE1',
        'shape_area': 'SHAPE_AREA',
        'shape_len': 'SHAPE_LEN',
        'geom': 'MULTIPOLYGON',
    }
    shp_data = requests.get(SHP)
    if shp_data.status_code != 200:
        raise CommandError('City data portal returned a %s status when downloading'
                           ' Wards shapefile: %s'
                           % (shp_data.status_code, shp_data.content))
    else:
        s = StringIO(shp_data.content)
        z = zipfile.ZipFile(s)
        data_path = os.path.join(os.path.curdir, 'data/shp/wards')
        fname = [f for f in z.namelist() if f.endswith('shp')][0]
        z.extractall(data_path)
        datafile = os.path.join(data_path, fname)
        lm = LayerMapping(Ward, datafile, mapping)
        lm.save(strict=True, progress=True)
def shp(): """ Load the ESRI shapefile from the Census in the County model. Example usage: >> from us_counties import load; load.shp(); """ # Import the database model where we want to store the data from models import County # A crosswalk between the fields in our database and the fields in our # source shapefile shp2db = { 'polygon_4269': 'Polygon', 'state_fips_code': 'STATEFP', 'county_fips_code': 'COUNTYFP', 'fips_code': 'CNTYIDFP', 'county_ansi_code': 'COUNTYNS', 'short_name': 'NAME', 'full_name': 'NAMELSAD', 'csa_code': 'CSAFP', 'msa_code': 'CBSAFP', 'mda_code': 'METDIVFP', 'functional_status': 'FUNCSTAT', } # Load our model, shape, and the map between them into GeoDjango's magic # shape loading function (I also slipped the source coordinate system in # there. The Census says they put everything in NAD 83, which translates # to 4269 in the SRID id system.) lm = LayerMapping(County, shp_file, shp2db, source_srs=4269, encoding='latin-1') # Fire away! lm.save(verbose=False)
def from_shapefile(strict=False, progress=True, verbose=False, **kwargs):
    """
    Load parcel data into the database from the processed shapefile.
    """
    parcel_shp = get_processed_data_file(os.path.join('parcels', 'parcels.shp'))
    mapping = LayerMapping(Parcel, parcel_shp, parcel_mapping, transform=False)
    mapping.save(strict=strict, progress=progress, verbose=verbose, **kwargs)
def county_import(county_shp):
    if ("2015" in county_shp or "2014" in county_shp or "2013" in county_shp
            or "2012" in county_shp or "2011" in county_shp):
        county_mapping = {
            "state_fips_code": "STATEFP",
            "fips_code": "COUNTYFP",
            "county_identifier": "GEOID",
            "name": "NAME",
            "name_and_description": "NAMELSAD",
            "legal_statistical_description": "LSAD",
            "fips_55_class_code": "CLASSFP",
            "feature_class_code": "MTFCC",
            "functional_status": "FUNCSTAT",
            "mpoly": "POLYGON",
        }
    else:
        county_mapping = {
            "state_fips_code": "STATEFP10",
            "fips_code": "COUNTYFP10",
            "county_identifier": "GEOID10",
            "name": "NAME10",
            "name_and_description": "NAMELSAD10",
            "legal_statistical_description": "LSAD10",
            "fips_55_class_code": "CLASSFP10",
            "feature_class_code": "MTFCC10",
            "functional_status": "FUNCSTAT10",
            "mpoly": "POLYGON",
        }
    lm = LayerMapping(County, county_shp, county_mapping, encoding="LATIN1")
    lm.save(verbose=True)
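# Hypothetical invocations of county_import(); the TIGER/Line file names are
# assumptions. Vintages 2011-2015 use the un-suffixed field names, and
# anything else falls through to the 2010-style "*10" names.
county_import("tl_2015_us_county.shp")
county_import("tl_2010_us_county10.shp")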
def handle(self, shapefile, *args, **options): ds = DataSource(shapefile) if len(ds) != 1: raise Exception("Data source should only contain a single layer. Aborting.") layer = ds[0] if len(layer) != 1: raise Exception("Layer should containing ONLY a single feature") if not 'polygon' in layer.geom_type.name.lower(): print layer.geom_type.name raise Exception("Study region must be a multigeometry") if options.get('region_name'): mapping = { 'geometry': 'MULTIPOLYGON', } else: mapping = { 'geometry': 'MULTIPOLYGON', 'name': 'name', } lm = LayerMapping(StudyRegion, shapefile, mapping, transform=False) lm.save() study_region = StudyRegion.objects.order_by('-creation_date')[0] if options.get('region_name'): study_region.name = options.get('region_name') study_region.save() print "" print "Study region created: %s, primary key = %s" % (study_region.name, study_region.pk) print "To switch to this study region, you will need to run 'python manage.py change_study_region %s'" % (study_region.pk, ) print ""
def handle(self, *args, **options):
    for shp in args:
        lm = LayerMapping(self.config[shp]['model'],
                          self.config[shp]['file'],
                          self.config[shp]['mapping'],
                          encoding='iso-8859-1')
        lm.save(strict=True, verbose=True)
        self.stdout.write('Successfully imported "%s"\n' % shp)
def run(verbose=True):
    ds = Dataset(name='2010 Census Tracts',
                 cached=datetime.utcnow().replace(tzinfo=utc),
                 cache_max_age=1000,
                 remote_id_field='GEOID10',
                 name_field='NAMELSAD10',
                 lat_field='INTPTLAT10',
                 lon_field='INTPTLON10',
                 field1_en='Land Area',
                 field1_name='ALAND10',
                 field2_en='Water Area',
                 field2_name='AWATER10')

    tract_mapping = {
        'remote_id': ds.remote_id_field,
        'name': ds.name_field,
        'lat': ds.lat_field,
        'lon': ds.lon_field,
        'field1': ds.field1_name,
        'field2': ds.field2_name,
        'mpoly': 'MULTIPOLYGON',
    }

    tract_shp = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                             'data/tl_2010_36_tract10.shp'))
    lm = LayerMapping(MapPolygon, tract_shp, tract_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
    ds.save()
    MapPolygon.objects.filter(dataset=None).update(dataset=ds)
def handle(self, *args, **options):
    self.stdout.write('Import USA State')
    state_shp = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                             '../../../shpData/southStates.shp'))
    state_mapping = {
        'statefp': 'STATEFP',
        'name': 'NAME',
        'geom': 'POLYGON25D',
    }
    lm = LayerMapping(State, state_shp, state_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=True)
    self.stdout.write('Import OK')
def handle(self, *args, **options):
    datadir = args[0]
    # Need to do this once per state shapefile; state FIPS codes run from
    # 01 to 56 (not every code in that range is assigned).
    for i in range(1, 57):
        # Files are named with zero-padded FIPS codes
        padded_i = str(i).zfill(2)
        shpfile = os.path.abspath(os.path.join(
            os.path.dirname(__file__),
            datadir + '/state_tract_shapefiles/tl_2010_' + padded_i
            + '_tract10/tl_2010_' + padded_i + '_tract10.shp'))
        print("Attempting import of shapefile " + shpfile)
        try:
            lm = LayerMapping(CensusTract, shpfile, censustracts_mapping)
        except OGRException:
            print("Could not open datasource " + shpfile)
        else:
            try:
                lm.save(strict=True, verbose=False, progress=True)
            except IntegrityError:
                print("Already imported " + shpfile)
                from django.db import transaction
                transaction.rollback()
            else:
                print("Imported shapefile " + shpfile)
        print("")
def handle(self, *args, **options):
    verbose = options.get('verbose')
    source = options.get('source')
    if verbose:
        console = self.stdout
    else:
        console = None
    if not source:
        print("Source must be specified")
        return 0
    from django.contrib.gis.gdal import DataSource
    ds = DataSource("PG:"
                    "host=localhost port=5432 dbname=ex1 user=ex1 password=ex1")
    lyr_ind = -1
    for i in range(len(ds)):
        if str(ds[i]) == source:
            lyr_ind = i
            break
    if lyr_ind == -1:
        print("Could not find table in data source")
        return 0
    lm = LayerMapping(
        Country, ds, COUNTRY_MAPPING,
        layer=lyr_ind, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
def pre_save_layer(instance, sender, **kwargs):
    """Save to postgis if there is a datastore."""
    # Abort if a postgis DATABASE is not configured.
    if DYNAMIC_DATASTORE not in settings.DATABASES:
        return

    # Do not process if there is no table.
    base_file = instance.get_base_file()[0]
    if base_file is None or base_file.name != 'shp':
        return

    filename = base_file.file.path

    # Load the table into postgis and get a mapping between fields in the
    # database and fields in the shapefile.
    mapping = file2pgtable(filename, instance.name)

    # Get a dynamic model with the same name as the layer.
    model_description, __ = ModelDescription.objects.get_or_create(
        name=instance.name)

    # Set up the fields with the postgis table
    generate_model(model_description, mapping, db_key=DYNAMIC_DATASTORE)

    # Get the new actual Django model.
    TheModel = model_description.get_django_model()

    # Use LayerMapping to load the layer with GeoDjango
    lm = LayerMapping(TheModel, filename, mapping,
                      encoding=instance.charset,
                      using=DYNAMIC_DATASTORE,
                      transform=None)
    lm.save()
def run(verbose=True):
    lm = LayerMapping(Noeud, tunisia_nodes, node_mapping,
                      transform=True, encoding='utf8')
    lm.save(strict=True, verbose=verbose)
    for i in Noeud.objects.all():
        i.type = slugify(i.type)
        i.label = slugify(i.label)
        i.save()
def run(verbose=True):
    lm = LayerMapping(Voie, tunisia_voie, voie_mapping,
                      transform=True, encoding='utf8')
    lm.save(strict=True, verbose=verbose)
    for i in Voie.objects.all():
        i.label = slugify(i.label)
        i.nom_usage = slugify(i.nom_usage)
        i.save()
def handle(self, shapefile, manipulator, *args, **options):
    try:
        manip_model = get_class(manipulator)
    except Exception:
        raise Exception("The %s model could not be found.\nBe sure to provide the"
                        " complete description:"
                        " <module name>.models.<manipulator model name>" % manipulator)
    ds = DataSource(shapefile)
    if len(ds) != 1:
        raise Exception("Data source should only contain a single layer. Aborting.")
    layer = ds[0]
    if len(layer) != 1:
        raise Exception("Layer should contain ONLY a single feature")
    if 'polygon' not in layer.geom_type.name.lower():
        print(layer.geom_type.name)
        raise Exception("This geometry must be a polygon")
    mapping = {'geometry': 'MULTIPOLYGON'}
    lm = LayerMapping(manip_model, shapefile, mapping)
    lm.save()
    manip_geom = manip_model.objects.order_by('-creation_date')[0]
    if options.get('region_name'):
        manip_geom.name = options.get('region_name')
    else:
        manip_geom.name = layer.name
    manip_geom.save()
    print("")
    print("The manipulator geometry, %s, has been added to the %s model"
          " with primary key = %s" % (manip_geom.name, manipulator, manip_geom.pk))
    print("To switch to this geometry, you will need to run"
          " 'manage.py change_manipulator_geom %s %s'" % (manip_geom.pk, manipulator))
    print("")
def county_import(county_shp, year):
    if year == "2010":
        county_mapping = {
            'state_fips_code': 'STATEFP10',
            'fips_code': 'COUNTYFP10',
            'county_identifier': 'GEOID10',
            'name': 'NAME10',
            'name_and_description': 'NAMELSAD10',
            'legal_statistical_description': 'LSAD10',
            'fips_55_class_code': 'CLASSFP10',
            'feature_class_code': 'MTFCC10',
            'functional_status': 'FUNCSTAT10',
            'mpoly': 'POLYGON',
        }
    else:
        county_mapping = {
            'state_fips_code': 'STATEFP',
            'fips_code': 'COUNTYFP',
            'county_identifier': 'GEOID',
            'name': 'NAME',
            'name_and_description': 'NAMELSAD',
            'legal_statistical_description': 'LSAD',
            'fips_55_class_code': 'CLASSFP',
            'feature_class_code': 'MTFCC',
            'functional_status': 'FUNCSTAT',
            'mpoly': 'POLYGON',
        }
    lm = LayerMapping(County, county_shp, county_mapping, encoding='LATIN1')
    lm.save(verbose=True)
def populate_GADM(model, shp_file, mapping_dict, verbose=False):
    """
    Populates a model of the GADM family.

    Parameters
    ----------
    model : <model>
    shp_file : str
    mapping_dict : dict
    verbose : bool
    """
    try:
        lm = LayerMapping(
            model,
            shp_file,
            mapping_dict,
            transform=False,
        )
        with transaction.atomic():
            lm.save(verbose=verbose)
    except IntegrityError as ie:
        print("Integrity error: %s" % ie)
    except Exception as e:
        print("Error loading GADM data!")
        print(e)
def handle(self, **options):
    shapefile = os.path.join(settings.HERE, os.pardir, 'data',
                             'TM_WORLD_BORDERS-0.3.shp')
    ds = DataSource(shapefile)
    print("%s layer(s)" % len(ds))
    layer = ds[0]
    print('%s contains %s geometries (type: %s)'
          % (layer, len(layer), layer.geom_type))
    print(layer.srs)
    # for feature in layer:
    #     print(feature.get('NAME'), feature.geom.num_points)
    mapping = {
        'fips': 'FIPS',
        'iso2': 'ISO2',
        'iso3': 'ISO3',
        'un': 'UN',
        'name': 'NAME',
        'area': 'AREA',
        'pop2005': 'POP2005',
        'region': 'REGION',
        'subregion': 'SUBREGION',
        'lon': 'LON',
        'lat': 'LAT',
        'mpoly': 'MULTIPOLYGON',
    }
    lm = LayerMapping(Country, shapefile, mapping,
                      transform=True, encoding='iso-8859-1')
    # Antarctica doesn't convert to Spherical Mercator, so skip its feature.
    lm.save(fid_range=(1, 144), verbose=True)
    lm.save(fid_range=(146, 246), verbose=True)
def zipcode_import(zipcode_shp):
    zipcode_mapping = {
        'code': 'ZCTA5CE10',
        'mpoly': 'POLYGON',
    }
    lm = LayerMapping(Zipcode, zipcode_shp, zipcode_mapping)
    lm.save(verbose=True)
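# A minimal model the zipcode mapping above assumes; the field names are
# taken from the mapping keys, while the max_length is a guess.
from django.contrib.gis.db import models

class Zipcode(models.Model):
    code = models.CharField(max_length=10)  # ZCTA5CE10 holds a 5-char code
    mpoly = models.MultiPolygonField()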
def run(verbose=True):
    if len(Neighborhood.objects.all()) == 0:
        lm = LayerMapping(Neighborhood, neighborhood_shape, mapping)
        lm.save(strict=True, verbose=verbose)
        Neighborhood.objects.exclude(city='Seattle').delete()
        print("There are %d neighborhoods in Seattle"
              % len(Neighborhood.objects.all()))
    else:
        print("Neighborhood shapefile already loaded (at least one neighborhood"
              " exists). Delete existing records via the admin tool.")
def run(verbose=True):
    lm = LayerMapping(
        Point, centroidesPredio_shp, centroidesPredio_mapping,
        transform=False, encoding='iso-8859-1',
    )
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(administration, ihe_borehole, ihe_borehole_mapping,
                      transform=True, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(Counties, county_shp, counties_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
from django.contrib.gis.utils import LayerMapping
from models import Pipelines

pipelinesJsonFile = '/Users/kiran/Downloads/pipelines1.json'

mapping = {
    'objectID': 'OBJECTID',
    'pipelineID': 'PIPELIN_ID',
    'size': 'SIZE_',
    'datasource': 'DATASOURCE',
    'useID': 'USE_ID',
    'pipe_type': 'PIPE_TYPE',
    'predecess': 'PREDECESSO',
    'status': 'STATUS',
    'level': 'LEVEL_',
    'shape_length': 'SHAPE_Leng',
    'ownerID': 'OWNER_ID',
    'geomData': 'GEOMETRY',
}

lm = LayerMapping(Pipelines, pipelinesJsonFile, mapping)
lm.save(verbose=True)
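# LayerMapping accepts the GeoJSON file above because it reads sources
# through OGR, which auto-detects the format. A quick sanity check that the
# source's fields match the right-hand side of `mapping` (path reused from
# the snippet above):
from django.contrib.gis.gdal import DataSource

ds = DataSource(pipelinesJsonFile)
print(ds[0].fields)  # should list OBJECTID, PIPELIN_ID, SIZE_, ...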
def run(verbose=True):
    lm = LayerMapping(JeffersonAddress, JeffersonAddress_shp,
                      JeffersonAddress_mapping, transform=False)
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm1 = LayerMapping(TerrebonneAddress, TerrebonneAddress_shp,
                       TerrebonneAddress_mapping, transform=False)
    lm1.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(ShoreLine, shp, shoreline_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(Route, world_shp, mapping, transform=False)
    lm.save(strict=True, verbose=verbose)
def run_estacoes(verbose=True):
    lm = LayerMapping(Estacao, shp, estacao_mapping)
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(FisheryArea, nafo_shp, mapping, transform=False)
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(Buzon, buzon_shp, Buzon_mapping, transform=False)
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(India, india_shp, india_mapping,
                      transform=False, encoding="iso-8859-1")
    lm.save(strict=True, verbose=verbose)
def execute_loading(**kwargs):
    """Master function."""
    good_models = []
    bad_models = []
    cleaned_models = kwargs['cleaned_models']
    canonical_models = kwargs['canonical_models']
    target_app = kwargs['TARGET_APP']
    shapefile_dir = kwargs['shapefile_dir']
    agenda_models = get_models_to_run(cleaned_models=cleaned_models,
                                      canonical_models=canonical_models)
    try:
        for mdl in agenda_models:
            short_name = mdl
            zip_name = short_name + '.zip'
            shapefile_name = short_name + '.shp'
            model_class = canonical_models[mdl]
            mapping_name = short_name + '_mapping'
            mapping = getattr(boundaries.models, mapping_name)

            # Shapefile manipulation
            shapefile_zip = shapefile_dir / zip_name
            shapefile_target = shapefile_dir
            with ZipFile(shapefile_zip, 'r') as zipObj:
                # Extract the full contents of the zip file into the target directory
                zipObj.extractall(path=shapefile_target)
            shapefile_shp = str(shapefile_target / short_name / shapefile_name)
            spatial_data_source = DataSource(shapefile_shp)

            # Clear all existing objects
            while model_class.objects.count() > 0:
                model_class.objects.all().delete()

            lm = LayerMapping(model_class, spatial_data_source, mapping,
                              transform=True)
            lm.save(verbose=True, strict=False)
            good_models.append(model_class)
    except Exception as ex:
        print("There was an error at the command level: {0}".format(ex))
        bad_models.append(model_class)

    if len(bad_models) > 0:
        return False
    else:
        return True
def load_layer_mapping(model, shp_file, mapping, verbose=False, unique=None):
    lm = LayerMapping(model, shp_file, mapping, transform=True, unique=unique)
    lm.save(strict=True, verbose=verbose)
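# Hedged usage sketch for the generic helper above; the model, path, and
# mapping are assumptions.
load_layer_mapping(District, 'data/districts.shp',
                   {'name': 'NAME', 'geom': 'MULTIPOLYGON'},
                   verbose=True, unique='name')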
def run(verbose=True):
    lm = LayerMapping(
        HealthCentre, health_shp, healthcentre_mapping,
        transform=False, encoding='iso-8859-1',
    )
    lm.save(strict=True, verbose=verbose)
def map(model, file, mapping, verbose=True, strict=True, progress=False, step=False):
    data_source = _get_filepath(file)
    lm = LayerMapping(model, data_source, mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(verbose=verbose, strict=strict, progress=progress, step=step)
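# Hedged usage sketch; the model, file name, and mapping dict are
# assumptions. Note that this helper shadows Python's built-in map() inside
# its module, so callers importing it may want an alias.
map(Municipality, 'municipalities.shp',
    {'name': 'NAME', 'geom': 'MULTIPOLYGON'},
    progress=True)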
def run_linkstation(verbose=True):
    lm = LayerMapping(LINKStation, link_shp, linkstation_mapping,
                      transform=False)
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(Soils, soils_shp, soils_mapping,
                      transform=False, encoding='iso-8859-1')
    lm.save(verbose=verbose, strict=True)
def run_dist(verbose=True):
    lm = LayerMapping(
        maharashtra_districts, dist_shp, maharashtra_districts_mapping,
        transform=False, encoding='utf-8',
    )
    lm.save(strict=False, verbose=verbose)
def trun(verbose=True):
    lm = LayerMapping(Towns, towns_shp, towns_mapping, transform=False)
    lm.save(strict=True, verbose=verbose)
def handle(self, *args, **options): layer_mapping = {"code": "Lower_Laye", "boundary": "Unknown"} lm = LayerMapping(PCT, options["filename"], layer_mapping, transform=True) lm.save(strict=True) set_centroids()
def run(verbose=True):
    lm = LayerMapping(
        world, world_shp, mundo_mapping,
        transform=False, encoding='utf-8',
    )
    lm.save(strict=False, verbose=verbose)
def run(verbose=True):
    lm = LayerMapping(Boundary, str(boundary_shp), boundary_mapping,
                      transform=False)
    lm.save(strict=True, verbose=verbose)
def run(verbose=True):
    shp_file = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                            'data/tiger_state_2010/tl_2010_us_state10.shp'))
    lm = LayerMapping(State, shp_file, state_mapping,
                      transform=True, encoding='latin-1')
    lm.save(strict=True, verbose=verbose)
def run_district(verbose=True):
    lm = LayerMapping(
        district_boundaries, district_shp, district_boundaries_mapping,
        transform=False, encoding='utf-8',
    )
    lm.save(strict=False, verbose=verbose)
def run_taluka(verbose=True):
    lm = LayerMapping(
        taluka_boundaries, taluka_shp, taluka_boundaries_mapping,
        transform=False, encoding='utf-8',
    )
    lm.save(strict=False, verbose=verbose)
def run_maha(verbose=True):
    lm = LayerMapping(
        state_maharashtra, maha_shp, maharashtra_mapping,
        transform=False, encoding='utf-8',
    )
    lm.save(strict=False, verbose=verbose)
def process_gpsdata(request):
    point_mapping = {
        'point': 'POINT',
        'timestamp': 'time',
    }
    # Once the form is valid, unzip the submitted zip file
    form = FileUploadForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        # Unzip
        zipgpx = form.save(commit=False)
        zipgpx.filename = request.FILES['filename']  # get the file name
        zipgpx.user = request.user
        zipgpx.save()  # save the file and record it in the database
        zippath = BASE_DIR + '\\analytics\\routefiles' + '\\' + str(zipgpx.filename)
        zip_file = zipfile.ZipFile(zippath)
        # Unzip into the user's folder, which is created at the same time and
        # named after the current zipfile id; if the folder or files already
        # exist, they are overwritten.
        unzippath = (BASE_DIR + '\\analytics\\routefiles\\' + str(request.user)
                     + '\\' + str(zipgpx.id))
        zip_file.extractall(unzippath)
        # Delete the uploaded zipfile
        # zip_file.close()
        # os.remove(unzippath)

        # Save all the unzipped files into the database.
        # If some of the files are .tcx, they are converted to .gpx by tcx2gpx.py
        for (root, dirs, filenames) in os.walk(unzippath):
            for f in filenames:
                if f.endswith(".tcx"):
                    tcx_file = os.path.abspath(os.path.join(
                        os.path.dirname(__file__),
                        'routefiles/' + str(request.user) + '/' + str(zipgpx.id)
                        + '/' + str(f)))
                    output = os.path.abspath(os.path.join(
                        os.path.dirname(__file__),
                        'routefiles/' + str(request.user) + '/' + str(zipgpx.id)
                        + '/' + str(f).replace("tcx", "gpx")))
                    output_set(output)
                    handler = MyHandler()
                    sax.parse(tcx_file, handler)
                    close_fl()

        # Read all the gpx files and save them to the database via LayerMapping
        gpxidlist = []
        for (root, dirs, filenames) in os.walk(unzippath):
            for f in filenames:
                if f.endswith(".gpx"):
                    gpx_file = os.path.abspath(os.path.join(
                        os.path.dirname(__file__),
                        'routefiles/' + str(request.user) + '/' + str(zipgpx.id)
                        + '/' + str(f)))
                    lm_tr = LayerMapping(GPXPoint, gpx_file, point_mapping,
                                         layer='track_points')
                    gpxfile = GPXFile(user=request.user, filename=str(f))
                    gpxfile.save()
                    gpxidlist.append(gpxfile.pk)

                    # Source: https://stackoverflow.com/questions/22324752/django-connect-temporary-pre-save-signal
                    # Fill the user and gpxfile fields on each row while it is
                    # being populated by LayerMapping.
                    def pre_save_callback(sender, instance, *args, **kwargs):
                        instance.user = request.user
                        instance.gpxfile = gpxfile

                    pre_save.connect(pre_save_callback, sender=GPXPoint)
                    try:
                        lm_tr.save(strict=True)
                    finally:
                        pre_save.disconnect(pre_save_callback, sender=GPXPoint)

        # Take the GPXFile and GPXPoint objects only for the current user
        user_gpxfiles = GPXFile.objects.filter(user=request.user)
        gpxfiles_of_uploaded = user_gpxfiles.filter(pk__in=gpxidlist)  # uploaded gpx files
        user_points = GPXPoint.objects.filter(user=request.user)

        # Map-match to the OSM network and update the GPXFile table with
        # matched segments
        matching_segments_returns = matching_segments(request, gpxfiles_of_uploaded,
                                                      user_points)
        file_to_segment_list = matching_segments_returns[0]
        file_to_segment_geojson = matching_segments_returns[1]
        # For checking matching points: {fileid: [[lat, lng], ...], ...}
        file_to_coordslist = matching_segments_returns[2]
        for fileid, latlng_list in file_to_coordslist.items():
            for latlng in latlng_list:
                point = Point(latlng[1], latlng[0], srid=4326)
                p = MatchingPoint(user=request.user,
                                  gpxfile=GPXFile.objects.get(pk=fileid),
                                  point=point)
                p.save()

        # Count segments and save them to the database
        count_segment(request, SegmentCounts, file_to_segment_list)

        # Group files into routes by similarity between segments
        gpxfiles = GPXFile.objects.filter(user=request.user).values('pk', 'segments')
        gfs = {}
        for i in range(len(gpxfiles)):
            fileid = gpxfiles[i]['pk']
            segments = json.loads(gpxfiles[i]['segments'])
            gfs[fileid] = segments
        route_group_list = segments_to_routes(gfs)

        # Extract travel times of the gpx files; yields a dictionary of
        # {routeID: average travel time over files, ...}
        traveltime = extract_traveltime(user_points, user_gpxfiles)

        integrate_route_data(request, Route, SegmentGeoJSON, route_group_list,
                             traveltime, file_to_segment_geojson)
        return render(request, 'analytics/mycommute.html')
    context = {
        "form": form,
    }
    return render(request, 'analytics/upload_files.html', context)
def run_worldborder(verbose=True):
    lm = LayerMapping(WorldBorder, world_shp, world_mapping, transform=False)
    lm.save(strict=True, verbose=verbose)
    for border in WorldBorder.objects.all():
        border.lat_lon = Point(x=border.lon, y=border.lat, srid=4326)
        border.save()
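# On Django >= 2.2 the per-row save() loop above can be batched into a single
# update pass; a sketch assuming the same WorldBorder fields:
borders = list(WorldBorder.objects.all())
for border in borders:
    border.lat_lon = Point(x=border.lon, y=border.lat, srid=4326)
WorldBorder.objects.bulk_update(borders, ['lat_lon'])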