def main():
    """Command-line entry point: import neighborhoods from a shapefile.

    Expects exactly one positional argument (the shapefile path); exits
    via optparser.error() on bad usage.
    """
    opts, args = parse_args()
    if len(args) != 1:
        optparser.error('must give path to shapefile')
    shapefile = args[0]
    if not os.path.exists(shapefile):
        optparser.error('file does not exist')
    layer = DataSource(shapefile)[opts.layer_id]
    metro_name = get_metro()['metro_name'].upper()
    # Ensure the 'neighborhoods' LocationType exists before importing.
    location_type, _ = LocationType.objects.get_or_create(
        name='neighborhood',
        plural_name='neighborhoods',
        scope=metro_name,
        slug='neighborhoods',
        is_browsable=True,
        is_significant=True,
    )
    importer = LocationImporter(layer, location_type)
    num_created = importer.save(
        name_field=opts.name_field, source=opts.source, verbose=opts.verbose)
    if opts.verbose:
        sys.stderr.write('Created %s neighborhoods.\n' % num_created)
def main():
    """Load a shapefile and import each feature as a neighborhood Location."""
    opts, args = parse_args()
    if len(args) != 1:
        optparser.error('must give path to shapefile')
    shapefile = args[0]
    if not os.path.exists(shapefile):
        optparser.error('file does not exist')
    datasource = DataSource(shapefile)
    layer = datasource[opts.layer_id]
    metro = get_metro()
    # Look up (or lazily create) the 'neighborhoods' LocationType.
    neighborhood_type, _ = LocationType.objects.get_or_create(
        name='neighborhood', plural_name='neighborhoods',
        scope=metro['metro_name'].upper(), slug='neighborhoods',
        is_browsable=True, is_significant=True)
    created = LocationImporter(layer, neighborhood_type).save(
        name_field=opts.name_field, source=opts.source, verbose=opts.verbose)
    if opts.verbose:
        print >> sys.stderr, 'Created %s neighborhoods.' % created
def augment_cities(self):
    """Import county subdivisions ("townships") as city-type Locations,
    clipping their shapes so they don't overlap any already-imported city
    and renaming them to "<Name> area" when a same-named city exists.

    Returns the number of Locations created by the importer.
    """
    # Add in county subdivisions, deleting from their shapes any area
    # already covered by a "proper" city.
    fkey = 'cousub'
    starter_cities = Location.objects.filter(location_type=self.city_type)
    # Build the union of all existing city footprints; townships will have
    # this area subtracted from their geometry below.
    within_cities = GEOSGeometry('MULTIPOLYGON EMPTY')
    for city in starter_cities:
        within_cities = within_cities.union(city.location)
    # Remember which pks existed before the import so we can tell townships
    # apart from the pre-existing cities afterwards.
    city_pks = [l.pk for l in starter_cities]
    layer = DataSource('%s/%s.shp' % (self.zip_dir, self.datafiles[fkey]['file_name']))[0]
    loc_importer = LocationImporter(layer,
        self.city_type,
        source=self.datafiles[fkey].get('source', 'Unknown'),
        filter_bounds=False,
        verbose=True)
    loc_created_count = loc_importer.save(self.datafiles[fkey]['name_field'])
    # Everything of this type that wasn't in city_pks was just imported,
    # i.e. it is a county subdivision (township).
    townships = Location.objects.filter(location_type=self.city_type).exclude(pk__in=city_pks)
    city_names = Location.objects.filter(location_type=self.city_type, pk__in=city_pks).values_list('name', flat=True)
    # Case-insensitive name comparison against existing cities.
    city_names = [name.lower() for name in city_names]
    for township in townships:
        # If a same-named city already exists, then rename the township to "Cityname area."
        if township.name.lower() in city_names:
            township.name = '%s area' % capwords(township.name)
        else:
            township.name = capwords(township.name)
        # Slug must track the (possibly renamed) display name.
        township.slug = slugify(township.name)
        # Clip away any area already covered by a proper city.
        township.location = township.location.difference(within_cities)
        township.save()
    return loc_created_count
def import_starter_cities(self):
    """Import "proper" cities (Census places) as city-type Locations.

    Any existing Locations of the city type are deleted first so the
    import can be re-run without duplicating rows. Only features that
    fall within the county boundary are kept.

    Returns the number of Locations created by the importer.
    """
    fkey = 'place'
    type_data = {
        'name': 'Community',
        'plural_name': 'Communities',
        'slug': 'cities',
        'is_browsable': True,
        'is_significant': True,
        'scope': self.metro_name,
    }
    # get_or_create replaces the manual get/DoesNotExist/create dance;
    # `defaults` is applied only when the row has to be created.
    defaults = dict(type_data)
    slug = defaults.pop('slug')
    self.city_type, _ = LocationType.objects.get_or_create(
        slug=slug, defaults=defaults)
    # Clean slate: remove any previously imported city Locations.
    Location.objects.filter(location_type=self.city_type).delete()
    layer = DataSource('%s/%s.shp' % (
        self.zip_dir, self.datafiles[fkey]['file_name']))[0]
    loc_importer = LocationImporter(
        layer,
        self.city_type,
        source=self.datafiles[fkey].get('source', 'Unknown'),
        filter_bounds=True,
        verbose=True)
    # Restrict the import to features inside the county boundary.
    loc_importer.bounds = self.county_location.location
    loc_created_count = loc_importer.save(self.datafiles[fkey]['name_field'])
    return loc_created_count
def import_location(shapefile, layer_number, location_type, name, wkt,
                    filter_bounds, display_order):
    """Import a single named Location from one layer of a shapefile.

    Passing WKT because background functions need all their args to
    be json-serializable.

    Failures are logged (with traceback) rather than raised, since this
    runs as a background task.
    """
    try:
        layer = layer_from_shapefile(shapefile, layer_number)
        importer = LocationImporter(layer, location_type,
                                    filter_bounds=filter_bounds)
        geom = GEOSGeometry(wkt)
        importer.create_location(name, location_type, geom,
                                 display_order=display_order)
    except Exception:
        # Was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception so only real errors are
        # logged and suppressed. Lazy %-args let logging defer formatting.
        logger.exception("Location import of %s failed", name)
def handle(self, **options):
    """Management-command entry point: run the full Columbus County import.

    Fetches the source files, imports the county boundary itself from the
    Columbus County GIS file, then delegates the rest to the importer's
    full_import(). Cleans up the temp download dir unless the user
    supplied their own via options['dir'].
    """
    import shutil  # local import: only needed for temp-dir cleanup

    county_importer = ColumbusCountyImporter(COLUMBUS_COUNTY,
                                             use_tiger=options['tiger'])
    zip_dir = county_importer.fetch_files(options['dir'])
    # Do our own import of the county location, using the Columbus County GIS file
    # instead of the census NC Counties file. Columbus County file has just one
    # feature in it.
    metro_name = get_metro()['metro_name'].upper()
    county_type_data = {
        'name': 'County',
        'plural_name': 'Counties',
        'slug': 'counties',
        'is_browsable': True,
        'is_significant': False,
        'scope': metro_name,
    }
    # Same get-or-create idiom as the other LocationType lookups.
    defaults = dict(county_type_data)
    slug = defaults.pop('slug')
    county_type, _ = LocationType.objects.get_or_create(
        slug=slug, defaults=defaults)
    # Clean slate so re-running the command doesn't duplicate the county.
    Location.objects.filter(location_type=county_type).delete()
    county_layer = DataSource('%s/%s.shp' % (
        zip_dir, county_importer.datafiles['county']['file_name']))[0]
    loc_importer = LocationImporter(county_layer, county_type,
                                    filter_bounds=False, verbose=True)
    loc_importer.save(county_importer.datafiles['county']['name_field'])
    columbus_county_location = Location.objects.get(location_type=county_type)
    # Set the county location in the county_importer and call its
    # full_import to do the rest.
    county_importer.county_location = columbus_county_location
    county_importer.full_import()
    print("Done.")
    if not options['dir']:
        print("Removing temp directory %s" % zip_dir)
        # shutil.rmtree replaces `os.system('rm -rf %s' % zip_dir)`:
        # portable, and safe if the path contains spaces or shell
        # metacharacters. ignore_errors keeps the old best-effort behavior.
        shutil.rmtree(zip_dir, ignore_errors=True)