def test01_valid_shp(self):
    "Testing valid SHP Data Source files."
    for source in ds_list:
        # Loading up the data source
        ds = DataSource(source.ds)

        # Making sure the layer count is what's expected (only 1 layer in a SHP file)
        self.assertEqual(1, len(ds))

        # Making sure GetName works
        self.assertEqual(source.ds, ds.name)

        # Making sure the driver name matches up
        self.assertEqual(source.driver, str(ds.driver))

        # Making sure indexing works
        msg = 'Index out of range when accessing layers in a datasource: %s.'
        with self.assertRaisesMessage(IndexError, msg % len(ds)):
            ds.__getitem__(len(ds))
        with self.assertRaisesMessage(IndexError, 'Invalid OGR layer name given: invalid.'):
            ds.__getitem__('invalid')
def get_layer():
    # This DataSource object is not accessible outside this
    # scope. However, a reference should still be kept alive
    # on the `Layer` returned.
    ds = DataSource(source.ds)
    return ds[0]
def test02_invalid_shp(self):
    "Testing invalid SHP files for the Data Source."
    for source in bad_ds:
        with self.assertRaises(GDALException):
            DataSource(source.ds)
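# For context, a minimal sketch of opening and inspecting an arbitrary
# OGR-supported file with DataSource outside the test suite; the file path
# and helper name are placeholders, not part of the tests above.
from django.contrib.gis.gdal import DataSource, GDALException

def describe_data_source(path):
    """Print layer, feature and field information for an OGR-readable file."""
    try:
        ds = DataSource(path)
    except GDALException as exc:
        print("Could not open %s: %s" % (path, exc))
        return
    for layer in ds:
        print("Layer %s: %d features, geometry %s, SRS %s" % (
            layer.name, len(layer), layer.geom_type.name, layer.srs))
        for feature in layer:
            values = {name: feature.get(name) for name in layer.fields}
            print("  fid=%d %s" % (feature.fid, values))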
def handle_label(self, directory_name, **options): current_generation = Generation.objects.current() new_generation = Generation.objects.new() if not new_generation: raise Exception("No new generation to be used for import!") if not os.path.isdir(directory_name): raise Exception("'%s' is not a directory" % (directory_name,)) os.chdir(directory_name) mapit_type_glob = smart_text("[A-Z0-9][A-Z0-9][A-Z0-9]") if not glob(mapit_type_glob): raise Exception( "'%s' did not contain any directories that look like MapIt types (e.g. O11, OWA, etc.)" % ( directory_name,)) def verbose(s): if int(options['verbosity']) > 1: print(smart_str(s)) verbose("Loading any admin boundaries from " + directory_name) verbose("Finding language codes...") language_code_to_name = {} code_keys = ('two_letter', 'three_letter') for row in get_iso639_2_table(): english_name = getattr(row, 'english_name') for k in code_keys: code = getattr(row, k) if not code: continue language_code_to_name[code] = english_name global_country = Country.objects.get(code='G') # print json.dumps(language_code_to_name, sort_keys=True, indent=4) skip_up_to = None # skip_up_to = 'relation-80370' skipping = bool(skip_up_to) for type_directory in sorted(glob(mapit_type_glob)): verbose("Loading type " + type_directory) if not os.path.exists(type_directory): verbose("Skipping the non-existent " + type_directory) continue verbose("Loading all KML in " + type_directory) files = sorted(os.listdir(type_directory)) total_files = len(files) for i, e in enumerate(files): progress = "[%d%% complete] " % ((i * 100) / total_files,) if skipping: if skip_up_to in e: skipping = False else: continue if not e.endswith('.kml'): verbose("Ignoring non-KML file: " + e) continue m = re.search(r'^(way|relation)-(\d+)-', e) if not m: raise Exception("Couldn't extract OSM element type and ID from: " + e) osm_type, osm_id = m.groups() kml_filename = os.path.join(type_directory, e) verbose(progress + "Loading " + os.path.realpath(kml_filename)) # Need to parse the KML manually to get the ExtendedData kml_data = KML() xml.sax.parse(smart_str(kml_filename), kml_data) useful_names = [n for n in kml_data.data.keys() if not n.startswith('Boundaries for')] if len(useful_names) == 0: raise Exception("No useful names found in KML data") elif len(useful_names) > 1: raise Exception("Multiple useful names found in KML data") name = useful_names[0] print(smart_str(" %s" % name)) if osm_type == 'relation': code_type_osm = CodeType.objects.get(code='osm_rel') elif osm_type == 'way': code_type_osm = CodeType.objects.get(code='osm_way') else: raise Exception("Unknown OSM element type: " + osm_type) ds = DataSource(kml_filename) layer = ds[0] if len(layer) != 1: raise Exception("We only expect one feature in each layer") feat = layer[1] g = feat.geom.transform(4326, clone=True) if g.geom_count == 0: # Just ignore any KML files that have no polygons in them: verbose(' Ignoring that file - it contained no polygons') continue # Nowadays, in generating the data we should have # excluded any "polygons" with less than four points # (the final one being the same as the first), but # just in case: polygons_too_small = 0 for polygon in g: if polygon.num_points < 4: polygons_too_small += 1 if polygons_too_small: message = "%d out of %d polygon(s) were too small" % (polygons_too_small, g.geom_count) verbose(' Skipping, since ' + message) continue g_geos = g.geos if not g_geos.valid: verbose(" Invalid KML:" + kml_filename) fixed_multipolygon = fix_invalid_geos_multipolygon(g_geos) if 
len(fixed_multipolygon) == 0: verbose(" Invalid polygons couldn't be fixed") continue g = fixed_multipolygon.ogr area_type = Type.objects.get(code=type_directory) try: osm_code = Code.objects.get(type=code_type_osm, code=osm_id, area__generation_high__lte=current_generation, area__generation_high__gte=current_generation) except Code.DoesNotExist: verbose(' No area existed in the current generation with that OSM element type and ID') osm_code = None was_the_same_in_current = False if osm_code: m = osm_code.area # First, we need to check if the polygons are # still the same as in the previous generation: previous_geos_geometry = m.polygons.aggregate(Collect('polygon'))['polygon__collect'] if previous_geos_geometry is None: verbose(' In the current generation, that area was empty - skipping') else: # Simplify it to make sure the polygons are valid: previous_geos_geometry = shapely.wkb.loads( str(previous_geos_geometry.simplify(tolerance=0).ewkb)) new_geos_geometry = shapely.wkb.loads(str(g.geos.simplify(tolerance=0).ewkb)) if previous_geos_geometry.almost_equals(new_geos_geometry, decimal=7): was_the_same_in_current = True else: verbose(' In the current generation, the boundary was different') if was_the_same_in_current: # Extend the high generation to the new one: verbose(' The boundary was identical in the previous generation; raising generation_high') m.generation_high = new_generation else: # Otherwise, create a completely new area: m = Area( name=name, type=area_type, country=global_country, parent_area=None, generation_low=new_generation, generation_high=new_generation, ) poly = [g] if options['commit']: m.save() verbose(' Area ID: ' + str(m.id)) if name not in kml_data.data: print(json.dumps(kml_data.data, sort_keys=True, indent=4)) raise Exception("Will fail to find '%s' in the dictionary" % (name,)) old_lang_codes = set(n.type.code for n in m.names.all()) for k, translated_name in kml_data.data[name].items(): language_name = None if k == 'name': lang = 'default' language_name = "OSM Default" else: name_match = re.search(r'^name:(.+)$', k) if name_match: lang = name_match.group(1) if lang in language_code_to_name: language_name = language_code_to_name[lang] if not language_name: continue old_lang_codes.discard(lang) # Otherwise, make sure that a NameType for this language exists: NameType.objects.update_or_create(code=lang, defaults={'description': language_name}) name_type = NameType.objects.get(code=lang) m.names.update_or_create(type=name_type, defaults={'name': translated_name}) if old_lang_codes: verbose('Removing deleted languages codes: ' + ' '.join(old_lang_codes)) m.names.filter(type__code__in=old_lang_codes).delete() # If the boundary was the same, the old Code # object will still be pointing to the same Area, # which just had its generation_high incremented. # In every other case, there's a new area object, # so create a new Code and save it: if not was_the_same_in_current: new_code = Code(area=m, type=code_type_osm, code=osm_id) new_code.save() save_polygons({'dummy': (m, poly)})
def handle_label(self, filename, **options):
    print(filename)
    new_generation = Generation.objects.new()
    if not new_generation:
        raise Exception("No new generation to be used for import!")

    name_type = NameType.objects.get(code='O')
    code_type = CodeType.objects.get(code='gss')

    ds = DataSource(filename)
    layer = ds[0]
    for feat in layer:
        name = feat['NAME'].value
        if not isinstance(name, six.text_type):
            name = name.decode('iso-8859-1')
        print(" %s" % name)
        # Inline (?i) flag moved to the front of the pattern; a trailing flag is an error on Python 3.11+
        name = re.sub(r'(?i)\s*\(DET( NO \d+|)\)\s*', '', name)
        name = re.sub(r'\s+', ' ', name)

        if "P Const" in name:
            area_code = 'SPC'
        elif "PER" in name:
            area_code = 'SPE'
        else:
            raise Exception("Unknown type of area %s" % name)

        ons_code = name_to_code[name]

        if ons_code in self.ons_code_to_shape:
            m, poly = self.ons_code_to_shape[ons_code]
            if options['commit']:
                m_name = m.names.get(type=name_type).name
                if name != m_name:
                    raise Exception("ONS code %s is used for %s and %s" % (ons_code, name, m_name))
            # Otherwise, combine the two shapes for one area
            print(" Adding subsequent shape to ONS code %s" % ons_code)
            poly.append(feat.geom)
            continue

        try:
            m = Area.objects.get(codes__type=code_type, codes__code=ons_code)
        except Area.DoesNotExist:
            m = Area(
                type=Type.objects.get(code=area_code),
                country=Country.objects.get(name='Scotland'),
                generation_low=new_generation,
                generation_high=new_generation,
            )
            if options['commit']:
                m.save()

        poly = [feat.geom]
        if options['commit']:
            m.names.update_or_create(type=name_type, defaults={'name': name})
        if ons_code:
            self.ons_code_to_shape[ons_code] = (m, poly)
            if options['commit']:
                m.codes.update_or_create(type=code_type, defaults={'code': ons_code})

    if options['commit']:
        save_polygons(self.ons_code_to_shape)
def parse_features(self, kml):
    ds = DataSource(kml)
    return ds[0]
def read(self, path):
    '''Return a DataSource for the shapefile at the given path.'''
    shp_file = DataSource(os.path.abspath(path))
    return shp_file
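# A short usage sketch for a reader wrapper like the one above; the owning
# class name and the shapefile path are hypothetical.
reader = ShapefileReader()               # assumed owner class of read()
ds = reader.read('data/boundaries.shp')  # placeholder path
layer = ds[0]
print(layer.name, len(layer), str(layer.geom_type))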
def import_shape(self, store, config, updatefield): """ Import a shapefile, based on a config. Parameters: config -- A dictionary with 'shapepath', 'geolevel', 'name_field', 'region_filters' and 'subject_fields' keys. """ def get_shape_tree(shapefile, feature): shpfields = shapefile.xpath('Fields/Field') builtid = '' for idx in range(0, len(shpfields)): idpart = shapefile.xpath( 'Fields/Field[@type="tree" and @pos=%d]' % idx) if len(idpart) > 0: idpart = idpart[0] part = feature.get(idpart.get('name')) # strip any spaces in the treecode if not (isinstance(part, types.StringTypes)): part = '%d' % part part = part.strip(' ') width = int(idpart.get('width')) builtid = '%s%s' % (builtid, part.zfill(width)) return builtid def get_shape_portable(shapefile, feature): field = shapefile.xpath('Fields/Field[@type="portable"]')[0] portable = feature.get(field.get('name')) if not (isinstance(portable, types.StringTypes)): portable = '%d' % portable return portable def get_shape_name(shapefile, feature): field = shapefile.xpath('Fields/Field[@type="name"]')[0] strname = feature.get(field.get('name')) if type(strname) == str: return strname.decode('latin-1') else: return str(strname) for h, shapefile in enumerate(config['shapefiles']): if not exists(shapefile.get('path')): logger.info( """ ERROR: The filename specified by the configuration: %s Could not be found. Please check the configuration and try again. """, shapefile.get('path')) raise IOError('Cannot find the file "%s"' % shapefile.get('path')) ds = DataSource(shapefile.get('path')) logger.info('Importing from %s, %d of %d shapefiles...', ds, h + 1, len(config['shapefiles'])) lyr = ds[0] logger.info('%d objects in shapefile', len(lyr)) level = Geolevel.objects.get(name=config['geolevel'].lower()[:50]) # Create the subjects we need subject_objects = {} for sconfig in config['subject_fields']: attr_name = sconfig.get('field') foundalias = False for elem in sconfig.getchildren(): if elem.tag == 'Subject': foundalias = True sub = Subject.objects.get( name=elem.get('id').lower()[:50]) if not foundalias: sub = Subject.objects.get( name=sconfig.get('id').lower()[:50]) subject_objects[attr_name] = sub subject_objects['%s_by_id' % sub.name] = attr_name progress = 0.0 logger.info('0% .. ') for i, feat in enumerate(lyr): if (float(i) / len(lyr)) > (progress + 0.1): progress += 0.1 logger.info('%2.0f%% .. ', progress * 100) levels = [level] for region, filter_list in config['region_filters'].iteritems( ): # Check for applicability of the function by examining the config geolevel_xpath = '/DistrictBuilder/GeoLevels/GeoLevel[@name="%s"]' % config[ 'geolevel'] geolevel_config = store.data.xpath(geolevel_xpath) geolevel_region_xpath = '/DistrictBuilder/Regions/Region[@name="%s"]/GeoLevels//GeoLevel[@ref="%s"]' % ( region, geolevel_config[0].get('id')) if len(store.data.xpath(geolevel_region_xpath)) > 0: # If the geolevel is in the region, check the filters for f in filter_list: if f(feat) is True: levels.append( Geolevel.objects.get(name='%s_%s' % (region, level.name))) shape_name = get_shape_name(shapefile, feat) shape_portable_id = get_shape_portable(shapefile, feat) shape_tree_code = get_shape_tree(shapefile, feat) prefetch = Geounit.objects.filter( name=shape_name, geolevel__in=levels, portable_id=shape_portable_id, tree_code=shape_tree_code) should_create = prefetch.count() == 0 if should_create: try: # Store the geos geometry # Buffer by 0 to get rid of any self-intersections which may make this geometry invalid. 
geos = feat.geom.geos.buffer(0) # Coerce the geometry into a MultiPolygon if geos.geom_type == 'MultiPolygon': my_geom = geos elif geos.geom_type == 'Polygon': my_geom = MultiPolygon(geos) simple = my_geom.simplify(tolerance=Decimal( config['tolerance']), preserve_topology=True) if simple.geom_type != 'MultiPolygon': simple = MultiPolygon(simple) center = my_geom.centroid geos = None # Ensure the centroid is within the geometry if not center.within(my_geom): # Get the first polygon in the multipolygon first_poly = my_geom[0] # Get the extent of the first poly first_poly_extent = first_poly.extent min_x = first_poly_extent[0] max_x = first_poly_extent[2] # Create a line through the bbox and the poly center my_y = first_poly.centroid.y centerline = LineString((min_x, my_y), (max_x, my_y)) # Get the intersection of that line and the poly intersection = centerline.intersection(first_poly) if type(intersection) is MultiLineString: intersection = intersection[0] # the center of that line is my within-the-poly centroid. center = intersection.centroid first_poly = first_poly_extent = min_x = max_x = my_y = centerline = intersection = None g = Geounit(geom=my_geom, name=shape_name, simple=simple, center=center, portable_id=shape_portable_id, tree_code=shape_tree_code) g.save() g.geolevel = levels g.save() except: logger.info('Failed to import geometry for feature %d', feat.fid) logger.info(traceback.format_exc()) continue else: g = prefetch[0] g.geolevel = levels g.save() if not config['attributes']: # If we created a new Geounit, we can let this function know that it doesn't # need to check for existing Characteristics, which will speed things up # significantly. self.set_geounit_characteristic(g, subject_objects, feat, not should_create, updatefield) logger.info('100%') if config['attributes']: progress = 0 logger.info("Assigning subject values to imported geography...") logger.info('0% .. ') for h, attrconfig in enumerate(config['attributes']): if not exists(attrconfig.get('path')): logger.info( """ ERROR: The filename specified by the configuration: %s Could not be found. Please check the configuration and try again. """, attrconfig.get('path')) raise IOError('Cannot find the file "%s"' % attrconfig.get('path')) lyr = DataSource(attrconfig.get('path'))[0] for i, feat in enumerate(lyr): if (float(i) / len(lyr)) > (progress + 0.1): progress += 0.1 logger.info('%2.0f%% .. ', progress * 100) gid = get_shape_tree(attrconfig, feat) g = Geounit.objects.filter(tree_code=gid) if g.count() > 0: self.set_geounit_characteristic( g[0], subject_objects, feat, True, updatefield) logger.info('100%')
def getLayer(self):
    ds = DataSource(self.filename)
    layer = ds[0]
    return layer
def importData(file, characterEncoding, format, user, folder):
    start_time = time.time()
    try:
        # manage zipfile
        fd, fname = tempfile.mkstemp(suffix=fileExt_dic[format])
        os.close(fd)
        f = open(fname, "wb")
        for chunk in file.chunks():
            f.write(chunk)
        f.close()
        if not zipfile.is_zipfile(fname):
            os.remove(fname)
            return "Not a valid zip archive.", None

        zip = zipfile.ZipFile(fname)
        hasSuffix = {}
        required_suffixes = suffixes_dic[format]
        for suffix in required_suffixes:
            hasSuffix[suffix] = False
        for info in zip.infolist():
            extension = os.path.splitext(info.filename)[1].lower()
            if extension in required_suffixes:
                hasSuffix[extension] = True
        for suffix in required_suffixes:
            if not hasSuffix[suffix]:
                zip.close()
                os.remove(fname)
                return "Archive missing required %s file." % suffix, None

        zip = zipfile.ZipFile(fname)
        dirname = tempfile.mkdtemp()
        for info in zip.infolist():
            if info.filename.endswith(filenameExt_dic[format]):
                filename = info.filename
            dstFile = os.path.join(dirname, info.filename)
            f = open(dstFile, "wb")
            f.write(zip.read(info.filename))
            f.close()
        zip.close()

        # verify if vectorfile is valid
        ds = DataSource(os.path.join(dirname, filename), encoding=characterEncoding)
        for srcLayer in ds:
            layer = Shapefile.objects.create(name=srcLayer.name,
                                             parent=folder,
                                             srs_wkt=srcLayer.srs,
                                             geom_type=srcLayer.geom_type.name,
                                             encoding=characterEncoding,
                                             created_by=user)
            # define layer's attributes
            attributes = []
            for idx in range(srcLayer.num_fields):
                attr = Attribute(shapefile=layer,
                                 name=srcLayer.fields[idx],
                                 type=srcLayer.field_types[idx].__name__,
                                 width=srcLayer.field_widths[idx],
                                 precision=srcLayer.field_precisions[idx])
                attributes.append(attr)
            Attribute.objects.bulk_create(attributes)

            # store layer's features
            srcSpatialRef = SpatialReference(srcLayer.srs.wkt)
            dstSpatialRef = SpatialReference('EPSG:3857')
            ct = CoordTransform(srcSpatialRef, dstSpatialRef)
            features = []
            for srcFeature in srcLayer:
                srcGeometry = srcFeature.geom
                srcGeometry.transform(ct)
                srcGeometry = srcGeometry.geos
                ## if srcGeometry.coord_dim > 2:
                ##     srcGeometry = srcGeometry.flatten2D()
                srcGeometry = utils.wrapGEOSGeometry(srcGeometry)

                # Store layer's attributes
                hash_attributeValue = {}
                attribute_value = {}
                attributes.sort(key=lambda x: x.name.lower())
                for attr in attributes:
                    try:
                        value = srcFeature.get(attr.name)
                    except DjangoUnicodeDecodeError:
                        return "Wrong character encoding", None
                    if type(value) == datetime.date:
                        value = value.isoformat()
                    hash_attributeValue[attr.name] = value

                feature = Feature(shapefile=layer,
                                  attribute_value=hash_attributeValue,
                                  id_relat=srcFeature.fid)
                setattr(feature, utils.calcGeometryField(srcLayer.geom_type), srcGeometry)
                features.append(feature)
            Feature.objects.bulk_create(features)

        print("Temps final: --- %s seconds ---" % str(time.time() - start_time))
        return None, layer
    except Exception as e:  # the original used the Python 2 form `except Exception, e`
        return e, None
def get_bbox(filename):
    """Return bbox in the format [xmin,xmax,ymin,ymax]."""
    from django.contrib.gis.gdal import DataSource, SRSException
    srid = 4326
    bbox_x0, bbox_y0, bbox_x1, bbox_y1 = -180, -90, 180, 90

    try:
        if is_vector(filename):
            y_min = -90
            y_max = 90
            x_min = -180
            x_max = 180
            datasource = DataSource(filename)
            layer = datasource[0]
            bbox_x0, bbox_y0, bbox_x1, bbox_y1 = layer.extent.tuple
            srs = layer.srs
            try:
                if not srs:
                    raise GeoNodeException(
                        'Invalid Projection. Layer is missing CRS!')
                srs.identify_epsg()
            except SRSException:
                pass
            epsg_code = srs.srid
            # can't find epsg code, then check if bbox is within the 4326 boundary
            if epsg_code is None and (x_min <= bbox_x0 <= x_max and
                                      x_min <= bbox_x1 <= x_max and
                                      y_min <= bbox_y0 <= y_max and
                                      y_min <= bbox_y1 <= y_max):
                # set default epsg code
                epsg_code = '4326'
            elif epsg_code is None:
                # otherwise, stop the upload process
                raise GeoNodeException(
                    "Invalid Layers. "
                    "Needs an authoritative SRID in its CRS to be accepted")

            # eliminate default EPSG srid as it will be added when this function returned
            srid = epsg_code if epsg_code else '4326'
        elif is_raster(filename):
            gtif = gdal.Open(filename)
            gt = gtif.GetGeoTransform()
            prj = gtif.GetProjection()
            srs = osr.SpatialReference(wkt=prj)
            cols = gtif.RasterXSize
            rows = gtif.RasterYSize
            ext = []
            xarr = [0, cols]
            yarr = [0, rows]

            # Get the extent.
            for px in xarr:
                for py in yarr:
                    x = gt[0] + (px * gt[1]) + (py * gt[2])
                    y = gt[3] + (px * gt[4]) + (py * gt[5])
                    ext.append([x, y])
                yarr.reverse()

            # ext has four corner points, get a bbox from them.
            # order is important, so make sure min and max is correct.
            bbox_x0 = min(ext[0][0], ext[2][0])
            bbox_y0 = min(ext[0][1], ext[2][1])
            bbox_x1 = max(ext[0][0], ext[2][0])
            bbox_y1 = max(ext[0][1], ext[2][1])
            srid = srs.GetAuthorityCode(None) if srs else '4326'
    except Exception:
        pass

    return [bbox_x0, bbox_x1, bbox_y0, bbox_y1, f"EPSG:{str(srid)}"]
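# Hypothetical call of get_bbox above, assuming `parks.shp` is a vector file
# that is_vector() recognises:
xmin, xmax, ymin, ymax, srid = get_bbox('parks.shp')
print("Extent %s: x [%s, %s], y [%s, %s]" % (srid, xmin, xmax, ymin, ymax))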
def get_layer_names(self, in_file):
    """
    Gets layer names from a data source.
    """
    ds = DataSource(in_file)
    return map(lambda layer: layer.name, ds)
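# Note that on Python 3 map() returns a lazy iterator. A sketch of an
# equivalent variant returning a concrete list, under the same assumptions
# about the surrounding class:
def get_layer_names_list(self, in_file):
    """Get layer names from a data source as a list."""
    ds = DataSource(in_file)
    return [layer.name for layer in ds]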
def handle_label(self, filename, **options):
    country = Country.objects.get(code='N')
    oa_type = Type.objects.get(code='OUA')
    soa_type = Type.objects.get(code='OLF')
    name_type = NameType.objects.get(code='S')
    code_type = CodeType.objects.get(code='ons')

    current_generation = Generation.objects.current()
    new_generation = Generation.objects.new()
    if not new_generation:
        raise Exception("No new generation to be used for import!")

    # Compile an alphabetical list of NI councils and their wards, OA codes
    # are assigned alphabetically.
    if not self.councils:
        self.councils = Area.objects.filter(type=Type.objects.get(code='LGD')).order_by('name').values()
        for lgd in self.councils:
            lges = Area.objects.filter(parent_area=lgd['id'])
            areas = []
            for lge in lges:
                lgws = Area.objects.filter(parent_area=lge).values()
                areas += lgws
            lgd['wards'] = sorted(areas, key=lambda x: x['name'])

    ds = DataSource(filename)
    layer = ds[0]
    layer_name = str(layer)
    for feat in layer:
        if layer_name == 'soa':
            area_type = soa_type
            ons_code = feat['SOA_CODE'].value
            name = feat['SOA_LABEL'].value.replace('_', ' ')
        elif layer_name == 'OA_ni':
            area_type = oa_type
            ons_code = feat['OA_CODE'].value
            name = 'Output Area %s' % ons_code
        else:
            raise Exception('Bad data passed in')

        council = ord(ons_code[2:3]) - 65
        ward = int(ons_code[4:6]) - 1
        if ward == 98:
            # SOA covers two wards, set parent to council, best we can do
            parent = self.councils[council]['id']
        else:
            parent = self.councils[council]['wards'][ward]['id']

        try:
            m = Area.objects.get(codes__type=code_type, codes__code=ons_code)
            if int(options['verbosity']) > 1:
                print(" Area matched, %s" % (m, ))
        except Area.DoesNotExist:
            print(" New area: %s" % (ons_code))
            m = Area(
                name=name,  # If committing, this will be overwritten by the m.names.update_or_create
                type=area_type,
                country=country,
                parent_area_id=parent,
                generation_low=new_generation,
                generation_high=new_generation,
            )

        if m.generation_high and current_generation and m.generation_high.id < current_generation.id:
            raise Exception(
                "Area %s found, but not in current generation %s" % (m, current_generation))
        m.generation_high = new_generation
        m.parent_area_id = parent
        if options['commit']:
            m.save()

        f = feat.geom
        f.srid = 29902
        poly = [f]

        if options['commit']:
            m.names.update_or_create(type=name_type, defaults={'name': name})
        if ons_code:
            self.ons_code_to_shape[ons_code] = (m, poly)
            if options['commit']:
                m.codes.update_or_create(type=code_type, defaults={'code': ons_code})

    if options['commit']:
        save_polygons(self.ons_code_to_shape)
def handle_label(self, filename, **options): missing_options = [] for k in [ 'generation_id', 'area_type_code', 'name_type_code', 'country_code' ]: if options[k]: continue else: missing_options.append(k) if missing_options: message_start = "Missing arguments " if len( missing_options) > 1 else "Missing argument " message = message_start + " ".join('--{0}'.format(k) for k in missing_options) raise CommandError(message) generation_id = options['generation_id'] area_type_code = options['area_type_code'] name_type_code = options['name_type_code'] country_code = options['country_code'] override_name = options['override_name'] name_field = options['name_field'] if not (override_name or name_field): name_field = 'Name' override_code = options['override_code'] code_field = options['code_field'] code_type_code = options['code_type'] encoding = options['encoding'] or 'utf-8' if name_field and override_name: raise CommandError( "You must not specify both --name_field and --override_name") if code_field and override_code: raise CommandError( "You must not specify both --code_field and --override_code") using_code = (code_field or override_code) if (using_code and not code_type_code) or (not using_code and code_type_code): raise CommandError( "If you want to save a code, specify --code_type and either --code_field or --override_code" ) try: area_type = Type.objects.get(code=area_type_code) except: type_desc = input( 'Please give a description for area type code %s: ' % area_type_code) area_type = Type(code=area_type_code, description=type_desc) if options['commit']: area_type.save() try: name_type = NameType.objects.get(code=name_type_code) except: name_desc = input( 'Please give a description for name type code %s: ' % name_type_code) name_type = NameType(code=name_type_code, description=name_desc) if options['commit']: name_type.save() if country_code != 'first-letter': try: country = Country.objects.get(code=country_code) except: country_name = input( 'Please give the name for country code %s: ' % country_code) country = Country(code=country_code, name=country_name) if options['commit']: country.save() if code_type_code: try: code_type = CodeType.objects.get(code=code_type_code) except: code_desc = input( 'Please give a description for code type %s: ' % code_type_code) code_type = CodeType(code=code_type_code, description=code_desc) if options['commit']: code_type.save() self.stdout.write("Importing from %s" % filename) if not options['commit']: self.stdout.write( '(will not save to db as --commit not specified)') current_generation = Generation.objects.current() new_generation = Generation.objects.get(id=generation_id) def verbose(*args): if int(options['verbosity']) > 1: self.stdout.write(" ".join(str(a) for a in args)) ds = DataSource(filename) layer = ds[0] if (override_name or override_code) and len(layer) > 1: message = ( "Warning: you have specified an override %s and this file contains more than one feature; " "multiple areas with the same %s will be created") if override_name: self.stdout.write(message % ('name', 'name')) if override_code: self.stdout.write(message % ('code', 'code')) for feat in layer: if override_name: name = override_name else: name = None for nf in name_field.split(','): try: name = feat[nf].value break except: pass if name is None: choices = ', '.join(layer.fields) raise CommandError( "Could not find name using name field '%s' - should it be something else? " "It will be one of these: %s. 
Specify which with --name_field" % (name_field, choices)) try: if not isinstance(name, six.text_type): name = name.decode(encoding) except: raise CommandError( "Could not decode name using encoding '%s' - is it in another encoding? " "Specify one with --encoding" % encoding) name = re.sub(r'\s+', ' ', name) if not name: if options['ignore_blank']: continue raise Exception("Could not find a name to use for area") code = None if override_code: code = override_code elif code_field: try: code = feat[code_field].value except: choices = ', '.join(layer.fields) raise CommandError( "Could not find code using code field '%s' - should it be something else? " "It will be one of these: %s. Specify which with --code_field" % (code_field, choices)) self.stdout.write(" looking at '%s'%s" % (name, (' (%s)' % code) if code else '')) if country_code == 'first-letter' and code: try: country = Country.objects.get(code=code[0]) except Country.DoesNotExist: self.stdout.write(" No country found from first-letter") country = None g = None if hasattr(feat, 'geom'): g = feat.geom.transform(settings.MAPIT_AREA_SRID, clone=True) try: if options['new']: # Always want a new area raise Area.DoesNotExist if code: matching_message = "code %s of code type %s" % (code, code_type) areas = Area.objects.filter( codes__code=code, codes__type=code_type).order_by('-generation_high') else: matching_message = "name %s of area type %s" % (name, area_type) areas = Area.objects.filter( name=name, type=area_type).order_by('-generation_high') if len(areas) == 0: verbose(" the area was not found - creating a new one") raise Area.DoesNotExist m = areas[0] verbose(" found the area") if options['preserve']: # Find whether we need to create a new Area: previous_geos_geometry = m.polygons.aggregate( Collect('polygon'))['polygon__collect'] if m.generation_high < current_generation.id: # Then it was missing in current_generation: verbose( " area existed previously, but was missing from", current_generation) raise Area.DoesNotExist elif g is None: if previous_geos_geometry is not None: verbose(" area is now empty") raise Area.DoesNotExist else: verbose(" the area has remained empty") elif previous_geos_geometry is None: # It was empty in the previous generation: verbose(" area was empty in", current_generation) raise Area.DoesNotExist else: # Otherwise, create a new Area unless the # polygons were the same in current_generation: previous_geos_geometry = previous_geos_geometry.simplify( tolerance=0) new_geos_geometry = g.geos.simplify(tolerance=0) create_new_area = not previous_geos_geometry.equals( new_geos_geometry) p = previous_geos_geometry.sym_difference( new_geos_geometry ).area / previous_geos_geometry.area verbose(" change in area is:", "%.03f%%" % (100 * p, )) if create_new_area: verbose( " the area", m, "has changed, creating a new area due to --preserve" ) raise Area.DoesNotExist else: verbose(" the area remained the same") else: # If --preserve is not specified, the code or the name must be unique: if len(areas) > 1: raise Area.MultipleObjectsReturned( "There was more than one area with %s, and --preserve was not specified" % (matching_message, )) except Area.DoesNotExist: m = Area( name=name, type=area_type, country=country, # parent_area=parent_area, generation_low=new_generation, generation_high=new_generation, ) if options['use_code_as_id'] and code: m.id = int(code) # check that we are not about to skip a generation if m.generation_high and current_generation and m.generation_high.id < current_generation.id: raise Exception( 
"Area %s found, but not in current generation %s" % (m, current_generation)) m.generation_high = new_generation if options['fix_invalid_polygons'] and g is not None: # Make a GEOS geometry only to check for validity: geos_g = g.geos if not geos_g.valid: geos_g = fix_invalid_geos_geometry(geos_g) if geos_g is None: self.stdout.write( "The geometry for area %s was invalid and couldn't be fixed" % name) g = None else: g = geos_g.ogr poly = [g] if g is not None else [] if options['commit']: m.save() m.names.update_or_create(type=name_type, defaults={'name': name}) if code: m.codes.update_or_create(type=code_type, defaults={'code': code}) save_polygons({m.id: (m, poly)})
print(layer.srs)  # Get the layer's spatial reference information
name = layer.get_fields('NAME')
print(name)
# for f in layer.get_geoms():
#     print(f)

"""Get feature information: a layer is made up of features. Fetch a feature
directly from the layer, then read its attributes, geometry and so on."""

def featInfo():
    feat = layer[1]
    name = feat.get('NAME')
    print(name)
    print(feat.geom_type)

def fieldInfo():
    field = layer[1]['NAME']
    print(field.name)
    print(field.value)
    print(field.type_name)

if __name__ == "__main__":
    ds = DataSource('../Data/region.shp')
    layer = ds[0]  # Get the first layer in the data source
    # featInfo()
    fieldInfo()
def get_layer(json):
    ds = DataSource(json)
    return ds[0]
def handle(self, *args, **options): try: from osgeo import gdal, ogr, osr # NOQA except ImportError: raise CommandError( 'GDAL Python bindings are not available. Can not proceed.') # Validate arguments if len(args) != 1: raise CommandError('Filename missing. See help') filename = args[0] if not os.path.exists(filename): raise CommandError('File does not exists at: %s' % filename) data_source = DataSource(filename, encoding=options.get('encoding')) field_name = options.get('name-field') field_infrastructure_type = options.get('type-field') field_condition_type = options.get('condition-field') field_structure_type = options.get('structure-field') field_description = options.get('description-field') field_implantation_year = options.get('year-field') sid = transaction.savepoint() try: for layer in data_source: self.stdout.write("- Layer '{}' with {} objects found".format( layer.name, layer.num_feat)) available_fields = layer.fields if (field_name and field_name not in available_fields)\ or (not field_name and not options.get('name-default')): self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_name))) self.stdout.write( self.style.ERROR( u"Set it with --name-field, or set a default value with --name-default" )) break if (field_infrastructure_type and field_infrastructure_type not in available_fields)\ or (not field_infrastructure_type and not options.get('type-default')): self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_infrastructure_type))) self.stdout.write( self.style.ERROR( u"Set it with --type-field, or set a default value with --type-default" )) break if (field_condition_type and field_condition_type not in available_fields)\ or (not field_condition_type and not options.get('condition-default')): self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_condition_type))) self.stdout.write( self.style.ERROR( u"Set it with --condition-field, or set a default value with --condition-default" )) break if (field_structure_type and field_structure_type not in available_fields)\ or (not field_structure_type and not options.get('structure-default')): self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_structure_type))) self.stdout.write( self.style.ERROR( u"Set it with --structure-field, or set a default value with --structure-default" )) break if field_description and field_description not in available_fields: self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_description))) self.stdout.write( self.style.ERROR( u"Set it with --description-field, or set a default value with --description-default" )) break if field_implantation_year and field_implantation_year not in available_fields: self.stdout.write( self.style.ERROR( u"Field '{}' not found in data source.".format( field_implantation_year))) self.stdout.write( self.style.ERROR( u"Set it with --implantation-field, or set a default value with --implantation-default" )) break for feature in layer: feature_geom = feature.geom.transform(settings.API_SRID, clone=True) feature_geom.coord_dim = 2 name = feature.get( field_name ) if field_name in available_fields else options.get( 'name-default') infra = feature.get( field_infrastructure_type ) if field_infrastructure_type in available_fields else options.get( 'type-default') condition = feature.get( field_condition_type ) if field_condition_type in available_fields else options.get( 'condition-default') structure = 
feature.get( field_structure_type ) if field_structure_type in available_fields else options.get( 'structure-default') description = feature.get( field_description ) if field_description in available_fields else options.get( 'description-default') year = int( feature.get(field_implantation_year) ) if field_implantation_year in available_fields else options.get( 'year-default') self.create_signage(feature_geom, name, infra, condition, structure, description, year) transaction.savepoint_commit(sid) self.stdout.write( self.style.NOTICE(u"{} objects created.".format(self.counter))) except Exception: self.stdout.write( self.style.ERROR( u"An error occured, rolling back operations.")) transaction.savepoint_rollback(sid) raise
def __init__(
    self,
    model,
    data,
    mapping,
    layer=0,
    source_srs=None,
    encoding="utf-8",
    transaction_mode="commit_on_success",
    transform=True,
    unique=None,
    using=None,
):
    """
    A LayerMapping object is initialized using the given Model (not an
    instance), a DataSource (or string path to an OGR-supported data file),
    and a mapping dictionary. See the module level docstring for more
    details and keyword argument usage.
    """
    # Getting the DataSource and the associated Layer.
    if isinstance(data, str):
        self.ds = DataSource(data, encoding=encoding)
    else:
        self.ds = data
    self.layer = self.ds[layer]

    self.using = using if using is not None else router.db_for_write(model)
    self.spatial_backend = connections[self.using].ops

    # Setting the mapping & model attributes.
    self.mapping = mapping
    self.model = model

    # Checking the layer -- initialization of the object will fail if
    # things don't check out before hand.
    self.check_layer()

    # Getting the geometry column associated with the model (an
    # exception will be raised if there is no geometry column).
    if connections[self.using].features.supports_transform:
        self.geo_field = self.geometry_field()
    else:
        transform = False

    # Checking the source spatial reference system, and getting
    # the coordinate transformation object (unless the `transform`
    # keyword is set to False)
    if transform:
        self.source_srs = self.check_srs(source_srs)
        self.transform = self.coord_transform()
    else:
        self.transform = transform

    # Setting the encoding for OFTString fields, if specified.
    if encoding:
        # Making sure the encoding exists, if not a LookupError
        # exception will be thrown.
        from codecs import lookup

        lookup(encoding)
        self.encoding = encoding
    else:
        self.encoding = None

    if unique:
        self.check_unique(unique)
        transaction_mode = "autocommit"  # Has to be set to autocommit.
        self.unique = unique
    else:
        self.unique = None

    # Setting the transaction decorator with the function in the
    # transaction modes dictionary.
    self.transaction_mode = transaction_mode
    if transaction_mode == "autocommit":
        self.transaction_decorator = None
    elif transaction_mode == "commit_on_success":
        self.transaction_decorator = transaction.atomic
    else:
        raise LayerMapError("Unrecognized transaction mode: %s" % transaction_mode)
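# For context, a typical LayerMapping invocation as shown in the GeoDjango
# tutorial; the WorldBorder model, its field names and the shapefile path are
# illustrative assumptions rather than part of the code above.
from django.contrib.gis.utils import LayerMapping
from myapp.models import WorldBorder  # hypothetical model with `name` and `mpoly` fields

# Keys are model field names, values are OGR field names (or an OGR geometry type).
world_mapping = {
    'name': 'NAME',
    'mpoly': 'MULTIPOLYGON',
}

lm = LayerMapping(WorldBorder, 'data/world_borders.shp', world_mapping,
                  transform=False, encoding='iso-8859-1')
lm.save(strict=True, verbose=True)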
def handle_label(self, filename, **options): if not options['control']: raise Exception("You must specify a control file") __import__(options['control']) control = sys.modules[options['control']] code_version = CodeType.objects.get(code='gss') name_type = NameType.objects.get(code='O') code_type_os = CodeType.objects.get(code='unit_id') print(filename) current_generation = Generation.objects.current() new_generation = Generation.objects.new() if not new_generation: raise Exception("No new generation to be used for import!") ds = DataSource(filename) layer = ds[0] for feat in layer: name = feat['NAME'].value if not isinstance(name, six.text_type): name = name.decode('iso-8859-1') name = re.sub(r'\s*\(DET( NO \d+|)\)\s*(?i)', '', name) name = re.sub(r'\s+', ' ', name) ons_code = feat['CODE'].value if feat['CODE'].value not in ('999999', '999999999') else None unit_id = str(feat['UNIT_ID'].value) area_code = feat['AREA_CODE'].value patch = self.patch_boundary_line(name, ons_code, unit_id, area_code) if 'ons-code' in patch: ons_code = patch['ons-code'] elif 'unit-id' in patch: unit_id = patch['unit-id'] if area_code == 'NCP': continue # Ignore Non Parished Areas if ons_code in self.ons_code_to_shape: m, poly = self.ons_code_to_shape[ons_code] try: m_name = m.names.get(type=name_type).name except Name.DoesNotExist: m_name = m.name # If running without commit for dry run, so nothing being stored in db if name != m_name: raise Exception("ONS code %s is used for %s and %s" % (ons_code, name, m_name)) # Otherwise, combine the two shapes for one area poly.append(feat.geom) continue if unit_id in self.unit_id_to_shape: m, poly = self.unit_id_to_shape[unit_id] try: m_name = m.names.get(type=name_type).name except Name.DoesNotExist: m_name = m.name # If running without commit for dry run, so nothing being stored in db if name != m_name: raise Exception("Unit ID code %s is used for %s and %s" % (unit_id, name, m_name)) # Otherwise, combine the two shapes for one area poly.append(feat.geom) continue if ons_code: country = ons_code[0] # Hooray! 
elif area_code in ('CED', 'CTY', 'DIW', 'DIS', 'MTW', 'MTD', 'LBW', 'LBO', 'LAC', 'GLA'): country = 'E' else: raise Exception(area_code) try: check = control.check(name, area_code, country, feat.geom, ons_code=ons_code, commit=options['commit']) if check is True: raise Area.DoesNotExist if isinstance(check, Area): m = check try: ons_code = m.codes.get(type=code_version).code except Code.DoesNotExist: ons_code = None elif ons_code: m = Area.objects.get(codes__type=code_version, codes__code=ons_code) elif unit_id: m = Area.objects.get( codes__type=code_type_os, codes__code=unit_id, generation_high=current_generation) m_name = m.names.get(type=name_type).name if name != m_name: raise Exception("Unit ID code %s is %s in DB but %s in SHP file" % (unit_id, m_name, name)) else: raise Exception('Area "%s" (%s) has neither ONS code nor unit ID' % (name, area_code)) if int(options['verbosity']) > 1: print(" Area matched, %s" % (m, )) except Area.DoesNotExist: print(" New area: %s %s %s %s" % (area_code, ons_code, unit_id, name)) m = Area( name=name, # If committing, this will be overwritten by the m.names.update_or_create type=Type.objects.get(code=area_code), country=Country.objects.get(code=country), generation_low=new_generation, generation_high=new_generation, ) if m.generation_high and current_generation and m.generation_high.id < current_generation.id: raise Exception("Area %s found, but not in current generation %s" % (m, current_generation)) m.generation_high = new_generation if options['commit']: m.save() # Make a GEOS geometry only to check for validity: g = feat.geom geos_g = g.geos if not geos_g.valid: print(" Geometry of %s %s not valid" % (ons_code, m)) geos_g = fix_invalid_geos_geometry(geos_g) if geos_g is None: raise Exception("The geometry for area %s was invalid and couldn't be fixed" % name) g = None else: g = geos_g.ogr poly = [g] if options['commit']: m.names.update_or_create(type=name_type, defaults={'name': name}) if ons_code: self.ons_code_to_shape[ons_code] = (m, poly) if options['commit']: m.codes.update_or_create(type=code_version, defaults={'code': ons_code}) if unit_id: self.unit_id_to_shape[unit_id] = (m, poly) if options['commit']: m.codes.update_or_create(type=code_type_os, defaults={'code': unit_id}) if options['commit']: save_polygons(self.unit_id_to_shape) save_polygons(self.ons_code_to_shape)
def handle(self, *args, **options): shapefile_path = options['shapefile_path'][0] ds = DataSource(shapefile_path) source_layer = ds[0] def _get_ethnic_group(groups): groups_names = groups.split(',') for group_name in groups_names: group_name = group_name.strip() if group_name: group, created = EthnicGroup.objects.get_or_create( name=group_name) if created: print('Criado grupo etnico: ' + group_name) yield group def _get_ethnic_subgroup(groups): groups_names = groups.split(',') for group_name in groups_names: group_name = group_name.strip() if group_name: group, created = ProminentEthnicSubGroup.objects.get_or_create( name=group_name) if created: print('Criado subgrupo etnico: ' + group_name) yield group for feat in source_layer: villages_layer, _ = MapLayer.objects.get_or_create( name=feat.get('layer')) villages_layer.save() kwargs = { 'layer': villages_layer, } try: kwargs['name'] = feat.get('name') except: kwargs['name'] = '' # print('Nome da aldeia ausente') try: kwargs['other_names'] = feat.get('other_name') except: kwargs['other_names'] = '' # print('Outras denominações da aldeia ausente') try: kwargs['position_source'] = feat.get('position_source') except: kwargs['position_source'] = '' # print('Fonte da localização da aldeia ausente') try: kwargs['private_comments'] = feat.get('private_co') except: kwargs['private_comments'] = '' # print('Observações restritas da aldeia ausente') try: kwargs['public_comments'] = feat.get('public_com') except: kwargs['public_comments'] = '' # print('Observações da aldeia ausente') indigenous_village = IndigenousVillage(**kwargs) try: position_precision = feat.get('position_p') except: position_precision = None # print('Nome da aldeia ausente') if position_precision == 'Exata' or position_precision == 'Exato' or position_precision == 'Exact': indigenous_village.position_precision = 'exact' elif position_precision == 'Aproximada': indigenous_village.position_precision = 'approximate' elif not position_precision: indigenous_village.position_precision = 'no_info' else: self.stdout.write('Precisão da posição não encontrata: ' + position_precision) indigenous_village.status = 'public' indigenous_village.geometry = feat.geom.wkt try: # try to save as MultPolygon indigenous_village.save() except: self.stdout.write('Falha ao salvar aldeia indígena\n') try: ethnic_groups_raw = feat.get('ethnic_gro') ethnic_groups = _get_ethnic_group(ethnic_groups_raw) for group in ethnic_groups: indigenous_village.ethnic_groups.add(group) except: pass try: ethnic_subgroups_raw = feat.get('prominent_') ethnic_subgroups = _get_ethnic_subgroup(ethnic_subgroups_raw) for ethnic_subgroup in ethnic_subgroups: indigenous_village.prominent_subgroup.add(ethnic_subgroup) except: pass try: guarani_presence = feat.get('guarani_pr') except: guarani_presence = '' if guarani_presence == 'Sim' or guarani_presence == 'Habitada atualmente': # FIXME ver questão da fonte. guarani_presence = GuaraniPresence( presence=True, date=datetime.date(2016, 1, 1), source='Mapa Guarani Continental 2016', village=indigenous_village) guarani_presence.save() elif guarani_presence == 'Não': pass else: self.stdout.write('Falha ao ler Presença Guarani. 
Valor: ' + guarani_presence) try: population = feat.get('population') except: population = None if population: try: population = int(population.split()[0]) population = Population( population=population, date=datetime.date(2016, 1, 1), source='Mapa Guarani Continental 2016', village=indigenous_village) population.save() except: self.stdout.write('Falha ao ler população. População: ' + str(population)) indigenous_village.save() try: project_name = feat.get('PROJETO') if project_name: project, created = Project.objects.get_or_create( name=project_name) project.indigenous_villages.add(indigenous_village) project.save() if created: self.stdout.write('Projeto ' + project_name + ' criado com sucesso!!!') except: pass try: action_field_name = feat.get('AREA_ATU') if action_field_name: action_field, created = ActionField.objects.get_or_create( name=action_field_name) action_field.layers.add(villages_layer) action_field.save() if created: self.stdout.write('Área de atuação ' + action_field_name + ' criada com sucesso!!!') except: pass self.stdout.write('\n') self.stdout.write( 'Camada de aldeias importada com sucesso! Caminho do arquivo fornecido: "%s"' % shapefile_path) self.stdout.write('\n')
def load_geojson_countries(path_geojson=PATH_GEOJSON_COUNTRIES):
    """
    Load countries from a GeoJSON file (https://www.naturalearthdata.com/).

    Args:
        path_geojson:

    Returns:
    """
    try:
        ds = DataSource(path_geojson)
        lyr = next((l for l in ds))
    except Exception:
        LOGGER.warning(f'Impossible to load countries from "{path_geojson}"')
        return

    feat_iso2 = lambda f: iso2 if (iso2 := f.get('ISO_A2')) and iso2 != '-99' else f.get('WB_A2')
    feat_iso3 = lambda f: iso3 if (iso3 := f.get('ISO_A3_EH')) and iso3 != '-99' else f.get('WB_A3')

    code_lang, region_lang = get_language().split('-')
    wiki_en = wikipediaapi.Wikipedia()
    wiki_lang = wikipediaapi.Wikipedia(code_lang)

    for feat in (f for f in lyr if feat_iso2(f) != '-99'):
        cat_colour, created = get_or_set_colour(
            *brewer_colour(feat.get('MAPCOLOR9'), number=9))
        wikidata_id = feat.get('WIKIDATAID')
        wikidata = json_from_url(
            URL_JSON_WIKI_COUNTRY.format(id_country=wikidata_id, language=code_lang))
        site_links = next(
            iter(
                wikidata.get('entities', {}).get(wikidata_id, {}).get('sitelinks', {}).values()),
            {})
        name_country = site_links.get('title', name_en := feat.get('NAME_EN'))
        wiki_page_country = wiki_lang.page(name_country)
        if not wiki_page_country.exists():
            wiki_page_country = wiki_en.page(name_en)
        country, created = get_or_set_country(
            code_country=feat_iso2(feat),
            name_country=name_country,
            pol_border=feat.geom.geos,
            code_iso3_country=feat_iso3(feat),
            name_iso_country=feat.get('NAME'),
            colour=cat_colour,
            wikidata=wikidata,
            wikipedia=wiki_page_country.summary,
            pop_est=feat.get('POP_EST'),
            gdp_md_est=feat.get('GDP_MD_EST'),
            economy=feat.get('ECONOMY'),
            income_grp=feat.get('INCOME_GRP'),
            continent=feat.get('CONTINENT'),
            region_un=feat.get('REGION_UN'),
            subregion=feat.get('SUBREGION'),
            region_wb=feat.get('REGION_WB'),
            update=True,
            idioma='en')
        LOGGER.debug(f'Country {country} loaded')
def convert_shapefile(shapefilename, srid=4674):
    """
    shapefilename: uses the IBGE shapefile naming convention to decide whether the
        file contains UF (states) or Municipios (municipalities),
        e.g. 55UF2500GC_SIR.shp for UF and 55MU2500GC_SIR.shp for Municipios.
    srid: 4674 (SIRGAS 2000 projection)
    """
    # /home/nando/Desktop/IBGE/2010/55MU2500GC_SIR.shp
    ds = DataSource(shapefilename)

    is_uf = shapefilename.upper().find('UF') != -1

    transform_coord = None
    if srid != SRID:
        transform_coord = CoordTransform(SpatialReference(srid), SpatialReference(SRID))

    if is_uf:
        model = UF
    else:
        model = Municipio

    ct = 0
    for f in ds[0]:
        # 3D to 2D if needed
        if f.geom.coord_dim != 2:
            f.geom.coord_dim = 2

        # convert to MultiPolygon if needed
        if isinstance(f.geom, Polygon):
            g = OGRGeometry(OGRGeomType('MultiPolygon'))
            g.add(f.geom)
        else:
            g = f.geom

        # transform coordinates if needed
        if transform_coord:
            g.transform(transform_coord)

        # force 2D
        g.coord_dim = 2
        kwargs = {}

        if is_uf:
            kwargs['nome'] = capitalize_name(unicode(f.get(CAMPO_NOME_UF), 'latin1'))
            kwargs['geom'] = g.ewkt
            kwargs['id_ibge'] = f.get(CAMPO_GEOCODIGO_UF)
            kwargs['regiao'] = capitalize_name(unicode(f.get(CAMPO_REGIAO_UF), 'latin1'))
            kwargs['uf'] = UF_SIGLAS_DICT.get(kwargs['id_ibge'])
        else:
            kwargs['nome'] = capitalize_name(unicode(f.get(CAMPO_NOME_MU), 'latin1'))
            kwargs['geom'] = g.ewkt
            kwargs['id_ibge'] = f.get(CAMPO_GEOCODIGO_MU)
            kwargs['uf'] = UF.objects.get(pk=f.get(CAMPO_GEOCODIGO_MU)[:2])
            kwargs['uf_sigla'] = kwargs['uf'].uf
            kwargs['nome_abreviado'] = slugify(kwargs['nome'])
            # try to fix duplicate names (around 242 names are repeated)
            # by appending the state abbreviation at the end
            if Municipio.objects.filter(nome_abreviado=kwargs['nome_abreviado']).count() > 0:
                kwargs['nome_abreviado'] = u'%s-%s' % (kwargs['nome_abreviado'], kwargs['uf_sigla'].lower())

        instance = model(**kwargs)
        instance.save()

        ct += 1

    print(ct, (is_uf and "Unidades Federativas criadas" or "Municipios criados"))
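# Hypothetical usage of convert_shapefile above with an IBGE municipalities
# shapefile in SIRGAS 2000:
convert_shapefile('/home/nando/Desktop/IBGE/2010/55MU2500GC_SIR.shp', srid=4674)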
def __init__(self, gpx_file):
    self.ds = DataSource(gpx_file)
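# A sketch of how a wrapper like the one above might pull track geometries out
# of a GPX data source; the layer name 'tracks' follows OGR's GPX driver
# convention, and this method is an assumption, not part of the original class.
def track_geometries(self):
    tracks = [layer for layer in self.ds if layer.name == 'tracks']
    if not tracks:
        return []
    return [feature.geom for feature in tracks[0]]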
def file2pgtable(infile, table_name, srid=4326):
    """Create table and fill it from file."""
    table_name = table_name.lower()
    datasource = DataSource(infile)
    layer = datasource[0]

    # build the CREATE TABLE statement
    geo_type = str(layer.geom_type).upper()
    coord_dim = 0
    # odd, but MapInfo polygon layers are not detected
    if geo_type == 'UNKNOWN' and (infile.endswith('.TAB') or infile.endswith('.tab')
                                  or infile.endswith('.MIF') or infile.endswith('.mif')):
        geo_type = 'POLYGON'
    if has_datastore and not geo_type.startswith('MULTI') and geo_type != 'POINT':
        geo_type = 'MULTI' + geo_type
    pk_name = 'fid' if has_datastore else 'id'
    geo_column_name = 'the_geom' if has_datastore else 'geom'
    sql = 'BEGIN;'
    # Drop table if exists
    sql += 'DROP TABLE IF EXISTS %s;' % (table_name)
    sql += "CREATE TABLE %s(" % (table_name)
    first_feature = True
    # Mapping from postgis table to shapefile fields.
    mapping = {}
    for feature in layer:
        # Getting the geometry for the feature.
        geom = feature.geom
        if geom.geom_count > 1:
            if not geo_type.startswith('MULTI'):
                geo_type = 'MULTI' + geo_type
        if geom.coord_dim > coord_dim:
            coord_dim = geom.coord_dim
        if coord_dim > 2:
            coord_dim = 2
        if first_feature:
            first_feature = False
            fields = []
            fields.append(pk_name + " serial NOT NULL PRIMARY KEY")
            if has_datastore:
                mapping[pk_name] = pk_name
            fieldnames = []
            for field in feature:
                field_name = get_model_field_name(field.name)
                if field.type == 0:
                    # integer
                    fields.append(field_name + " integer")
                    fieldnames.append(field_name)
                elif field.type == 2:
                    # float
                    fields.append(field_name + " double precision")
                    fieldnames.append(field_name)
                elif field.type == 4:
                    fields.append(field_name + " character varying(%s)" % (field.width))
                    fieldnames.append(field_name)
                elif field.type == 8 or field.type == 9 or field.type == 10:
                    fields.append(field_name + " date")
                    fieldnames.append(field_name)
                mapping[field_name] = field.name
    sql += ','.join(fields)
    sql += ');'
    sql += "SELECT AddGeometryColumn('public','%s','geom',%d,'%s',%d);" % \
        (table_name, srid, geo_type, coord_dim)
    sql += 'END;'

    # the table is created; now the data must be inserted
    fieldnames.append(geo_column_name)
    mapping[geo_column_name] = geo_type

    # Running the sql
    if not has_datastore:
        execute(sql)

    return mapping
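# Hypothetical usage: create a "parcels" table from a shapefile and keep the
# returned column mapping for a later data load.
mapping = file2pgtable('/tmp/parcels.shp', 'parcels', srid=4326)
print(mapping)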
def handle(self, *args, **options): # pylint: disable=R0912,R0914,R0915 if "shape_file" not in options: raise CommandError(_("Missing locations shape file path")) else: try: path = os.path.abspath(options["shape_file"]) except Exception as error: raise CommandError(_("Error: %(msg)s" % {"msg": error})) else: code_field = options["code_field"] level = options["level"] name_field = options["name_field"] parent_field = options["parent_field"] parent_level = options["parent_level"] skip_parent = options.get("skip_parent") parent_code_field = options.get("parent_code") structures_field = options["structures_field"] skip_field = options.get("skip_field") skip_value = options.get("skip_value") if skip_field and not skip_value: raise CommandError(_("Error: please provide skip value")) skip_value = int( skip_value) if skip_value is not None else None count = exception_raised = failed = skipped = updated = 0 srs = SpatialReference("+proj=longlat +datum=WGS84 +no_defs") data_source = DataSource(path) layer = data_source[0] for feature in layer: # skip if skip_field and skip_value is not None: if feature.get(skip_field) == skip_value: skipped += 1 self.stdout.write("Skipping %s - skip." % feature.get(name_field)) continue try: is_polygon = isinstance(feature.geom, geometries.Polygon) except GDALException as error: self.stderr.write("Error: %s" % error) continue if is_polygon: geom = geometries.MultiPolygon("MULTIPOLYGON", srs=srs) geom.add(feature.geom.transform(srs, True)) else: geom = feature.geom.transform(srs, True) name = feature.get(name_field) if code_field == "fid": code = feature.fid + 1 else: code = int(feature.get(code_field)) if code == 0: skipped += 1 self.stdout.write("Skipping %s - code." % feature.get(name_field)) continue if bytearray( structures_field.encode("utf8")) in feature.fields: structures = int(feature.get(structures_field)) else: structures = 0 if bytearray( parent_field.encode("utf8")) in feature.fields: parent_name = feature.get(parent_field) elif parent_field in feature.fields: parent_name = feature.get(parent_field) else: parent_name = name name = name.strip() parent_name = parent_name.strip() if skip_parent == "yes": self.stdout.write(f"No parent for {name}.") parent = None else: if parent_code_field: parent_code = feature.get(parent_code_field) parent = get_parent_by_code( parent_code, parent_level) else: parent = get_parent(geom, parent_level, parent_name) if not parent: self.stdout.write( f"Skipping {name_field} - parent.") skipped += 1 continue try: target = feature.get("TARGET") in [1, 2] except IndexError: target = True try: location = Location.objects.get(name=name, level=level, parent=parent) except Location.DoesNotExist: try: Location.objects.create( name=name, code=code, structures=structures, level=level, parent=parent, geom=geom.wkt, target=target, ) except IntegrityError: failed += 1 except Exception: # pylint: disable=broad-except exception_raised += 1 else: count += 1 else: if level == "RHC" and code != location.code: location.code = code location.target = target location.geom = geom.wkt try: location.save() except IntegrityError: pass else: updated += 1 self.stdout.write( "Created %s locations, %s updated, failed %s, skipped %s, " "error %s" % (count, updated, failed, skipped, exception_raised))
def test_ds_input_pathlib(self):
    test_shp = Path(get_ds_file("test_point", "shp"))
    ds = DataSource(test_shp)
    self.assertEqual(len(ds), 1)
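# Outside the test, the str/Path equivalence can be exercised directly;
# the shapefile path is a placeholder.
from pathlib import Path
from django.contrib.gis.gdal import DataSource

shp = "data/test_point.shp"  # placeholder path
assert DataSource(shp).name == DataSource(Path(shp)).name  # both open the same data source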
def handle(self, *args, **options): verbosity = options.get('verbosity') try: from osgeo import gdal, ogr, osr # NOQA except ImportError: raise CommandError( 'GDAL Python bindings are not available. Cannot proceed.') filename = options['point_layer'] if not os.path.exists(filename): raise CommandError('File does not exist at: %s' % filename) data_source = DataSource(filename, encoding=options.get('encoding')) field_name = options.get('name_field') field_infrastructure_type = options.get('type_field') field_condition_type = options.get('condition_field') field_structure_type = options.get('structure_field') field_description = options.get('description_field') field_implantation_year = options.get('year_field') field_eid = options.get('eid_field') sid = transaction.savepoint() structure_default = options.get('structure_default') try: for layer in data_source: if verbosity >= 2: self.stdout.write( "- Layer '{}' with {} objects found".format( layer.name, layer.num_feat)) available_fields = layer.fields if (field_infrastructure_type and field_infrastructure_type not in available_fields)\ or (not field_infrastructure_type and not options.get('type_default')): self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_infrastructure_type))) self.stdout.write( self.style.ERROR( u"Set it with --type-field, or set a default value with --type-default" )) break if (field_name and field_name not in available_fields)\ or (not field_name and not options.get('name_default')): self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_name))) self.stdout.write( self.style.ERROR( u"Set it with --name-field, or set a default value with --name-default" )) break if field_condition_type and field_condition_type not in available_fields: self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_condition_type))) self.stdout.write( self.style.ERROR( u"Change your --condition-field option")) break if field_structure_type and field_structure_type not in available_fields: self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_structure_type))) self.stdout.write( self.style.ERROR( u"Change your --structure-field option")) break elif not field_structure_type and not structure_default: structure = default_structure() else: try: structure = Structure.objects.get( name=structure_default) if verbosity > 0: self.stdout.write( u"Infrastructures will be linked to {}".format( structure)) except Structure.DoesNotExist: self.stdout.write( u"Structure {} set in options doesn't exist".format(structure_default)) break if field_description and field_description not in available_fields: self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_description))) self.stdout.write( self.style.ERROR( u"Change your --description-field option")) break if field_implantation_year and field_implantation_year not in available_fields: self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_implantation_year))) self.stdout.write( self.style.ERROR("Change your --year-field option")) break if field_eid and field_eid not in available_fields: self.stdout.write( self.style.ERROR( "Field '{}' not found in data source.".format( field_eid))) self.stdout.write( self.style.ERROR(u"Change your --eid-field option")) break for feature in layer: feature_geom = feature.geom.transform(settings.API_SRID, clone=True) feature_geom.coord_dim = 2 name = feature.get( field_name ) if field_name in available_fields else options.get( 'name_default') if feature_geom.geom_type == 'MultiPoint': self.stdout.write( self.style.NOTICE( u"This object is a MultiPoint: %s" % name)) if len(feature_geom) < 2: feature_geom = feature_geom[0].geos else: raise CommandError( "One of your geometries is a MultiPoint with more than one point" ) type = feature.get( field_infrastructure_type ) if field_infrastructure_type in available_fields else options.get( 'type_default') if field_condition_type in available_fields: condition = feature.get(field_condition_type) else: condition = options.get('condition_default') structure = Structure.objects.get(name=feature.get(field_structure_type)) \ if field_structure_type in available_fields else structure description = feature.get( field_description ) if field_description in available_fields else options.get( 'description_default') year = int( feature.get(field_implantation_year) ) if field_implantation_year in available_fields and feature.get( field_implantation_year).isdigit() else options.get( 'year_default') eid = feature.get( field_eid) if field_eid in available_fields else None self.create_infrastructure(feature_geom, name, type, condition, structure, description, year, verbosity, eid) transaction.savepoint_commit(sid) if verbosity >= 2: self.stdout.write( self.style.NOTICE(u"{} objects created.".format( self.counter))) except Exception: self.stdout.write( self.style.ERROR( u"An error occurred, rolling back operations.")) transaction.savepoint_rollback(sid) raise
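# A condensed sketch of the savepoint pattern used by the command above: all
# rows are created inside one savepoint, which is rolled back wholesale if any
# feature fails. The model argument and the "NAME" field are placeholders.
from django.contrib.gis.gdal import DataSource
from django.db import transaction


def import_features(path, model, name_field="NAME"):
    sid = transaction.savepoint()
    created = 0
    try:
        for layer in DataSource(path):
            for feature in layer:
                model.objects.create(
                    name=feature.get(name_field),
                    geom=feature.geom.transform(4326, clone=True).wkt,
                )
                created += 1
        transaction.savepoint_commit(sid)
    except Exception:
        # Undo everything created since the savepoint before re-raising.
        transaction.savepoint_rollback(sid)
        raise
    return created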
def test03a_layers(self): "Testing Data Source Layers." for source in ds_list: ds = DataSource(source.ds) # Incrementing through each layer, this tests DataSource.__iter__ for layer in ds: self.assertEqual(layer.name, source.name) self.assertEqual(str(layer), source.name) # Making sure we get the number of features we expect self.assertEqual(len(layer), source.nfeat) # Making sure we get the number of fields we expect self.assertEqual(source.nfld, layer.num_fields) self.assertEqual(source.nfld, len(layer.fields)) # Testing the layer's extent (an Envelope), and its properties self.assertIsInstance(layer.extent, Envelope) self.assertAlmostEqual(source.extent[0], layer.extent.min_x, 5) self.assertAlmostEqual(source.extent[1], layer.extent.min_y, 5) self.assertAlmostEqual(source.extent[2], layer.extent.max_x, 5) self.assertAlmostEqual(source.extent[3], layer.extent.max_y, 5) # Now checking the field names. flds = layer.fields for f in flds: self.assertIn(f, source.fields) # Negative FIDs are not allowed. with self.assertRaisesMessage( IndexError, "Negative indices are not allowed on OGR Layers."): layer.__getitem__(-1) with self.assertRaisesMessage(IndexError, "Invalid feature id: 50000."): layer.__getitem__(50000) if hasattr(source, "field_values"): # Testing `Layer.get_fields` (which uses Layer.__iter__) for fld_name, fld_value in source.field_values.items(): self.assertEqual(fld_value, layer.get_fields(fld_name)) # Testing `Layer.__getitem__`. for i, fid in enumerate(source.fids): feat = layer[fid] self.assertEqual(fid, feat.fid) # Maybe this should be in the test below, but we might # as well test the feature values here while in this # loop. for fld_name, fld_value in source.field_values.items(): self.assertEqual(fld_value[i], feat.get(fld_name)) msg = ( "Index out of range when accessing field in a feature: %s." ) with self.assertRaisesMessage(IndexError, msg % len(feat)): feat.__getitem__(len(feat)) with self.assertRaisesMessage( IndexError, "Invalid OFT field name given: invalid."): feat.__getitem__("invalid")
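# A minimal sketch of the Layer API exercised by the test above: iterate the
# layers of a data source, read the extent and field metadata, and pull an
# entire column with get_fields(). "cities.shp" and "NAME" are placeholders.
from django.contrib.gis.gdal import DataSource

ds = DataSource("cities.shp")
for layer in ds:
    print(layer.name, layer.num_feat, layer.geom_type, layer.srs)
    print("Extent:", layer.extent.tuple)  # (min_x, min_y, max_x, max_y)
    print("Fields:", list(zip(layer.fields, layer.field_widths)))
    # get_fields() returns the value of one field for every feature at once.
    print(layer.get_fields("NAME")[:5])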
def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=None, multi_geom=False, name_field=None, imports=True, decimal=False, blank=False, null=False): """ Helper routine for `ogrinspect` that generates GeoDjango models corresponding to the given data source. See the `ogrinspect` docstring for more details. """ # Getting the DataSource if isinstance(data_source, six.string_types): data_source = DataSource(data_source) elif isinstance(data_source, DataSource): pass else: raise TypeError( 'Data source parameter must be a string or a DataSource object.') # Getting the layer corresponding to the layer key and getting # a string listing of all OGR fields in the Layer. layer = data_source[layer_key] ogr_fields = layer.fields # Creating lists from the `null`, `blank`, and `decimal` # keyword arguments. def process_kwarg(kwarg): if isinstance(kwarg, (list, tuple)): return [s.lower() for s in kwarg] elif kwarg: return [s.lower() for s in ogr_fields] else: return [] null_fields = process_kwarg(null) blank_fields = process_kwarg(blank) decimal_fields = process_kwarg(decimal) # Gets the `null` and `blank` keywords for the given field name. def get_kwargs_str(field_name): kwlist = [] if field_name.lower() in null_fields: kwlist.append('null=True') if field_name.lower() in blank_fields: kwlist.append('blank=True') if kwlist: return ', ' + ', '.join(kwlist) else: return '' # For those wishing to disable the imports. if imports: yield '# This is an auto-generated Django model module created by ogrinspect.' yield 'from django.contrib.gis.db import models' yield '' yield 'class %s(models.Model):' % model_name for field_name, width, precision, field_type in zip( ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types): # The model field name. mfield = field_name.lower() if mfield[-1:] == '_': mfield += 'field' # Getting the keyword args string. kwargs_str = get_kwargs_str(field_name) if field_type is OFTReal: # By default OFTReals are mapped to `FloatField`, however, they # may also be mapped to `DecimalField` if specified in the # `decimal` keyword. if field_name.lower() in decimal_fields: yield ' %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)' % ( mfield, width, precision, kwargs_str) else: yield ' %s = models.FloatField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTInteger: yield ' %s = models.IntegerField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTInteger64: yield ' %s = models.BigIntegerField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTString: yield ' %s = models.CharField(max_length=%s%s)' % ( mfield, width, kwargs_str) elif field_type is OFTDate: yield ' %s = models.DateField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTDateTime: yield ' %s = models.DateTimeField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTTime: yield ' %s = models.TimeField(%s)' % (mfield, kwargs_str[2:]) else: raise TypeError('Unknown field type %s in %s' % (field_type, mfield)) # TODO: Autodetection of multigeometry types (see #7218). gtype = layer.geom_type if multi_geom: gtype.to_multi() geom_field = gtype.django # Setting up the SRID keyword string. if srid is None: if layer.srs is None: srid_str = 'srid=-1' else: srid = layer.srs.srid if srid is None: srid_str = 'srid=-1' elif srid == 4326: # WGS84 is already the default. 
srid_str = '' else: srid_str = 'srid=%s' % srid else: srid_str = 'srid=%s' % srid yield ' %s = models.%s(%s)' % (geom_name, geom_field, srid_str) if name_field: yield '' yield ' def __%s__(self): return self.%s' % ('str' if six.PY3 else 'unicode', name_field)
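# A short sketch of the public entry point that wraps the helper above:
# ogrinspect() from django.contrib.gis.utils joins the generated lines into a
# ready-to-paste model definition. "districts.shp", "District", and "NAME" are
# placeholders.
from django.contrib.gis.utils import ogrinspect

model_source = ogrinspect(
    "districts.shp",
    "District",
    srid=4326,
    multi_geom=True,     # promote the geometry field to its Multi* variant
    name_field="NAME",   # also emit a __str__/__unicode__ returning this field
)
print(model_source)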
def handle(self, *args, **options): shape_file = args[0] gateway = args[1] skipped_points = [] imported_points = 0 try: gateway_type = GatewayType.objects.get(name=gateway) ds = DataSource(shape_file) print('{} Layers: {}'.format(ds, len(ds))) lyr = ds[0] print('Layer 1: {} {} {}'.format(lyr, len(lyr), lyr.geom_type)) print(lyr.srs) print('Fields:') for field in lyr.fields: print(field) for feat in lyr: field_values = location_mapping.copy() for key, value in field_values.items(): field_values[key] = feat.get(value) if not field_values['p_code'] or field_values['p_code'] == '0': print('No P_Code for location: {}'.format(field_values)) skipped_points.append(field_values) continue print("\nImporting values: {}".format(field_values)) cad_code = feat.get('CAD_CODE_1').split('.')[0] try: locality = Locality.objects.get(cad_code=cad_code) except Locality.DoesNotExist: print("Locality does not exist with Cad Code: {}".format(cad_code)) continue location, created = Location.objects.get_or_create( #p_code=field_values['p_code'], name=field_values['name'], gateway=gateway_type, locality=locality ) location.name = field_values['name'] location.p_code = str(field_values['p_code']) location.longitude = field_values['longitude'] location.latitude = field_values['latitude'] location.point = feat.geom.wkt location.save() print("Location {} {}".format( location.name, "created" if created else 'updated' )) imported_points += 1 except Exception as exp: raise CommandError(exp) print("{} points skipped".format(len(skipped_points))) print("{} points imported".format(imported_points))
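# A hedged sketch of the declarative alternative to the loop above:
# LayerMapping maps OGR field names onto model fields and converts the
# geometries itself. The Location import, the field names, and the shapefile
# path are placeholders, not the project's actual configuration.
from django.contrib.gis.utils import LayerMapping

from myapp.models import Location  # placeholder import

mapping = {
    "name": "NAME",
    "p_code": "P_CODE",
    "point": "POINT",  # geometry field: the OGR geometry type goes on the right
}

lm = LayerMapping(Location, "locations.shp", mapping, transform=True)
lm.save(strict=True, verbose=True)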
def handle(self, *args, **options): logger.info( 'Retrieving all external unit identifiers from the database...') # Get all external datasource records from the database unit_identifiers = UnitIdentifier.objects.filter(namespace='lipas') # Build a lipas_id-indexed dictionary of units units_by_lipas_id = {} for unit_identifier in unit_identifiers: units_by_lipas_id[int( unit_identifier.value)] = unit_identifier.unit logger.info('Retrieved {} objects.'.format(len(unit_identifiers))) # Get path and area data from the Lipas WFS logger.info('Retrieving geodata from Lipas...') wfs = MiniWFS(WFS_BASE) max_features = options.get('max_features') muni_filter = options.get('muni_id') if muni_filter is not None: muni_filter = ' OR '.join( ["kuntanumero = '{}'".format(id_) for id_ in muni_filter]) layers = {} for key, val in TYPES.items(): url = wfs.get_feature(type_name=val, max_features=max_features, cql_filter=muni_filter) layers[key] = DataSource(url)[0] logger.info('Retrieved {} path and {} area features.'.format( len(layers['paths']), len(layers['areas']))) # The Lipas database stores paths and areas as different features # which have a common id. We want to store the paths as one # multi-collection which includes all the small subpaths or areas. # This is the dict which will contain multi-collections hashed by # their Lipas id. geometries = {} # Iterate through Lipas layers and features logger.info('Processing Lipas geodata...') for layer in layers.values(): for feature in layer: logger.debug(feature.fid) # Check if the feature's id is in the dict we built earlier lipas_id = feature['id'].value unit = units_by_lipas_id.get(lipas_id) if not unit: logger.debug('id not found: {}'.format(lipas_id)) continue logger.debug('found id: {}'.format(lipas_id)) def clean_name(name): import re name = name.lower().strip() name = re.sub(r'\s{2,}', ' ', name) return name if clean_name(feature['nimi_fi'].value) != clean_name( unit.name_fi): logger.warning( 'id {} has non-matching name fields (Lipas: {}, db: {}).' .format(lipas_id, feature['nimi_fi'].value, unit.name_fi)) try: # Create a multi-container for the first encountered feature. # We try to add all other features to the multi-container but # fall back to a GeometryCollection if it's some other type. if lipas_id in geometries: try: geometries[lipas_id].append(feature.geom.geos) except TypeError: raise TypeError( "The lipas database contains mixed geometries; this is unsupported!" ) # If mixed geometry types ever begin to appear in the lipas database, # uncommenting the following might make everything work straight # away. Please note that it's completely untested. # logger.warning("id {} has mixed geometries, " # "creating a GeometryCollection as fallback".format(lipas_id)) # geometries[lipas_id] = GeometryCollection(list(geometries[lipas_id]) + feature.geom.geos) else: geometries[lipas_id] = get_multi(feature.geom.geos) except GDALException as err: # We might be dealing with something weird that the Python GDAL lib doesn't handle.
# One example is a CurvePolygon as defined here http://www.gdal.org/ogr__core_8h.html logger.error( 'Error while processing a geometry: {}'.format(err)) logger.info('Found {} matches.'.format(len(geometries))) # Add all geometries we found to the db logger.info('Updating geometries in the database...') for lipas_id, geometry in geometries.items(): unit = units_by_lipas_id[lipas_id] # FIXME: make sports map UI support simplified # geometries and bring back simplification # from commit 6cff46e0399fedbbc8266efa5230cd4ccb8a8485 unit.geometry = geometry unit.save()
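# A minimal sketch of what the get_multi() helper used above is assumed to do:
# promote a simple GEOS geometry to its Multi* counterpart so that later
# features sharing the same Lipas id can be appended to the collection. This is
# an assumption about its behaviour, not the project's actual implementation.
from django.contrib.gis.geos import (
    LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon,
)

MULTI_TYPES = {
    Point: MultiPoint,
    LineString: MultiLineString,
    Polygon: MultiPolygon,
}


def get_multi(geom):
    """Wrap a simple GEOS geometry in the matching Multi* collection."""
    multi = MULTI_TYPES.get(type(geom))
    return multi(geom) if multi else geom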