def load(self):
    sql_query = '''select link_id, geom from link_geometry;'''
    cursor = connection.cursor()
    cursor.execute(sql_query)
    sio = StringIO()
    for id, geom in cursor:
        mls = MultiLineString(GEOSGeometry(geom))
        mls.set_srid(config.EPSG4326)
        sio.write('\t'.join([str(id), mls.ewkt]))
        sio.write('\n')
    sio.reset()
    cursor.copy_from(sio, 'phidata_link')
    return self
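Note that this loader leans on two Python 2 era details: `StringIO.reset()` (which `io.StringIO` spells `seek(0)`) and psycopg2's `copy_from`, reached here through Django's cursor wrapper. A minimal sketch of the same COPY-based bulk load on Python 3, assuming a psycopg2-backed cursor and the same `phidata_link` table:

import io

def bulk_copy(cursor, rows):
    # Sketch only: `rows` is an iterable of (id, ewkt) pairs and
    # `cursor` is psycopg2-backed; io.StringIO uses seek(0) where the
    # old StringIO class had reset().
    sio = io.StringIO()
    for id_, ewkt in rows:
        sio.write('%s\t%s\n' % (id_, ewkt))
    sio.seek(0)  # rewind before handing the buffer to COPY
    cursor.copy_from(sio, 'phidata_link')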
def filter_geom(self, src, val):
    if val is None:
        return None
    if val.geom_type == 'MultiLineString':
        points = val[0]
        for i, path in enumerate(val[1:]):
            distance = Point(points[-1]).distance(Point(path[0]))
            if distance > 5:
                self.add_warning(
                    _(u"Not contiguous segment {i} ({distance} m) for geometry for field '{src}'")
                    .format(i=i + 2, p1=points[-1], p2=path[0],
                            distance=int(distance), src=src))
            points += path
        return MultiLineString(points)
    elif val.geom_type != 'LineString':
        self.add_warning(
            _(u"Invalid geometry type for field '{src}'. Should be LineString, not {geom_type}")
            .format(src=src, geom_type=val.geom_type))
        return None
    return val
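One caveat about the distance check above: GEOS distances are planar and expressed in the geometry's own coordinate units, so the `> 5` threshold only reads as "5 metres" when the data is in a metric projection. A quick illustration:

from django.contrib.gis.geos import Point

# Planar distance in the geometry's units (no unit conversion happens):
print(Point(0, 0).distance(Point(3, 4)))  # 5.0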
def test_update_geometry_on_stop_save(self):
    route = Route.objects.create(feed=self.feed, rtype=3)
    trip = Trip.objects.create(route=route)
    s1 = Stop.objects.create(feed=self.feed, point="POINT(-117.133162 36.425288)")
    s2 = Stop.objects.create(feed=self.feed, point="POINT(-117.13 36.42)")
    StopTime.objects.create(stop=s1, trip=trip, stop_sequence=1)
    StopTime.objects.create(stop=s2, trip=trip, stop_sequence=2)
    # Starts unset
    trip = Trip.objects.get(id=trip.id)
    route = Route.objects.get(id=route.id)
    self.assertFalse(trip.geometry)
    # Stop save
    s1.save()
    # Now set
    trip = Trip.objects.get(id=trip.id)
    route = Route.objects.get(id=route.id)
    self.assertEqual(trip.geometry.coords,
                     ((-117.133162, 36.425288), (-117.13, 36.42)))
    self.assertEqual(route.geometry, MultiLineString(trip.geometry))
def update_geometry(self):
    for route in self.route_set.all():
        if route.geometry:
            self.geometry = route.geometry
            self.save(update_fields=['geometry'])
            return
    patterns = []
    linestrings = []
    for trip in Trip.objects.filter(
            route__service=self).prefetch_related('stoptime_set__stop'):
        stops = [
            stoptime.stop for stoptime in trip.stoptime_set.all()
            if stoptime.stop and stoptime.stop.latlong
        ]
        pattern = [stop.pk for stop in stops]
        if pattern in patterns:
            continue
        patterns.append(pattern)
        points = [stop.latlong for stop in stops]
        if len(points) > 1:
            linestrings.append(LineString(points))
    if linestrings:
        self.geometry = MultiLineString(*linestrings)
        self.save(update_fields=['geometry'])
def generate_tracksegments(self):
    """
    Deletes old Trackseg objects between given range from the database.
    Loops all trackpoints between given range and creates Tracksegs for them.
    """
    # First delete all Tracksegments of this Trackfile
    self.tracksegs.all().delete()
    # Then recreate
    trackpoints = self.trackpoints.order_by("time")
    points = []
    tracksegs = []
    # TODO: configurable options
    maxtime = 120  # seconds
    limit = 1000
    for tp in trackpoints:  # Loop all Trackpoints (max 'limit')
        # Seconds between 2 last points
        timediff = (tp.time - points[-1].time).seconds if points else 0
        if timediff > maxtime or len(points) >= limit:
            # Create new Trackseg
            trackseg = trackpointlist_to_trackseg(self, points)
            tracksegs.append(trackseg)
            # if track was split because of limit then reuse the last point
            if len(points) >= limit:
                points = [points[-1]]
            else:
                points = []
        points.append(tp)
    # Create last new Trackseg
    trackseg = trackpointlist_to_trackseg(self, points)
    tracksegs.append(trackseg)
    simplified_linestrings = []
    for trackseg in tracksegs:
        simplified_linestrings.append(simplify(trackseg.geometry, 30))
    self.geometry = MultiLineString(simplified_linestrings)
    self.save()
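The `simplify(trackseg.geometry, 30)` helper is not part of the snippet. A plausible stand-in, assuming it wraps GEOS Douglas-Peucker simplification with a tolerance in map units, could be:

def simplify(geometry, tolerance):
    # Hypothetical shim for the helper used above: topology-preserving
    # simplification, falling back to the original geometry if the
    # result degenerates.
    simplified = geometry.simplify(tolerance, preserve_topology=True)
    return simplified if simplified.num_coords > 1 else geometry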
def get(self, request, *args, **kwargs):
    id1 = request.GET.get('bid')
    id2 = request.GET.get('eid')
    sql_inside_of_function = "select id, source, target, cost * (4-rtng_ccpx) * (4-rtng_mean) * (4-rtng_cbf7)+case when one_way=-1 then 1000000 else 0 END as cost,cost * (4-rtng_ccpx)*(4-rtng_mean)*(4-rtng_cbf7) + case when one_way=1 then 1000000 else 0 END as reverse_cost from \"Data_minnesotabiketrails\"'"
    sql_function = "select ccp_name, the_geom, bt.cost, bt.item_tags from pgr_dijkstra('"
    cursor = connection.cursor()
    cursor.execute(
        sql_function + sql_inside_of_function +
        ", %s , %s , true,true) join \"Data_minnesotabiketrails\" as bt on bt.id=id2",
        (str(id1), str(id2)))
    all = cursor.fetchall()
    names = []
    gj = []
    for item in all:
        names.append((item[0], item[2]))
        poly = loads(GEOSGeometry(item[1], srid=4326).geojson)
        poly['properties'] = {'name': item[0], 'tag': item[3]}
        gj.append(poly)
    # this creates a list of linestrings, then makes a MultiLineString and gets the extent
    geo = [GEOSGeometry(geoDumps(po)) for po in gj]
    extent = MultiLineString(*geo).extent
    extent = [[extent[1], extent[0]], [extent[3], extent[2]]]
    # next is getting the distance on each same-named trail section
    sent_names = []
    dist_on_path = 0
    for i, n in enumerate(names):
        if i == 0:
            previous_name = n[0]
        if n[0] == previous_name:
            dist_on_path += n[1]
        else:
            sent_names.append((previous_name, "%.2f" % dist_on_path))
            dist_on_path = n[1]
            previous_name = n[0]
        if i == len(names) - 1:
            sent_names.append((previous_name, "%.2f" % dist_on_path))
    return HttpResponse(
        dumps({'names': sent_names, 'geojson': gj, 'extent': extent}),
        content_type="application/json; charset='utf-8'")
def process_feature_geoms(self, properties, resource, geo_process='collection'):
    geoms = []
    result = None
    for g in resource['_source']['geometries']:
        geom = GEOSGeometry(JSONSerializer().serialize(g['value'], ensure_ascii=False))
        geoms.append(geom)
    if geo_process == 'collection':
        geometry = GeometryCollection(geoms)
        result = {'type': 'Feature', 'geometry': geometry, 'properties': properties}
    elif geo_process == 'sorted':
        result = []
        sorted_geoms = {'points': [], 'lines': [], 'polys': []}
        # GEOS geom_typeid: 0=Point, 1=LineString, 3=Polygon,
        # 4=MultiPoint, 5=MultiLineString, 6=MultiPolygon
        for geom in geoms:
            if geom.geom_typeid == 0:
                sorted_geoms['points'].append(geom)
            if geom.geom_typeid == 1:
                sorted_geoms['lines'].append(geom)
            if geom.geom_typeid == 3:
                sorted_geoms['polys'].append(geom)
            if geom.geom_typeid == 4:
                for feat in geom:
                    sorted_geoms['points'].append(feat)
            if geom.geom_typeid == 5:
                for feat in geom:
                    sorted_geoms['lines'].append(feat)
            if geom.geom_typeid == 6:
                for feat in geom:
                    sorted_geoms['polys'].append(feat)
        if len(sorted_geoms['points']) > 0:
            result.append({'type': 'Feature',
                           'geometry': MultiPoint(sorted_geoms['points']),
                           'properties': properties})
        if len(sorted_geoms['lines']) > 0:
            result.append({'type': 'Feature',
                           'geometry': MultiLineString(sorted_geoms['lines']),
                           'properties': properties})
        if len(sorted_geoms['polys']) > 0:
            result.append({'type': 'Feature',
                           'geometry': MultiPolygon(sorted_geoms['polys']),
                           'properties': properties})
    return result
def export(self, map_obj):
    print('Export data to DB')
    fake = Factory.create()

    # Export regions
    print('Save regions')
    self.cleanup_region(map_obj)
    # new_objects = []
    for region in map_obj.regions:
        obj = self.region_model()
        obj.name = fake.city()
        polygons = [center.shapely_object for center in region.centers]
        region_poly = cascaded_union(polygons)
        coords = [
            self.point_to_lnglat(point)
            for point in region_poly.exterior.coords
        ]
        obj.geom = MultiPolygon([Polygon(coords)])
        self.region_pre_save(obj, region, map_obj)
        obj.full_clean()
        obj.save()
        region.model = obj
        # new_objects.append(obj)
    # self.region_model.objects.bulk_create(new_objects)

    # Save region neighbors
    print('Save regions neighbors')
    checked = []
    for region in map_obj.regions:
        # mark as visited so each pair is only linked once
        # (mirrors the biome neighbors loop below)
        checked.append(region)
        for neighbour in region.neighboir_regions:
            if neighbour not in checked:
                region.model.neighbors.add(neighbour.model)

    # Export biomes
    print('Save biomes')
    self.cleanup_biome(map_obj)
    new_objects = []
    for center in map_obj.centers:
        obj = self.biome_model()
        center.model = obj
        obj.biome = center.biome
        obj.water = center.water
        obj.coast = center.coast
        obj.border = center.border
        obj.ocean = center.ocean
        obj.elevation = center.elevation
        obj.moisture = center.moisture
        obj.center = Point(*self.point_to_lnglat(center.point))
        obj.river = any(edge.river for edge in center.borders)
        if not center.water:
            obj.region = center.region.model
        coords = []
        for corner in center.corners:
            coords.append(self.point_to_lnglat(corner.point))
        # Sort coordinates. Should be sorted already, but let's check once more.
        coords.sort(key=lambda p: math.atan2(p[1] - obj.center.y,
                                             p[0] - obj.center.x))
        coords.append(coords[0])
        obj.geom = MultiPolygon([Polygon(coords)])
        self.biome_pre_save(obj, center, map_obj)
        obj.full_clean()
        obj.save()
        new_objects.append(obj)
    # FIXME: Use bulk_create and change neighbors saving
    # self.model.objects.bulk_create(new_objects)

    # save neighbors
    print('Save biomes neighbors')
    checked = []
    for center in map_obj.centers:
        checked.append(center)
        for neighbour in center.neighbors:
            if neighbour not in checked:
                center.model.neighbors.add(neighbour.model)

    # Export rivers
    print('Save rivers')
    self.cleanup_river(map_obj)
    new_objects = []
    for edge in map_obj.edges:
        if edge.river:
            obj = self.river_model()
            obj.width = edge.river
            p1 = self.point_to_lnglat(edge.corners[0].point)
            p2 = self.point_to_lnglat(edge.corners[1].point)
            obj.geom = MultiLineString(LineString(p1, p2))
            self.river_pre_save(obj, edge, map_obj)
            obj.full_clean()
            new_objects.append(obj)
    self.river_model.objects.bulk_create(new_objects)

    # Export cities
    print('Save cities')
    self.cleanup_city(map_obj)
    new_objects = []
    for region in map_obj.regions:
        for center in region.centers:
            obj = self.city_model()
            obj.biome = center.model
            obj.capital = (center == region.capital)
            obj.name = fake.city()
            obj.region = region.model
            obj.coords = Point(*self.point_to_lnglat(center.point))
            self.region_pre_save(obj, region, map_obj)
            obj.full_clean()
            new_objects.append(obj)
    self.city_model.objects.bulk_create(new_objects)
def do_service(self, open_file, filename):
    """
    Given a root element, region ID, filename, and optional dictionary of
    service descriptions (for the NCSD), does stuff
    """
    timetable = Timetable(open_file)

    if not hasattr(timetable, 'element'):
        return

    if timetable.operating_period.end and timetable.operating_period.end < date.today():
        return

    operators = timetable.operators
    # if timetable.operator and len(operators) > 1:
    #     operators = [operator for operator in operators if operator.get('id') == timetable.operator]
    operators = [operator for operator in map(self.get_operator, operators) if operator]

    line_name, line_brand = self.get_line_name_and_brand(
        timetable.element.find('txc:Services/txc:Service', NS), filename)

    # net and service code:
    net, service_code, line_ver = self.infer_from_filename(timetable.element.attrib['FileName'])
    if service_code is None:
        service_code = timetable.service_code

    defaults = dict(
        line_name=line_name,
        line_brand=line_brand,
        mode=timetable.mode,
        net=net,
        line_ver=line_ver,
        region_id=self.region_id,
        date=timetable.transxchange_date,
        current=True,
        source=self.source
    )

    # stops:
    stops = StopPoint.objects.in_bulk(timetable.stops.keys())

    try:
        stop_usages = []
        for grouping in timetable.groupings:
            if grouping.rows:
                stop_usages += [
                    StopUsage(
                        service_id=service_code,
                        stop_id=row.part.stop.atco_code,
                        direction=grouping.direction,
                        order=i,
                        timing_status=row.part.timingstatus
                    )
                    for i, row in enumerate(grouping.rows)
                    if row.part.stop.atco_code in stops
                ]
                if grouping.direction == 'outbound' or grouping.direction == 'inbound':
                    defaults[grouping.direction + '_description'] = str(grouping)
        show_timetable = True
        line_strings = []
        for grouping in timetable.groupings:
            for journeypattern in grouping.journeypatterns:
                line_string = self.line_string_from_journeypattern(journeypattern, stops)
                if line_string not in line_strings:
                    line_strings.append(line_string)
        multi_line_string = MultiLineString(*(ls for ls in line_strings if ls))
    except (AttributeError, IndexError) as error:
        warnings.warn('%s, %s' % (error, filename))
        show_timetable = False
        stop_usages = [StopUsage(service_id=service_code, stop_id=stop, order=0)
                       for stop in stops]
        multi_line_string = None

    # service:
    defaults['show_timetable'] = show_timetable
    defaults['geometry'] = multi_line_string

    if self.service_descriptions:
        filename_parts = filename.split('_')
        operator = filename_parts[-2]
        line_name = filename_parts[-1][:-4]
        defaults['outbound_description'] = self.service_descriptions.get(
            '%s%s%s' % (operator, line_name, 'O'), '')
        defaults['inbound_description'] = self.service_descriptions.get(
            '%s%s%s' % (operator, line_name, 'I'), '')
        defaults['description'] = defaults['outbound_description'] or defaults['inbound_description']
    else:
        description = timetable.description
        if not description:
            warnings.warn('%s missing a description' % filename)
        elif len(description) > 255:
            warnings.warn('Description "%s" too long in %s' % (description, filename))
            description = description[:255]

        if self.region_id == 'NE':
            description = self.sanitize_description(description)
        if description and description != 'Origin - Destination':
            defaults['description'] = description

    parts = service_code.split('_')
    if parts[0] == 'NW':
        assert len(parts) >= 5
        assert parts[-1].isdigit()
        homogeneous_service_code = '_'.join(parts[:-1])
        same_services = Service.objects.filter(description=description, current=True)
        same_service = same_services.filter(
            service_code__startswith=homogeneous_service_code + '_')
        same_service = same_service.exclude(service_code=service_code).first()
        if same_service:
            ServiceCode.objects.update_or_create(
                service=same_service, code=service_code, scheme='NW TNDS')
            service_code = same_service.service_code
            for stop_usage in stop_usages:
                stop_usage.service_id = service_code

    service, created = Service.objects.update_or_create(
        service_code=service_code, defaults=defaults)

    if created:
        service.operator.add(*operators)
    else:
        service.operator.set(operators)
    if service_code not in self.service_codes:
        service.stops.clear()
    StopUsage.objects.bulk_create(stop_usages)

    if timetable.private_code:
        ServiceCode.objects.update_or_create({
            'code': timetable.private_code
        }, service=service, scheme='Traveline Cymru')

    self.service_codes.add(service_code)
def import_espoo_units(self, filename):
    ds = DataSource(filename)
    assert len(ds) == 1
    uid = self.get_lowest_high_unit_id()
    lyr = ds[0]
    created = 0
    updated = 0
    for feat in lyr:
        if feat.get('NIMI') in ['Tali 6.5 km', 'Pirttimäki 3.0 km',
                                'Pirttimäki 6.4 km', 'Pirttimäki 8.3 km']:
            # These are Helsinki's tracks, and the maintainer is set to
            # Espoo in the Helsinki importer
            continue
        name = feat.get('NIMI')
        if name.find('Aurattu ulkoilureitti') == 0:
            # Do not import these
            continue
        if type(feat.geom) == django.contrib.gis.gdal.geometries.MultiLineString:
            multilinestring = GEOSGeometry(feat.geom.wkt)
        else:
            multilinestring = MultiLineString(GEOSGeometry(feat.geom.wkt))
        # support layers with the correct srs
        if lyr.srs is not None and lyr.srs['PROJCS'] == 'ETRS89_TM35FIN_E_N':
            converted_multilinestring = multilinestring
        else:
            # else assume the coordinates are in Espoo coordinates
            converted_multilinestring_coords = []
            for line in multilinestring:
                converted_multilinestring_coords.append(
                    LineString(tuple(
                        espoo_coordinates_to_gk25(point[0], point[1])
                        for point in line)))
            converted_multilinestring = MultiLineString(
                converted_multilinestring_coords, srid=3879)
        length = feat.get('PITUUS')
        if len(length) == 0:
            length = None
        maintenance_organization = '49'
        extra_fields = {
            'lighting': ESPOO_LIGHTING[feat.get('VALAISTUS')],
            'skiing_technique': HELSINKI_TECHNIQUES[feat.get('TYYLI')],
            'maintenance_group': 'kaikki',
            'maintenance_organization': maintenance_organization,
            'length': length
        }
        street_address = feat.get('street_add')
        www_url = 'http://www.espoo.fi/liikunta'
        address_zip = feat.get('zip')
        point = None
        if street_address:
            point = self.geocode_street_address(street_address, 'espoo')
        if point is None:
            point = Point(converted_multilinestring[0][0],
                          converted_multilinestring[0][1], srid=3879)
        defaults = self.unit_defaults(converted_multilinestring, point,
                                      extra_fields, street_address,
                                      address_zip, www_url)
        defaults['municipality_id'] = 'espoo'
        defaults['organization_id'] = 49
        uid, did_create, unit = self._create_or_update_unit(
            uid, feat.get('NIMI'), defaults)
        if did_create:
            created += 1
        else:
            updated += 1
        if street_address:
            self.add_public_transportation_connection(
                unit, street_address, 'espoo')
    _report_counts('espoo', created, updated)
def test_create_poly_file_string_when_geometry_is_invalid_raises_type_error(self):
    invalid_geometry = MultiLineString(LineString((0, 0), (0, 1), (1, 1)),
                                       LineString((1, 1), (1, 2), (2, 2)))
    self.assertRaises(TypeError, create_poly_file_string, invalid_geometry)
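The `create_poly_file_string` under test is not shown. A hypothetical sketch consistent with the test's expectation — Osmosis `.poly` files describe areas, so non-polygonal input is rejected with a TypeError — might look like:

from django.contrib.gis.geos import Polygon, MultiPolygon

def create_poly_file_string(geometry):
    # Hypothetical guard exercised by the test above; the body is a
    # plausible minimal .poly serialization, not the project's actual code.
    if not isinstance(geometry, (Polygon, MultiPolygon)):
        raise TypeError('Geometry must be a Polygon or MultiPolygon')
    if isinstance(geometry, Polygon):
        geometry = MultiPolygon(geometry)
    lines = ['polygon']
    for i, polygon in enumerate(geometry, 1):
        lines.append(str(i))
        lines.extend('    %f    %f' % point for point in polygon.exterior_ring)
        lines.append('END')
    lines.append('END')
    return '\n'.join(lines)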
def index(self, document):
    """
    The method that actually performs the indexing.

    :param document: The document as a memory file.
    """
    from django.contrib.gis.geos import (
        Point, LineString, MultiLineString)
    from obspy import read_events

    # Collect all indices in a list. Each index has to be a dictionary.
    indices = []

    inv = read_events(document, format="quakeml")

    for event in inv:
        if event.origins:
            org = event.preferred_origin() or event.origins[0]
        else:
            org = None

        if event.magnitudes:
            mag = event.preferred_magnitude() or event.magnitudes[0]
        else:
            mag = None

        has_focal_mechanism = False
        has_moment_tensor = False
        if event.focal_mechanisms:
            has_focal_mechanism = True
            # a focal mechanism only counts as a moment tensor if one is attached
            if any(mt.moment_tensor for mt in event.focal_mechanisms):
                has_moment_tensor = True

        # Parse attributes in the baynet namespace.
        # The public attribute defaults to None, it can only be set to
        # True by utilizing the baynet namespace as of now.
        extra = event.get("extra", {})
        if "public" in extra:
            public = extra["public"]["value"]
            if public.lower() in ["false", "f"]:
                public = False
            elif public.lower() in ["true", "t"]:
                public = True
            else:
                public = None
        else:
            public = None

        if "evaluationMode" in extra:
            evaluation_mode = extra["evaluationMode"]["value"]
        else:
            evaluation_mode = None

        # parse horizontal uncertainties
        if org and org.origin_uncertainty:
            org_unc = org.origin_uncertainty
            if org_unc.preferred_description == 'horizontal uncertainty':
                horizontal_uncertainty_max = org_unc.horizontal_uncertainty
                horizontal_uncertainty_min = org_unc.horizontal_uncertainty
                horizontal_uncertainty_max_azimuth = 0
            elif org_unc.preferred_description == 'uncertainty ellipse':
                horizontal_uncertainty_max = \
                    org_unc.max_horizontal_uncertainty
                horizontal_uncertainty_min = \
                    org_unc.min_horizontal_uncertainty
                horizontal_uncertainty_max_azimuth = \
                    org_unc.azimuth_max_horizontal_uncertainty
            else:
                horizontal_uncertainty_max = None
                horizontal_uncertainty_min = None
                horizontal_uncertainty_max_azimuth = None
        else:
            horizontal_uncertainty_max = None
            horizontal_uncertainty_min = None
            horizontal_uncertainty_max_azimuth = None

        geometry = None
        if org:
            geometry = [Point(org.longitude, org.latitude)]
            if all(value is not None for value in (
                    horizontal_uncertainty_max,
                    horizontal_uncertainty_min,
                    horizontal_uncertainty_max_azimuth)):
                import geopy
                import geopy.distance
                start = geopy.Point(latitude=org.latitude,
                                    longitude=org.longitude)
                lines = []
                for distance, azimuth in (
                        (horizontal_uncertainty_max,
                         horizontal_uncertainty_max_azimuth),
                        (horizontal_uncertainty_min,
                         horizontal_uncertainty_max_azimuth + 90)):
                    azimuth = azimuth % 180
                    distance = geopy.distance.VincentyDistance(
                        kilometers=distance / 1e3)
                    end1 = distance.destination(point=start, bearing=azimuth)
                    end2 = distance.destination(point=start, bearing=azimuth + 180)
                    line = LineString((end1.longitude, end1.latitude),
                                      (org.longitude, org.latitude),
                                      (end2.longitude, end2.latitude))
                    lines.append(line)
                geometry.append(MultiLineString(lines))
            else:
                geometry.append(MultiLineString([]))

        # phase counts
        used_phase_count = None
        used_p = None
        used_s = None
        if org:
            if org.quality:
                used_phase_count = org.quality.used_phase_count
            if org.quality and org.quality.get('extra'):
                extra = org.quality.get('extra', {})
                used_p = extra.get('usedPhaseCountP', {}).get('value', None)
                used_s = extra.get('usedPhaseCountS', {}).get('value', None)
                if used_p is not None:
                    used_p = int(used_p)
                if used_s is not None:
                    used_s = int(used_s)

        # set first/last pick times
        first_pick_time = None
        last_pick_time = None
        if event.picks:
            pick_times = [pick.time for pick in event.picks
                          if pick.time is not None]
            if pick_times:
                first_pick_time = str(min(pick_times))
                last_pick_time = str(max(pick_times))

        indices.append({
            "quakeml_id": str(event.resource_id),
            "latitude": org.latitude if org else None,
            "longitude": org.longitude if org else None,
            "depth_in_m": org.depth if org else None,
            "origin_time": str(org.time) if org else None,
            "first_pick_time": first_pick_time,
            "last_pick_time": last_pick_time,
            "used_phase_count": used_phase_count,
            "used_p": used_p,
            "used_s": used_s,
            "magnitude": mag.mag if mag else None,
            "magnitude_type": mag.magnitude_type if mag else None,
            "agency": event.creation_info and event.creation_info.agency_id or None,
            "author": event.creation_info and event.creation_info.author or None,
            "public": public,
            "evaluation_mode": evaluation_mode,
            "event_type": event.event_type,
            "has_focal_mechanism": has_focal_mechanism,
            "has_moment_tensor": has_moment_tensor,
            # The special key geometry can be used to store geographic
            # information about the indexes geometry. Useful for very
            # fast queries using PostGIS.
            "geometry": geometry,
            "horizontal_uncertainty_max": horizontal_uncertainty_max,
            "horizontal_uncertainty_min": horizontal_uncertainty_min,
            "horizontal_uncertainty_max_azimuth":
                horizontal_uncertainty_max_azimuth,
        })

    return indices
def test_indexing(self):
    expected_usgs = [
        {'agency': 'ci',
         'author': None,
         'depth_in_m': 10.0,
         'evaluation_mode': None,
         'event_type': 'quarry blast',
         'geometry': [
             Point(-117.6623333, 35.0476667),
             MultiLineString([
                 LineString((-117.6623332999999860, 35.0521735816367155),
                            (-117.6623333, 35.0476667),
                            (-117.6623332999999860, 35.0431598150083872)),
                 LineString((-117.6568529607076101, 35.0476665762236408),
                            (-117.6623333, 35.0476667),
                            (-117.6678136392923619, 35.0476665762236408))])],
         'has_focal_mechanism': False,
         'has_moment_tensor': False,
         'horizontal_uncertainty_max': 500.0,
         'horizontal_uncertainty_max_azimuth': 0,
         'horizontal_uncertainty_min': 500.0,
         'latitude': 35.0476667,
         'longitude': -117.6623333,
         'magnitude': 1.54,
         'magnitude_type': 'ml',
         'origin_time': '2014-11-06T00:24:42.240000Z',
         'public': True,
         'quakeml_id': 'quakeml:comcat.cr.usgs.gov/fdsnws/event/1/'
                       'query?eventid=ci37285320&format=quakeml'},
        {'agency': 'uw',
         'author': None,
         'depth_in_m': 0.0,
         'evaluation_mode': None,
         'event_type': 'quarry blast',
         'geometry': [
             Point(-120.2807, 42.138),
             MultiLineString([
                 LineString((-120.2807, 42.2073215168237184),
                            (-120.2807, 42.1379999999999981),
                            (-120.2807, 42.0686776424464739)),
                 LineString((-120.18756038590098, 42.1379621973716567),
                            (-120.2807, 42.1379999999999981),
                            (-120.3738396140990119, 42.1379621973716567))])],
         'has_focal_mechanism': False,
         'has_moment_tensor': False,
         'horizontal_uncertainty_max': 7700.0,
         'horizontal_uncertainty_max_azimuth': 0,
         'horizontal_uncertainty_min': 7700.0,
         'latitude': 42.138,
         'longitude': -120.2807,
         'magnitude': 1.6,
         'magnitude_type': 'Md',
         'origin_time': '2014-11-14T21:07:48.200000Z',
         'public': True,
         'quakeml_id': 'quakeml:comcat.cr.usgs.gov/fdsnws/event/1/'
                       'query?eventid=uw60916552&format=quakeml'}]
    expected_focmec = [
        {'agency': None,
         'author': None,
         'depth_in_m': None,
         'evaluation_mode': None,
         'event_type': None,
         'geometry': None,
         'has_focal_mechanism': True,
         'has_moment_tensor': True,
         'horizontal_uncertainty_max': None,
         'horizontal_uncertainty_min': None,
         'horizontal_uncertainty_max_azimuth': None,
         'latitude': None,
         'longitude': None,
         'magnitude': None,
         'magnitude_type': None,
         'origin_time': None,
         'public': True,
         'quakeml_id': 'smi:ISC/evid=11713537'}]

    indexer = QuakeMLIndexerPlugin()
    result_usgs = indexer.index(FILES['usgs'])
    result_focmec = indexer.index(FILES['focmec'])

    self.assertEqual(expected_usgs, result_usgs)
    self.assertEqual(expected_focmec, result_focmec)
def process_feature_geoms(self, properties, resource, geo_process='collection'):
    geoms = []
    result = None
    for g in resource['_source']['geometries']:
        geom = GEOSGeometry(JSONSerializer().serialize(g['value'], ensure_ascii=False))
        geoms.append(geom)
    if geo_process == 'collection':
        ## need to iterate the list and double-iterate any geom collections
        ## to make a new list of only simple geometries, which is, in turn,
        ## transformed back into a collection.
        newgeomlist = list()
        for g in geoms:
            if g.geom_typeid == 7:
                for gchild in g:
                    newgeomlist.append(gchild)
            else:
                newgeomlist.append(g)
        geometry = GeometryCollection(newgeomlist)
        result = {
            'type': 'Feature',
            'geometry': geometry,
            'properties': properties
        }
    elif geo_process == 'sorted':
        result = []
        sorted_geoms = {'points': [], 'lines': [], 'polys': []}
        for geom in geoms:
            if geom.geom_typeid == 0:
                sorted_geoms['points'].append(geom)
            if geom.geom_typeid == 1:
                sorted_geoms['lines'].append(geom)
            if geom.geom_typeid == 3:
                sorted_geoms['polys'].append(geom)
            if geom.geom_typeid == 4:
                for feat in geom:
                    sorted_geoms['points'].append(feat)
            if geom.geom_typeid == 5:
                for feat in geom:
                    sorted_geoms['lines'].append(feat)
            if geom.geom_typeid == 6:
                for feat in geom:
                    sorted_geoms['polys'].append(feat)
            # process a geometry collection by iterating and further
            # sorting its pieces
            if geom.geom_typeid == 7:
                for g in geom:
                    if g.geom_typeid == 0:
                        sorted_geoms['points'].append(g)
                    if g.geom_typeid == 1:
                        sorted_geoms['lines'].append(g)
                    if g.geom_typeid == 3:
                        sorted_geoms['polys'].append(g)
        if len(sorted_geoms['points']) > 0:
            result.append({
                'type': 'Feature',
                'geometry': MultiPoint(sorted_geoms['points']),
                'properties': properties
            })
        if len(sorted_geoms['lines']) > 0:
            result.append({
                'type': 'Feature',
                'geometry': MultiLineString(sorted_geoms['lines']),
                'properties': properties
            })
        if len(sorted_geoms['polys']) > 0:
            result.append({
                'type': 'Feature',
                'geometry': MultiPolygon(sorted_geoms['polys']),
                'properties': properties
            })
    return result
def setUp(self):
    super().setUp()
    self.client = APIClient()
    self.administrative_entity = factories.PermitAdministrativeEntityFactory()
    self.group = factories.SecretariatGroupFactory()
    self.administrative_entity = self.group.permitdepartment.administrative_entity

    # Users and Permissions
    self.normal_user = factories.UserFactory()
    self.secretariat_user = factories.SecretariatUserFactory(groups=[self.group])
    self.admin_user = factories.UserFactory(is_staff=True, is_superuser=True)

    # Works object Types
    self.works_object_types = factories.WorksObjectTypeFactory.create_batch(
        2, is_public=True
    )
    self.administrative_entity.works_object_types.set(self.works_object_types)

    # Create the different types of Permit Requests by different authors

    ## Normal User ##
    self.permit_request_normal_user = factories.PermitRequestFactory(
        status=models.PermitRequest.STATUS_SUBMITTED_FOR_VALIDATION,
        administrative_entity=self.administrative_entity,
        author=self.normal_user.permitauthor,
    )
    works_object_type_choice = factories.WorksObjectTypeChoiceFactory(
        permit_request=self.permit_request_normal_user,
        works_object_type=self.works_object_types[0],
    )
    factories.WorksObjectPropertyValueFactory(
        works_object_type_choice=works_object_type_choice
    )
    factories.PermitRequestGeoTimeFactory(
        permit_request=self.permit_request_normal_user
    )

    ## Admin User ##
    self.permit_request_admin_user = factories.PermitRequestFactory(
        status=models.PermitRequest.STATUS_APPROVED,
        administrative_entity=self.administrative_entity,
        author=self.admin_user.permitauthor,
    )
    works_object_type_choice = factories.WorksObjectTypeChoiceFactory(
        permit_request=self.permit_request_admin_user,
        works_object_type=self.works_object_types[0],
    )
    factories.WorksObjectPropertyValueFactory(
        works_object_type_choice=works_object_type_choice
    )
    factories.PermitRequestGeoTimeFactory(
        permit_request=self.permit_request_admin_user
    )

    ## Secretary User ##
    self.permit_request_secretary_user = factories.PermitRequestFactory(
        status=models.PermitRequest.STATUS_PROCESSING,
        administrative_entity=self.administrative_entity,
        author=self.secretariat_user.permitauthor,
    )
    works_object_type_choice = factories.WorksObjectTypeChoiceFactory(
        permit_request=self.permit_request_secretary_user,
        works_object_type=self.works_object_types[1],
    )
    factories.WorksObjectPropertyValueFactory(
        works_object_type_choice=works_object_type_choice
    )
    factories.PermitRequestGeoTimeFactory(
        permit_request=self.permit_request_secretary_user,
        geom=GeometryCollection(
            MultiLineString(
                LineString(
                    (2539096.09997796, 1181119.41274907),
                    (2539094.37477054, 1181134.07701214),
                ),
                LineString(
                    (2539196.09997796, 1181219.41274907),
                    (2539294.37477054, 1181134.07701214),
                ),
            )
        ),
    )

    ## For Validator User ##
    self.permit_request_validator_user = factories.PermitRequestFactory(
        status=models.PermitRequest.STATUS_AWAITING_VALIDATION,
        administrative_entity=self.administrative_entity,
        author=self.normal_user.permitauthor,
    )
    works_object_type_choice = factories.WorksObjectTypeChoiceFactory(
        permit_request=self.permit_request_validator_user,
        works_object_type=self.works_object_types[1],
    )
    factories.WorksObjectPropertyValueFactory(
        works_object_type_choice=works_object_type_choice
    )
    factories.PermitRequestGeoTimeFactory(
        permit_request=self.permit_request_validator_user,
        geom=GeometryCollection(MultiPoint(Point(0, 0), Point(1, 1))),
    )
def get_track(polyline_data):
    coords = polyline.decode(polyline_data)
    coords = [(x, y) for (y, x) in coords]
    if len(coords) == 1:
        coords = []
    return MultiLineString([LineString(coords, srid=4326)], srid=4326)
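For context, `polyline.decode` from the `polyline` package yields (lat, lon) pairs, which is why `get_track` swaps them into the (lon, lat) order GEOS expects. A quick check with the polyline documentation's reference string:

import polyline

# Reference encoded polyline from the polyline/Google docs;
# decode() yields (lat, lon) pairs.
coords = polyline.decode('_p~iF~ps|U_ulLnnqC_mqNvxq`@')
assert coords == [(38.5, -120.2), (40.7, -120.95), (43.252, -126.453)]

track = get_track('_p~iF~ps|U_ulLnnqC_mqNvxq`@')
print(track.srid, len(track[0]))  # 4326 3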
def SaveGPXtoPostGIS(fGPX, file_instance):
    try:
        # opening the GPX file, loaded by the user
        gpx_file = open(settings.TMP_MEDIA_ROOT + fGPX)
        # parsing the GPX file;
        # we store all the file contents within the gpx variable
        gpx = gpxpy.parse(gpx_file)
        # after loading the file in memory, we can delete the physical one
        os.unlink(gpx_file.name)
    except Exception as e:
        if settings.DEBUG:
            print(e)
        logging.error(e)
    try:
        ###### WAYPOINTS #######
        if gpx.waypoints:
            for wp in gpx.waypoints:
                nwp = mdlGPXWaypoint()
                # Assignation of all the parameters for each waypoint
                if wp.name:
                    nwp.name = wp.name
                else:
                    nwp.name = 'unk'
                nwp.point = Point(wp.longitude, wp.latitude)
                nwp.gpx_file = file_instance
                nwp.elevation = wp.elevation
                if wp.time:
                    nwp.time = wp.time.replace(tzinfo=pytz.UTC)
                nwp.magnetic_variation = wp.magnetic_variation
                nwp.geoid_height = wp.geoid_height
                nwp.comment = wp.comment
                nwp.description = wp.description
                nwp.source = wp.source
                nwp.symbol = wp.symbol
                nwp.type = wp.type
                nwp.type_of_gpx_fix = wp.type_of_gpx_fix
                nwp.satellites = wp.satellites
                nwp.horizontal_dilution = wp.horizontal_dilution
                nwp.vertical_dilution = wp.vertical_dilution
                nwp.position_dilution = wp.position_dilution
                nwp.age_of_dgps_data = wp.age_of_dgps_data
                nwp.dgps_id = wp.dgps_id
                nwp.extensions = wp.extensions
                nwp.save()
                if wp.link:
                    wpl = mdlGPXWaypointLinks()
                    wpl.link = wp.link
                    wpl.link_text = wp.link_text
                    wpl.link_type = wp.link_type
                    wpl.gpx_waypoint = nwp
                    wpl.save()
        ###### TRACK #######
        if gpx.tracks:
            for track in gpx.tracks:
                nt = mdlGPXTrack()
                nt.gpx_file = file_instance
                nt.name = track.name
                nt.comment = track.comment
                nt.description = track.description
                nt.source = track.source
                #po.link = None
                #po.link_text = None
                if track.number:
                    nt.number = track.number
                #po.link_type = None
                nt.type = track.type
                nt.extensions = track.extensions
                nt.save()
                if track.link:
                    tl = mdlGPXTrackLinks()
                    tl.link = track.link
                    tl.link_text = track.link_text
                    tl.link_type = track.link_type
                    tl.gpx_track = nt
                    tl.save()
                for segment in track.segments:
                    trs = mdlGPXTrackSegment()
                    trs.gpx_track = nt
                    trs.extensions = segment.extensions
                    trs.save()
                    track_list_of_points = []
                    for point in segment.points:
                        po = mdlGPXTrackSegmentPoint()
                        point_in_segment = Point(point.longitude, point.latitude)
                        track_list_of_points.append(point_in_segment.coords)
                        po.gpx_track_segment = trs
                        po.point = point_in_segment
                        po.elevation = point.elevation
                        if point.time:
                            po.time = point.time.replace(tzinfo=pytz.UTC)
                        po.course = point.course
                        po.speed = point.speed
                        po.magnetic_variation = point.magnetic_variation
                        po.geoid_height = point.geoid_height
                        po.name = point.name
                        po.comment = point.comment
                        po.description = point.description
                        po.source = point.source
                        po.symbol = point.symbol
                        po.type = point.type
                        po.type_of_gpx_fix = point.type_of_gpx_fix
                        po.satellites = point.satellites
                        po.horizontal_dilution = point.horizontal_dilution
                        po.vertical_dilution = point.vertical_dilution
                        po.position_dilution = point.position_dilution
                        po.age_of_dgps_data = point.age_of_dgps_data
                        po.dgps_id = point.dgps_id
                        po.extensions = point.extensions
                        po.save()
                    new_track_segment = LineString(track_list_of_points)
                    trs.segmentLine = new_track_segment
                    trs.save()
                nt.track = MultiLineString(new_track_segment)
                nt.save()
        ###### ROUTES #######
        if gpx.routes:
            for route in gpx.routes:
                ro = mdlGPXRoute()
                ro.gpx_file = file_instance
                ro.name = route.name
                ro.comment = route.comment
                ro.description = route.description
                ro.source = route.source
                if route.number:
                    ro.number = route.number
                ro.type = route.type
                ro.extensions = route.extensions
                ro.save()
                if route.link:
                    rl = mdlGPXRouteLinks()
                    rl.link = route.link
                    rl.link_text = route.link_text
                    rl.link_type = route.link_type
                    rl.gpx_route = ro
                    rl.save()
                route_list_of_points = []
                # iterate the route's own points (the original reused the
                # stale `segment` variable left over from the track loop)
                for point in route.points:
                    po = mdlGPXRoutePoint()
                    po.gpx_route = ro
                    point_in_segment = Point(point.longitude, point.latitude)
                    route_list_of_points.append(point_in_segment.coords)
                    po.elevation = point.elevation
                    if point.time:
                        po.time = point.time.replace(tzinfo=pytz.UTC)
                    po.magnetic_variation = point.magnetic_variation
                    po.geoid_height = point.geoid_height
                    po.name = point.name
                    po.comment = point.comment
                    po.description = point.description
                    po.source = point.source
                    po.symbol = point.symbol
                    po.type = point.type
                    po.type_of_gpx_fix = point.type_of_gpx_fix
                    po.satellites = point.satellites
                    po.horizontal_dilution = point.horizontal_dilution
                    po.vertical_dilution = point.vertical_dilution
                    po.position_dilution = point.position_dilution
                    po.age_of_dgps_data = point.age_of_dgps_data
                    po.dgps_id = point.dgps_id
                    po.link_type = point.link_type
                    po.extensions = point.extensions
                    po.save()
                    if point.link:
                        rpl = mdlGPXRoutePointLinks()
                        rpl.link = point.link
                        rpl.link_text = point.link_text
                        rpl.link_type = point.link_type
                        rpl.gpx_route_point = po
                        rpl.save()
                ro.trackLine = LineString(route_list_of_points)
                ro.save()
    except Exception as e:
        if settings.DEBUG:
            print(e)
        logging.error(e)
def handle_zipfile(path, collection, url):
    source = DataSource.objects.update_or_create(
        {
            'url': url,
            'datetime': timezone.now()
        },
        name=f'{collection} GTFS')[0]
    shapes = {}
    service_shapes = {}
    operators = {}
    routes = {}
    services = set()
    headsigns = {}

    with zipfile.ZipFile(path) as archive:

        for line in read_file(archive, 'shapes.txt'):
            shape_id = line['shape_id']
            if shape_id not in shapes:
                shapes[shape_id] = []
            shapes[shape_id].append(
                Point(float(line['shape_pt_lon']), float(line['shape_pt_lat'])))

        for line in read_file(archive, 'agency.txt'):
            operator, created = Operator.objects.get_or_create(
                {
                    'name': line['agency_name'],
                    'region_id': 'LE'
                },
                id=line['agency_id'],
                region__in=['CO', 'UL', 'MU', 'LE', 'NI'])
            if not created and operator.name != line['agency_name']:
                print(operator, line)
            operators[line['agency_id']] = operator

        for line in read_file(archive, 'routes.txt'):
            if line['route_short_name'] and len(line['route_short_name']) <= 8:
                route_id = line['route_short_name']
            elif line['route_long_name'] and len(line['route_long_name']) <= 4:
                route_id = line['route_long_name']
            else:
                route_id = line['route_id'].split()[0]

            service_code = collection + '-' + route_id
            assert len(service_code) <= 24

            defaults = {
                'region_id': 'LE',
                'line_name': line['route_short_name'],
                'description': line['route_long_name'],
                'date': time.strftime('%Y-%m-%d'),
                'mode': MODES.get(int(line['route_type']), ''),
                'current': True,
                'show_timetable': True
            }
            service, created = Service.objects.update_or_create(
                defaults, service_code=service_code, source=source)

            try:
                operator = operators[line['agency_id']]
                if service in services:
                    service.operator.add(operator)
                else:
                    service.operator.set([operator])
            except KeyError:
                pass
            services.add(service)

            route, created = Route.objects.update_or_create(
                {
                    'line_name': line['route_short_name'],
                    'description': line['route_long_name'],
                    'service': service,
                },
                source=source,
                code=line['route_id'],
            )
            if not created:
                route.trip_set.all().delete()
            routes[line['route_id']] = route

        stops, stops_not_created = do_stops(archive)

        calendars = {}
        for line in read_file(archive, 'calendar.txt'):
            calendar = Calendar(
                mon='1' == line['monday'],
                tue='1' == line['tuesday'],
                wed='1' == line['wednesday'],
                thu='1' == line['thursday'],
                fri='1' == line['friday'],
                sat='1' == line['saturday'],
                sun='1' == line['sunday'],
                start_date=parse_date(line['start_date']),
                end_date=parse_date(line['end_date']),
            )
            calendar.save()
            calendars[line['service_id']] = calendar

        for line in read_file(archive, 'calendar_dates.txt'):
            CalendarDate.objects.create(
                calendar=calendars[line['service_id']],
                start_date=parse_date(line['date']),
                end_date=parse_date(line['date']),
                operation=line['exception_type'] == '1')

        trips = {}
        for line in read_file(archive, 'trips.txt'):
            route = routes[line['route_id']]
            trips[line['trip_id']] = Trip(
                route=route,
                calendar=calendars[line['service_id']],
                inbound=line['direction_id'] == '1')
            if route.service_id not in service_shapes:
                service_shapes[route.service_id] = set()
            service_shapes[route.service_id].add(line['shape_id'])
            if line['trip_headsign']:
                if line['route_id'] not in headsigns:
                    headsigns[line['route_id']] = {
                        '0': set(),
                        '1': set(),
                    }
                headsigns[line['route_id']][line['direction_id']].add(
                    line['trip_headsign'])

        for route_id in headsigns:
            route = routes[route_id]
            if not route.service.description:
                origins = headsigns[route_id]['1']
                destinations = headsigns[route_id]['0']
                origin = None
                destination = None
                if len(origins) <= 1 and len(destinations) <= 1:
                    if len(origins) == 1:
                        origin = list(origins)[0]
                    if len(destinations) == 1:
                        destination = list(destinations)[0]

                if origin and ' - ' in origin:
                    route.service.inbound_description = origin
                    route.service.description = origin
                if destination and ' - ' in destination:
                    route.service.outbound_description = destination
                    route.service.description = destination

                if origin and destination and ' - ' not in origin:
                    route.service.description = route.service.outbound_description = \
                        f'{origin} - {destination}'
                    route.service.inbound_description = f'{destination} - {origin}'

                route.service.save(update_fields=[
                    'description', 'inbound_description', 'outbound_description'
                ])

        stop_times = []
        trip_id = None
        trip = None
        for line in read_file(archive, 'stop_times.txt'):
            if trip_id != line['trip_id']:
                if trip:
                    trip.start = stop_times[0].departure
                    trip.end = stop_times[-1].arrival
                    trip.save()
                    for stop_time in stop_times:
                        stop_time.trip = trip
                    StopTime.objects.bulk_create(stop_times)
                    stop_times = []
                    trip = Trip()
                trip_id = line['trip_id']
                trip = trips[trip_id]
            stop = stops.get(line['stop_id'])
            stop_time = StopTime(
                stop=stop,
                arrival=line['arrival_time'],
                departure=line['departure_time'],
                sequence=line['stop_sequence'],
            )
            if stop:
                trip.destination = stop
            elif line['stop_id'] in stops_not_created:
                stop_time.stop_code = stops_not_created[line['stop_id']]
            else:
                stop_time.stop_code = line['stop_id']
                print(line)
            stop_times.append(stop_time)

    trip.start = stop_times[0].departure
    trip.end = stop_times[-1].arrival
    trip.save()
    for stop_time in stop_times:
        stop_time.trip = trip
    StopTime.objects.bulk_create(stop_times)

    for service in services:
        if service.id in service_shapes:
            linestrings = [
                LineString(*shapes[shape])
                for shape in service_shapes[service.id]
                if shape in shapes
            ]
            service.geometry = MultiLineString(*linestrings)
            service.save(update_fields=['geometry'])

        groupings = get_stop_usages(Trip.objects.filter(route__service=service))

        service.stops.clear()
        stop_usages = [
            StopUsage(service=service, stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status,
                      direction='outbound', order=i)
            for i, stop_time in enumerate(groupings[0])
        ] + [
            StopUsage(service=service, stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status,
                      direction='inbound', order=i)
            for i, stop_time in enumerate(groupings[1])
        ]
        StopUsage.objects.bulk_create(stop_usages)

        service.region = Region.objects.filter(
            adminarea__stoppoint__service=service
        ).annotate(
            Count('adminarea__stoppoint__service')
        ).order_by('-adminarea__stoppoint__service__count').first()
        if service.region:
            service.save(update_fields=['region'])

        service.update_search_vector()

    for operator in operators.values():
        operator.region = Region.objects.filter(
            adminarea__stoppoint__service__operator=operator
        ).annotate(
            Count('adminarea__stoppoint__service__operator')
        ).order_by('-adminarea__stoppoint__service__operator__count').first()
        if operator.region_id:
            operator.save(update_fields=['region'])

    print(source.service_set.filter(current=True).exclude(
        route__in=routes.values()).update(current=False))
    print(source.service_set.filter(current=True).exclude(
        route__trip__isnull=False).update(current=False))
    print(source.route_set.exclude(
        id__in=(route.id for route in routes.values())).delete())

    StopPoint.objects.filter(active=False, service__current=True).update(active=True)
    StopPoint.objects.filter(active=True, service__isnull=True).update(active=False)
def convert_to_MultiLineString(self):
    lines = [self._geometry_map[id] for id in self._id_sequence]
    multiline = MultiLineString(lines)
    multiline.set_srid(lines[0].get_srid())
    return multiline
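`set_srid()`/`get_srid()` are the old GEOSGeometry accessor methods; on current Django they have been removed in favour of the `srid` property. An equivalent sketch for modern versions:

def convert_to_MultiLineString(self):
    # Same behaviour on current Django: pass the SRID at construction
    # time instead of using the removed set_srid()/get_srid() accessors.
    lines = [self._geometry_map[id] for id in self._id_sequence]
    return MultiLineString(lines, srid=lines[0].srid)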
def render_static(request, height=None, width=None, format='png',
                  background='satellite', bounds=None, center=None,
                  render_srid=3857):

    # width and height
    width = int(width)
    height = int(height)
    if width > settings.MAX_IMAGE_DIMENSION or \
            height > settings.MAX_IMAGE_DIMENSION or \
            width <= 1 or height <= 1:
        logging.debug("Invalid size")
        return HttpResponseBadRequest(
            "Invalid image size, both dimensions must be in range %i-%i"
            % (1, settings.MAX_IMAGE_DIMENSION))

    # image format
    if format not in IMAGE_FORMATS:
        logging.error("unknown image format %s" % format)
        return HttpResponseBadRequest(
            "Unknown image format, available formats: " + ", ".join(IMAGE_FORMATS))
    if format.startswith('png'):
        mimetype = 'image/png'
    elif format.startswith('jpeg'):
        mimetype = 'image/jpeg'

    # bounds
    bounds_box = None
    if bounds:
        bounds_components = bounds.split(',')
        if len(bounds_components) != 4:
            return HttpResponseBadRequest(
                "Invalid bounds, must be 4 , separated numbers")
        bounds_components = [float(f) for f in bounds_components]

        if not (-180 < bounds_components[0] < 180) or not (-180 < bounds_components[2] < 180):
            logging.error("x out of range %f or %f"
                          % (bounds_components[0], bounds_components[2]))
            return HttpResponseBadRequest(
                "x out of range %f or %f"
                % (bounds_components[0], bounds_components[2]))
        if not (-90 < bounds_components[1] < 90) or not (-90 < bounds_components[3] < 90):
            logging.error("y out of range %f or %f"
                          % (bounds_components[1], bounds_components[3]))
            return HttpResponseBadRequest(
                "y out of range %f or %f"
                % (bounds_components[1], bounds_components[3]))

        ll = Point(bounds_components[0], bounds_components[1], srid=4326)
        ll.transform(render_srid)
        ur = Point(bounds_components[2], bounds_components[3], srid=4326)
        ur.transform(render_srid)
        bounds_box = mapnik.Box2d(ll.x, ll.y, ur.x, ur.y)
    elif center:
        center_components = center.split(',')
        if len(center_components) != 3:
            return HttpResponseBadRequest()
        lon = float(center_components[0])
        lat = float(center_components[1])
        zoom = int(center_components[2])
        # todo calc bounds from center and zoom

    # baselayer
    if background not in settings.BASE_LAYERS and background != 'none':
        return HttpResponseNotFound("Background not found")

    # GeoJSON post data
    if request.method == "POST" and len(request.body):
        input_data = json.loads(request.body)
    else:
        input_data = None

    if not bounds and not center and not input_data:
        return HttpResponseBadRequest("Bounds, center, or post data is required.")

    # initialize map
    m = mapnik.Map(width, height)
    m.srs = '+init=epsg:' + str(render_srid)

    # add a tile source as a background
    if background != "none":
        background_file = settings.BASE_LAYERS[background]
        background_style = mapnik.Style()
        background_rule = mapnik.Rule()
        background_rule.symbols.append(mapnik.RasterSymbolizer())
        background_style.rules.append(background_rule)
        m.append_style('background style', background_style)
        tile_layer = mapnik.Layer('background')
        tile_layer.srs = '+init=epsg:' + str(render_srid)
        tile_layer.datasource = mapnik.Gdal(base=settings.BASE_LAYER_DIR,
                                            file=background_file)
        tile_layer.styles.append('background style')
        m.layers.append(tile_layer)

    # add features from geojson
    if input_data and input_data['type'] == "Feature":
        features = [input_data]
    elif input_data and input_data['type'] == "FeatureCollection":
        if 'features' not in input_data:
            return HttpResponseBadRequest()
        features = input_data['features']
    else:
        features = []

    logging.debug("Adding %d features to map" % len(features))

    geometries = []
    point_features = []
    fid = 0
    for feature in features:
        if 'geometry' not in feature:
            logging.debug("feature does not have geometry")
            return HttpResponseBadRequest("Feature does not have a geometry")
        if 'type' not in feature['geometry']:
            logging.debug("geometry does not have type")
            return HttpResponseBadRequest("Geometry does not have a type")

        fid += 1
        style_name = str(fid)

        if feature['geometry']['type'] == 'Point':
            point_features.append(feature)
        elif feature['geometry']['type'] in ('LineString', 'MultiLineString'):
            if feature['geometry']['type'] == 'LineString':
                geos_feature = LineString(feature['geometry']['coordinates'])
            elif feature['geometry']['type'] == 'MultiLineString':
                rings = feature['geometry']['coordinates']
                rings = [[(c[0], c[1]) for c in r] for r in rings]
                if len(rings) == 1:
                    geos_feature = LineString(rings[0])
                else:
                    linestrings = []
                    for ring in rings:
                        try:
                            linestrings.append(LineString(ring))
                        except Exception as e:
                            logging.error("Error adding ring: %s", e)
                    geos_feature = MultiLineString(linestrings)

            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)

            style = mapnik.Style()
            line_rule = mapnik.Rule()
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            line_rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(line_rule)
            m.append_style(style_name, style)

            wkt = geos_feature.wkt
            line_layer = mapnik.Layer(style_name + ' layer')
            line_layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            line_layer.styles.append(style_name)
            line_layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(line_layer)
        elif feature['geometry']['type'] == 'Polygon':
            geos_feature = GEOSGeometry(json.dumps(feature['geometry']))
            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)

            style = mapnik.Style()
            rule = mapnik.Rule()
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            rule.symbols.append(polygon_symbolizer(style_dict))
            rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(rule)
            m.append_style(style_name, style)

            wkt = geos_feature.wkt
            layer = mapnik.Layer(style_name + ' layer')
            layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            layer.styles.append(style_name)
            layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(layer)
def import_from_file(osm_data, resume: bool, rerun: bool):
    Settings = IngestSettings(
        max_distance=Distance(km=50),
        max_segments=300,
        max_concurrent=40,
        quality_settings=DefaultQualitySettings,
        location_filter=None,
    )
    digest = sha256_digest(osm_data)
    print('Digest: ', digest)
    previous_import = e.Import.objects.filter(
        complete=False, sha256_sum=digest).order_by('-updated_at').first()
    if not resume or previous_import is None:
        if e.Import.objects.filter(sha256_sum=digest, complete=True):
            print('Import already done!')
            if rerun:
                e.Import.objects.filter(sha256_sum=digest, complete=True).delete()
            else:
                return
        # e.Import.objects.all().update(active=False)
        import_obj = e.Import(active=True, complete=False, border=Polygon(),
                              name=str(osm_data), sha256_sum=digest)
        import_obj.save()
        digests = set()
    else:
        import_obj = previous_import
        if not click.confirm(
                f'Resuming import {import_obj.name}, last modified {import_obj.updated_at} '
                f'currently containing {import_obj.networks.count()} trail networks'):
            return 1
        # TODO: probably n queries
        digests = {n.digest for n in import_obj.networks.all()}
        print(f'{len(digests)} loaded')
    loader = OSMIngestor(Settings)
    loader.load_osm(osm_data, extra_links=[(885729040, 827103027)])
    networks = []
    for network in tqdm(loader.trail_networks(already_processed=digests)):
        try:
            multiline_strs = MultiLineString([
                LineString(trail.points()) for trail in network.trail_segments()
            ])
            border = multiline_strs.convex_hull
            simplified = multiline_strs  # .simplify(tolerance=0.01)
            if isinstance(simplified, LineString):
                simplified = MultiLineString([simplified])
            # TODO: look for polygons that intersect this one
            trailheads = MultiPoint([t.node.to_point() for t in network.trailheads])
            est_network = e.TrailNetwork(
                name=network.name or '',
                source=import_obj,
                trails=simplified,
                poly=border,
                total_length=network.total_length(),
                graph=pickle.dumps(network.graph),
                area=border.area,
                trailheads=trailheads,
                digest=network.digest)
            est_network.save()
            networks.append(est_network)
        except Exception as ex:
            import pdb
            pdb.set_trace()
            print(ex)
    import_obj.complete = True
    if networks:
        import_border = MultiPolygon([n.poly for n in networks])
        import_obj.border = import_border.convex_hull
    import_obj.save()
def test_multilinestring(self):
    geom = MultiLineString(LineString((0, 0), (0, 1), (1, 1), (1, 0)))
    self.assertEqual(self.parser.filter_geom('geom', geom),
                     LineString((0, 0), (0, 1), (1, 1), (1, 0)))
    self.assertFalse(self.parser.warnings)
def SaveGPXtoModel(f, owner):
    # parse gpx file
    gpx = parse(f.read().decode('utf-8'))
    f.seek(0)
    # get moving data
    moving_data = gpx.get_moving_data()
    # generate hash
    file_hash = GenerateFileHash(f, owner.username)
    # import track data
    if gpx.tracks:
        for track in gpx.tracks:
            # presumably fiona.open (or similar) imported as `open`:
            # the builtin open() takes no `layer` argument
            tracks = open(f.temporary_file_path(), layer='tracks')
            # generate multi line string
            multi_line_string = []
            for line_string in tracks[0]['geometry']['coordinates']:
                multi_line_string.append(LineString(line_string))
            # create new track
            new_track = Track(
                file_hash=file_hash,
                owner=owner,
                start=utc.localize(track.get_time_bounds().start_time),
                finish=utc.localize(track.get_time_bounds().end_time),
                average_speed=((moving_data[2] / 1000) / (moving_data[0] / 3600)),
                duration=moving_data[0] / 3600,
                distance=moving_data[2] / 1000,
                track=MultiLineString(multi_line_string))
            new_track.save()
            for segment_id, segment in enumerate(track.segments):
                for point_id, point in enumerate(segment.points):
                    speed = segment.get_speed(point_id)
                    if point_id == 0:
                        speed = 0
                        if segment_id == 0:
                            point_type = 'S'
                        else:
                            point_type = 'R'
                    elif point_id == len(segment.points) - 1:
                        if segment_id == len(track.segments) - 1:
                            point_type = 'F'
                        else:
                            point_type = 'P'
                    else:
                        point_type = 'A'
                    new_point = TrackPoint(
                        track=new_track,
                        point_type=point_type,
                        point=Point(point.longitude, point.latitude),
                        time=point.time,
                        elevation=point.elevation,
                        segment_id=segment_id,
                        speed=speed,
                    )
                    new_point.save()
    return new_track
def handle_archive(self, archive_name):
    self.routes = {}
    self.calendars = {}
    if 'ulb' in archive_name.lower():
        source_name = 'ULB'
    else:
        source_name = 'MET'
    self.source, source_created = DataSource.objects.get_or_create(name=source_name)
    self.source.datetime = datetime.fromtimestamp(
        os.path.getmtime(archive_name), timezone.utc)
    with zipfile.ZipFile(archive_name) as archive:
        for filename in archive.namelist():
            if filename.endswith('.cif'):
                with archive.open(filename) as open_file:
                    self.handle_file(open_file)
    assert self.stop_times == []
    for route in self.routes.values():
        groupings = get_stop_usages(route.trip_set.all())
        route.service.stops.clear()
        stop_usages = [
            StopUsage(service=route.service, stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status,
                      direction='outbound', order=i)
            for i, stop_time in enumerate(groupings[0])
        ] + [
            StopUsage(service=route.service, stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status,
                      direction='inbound', order=i)
            for i, stop_time in enumerate(groupings[1])
        ]
        StopUsage.objects.bulk_create(stop_usages)
        # self.stops doesn't contain all stops, and has latlongs in the
        # Irish Grid projection
        stops = StopPoint.objects.in_bulk(
            stop_usage.stop_id for stop_usage in stop_usages)
        line_strings = []
        for pattern in get_journey_patterns(route.trip_set.all()):
            points = (stops[stop_code].latlong
                      for stop_code in pattern if stop_code in stops)
            line_strings.append(LineString(*points))
        route.service.geometry = MultiLineString(*line_strings)
    services = {
        route.service.id: route.service for route in self.routes.values()
    }.values()
    Service.objects.bulk_update(services, fields=[
        'geometry', 'description', 'outbound_description', 'inbound_description'
    ])
    for service in services:
        service.update_search_vector()
    self.source.route_set.exclude(code__in=self.routes.keys()).delete()
    self.source.service_set.filter(current=True).exclude(
        service_code__in=self.routes.keys()).update(current=False)
    self.source.save(update_fields=['datetime'])
def test_multilinestring_with_hole(self):
    geom = MultiLineString(LineString((0, 0), (0, 1)),
                           LineString((100, 100), (100, 101)))
    self.assertEqual(self.parser.filter_geom('geom', geom),
                     LineString((0, 0), (0, 1), (100, 100), (100, 101)))
    self.assertTrue(self.parser.warnings)
            # fragment: the enclosing loop, try blocks and the opening of
            # the Trip(...) constructor sit above this excerpt
            )
            if not regular:
                trip.date = today
                trip.interval_min = random.randint(0, 6)
                trip.interval_max = random.randint(0, 6)
            else:
                trip.dows = [dow for dow in range(0, 7) if random.random() < 0.5]
                if not trip.dows:
                    trip.dows = [1]
            if trip_type != Trip.OFFER:
                demand = TripDemand(radius=random.choice(DEMAND_RADIUS_CHOICE))
                demand.save()
                trip.demand = demand
            if trip_type != Trip.DEMAND:
                offer = TripOffer(radius=random.choice(OFFER_RADIUS_CHOICE),
                                  route=MultiLineString(
                                      [GEOSGeometry(json.get('geometry')[0])]))
                offer.save()
                trip.offer = offer
            trip.save()
        except Exception, e:
            print "!!!!!!!!!!!!!!!!!!!!!", e
        #if index > 10:
        #    break
finally:
    route_file.close()
print "OK\n"
def handle_service(self, filename, transxchange, txc_service, today, stops):
    if txc_service.operating_period.end:
        if txc_service.operating_period.end < today:
            print(filename, txc_service.operating_period.end)
            return
        elif txc_service.operating_period.end < txc_service.operating_period.start:
            return

    operators = self.get_operators(transxchange, txc_service)

    if not operators:
        basename = os.path.basename(filename)  # e.g. 'KCTB_'
        if basename[4] == '_':
            maybe_operator_code = basename[:4]
            if maybe_operator_code.isupper() and maybe_operator_code.isalpha():
                try:
                    operators = [Operator.objects.get(id=maybe_operator_code)]
                except Operator.DoesNotExist:
                    pass

    if self.is_tnds() and self.source.name != 'L':
        if operators and all(operator.id in self.open_data_operators for operator in operators):
            return

    linked_services = []

    description = self.get_description(txc_service)
    if description == 'Origin - Destination':
        description = ''

    if re.match(r'^P[BCDFGHKM]\d+:\d+.*.$', txc_service.service_code):
        unique_service_code = txc_service.service_code
    else:
        unique_service_code = None

    for line in txc_service.lines:
        existing = None
        service_code = None

        if unique_service_code:
            # first try getting by BODS profile compliant service code
            existing = Service.objects.filter(
                service_code=unique_service_code,
                line_name__iexact=line.line_name
            ).order_by('-current', 'id').first()

        if not existing and operators and line.line_name:
            if self.source.name in {'Go South West', 'Oxford Bus Company'}:
                assert operators[0].parent
                existing = Service.objects.filter(operator__parent=operators[0].parent)

                if self.source.name == 'Oxford Bus Company':
                    if txc_service.service_code.startswith('T'):
                        operators = Operator.objects.filter(id='THTR')
                    elif txc_service.service_code.startswith('C'):
                        operators = Operator.objects.filter(id='CSLB')
                elif self.source.name == 'Go South West':
                    if txc_service.service_code.startswith('GC'):
                        operators = Operator.objects.filter(id='TFCN')
            elif all(operator.parent == 'Go South Coast' for operator in operators):
                existing = Service.objects.filter(operator__parent='Go South Coast')
            elif self.source.name.startswith('Stagecoach'):
                existing = Service.objects.filter(
                    Q(source=self.source) | Q(operator__in=operators))
                if description:
                    existing = existing.filter(description=description)
            else:
                existing = Service.objects.filter(operator__in=operators)

            if len(transxchange.services) == 1:
                has_stop_time = Exists(
                    StopTime.objects.filter(stop__in=stops, trip__route__service=OuterRef('id')))
                has_stop_usage = Exists(
                    StopUsage.objects.filter(stop__in=stops, service=OuterRef('id')))
                has_no_route = ~Exists(Route.objects.filter(service=OuterRef('id')))
                existing = existing.filter(has_stop_time | (has_stop_usage & has_no_route))
            elif len(txc_service.lines) == 1:
                existing = existing.filter(
                    Exists(Route.objects.filter(
                        service_code=txc_service.service_code, service=OuterRef('id'))))
            elif description:
                existing = existing.filter(description=description)

            existing = existing.filter(
                line_name__iexact=line.line_name).order_by('-current', 'id').first()

        if self.is_tnds():
            if self.should_defer_to_other_source(operators, line.line_name):
                continue
            service_code = get_service_code(filename)
            if service_code is None:
                service_code = txc_service.service_code
            if not existing:
                # assume service code is at least unique within a TNDS region
                existing = self.source.service_set.filter(service_code=service_code).first()
        elif unique_service_code:
            service_code = unique_service_code

        if existing:
            service = existing
        else:
            service = Service()

        service.line_name = line.line_name
        service.date = today
        service.current = True
        service.source = self.source
        service.show_timetable = True

        if service_code:
            service.service_code = service_code
        if description:
            service.description = description

        line_brand = line.line_brand
        if txc_service.marketing_name and txc_service.marketing_name != 'CornwallbyKernow':
            line_brand = txc_service.marketing_name
            if line.line_name in line_brand:
                line_brand_parts = line_brand.split()
                if line.line_name in line_brand_parts:
                    line_brand_parts.remove(line.line_name)
                    line_brand = ' '.join(line_brand_parts)
            print(line_brand)
        if line_brand:
            service.line_brand = line_brand

        if txc_service.mode:
            service.mode = txc_service.mode
        if self.region_id:
            service.region_id = self.region_id
        if self.service_descriptions:  # NCSD
            service.outbound_description, service.inbound_description = \
                self.get_service_descriptions(filename)
            service.description = service.outbound_description or service.inbound_description

        service_created = not service.id
        service.save()

        if not service_created:
            if '_' in service.slug or '-' not in service.slug or existing and not existing.current:
                service.slug = ''
                service.save(update_fields=['slug'])

        if operators:
            if service_created:
                service.operator.set(operators)
            else:
                if self.source.name in {'Oxford Bus Company', 'Go South West'}:
                    pass  # leave the operators as they are
                elif service.id in self.service_ids or all(
                        o.parent == 'Go South Coast' for o in operators):
                    service.operator.add(*operators)
                else:
                    service.operator.set(operators)
        self.service_ids.add(service.id)
        linked_services.append(service.id)

        journeys = transxchange.get_journeys(txc_service.service_code, line.id)

        if journeys:
            journey = journeys[0]

            ticket_machine_service_code = journey.ticket_machine_service_code
            if ticket_machine_service_code and ticket_machine_service_code != line.line_name:
                try:
                    ServiceCode.objects.create(
                        scheme='SIRI', code=ticket_machine_service_code, service=service)
                except IntegrityError:
                    pass

            # a code used in Traveline Cymru URLs:
            if self.source.name == 'W':
                private_code = journey.private_code
                if private_code and ':' in private_code:
                    ServiceCode.objects.update_or_create(
                        {'code': private_code.split(':', 1)[0]},
                        service=service, scheme='Traveline Cymru')

        # timetable data:
        route_defaults = {
            'line_name': line.line_name,
            'line_brand': line_brand,
            'start_date': txc_service.operating_period.start,
            'end_date': txc_service.operating_period.end,
            'dates': txc_service.operating_period.dates(),
            'service': service,
            'revision_number': transxchange.attributes['RevisionNumber'],
            'service_code': txc_service.service_code,
        }
        if description:
            route_defaults['description'] = description

        geometry = []
        if transxchange.route_sections:
            patterns = {
                journey.journey_pattern.id: journey.journey_pattern
                for journey in journeys
            }
            routes = [pattern.route_ref for pattern in patterns.values() if pattern.route_ref]
            if routes:
                routes = [
                    transxchange.routes[route_id]
                    for route_id in transxchange.routes if route_id in routes
                ]
                for route in routes:
                    for section_ref in route.route_section_refs:
                        section = transxchange.route_sections[section_ref]
                        for link in section.links:
                            if link.track:
                                geometry.append(link.track)
            else:
                route_links = {}
                for section in transxchange.route_sections.values():
                    for link in section.links:
                        route_links[link.id] = link
                for journey in journeys:
                    if journey.journey_pattern:
                        for section in journey.journey_pattern.sections:
                            for link in section.timinglinks:
                                link = route_links[link.route_link_ref]
                                if link.track:
                                    geometry.append(link.track)
            if geometry:
                geometry = MultiLineString(geometry).simplify()
                if not isinstance(geometry, MultiLineString):
                    geometry = MultiLineString(geometry)
                route_defaults['geometry'] = geometry

        route_code = filename
        if len(transxchange.services) > 1:
            route_code += f'#{txc_service.service_code}'
        if len(txc_service.lines) > 1:
            route_code += f'#{line.id}'

        route, route_created = Route.objects.update_or_create(
            route_defaults, source=self.source, code=route_code)
        self.route_ids.add(route.id)
        if not route_created:
            # if 'opendata.ticketer' in self.source.url and route.service_id == service_id:
            #     continue
            route.trip_set.all().delete()

        self.handle_journeys(route, stops, journeys, txc_service, line.id)

        service.stops.clear()
        outbound, inbound = get_stop_usages(Trip.objects.filter(route__service=service))

        changed_fields = []

        if self.source.name.startswith('Arriva ') or self.source.name == 'Yorkshire Tiger':
            if outbound:
                changed = 0
                origin_stop = outbound[0].stop
                destination_stop = outbound[-1].stop
                if txc_service.origin in origin_stop.common_name:
                    if origin_stop.locality.name not in txc_service.origin:
                        txc_service.origin = f'{origin_stop.locality.name} {txc_service.origin}'
                        changed += 1
                if txc_service.destination in destination_stop.common_name:
                    if destination_stop.locality.name not in txc_service.destination:
                        txc_service.destination = \
                            f'{destination_stop.locality.name} {txc_service.destination}'
                        changed += 1
                if changed == 2:
                    service.description = f'{txc_service.origin} - {txc_service.destination}'
                    changed_fields.append('description')

        stop_usages = [
            StopUsage(service=service, stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status, direction='outbound', order=i)
            for i, stop_time in enumerate(outbound)
        ] + [
            StopUsage(service=service, stop_id=stop_time.stop_id,
                      timing_status=stop_time.timing_status, direction='inbound', order=i)
            for i, stop_time in enumerate(inbound)
        ]
        StopUsage.objects.bulk_create(stop_usages)

        if outbound:
            outbound = Grouping(txc_service, outbound[0].stop, outbound[-1].stop)
            outbound_description = str(outbound)
            if outbound_description != service.outbound_description:
                service.outbound_description = outbound_description
                changed_fields.append('outbound_description')
        if inbound:
            inbound = Grouping(txc_service, inbound[0].stop, inbound[-1].stop)
            inbound_description = str(inbound)
            if inbound_description != service.inbound_description:
                service.inbound_description = inbound_description
                changed_fields.append('inbound_description')

        if changed_fields:
            service.save(update_fields=changed_fields)

        service_code = service.service_code
        if service_code in self.corrections:
            corrections = {}
            for field in self.corrections[service_code]:
                if field == 'operator':
                    service.operator.set(self.corrections[service_code][field])
                else:
                    corrections[field] = self.corrections[service_code][field]
            Service.objects.filter(service_code=service_code).update(**corrections)

        service.update_search_vector()

    # link every pair of services that came from the same TransXChange document
    if len(linked_services) > 1:
        for i, from_service in enumerate(linked_services):
            for to_service in linked_services[i + 1:]:
                kwargs = {
                    'from_service_id': from_service,
                    'to_service_id': to_service,
                }
                if not ServiceLink.objects.filter(**kwargs).exists():
                    ServiceLink.objects.create(**kwargs, how='also')

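# A small sketch of why handle_service() re-wraps the simplified geometry:
# GEOS's simplify() is not guaranteed to preserve the collection type (a
# single-part MultiLineString can come back as a plain LineString), so the
# result is coerced back to a MultiLineString before being stored on the
# Route. The coordinates below are invented.
from django.contrib.gis.geos import LineString, MultiLineString

tracks = [LineString((0, 0), (0.1, 0.100001), (0.2, 0.2))]
geometry = MultiLineString(tracks).simplify(0.01)
if not isinstance(geometry, MultiLineString):
    geometry = MultiLineString(geometry)
assert isinstance(geometry, MultiLineString)
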
def handle(self, *args, **options):
    session = requests.Session()

    # access_token = ''
    # params = {
    #     'access_token': access_token,
    #     'geometries': 'geojson',
    #     'overview': 'full'
    # }
    # for service in Service.objects.filter(operator='LYNX', current=True):
    #     print(service)
    #     linestrings = (self.get_linestring(session, access_token, params, ls)
    #                    for ls in service.geometry)
    #     linestrings = (ls for ls in linestrings if ls)
    #     service.geometry = MultiLineString(*linestrings).simplify()
    #     service.save()

    for service in Service.objects.filter(current=True, operator='SNDR'):
        stopses = set()

        for file in service.get_files_from_zipfile():
            timetable = txc.Timetable(file)
            for grouping in timetable.groupings:
                for journeypattern in grouping.journeypatterns:
                    stop_ids = [
                        journeypattern.sections[0].timinglinks[0].origin.stop.atco_code
                    ]
                    for section in journeypattern.sections:
                        for timinglink in section.timinglinks:
                            stop_ids.append(timinglink.destination.stop.atco_code)
                    stopses.add(','.join(stop_ids))

        stopses = [string.split(',') for string in stopses]

        linestrings = []
        for stop_ids in stopses:
            stops = StopPoint.objects.in_bulk(stop_ids)

            gpx = gpxpy.gpx.GPX()
            gpx_track = gpxpy.gpx.GPXTrack()
            gpx.tracks.append(gpx_track)
            gpx_segment = gpxpy.gpx.GPXTrackSegment()
            gpx_track.segments.append(gpx_segment)

            for stop_id in stop_ids:
                stop = stops[stop_id]
                point = gpxpy.gpx.GPXTrackPoint(stop.latlong.y, stop.latlong.x)
                gpx_segment.points.append(point)

            xml = gpx.to_xml()

            response = session.post(
                'https://bustimes.org/match?type=json&points_encoded=false',
                headers={'Content-Type': 'application/gpx+xml'},
                data=xml)

            if response.ok:
                json = response.json()
                if json['map_matching']['distance']:
                    geometry = json['paths'][0]['points']
                    linestrings.append(GEOSGeometry(str(geometry)))

        service.geometry = MultiLineString(*linestrings)
        service.save()
        print(service.pk)

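# The GPX payload the command above builds, isolated as a hedged sketch so it
# can be run without Django or a live matching endpoint. gpxpy's GPX/GPXTrack/
# GPXTrackSegment/GPXTrackPoint classes are its real API; the coordinates are
# made up, and the response handling is omitted because the endpoint's JSON
# shape is specific to the deployed map-matching service.
import gpxpy.gpx

def stops_to_gpx(latlongs):
    """Build a one-track, one-segment GPX document from (lat, lon) pairs."""
    gpx = gpxpy.gpx.GPX()
    track = gpxpy.gpx.GPXTrack()
    gpx.tracks.append(track)
    segment = gpxpy.gpx.GPXTrackSegment()
    track.segments.append(segment)
    for lat, lon in latlongs:
        segment.points.append(gpxpy.gpx.GPXTrackPoint(lat, lon))
    return gpx.to_xml()

xml = stops_to_gpx([(52.628, 1.293), (52.630, 1.301)])
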
def import_helsinki_units(self, filename):
    ds = DataSource(filename)
    assert len(ds) == 1
    uid = self.get_lowest_high_unit_id()

    def get_lighting(p):
        return HELSINKI_LIGHTING[p.get('VALAISTUS')]

    def get_technique(p):
        return HELSINKI_TECHNIQUES[p.get('TYYLI')]

    def get_length(p):
        length = p.get('PITUUS')
        if len(length) == 0:
            length = None
        return length

    def get_maintenance_group(p):
        return HELSINKI_GROUPS[p.get('NIMI')]

    created = 0
    updated = 0
    lyr = ds[0]
    for feat in lyr:
        properties = feat
        maintenance_organization = '91'
        if properties.get('NIMI') == 'Siltamäki':
            maintenance_organization = '92'
        elif properties.get('NIMI').find('Pirttimäki') == 0:
            maintenance_organization = '49'
        try:
            maintenance_group = get_maintenance_group(properties)
        except KeyError:
            print('Missing maintenance group for', properties.get('NIMI'), ', skipping')
            continue
        extra_fields = {
            'lighting': get_lighting(properties),
            'skiing_technique': get_technique(properties),
            'length': get_length(properties),
            'maintenance_group': maintenance_group,
            'maintenance_organization': maintenance_organization
        }
        if isinstance(feat.geom, django.contrib.gis.gdal.geometries.MultiLineString):
            multilinestring = GEOSGeometry(feat.geom.wkt)
        else:
            multilinestring = MultiLineString(GEOSGeometry(feat.geom.wkt))
        converted_multilinestring_coords = []
        for line in multilinestring:
            converted_multilinestring_coords.append(
                LineString(tuple(
                    helsinki_coordinates_to_gk25(point[0], point[1])
                    for point in line)))
        converted_multilinestring = MultiLineString(
            converted_multilinestring_coords, srid=3879)
        street_address = properties.get('address')
        www_url = properties.get('www_url')
        address_zip = properties.get('zip')
        if len(street_address) == 0:
            street_address = None
        if len(www_url) == 0:
            www_url = None
        if len(address_zip) == 0:
            address_zip = None
        municipality = properties.get('city')
        if municipality is None or len(municipality) == 0:
            municipality = 'helsinki'
        point = None
        if street_address:
            point = self.geocode_street_address(street_address, municipality)
        if point is None:
            # fall back to the first vertex of the route
            point = Point(converted_multilinestring[0][0], srid=3879)
        defaults = self.unit_defaults(converted_multilinestring, point, extra_fields,
                                      street_address, address_zip, www_url)
        defaults['municipality_id'] = municipality
        defaults['organization_id'] = 91
        uid, did_create, unit = self._create_or_update_unit(
            uid, properties['NIMI'], defaults)
        if did_create:
            created += 1
        else:
            updated += 1
        if street_address:
            self.add_public_transportation_connection(
                unit, street_address, municipality)
    _report_counts('helsinki', created, updated)

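# A hedged sketch of the per-vertex reprojection pattern used above: rebuild
# each LineString through a coordinate transform, then reassemble a
# MultiLineString with the target SRID. to_gk25() is a stand-in for the
# importer's helsinki_coordinates_to_gk25(), whose real implementation is
# defined elsewhere; the sample coordinates are invented.
from django.contrib.gis.geos import LineString, MultiLineString

def to_gk25(x, y):
    # placeholder transform; the real function maps the Helsinki local grid
    # to ETRS-GK25 (EPSG:3879)
    return x, y

def convert_multilinestring(mls):
    return MultiLineString(
        [LineString([to_gk25(x, y) for x, y in line]) for line in mls],
        srid=3879,
    )

src = MultiLineString(LineString((2.55e7, 6.67e6), (2.551e7, 6.671e6)))
converted = convert_multilinestring(src)
assert converted.srid == 3879
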
def save(self, *args, **kwargs):
    if self.geom:
        self.centroid = self.geom.centroid
    super(ReserveAirspace, self).save(*args, **kwargs)

    if self.log:
        from django.contrib.gis.geos import LineString, MultiLineString
        url = self.log.path
        data = mission_planner_logs(url)
        line = LineString(data)
        multi_line = MultiLineString(line)
        # the convex hull turns the flight-log multilinestring into a polygon
        multi_line_to_polygon = multi_line.convex_hull
        self.geom = multi_line_to_polygon
        # TODO: LOG UPLOAD: still retain the main log upload, or merge the
        # polygon into the multilinestring in detail_view
        self.centroid = self.geom.centroid

    # prefix the application number according to the organisation type
    prefixes = {
        "ROC": "FP/CAA/ROC/",
        "REC": "FP/CAA/REC/",
        "PVT": "FP/CAA/PVT/",
        "ATO": "FP/CAA/ATO/",
        "CLB": "FP/CAA/CLB/",
    }
    organization_type = self.created_by.userprofile.organization.organization_type
    if organization_type in prefixes:
        self.application_number = prefixes[organization_type] + str(self.pk)

    # Only create a FlightLog within six minutes of creation; after that the
    # model can't add another one.
    # TODO: what if you create, save and edit within those six minutes? It
    # turned out that did save two FlightLogs -- fixed by checking whether the
    # log already exists. Perhaps move this into a model property?
    saving_time = self.date_modified - self.date_created
    saving_time_seconds = saving_time.total_seconds()
    if (saving_time_seconds / 60) < 6:
        from flight_plans.models import FlightLog
        get_log = FlightLog.objects.filter(reserve_airspace=self.pk)
        if not get_log:
            FlightLog.objects.create(reserve_airspace_id=self.pk,
                                     user_id=self.created_by.pk)

    # if self.status == 1:
    #     from notifications.send_a_notification import send_a_notification
    #     send_a_notification(self.created_by, "Your flight has been Rejected Flight",
    #                         str(self.comments))
    # elif self.status == 2:
    #     from notifications.send_a_notification import send_a_notification
    #     x = mark_safe('<a href="/applications/airspace/"> Go To Airspace</a>')
    #     send_a_notification(self.created_by, x, str(self.comments))

    # persist the fields set above (the original ended with a bare
    # 'super(ReserveAirspace, self)', which did nothing)
    super(ReserveAirspace, self).save(*args, **kwargs)

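# A hedged sketch of the geometry step in save() above: a flight-log track is
# reduced to an airspace polygon by taking the convex hull of its
# MultiLineString. The track points are invented; mission_planner_logs()
# would normally supply them from the uploaded log.
from django.contrib.gis.geos import LineString, MultiLineString

track = LineString([(36.80, -1.30), (36.90, -1.25), (36.85, -1.20), (36.80, -1.30)])
hull = MultiLineString(track).convex_hull  # a Polygon covering the whole track
assert hull.geom_type == 'Polygon'
centroid = hull.centroid
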
def get(self, request, *args, **kwargs):
    lat1 = request.GET.get('blat')
    lon1 = request.GET.get('blon')
    lat2 = request.GET.get('elat')
    lon2 = request.GET.get('elon')
    id1 = request.GET.get('bid')
    id2 = request.GET.get('eid')

    # if no intersection id was given, find the nearest one within 1 km of each
    # endpoint (relies on the legacy GeoQuerySet distance()/order_by('distance') API)
    if id1 is None:
        qs = Bikeintersections.objects.filter(
            the_geom__dwithin=(Point(float(lon1), float(lat1), srid=4326), 1000))
        qs = qs.distance(Point(float(lon1), float(lat1), srid=4326))
        qs = qs.order_by('distance')
        try:
            id1 = qs[0].id
        except IndexError:
            id1 = None
    if id2 is None:
        qs = Bikeintersections.objects.filter(
            the_geom__dwithin=(Point(float(lon2), float(lat2), srid=4326), 1000))
        qs = qs.distance(Point(float(lon2), float(lat2), srid=4326))
        qs = qs.order_by('distance')
        try:
            id2 = qs[0].id
        except IndexError:
            id2 = None

    # the inner query is passed to pgr_dijkstra as a quoted string, hence the \' escapes
    sql_inside_of_function = "select id, source, target, cost * (4-rtng_ccpx) * (4-rtng_mean) * (4-rtng_cbf7)+case when one_way=-1 then 1000000 else 0 END as cost,cost * (4-rtng_ccpx)*(4-rtng_mean)*(4-rtng_cbf7) + case when one_way=1 then 1000000 else 0 END as reverse_cost from \"Data_minnesotabiketrails\"\'"
    sql_function = "select ccp_name, the_geom, bt.cost, bt.item_tags, directions(the_geom, lead(the_geom,1) over (ORDER BY seq)) from pgr_dijkstra(\'"
    cursor = connection.cursor()
    cursor.execute(
        sql_function + sql_inside_of_function +
        ", %s , %s , true,true) join \"Data_minnesotabiketrails\" as bt on bt.id=id2",
        (str(id1), str(id2)))
    rows = cursor.fetchall()

    names = []
    gj = []
    for item in rows:
        names.append((item[0], item[2], item[4]))
        poly = loads(GEOSGeometry(item[1], srid=4326).geojson)
        poly['properties'] = {'name': item[0], 'tag': item[3]}
        gj.append(poly)

    # make a MultiLineString out of the linestrings and get its extent
    geo = [GEOSGeometry(geoDumps(po)) for po in gj]
    extent = MultiLineString(*geo).extent
    extent = [[extent[1], extent[0]], [extent[3], extent[2]]]

    # accumulate the distance along each run of same-named trail sections
    sent_names = []
    dist_on_path = 0
    for i, n in enumerate(names):
        print('Name: ' + str(n[0]) + ' Turn: ' + str(n[2]))
        if i == 0:
            previous_name = n[0]
        if n[0] == previous_name:
            dist_on_path += n[1]
            previous_turn = n[2]
        else:
            sent_names.append((previous_name, "%.2f" % dist_on_path, previous_turn))
            dist_on_path = n[1]
            previous_name = n[0]
        if i == len(names) - 1:
            # the final section has no turn direction to report
            sent_names.append((previous_name, "%.2f" % dist_on_path))
    return HttpResponse(
        dumps({'names': sent_names, 'geojson': gj, 'extent': extent}),
        content_type="application/json; charset='utf-8'")

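# A hedged sketch of the extent handling at the end of get(): a GEOS extent is
# (xmin, ymin, xmax, ymax) with x = longitude, so the view swaps the axes to
# produce the [[south-west], [north-east]] lat/lon pairs that a client-side
# fitBounds-style map call expects. The coordinates are invented.
from django.contrib.gis.geos import LineString, MultiLineString

legs = [LineString((-93.27, 44.97), (-93.25, 44.98)),
        LineString((-93.25, 44.98), (-93.24, 44.99))]
extent = MultiLineString(*legs).extent                      # (xmin, ymin, xmax, ymax)
bounds = [[extent[1], extent[0]], [extent[3], extent[2]]]   # [[lat, lon], [lat, lon]]
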