def test_reverse(self): pos= '49.9506159,14.3155932' c= geocode.Google() adr, p=c.reverse(pos) print adr, p c= geocode.OSMNominatim() adr, p=c.reverse(pos) print adr, p point=Point(14.3155932, 49.9506159, srid=4326) adr2, p2=c.reverse(point) self.assertEqual(p,p2) self.assertTrue(adr2.street,adr.street) point2=point.transform(3857, True) adr3, p3=c.reverse(point2) self.assertEqual(p,p3) self.assertTrue(adr3.street,adr.street) adr, p=c.reverse('50.0027028,15.0414849') self.assertEqual(adr.postal_code, '28107')
def clean(self):
    """Validate the add-well form.

    Rejects a duplicate well name within the selected network, and rejects
    coordinates that do not reproject into a rough WGS84 bounding box of
    The Netherlands (lon 2..8, lat 50..54).
    """
    super(AddWellForm, self).clean()
    network = self.cleaned_data['network']
    name = self.cleaned_data['name']
    try:
        network.well_set.get(name=name)
        raise forms.ValidationError(
            'A well with name "{}" already exists.'.format(name))
    except Well.DoesNotExist:
        pass
    srid = self.cleaned_data['srid']
    x = self.cleaned_data['xcoord']
    y = self.cleaned_data['ycoord']
    z = self.cleaned_data['zcoord']  # accessed so a missing field surfaces here
    p = Point(x, y, srid=int(srid))
    error = False
    # srid may arrive as a string (ChoiceField); compare numerically, as the
    # Point construction above already assumes int(srid) is valid.
    if int(srid) != 4326:
        try:
            x, y = p.transform(4326, True).tuple
        except Exception:
            # was a bare `except:` — don't swallow SystemExit/KeyboardInterrupt
            error = True
    if error or (x < 2 or x > 8 or y < 50 or y > 54):
        raise forms.ValidationError(
            'Coordinates of the well are not in The Netherlands.')
def setUp(self):
    """Create one cuisine and four restaurants: approved near/far (with the
    cuisine), approved without any cuisine, and an unapproved one."""
    super(TestVenuess, self).setUp()
    self.cuisine = Cuisine.objects.create(name="cuisine")

    def make_restaurant(name, approved, location, tag_cuisine):
        # Mirrors the original save sequence: save, optionally attach the
        # shared cuisine, then save again.
        venue = Restaurant(name=name, approved=approved, location=location)
        venue.save()
        if tag_cuisine:
            venue.cuisines.add(self.cuisine)
            venue.save()
        return venue

    self.approvedRestaurant = make_restaurant(
        "Approved", True, Point(0, 0), True)
    self.approvedFarAwayRestaurant = make_restaurant(
        "approvedFarAwayRestaurant", True, Point(45, 45), True)
    self.approvedRestaurantWithoutCategory = make_restaurant(
        "WithoutCategory", True, Point(0, 0), False)
    self.restaurant = make_restaurant("UnApproved", False, Point(0, 0), True)
def test_projectlocatie_can_return_its_location_in_latlon_format(self):
    """latlon() must equal the stored RD location reprojected to WGS84."""
    proj = Project.objects.create(name=u'ProjectNaam')
    ploc = proj.projectlocatie_set.create(
        name=u'ProjectLocatie', location=Point(157525, 478043))
    # Build the expected value from the same coordinates in EPSG:28992.
    expected = Point(x=157525, y=478043, srid=28992)
    expected.transform(4326)
    self.assertEqual(ploc.latlon().tuple, expected.tuple)
def test_meetlocatie_can_access_its_project(self):
    """A meetlocatie reaches its project through its projectlocatie."""
    proj = Project.objects.create(name=u'ProjectNaam')
    ploc = proj.projectlocatie_set.create(
        name=u'ProjectLocatie', location=Point(157525, 478043))
    mloc = ploc.meetlocatie_set.create(
        name=u'MeetLocatie', location=Point(157520, 478040))
    self.assertEqual(mloc.project(), proj)
def find_stoppingplace(lat, lng):
    """Return the StoppingPlace containing (lat, lng), or the closest one.

    Returns None when no stopping places exist at all (the original raised
    IndexError in that case).
    """
    # GEOS points are (x, y) == (lng, lat); SRID 4326 is what Leaflet uses
    # when working with latitude and longitude.
    current_position = Point(lng, lat, srid=4326)
    places = StoppingPlace.objects.all()

    # Trying to see if the position is on a slope. Keep the original
    # "last match wins" semantics for overlapping areas.
    found_place = None
    for place in places:
        if current_position.within(place.area):
            found_place = place
    if found_place is not None:
        return found_place

    # If it is not, we consider it belongs to the closest slope.
    # NOTE(review): distance() is in SRS units (degrees) — fine for ranking.
    if not places:
        return None  # robustness: empty table no longer crashes
    return min(places, key=lambda place: current_position.distance(place.area))
def test_it_is_possible_to_add_a_meetlocatie_to_a_projectlocatie(self):
    """Creating a meetlocatie bumps the projectlocatie's location count."""
    proj = Project.objects.create(name=u'ProjectNaam')
    ploc = proj.projectlocatie_set.create(
        name=u'ProjectLocatie', location=Point(157525, 478043))
    self.assertEqual(ploc.location_count(), 0)
    ploc.meetlocatie_set.create(
        name=u'MeetLocatie', location=Point(157520, 478040))
    self.assertEqual(ploc.location_count(), 1)
def test_list_view_filter_in_bbox(self):
    """Only the two stations inside the requested bounding box come back."""
    for coords in ([20, 50], [21, 51], [0, 0]):
        StationFactory.create(position=Point(coords))
    response = self.client.get(self.station_list_url,
                               data={'in_bbox': '19, 49, 22, 52'})
    self.assertEqual(2, len(response.data['results']))
def _omgeo_candidate_to_dict(candidate, srid=3857):
    """Turn a single omgeo candidate into a flat dict in the target SRID."""
    point = Point(candidate.x, candidate.y, srid=candidate.wkid)
    if srid != candidate.wkid:
        point.transform(srid)
    return {'address': candidate.match_addr,
            'srid': point.srid,
            'score': candidate.score,
            'x': point.x,
            'y': point.y}
def _get_geometry_wkt(self, dataset):
    """Return the WKT spatial coverage of a netCDF dataset.

    Strategies, in order:
    1. If any non-dimension variable depends on both the latitude and the
       longitude dimensions, the data is gridded: take the convex hull of
       the full lon x lat product.
    2. If the longitude and latitude arrays share a shape, treat them as
       per-data-point coordinates (a trajectory): 1-D data becomes a Point
       or LineString, higher-rank data a convex hull.
    3. Otherwise raise ValueError.
    """
    longitudes = dataset.variables[self.longitude_attribute]
    latitudes = dataset.variables[self.latitude_attribute]
    lonlat_dependent_data = False
    for nc_variable_name, nc_variable_value in dataset.variables.items():
        if (nc_variable_name not in dataset.dimensions and
                self.longitude_attribute in nc_variable_value.dimensions and
                self.latitude_attribute in nc_variable_value.dimensions):
            lonlat_dependent_data = True
            break
    # Read each fill value only once for use in the loops below.
    lat_fil_value = latitudes[:].fill_value if np.ma.isMaskedArray(
        latitudes[:]) else None
    lon_fil_value = longitudes[:].fill_value if np.ma.isMaskedArray(
        longitudes[:]) else None
    if lonlat_dependent_data:
        # At least one variable depends on both axes: combine the longitude
        # and latitude arrays to enumerate all grid points.
        points = []
        for lon in longitudes:
            for lat in latitudes:
                # Skip points carrying the fill (default) value.
                if lon_fil_value == lon or lat_fil_value == lat:
                    continue
                points.append(Point(float(lon), float(lat), srid=4326))
        geometry = MultiPoint(points, srid=4326).convex_hull
    elif longitudes.shape == latitudes.shape:
        points = []
        # np.nditer() works like zip() for multi-dimensional arrays.
        for lon, lat in np.nditer((longitudes, latitudes), flags=['buffered']):
            if lon_fil_value == lon or lat_fil_value == lat:
                continue
            new_point = Point(float(lon), float(lat), srid=4326)
            # Don't add duplicate consecutive points in trajectories.
            if not points or new_point != points[-1]:
                points.append(new_point)
        if len(longitudes.shape) == 1:
            if len(points) == 1:
                geometry = points[0]
            else:
                geometry = LineString(points, srid=4326)
        else:
            geometry = MultiPoint(points, srid=4326).convex_hull
    else:
        # BUG FIX: this message literal was split by a raw line break in the
        # original source, which is a syntax error.
        raise ValueError("Could not determine the spatial coverage")
    return geometry.wkt
def _omgeo_candidate_to_dict(candidate, srid=3857):
    """Convert an omgeo geocoding candidate to a plain dict.

    The candidate's point is reprojected to `srid` when necessary.
    """
    geom = Point(candidate.x, candidate.y, srid=candidate.wkid)
    if geom.srid != srid:
        geom.transform(srid)
    result = {'address': candidate.match_addr, 'score': candidate.score}
    result.update({'srid': geom.srid, 'x': geom.x, 'y': geom.y})
    return result
def setUp(self):
    # Fixture: one earthquake event with optional id-language reports plus a
    # REST-group user logged into the test client.
    if settings.TESTING:
        # move to media root for testing purposes
        self.default_media_path = settings.MEDIA_ROOT
        settings.MEDIA_ROOT = ABS_PATH('media_test')
    with open(self.data_path('grid.xml')) as grid_file:
        Earthquake.objects.create(
            shake_id='20150619200628',
            shake_grid=File(grid_file),
            magnitude=4.6,
            time=datetime.datetime(
                2015, 6, 19, 20, 6, 28,
                tzinfo=pytz.timezone('Asia/Jakarta')),
            depth=10,
            location=Point(x=126.52, y=4.16, srid=4326),
            location_description='Manado')
    earthquake = Earthquake.objects.get(shake_id='20150619200628')
    earthquake.save()
    # Report fixture filenames derived from the shake id.
    report_pdf = earthquake.shake_id + '-id.pdf'
    report_png = earthquake.shake_id + '-id.png'
    report_thumb = earthquake.shake_id + '-thumb-id.png'
    report = EarthquakeReport()
    report.language = 'id'
    report.earthquake = earthquake
    # Each artifact is attached only if the fixture file exists; save while
    # the file handle is still open so Django can read its contents.
    if os.path.exists(self.data_path(report_pdf)):
        with open(self.data_path(report_pdf)) as pdf:
            report.report_pdf = File(pdf)
            report.save()
    if os.path.exists(self.data_path(report_png)):
        with open(self.data_path(report_png)) as png:
            report.report_image = File(png)
            report.save()
    if os.path.exists(self.data_path(report_thumb)):
        with open(self.data_path(report_thumb)) as thumb:
            report.report_thumbnail = File(thumb)
            report.save()
    # create test user
    User = get_user_model()
    self.user = User.objects.create_user(username='******',
                                         email='*****@*****.**',
                                         password='******',
                                         location=Point(0, 0),
                                         email_updates=False)
    realtime_group = Group.objects.get(name=REST_GROUP)
    self.user.groups.add(realtime_group)
    self.user.save()
    self.client.login(email='*****@*****.**', password='******')
def test_find_nearby(self, radius, expected_ids):
    """find_nearby returns exactly the shops within `radius` kilometres."""
    Shop.objects.bulk_create([
        ShopFactory.build(id=1, location=Point(x=10, y=15)),
        ShopFactory.build(id=2, location=Point(x=20, y=25)),
    ])
    origin = Point(10, 15.5)
    nearby = Shop.objects.find_nearby(origin, radius=radius, unit="km")
    assert expected_ids == {shop.id for shop in nearby}
def test_latlng(self):
    """Only allegations near (0, 0) match a latlng+radius query."""
    expected_allegations = OfficerAllegationFactory.create_batch(2)
    for i in range(len(expected_allegations)):
        allegation = expected_allegations[i].allegation
        # BUG FIX: the y coordinate was `0 + 1/1000` (literal one), so every
        # point shared the same latitude; use the loop index like x does.
        allegation.point = Point(0 + i / 1000, 0 + i / 1000)
        allegation.save()
    # An allegation far outside the search radius must not match.
    allegation = OfficerAllegationFactory().allegation
    allegation.point = Point(650, 650)
    allegation.save()
    query_string = 'latlng=0,0&radius=500'
    expected_ids = [o.id for o in expected_allegations]
    self.check_built_query(query_string, expected_ids)
def _omgeo_candidate_to_dict(candidate, srid=3857):
    """Flatten an omgeo candidate (with region/city/type) into a dict,
    reprojecting its coordinates to `srid` when they differ."""
    location = Point(candidate.x, candidate.y, srid=candidate.wkid)
    if location.srid != srid:
        location.transform(srid)
    return {
        'address': candidate.match_addr,
        'region': candidate.match_region,
        'city': candidate.match_city,
        'srid': location.srid,
        'score': candidate.score,
        'x': location.x,
        'y': location.y,
        'type': candidate.locator_type,
    }
def get(self, request):
    """Redirect to a data session scoped to the police beat containing the
    given lat/lng (defaults to a point in Chicago); plain /data/ otherwise."""
    lat = float(request.GET.get('lat', 41.850033))
    lng = float(request.GET.get('lng', -87.6500523))
    location = Point(lng, lat)
    beat = Area.objects.filter(type='police-beats',
                               polygon__contains=location).first()
    if beat is None:
        return HttpResponseRedirect("/data/")
    session = Session.objects.create(
        title="Police Beat %s" % beat.name,
        query={
            'filters': {
                'Area': [{
                    'value': beat.id,
                    'filters': 'allegation__areas_id={id}'.format(id=beat.id),
                    'pinned': False
                }]
            }
        })
    return HttpResponseRedirect(
        "/data/{session_hash}".format(session_hash=session.hash_id))
def test_user_can_add_restaurant(self):
    """Posting a valid form creates a restaurant and persists every field."""
    self.can_get("venues.views.venuess.add_restaurant")
    form_data = {
        'name': 'alksjdlas',
        'address': 'laksdlkasd',
        'cuisines': self.cuisine.pk,
        'catering': True,
        'delivery': True,
        'alcoholFree': True,
        'porkFree': True,
        'muslimOwner': True,
        'location': str(Point(0, 0)),
        'menu': 1,
        'city': 'kalsjd',
        'country': 'RU',
        'website': 'http://site.ru/',
    }
    count_before = Restaurant.objects.count()
    self.redirect_on_post("venues.views.venuess.add_restaurant",
                          params=form_data)
    self.assertEqual(count_before + 1, Restaurant.objects.count())
    created = Restaurant.objects.order_by('-pk').first()
    for field, expected in form_data.items():
        if field == 'cuisines':
            # M2M field: compare through the relation, not getattr.
            self.assertEqual(created.cuisines.all()[0].pk, self.cuisine.pk)
        else:
            self.assertEqual(getattr(created, field), expected)
def test_user_CANT_update_not_his_restaurant(self):
    """Updating someone else's restaurant redirects to the login page."""
    other_restaurant = Restaurant()
    other_restaurant.save()
    cuisine = Cuisine.objects.create(name="nasjdasdashaksds62a")
    form_data = {
        'name': 'alksjdlas',
        'address': 'laksdlkasd',
        'cuisines': cuisine.pk,
        'catering': True,
        'delivery': True,
        'alcoholFree': True,
        'porkFree': True,
        'muslimOwner': True,
        'location': str(Point(0, 0)),
        'menu': 1,
        'city': 'kalsjd',
        'country': 'RU',
        'website': 'http://site.ru/',
    }
    self.redirect_to_login_on_get("venues.views.venuess.update_restaurant",
                                  pargs=[other_restaurant.pk],
                                  params=form_data)
def user_locate(request):
    """Return venues within 5 km of the requested lat/lng as a JSON array."""
    venues = []
    if request.method == 'GET':
        # BUG FIX: GEOS Point takes (x, y) == (lng, lat); the original passed
        # lat first, swapping the axes. Every other view in this module
        # builds points lng-first. NOTE(review): confirm stored Venue.point
        # values are not themselves swapped before deploying.
        latlng = Point(float(request.GET['lng']), float(request.GET['lat']))
        results = Venue.objects.filter(point__distance_lte=(latlng, D(km=5)))
        venues = '[' + ','.join(x.toJson() for x in results) + ']'
    return HttpResponse(str(venues))
def setUp(self):
    # Fixture: one flood event with an English report plus a REST-group user
    # logged into the test client.
    if settings.TESTING:
        # move to media root for testing purposes
        self.default_media_path = settings.MEDIA_ROOT
        settings.MEDIA_ROOT = ABS_PATH('media_test')
    # NOTE(review): the open() handles passed to File() below are never
    # closed explicitly — consider context managers.
    flood = Flood.objects.create(
        event_id=u'2015112518-3-rw',
        time=datetime.datetime(2015, 11, 25, 18, 0, 0),
        interval=3,
        source=u'Peta Jakarta',
        region=u'Jakarta',
        hazard_layer=File(open(self.data_path('hazard.zip'))),
        hazard_path=flood_layer_uri)
    FloodReport.objects.create(
        flood=flood,
        language='en',
        impact_report=File(open(self.data_path('impact-table-en.pdf'))),
        impact_map=File(open(self.data_path('impact-map-en.pdf'))),
    )
    # create test user
    User = get_user_model()
    self.user = User.objects.create_user(username='******',
                                         email='*****@*****.**',
                                         password='******',
                                         location=Point(0, 0),
                                         email_updates=False)
    realtime_group = Group.objects.get(name=REST_GROUP)
    self.user.groups.add(realtime_group)
    self.user.save()
    self.client.login(email='*****@*****.**', password='******')
def shop(db, maps_url):
    """Fixture: a shop at a fixed location with a known slug and name."""
    return ShopFactory(
        place_name="Bonobo Shop",
        slug="bonobo-shop",
        maps_url=maps_url,
        location=Point(10, 10),
    )
def buscar_plaza_trabajo(request):
    """Search job positions (PlazaTrabajo) within a circular radius.

    A POST with lat/lng/radio returns a JSON list of matching positions;
    any other request renders the search page.
    """
    template_name = 'plaza_trabajo/busqueda_plaza_trabajo.html'
    required = ('lat', 'lng', 'radio')
    if request.method == 'POST' and all(key in request.POST
                                        for key in required):
        # GEOS points are (x, y) == (lng, lat).
        centro = Point(float(request.POST['lng']),
                       float(request.POST['lat']))
        area = centro.buffer(int(request.POST['radio']) / 100)
        resultados = [
            {'lat': plaza.coordenadas.y,
             'lng': plaza.coordenadas.x,
             'nombre': plaza.cargo.nombre}
            for plaza in PlazaTrabajo.objects.filter(
                coordenadas__contained=area)
        ]
        return JsonResponse(resultados, safe=False)
    return render(request, template_name, {})
def _default_stop_location():
    """Callable default so every Stop row gets its own Point instance.

    A shared mutable Point() instance as a field default is flagged by
    Django's system checks (fields.E010-style warning for mutable defaults).
    """
    return Point(0, 0)


class Stop(models.Model):
    """A transit stop with raw lat/lon columns plus a geometry PointField."""
    usid = models.CharField(max_length=255, default=0)
    name = models.CharField(max_length=100)
    latitude = models.FloatField()
    longitude = models.FloatField()
    created_at = models.DateTimeField(auto_now_add=True)
    # Default built via a callable instead of a shared Point(0, 0) instance.
    location = models.PointField(default=_default_stop_location)
def save(self, commit=True):
    """Build `coordenadas` from the lon/lat form fields before saving."""
    instance = super(PlazaTrabajoForm, self).save(commit=False)
    # GEOS points are (x, y) == (longitude, latitude).
    instance.coordenadas = Point(instance.longitud, instance.latitud)
    if commit:
        instance.save()
    return instance
def setUp(self):
    # Fixture: one flood event plus a REST-group user logged into the
    # test client.
    if settings.TESTING:
        # move to media root for testing purposes
        self.default_media_path = settings.MEDIA_ROOT
        settings.MEDIA_ROOT = ABS_PATH('media_test')
    # Make sure the realtime app's ready() hook (signal registration etc.)
    # has run before creating objects.
    app_config = django_apps.get_app_config('realtime')
    app_config.ready()
    # NOTE(review): the open() handle passed to File() is never closed.
    Flood.objects.create(event_id=u'2015112518-3-rw',
                         time=datetime.datetime(2015, 11, 25, 18, 0, 0),
                         interval=3,
                         source=u'Peta Jakarta',
                         region=u'Jakarta',
                         hazard_layer=File(
                             open(self.data_path('hazard.zip'))))
    # create test user
    User = get_user_model()
    self.user = User.objects.create_user(username='******',
                                         email='*****@*****.**',
                                         password='******',
                                         location=Point(0, 0),
                                         email_updates=False)
    realtime_group = Group.objects.get(name=REST_GROUP)
    self.user.groups.add(realtime_group)
    self.user.save()
    self.client.login(email='*****@*****.**', password='******')
def test_it_is_possible_to_add_a_datasource_to_a_meetlocatie_as_meetlocatie(
        self):
    """A datasource created with a meetlocatie appears in its set."""
    proj = Project.objects.create(name=u'ProjectNaam')
    ploc = proj.projectlocatie_set.create(
        name=u'ProjectLocatie', location=Point(157525, 478043))
    mloc = ploc.meetlocatie_set.create(
        name=u'MeetLocatie', location=Point(157520, 478040))
    gen = Generator.objects.create(
        name=u'Generator',
        classname='acacia.data.tests.test_models.MockGenerator')
    owner = User.objects.create(username=u'UserName', password=u'PassWord')
    self.assertEqual(mloc.datasource_set.count(), 0)
    gen.datasource_set.create(name=u'DataSource', user=owner,
                              meetlocatie=mloc)
    self.assertEqual(mloc.datasource_set.count(), 1)
def add_problem(request, massif_key_name=None):
    """Record a digitizing problem reported from the map client.

    Expects POSTed projected x/y coordinates, converts them to WGS84 and
    stores a DigitizingProblem on the given massif. Returns a JSON summary
    of the created problem.

    NOTE(review): a non-POST request or missing massif key falls through
    and returns None (a 500 in Django) — confirm a 4xx isn't expected.
    """
    if request.method == 'POST' and massif_key_name is not None:
        user = request.user
        x1, y1 = float(request.POST.get('x')), float(request.POST.get('y'))
        # Reproject from EPSG:32758 (UTM zone 58S) to lon/lat.
        in_proj = Proj(init='EPSG:32758')
        out_proj = Proj(init='EPSG:4326')
        x2, y2 = transform(in_proj, out_proj, x1, y1)
        problem_type = request.POST.get('problem')
        comment = request.POST.get('comment')
        massif = Massif.objects.get(key_name=massif_key_name)
        pb = DigitizingProblem(uuid=uuid.uuid1(),
                               massif=massif,
                               location=Point(x2, y2),
                               created=timezone.now(),
                               created_by=user,
                               problem=problem_type,
                               comments=comment)
        pb.save()
        return HttpResponse(
            json.dumps({
                'id': pb.id,
                'problem': pb.problem,
                'creator_full_name': user.get_full_name(),
                'creator_username': user.username,
                'comments': pb.comments,
            }))
def test_can_be_set_for_a_store(self):
    """A store's location round-trips through the database unchanged."""
    created = StoreFactory(name='Test Store',
                           location=Point(30.3333, 123.323))
    reloaded = created.__class__.objects.get(id=created.id)
    self.assertEqual(reloaded.location.x, 30.3333)
    self.assertEqual(reloaded.location.y, 123.323)
def get_random_point(polygon):
    """Return a random point inside `polygon` via rejection sampling on
    its bounding envelope."""
    (x_min, y_min), (x_max, _), (_, y_max) = polygon.envelope[0][:3]
    while True:
        candidate = Point(random.uniform(x_min, x_max),
                          random.uniform(y_min, y_max))
        if polygon.contains(candidate):
            return candidate
def within_itree_regions(request):
    """True when both x and y are supplied and fall inside an ITreeRegion."""
    from treemap.models import ITreeRegion
    x = request.GET.get('x', None)
    y = request.GET.get('y', None)
    if not (x and y):
        return False
    return ITreeRegion.objects.filter(
        geometry__contains=Point(float(x), float(y))).exists()
def setup_up_to_datasource(self):
    """Build the chain project -> projectlocatie -> meetlocatie -> datasource
    (via a generator and a user) and return every created object."""
    proj = Project.objects.create(name=u'ProjectNaam')
    ploc = proj.projectlocatie_set.create(
        name=u'ProjectLocatie', location=Point(157525, 478043))
    mloc = ploc.meetlocatie_set.create(
        name=u'MeetLocatie', location=Point(157520, 478040))
    owner = User.objects.create(username=u'UserName', password=u'PassWord')
    gen = Generator.objects.create(
        name=u'Generator',
        classname='acacia.data.tests.test_models.MockGenerator')
    source = gen.datasource_set.create(name=u'DataSource', user=owner,
                                       meetlocatie=mloc, url=testurl)
    mloc.datasources.add(source)
    # Same tuple order as before so existing unpacking callers keep working.
    return (proj, ploc, mloc, gen, owner, source)
def post(self, request, *args, **kwargs):
    """Create a Place from an SMS body formatted as "lat,lng"."""
    body = request.POST['Body']
    print(body)
    lat, lng = (float(part) for part in body.split(','))
    # GEOS points are (x, y) == (lng, lat).
    Place.objects.create(name="New Place",
                         location=Point((lng, lat), srid=4326))
    print("Created new point at ({}, {})".format(lat, lng))
    return JsonResponse({'status': 'OK'})
def find_emergency_path(request):
    """Create an Incidence for an emergency at the requested lat/lng.

    Picks the drop point closest to the incident, then the hangar closest to
    that drop point, records the incidence and generates the drone-path KML.
    Returns 201 on success, 503 when the chosen hangar is unavailable.
    """
    # if not can_fly():
    #     return HttpResponse(status=503)
    lat = request.GET.get('lat', '')
    lon = request.GET.get('lng', '')
    path = os.path.dirname(__file__) + "/static/kml/"
    generate_weather(path)
    last_distance = sys.maxint  # Python 2 "infinity" sentinel
    all_hangars = models.Hangar.objects.all()
    selected_hangar = None
    all_droppoints = models.DropPoint.objects.all()
    selected_droppoint = None
    # Closest drop point to the incident. NOTE(review): Point.distance()
    # yields SRS units (degrees) although wrapped in D(m=...); it is only
    # used for ranking, so the unit mismatch is harmless — confirm.
    point_location = Point(float(lon), float(lat))
    for droppoint in all_droppoints:
        distance = D(m=point_location.distance(
            Point(droppoint.longitude, droppoint.latitude)))
        if distance.m < last_distance:
            last_distance = distance.m
            selected_droppoint = droppoint
    # Closest hangar to the chosen drop point.
    last_distance = sys.maxint
    point_location = Point(
        selected_droppoint.longitude, selected_droppoint.latitude)
    for hangar in all_hangars:
        distance = D(m=point_location.distance(
            Point(hangar.longitude, hangar.latitude)))
        if distance.m < last_distance:
            last_distance = distance.m
            selected_hangar = hangar
    selected_hangar.drone.destination_lat = selected_droppoint.latitude
    selected_hangar.drone.destination_lon = selected_droppoint.longitude
    if not selected_hangar.is_available:
        return HttpResponse(status=503)
    # BUG FIX: the original passed drone=hangar.drone — the drone of
    # whichever hangar was iterated last, not the selected closest one.
    incidence = models.Incidence(lat=lat, long=lon,
                                 dropPoint=selected_droppoint,
                                 drone=selected_hangar.drone,
                                 hangar=selected_hangar,
                                 is_active=True)
    incidence.save()
    # generate_mission_file(selected_hangar)
    kml_generator.find_drone_path(
        selected_hangar, selected_droppoint, path, incidence)
    return HttpResponse(status=201)
def set_attr_on_model(model, attr, val):
    """Set a single attribute on a model from a client-supplied value.

    Special cases:
    - 'geom': builds a Point from {'x', 'y', optional 'srid'} and reprojects
      it to web mercator (3857) before assignment via apply_change below.
    - 'id': read-only; raises when the value differs from the current pk.
    - 'udf:<name>': writes into the model's user-defined fields; raises
      KeyError for an unknown UDF name.
    Any other attr must be a regular model field and goes through
    model.apply_change(); unknown fields raise.
    """
    if attr == 'geom':
        srid = val.get('srid', 3857)
        val = Point(val['x'], val['y'], srid=srid)
        val.transform(3857)
    if attr == 'id':
        if val != model.pk:
            raise Exception("Can't update id attribute")
    elif attr.startswith('udf:'):
        udf_name = attr[4:]
        if udf_name in [field.name for field in
                        model.get_user_defined_fields()]:
            model.udfs[udf_name] = val
        else:
            raise KeyError('Invalid UDF %s' % attr)
    elif attr in model.fields():
        model.apply_change(attr, val)
    else:
        # BUG FIX: error message typo — was 'Maformed request'.
        raise Exception('Malformed request - invalid field %s' % attr)
def maak_plotje2(x2, y2, startdatum, einddatum, resultaat):
    # Compute a daily groundwater-level series for location (x2, y2) using
    # the nearest precipitation station and soil rasters. Returns either a
    # plot buffer (resultaat == 'plot') or dataframes for CSV export
    # (resultaat == 'csv').
    pnt = Point(float(x2),float(y2),srid=4326)
    pnt.transform(28992)  # reproject WGS84 -> Dutch RD New (EPSG:28992)
    nummer_meteostation = NeerslagStation.closest(pnt).nummer
    naam_meteostation = NeerslagStation.closest(pnt).naam
    bestandspad= DATA_ROOT
    x= str(x2)
    y= str(y2)
    startdatum = str(startdatum)  # e.g. '2015-01-01'
    einddatum = str(einddatum)  # e.g. '2015-11-08'
    ###################################################################
    # The actual computation starts here: fetch the dates, precipitation
    # and evaporation series for the selected meteo station.
    datum=meteo_query(nummer_meteostation, startdatum, einddatum)[0]
    neerslag=meteo_query(nummer_meteostation, startdatum, einddatum)[1]
    verdamping=meteo_query(nummer_meteostation, startdatum, einddatum)[2]
    # Arrays combined into one time series must be equally long, otherwise
    # all sorts of things go wrong.
    lengte = len(neerslag)
    array_neerslag1 = np.array(neerslag)
    array_verdamping1 = np.array(verdamping)  #np.zeros(shape = (1, lengte), order='C')
    datum_array = np.array(datum)
    array_neerslagoverschot = np.zeros(shape = (1, lengte), order='C')
    startdatum = datum_array[0]
    einddatum = datum_array[lengte-1]
    # Net precipitation (precipitation minus evaporation) per day.
    for i in range(0, lengte):
        array_neerslagoverschot[0][i] = int(array_neerslag1[i]) - int(array_verdamping1[i])
    # Soil properties sampled from the QGIS raster point cloud at (x, y):
    # storage coefficient, drainage resistance, seepage and mean drainage
    # base (sign flipped).
    array_bergingscoefficient = np.array([float(raster_q(bestandspad + "bergcoef-nzv.tif", x, y))])
    array_drainweerstand = np.array([float(raster_q(bestandspad + "drainw-nzv.tif", x, y))])
    array_qbot = np.array([float(raster_q(bestandspad + "kwel-nzv.tif", x, y))])
    array_hgem = np.array([float(raster_q(bestandspad + "ontwbas-nzv.tif", x, y))*-1.0])
    # Pre-fill the groundwater array with zeros so the computation always
    # starts at 0 cm below surface and indexing row i-1 is always valid.
    array_grondwaterstand = np.zeros(shape = (2, lengte), order='C')
    # Daily groundwater level (row 0) and surface runoff (row 1 — still
    # under development) via gws_op_t from the computational core.
    for i in range(1,lengte):
        array_grondwaterstand[0,i] = gws_op_t(array_bergingscoefficient[0], array_drainweerstand[0], array_grondwaterstand[0, (i-1)], array_qbot[0], array_hgem[0], array_neerslagoverschot[0][i])[0]
        array_grondwaterstand[1,i] = gws_op_t(array_bergingscoefficient[0], array_drainweerstand[0], array_grondwaterstand[0, (i-1)], array_qbot[0], array_hgem[0], array_neerslagoverschot[0][i])[1]
    ###################################################################
    # Build the output with the groundwater levels and surface runoff.
    #startdatum = dfNettoNeerslag.ix[0, 'datum']
    dates = datum_array  #pd.date_range(startdatum, periods=lengte)
    # Turn the arrays into date-indexed pandas series.
    dfGWS = pd.Series(array_grondwaterstand[0], index=dates)
    serafstroming = pd.Series(array_grondwaterstand[1], index=dates)
    # Frames are easier to handle with pandas for CSV export and plotting.
    dfGrondwaterstanden = dfGWS.to_frame(name = 'Grondwaterstanden')
    dfAfstroming = serafstroming.to_frame(name = 'Afstroming')
    # Merge on the index so both series can go into a single CSV file.
    dfOutput = pd.merge(dfGrondwaterstanden, dfAfstroming,how='inner', on=None, left_on=None, right_on=None, left_index=True, right_index=True)
    # GHG/GLG/GVG (mean highest / lowest / spring groundwater levels) via
    # the GxG.py module; also kept as date-indexed frames so a horizontal
    # line can be plotted for each.
    GHG = GHG_berekening(dfGWS, dates, lengte)[0]
    GLG = GLG_berekening(dfGWS, dates, lengte)[0]
    GVG = GVG_berekening(dfGWS, dates, lengte)[0]
    dfGHGs = GHG_berekening(dfGWS, dates, lengte)[1]
    dfGLGs = GLG_berekening(dfGWS, dates, lengte)[1]
    dfGVGs = GVG_berekening(dfGWS, dates, lengte)[1]
    gt = GT(GHG[0],GLG[0])[1]
    ###################################################################
    # Plot the groundwater levels, or return the dataframes for CSV export.
    if resultaat == 'plot':
        return plot_buf(dfGWS, dfGHGs, dfGLGs, gt, nummer_meteostation, x2, y2), gt, int(GHG[0]),int(GLG[0]), int(GVG), startdatum.date(), einddatum.date(), array_bergingscoefficient[0], array_drainweerstand[0], array_qbot[0], array_hgem[0], nummer_meteostation, naam_meteostation
    elif resultaat == 'csv':
        return dfGrondwaterstanden, startdatum, einddatum
def find_emergency_path(request):
    # Plan an emergency drone flight: pick the drop point closest to the
    # request's lat/lng, the hangar closest to that drop point, then publish
    # weather/incidence KML and generate the drone path — unless the Lleida
    # weather forbids flying.
    MAX_WIND_SPEED = 10.0
    url = 'http://api.openweathermap.org/data/2.5/weather?q=Lleida&units=metric'
    response = requests.get(url=url)
    data = json.loads(response.text)
    try:
        # Refuse to fly in strong wind or any reported rain; a missing
        # 'rain'/'wind' key raises KeyError and is treated as "no rain".
        if data['wind']['speed'] >= MAX_WIND_SPEED or bool(data['rain']):
            print data['rain']
            print data['wind']['speed']
            return HttpResponse(status=503)
    except KeyError:
        pass
    lat = request.GET.get('lat', '')
    lon = request.GET.get('lng', '')
    path = os.path.dirname(__file__) + "/static/kml/"
    last_distance = sys.maxint  # Python 2 "infinity" sentinel
    all_hangars = models.Hangar.objects.all()
    selected_hangar = None
    all_droppoints = models.DropPoint.objects.all()
    selected_droppoint = None
    # Closest drop point to the incident. NOTE(review): Point.distance()
    # yields SRS units (degrees) despite the D(m=...) wrapper; it is only
    # used for ranking, so the unit mismatch is harmless — confirm.
    point_location = Point(float(lon), float(lat))
    for droppoint in all_droppoints:
        distance = D(m=point_location.distance(
            Point(droppoint.longitude, droppoint.latitude)))
        if distance.m < last_distance:
            last_distance = distance.m
            selected_droppoint = droppoint
    # Closest hangar to the chosen drop point.
    last_distance = sys.maxint
    point_location = Point(
        selected_droppoint.longitude, selected_droppoint.latitude)
    for hangar in all_hangars:
        distance = D(m=point_location.distance(
            Point(hangar.longitude, hangar.latitude)))
        if distance.m < last_distance:
            last_distance = distance.m
            selected_hangar = hangar
    # print selected_hangar.name, selected_hangar.drone.name, selected_droppoint.name
    selected_hangar.drone.destination_lat = selected_droppoint.latitude
    selected_hangar.drone.destination_lon = selected_droppoint.longitude
    if not selected_hangar.is_available:
        return HttpResponse(status=503)
    # Publish the current weather as a KML overlay.
    kml_generator.weather_info(os.path.dirname(__file__) + "/static/kml/meteo_info.kml", data['main']['temp'], data['main']['temp_max'], data['main']['temp_min'], data['wind']['speed'], data['clouds']['all'], data['main']['pressure'], data['main']['humidity'], data['weather'][0]['description'])
    # generate_mission_file(selected_hangar)
    kml_generator.create_emergency_marker(lat, lon, path + "incidence.kml")
    Kml(name="incidence.kml", url="static/kml/incidence.kml", visibility=True).save()
    #sync_kmls_file()
    #sync_kmls_to_galaxy(emergency=True)
    kml_generator.find_drone_path(selected_hangar, selected_droppoint, path)
    # Remove the temporary incidence marker and the per-step drone path files.
    Kml.objects.get(name="incidence.kml").delete()
    os.remove(path + "incidence.kml")
    for step in range(0, 34, 1):
        Kml.objects.get(name="drone_" + str(step) + ".kml").delete()
        os.remove(path + "drone_" + str(step) + ".kml")
    #sync_kmls_file()
    #sync_kmls_to_galaxy(emergency=True)
    return HttpResponse(status=201)