def test_CountrySerializer(self):
    country = geodata_factory.CountryFactory.build(
        code='NL',
        name='Netherlands',
        alt_name='Nederland',
        language='en',
        dac_country_code=1,
        iso3='NLD',
        alpha3='NLD',
        fips10='FIPS',
        center_longlat=Point(5.45, 52.3),
    )
    serializer = serializers.CountrySerializer(
        country, context={'request': self.request_dummy})

    assert serializer.data['code'] == country.code, \
        """'country.code' should be serialized to a field called 'code'"""
    assert serializer.data['name'] == country.name, \
        """'country.name' should be serialized to a field called 'name'"""
    assert serializer.data[
        'numerical_code_un'
    ] == country.numerical_code_un, \
        """'country.numerical_code_un' should be serialized to a field
        called 'numerical_code_un'"""
    assert serializer.data['alt_name'] == country.alt_name, \
        """'country.alt_name' should be serialized to a field called
        'alt_name'"""
    assert serializer.data['language'] == country.language, \
        """'country.language' should be serialized to a field called
        'language'"""
    assert serializer.data[
        'dac_country_code'
    ] == country.dac_country_code, \
        """'country.dac_country_code' should be serialized to a field
        called 'dac_country_code'"""
    assert serializer.data['iso3'] == country.iso3, \
        """'country.iso3' should be serialized to a field called 'iso3'"""
    assert serializer.data['alpha3'] == country.alpha3, \
        """'country.alpha3' should be serialized to a field called
        'alpha3'"""
    assert serializer.data['fips10'] == country.fips10, \
        """'country.fips10' should be serialized to a field called
        'fips10'"""

    required_fields = (
        'url',
        'code',
        'pk',
        'numerical_code_un',
        'name',
        'alt_name',
        'language',
        'region',
        'un_region',
        'unesco_region',
        'dac_country_code',
        'iso3',
        'alpha3',
        'fips10',
        'data_source',
        'activities',
        'location',
        'polygon',
    )
    assertion_msg = "the field '{0}' should be in the serialized country"
    for field in required_fields:
        assert field in serializer.data, assertion_msg.format(field)

def test_deleted_pois_nds(self):
    trek = TrekFactory.create(geom=LineString((0, 0), (4, 4)))
    poi = POIFactory.create(geom=Point(2.4, 2.4))
    self.assertCountEqual(trek.pois, [poi])
    poi.delete()
    self.assertCountEqual(trek.pois, [])

from django.conf import settings
from django.contrib.gis.geos import LineString, Point, Polygon
from rest_framework.test import APITestCase

from traffic_control.models import Lifecycle, MountType, Owner, TrafficControlDeviceType
from users.models import User

test_point_3d = Point(10.0, 10.0, 0.0, srid=settings.SRID)
test_point_2_3d = Point(0.0, 0.0, 0.0, srid=settings.SRID)
test_point_3_3d = Point(100.0, 100.0, 0.0, srid=settings.SRID)
test_point_4_3d = Point(-44.3, 60.1, 0.0, srid=4326)
test_line_3d = LineString((0.0, 0.0, 0.0), (50.0, 0.0, 0.0), srid=settings.SRID)
test_line_2_3d = LineString((20.0, 20.0, 0.0), (30.0, 30.0, 0.0), srid=settings.SRID)
test_line_3_3d = LineString((40.0, 40.0, 0.0), (60.0, 60.0, 0.0), srid=settings.SRID)
test_line_4_3d = LineString(
    (500.0, 500.0, 0.0), (500.0, 550.0, 0.0), srid=settings.SRID
)
test_polygon_3d = Polygon(
    (
        (0.0, 0.0, 0.0),
        (0.0, 50.0, 0.0),
        (50.0, 50.0, 0.0),
        (50.0, 0.0, 0.0),
        (0.0, 0.0, 0.0),
    ),
    srid=settings.SRID,
)
test_polygon_2_3d = Polygon(
    (
        (1000.0, 1000.0, 0.0),
        (1000.0, 1050.0, 0.0),
        # remaining ring vertices assumed by analogy with test_polygon_3d
        (1050.0, 1050.0, 0.0),
        (1050.0, 1000.0, 0.0),
        (1000.0, 1000.0, 0.0),
    ),
    srid=settings.SRID,
)

def set_point(self, lat, lon):
    # GEOS Point axis order is (x=longitude, y=latitude)
    pnt = Point(lon, lat)
    self.latlgn = pnt

def words_to_point(q):
    geolocator = Nominatim()
    location = geolocator.geocode(q)
    point = Point(location.longitude, location.latitude)
    return point

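# Hedged usage sketch, not from the original source: recent geopy versions
# require a user_agent when constructing Nominatim, and geocode() returns
# None when nothing matches. "example-app" and words_to_point_safe are
# illustrative placeholders.
from django.contrib.gis.geos import Point
from geopy.geocoders import Nominatim


def words_to_point_safe(q):
    geolocator = Nominatim(user_agent="example-app")
    location = geolocator.geocode(q)
    if location is None:
        return None  # no geocoding result for the query
    # GEOS Points take (x, y) == (longitude, latitude)
    return Point(location.longitude, location.latitude)
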
def import_data(self, metadata, output):
    """
    Pulls weather data from the BBC
    """
    socket.setdefaulttimeout(5)
    try:
        observations = self.get_observations_data()
        forecasts = self.get_forecast_data()
    except Exception as e:
        output.write("Error importing weather data from BBC\n")
        traceback.print_exc(file=output)
        output.write('\n')
        logger.exception("Error importing weather data from BBC")
        return metadata

    # We only keep the most recent observation. This avoids the DB growing
    # without limit. We also may not have a right to store this data.
    weathers = [(Weather.objects.get_or_create(location_id=self.id,
                                               ptype=PTYPE_OBSERVATION)[0],
                 observations)]

    for observation_date, forecast in forecasts['forecasts'].items():
        weathers.append((Weather.objects.get_or_create(
            location_id=self.id,
            ptype=PTYPE_FORECAST,
            observed_date=observation_date)[0], forecast))

    VERBATIM = [
        'temperature', 'wind_speed', 'humidity', 'pressure',
        'wind_direction', 'sunset', 'sunrise', 'observed_date',
        'published_date', 'name', 'min_temperature', 'max_temperature',
    ]
    LOOKUP = [
        ('outlook', OUTLOOK_CHOICES),
        ('visibility', VISIBILITY_CHOICES),
        ('pressure_state', PRESSURE_STATE_CHOICES),
        ('uv_risk', SCALE_CHOICES),
        ('pollution', SCALE_CHOICES),
    ]

    for weather, data in weathers:
        for feature in VERBATIM:
            if feature in data:
                setattr(weather, feature, data[feature])
        for feature, values in LOOKUP:
            if feature in data:
                setattr(weather, feature,
                        self._find_choice_match(values, data[feature]))
        weather.location = Point(data['location'], srid=4326)
        weather.save()

    return metadata

def import_gbif_occurrences(taxonomy, offset=0, habitat=None, origin=None):
    """
    Import gbif occurrences based on taxonomy gbif key,
    data stored to biological_collection_record table
    :param taxonomy: Taxonomy object
    :param offset: response data offset, default is 0
    :param habitat: habitat of species, default to None
    :param origin: origin of species, default to None
    """
    api_url = 'http://api.gbif.org/v1/occurrence/search?'
    api_url += 'taxonKey={}'.format(taxonomy.gbif_key)
    api_url += '&offset={}'.format(offset)
    # We need data with coordinate
    api_url += '&hasCoordinate=true'
    # We don't need data with geospatial issue
    api_url += '&hasGeospatialIssue=false'
    # Only fetch South Africa
    api_url += '&country=ZA'

    try:
        response = requests.get(api_url)
        json_result = response.json()
        data_count = json_result['count']
    except HTTPError as e:
        logger.error(str(e))
        return

    source_collection = 'gbif'
    admins = settings.ADMINS
    superusers = Profile.objects.filter(is_superuser=True)
    if admins:
        for admin in admins:
            superuser_list = superusers.filter(email=admin[1])
            if superuser_list.exists():
                superusers = superuser_list
                break
    if superusers.exists():
        user = superusers[0]
    else:
        user = None

    models.signals.post_save.disconnect(location_site_post_save_handler)
    models.signals.post_save.disconnect(collection_post_save_handler)

    for result in json_result['results']:
        upstream_id = result.get(UPSTREAM_ID_KEY, None)
        longitude = result.get(LON_KEY)
        latitude = result.get(LAT_KEY)
        coordinate_uncertainty = result.get(COORDINATE_UNCERTAINTY_KEY, 0)
        event_date = result.get(EVENT_DATE_KEY,
                                result.get(MODIFIED_DATE_KEY, None))
        collector = result.get(COLLECTOR_KEY, '')
        institution_code = result.get(INSTITUTION_CODE_KEY,
                                      source_collection)
        reference = result.get(REFERENCE_KEY, '')
        species = result.get(SPECIES_KEY, None)

        site_point = Point(longitude, latitude, srid=4326)

        # Check nearest site based on site point and coordinate uncertainty
        location_sites = LocationSite.objects.filter(
            geometry_point__distance_lte=(
                site_point, D(m=coordinate_uncertainty)))
        if location_sites.exists():
            # Get first site
            location_site = location_sites[0]
        else:
            # Create a new site
            locality = result.get(
                LOCALITY_KEY,
                result.get(VERBATIM_LOCALITY_KEY, DEFAULT_LOCALITY))
            location_type, status = LocationType.objects.get_or_create(
                name='PointObservation',
                allowed_geometry='POINT')
            location_site = LocationSite.objects.create(
                geometry_point=site_point,
                name=locality,
                location_type=location_type)

        try:
            collection_record = BiologicalCollectionRecord.objects.get(
                upstream_id=upstream_id)
            logger.info(
                'Update existing collection record with '
                'upstream ID : {}'.format(upstream_id))
        except BiologicalCollectionRecord.DoesNotExist:
            logger.info(
                'Collection record created with upstream ID : {}'.format(
                    upstream_id))
            collection_record = BiologicalCollectionRecord.objects.create(
                upstream_id=upstream_id,
                site=location_site,
                taxonomy=taxonomy)

        if event_date:
            collection_record.collection_date = parse(event_date)

        collection_record.taxonomy = taxonomy
        collection_record.owner = user
        collection_record.original_species_name = species
        collection_record.collector = collector
        collection_record.source_collection = source_collection
        collection_record.institution_id = institution_code
        collection_record.reference = reference
        # habitat and origin default to None, so guard before lower()
        if habitat:
            collection_record.collection_habitat = habitat.lower()
        if origin:
            for category in BiologicalCollectionRecord.CATEGORY_CHOICES:
                if origin.lower() == category[1].lower():
                    origin = category[0]
                    break
            collection_record.category = origin
        collection_record.validated = True
        collection_record.additional_data = {
            'fetch_from_gbif': True,
            'date_fetched': datetime.datetime.now().strftime(
                '%Y-%m-%d %H:%M:%S')
        }
        collection_record.save()
        logger.info('Collection record id {} has been updated'.format(
            collection_record.id))

    # reconnect post save handler
    models.signals.post_save.connect(location_site_post_save_handler)
    models.signals.post_save.connect(collection_post_save_handler)

    if data_count > offset:
        # Import more occurrences (GBIF pages are 20 records by default)
        import_gbif_occurrences(
            taxonomy=taxonomy,
            offset=offset + 20,
            habitat=habitat,
            origin=origin,
        )

def handle(self, *args, **options):
    point_file_name = args[0]
    point_file_reader = csv.reader(open(point_file_name, 'rb'),
                                   delimiter='\t')
    if len(args) > 1:
        start_index = args[1]
    else:
        start_index = None

    # setup the data logger
    data_logger = logging.getLogger('DataLogger')
    data_logger.setLevel(logging.INFO)
    data_handler = logging.handlers.TimedRotatingFileHandler(
        'data.log', 'H', 1)
    data_handler.setFormatter(
        logging.Formatter('%(asctime)s%(msecs)d|%(message)s',
                          datefmt='%Y%m%d%H%M%S'))
    data_logger.addHandler(data_handler)

    # setup the application logger
    logger = logging.getLogger('AppLogger')
    logger.setLevel(logging.INFO)
    # Rotate the application log once at midnight
    logging_handler = logging.handlers.TimedRotatingFileHandler(
        'app.log', 'midnight', 1)
    logging_handler.setFormatter(
        logging.Formatter('%(asctime)s%(msecs)d|%(message)s',
                          datefmt='%Y%m%d%H%M%S'))
    logger.addHandler(logging_handler)

    # Skip rows until the start index is seen; process everything when
    # no start index was given
    if start_index:
        start = False
    else:
        start = True

    for row in point_file_reader:
        tweet_id = row[0]
        if tweet_id == start_index:
            start = True
        if start:
            longitude = row[1]
            latitude = row[2]
            p = Point(float(longitude), float(latitude))

            response_data = {}
            response_data['tweet_id'] = tweet_id

            counties = CountyBoundary.objects.filter(geom__contains=p)
            if len(counties) > 0:
                response_data['county'] = counties[0].name10
                response_data['county_fips'] = counties[0].geoid10
            else:
                response_data['county'] = None

            places = Place.objects.filter(geom__contains=p)
            if len(places) > 0:
                response_data['place'] = places[0].namelsad10
                response_data['place_fips'] = places[0].geoid10
            else:
                response_data['place'] = None

            zipcodes = ZipCode.objects.filter(geom__contains=p)
            if len(zipcodes) > 0:
                response_data['zcta'] = zipcodes[0].zcta5ce10
            else:
                response_data['zcta'] = None

            states = State.objects.filter(geom__contains=p)
            if len(states) > 0:
                response_data['state'] = states[0].stusps10
                response_data['state_fips'] = states[0].geoid10
            else:
                response_data['state'] = None

            metros = MetropolitanStatisticalArea.objects.filter(
                geom__contains=p)
            if len(metros) > 0:
                response_data['metro'] = metros[0].name
                response_data['metro_code'] = metros[0].cbsafp
            else:
                response_data['metro'] = None

            data_logger.info(simplejson.dumps(response_data))
            logger.info(tweet_id)

def from_lat_lng(_class, lat, lng, data=None):
    # GEOS Points take (x, y) == (longitude, latitude)
    return _class.objects.create(
        point=Point(x=lng, y=lat),
        googlemapsjson=data,
    )

def get_location(self, record):
    from django.contrib.gis.geos import Point
    lon = record['lng']
    lat = record['lat']
    return Point(lon, lat)

def get_station_point(self, record):
    # easting/northing are British National Grid coordinates (EPSG:27700)
    return Point(float(record.easting), float(record.northing), srid=27700)

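# Hedged sketch, not from the original source: GeoDjango can reproject the
# EPSG:27700 point returned above to WGS84 lon/lat in place with
# transform(), provided GDAL is available. The easting/northing values are
# illustrative placeholders.
from django.contrib.gis.geos import Point

station = Point(531628.0, 181378.0, srid=27700)  # easting, northing
station.transform(4326)  # in-place reprojection to WGS84
# station.x now holds longitude, station.y latitude
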
def import_data(self):
    """
    Import data from this data file. Abort if data already exists.

    Import errors are appended to the import_errors field.
    Return True if data was imported, else return False.
    """
    if self.import_started:
        self._messages = ["File data already being imported"]
        self._write_messages(append=True, save=True)
        return False
    self.import_started = timezone.now()
    self.save()

    datagrid = excread.excread(str(self.filepath))

    MANDATORY_VARS = (
        'EXPOCODE',
        'EXC_DATETIME',
        'EXC_CTDDEPTH',
        'STNNBR',
        'LATITUDE',
        'LONGITUDE',
    )

    # Variables not to be treated as data variables
    IGNORE = (
        'EXPOCODE',
        'EXC_DATETIME',
        'EXC_CTDDEPTH',
        'STNNBR',
        'SECT_ID',
        'DATE',
        'TIME',
        'LATITUDE',
        'LONGITUDE',
        'BTLNBR',
        'BTLNBR_FLAG_W',
        'SAMPNO',
        'CASTNO',
        'CTDDEPTH',
        'CTDDEP',
        'HOUR',
        'MINUTE',
        'DEPTH',
    )

    QC_SUFFIX = '_FLAG_W'

    data_set = None
    station = None
    cast = None
    depth = None

    # Bail out if mandatory columns are missing
    if not all(key in datagrid.columns for key in MANDATORY_VARS):
        message = "Data file missing some mandatory column: {}".format(
            ', '.join(MANDATORY_VARS)
        )
        self._messages.append(message)
        self._write_messages()
        self.import_finnished = timezone.now()
        self.save()
        return False

    # Import data types
    missing_vars = []
    data_type_names = {
        str(type_): type_ for type_ in DataTypeName.objects.all()
    }
    for var in datagrid.columns:
        if var in IGNORE:
            continue
        if var.endswith(QC_SUFFIX):
            continue
        if var not in data_type_names:
            missing_vars.append(var)

    if missing_vars:
        message = """There were variables in the data set that are not
                defined in the system. These cannot be handled. An
                administrator has to add the variables as data types for
                them to be treated. Unhandled variables in the data set:
                {}
        """.format(', '.join(missing_vars))
        self._messages.append(message)

    missing_depth_warning = False  # Indicate missing depth already warned
    missing_position_warning = False

    # (Hopefully sensible) defaults for authoritative temp, salin, pressure
    temp_aut = DataTypeName.objects.filter(name="CTDTMP").first()
    salin_aut = DataTypeName.objects.filter(name="CTDSAL").first()
    press_aut = DataTypeName.objects.filter(name="CTDPRS").first()

    value_list = []
    line_no = 0
    for i, expo in enumerate(datagrid['EXPOCODE']):
        line_no += 1
        if not data_set or expo != data_set.expocode:
            # Add new dataset
            data_set = DataSet(
                expocode=expo,
                is_reference=False,
                data_file=self,
                owner=self.owner,
                temp_aut=temp_aut,
                salin_aut=salin_aut,
                press_aut=press_aut,
            )
            if DataSet.objects.filter(
                    expocode=expo,
                    owner=self.owner
            ).exists():
                # TODO Support files with multiple datasets, where one or
                # more might already exist in database, but not all.
                message = 'Dataset {} already exists for this user'.format(
                    expo
                )
                self._messages = [message]
                self._write_messages()
                self.import_finnished = timezone.now()
                self.save()
                return False
            data_set.save()
            station = None
            cast = None
            depth = None

        if not station or datagrid['STNNBR'][i] != station.station_number:
            longitude = datagrid['LONGITUDE'][i]
            latitude = datagrid['LATITUDE'][i]
            if math.isnan(longitude) or math.isnan(latitude):
                if missing_position_warning:
                    continue
                # Warn and don't insert if the position is NaN
                message = """Latitude or longitude is nan on line {}.
                        Station will not be added when position is missing.
                        Subsequent missing position errors are suppressed
                        for this file.
                """.format(i)
                self._messages.append(message)
                missing_position_warning = True
                continue
            # Add new station
            station = Station(
                data_set=data_set,
                position=Point(longitude, latitude),
                station_number=int(datagrid['STNNBR'][i])
            )
            station.save()
            cast = None
            depth = None

        if (
                not cast
                or ('CASTNO' in datagrid
                    and datagrid['CASTNO'][i] != cast.cast)
        ):
            # Add new cast
            cast_ = 1
            if 'CASTNO' in datagrid:
                cast_ = int(datagrid['CASTNO'][i])
            cast = Cast(station=station, cast=cast_)
            cast.save()
            depth = None

        if (
                not depth
                or depth.depth != datagrid['EXC_CTDDEPTH'][i]
                or (
                    'BTLNBR' in datagrid
                    and depth.bottle != datagrid['BTLNBR'][i]
                )
        ):
            if math.isnan(datagrid['EXC_CTDDEPTH'][i]):
                if missing_depth_warning:
                    continue
                # Warn and don't insert if depth is NaN
                message = """Depth is nan on line {}. Data will not be
                        added when depth is nan. Subsequent missing depth
                        errors are suppressed for this file.
                """.format(i)
                self._messages.append(message)
                missing_depth_warning = True
                continue
            # Add new depth
            btlnbr = datagrid.get('BTLNBR', False)
            depth = Depth(
                cast=cast,
                depth=float(datagrid['EXC_CTDDEPTH'][i]),
                bottle=1 if btlnbr is False else btlnbr[i],
                date_and_time=datagrid['EXC_DATETIME'][i],
            )
            try:
                depth.save()
            except Exception as e:
                m = "Line {}, Error {}".format(i, str(e))
                self._messages = [m]
                self._write_messages(append=True, save=True)
                raise e
        elif (
                depth.depth == datagrid['EXC_CTDDEPTH'][i]
                and datagrid['CASTNO'][i] == cast.cast
                and datagrid['STNNBR'][i] == station.station_number
                and expo == data_set.expocode
        ):
            # Implies duplicate line. Skip this line with a warning
            m = "Line {}, Error {}".format(i, "Duplicate, ignores line")
            self._messages = [m]
            self._write_messages(append=True, save=True)
            continue

        temp_val = salin_val = press_val = None
        for key in datagrid.columns:
            if key in IGNORE:
                continue
            if key not in data_type_names:
                # Variable not found in database. Already reported.
                continue
            v = datagrid[key][i].item()

            # collect temp, press, salin values
            if key == temp_aut.name:
                temp_val = v
            if key == salin_aut.name:
                salin_val = v
            if key == press_aut.name:
                press_val = v

            # Don't import missing values:
            if numpy.isnan(v) or v < -10:
                continue

            qc_flag = None
            if (
                    key + QC_SUFFIX in datagrid
                    and not numpy.isnan(datagrid[key + QC_SUFFIX][i])
            ):
                qc_flag = int(datagrid[key + QC_SUFFIX][i])
            value = DataValue(
                depth=depth,
                value=v,
                qc_flag=qc_flag,
                data_type_name=data_type_names[key]
            )
            value_list.append(value)

        # If all are set, we can calculate sigma4
        if None not in [temp_val, salin_val, press_val]:
            try:
                sigma4 = gsw.density.sigma4(
                    gsw.conversions.SA_from_SP(
                        salin_val,
                        press_val,
                        longitude,
                        latitude,
                    ),
                    temp_val,
                )
                depth.sigma4 = sigma4
                depth.save()
            except Exception as e:
                m = "Line {}, Error {}".format(i, str(e))
                self._messages = [m]
                self._write_messages(append=True, save=True)
                raise e

        # Flush to the database every 500 lines, so memory is not exhausted
        if line_no % 500 == 0 and value_list:
            DataValue.objects.bulk_create(value_list)
            value_list = []

    # Save remaining data values
    if value_list:
        DataValue.objects.bulk_create(value_list)

    self._write_messages()
    self.import_finnished = timezone.now()
    self.save()
    return True

def test_plan_derive_location_from_related_plans():
    plan = get_plan()
    bp_1 = get_barrier_plan(
        location=Point(10.0, 10.0, 0.0, srid=settings.SRID), plan=plan)
    bp_2 = get_barrier_plan(
        location=Point(5.0, 5.0, 0.0, srid=settings.SRID), plan=plan)
    mp_1 = get_mount_plan(
        location=Point(20.0, 5.0, 0.0, srid=settings.SRID), plan=plan)
    mp_2 = get_mount_plan(
        location=Point(100.0, 10.0, 0.0, srid=settings.SRID), plan=plan)
    rmp_1 = get_road_marking_plan(
        location=Point(0.0, 50.0, 0.0, srid=settings.SRID), plan=plan)
    rmp_2 = get_road_marking_plan(
        location=Point(100.0, 100.0, 0.0, srid=settings.SRID), plan=plan)
    sp_1 = get_signpost_plan(
        location=Point(10.0, 100.0, 0.0, srid=settings.SRID), plan=plan)
    sp_2 = get_signpost_plan(
        location=Point(35.0, 130.0, 0.0, srid=settings.SRID), plan=plan)
    tlp_1 = get_traffic_light_plan(
        location=Point(55.0, 120.0, 0.0, srid=settings.SRID), plan=plan)
    tlp_2 = get_traffic_light_plan(
        location=Point(90.0, 115.0, 0.0, srid=settings.SRID), plan=plan)
    tsp_1 = get_traffic_sign_plan(
        location=Point(55.0, 5.0, 0.0, srid=settings.SRID), plan=plan)
    tsp_2 = get_traffic_sign_plan(
        location=Point(95.0, 110.0, 0.0, srid=settings.SRID), plan=plan)
    asp_1 = get_additional_sign_plan(
        location=Point(80.0, 120.0, 0.0, srid=settings.SRID))
    asp_2 = get_additional_sign_plan(
        location=Point(80.0, 120.0, 0.0, srid=settings.SRID), parent=tsp_2)

    noise_bp = get_barrier_plan(
        location=Point(150.0, 150.0, 0.0, srid=settings.SRID))
    noise_mp = get_mount_plan(
        location=Point(150.0, 150.0, 0.0, srid=settings.SRID))
    noise_rmp = get_road_marking_plan(
        location=Point(150.0, 150.0, 0.0, srid=settings.SRID))
    noise_sp = get_signpost_plan(
        location=Point(150.0, 150.0, 0.0, srid=settings.SRID))
    noise_tlp = get_traffic_light_plan(
        location=Point(150.0, 150.0, 0.0, srid=settings.SRID))
    noise_tsp = get_traffic_sign_plan(
        location=Point(150.0, 150.0, 0.0, srid=settings.SRID))
    noise_asp = get_additional_sign_plan(
        location=Point(150.0, 150.0, 0.0, srid=settings.SRID))

    plan.refresh_from_db()
    plan.derive_location_from_related_plans()

    assert plan.location.contains(bp_1.location)
    assert plan.location.contains(bp_2.location)
    assert plan.location.contains(mp_1.location)
    assert plan.location.contains(mp_2.location)
    assert plan.location.contains(rmp_1.location)
    assert plan.location.contains(rmp_2.location)
    assert plan.location.contains(sp_1.location)
    assert plan.location.contains(sp_2.location)
    assert plan.location.contains(tlp_1.location)
    assert plan.location.contains(tlp_2.location)
    assert plan.location.contains(tsp_1.location)
    assert plan.location.contains(tsp_2.location)
    assert plan.location.contains(asp_1.location)
    assert plan.location.contains(asp_2.location)
    assert not plan.location.contains(noise_bp.location)
    assert not plan.location.contains(noise_mp.location)
    assert not plan.location.contains(noise_rmp.location)
    assert not plan.location.contains(noise_sp.location)
    assert not plan.location.contains(noise_tlp.location)
    assert not plan.location.contains(noise_tsp.location)
    assert not plan.location.contains(noise_asp.location)

def test_plan_get_related_locations():
    plan = get_plan()
    bp_1 = get_barrier_plan(
        location=Point(10.0, 10.0, 0.0, srid=settings.SRID), plan=plan)
    bp_2 = get_barrier_plan(
        location=Point(5.0, 5.0, 0.0, srid=settings.SRID), plan=plan)
    mp_1 = get_mount_plan(
        location=Point(20.0, 5.0, 0.0, srid=settings.SRID), plan=plan)
    mp_2 = get_mount_plan(
        location=Point(100.0, 10.0, 0.0, srid=settings.SRID), plan=plan)
    rmp_1 = get_road_marking_plan(
        location=Point(0.0, 50.0, 0.0, srid=settings.SRID), plan=plan)
    rmp_2 = get_road_marking_plan(
        location=Point(100.0, 100.0, 0.0, srid=settings.SRID), plan=plan)
    sp_1 = get_signpost_plan(
        location=Point(10.0, 100.0, 0.0, srid=settings.SRID), plan=plan)
    sp_2 = get_signpost_plan(
        location=Point(35.0, 130.0, 0.0, srid=settings.SRID), plan=plan)
    tlp_1 = get_traffic_light_plan(
        location=Point(55.0, 120.0, 0.0, srid=settings.SRID), plan=plan)
    tlp_2 = get_traffic_light_plan(
        location=Point(90.0, 115.0, 0.0, srid=settings.SRID), plan=plan)
    tsp_1 = get_traffic_sign_plan(
        location=Point(55.0, 5.0, 0.0, srid=settings.SRID), plan=plan)
    tsp_2 = get_traffic_sign_plan(
        location=Point(95.0, 110.0, 0.0, srid=settings.SRID), plan=plan)
    asp_1 = get_additional_sign_plan(
        location=Point(80.0, 120.0, 0.0, srid=settings.SRID), plan=plan)
    asp_2 = get_additional_sign_plan(
        location=Point(80.0, 120.0, 0.0, srid=settings.SRID),
        parent=tsp_2, plan=plan)

    locations = plan._get_related_locations()

    assert bp_1.location in locations
    assert bp_2.location in locations
    assert mp_1.location in locations
    assert mp_2.location in locations
    assert rmp_1.location in locations
    assert rmp_2.location in locations
    assert sp_1.location in locations
    assert sp_2.location in locations
    assert tlp_1.location in locations
    assert tlp_2.location in locations
    assert tsp_1.location in locations
    assert tsp_2.location in locations
    assert asp_1.location in locations
    assert asp_2.location in locations

def test_openwisp(self):
    """ test OpenWISP synchronizer """
    layer = Layer.objects.external()[0]
    layer.minimum_distance = 0
    layer.area = None
    layer.new_nodes_allowed = False
    layer.save()
    layer = Layer.objects.get(pk=layer.pk)

    xml_url = '%snodeshot/testing/openwisp-georss.xml' % settings.STATIC_URL

    external = LayerExternal(layer=layer)
    external.interoperability = 'nodeshot.interoperability.synchronizers.OpenWISP'
    external.config = '{ "url": "%s" }' % xml_url
    external.save()

    # start capturing print statements
    output = StringIO()
    sys.stdout = output

    # execute command
    management.call_command('synchronize', 'vienna', verbosity=0)

    # stop capturing print statements
    sys.stdout = sys.__stdout__

    # ensure following text is in output
    self.assertIn('42 nodes added', output.getvalue())
    self.assertIn('0 nodes changed', output.getvalue())
    self.assertIn('42 total external', output.getvalue())
    self.assertIn('42 total local', output.getvalue())

    # start checking DB too
    nodes = layer.node_set.all()

    # ensure all nodes have been imported
    self.assertEqual(nodes.count(), 42)

    # check one particular node has the data we expect it to have
    node = Node.objects.get(slug='podesta1-ced')
    self.assertEqual(node.name, 'Podesta1 CED')
    self.assertEqual(node.address, 'Test WISP')
    point = Point(8.96166, 44.4185)
    self.assertTrue(node.geometry.equals(point))
    self.assertEqual(node.updated.strftime('%Y-%m-%d'), '2013-07-10')
    self.assertEqual(node.added.strftime('%Y-%m-%d'), '2011-08-24')

    ### --- with the following step we expect some nodes to be deleted --- ###

    xml_url = '%snodeshot/testing/openwisp-georss2.xml' % settings.STATIC_URL
    external.config = '{ "url": "%s" }' % xml_url
    external.save()

    # start capturing print statements
    output = StringIO()
    sys.stdout = output

    # execute command
    management.call_command('synchronize', 'vienna', verbosity=0)

    # stop capturing print statements
    sys.stdout = sys.__stdout__

    # ensure following text is in output
    self.assertIn('4 nodes unmodified', output.getvalue())
    self.assertIn('38 nodes deleted', output.getvalue())
    self.assertIn('0 nodes changed', output.getvalue())
    self.assertIn('4 total external', output.getvalue())
    self.assertIn('4 total local', output.getvalue())

    # ensure all nodes have been imported
    self.assertEqual(nodes.count(), 4)

    # check one particular node has the data we expect it to have
    node = Node.objects.get(slug='lercari2-42')
    self.assertEqual(node.name, 'Lercari2 42')
    self.assertEqual(node.address, 'Test WISP')
    point = Point(8.96147, 44.4076)
    self.assertTrue(node.geometry.equals(point))
    self.assertEqual(node.updated.strftime('%Y-%m-%d'), '2013-07-10')
    self.assertEqual(node.added.strftime('%Y-%m-%d'), '2013-06-14')

logging.info("Not adding unknown feature type") # point features are coaslesced into a single layer for efficiency if len(point_features): logging.debug("Adding %i point features in 1 layer" % len(point_features)) point_style = mapnik.Style() point_rule = mapnik.Rule() point_symbolizer = mapnik.PointSymbolizer() point_rule.symbols.append(point_symbolizer) point_style.rules.append(point_rule) m.append_style('point_style', point_style) csv = 'wkt\n' for feature in point_features: geos_feature = Point(feature['geometry']['coordinates']) geos_feature.srid = 4326 geos_feature.transform(render_srid) geometries.append(geos_feature) csv += '"' + geos_feature.wkt + '"\n' point_layer = mapnik.Layer('point layer') point_layer.datasource = mapnik.CSV(inline=csv) point_layer.styles.append('point_style') point_layer.srs = '+init=epsg:' + str(render_srid) m.layers.append(point_layer) # bounds not in url, calculate from data if not bounds_box: geometry_collection = GeometryCollection(geometries)
def test_provinciawifi(self):
    """ test ProvinciaWIFI converter """
    layer = Layer.objects.external()[0]
    layer.minimum_distance = 0
    layer.area = None
    layer.new_nodes_allowed = False
    layer.save()
    layer = Layer.objects.get(pk=layer.pk)

    xml_url = '%snodeshot/testing/provincia-wifi.xml' % settings.STATIC_URL

    external = LayerExternal(layer=layer)
    external.interoperability = 'nodeshot.interoperability.synchronizers.ProvinciaWIFI'
    external.config = '{ "url": "%s" }' % xml_url
    external.save()

    # start capturing print statements
    output = StringIO()
    sys.stdout = output

    # execute command
    management.call_command('synchronize', 'vienna', verbosity=0)

    # stop capturing print statements
    sys.stdout = sys.__stdout__

    # ensure following text is in output
    self.assertIn('5 nodes added', output.getvalue())
    self.assertIn('0 nodes changed', output.getvalue())
    self.assertIn('5 total external', output.getvalue())
    self.assertIn('5 total local', output.getvalue())

    # start checking DB too
    nodes = layer.node_set.all()

    # ensure all nodes have been imported
    self.assertEqual(nodes.count(), 5)

    # check one particular node has the data we expect it to have
    node = Node.objects.get(slug='viale-di-valle-aurelia-73')
    self.assertEqual(node.name, 'viale di valle aurelia, 73')
    self.assertEqual(node.address, 'viale di valle aurelia, 73, Roma')
    point = Point(12.4373, 41.9025)
    self.assertTrue(node.geometry.equals(point))

    # ensure items with the same name on the XML get a different name in the DB
    node = Node.objects.get(slug='largo-agostino-gemelli-8')
    node = Node.objects.get(slug='largo-agostino-gemelli-8-2')
    node = Node.objects.get(slug='largo-agostino-gemelli-8-3')
    node = Node.objects.get(slug='largo-agostino-gemelli-8-4')

    ### --- with the following step we expect some nodes to be deleted and some to be added --- ###

    xml_url = '%snodeshot/testing/provincia-wifi2.xml' % settings.STATIC_URL
    external.config = '{ "url": "%s" }' % xml_url
    external.save()

    # start capturing print statements
    output = StringIO()
    sys.stdout = output

    # execute command
    management.call_command('synchronize', 'vienna', verbosity=0)

    # stop capturing print statements
    sys.stdout = sys.__stdout__

    # ensure following text is in output
    self.assertIn('1 nodes added', output.getvalue())
    self.assertIn('2 nodes unmodified', output.getvalue())
    self.assertIn('3 nodes deleted', output.getvalue())
    self.assertIn('0 nodes changed', output.getvalue())
    self.assertIn('3 total external', output.getvalue())
    self.assertIn('3 total local', output.getvalue())

    # ensure all nodes have been imported
    self.assertEqual(nodes.count(), 3)

    # check one particular node has the data we expect it to have
    node = Node.objects.get(slug='via-g-pullino-97')
    self.assertEqual(node.name, 'Via G. Pullino 97')
    self.assertEqual(node.address, 'Via G. Pullino 97, Roma')
    self.assertEqual(
        node.description,
        'Indirizzo: Via G. Pullino 97, Roma; Tipologia: Privati federati')
    point = Point(12.484, 41.8641)
    self.assertTrue(node.geometry.equals(point))

def render_static(request, height=None, width=None, format='png',
                  background='satellite', bounds=None, center=None,
                  render_srid=3857):

    # width and height
    width = int(width)
    height = int(height)
    if width > settings.MAX_IMAGE_DIMENSION or \
            height > settings.MAX_IMAGE_DIMENSION or \
            width <= 1 or height <= 1:
        logging.debug("Invalid size")
        return HttpResponseBadRequest(
            "Invalid image size, both dimensions must be in range %i-%i"
            % (1, settings.MAX_IMAGE_DIMENSION))

    # image format
    if format not in IMAGE_FORMATS:
        logging.error("unknown image format %s" % format)
        return HttpResponseBadRequest(
            "Unknown image format, available formats: "
            + ", ".join(IMAGE_FORMATS))
    if format.startswith('png'):
        mimetype = 'image/png'
    elif format.startswith('jpeg'):
        mimetype = 'image/jpeg'

    # bounds
    bounds_box = None
    if bounds:
        bounds_components = bounds.split(',')
        if len(bounds_components) != 4:
            return HttpResponseBadRequest(
                "Invalid bounds, must be 4 comma-separated numbers")
        bounds_components = [float(f) for f in bounds_components]

        if not (-180 < bounds_components[0] < 180) \
                or not (-180 < bounds_components[2] < 180):
            logging.error("x out of range %f or %f"
                          % (bounds_components[0], bounds_components[2]))
            return HttpResponseBadRequest(
                "x out of range %f or %f"
                % (bounds_components[0], bounds_components[2]))
        if not (-90 < bounds_components[1] < 90) \
                or not (-90 < bounds_components[3] < 90):
            logging.error("y out of range %f or %f"
                          % (bounds_components[1], bounds_components[3]))
            return HttpResponseBadRequest(
                "y out of range %f or %f"
                % (bounds_components[1], bounds_components[3]))

        ll = Point(bounds_components[0], bounds_components[1], srid=4326)
        ll.transform(render_srid)
        ur = Point(bounds_components[2], bounds_components[3], srid=4326)
        ur.transform(render_srid)
        bounds_box = mapnik.Box2d(ll.x, ll.y, ur.x, ur.y)
    elif center:
        center_components = center.split(',')
        if len(center_components) != 3:
            return HttpResponseBadRequest()
        lon = float(center_components[0])
        lat = float(center_components[1])
        zoom = int(center_components[2])
        # todo calc bounds from center and zoom

    # baselayer
    if background not in settings.BASE_LAYERS and background != 'none':
        return HttpResponseNotFound("Background not found")

    # GeoJSON post data
    if request.method == "POST" and len(request.body):
        input_data = json.loads(request.body)
    else:
        input_data = None

    if not bounds and not center and not input_data:
        return HttpResponseBadRequest(
            "Bounds, center, or post data is required.")

    # initialize map
    m = mapnik.Map(width, height)
    m.srs = '+init=epsg:' + str(render_srid)

    # add a tile source as a background
    if background != "none":
        background_file = settings.BASE_LAYERS[background]
        background_style = mapnik.Style()
        background_rule = mapnik.Rule()
        background_rule.symbols.append(mapnik.RasterSymbolizer())
        background_style.rules.append(background_rule)
        m.append_style('background style', background_style)
        tile_layer = mapnik.Layer('background')
        tile_layer.srs = '+init=epsg:' + str(render_srid)
        tile_layer.datasource = mapnik.Gdal(base=settings.BASE_LAYER_DIR,
                                            file=background_file)
        tile_layer.styles.append('background style')
        m.layers.append(tile_layer)

    # add features from geojson
    if input_data and input_data['type'] == "Feature":
        features = [input_data]
    elif input_data and input_data['type'] == "FeatureCollection":
        if 'features' not in input_data:
            return HttpResponseBadRequest()
        features = input_data['features']
    else:
        features = []

    logging.debug("Adding %d features to map" % len(features))

    geometries = []
    point_features = []
    fid = 0
    for feature in features:
        if 'geometry' not in feature:
            logging.debug("feature does not have geometry")
            return HttpResponseBadRequest("Feature does not have a geometry")
        if 'type' not in feature['geometry']:
            logging.debug("geometry does not have type")
            return HttpResponseBadRequest("Geometry does not have a type")

        fid += 1
        style_name = str(fid)

        if feature['geometry']['type'] == 'Point':
            point_features.append(feature)
        elif feature['geometry']['type'] in ('LineString', 'MultiLineString'):
            if feature['geometry']['type'] == 'LineString':
                geos_feature = LineString(feature['geometry']['coordinates'])
            elif feature['geometry']['type'] == 'MultiLineString':
                rings = feature['geometry']['coordinates']
                rings = [[(c[0], c[1]) for c in r] for r in rings]
                if len(rings) == 1:
                    geos_feature = LineString(rings[0])
                else:
                    linestrings = []
                    for ring in rings:
                        try:
                            linestrings.append(LineString(ring))
                        except Exception as e:
                            logging.error("Error adding ring: %s", e)
                    geos_feature = MultiLineString(linestrings)

            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)

            style = mapnik.Style()
            line_rule = mapnik.Rule()
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            line_rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(line_rule)
            m.append_style(style_name, style)

            wkt = geos_feature.wkt
            line_layer = mapnik.Layer(style_name + ' layer')
            line_layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            line_layer.styles.append(style_name)
            line_layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(line_layer)
        elif feature['geometry']['type'] == 'Polygon' \
                or feature['geometry']['type'] == 'MultiPolygon':
            geos_feature = GEOSGeometry(json.dumps(feature['geometry']))
            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)

            style = mapnik.Style()
            rule = mapnik.Rule()
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            rule.symbols.append(polygon_symbolizer(style_dict))
            rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(rule)
            m.append_style(style_name, style)

            wkt = geos_feature.wkt
            layer = mapnik.Layer(style_name + ' layer')
            layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            layer.styles.append(style_name)
            layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(layer)

def endElement(self, name):
    if name in ('node', 'way') and self.valid:
        try:
            types = self.find_types(self.tags)
        except ValueError:
            self.ignore_count += 1
            return

        # Ignore ways that lie partly outside our bounding box
        if name == 'way' and not all(id in self.node_locations
                                     for id in self.nodes):
            return

        # Ignore disused and under-construction entities
        if self.tags.get('life_cycle', 'in_use') != 'in_use' \
                or self.tags.get('disused') in ('1', 'yes', 'true'):
            return

        # Memory management in debug mode
        reset_queries()

        if self.id in self.identities:
            entity = get_entity(*self.identities[self.id].split(':'))
            entity.metadata['osm'] = {
                'attrs': dict(self.attrs),
                'tags': dict(zip((k.replace(':', '_')
                                  for k in self.tags.keys()),
                                 self.tags.values()))
            }
            identifiers = entity.identifiers
            identifiers.update({'osm': self.id})
            entity.save(identifiers=identifiers)

            entity.all_types = set(entity.all_types.all()) \
                | set(self.entity_types[et] for et in types)
            entity.update_all_types_completion()
            self.ids.remove(self.id)
        else:
            try:
                entity = Entity.objects.get(source=self.source,
                                            _identifiers__scheme='osm',
                                            _identifiers__value=self.id)
                created = False
            except Entity.DoesNotExist:
                entity = Entity(source=self.source)
                created = True

            if 'osm' not in entity.metadata or \
                    entity.metadata['osm'].get('attrs', {}).get(
                        'timestamp', '') < self.attrs['timestamp']:
                if created:
                    self.create_count += 1
                else:
                    self.modify_count += 1

                if name == 'node':
                    entity.location = Point(self.node_location, srid=4326)
                    entity.geometry = entity.location
                elif name == 'way':
                    cls = LinearRing if self.nodes[0] == self.nodes[-1] \
                        else LineString
                    entity.geometry = cls(
                        [self.node_locations[n] for n in self.nodes],
                        srid=4326)
                    min_, max_ = (float('inf'), float('inf')), \
                        (float('-inf'), float('-inf'))
                    for lon, lat in [self.node_locations[n]
                                     for n in self.nodes]:
                        min_ = min(min_[0], lon), min(min_[1], lat)
                        max_ = max(max_[0], lon), max(max_[1], lat)
                    entity.location = Point((min_[0] + max_[0]) / 2,
                                            (min_[1] + max_[1]) / 2,
                                            srid=4326)
                else:
                    raise AssertionError(
                        "There should be no other types of entity "
                        "we're to deal with.")

                names = dict()
                for lang_code, lang_name in settings.LANGUAGES:
                    with override(lang_code):
                        if '-' in lang_code:
                            tags_to_try = (
                                'name:%s' % lang_code,
                                'name:%s' % lang_code.split('-')[0],
                                'name', 'operator')
                        else:
                            tags_to_try = ('name:%s' % lang_code,
                                           'name', 'operator')
                        name = None
                        for tag_to_try in tags_to_try:
                            if self.tags.get(tag_to_try):
                                name = self.tags.get(tag_to_try)
                                break
                        if name is None:
                            try:
                                name = reverse_geocode(
                                    *entity.location)[0]['name']
                                if not name:
                                    raise IndexError
                                name = u"↝ %s" % name
                            except IndexError:
                                name = u"↝ %f, %f" % (
                                    self.node_location[1],
                                    self.node_location[0])
                        names[lang_code] = name

                entity.metadata['osm'] = {
                    'attrs': dict(self.attrs),
                    'tags': dict(zip((k.replace(':', '_')
                                      for k in self.tags.keys()),
                                     self.tags.values()))
                }
                entity.primary_type = self.entity_types[types[0]]

                identifiers = entity.identifiers
                identifiers.update({'osm': self.id})
                entity.save(identifiers=identifiers)

                for lang_code, name in names.items():
                    set_name_in_language(entity, lang_code, title=name)

                entity.all_types = [self.entity_types[et] for et in types]
                entity.update_all_types_completion()
            else:
                self.unchanged_count += 1

def choosePlace(request, ptype):
    userName = request.user.get_username()
    if request.method == 'POST':
        logger.info('user=%s choosePlace-Post' % (userName))
        qform = getPlaceList(request.POST, ptype=ptype)
        if not qform.is_valid():
            return HttpResponse("Invalid placeList form?!")
        qryData = qform.cleaned_data
        tpchoice = qryData['placeList']
        xlng = tpchoice.xlng
        ylat = tpchoice.ylat

        srs_default = 4326  # WGS84
        srs_10N = 26910     # UTM zone 10N
        closeRadius = 500
        pt = Point(xlng, ylat, srid=srs_default)
        pt.transform(srs_10N)

        # tstDT = datetime(2017, 2, 10, 17, 00)
        nowDT = awareDT(datetime.now())
        minDate = nowDT - timedelta(days=180)

        # emulate psql:
        # select point from table where point &&
        # ST_Transform( ST_Buffer( ST_Transform( point, 32610 ), 500 ), 4326 )
        queryset = OakCrime.objects.filter(cdateTime__gt=minDate). \
            exclude(xlng__isnull=True). \
            exclude(ylat__isnull=True). \
            filter(point__distance_lte=(pt, D(m=closeRadius))). \
            order_by('cdateTime')

        incidList = list(queryset)

        logger.info('username=%s choosePlace: Ptype=%s Choice=%s NIncid=%d '
                    'near (xlng=%s,ylat=%s)' %
                    (userName, ptype, tpchoice.name, len(incidList),
                     xlng, ylat))

        context = {}
        context['lat'] = ylat
        context['lng'] = xlng
        context['nIncid'] = len(incidList)
        context['incidList'] = incidList
        context['ptype'] = ptype
        context['pdesc'] = tpchoice.desc

        return render(request, 'dailyIncid/nearHereListMB.html', context)
    else:
        logger.info('user=%s choosePlace-nonPost' % (userName))
        # qform = getPlaceList()
        qform = getPlaceList(ptype=ptype)
        qs2 = TargetPlace.objects.filter(placeType=ptype)
        qsl = [(tp.ylat, tp.xlng, tp.name, tp.desc) for tp in list(qs2)]
        return render(request, 'dailyIncid/getPlaceName.html',
                      {'form': qform, 'ptype': ptype, 'qsl': qsl})

def update(self):
    """
    Download Calendar RSS feed and update database
    """
    logger.info("Starting EventsCalendarScraper")

    feed = feedparser.parse(self.url)
    seencount = addcount = updatecount = 0
    for entry in feed.entries:

        def ns_get(element):
            # work around feedparser unpredictability.
            namespace, element = element.split(':')
            result = entry.get('%s_%s' % (namespace, element))
            if result is None:
                result = entry.get(element)
            return result

        seencount += 1
        title = convert_entities(entry.title)
        try:
            item = NewsItem.objects.get(title=title,
                                        schema__id=self.schema.id)
            status = "updated"
        except NewsItem.DoesNotExist:
            item = NewsItem()
            status = "added"
        except NewsItem.MultipleObjectsReturned:
            logger.warn("Multiple entries matched title %r, "
                        "event titles are not unique?" % title)
            continue

        try:
            item.location_name = '%s %s' % (
                ns_get('xcal:x-calconnect-venue-name'),
                ns_get('xcal:x-calconnect-street'))
            item.location_name = item.location_name.strip()
            item.schema = self.schema
            item.title = title
            item.description = convert_entities(entry.description)
            item.url = entry.link
            start_dt = ns_get('xcal:dtstart')
            start_dt = dateutil.parser.parse(start_dt)
            # Upstream bug: They provide a UTC offset of +0000 which
            # means times in UTC, but they're actually times in
            # US/Eastern, so do *not* fix the zone.
            # start_dt = start_dt.astimezone(local_tz)
            item.item_date = start_dt.date()
            item.pub_date = datetime.datetime(*entry.updated_parsed[:6])
            item.location = Point((float(ns_get('geo:long')),
                                   float(ns_get('geo:lat'))))
            if (item.location.x, item.location.y) == (0.0, 0.0):
                logger.warn("Skipping %r, bad location 0,0" % item.title)
                continue

            if not item.location_name:
                # Fall back to reverse-geocoding.
                from ebpub.geocoder import reverse
                try:
                    block, distance = reverse.reverse_geocode(item.location)
                    logger.info(" Reverse-geocoded point to %r"
                                % block.pretty_name)
                    item.location_name = block.pretty_name
                except reverse.ReverseGeocodeError:
                    logger.debug(" Failed to reverse geocode %s for %r"
                                 % (item.location.wkt, item.title))
                    item.location_name = u''

            item.save()
            item.attributes['start_time'] = start_dt.time()
            end_dt = ns_get('xcal:dtend') or u''
            if end_dt.strip():
                end_dt = dateutil.parser.parse(end_dt.strip())
                # end_dt = end_dt.astimezone(local_tz)
                item.attributes['end_time'] = end_dt.time()

            if status == 'added':
                addcount += 1
            else:
                updatecount += 1
            logger.info("%s: %s" % (status, item.title))
        except:
            logger.error("unexpected error: %s", sys.exc_info()[1])
            log_exception()

    logger.info("EventsCalendarScraper finished: "
                "%d added, %d updated of %s total"
                % (addcount, updatecount, seencount))

def test_shape_mixed(self):
    """
    Test that a project made of interventions with different geometries
    creates multiple files. Check that those files are each of a
    different type (Point/LineString) and that the project and the
    interventions are correctly referenced in them.
    """
    # Create topology line
    topo_line = TopologyFactory.create(no_path=True)
    line = PathFactory.create(
        geom=LineString(Point(10, 10), Point(11, 10)))
    PathAggregationFactory.create(topo_object=topo_line, path=line)

    # Create a topology point
    lng, lat = tuple(
        Point(1, 1, srid=settings.SRID).transform(settings.API_SRID,
                                                  clone=True))
    closest_path = PathFactory(
        geom=LineString(Point(0, 0), Point(1, 0), srid=settings.SRID))
    topo_point = TopologyHelper._topologypoint(lng, lat, None).reload()

    self.assertEquals(topo_point.paths.get(), closest_path)

    # Create one intervention by geometry (point/linestring)
    it_point = InterventionFactory.create(topology=topo_point)
    it_line = InterventionFactory.create(topology=topo_line)
    # reload
    it_point = type(it_point).objects.get(pk=it_point.pk)
    it_line = type(it_line).objects.get(pk=it_line.pk)

    proj = ProjectFactory.create()
    proj.interventions.add(it_point)
    proj.interventions.add(it_line)

    # instantiate the class-based view 'abnormally' to use create_shape
    # directly, to avoid making an http request, authenticating, and
    # reading from a zip
    pfl = ZipShapeSerializer()
    devnull = open(os.devnull, "wb")
    pfl.serialize(Project.objects.all(), stream=devnull, delete=False,
                  fields=ProjectFormatList.columns)
    self.assertEquals(len(pfl.layers), 2)

    ds_point = gdal.DataSource(pfl.layers.values()[0])
    layer_point = ds_point[0]
    ds_line = gdal.DataSource(pfl.layers.values()[1])
    layer_line = ds_line[0]

    self.assertEquals(layer_point.geom_type.name, 'MultiPoint')
    self.assertEquals(layer_line.geom_type.name, 'LineString')

    for layer in [layer_point, layer_line]:
        self.assertEquals(layer.srs.name, 'RGF93_Lambert_93')
        self.assertItemsEqual(layer.fields,
                              ['domain', 'name', 'type', 'period', 'id'])

    self.assertEquals(len(layer_point), 1)
    self.assertEquals(len(layer_line), 1)

    for feature in layer_point:
        self.assertEquals(str(feature['id']), str(proj.pk))
        self.assertTrue(feature.geom.geos.equals(it_point.geom))

    for feature in layer_line:
        self.assertEquals(str(feature['id']), str(proj.pk))
        self.assertTrue(feature.geom.geos.equals(it_line.geom))

    # Clean-up temporary shapefiles
    for layer_file in pfl.layers.values():
        for subfile in shapefile_files(layer_file):
            os.remove(subfile)

def test_point(self):
    geom = Point(0, 0)
    self.assertEqual(self.parser.filter_geom('geom', geom), None)
    self.assertTrue(self.parser.warnings)

def to_geo_element(self):
    out = self.json_dict
    out.pop('geom')
    out['geom'] = self.geom
    return Point(**out)

def get_location(self):
    # Ad-hoc stand-in object exposing only what the caller needs:
    # a geocoder result with a `centroid` point.
    return type(
        "Geocoder",
        (object,),
        {"centroid": Point(-2.54333651887832, 51.43921783606831, srid=4326)},
    )

def copy_position(apps, schema_editor):
    Fruit = apps.get_model("fruit", "Fruit")
    for fruit in Fruit.objects.all():
        fruit.position = Point(float(fruit.longitude), float(fruit.latitude))
        fruit.save()

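# Hedged sketch of how copy_position would typically be wired up as a data
# migration; the app label matches the snippet above, but the dependency
# migration name is a placeholder assumption.
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [("fruit", "0002_fruit_position")]
    operations = [
        # RunPython.noop makes the migration reversible without undoing data
        migrations.RunPython(copy_position, migrations.RunPython.noop),
    ]
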
def setUp(self):
    self.trek = TrekFactory.create(
        points_reference=MultiPoint([Point(0, 0), Point(1, 1)],
                                    srid=settings.SRID),
        parking_location=Point(0, 0, srid=settings.SRID),
    )

def test_proxy(self):
    "Testing Lazy-Geometry support (using the GeometryProxy)."
    ## Testing on a Point
    pnt = Point(0, 0)
    nullcity = City(name='NullCity', point=pnt)
    nullcity.save()

    # Making sure TypeError is thrown when trying to set with an
    # incompatible type.
    for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
        try:
            nullcity.point = bad
        except TypeError:
            pass
        else:
            self.fail('Should throw a TypeError')

    # Now setting with a compatible GEOS Geometry, saving, and ensuring
    # the save took, notice no SRID is explicitly set.
    new = Point(5, 23)
    nullcity.point = new

    # Ensuring that the SRID is automatically set to that of the
    # field after assignment, but before saving.
    self.assertEqual(4326, nullcity.point.srid)
    nullcity.save()

    # Ensuring the point was saved correctly after saving
    self.assertEqual(new, City.objects.get(name='NullCity').point)

    # Setting the X and Y of the Point
    nullcity.point.x = 23
    nullcity.point.y = 5
    # Checking assignments pre & post-save.
    self.assertNotEqual(Point(23, 5),
                        City.objects.get(name='NullCity').point)
    nullcity.save()
    self.assertEqual(Point(23, 5),
                     City.objects.get(name='NullCity').point)
    nullcity.delete()

    ## Testing on a Polygon
    shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
    inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))

    # Creating a State object using a built Polygon
    ply = Polygon(shell, inner)
    nullstate = State(name='NullState', poly=ply)
    self.assertEqual(4326, nullstate.poly.srid)  # SRID auto-set from None
    nullstate.save()

    ns = State.objects.get(name='NullState')
    self.assertEqual(ply, ns.poly)

    # Testing the `ogr` and `srs` lazy-geometry properties.
    if gdal.HAS_GDAL:
        self.assertEqual(True, isinstance(ns.poly.ogr, gdal.OGRGeometry))
        self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
        self.assertEqual(True, isinstance(ns.poly.srs,
                                          gdal.SpatialReference))
        self.assertEqual('WGS 84', ns.poly.srs.name)

    # Changing the interior ring on the poly attribute.
    new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
    ns.poly[1] = new_inner
    ply[1] = new_inner
    self.assertEqual(4326, ns.poly.srid)
    ns.save()
    self.assertEqual(ply, State.objects.get(name='NullState').poly)
    ns.delete()

def import_row(self, row):
    # generate slugs
    if 'name' in row:
        row['name'] = row['name'].strip()

    if 'email' in row:
        row['email'] = row['email'].lower()

    if 'url' in row:
        if row['url'] and not row['url'].startswith(('http://', 'https://')):
            row['url'] = 'http://' + row['url']

    if 'slug' not in row and 'name' in row:
        row['slug'] = slugify(row['name'])

    if 'classification' in row:
        row['classification'] = self.get_classification(
            row.pop('classification', None))

    categories = parse_tags(row.pop('categories', ''))
    categories = list(self.get_categories(categories))

    tags = parse_tags(row.pop('tags', ''))
    # Backwards compatible handling of topic__slug
    topic_slug = row.pop('topic__slug', None)
    if topic_slug:
        tags.append(self.get_topic(topic_slug))

    # resolve foreign keys
    if 'jurisdiction__slug' in row:
        row['jurisdiction'] = self.get_jurisdiction(
            row.pop('jurisdiction__slug'))

    regions = None
    if 'georegion_id' in row:
        regions = [self.get_georegion(id=row.pop('georegion_id'))]
    elif 'georegion_identifier' in row:
        regions = [
            self.get_georegion(identifier=row.pop('georegion_identifier'))
        ]
    elif 'regions' in row:
        regions = row.pop('regions')
        if regions:
            regions = [self.get_georegion(id=r) for r in regions.split(',')]

    parent = row.pop('parent__name', None)
    if parent:
        row['parent'] = PublicBody._default_manager.get(
            slug=slugify(parent))

    parent = row.pop('parent__id', None)
    if parent:
        row['parent'] = PublicBody._default_manager.get(pk=parent)

    # get optional values
    for n in ('description', 'other_names', 'request_note', 'website_dump'):
        if n in row:
            row[n] = row.get(n, '').strip()

    if 'lat' in row and 'lng' in row:
        # Point takes (x, y) == (longitude, latitude)
        row['geo'] = Point(float(row.pop('lng')), float(row.pop('lat')))

    try:
        if 'id' in row and row['id']:
            pb = PublicBody._default_manager.get(id=row['id'])
        elif row.get('source_reference'):
            pb = PublicBody._default_manager.get(
                source_reference=row['source_reference'])
        else:
            pb = PublicBody._default_manager.get(slug=row['slug'])
        # If it exists, update it
        row.pop('id', None)    # Do not update id though
        row.pop('slug', None)  # Do not update slug either
        row['_updated_by'] = self.user
        row['updated_at'] = timezone.now()
        PublicBody._default_manager.filter(id=pb.id).update(**row)
        pb.laws.clear()
        pb.laws.add(*row['jurisdiction'].laws)
        pb.tags.set(*tags)
        if regions:
            pb.regions.set(*regions)
        pb.categories.set(*categories)
        return pb
    except PublicBody.DoesNotExist:
        pass

    row.pop('id', None)  # Remove id if present
    pb = PublicBody(**row)
    pb._created_by = self.user
    pb._updated_by = self.user
    pb.created_at = timezone.now()
    pb.updated_at = timezone.now()
    pb.confirmed = True
    pb.site = self.site
    pb.save()
    pb.laws.add(*row['jurisdiction'].laws)
    pb.tags.set(*list(tags))
    if regions:
        pb.regions.set(*regions)
    pb.categories.set(*categories)
    return pb

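# Hedged usage sketch, not from the original project: feeding import_row
# from a CSV file. csv.DictReader yields one dict per row, which matches
# the `row` mapping the method mutates; `importer` stands in for whatever
# object defines import_row, and import_public_bodies is a hypothetical
# helper name.
import csv


def import_public_bodies(importer, path):
    with open(path, newline='', encoding='utf-8') as fh:
        for row in csv.DictReader(fh):
            importer.import_row(row)
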
def tile_url(layer, z, x, y, style=None, internal=True):
    """Construct actual tile request to QGIS Server.

    Different from tile_url_format, this method will return a url for
    requesting a tile, with all parameters filled out.

    :param layer: Layer to use
    :type layer: Layer

    :param z: TMS coordinate, zoom parameter
    :type z: int, str

    :param x: TMS coordinate, longitude parameter
    :type x: int, str

    :param y: TMS coordinate, latitude parameter
    :type y: int, str

    :param style: Layer style to choose
    :type style: str

    :param internal: Flag to switch between public url and internal url.
        Public url will be served by Django Geonode (proxified).
    :type internal: bool

    :return: Tile url
    :rtype: str
    """
    try:
        qgis_layer = QGISServerLayer.objects.get(layer=layer)
    except QGISServerLayer.DoesNotExist:
        msg = 'No QGIS Server Layer for existing layer {0}'.format(
            layer.name)
        logger.debug(msg)
        raise

    x = int(x)
    y = int(y)
    z = int(z)

    # Call the WMS: compute the tile's bounding box in WGS84, then
    # reproject the corners to Web Mercator (EPSG:3857)
    top, left = num2deg(x, y, z)
    bottom, right = num2deg(x + 1, y + 1, z)

    transform = CoordTransform(SpatialReference(4326),
                               SpatialReference(3857))

    top_left_corner = Point(left, top, srid=4326)
    bottom_right_corner = Point(right, bottom, srid=4326)
    top_left_corner.transform(transform)
    bottom_right_corner.transform(transform)

    bottom = bottom_right_corner.y
    right = bottom_right_corner.x
    top = top_left_corner.y
    left = top_left_corner.x

    bbox = ','.join([str(val) for val in [left, bottom, right, top]])

    if not style:
        style = 'default'
    if style not in [s.name for s in qgis_layer.styles.all()]:
        style = qgis_layer.default_style.name

    query_string = {
        'SERVICE': 'WMS',
        'VERSION': '1.3.0',
        'REQUEST': 'GetMap',
        'BBOX': bbox,
        'CRS': 'EPSG:3857',
        'WIDTH': '256',
        'HEIGHT': '256',
        'MAP': qgis_layer.qgis_project_path,
        'LAYERS': layer.name,
        'STYLE': style,
        'FORMAT': 'image/png',
        'TRANSPARENT': 'true',
        'DPI': '96',
        'MAP_RESOLUTION': '96',
        'FORMAT_OPTIONS': 'dpi:96'
    }

    qgis_server_url = qgis_server_endpoint(internal)
    url = Request('GET', qgis_server_url, params=query_string).prepare().url
    return url

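# num2deg is called by tile_url but not defined in the snippet; this is the
# standard OSM slippy-map formula (an assumption about the helper, not the
# project's verified implementation). It returns (lat_deg, lon_deg) of the
# tile's north-west corner, which matches the (top, left) unpacking above.
import math


def num2deg(xtile, ytile, zoom):
    n = 2.0 ** zoom
    lon_deg = xtile / n * 360.0 - 180.0
    lat_rad = math.atan(math.sinh(math.pi * (1 - 2.0 * ytile / n)))
    return math.degrees(lat_rad), lon_deg
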