def get_bounding_boxes_for_country(country_code):
    """Return bounding boxes and population weights for a country.

    :param country_code: ISO country code passed to
        ``country_subunits_by_iso_code``.
    :return: dict with parallel lists ``'bboxes'`` and ``'weights'``
        (one entry per country subunit), or ``None`` when the code
        matches no subunit.
    """
    # Fix: the original called country_subunits_by_iso_code() three
    # times (once per comprehension); materialize the subunits once.
    subunits = list(country_subunits_by_iso_code(country_code))
    if not subunits:
        return None
    return {
        'bboxes': [c.bbox for c in subunits],
        'weights': [c.pop_est for c in subunits],
    }
def maximum_country_radius(country_code):
    """
    Return the maximum radius of a circle encompassing the largest
    country subunit in meters, rounded to 1 km increments.
    """
    # Reject anything that is not a 2- or 3-letter ISO code.
    if not isinstance(country_code, string_types):
        return None
    country_code = country_code.upper()
    if len(country_code) not in (2, 3):
        return None
    # Serve a previously computed radius from the module-level cache.
    value = _radius_cache.get(country_code, None)
    if value:
        return value
    # bbox order is (lon_min, lat_min, lon_max, lat_max);
    # distance() takes (lat, lon, lat, lon).
    diagonals = [
        distance(box[1], box[0], box[3], box[2])
        for box in (sub.bbox for sub in
                    country_subunits_by_iso_code(country_code))
    ]
    if diagonals:
        # Half the longest diagonal gives the radius; round to whole
        # kilometers, then express the result in meters.
        value = _radius_cache[country_code] = (
            round(max(diagonals) / 2.0) * 1000.0)
    return value
def get_country_fullname(string):
    """Return a human-readable country name for an ISO code.

    :param string: ISO country code (e.g. ``'BE'``).
    :return: the single subunit name when exactly one subunit matches,
        a hard-coded display name for a handful of multi-subunit
        countries, otherwise the last matching subunit name.
    :raises IndexError: when the code matches no subunit and has no
        hard-coded name (unchanged from the original behavior).
    """
    # Display-name overrides for codes whose subunit list has more
    # than one entry (replaces the original if/elif chain).
    overrides = {
        'BE': 'Belgium',
        'ES': 'Spain',
        'PT': 'Portugal',
        'IT': 'Italy',
        'GB': 'Great Britain',
        'US': 'USA',
        'DK': 'Denmark',
        'FR': 'France',
        'NO': 'Norway',
    }
    countrylist = [c.name for c in country_subunits_by_iso_code(string)]
    # Fix: removed leftover debug print(countrylist).
    if len(countrylist) == 1:
        return countrylist[0]
    if string in overrides:
        return overrides[string]
    return countrylist[-1]
def slice_country(country_iso_code='ML'): """" load global glofas data, select one country and save it in a separate file """ # load country bounding box (see https://github.com/graydon/country-bounding-boxes) bbox = [c.bbox for c in country_subunits_by_iso_code(country_iso_code)][0] longmin = bbox[0] longmax = bbox[2] latmin = bbox[3] latmax = bbox[1] print(longmin, longmax, latmin, latmax) data_dir = 'data_all_2000_2019' # global glofas data directory ncconcat = xr.Dataset() # initialize final country file # loop over all global glofas data for ix, file in enumerate(os.listdir(data_dir)): nc = xr.open_dataset(data_dir + '/' + file) # select country nc_c = nc.sel(lon=slice(longmin, longmax), lat=slice(latmin, latmax)) # if first iteration, copy to new country file, otherwise concatenate to existing one if ix == 0: print(nc) ncconcat = nc_c else: ncconcat = xr.concat([ncconcat, nc_c], dim='time') del nc, nc_c # save new dataframe print(ncconcat) ncconcat.to_netcdf('data_'+country_iso_code+'.nc')
def to_json(self):
    """Serialize this object's tags, bounding boxes and polygons as a
    JSON string."""
    # Work on a deep copy so self.tags is never mutated.
    payload = copy.deepcopy(self.tags)
    payload['bounding-boxes'] = [
        unit.bbox for unit in country_subunits_by_iso_code(self.iso2)
    ]
    payload['polygons'] = self.polygons
    # Let registered hooks contribute extra tags last so they win.
    payload.update(hooks.get_additional_tags(self))
    return json.dumps(payload)
def to_geojson(self):
    """Serialize this object as a GeoJSON FeatureCollection string."""
    # Deep-copy the tags so the instance state stays untouched.
    properties = copy.deepcopy(self.tags)
    properties['bounding-boxes'] = [
        unit.bbox for unit in country_subunits_by_iso_code(self.iso2)
    ]
    # Hook-provided tags are merged in last, on top of the copies.
    properties.update(hooks.get_additional_tags(self))
    geometry = MultiPolygon(self.polygons)
    feature = Feature(geometry=geometry, properties=properties)
    return geojson.dumps(FeatureCollection([feature]), sort_keys=True)
def country_matches_location(lat, lon, country_code, margin=0):
    """
    Return whether or not a given (lat, lon) pair is inside one of the
    country subunits associated with a given alpha2 country code.
    """
    for subunit in country_subunits_by_iso_code(country_code):
        # bbox order: (west, south, east, north)
        west, south, east, north = subunit.bbox
        # Chained comparisons, widened by the optional margin.
        if (west - margin <= lon <= east + margin
                and south - margin <= lat <= north + margin):
            return True
    return False
def handle(self, *args, **options):
    """Management command: write bbox extremes onto every Country row and
    report how many rows could not be processed."""
    from landmatrix.models.country import Country
    from country_bounding_boxes import country_subunits_by_iso_code

    def findBounds(boxes, counter, country):
        # Compute min/max bbox corners for `country` and save them on the
        # model; `counter` counts countries that were flagged as problematic.
        # NOTE(review): the variables are named *_lat/*_lon, but other code in
        # this file unpacks bbox as (lon, lat, lon, lat), so bb[0]/bb[2] are
        # presumably longitudes — the names look swapped; verify.
        # NOTE(review): the `> 100` threshold (and the counter increment for
        # that branch) looks arbitrary — confirm its intent.
        if len(boxes) > 100:
            counter += 1
            # Sentinels: 180/-180 and 90/-90 bracket any coordinate value.
            min_lat = 180
            min_lon = 90
            max_lat = -180
            max_lon = -90
            for bb in boxes:
                if bb[0] < min_lat:
                    min_lat = bb[0]
                if bb[1] < min_lon:
                    min_lon = bb[1]
                if bb[2] > max_lat:
                    max_lat = bb[2]
                if bb[3] > max_lon:
                    max_lon = bb[3]
        else:
            try:
                # Few boxes: just take the first one as-is.
                bb = boxes[0]
                min_lat = bb[0]
                min_lon = bb[1]
                max_lat = bb[2]
                max_lon = bb[3]
            except:
                # Bare except: boxes is empty (IndexError); the bounds stay
                # unassigned, which makes the save below fail too.
                print(country, "..is bad.")
        try:
            country.point_lat_max = max_lat
            country.point_lon_max = max_lon
            country.point_lat_min = min_lat
            country.point_lon_min = min_lon
            country.save()
        except:
            # Bare except: catches NameError from the empty-boxes case above
            # as well as any DB error; the country is only counted, not fixed.
            print(country, "..is bad, too.")
            counter += 1
        return counter

    counter = 0
    whole = 0
    # Walk every country and try to persist its bounding extremes.
    for country in Country.objects.all():
        whole += 1
        boxes = [
            c.bbox for c in country_subunits_by_iso_code(country.code_alpha3)
        ]
        counter = findBounds(boxes, counter, country)
    print("%i of %i countries with borked borders." % (counter, whole))
def location_is_in_country(lat, lon, country, margin=0):
    """
    Return whether or not a given lat, lon pair is inside one of the
    country subunits associated with a given alpha2 country code.
    """
    # bbox order: (lon_min, lat_min, lon_max, lat_max); margin widens
    # the box on every side.
    return any(
        bbox[0] - margin <= lon <= bbox[2] + margin
        and bbox[1] - margin <= lat <= bbox[3] + margin
        for bbox in (c.bbox for c in country_subunits_by_iso_code(country))
    )
def get_country_bbox():
    """
    Return the bounding box of the country identified by the
    module-level ``country_iso``.
    :return: tuple ``(x_min, y_min, x_max, y_max)`` or ``None`` when the
        code matches zero or several subunits.
    """
    global country_iso
    boxes = [unit.bbox for unit in country_subunits_by_iso_code(country_iso)]
    # Require an unambiguous single match.
    if len(boxes) == 1:
        return boxes[0]
    return None
def get_country_bbox(countryobject):
    """Return a list of bounding boxes for a country object.

    :param countryobject: object with a ``code`` attribute holding an
        ISO country code.
    :return: the full list of bboxes when exactly one subunit matches,
        a one-element list ``[first_bbox]`` when several match, or
        ``None`` when no subunit matches.
    """
    boxes = [
        c.bbox for c in country_subunits_by_iso_code(countryobject.code)
    ]
    # Fix: the original signalled "no match" by letting boxes[0] raise
    # IndexError and catching it; test for the empty list explicitly.
    # Commented-out debug prints removed.
    if not boxes:
        return None
    if len(boxes) == 1:
        return boxes
    return [boxes[0]]
def find_bounds(country_obj):
    """Return the overall extent (x_min, y_min, x_max, y_max) covering
    every bounding box of the given country.

    :raises ValueError: when the country code matches no subunit.
    """
    code = country_obj.code_alpha3
    boxes = [c.bbox for c in country_subunits_by_iso_code(code)]
    if not boxes:
        raise ValueError('No data found for country code {}'.format(code))
    # Turn each bbox into a polygon and merge them into one collection.
    polygons = []
    for x1, y1, x2, y2 in boxes:
        polygons.append(Polygon.from_bbox((x1, y1, x2, y2)))
    return GeometryCollection(*polygons).extent
def find_bounds(country_obj):
    """Compute the combined extent of all subunit bounding boxes.

    :param country_obj: object exposing ``code_alpha3``.
    :return: ``(x_min, y_min, x_max, y_max)`` of the merged geometry.
    :raises ValueError: if no bbox data exists for the code.
    """
    bboxes = [
        c.bbox
        for c in country_subunits_by_iso_code(country_obj.code_alpha3)
    ]
    if not bboxes:
        raise ValueError('No data found for country code {}'.format(
            country_obj.code_alpha3))
    # One polygon per bbox, merged so .extent spans all of them.
    polys = [Polygon.from_bbox((w, s, e, n)) for w, s, e, n in bboxes]
    collection = GeometryCollection(*polys)
    return collection.extent
def get_country_circles(country_code):
    """Find the circles covering every subunit of a country.

    :param country_code: ISO code passed to the shape/subunit helpers.
    :return: list of circles (from ``get_bounded_circles``); empty when
        the country shape cannot be found.
    """
    subunits = country_subunits_by_iso_code(country_code)
    print('Getting shape for {}'.format(country_code))
    country_shape = get_country_shape(country_code)
    # Fix: `circles` was only assigned in the else-branch, so a missing
    # shape made `return circles` raise NameError. Initialize it first.
    circles = []
    if country_shape is None:
        print('Country not found {}'.format(country_code))
    else:
        for unit in subunits:
            bounds = Bounds(unit.bbox)
            print(unit.name, bounds.width(), 'km,', bounds.height(), 'km')
            print('Finding circles for', unit.name)
            circles.extend(get_bounded_circles(country_shape, bounds))
    return circles
def get_country_bbox(country):
    """
    bbox = min Longitude , min Latitude , max Longitude , max Latitude
    bbox = left,bottom,right,top
    :param country: country of interest
    :return: a tuple representing a bounding box, or None when the
        country cannot be resolved or has no bbox data
    """
    country_iso = get_country_iso(country)
    if country_iso is None:
        return None
    matches = [c.bbox for c in country_subunits_by_iso_code(country_iso)]
    if not matches:
        return None
    if len(matches) == 1:
        # bbox = (x_min, y_min, x_max, y_max)
        return matches[0]
    # NOTE(review): with several matches the SECOND entry is returned,
    # faithfully mirroring the original code — confirm that [1] rather
    # than [0] is really intended.
    return matches[1]
def maximum_country_radius(country):
    """
    Return the maximum radius of a circle encompassing the largest
    country subunit in meters, rounded to 1 km increments.
    """
    # Only 2/3-letter string codes are accepted (Python 2 basestring).
    if not isinstance(country, basestring):
        return None
    country = country.upper()
    if len(country) not in (2, 3):
        return None
    cached = _radius_cache.get(country, None)
    if cached:
        return cached
    # distance() expects (lat, lon, lat, lon); bbox is (lon, lat, lon, lat).
    diagonals = [
        distance(bbox[1], bbox[0], bbox[3], bbox[2])
        for bbox in (c.bbox for c in country_subunits_by_iso_code(country))
    ]
    if not diagonals:
        return cached
    # Half the longest diagonal, rounded to whole km, expressed in meters.
    result = round(max(diagonals) / 2.0) * 1000.0
    _radius_cache[country] = result
    return result
def get_bboxes_from_db_time_zone(db_time_zone):
    """Return a list of bounding boxes for a database time-zone name.

    :param db_time_zone: time-zone name, possibly with spaces.
    :return: list of bboxes gathered from every country using the zone,
        a one-element list from the special-treatment table, or ``None``
        when nothing matches.
    """
    # Make the strings match: the tz database uses underscores.
    time_zone = db_time_zone.replace(' ', '_')
    country_codes = get_countries_using_time_zone(time_zone)
    if country_codes is not None:
        # Fix: the original returned inside the loop, so only the FIRST
        # country code's boxes were ever used (the dead code below the
        # return showed the gathering intent). Collect them all.
        bboxes = []
        for country_code in country_codes:
            bboxes.extend(
                c.bbox for c in country_subunits_by_iso_code(country_code)
            )
        return bboxes
    # Fall back to the special-treatment table (call it only once).
    special = get_special_bbox_special_treatment(time_zone)
    if special is not None:
        return [special]
    # We could not find a bounding box.
    return None
def get_data(country_iso_code, datestart, dateend, dest, collection, variable):
    """Download and aggregate an Earth Engine ImageCollection for one country.

    Selects `collection` between `datestart` and `dateend`, clipped to the
    country's bounding box, applies unit conversions for known variables,
    reduces over time (sum for precipitation, mean otherwise) and saves the
    raster locally. Returns the output file path, or the string 'error'
    when no images match.
    """
    # define bounding box of the requested country (first subunit only)
    bbox_coords = [
        c.bbox for c in country_subunits_by_iso_code(country_iso_code)
    ][0]
    bounding_box = ee.Geometry.Rectangle(list(bbox_coords))
    output_dir = dest
    # convert datetime objects to 'YYYY-MM-DD' strings if needed
    # (assumes datestart and dateend are the same type — TODO confirm)
    if not isinstance(datestart, str):
        datestart = datestart.strftime('%Y-%m-%d')
        dateend = dateend.strftime('%Y-%m-%d')
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    # one folder per (collection, variable) pair; '/' is not path-safe
    collection_dir = collection.replace('/', '_')
    folder = output_dir + '/' + collection_dir + '_' + variable
    if not os.path.exists(folder):
        os.mkdir(folder)
    name = variable + '_' + datestart + '_' + dateend
    file_name = folder + '/' + name
    # skip work already done on a previous run
    if os.path.exists(file_name):
        print('found existing', name, ', skipping')
        return file_name
    # get ImageCollection within given dates and bounding box;
    # bare except: retry once after 30 s on any transient EE failure
    try:
        col = (ee.ImageCollection(collection).filterDate(
            datestart, dateend).filterBounds(bounding_box))
    except:
        time.sleep(30)
        col = (ee.ImageCollection(collection).filterDate(
            datestart, dateend).filterBounds(bounding_box))
    count = col.size().getInfo()
    if count == 0:
        print('ERROR: no data found')
        return 'error'
    # get list of images in collection
    clist = col.toList(col.size().getInfo())
    # save the scale of first image (need to use it later to save
    # aggregated raster)
    image_scale = int(
        tools.image.minscale(ee.Image(
            clist.get(0)).select(variable)).getInfo())
    if 'LST' in variable:
        # MODIS LST: scale factor 0.02, Kelvin -> Celsius
        def KtoC(img):
            return (
                img.select(variable).float().multiply(0.02).subtract(273.15))
        col = col.map(KtoC)
    if 'Rainf_f_tavg' in variable:
        # kg/m^2/s -> mm per month (2.628e+6 s/month)
        def kgmstomm(img):
            return (img.select(variable).float().multiply(2.628e+6))
        col = col.map(kgmstomm)
    if 'precip' in variable.lower():
        # precipitation: accumulate over the period
        image_agg = col.select(variable).reduce(ee.Reducer.sum())
    else:
        # anything else: average over the period
        image_agg = col.select(variable).reduce(ee.Reducer.mean())
    # never download finer than 1 km
    if image_scale < 1000:
        image_scale = 1000
    # set a name to the file and download to disk; a concurrent run may
    # have created it meanwhile, which is fine
    try:
        batch.image.toLocal(image_agg,
                            file_name,
                            scale=image_scale,
                            region=bounding_box)
    except FileExistsError:
        pass
    return file_name
from country_bounding_boxes import (
    country_subunits_containing_point,
    country_subunits_by_iso_code
)
import sys, json

if len(sys.argv) < 2:
    sys.exit('Usage: %s ISO-CODE' % sys.argv[0])

# Fix: the original iterated country_subunits_by_iso_code() twice;
# materialize the subunits once and reuse the list.
subunits = list(country_subunits_by_iso_code(sys.argv[1]))
print(
    json.dumps({
        'subunits': [c.name for c in subunits],
        'bounds': [c.bbox for c in subunits]})
)
from country_bounding_boxes import (country_subunits_containing_point,
                                    country_subunits_by_iso_code)
import sys, json

# Require exactly one ISO code argument.
if len(sys.argv) < 2:
    sys.exit('Usage: %s ISO-CODE' % sys.argv[0])

iso_code = sys.argv[1]
# Emit the subunit names and bounding boxes as a JSON document.
payload = {
    'subunits': [unit.name for unit in country_subunits_by_iso_code(iso_code)],
    'bounds': [unit.bbox for unit in country_subunits_by_iso_code(iso_code)]
}
print(json.dumps(payload))
def script(countryISO='US', query='landuse', outputFolder='data/', partOfData=1, outputFile='OSMdata_'): """ Main function executed by top 'countryISO': Country for which BBox data should be downloaded. Can also contain custom boundary box 'query': Tag for OSM query to search for 'partOfData': Part of total data of a country to be processed Returns list with [filename,datatype], where datatype is the GDAL_CODE """ partOfData = float(partOfData) subunits = [] countryISOlist = countryISO.split() if countrISOlist[1]: #if there is more than one entry bbox = [] bbox.append(float(contryISOlist[0])) bbox.append(float(contryISOlist[1])) bbox.append(float(contryISOlist[2])) bbox.append(float(contryISOlist[3])) else: #Load country data for c in country_subunits_by_iso_code(countryISO): subunits.append(c) #Chose subunits, if more than 1 subunit = 1 if len(subunits) > 1: #if there are subunits cnt = 1 print "Subunits:" for c in subunits: print cnt, "- ", c.name cnt += 1 subunit = input('Chose subunit: ') elif len(subunits) == 0: #if nothing found print "Error: No country or entry with ISO code", countryISO exit() #Get BBox data for country print "Acquiring data for", subunits[subunit - 1].name bbox = subunits[subunit - 1].bbox #0-w, 1-s, 2-e, 3-n w = bbox[0] s = bbox[1] e = bbox[2] n = bbox[3] print "Coordinates:", w, s, e, n print "Key:", query # Country is split into 100 boxes, as (for the us) sample is too big # (timeout) # Number of Boxes = (samples-1)^2 boxes. 
#calculate number of boxes mindiff = min([abs(w - e), abs(n - s)]) samples = int(floor((mindiff * 8) + 2)) print "Number of queries:", (samples - 1)**2 #samples = 11 # 100 boxes fullquery = query_begin + query + query_end #Get Elements from OSM overpass_client = OverpassClient(endpoint='fr') d = overpass_client.get_bbox_elements(ql_template=fullquery, bb_s=s, bb_w=w, bb_n=n, bb_e=e, samples=samples) lene = len(d) print 'Total elements found: %d' % lene boundery_index = int(floor(partOfData * lene)) d = d[0:boundery_index] dr = list(reversed(d)) lene = boundery_index fileName = outputFolder + '/' + outputFile + str( subunits[subunit - 1].name).replace(" ", "_") + ".json" #Create GeoJSON string geojson = [] geojson.append(''' { "type": "FeatureCollection", "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::4326" } }, "features": [''') #create faster node searchs print "Create library for faster node searches" #get max id cnt = 0. ids = {} for e in d: print "\t Library creation:", int(cnt * 100. / lene), "% done.\r", cnt += 1 if e['type'] == 'node': ids[e['id']] = [e['lon'], e['lat']] #creade list of nodes with ids. #coordlist=[] #cnt = 0. #for i in range(0,maxid+1): # print "\t Step 2/3:",cnt*100./maxid+1,"% done.\r", # cnt+=1 # coordlist.append([0,0]) #cnt = 0. #rint "" #for e in d: # print "\t Step 3/3:",cnt*100./lene,"% done.\r", # cnt+=1 # if e['type']=='node': # coordlist(e['id'])[0]=e['lon'] # coordlist(e['id'])[1]=e['lat'] #loop through elements and append to GeoJSON string cnt = 0. cnte = 0 print "" print "Convert to GeoJSON file", fileName writetag = [] writecoord = [] for e in d: cnt += 1 print "\tGet elemenents:", int(cnt * 100. / lene), "% done.\r", if e['type'] == 'way': # if e['area']=='yes': writetag.append(e['tags'][query]) writecoord.append([]) for node in e['nodes']: try: lon = str(ids[node][0]) lat = str(ids[node][1]) except KeyError: print '' print '\tNode', node, 'not found in library. 
Download informations from openstreetmap.org ...' response = urllib2.urlopen( 'http://api.openstreetmap.org/api/0.6/node/' + str(node)) fullxml = str(response.read()) lon = find_between(fullxml, "lon=\"", "\"", lastfirst=True) lat = find_between(fullxml, "lat=\"", "\"", lastfirst=True) writecoord[cnte].append([lon, lat]) cnte += 1 cnte2 = 0 print '' for tag in writetag: print "\tCreate GeoJSON:", int(cnte2 * 100. / cnte), "% done.\r", geojson.append(''' { "type": "Feature",'''+\ "\n\t\t\t\"properties\":{\"Descriptio\":\""+\ tag+"\"},") geojson.append(''' "geometry" : { "type": "MultiPolygon", "coordinates":[[[''') for coord in writecoord[cnte2]: geojson.append("[" + coord[0] + "," + coord[1] + "],") cnte2 += 1 geojson[-1] = geojson[-1][0:-1] + "]]]}}," #geojson=geojson[0:-1]+"]]]}}," geojson = ''.join(geojson) geojson = geojson[0:-1] + "\n]\n}" print " " with open(fileName, 'w+') as f: json.dump(geojson, f) #save as geojson file #replace escape characters with open(fileName, 'r') as file: filedata = file.read()[1:-1] # Replace the target string filedata = filedata.replace('\\n', '') filedata = filedata.replace('\\t', '') filedata = filedata.replace('\\"', '"') # Save the result with open(fileName, 'w') as file: file.write(filedata) print "Written to", fileName return [fileName, datatype]