def get_context_by_admin2(geodash_conn=None, iso_alpha3=None):
    """Fetch the country's context layers and index them by admin2 code.

    Hits the internal api_data_country endpoint for the "context" dataset and
    returns a dict mapping str(admin2_code) -> dict of context properties
    (ldi, delta_* land-cover deltas, erosion_propensity).

    :param geodash_conn: unused; kept for call-site compatibility.
    :param iso_alpha3: ISO alpha-3 country code (used in the URL and title).
    :return: dict keyed by stringified admin2 code.
    """
    context_by_admin2 = {}
    url = settings.SITEURL[:-1] + reverse(
        "api_data_country",
        kwargs={
            "iso3": iso_alpha3,
            "dataset": "context",
            "title": iso_alpha3.upper() + "_NHR_ContextLayers",
            "extension": "json"
        })
    response = requests.get(url)
    # Fix: extract() falls back to None when "features" is missing; iterating
    # None raised TypeError. Fall back to an empty list instead.
    features = extract(["features"], response.json(), None) or []
    property_keys = [
        'ldi', 'delta_mean', 'delta_positive', 'delta_negative',
        'delta_crop', 'delta_forest', 'erosion_propensity'
    ]
    for feature in features:
        admin2_code = extract(u"properties.admin2_code", feature, None)
        context_by_admin2[str(admin2_code)] = {
            key: extract(u"properties." + key, feature, None)
            for key in property_keys
        }
    return context_by_admin2
def writerow(self, rowdict):
    """Append one CSV row for *rowdict* to the internal output buffer.

    Each field's value is pulled from rowdict via its extraction path
    (falling back to self.fallback), coerced to text, CSV-escaped by
    doubling embedded quotes, quoted, and joined with the delimiter.

    Fix: the original called .replace() directly on extracted values and
    crashed with AttributeError for non-string values (ints, None); the
    commented-out unicode(x) line showed the intended coercion step.
    """
    row = [extract(x['path'], rowdict, self.fallback) for x in self.fields]
    # Coerce non-string values so the quote-escaping below cannot fail.
    row = [x if isinstance(x, basestring) else str(x) for x in row]
    row = [x.replace('"', '""') for x in row]
    self.output = self.output + self.delimiter.join(
        [self.quote + x + self.quote for x in row]) + self.newline
def writerows(self, rowdicts):
    """Append one CSV row per dict in *rowdicts* to the output buffer.

    Same per-row logic as writerow: extract each field by path (with
    self.fallback), coerce to text, escape embedded quotes, quote and join.

    Fix: as in writerow, non-string extracted values crashed on .replace();
    they are now coerced to text first. The intermediate `rows` list the
    original built was unnecessary and is gone.
    """
    for rowdict in rowdicts:
        row = [extract(x['path'], rowdict, self.fallback) for x in self.fields]
        # Coerce non-string values so the quote-escaping below cannot fail.
        row = [x if isinstance(x, basestring) else str(x) for x in row]
        row = [x.replace('"', '""') for x in row]
        self.output = self.output + self.delimiter.join(
            [self.quote + x + self.quote for x in row]) + self.newline
def get_country(iso_alpha3=None):
    """Look up a single country record by ISO alpha-3 code.

    Queries the internal countries API with a grep filter on iso.alpha3 and
    returns the country dict only when exactly one match comes back;
    otherwise returns None.
    """
    if not iso_alpha3:
        return None
    endpoint = reverse("api_data", kwargs={"dataset": "countries", "extension": "json"})
    if not endpoint:
        return None
    query = "?grep=iso.alpha3%3D" + iso_alpha3.upper()
    response = requests.get(settings.SITEURL[:-1] + endpoint + query)
    if not response:
        return None
    countries = extract("countries", response.json(), None)
    if countries and len(countries) == 1:
        return countries[0]
    return None
def _build_data(self, request, *args, **kwargs):
    """Render the dataset's YAML template and parse it into a Python object.

    The template sparc2/datasets/{dataset}.yml is rendered with the iso3,
    hazard and resolved country title, then parsed.

    Fixes: removed the unused locals (dataset/extension/ext_lc were computed
    but never read; the template name formats straight from kwargs) and
    switched yaml.load -> yaml.safe_load (the rendered template is plain
    YAML; safe_load avoids arbitrary Python object construction).
    """
    iso3 = kwargs.get('iso3', None)
    hazard = kwargs.get('hazard', None)
    country_title = extract("gaul.admin0_name", get_country(iso_alpha3=iso3), "")
    ds = yaml.safe_load(get_template("sparc2/datasets/{dataset}.yml".format(**kwargs)).render({
        "iso3": iso3,
        "hazard": hazard,
        "country_title": country_title
    }))
    return ds
def _build_geometry_type(self, request, *args, **kwargs):
    """Resolve the OGR geometry type for this country/hazard/dataset.

    Fetches the countryhazard metadata JSON and maps its "geometry.type"
    string through GEOMETRY_TYPE_TO_OGR. Returns None when the URL cannot
    be reversed, the request fails, the body is empty, or the type is
    unknown to the mapping.
    """
    metadata_url = reverse("api_metadata_countryhazard", kwargs={
        "iso3": kwargs.get('iso3', None),
        "hazard": kwargs.get('hazard', None),
        "dataset": kwargs.get('dataset', None),
        "extension": "json"
    })
    if not metadata_url:
        return None
    response = requests.get(settings.SITEURL[:-1] + metadata_url)
    if not response:
        return None
    metadata = response.json()
    if not metadata:
        return None
    type_name = extract("geometry.type", metadata, "").lower()
    return GEOMETRY_TYPE_TO_OGR.get(type_name)
def _build_geometry_path(self, request, *args, **kwargs):
    """Resolve the extraction path of the geometry within each feature.

    Fetches the countryhazard metadata JSON and returns its "geometry.path"
    value, or None when the URL cannot be reversed, the request fails, or
    the key is absent.
    """
    metadata_url = reverse("api_metadata_countryhazard", kwargs={
        "iso3": kwargs.get('iso3', None),
        "hazard": kwargs.get('hazard', None),
        "dataset": kwargs.get('dataset', None),
        "extension": "json"
    })
    if not metadata_url:
        return None
    response = requests.get(settings.SITEURL[:-1] + metadata_url)
    if not response:
        return None
    return extract("geometry.path", response.json(), None)
def get_context_by_admin2(geodash_conn=None, iso_alpha3=None):
    """Fetch the country's context layers and index them by admin2 code.

    Duplicate of the earlier helper of the same name: hits the internal
    api_data_country endpoint for the "context" dataset and returns a dict
    mapping str(admin2_code) -> dict of context properties.

    :param geodash_conn: unused; kept for call-site compatibility.
    :param iso_alpha3: ISO alpha-3 country code (used in the URL and title).
    :return: dict keyed by stringified admin2 code.
    """
    context_by_admin2 = {}
    url = settings.SITEURL[:-1] + reverse(
        "api_data_country",
        kwargs={
            "iso3": iso_alpha3,
            "dataset": "context",
            "title": iso_alpha3.upper() + "_NHR_ContextLayers",
            "extension": "json"
        })
    response = requests.get(url)
    # Fix: extract() falls back to None when "features" is missing; iterating
    # None raised TypeError. Fall back to an empty list instead.
    features = extract(["features"], response.json(), None) or []
    property_keys = [
        'ldi', 'delta_mean', 'delta_positive', 'delta_negative',
        'delta_crop', 'delta_forest', 'erosion_propensity'
    ]
    for feature in features:
        admin2_code = extract(u"properties.admin2_code", feature, None)
        context_by_admin2[str(admin2_code)] = {
            key: extract(u"properties." + key, feature, None)
            for key in property_keys
        }
    return context_by_admin2
def country_detail(request, iso3=None, hazard=None, month=None):
    """Render the country-detail page with its map config and state schema.

    :param iso3: ISO alpha-3 country code used to resolve the country title.
    :param hazard: accepted for URL-pattern compatibility; unused here.
    :param month: accepted for URL-pattern compatibility; unused here.

    Fixes: removed the unused now/current_month locals and switched
    yaml.load -> yaml.safe_load (the rendered map template is plain YAML;
    safe_load avoids arbitrary Python object construction).
    """
    t = "sparc2/country_detail.html"
    country_title = extract("gaul.admin0_name", get_country(iso_alpha3=iso3), "")
    map_config_yml = get_template("sparc2/maps/country_detail.yml").render({
        "iso_alpha3": iso3,
        "country_title": country_title
    })
    map_config = yaml.safe_load(map_config_yml)
    ##############
    # Shape of the client-side state object consumed by the JS dashboard.
    state_schema = {
        "iso3": "string",
        "view": {
            "lat": "float",
            "lon": "float",
            "z": "integer"
        },
        "filters": {},
        "styles": {
            "context": "string"
        }
    }
    ctx = {
        "map_config": map_config,
        "map_config_json": json.dumps(map_config),
        "state_schema": state_schema,
        "state_schema_json": json.dumps(state_schema),
        "init_function": "init_country",
        "geodash_main_id": "geodash-main",
        "include_sidebar_left": False
    }
    ctx.update({
        "iso3": iso3,
        "country_title": country_title
    })
    return render_to_response(t, RequestContext(request, ctx))
def get_country(iso_alpha3=None):
    """Return the single country record matching an ISO alpha-3 code.

    Duplicate of the earlier helper of the same name: queries the internal
    countries API filtered by iso.alpha3 and yields the country dict only
    when exactly one match is returned; otherwise None.
    """
    match = None
    if iso_alpha3:
        code = iso_alpha3.upper()
        endpoint = reverse("api_data", kwargs={
            "dataset": "countries",
            "extension": "json"
        })
        if endpoint:
            resp = requests.get(
                settings.SITEURL[:-1] + endpoint + "?grep=iso.alpha3%3D" + code)
            if resp:
                found = extract("countries", resp.json(), None)
                if found and len(found) == 1:
                    match = found[0]
    return match
def _build_data(self, request, *args, **kwargs):
    """Build the map configuration for a country/hazard dashboard.

    Computes the country/hazard titles and the maximum population-at-risk
    value from the hazard-specific summary, then renders and parses the
    countryhazardmonth_detail.yml map template.

    Fixes: yaml.load -> yaml.safe_load (rendered template is plain YAML),
    and the dead month/month_title computation was removed — nothing in the
    rendered context used it.
    """
    iso3 = kwargs.pop('iso3', None)
    hazard = kwargs.pop('hazard', None)
    kwargs.pop('month', None)  # accepted from the URL but unused here
    country_title = extract("gaul.admin0_name", get_country(iso_alpha3=iso3), "")
    hazard_title = [h for h in SPARC_HAZARDS_CONFIG if h["id"] == hazard][0]["title"]
    ##############
    # This is inefficient, so would be better to rework
    summary = None
    if hazard == "cyclone":
        summary = get_summary_cyclone(table_popatrisk="cyclone.admin2_popatrisk", iso_alpha3=iso3)
    elif hazard == "drought":
        summary = get_summary_drought(table_popatrisk="drought.admin2_popatrisk", iso_alpha3=iso3)
    elif hazard == "flood":
        summary = get_summary_flood(table_popatrisk="flood.admin2_popatrisk", iso_alpha3=iso3)
    elif hazard == "landslide":
        summary = get_summary_landslide(table_popatrisk="landslide.admin2_popatrisk", iso_alpha3=iso3)
    #############
    # NOTE(review): summary stays None for an unknown hazard, so this lookup
    # would raise TypeError — same as the original behavior.
    maxValue = summary["all"]["max"]["at_admin2_month"]
    #############
    data = yaml.safe_load(get_template("sparc2/maps/countryhazardmonth_detail.yml").render({
        "iso_alpha3": iso3,
        "hazard_title": hazard_title,
        "country_title": country_title,
        "hazard": hazard,
        "maxValue": maxValue
    }))
    return data
def get(self, request, *args, **kwargs): ext_lc = kwargs['extension'].lower() ## data = None if settings.GEODASH_CACHE_DATA: client = provision_memcached_client() if client: key = self._build_key(request, *args, **kwargs) print "Checking cache with key ", key data = None try: data = client.get(key) except socket_error as serr: data = None print "Error getting data from in-memory cache." if serr.errno == errno.ECONNREFUSED: print "Memcached is likely not running. Start memcached with supervisord." raise serr if not data: print "Data not found in cache." data = self._build_data(request, *args, **kwargs) if ext_lc == "geodash": data = [int(x) for x in data] try: client.set(key, data) except socket_error as serr: print "Error saving data to in-memory cache." if serr.errno == errno.ECONNREFUSED: print "Memcached is likely not running or the data exceeds memcached item size limit. Start memcached with supervisord." raise serr else: print "Data found in cache." else: print "Could not connect to memcached client. Bypassing..." data = self._build_data(request, *args, **kwargs) else: print "Not caching data (settings.geodash_CACHE_DATA set to False)." 
data = self._build_data(request, *args, **kwargs) #content = json.dumps(data, default=jdefault) #content = re.sub( # settings.GEODASH_REGEX_CLIP_COORDS_PATTERN, # settings.GEODASH_REGEX_CLIP_COORDS_REPL, # content, # flags=re.IGNORECASE) root = self._build_root(request, *args, **kwargs) attributes = self._build_attributes(request, *args, **kwargs) if attributes: data = grep(obj=data, root=root, attributes=attributes, filters=getRequestParameters(request, "grep", None)) if ext_lc == "json": return HttpResponse(json.dumps(data, default=jdefault), content_type="application/json") elif ext_lc == "yml" or ext_lc == "yaml": response = yaml.safe_dump(data, encoding="utf-8", allow_unicode=True, default_flow_style=False) return HttpResponse(response, content_type="text/plain") elif ext_lc == "csv" or ext_lc == "csv": writer = GeoDashDictWriter("", attributes) writer.writeheader() writer.writerows(extract(root, data, [])) response = writer.getvalue() return HttpResponse(response, content_type="text/csv") elif ext_lc == "zip": # See the following for how to create zipfile in memory, mostly. 
# https://newseasandbeyond.wordpress.com/2014/01/27/creating-in-memory-zip-file-with-python/ tempDirectory = tempfile.mkdtemp() print "Temp Directory:", tempDirectory if tempDirectory: geometryType = self._build_geometry_type( request, *args, **kwargs) ########### Create Files ########### os.environ['SHAPE_ENCODING'] = "utf-8" # See following for how to create shapefiles using OGR python bindings # https://pcjericks.github.io/py-gdalogr-cookbook/vector_layers.html#filter-and-select-input-shapefile-to-new-output-shapefile-like-ogr2ogr-cli basepath, out_filename, ext = parse_path(request.path) out_shapefile = os.path.join(tempDirectory, out_filename + ".shp") out_driver = ogr.GetDriverByName("ESRI Shapefile") if os.path.exists(out_shapefile): out_driver.DeleteDataSource(out_shapefile) out_datasource = out_driver.CreateDataSource(out_shapefile) out_layer = out_datasource.CreateLayer( (out_filename + ".shp").encode('utf-8'), geom_type=geometryType) ########### Create Fields ########### out_layer.CreateField(ogr.FieldDefn( "id", ogr.OFTInteger)) # Create ID Field for attribute in attributes: label = attribute.get('label_shp') or attribute.get( 'label') out_layer.CreateField( ogr.FieldDefn( label, ATTRIBUTE_TYPE_TO_OGR.get( attribute.get('type', 'string')))) ########### Create Features ########### features = extract(root, data, []) for i in range(len(features)): feature = features[i] out_feature = ogr.Feature(out_layer.GetLayerDefn()) geom = extract( self._build_geometry(request, *args, **kwargs), feature, None) out_feature.SetGeometry( ogr.CreateGeometryFromJson( json.dumps(geom, default=jdefault))) out_feature.SetField("id", i) for attribute in attributes: label = attribute.get('label_shp') or attribute.get( 'label') out_value = extract(attribute.get('path'), feature, None) out_feature.SetField( (attribute.get('label_shp') or attribute.get('label')), out_value.encode('utf-8') if isinstance( out_value, basestring) else out_value) out_layer.CreateFeature(out_feature) 
out_datasource.Destroy() ########### Create Projection ########### spatialRef = osr.SpatialReference() spatialRef.ImportFromEPSG(4326) spatialRef.MorphToESRI() with open(os.path.join(tempDirectory, out_filename + ".prj"), 'w') as f: f.write(spatialRef.ExportToWkt()) f.close() ########### Create Zipfile ########### buff = StringIO.StringIO() zippedShapefile = zipfile.ZipFile(buff, mode='w') #memoryFiles = [] component_filenames = os.listdir(tempDirectory) #for i in range(len(componentFiles)): # memoryFiles.append(StringIO.StringIO()) for i in range(len(component_filenames)): with open( os.path.join(tempDirectory, component_filenames[i]), 'r') as f: contents = f.read() zippedShapefile.writestr(component_filenames[i], contents) zippedShapefile.close() print "zippedShapefile.printdir()", zippedShapefile.printdir() ########### Delete Temporary Directory ########### shutil.rmtree(tempDirectory) ########### Response ########### return HttpResponse(buff.getvalue(), content_type="application/zip") #for i in range(len(componentFiles)): # with open(componentFiles[i], 'w') as componentFile: # memoryFiles[i].write(componentFile.read()) else: raise Http404( "Could not acquire temporary directory for building shapefile." ) elif ext_lc == "geodash": response = HttpResponse(content_type='application/octet-stream') # Need to do by bytes(bytearray(x)) to properly translate integers to 1 byte each # If you do bytes(data) it will give 4 bytes to each integer. response.write(bytes(bytearray(data))) return response else: raise Http404("Unknown config format.")
def countryhazardmonth_detail(request, iso3=None, hazard=None, month=None):
    """Render the dashboard page for one country / hazard / month.

    Resolves display titles for the country and hazard, falls back to the
    current calendar month when the requested month cannot be parsed, and
    passes the client a list of resource URLs to load asynchronously.
    """
    now = datetime.datetime.now()
    #current_month = now.strftime("%B")
    current_month = now.month
    t = "sparc2/countryhazardmonth_detail.html"
    # Country display title; get_country queries the internal countries API.
    country_title = extract("gaul.admin0_name", get_country(iso_alpha3=iso3), "")
    # NOTE(review): assumes hazard is always a valid SPARC_HAZARDS_CONFIG id;
    # the [0] lookup raises IndexError otherwise — confirm against URL conf.
    hazard_title = [h for h in SPARC_HAZARDS_CONFIG if h["id"]==hazard][0]["title"]
    # get_month_number presumably returns -1 for a missing/unparseable month;
    # fall back to the current month in that case.
    month_num = get_month_number(month)
    if month_num == -1:
        month_num = current_month
    month_title = MONTHS_SHORT3[month_num-1]
    ##############
    #############
    #ctx = {
    #    "map_config": map_config,
    #    "map_config_json": json.dumps(map_config),
    #    "state": initial_state,
    #    "state_json": json.dumps(initial_state),
    #    "state_schema": state_schema,
    #    "state_schema_json": json.dumps(state_schema),
    #    "endpoints_json": json.dumps(endpoints),
    #    "sidebar_left_open": True,
    #    "init_function": "init_countryhazardmonth",
    #    "geodash_main_id": "geodash-main",
    #    "include_sidebar_left": True
    #}
    # Resources the client-side loader fetches after page load; each entry
    # names the loader responsible for the fetched payload.
    dashboard_resources = [
        {
            "loader": "endpoints",
            "url": reverse("api_endpoints", kwargs={"extension": "json"})
        },
        {
            "loader": "pages",
            "url": reverse("api_pages", kwargs={"extension": "json"})
        },
        {
            "loader": "popatrisk_summary",
            "url": reverse("api_data_countryhazard", kwargs={
                "iso3": iso3,
                "hazard": hazard,
                "dataset": "summary",
                "title": iso3.upper()+"_NHR_PopAtRisk_"+hazard.title()+"_Summary",
                "extension": "json"
            })
        },
        {
            "loader": "context_summary",
            "url": "/api/data/country/{iso3}/dataset/context_summary/{iso3}_NHR_ContextLayers_Summary.json".format(iso3=iso3)
        },
        {
            "loader": "context_geojson",
            "url": "/api/data/country/{iso3}/dataset/context/{iso3}_NHR_ContextLayers.json".format(iso3=iso3)
        },
        {
            "loader": "vam_geojson",
            "url": "/api/data/country/{iso3}/dataset/vam/{iso3}_VAM.json".format(iso3=iso3)
        }
    ];
    #geojson: {% endverbatim %}{ url: "/api/data/country/{{ iso_alpha3|upper }}/dataset/context.json" }{% verbatim %}
    ctx = {
        "dashboard_url": "/api/dashboard/country/{iso3}/hazard/{hazard}.json".format(iso3=iso3, hazard=hazard),
        "state_url": "/api/state/country/{iso3}/hazard/{hazard}/month/{month}.json".format(iso3=iso3, hazard=hazard, month=month_num),
        "state_schema_url": reverse("api_state_schema_hazard", kwargs={"hazard": hazard, "extension": "json"}),
        "geodash_main_id": "geodash-main",
        "include_sidebar_left": True,
        "sidebar_left_open": True,
        "modal_welcome": False
    }
    ctx.update({
        "dashboard_resources_json": json.dumps(dashboard_resources)
    })
    ctx.update({
        "iso3": iso3,
        "hazard": hazard,
        "month_num": month_num,
        "country_title": country_title,
        "hazard_title": hazard_title,
        "month_title": month_title
    })
    # Server-rendered template fragment shipped to the client as JSON;
    # rendered with ctx as built so far (order matters).
    ctx.update({
        "server_templates": json.dumps({
            "main.tpl.html": get_template("sparc2/countryhazardmonth/main.tpl.html").render(ctx)
        })
    })
    return render_to_response(t, RequestContext(request, ctx))
def _build_data(self, request, *args, **kwargs):
    """Build the initial client-side state for a country/hazard/month page.

    Computes titles, resolves the month (falling back to the current month
    when get_month_number returns -1), derives the population-at-risk range
    from the hazard-specific summary, picks the map view (country extent
    when available, otherwise the template's lat/lon/zoom), and assembles
    the state dict including hazard-specific filter defaults.

    Fixes: yaml.load -> yaml.safe_load (rendered template is plain YAML),
    and the dead month_title computation was removed (never read).
    """
    iso3 = kwargs.pop('iso3', None)
    hazard = kwargs.pop('hazard', None)
    month = kwargs.pop('month', None)
    country_title = extract("gaul.admin0_name", get_country(iso_alpha3=iso3), "")
    hazard_title = [h for h in SPARC_HAZARDS_CONFIG if h["id"] == hazard][0]["title"]
    month_num = get_month_number(month)
    if month_num == -1:
        # -1 signals a missing/unparseable month; use the current one.
        month_num = datetime.datetime.now().month
    ##############
    # This is inefficient, so would be better to rework
    summary = None
    if hazard == "cyclone":
        summary = get_summary_cyclone(table_popatrisk="cyclone.admin2_popatrisk", iso_alpha3=iso3)
    elif hazard == "drought":
        summary = get_summary_drought(table_popatrisk="drought.admin2_popatrisk", iso_alpha3=iso3)
    elif hazard == "flood":
        summary = get_summary_flood(table_popatrisk="flood.admin2_popatrisk", iso_alpha3=iso3)
    elif hazard == "landslide":
        summary = get_summary_landslide(table_popatrisk="landslide.admin2_popatrisk", iso_alpha3=iso3)
    #############
    # NOTE(review): summary stays None for an unknown hazard, so this lookup
    # would raise TypeError — same as the original behavior.
    maxValue = summary["all"]["max"]["at_admin2_month"]
    popatrisk_range = [0.0, maxValue]
    #############
    dashboard = yaml.safe_load(get_template("sparc2/maps/countryhazardmonth_detail.yml").render({
        "iso_alpha3": iso3,
        "hazard_title": hazard_title,
        "country_title": country_title,
        "hazard": hazard,
        "maxValue": maxValue
    }))
    #############
    view = {
        "baselayer": "osm",
        "featurelayers": ["popatrisk"]
    }
    response = requests.get(settings.SITEURL + "api/data/countries.json?grep=iso.alpha3%3D" + iso3)
    extent = extract(["countries", 0, "gaul", "extent"], response.json(), None)
    if extent is None:
        # No country extent available: fall back to the template's view.
        view["lat"] = dashboard["view"].get("latitude", 0)
        view["lon"] = dashboard["view"].get("longitude", 0)
        view["z"] = dashboard["view"]["zoom"]
    else:
        view["extent"] = extent
    #############
    data = {
        "page": "countryhazardmonth_detail",
        "iso3": iso3,
        "country_title": country_title,
        "hazard": hazard,
        "hazard_title": hazard_title,
        "month": month_num,
        "view": view,
        "filters": {
            "popatrisk": {
                "popatrisk_range": popatrisk_range,
                "ldi_range": [1, 9],
                "erosion_propensity_range": [0, 100],
                "landcover_delta_negative_range": [0, 100],
            }
        },
        "styles": {
            "popatrisk": "default",
            "context": "delta_mean"
        }
    }
    # Hazard-specific filter defaults.
    if hazard == "cyclone":
        data["filters"]["popatrisk"]["prob_class_max"] = 0.1
        data["filters"]["popatrisk"]["category"] = "cat1_5"
    elif hazard == "drought":
        data["filters"]["popatrisk"]["prob_class_max"] = 0.1
    elif hazard == "flood":
        data["filters"]["popatrisk"]["rp"] = 200
    elif hazard == "landslide":
        data["filters"]["popatrisk"]["prob_class_max"] = 1
    return data