Ejemplo n.º 1
0
    def visualize_spatial_extent(self):  # additional args, basemap, zoom level, cmap, export
        """
        Display the query's spatial extent on a map.

        Uses an interactive GeoViews/Bokeh basemap when geoviews is
        installed; otherwise falls back to a static matplotlib plot over a
        Natural Earth world outline.

        Examples
        --------
        >>> reg_a = ipx.Query('ATL06','path/spatialfile.shp',['2019-02-22','2019-02-28']) # doctest: +SKIP
        >>> reg_a.visualize_spatial_extent # doctest: +SKIP
        [visual map output]
        """
        extent_gdf = geospatial.geodataframe(self.extent_type, self._spat_extent)

        try:
            import geoviews as gv
            from shapely.geometry import Polygon

            gv.extension("bokeh")

            # Draw the extent outline in red over an Esri imagery basemap.
            outline = Polygon(extent_gdf["geometry"][0]).boundary
            extent_path = gv.Path(outline).opts(color="red", line_color="red")
            basemap = gv.tile_sources.EsriImagery.opts(width=500, height=500)
            return basemap * extent_path
        except ImportError:
            # Static fallback: orange extent over a light-gray world map.
            base_world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
            fig, axis = plt.subplots(1, figsize=(12, 6))
            base_world.plot(ax=axis, facecolor="lightgray", edgecolor="gray")
            extent_gdf.plot(ax=axis, color="#FF8C00", alpha=0.7)
            plt.show()
Ejemplo n.º 2
0
def view_time_series(filename, cmap='jet'):
    """
    Render an interactive view of a time-series netCDF file.

    Parameters
    ----------
    filename : str
        Path of the netCDF file to display.
    cmap : str, optional
        Colormap name used for the image rendering (default 'jet').
    """
    # Start a geoviews session with both rendering backends available.
    gv.extension('bokeh', 'matplotlib')
    with xr.open_dataset(filename, decode_times=False) as raw:
        frames = gv.Dataset(raw).to(gv.Image, ['x', 'y'])
        styled = frames.opts(cmap=cmap,
                             colorbar=True,
                             fig_size=200,
                             backend='matplotlib')
        gv.output(styled, backend='matplotlib')
Ejemplo n.º 3
0
import geoviews as gv
import geoviews.feature as gf
import xarray as xr
from cartopy import crs

# Enable both the Bokeh (interactive) and matplotlib (static) backends.
gv.extension('bokeh', 'matplotlib')

# Demo layout: ocean, land, and a composite of ocean/land/coastline/borders,
# all drawn on a geostationary projection, three panels per row.
(gf.ocean + gf.land + gf.ocean * gf.land * gf.coastline * gf.borders).opts(
    'Feature', projection=crs.Geostationary(), global_extent=True,
    height=325).cols(3)

# nc_file = "pr_Amon_HadGEM2AO_standardCal.nc"
# nc_dir = "../nc_files/"
# cmip5_std_dir = "cmip5_standardCal/"
# nc_in = nc_dir+cmip5_std_dir+nc_file  # Your filename
# dataset = gv.Dataset(xr.open_dataset(nc_in))
# ensemble = dataset.to(gv.Image, ['lon', 'lat'], 'pr')
#
# gv.output(ensemble.opts(cmap='viridis', colorbar=True, fig_size=200, backend='matplotlib') * gf.coastline(),
#           backend='matplotlib')
Ejemplo n.º 4
0
import geoviews as gv
import panel as pn
import param
from panel.widgets import Checkbox
from mednum.widgets import TreeViewCheckBox
from holoviews.element.tiles import StamenTerrain
from mednum.loaders import *
from pygal.style import Style
from mednum import tools
from mednum.config import *
import cProfile
import copy

import mednum as mind

# Activate the Bokeh plotting backend for GeoViews.
gv.extension("bokeh")

class OverallParameters(param.Parameterized):
    """Declarative parameters backing the dashboard's filter widgets."""

    # Free-text locality filter (label left empty on purpose).
    localisation = param.String(default="Jegun", label="")
    # Score range slider, bounded to [0, 250].
    score = param.Range(default=(0, 250), bounds=(0, 250),)

    # Master "all axes" toggle.
    tout_axes = param.Boolean(True, label="")
    # Category selectors; defaults come from the CATEGORIES_* mappings brought
    # in by the star-import from mednum.config — TODO confirm their contents.
    interfaces_num = param.ListSelector(label="", default=list(CATEGORIES_INT_NUM_REV.keys()))
    infos_num = param.ListSelector(label="", default=list(CATEGORIES_X_INFOS_REV))
    comp_admin = param.ListSelector(label="", default=list(CATEGORIES_X_COMP_ADMIN_REV.keys()))
    comp_usage_num = param.ListSelector(label="", default=list(CATEGORIES_X_COMP_USAGE_REV.keys()))

    # Reference point for comparisons (default: third entry of SELECT).
    point_ref = param.Selector(
        default=SELECT[2], objects=SELECT, label="Point de référence",
    )
Ejemplo n.º 5
0
#
#
# %%
# Import requisite modules
import numpy as np
import pandas as pd
import geopandas as gpd
from pathlib import Path
import geoviews as gv
import holoviews as hv
from cartopy import crs as ccrs
from bokeh.io import output_notebook
from shapely.geometry import Point
# Route Bokeh output into the notebook and activate the Bokeh backend
# for both HoloViews and GeoViews.
output_notebook()
hv.extension('bokeh')
gv.extension('bokeh')

# Set project root directory
ROOT_DIR = Path(__file__).parent.parent

# Set project data directory
DATA_DIR = ROOT_DIR.joinpath('data')

# Import custom project functions
import sys
SRC_DIR = ROOT_DIR.joinpath('src')
sys.path.append(str(SRC_DIR))
from my_functions import *

# Define plotting projection to use
ANT_proj = ccrs.SouthPolarStereo(true_scale_latitude=-71)
Ejemplo n.º 6
0
import numpy as np
import pandas as pd
import cartopy.crs as ccrs
from cartopy import crs
import geoviews as gv
import holoviews as hv
import geoviews.tile_sources as gts
import xarray as xr
import panel as pn
import os, sys, shutil, time, datetime
from collections import defaultdict

# Activate Panel plus the Bokeh backend (logos hidden) for both libraries.
pn.extension()
gv.extension("bokeh", logo=False)
hv.extension("bokeh", logo=False)
# Render datetime64 axis ticks/labels as plain dates.
hv.Dimension.type_formatters[np.datetime64] = '%Y-%m-%d'
gv.Dimension.type_formatters[np.datetime64] = '%Y-%m-%d'

#### Header Object #####
def make_header_html(header_html_path=None):
    logo = "https://www.kyushu-u.ac.jp/img/common_en/logo.png"
    title = '<p style="font-size:30px">Passive Seismic Monitoring System</p>'
    #     creator = 'Created by: Fernando Lawrens Hutapea, Takeshi Tsuji, and Tatsunori Ikeda <br>'
    info = "(Ambient noise data is provided by National Research Institute for Earth Science and Disaster Prevention (NIED) Japan)"
    creator = 'Presently the monitoring result is updated weekly (Saturday) <br> Please use Firefox or Google Chrome or Microsoft Edge <br>'
    current = np.datetime64(datetime.datetime.now(), "s").astype(str)
    last_update = "Last update: {} <br>".format(current)  # today's date
    header = pn.Row(
        logo, pn.Spacer(width=30),
        pn.Column(pn.Spacer(height=0), pn.Pane(title, height=16, width=900),
def simple_idw(x, y, z, xi, yi):
    """
    Inverse-distance-weighted (IDW) interpolation.

    Estimates values at the target points (xi, yi) from the observations z
    located at (x, y), weighting each observation by the inverse of its
    distance to the target.
    """
    dists = distance_matrix(x, y, xi, yi)

    # IDW weight of each observation is 1 / distance to the target point.
    raw_weights = 1.0 / dists

    # Normalize so the weights for every target point sum to one.
    norm_weights = raw_weights / raw_weights.sum(axis=0)

    # Weighted sum of observed values gives the interpolated values.
    return np.dot(norm_weights.T, z)

# Enable the Bokeh (primary) and matplotlib backends without library logos.
hv.extension('bokeh', 'matplotlib', logo=False)
gv.extension('bokeh', 'matplotlib',logo=False)

#hv.extension('matplotlib', logo=False)
#gv.extension('matplotlib', logo=False)
#%%

#Import entire data set

# Concatenate every Audubon CSV export into one frame, in filename order.
# NOTE(review): `glob` is not imported in this excerpt — presumably
# `from glob import glob` appears elsewhere in the full file.
Audubon_All = pd.DataFrame({})
files = glob('/Users/matthew/Desktop/data/Clarity_Backup/Audubon*.csv')
files.sort()
for file in files:
    Audubon_All = pd.concat([Audubon_All, pd.read_csv(file)], sort=False)

Adams_All = pd.DataFrame({})
files = glob('/Users/matthew/Desktop/data/Clarity_Backup/Adams*.csv')
Ejemplo n.º 8
0
# coding: utf-8

# In[ ]:

import numpy as np
import xarray as xr
import cartopy.crs as ccrs
import pandas as pd
import hvplot.xarray
import hvplot.pandas
import geoviews as gv
import holoviews as hv

# Bokeh backend without logos; renderer configured for server-side output.
gv.extension('bokeh', logo=False)
hv.extension('bokeh', logo=False)
renderer = hv.Store.renderers['bokeh'].instance(mode='server',
                                                holomap='server')

# In[ ]:

# Load the RGB dataset (time-slicing left disabled).
ds = xr.open_dataset('data/rgb.nc')
# ds = ds.isel(time=slice(0,2))

# In[ ]:


def parser(date):
    """
    Parse a 'YYYY/MM/DD HH:MM[:SS]' string into a timestamp.

    Parameters
    ----------
    date : str
        Date and time separated by a space, e.g. '2020/01/02 03:45'.
        Seconds, if present, are ignored (as in the original implementation).

    Returns
    -------
    pandas.Timestamp
        Midnight of the date plus the hour/minute offset.
    """
    # Local import: `pd.datetime` was removed in pandas 1.0, so the original
    # `pd.datetime.strptime(...)` call crashes on modern pandas.
    from datetime import datetime

    b = date.split(' ')
    c = b[1].split(':')
    return datetime.strptime(b[0], '%Y/%m/%d') + pd.Timedelta(
        hours=int(c[0]), minutes=int(c[1]))
Ejemplo n.º 9
0
def draw(data):
    """
    Build an earthquake map: magnitude-scaled points over a CartoLight basemap.

    Parameters
    ----------
    data : sequence
        Rows of (mag, lng, lat, lokasyon, date) records.

    Returns
    -------
    A HoloViews/GeoViews overlay of the basemap and hoverable quake points.
    """
    gv.extension('bokeh')

    df = pd.DataFrame(data, columns=['mag', 'lng', 'lat', 'lokasyon', 'date'])

    earthquakes_gv_points = gv.Points(df, ['lng', 'lat'],
                                      ['lokasyon', 'mag', 'date'])

    # Custom hover tool so the tooltip shows human-readable field names.
    from bokeh.models import HoverTool
    tooltips = [
        ('Magnitude', '@mag'),
        ('Date', '@date'),
        ('Location', '@lokasyon'),
        ('Longitude', '$x'),
        ('Latitude', '$y'),
    ]
    hover = HoverTool(tooltips=tooltips)

    # The original constructed (and discarded) three intermediate overlays and
    # two no-op bare expressions; only this final overlay was ever returned,
    # so the dead constructions were removed.
    earthquakes_plot = (gvts.CartoLight * earthquakes_gv_points).opts(
        opts.Points(width=980,
                    height=700,
                    alpha=0.3,
                    hover_line_color='black',
                    line_color='black',
                    xaxis=None,
                    yaxis=None,
                    tools=[hover],
                    # Marker size scales with the square root of magnitude.
                    size=np.sqrt(dim('mag')) * 20,
                    hover_fill_color=None,
                    hover_fill_alpha=0.5))

    # NOTE(review): opts.Graph on a Points overlay looks like leftover styling
    # from a graph-based version; kept unchanged for behavioral parity.
    return (earthquakes_plot).opts(
        opts.Graph(edge_selection_line_color='black',
                   edge_hover_line_color='red',
                   edge_line_width=1,
                   edge_line_alpha=0.01,
                   edge_nonselection_line_alpha=0.01,
                   width=800,
                   height=600))
Ejemplo n.º 10
0
import holoviews as hv
from holoviews import opts
import utils as ut
import build_table_survey as bts

# Build the path to the local data folder.
# NOTE(review): `os` is used below but not imported in this excerpt —
# presumably imported earlier in the full file.
path = os.getcwd()
data_folder = 'data'
DATA_PATH = path + os.sep + data_folder + os.sep

### Examples
## for bokeh extension (interactive map)
## https://thedatafrog.com/fr/articles/choropleth-maps-python/
## for matplotlib extension
#https://towardsdatascience.com/lets-make-a-map-using-geopandas-pandas-and-matplotlib-to-make-a-chloropleth-map-dddc31c1983d

# Static matplotlib backend (the interactive Bokeh one is kept commented).
gv.extension('matplotlib')
#gv.extension('bokeh')

# Load the French regions outline.
# NOTE(review): `gdp` looks like a typo for the usual geopandas alias `gpd`
# — confirm how it is imported in the full file.
mapname = "France_regions.geojson"
sf = gdp.read_file(DATA_PATH + mapname)

## full_insee_table comes from import build_table_survey.py
# Region code -> region name mapping (regional vulnerability index).
dict_codereg_to_regname = {
    1: 'Guadeloupe',
    2: 'Martinique',
    3: 'Guyane',
    4: 'Réunion',
    11: 'Île-de-France',
    21: 'Champagne-Ardenne',
    22: 'Picardie',
Ejemplo n.º 11
0
# Determines which points of the CRU grid correspond to
# the Basin of the Valley of Mexico.

import os


import pandas as pd
import numpy as np

import geoviews as gv
import geopandas as gpd

# Static matplotlib backend, with a larger default output size.
gv.extension("matplotlib")
gv.output(size = 150)


# Input shapefile of hydrological-administrative regions; output folder.
fdir_d = os.getcwd() +  "/data/Cuencas/Regiones_Hidrologicas_Administrativas/"
fdir_r = os.getcwd() +  "/results/sequia/"
fname = "rha250kgw.shp"

# Create the results folder if it does not exist.
if not os.path.exists(fdir_r):
    os.mkdir(fdir_r)

# Load the hydrological-administrative regions.
gdf = gpd.read_file(fdir_d + fname)
# Extract the boundary of the basins.
gdf["boundary"] = gdf.boundary

# Select the Basin of the Valley of Mexico.
cuenca = gv.Path(
Ejemplo n.º 12
0
import geoviews as gv
import geoviews.feature as gf
import xarray as xr
from cartopy import crs

import pandas as pd
import numpy as np

# Bokeh as the main backend, matplotlib available as a fallback.
gv.extension("bokeh", "matplotlib")

# Eagerly load the example ensemble dataset into memory.
xr_ensemble = xr.open_dataset(
    "Data-Analysis/datashader-work/geoviews-examples/data/ensemble.nc").load()

from sqlalchemy import create_engine

# NOTE(review): the "postgres://" URL scheme is rejected by SQLAlchemy >= 1.4
# (use "postgresql://") — confirm the pinned SQLAlchemy version.
engine = create_engine("postgres://localhost:5432/global_fishing_watch")
engine.table_names()
df = pd.read_sql("""SELECT * FROM fishing_effort LIMIT 10000""",
                 engine,
                 parse_dates=["date"])

# Compact the frame: categorical flag/gear columns, and lat/lon decoded
# from the 0.01-degree bin columns.
df["flag"] = df["flag"].astype("category")
df["geartype"] = df["geartype"].astype("category")
df["lat"] = df["lat_bin"] / 100
df["lon"] = df["lon_bin"] / 100
df.info()


def format_df(df, n=10_000):
    df = df.iloc[:n]
    df = df.drop_duplicates(subset=["lat", "lon", "date"])
Ejemplo n.º 13
0
#for the dataset cleaning
import pandas as pd
import numpy as np

#for the map
import geoviews as gv
import geopandas as gpd  #to read the geojson #conda install geopandas
from geoviews import dim

#to create the geographic zones
from shapely.geometry import Polygon
from shapely.ops import cascaded_union

#for map interactions
gv.extension('bokeh')  #bokeh as backend

import pandas.testing as tm

#for the dashboard
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output

import dash_bootstrap_components as dbc  #for the navbar

#
# Data modified (in the notebook using jupyter)
#
# NOTE(review): this call is cut off here; the argument list continues
# beyond this excerpt.
df = pd.read_csv(
Ejemplo n.º 14
0
import os
import h5py
import pandas as pd
import geopandas as gp
import holoviews as hv
import geoviews as gv

from shapely.geometry import Point
from geoviews import opts
from geoviews import tile_sources as gvts

# Bokeh/matplotlib backends with SVG+HTML display; export figures as PNG.
gv.extension('bokeh', 'matplotlib', display_formats=['svg', 'html'])
gv.output(fig='png')


def point_visual(features, vdims):
    """
    :param features:
    :param vdims:
    :return:
    """
    return (gvts.EsriImagery *
            gv.Points(features, vdims=vdims).options(tools=['hover'],
                                                     height=500,
                                                     width=900,
                                                     size=5,
                                                     color='yellow',
                                                     fontsize={
                                                         'xticks': 10,
                                                         'yticks': 10,
                                                         'xlabel': 16,
Ejemplo n.º 15
0
def get_contour(ds, lat_name="latitude", lon_name="longitude", levels=10, **kwargs):
    """
    Build a GeoJSON FeatureCollection of filled contours from a 2D DataArray.

    Arguments:
        ds {DataArray} -- A 2D dataArray

    Keyword Arguments:
        lat_name {str} -- Dimension name for latitude (default: {"latitude"})
        lon_name {str} -- Dimension name for longitude (default: {"longitude"})
        levels {int,list} -- Number of levels or list of levels for contours  (default: {10})
        cmap -- colormap used for the per-feature fill color (default: cm.RdBu)
        mini, maxi -- color-scale bounds (default: first/last contour band value)
        qualitative {bool} -- apply a buffer-based cleanup for qualitative fields
        buffer {float} -- buffer size for the qualitative cleanup (default: 5e-4)

    Returns:
        FeatureCollection -- one Feature per non-empty contour band, carrying
        value, units, name, colormap and Leaflet-style styling properties.
    """
    if not isinstance(ds, xr.core.dataarray.DataArray):
        # Fixed: the original format string had no %s placeholder, so this
        # raise crashed with "not all arguments converted during string
        # formatting" instead of raising the intended ValueError message.
        raise ValueError(
            "In get_geojson_contour, input dataset should be a DataArray. Get : %s" % type(ds))

    if len(ds.dims) != 2:
        raise ValueError("Dataset should be 2D")

    if lat_name not in ds.dims or lon_name not in ds.dims:
        raise ValueError("Latitude or longitude name are not present. Should get %s %s and get %s" % (lat_name, lon_name, ds.dims))

    # Compute filled contours with GeoViews (Bokeh backend).
    gv.extension("bokeh")
    hv_ds = gv.Dataset(ds, [lon_name, lat_name])
    contours = gv.operation.contours(hv_ds, filled=True, levels=levels)

    polygon_list = list()
    dict_poly = gu.polygons_to_geom_dicts(contours)
    cmap = kwargs.get("cmap", cm.RdBu)
    # Color-scale bounds default to the values of the first and last bands.
    mini = kwargs.get("mini", list(contours.data[0].values())[0])
    maxi = kwargs.get("maxi", list(contours.data[-1].values())[0])

    for i in range(len(dict_poly)):
        # Assemble the holes of band i into a MultiPolygon and subtract them
        # from the outer geometry, repairing invalid polygons along the way.
        list_poly = []
        for holes in dict_poly[i]["holes"]:
            l_p = [sh.geometry.Polygon(x) for x in holes]
            if len(l_p) > 0:
                list_poly.extend(l_p)
        if len(list_poly):
            mp_holes = sh.geometry.MultiPolygon(list_poly)
            mp_init = dict_poly[i]["geometry"]
            if not mp_init.is_valid:
                mp_init = change_invalid_polygon(mp_init)
            if not mp_holes.is_valid:
                mp_holes = change_invalid_polygon(mp_holes)
            mp_final = mp_init - mp_holes
        else:
            if not dict_poly[i]["geometry"].is_valid:
                mp_final = change_invalid_polygon(dict_poly[i]["geometry"])
            else:
                mp_final = dict_poly[i]["geometry"]

        if kwargs.get("qualitative", False) and not mp_final.is_empty:
            # Gives a slightly sharper rendering for qualitative variables.
            # Not mandatory.
            buffer_arg = kwargs.get("buffer", 5e-4)
            mp_temp = mp_final.buffer(-buffer_arg).buffer(1.1 * buffer_arg)
            if mp_temp.area > 0:
                mp_diff = (mp_final - mp_temp)
                if mp_diff.area > 0:
                    mp_final = mp_final - mp_diff
            else:
                mp_final = sh.geometry.Polygon([])

        # Store the result as a GeoJSON Feature.
        # TODO: there may be a cleaner way to do this.
        if not mp_final.is_empty:
            try:
                res = Feature(geometry=mp_final)
            except Exception:
                # Tiny buffer out-and-in to coerce an unserializable geometry.
                mp_temp = mp_final.buffer(-1e-5).buffer(1.1 * 1e-5)
                res = Feature(geometry=mp_temp)

            if isinstance(levels, Iterable):
                # Explicit level list: report the band as a [min, max] range.
                # NOTE(review): assumes `levels` supports elementwise
                # comparison (e.g. a numpy array) — confirm with callers.
                value = list(contours.data[i].values())[0]
                descending_list = np.sort(levels)[::-1]
                bound_min = descending_list[np.argmax(descending_list < value)]
                bound_max = levels[np.argmax(levels >= value)]

                res["properties"] = {
                    "value_min": bound_min * 1.0,
                    "value_max": bound_max * 1.0,
                    "units": ds.attrs.get('units'),
                    "name": ds.attrs.get("long_name", ds.name)
                }
            else:
                res["properties"] = {
                    "value": list(contours.data[i].values())[0],
                    "units": ds.attrs.get('units'),
                    "name": ds.attrs.get("long_name", ds.name)
                }

            # Colormap metadata plus Leaflet-style styling for the feature.
            res["properties"]["cmap"] = {
                "value": list(contours.data[i].values())[0],
                "mini": mini * 1.0,
                "maxi": maxi * 1.0,
            }
            res["properties"]["style"] = {
                "fillColor": rgb2hex(cmap((list(contours.data[i].values())[0] - mini) / (maxi - mini))),
                "fillOpacity": 0.9,
                "opacity": 0.2,
                "color": "#000000",
                "dashArray": '5',
                "weight": 2,
            }
            polygon_list.append(res)
        else:
            print("Empty polygon for ", list(contours.data[i].values())[0])

    feature_collection = FeatureCollection(polygon_list)
    return feature_collection