Code Example #1
File: element.py Project: eddy-ojb/holoviews
class contours(ElementOperation):
    """
    Given an Image with a single channel, annotate it with contour
    lines for a given set of contour levels.

    The return is an NdOverlay with a Contours layer for each given
    level, overlaid on top of the input Image.
    """

    output_type = Overlay

    levels = param.NumericTuple(default=(0.5, ),
                                doc="""
        A list of scalar values used to specify the contour levels.""")

    group = param.String(default='Level',
                         doc="""
        The group assigned to the output contours.""")

    filled = param.Boolean(default=False,
                           doc="""
        Whether to generate filled contours""")

    overlaid = param.Boolean(default=True,
                             doc="""
        Whether to overlay the contour on the supplied Element.""")

    def _process(self, element, key=None):
        try:
            from matplotlib import pyplot as plt
        except ImportError:
            raise ImportError("contours operation requires matplotlib.")
        figure_handle = plt.figure()
        extent = element.range(0) + element.range(1)[::-1]
        if self.p.filled:
            contour_fn = plt.contourf
            contour_type = Polygons
        else:
            contour_fn = plt.contour
            contour_type = Contours
        if type(element) is Raster:
            data = [np.flipud(element.data)]
        elif isinstance(element, Raster):
            data = [element.data]

        elif isinstance(element, QuadMesh):
            data = (element.dimension_values(0, False),
                    element.dimension_values(1, False), element.data[2])
        contour_set = contour_fn(*data, extent=extent, levels=self.p.levels)

        contours = NdOverlay(None, kdims=['Levels'])
        for level, cset in zip(self.p.levels, contour_set.collections):
            paths = []
            for path in cset.get_paths():
                paths.extend(
                    np.split(path.vertices,
                             np.where(path.codes == 1)[0][1:]))
            contours[level] = contour_type(paths,
                                           level=level,
                                           group=self.p.group,
                                           label=element.label,
                                           kdims=element.kdims,
                                           vdims=element.vdims)

        plt.close(figure_handle)
        if self.p.overlaid:
            contours = element * contours
        return contours
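
A minimal usage sketch of the operation above, assuming holoviews and numpy are installed and relying on the usual Operation call convention, where calling the operation applies it directly to an element; the image and levels are made up for illustration:

import numpy as np
import holoviews as hv

# Overlay contour lines at three levels on a toy single-channel Image.
img = hv.Image(np.random.rand(50, 50))
overlay = contours(img, levels=(0.25, 0.5, 0.75))

# With filled=True the operation produces filled Polygons instead of Contours.
filled = contours(img, levels=(0.25, 0.5, 0.75), filled=True)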
Code Example #2
File: tutorial.py Project: ahuang11/ahlive
class TutorialData(param.Parameterized):

    label = param.String(allow_None=True)
    raw = param.Boolean()
    verbose = param.Boolean()
    return_meta = param.Boolean()
    use_cache = param.Boolean()

    _source = None
    _base_url = None
    _data_url = None
    _description = None

    def __init__(self, **kwds):
        super().__init__(**kwds)
        self._cache_dir = DEFAULTS["cache_kwds"]["directory"]
        self._remove_href = re.compile(r"<(a|/a).*?>")
        os.makedirs(self._cache_dir, exist_ok=True)
        self._init_owid()

    @property
    def _cache_path(self):
        cache_file = f"{self.label}.pkl"
        return os.path.join(self._cache_dir, cache_file)

    @property
    def _dataset_options(self):
        options = set([])
        for method in dir(self):
            if method.startswith("_load_") and "owid" not in method:
                options.add(method.replace("_load_", ""))
        return list(options) + list(self._owid_labels_df.columns)

    @staticmethod
    def _specify_cache(cache_path, **kwds):
        if kwds:
            cache_ext = ("_".join(f"{key}={val}".replace(os.sep, "")
                                  for key, val in kwds.items()).replace(
                                      " ", "_").replace(",",
                                                        "_").replace("'", ""))
            cache_path = f"{os.path.splitext(cache_path)[0]}_{cache_ext}.pkl"
        return cache_path

    def _cache_dataset(self, df, cache_path=None, **kwds):
        if cache_path is None:
            cache_path = self._cache_path
        cache_path = self._specify_cache(cache_path, **kwds)

        df.to_pickle(cache_path)

    def _read_cache(self, cache_path=None, **kwds):
        if not self.use_cache:
            return None

        if cache_path is None:
            cache_path = self._cache_path
        cache_path = self._specify_cache(cache_path, **kwds)

        try:
            return pd.read_pickle(cache_path)
        except Exception:
            if os.path.exists(cache_path):
                os.remove(cache_path)
        return None

    @staticmethod
    def _snake_urlify(s):
        # Replace all hyphens with underscores
        s = s.replace(" - ", "_").replace("-", "_")
        # Remove non-word characters (everything except letters, digits, underscores and whitespace)
        s = re.sub(r"[^\w\s]", "", s)
        # Replace all runs of whitespace with an underscore
        s = re.sub(r"\s+", "_", s)
        return s.lower()

    def _init_owid(self):
        cache_path = os.path.join(self._cache_dir, "owid_labels.pkl")
        self._owid_labels_df = self._read_cache(cache_path=cache_path)
        if self._owid_labels_df is not None:
            return

        owid_api_url = ("https://api.github.com/"
                        "repos/owid/owid-datasets/"
                        "git/trees/master?recursive=1")
        with urlopen(owid_api_url) as f:
            sources = json.loads(f.read().decode("utf-8"))

        owid_labels = {}
        owid_raw_url = "https://raw.githubusercontent.com/owid/owid-datasets/master/"
        for source_tree in sources["tree"]:
            path = source_tree["path"]
            if ".csv" not in path and ".json" not in path:
                continue

            label = "owid_" + self._snake_urlify(path.split("/")[-2].strip())
            if label not in owid_labels:
                owid_labels[label] = {}

            url = f"{owid_raw_url}/{quote(path)}"
            if ".csv" in path:
                owid_labels[label]["data"] = url
            elif ".json" in path:
                owid_labels[label]["meta"] = url

        self._owid_labels_df = pd.DataFrame(owid_labels)
        self._cache_dataset(self._owid_labels_df, cache_path=cache_path)

    def _load_owid(self, **kwds):
        self._data_url = self._owid_labels_df[self.label]["data"]
        meta_url = self._owid_labels_df[self.label]["meta"]
        with urlopen(meta_url) as response:
            meta = json.loads(response.read().decode())
        self.label = meta["title"]
        self._source = (" & ".join(
            source.get("dataPublishedBy", "") for source in meta["sources"]) +
                        " curated by Our World in Data (OWID)")
        self._base_url = (" & ".join(source["link"]
                                     for source in meta["sources"]) +
                          " through https://github.com/owid/owid-datasets")
        self._description = re.sub(self._remove_href, "", meta["description"])

        df = self._read_cache(**kwds)
        if df is None:
            if "names" in kwds.keys() and "header" not in kwds.keys():
                kwds["header"] = 0
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df.columns = [self._snake_urlify(col) for col in df.columns]
        return df

    def _load_annual_co2(self, **kwds):
        self._source = "NOAA ESRL"
        self._base_url = "https://www.esrl.noaa.gov/"
        self._data_url = ("https://www.esrl.noaa.gov/"
                          "gmd/webdata/ccgg/trends/co2/co2_annmean_mlo.txt")
        self._description = (
            "The carbon dioxide data on Mauna Loa constitute the longest record "
            "of direct measurements of CO2 in the atmosphere. They were started "
            "by C. David Keeling of the Scripps Institution of Oceanography in "
            "March of 1958 at a facility of the National Oceanic and Atmospheric "
            "Administration [Keeling, 1976]. NOAA started its own CO2 measurements "
            "in May of 1974, and they have run in parallel with those made by "
            "Scripps since then [Thoning, 1989].")

        df = self._read_cache(**kwds)
        if df is None:
            base_kwds = dict(
                header=None,
                comment="#",
                sep="\s+",  # noqa
                names=["year", "co2_ppm", "uncertainty"],
            )
            base_kwds.update(kwds)
            df = pd.read_csv(self._data_url, **base_kwds)
            self._cache_dataset(df, **kwds)

        return df

    def _load_tc_tracks(self, **kwds):
        self._source = "IBTrACS v04 - USA"
        self._base_url = "https://www.ncdc.noaa.gov/ibtracs/"
        self._data_url = (
            "https://www.ncei.noaa.gov/data/"
            "international-best-track-archive-for-climate-stewardship-ibtracs/"
            "v04r00/access/csv/ibtracs.last3years.list.v04r00.csv")
        self._description = (
            "The intent of the IBTrACS project is to overcome data availability "
            "issues. This was achieved by working directly with all the Regional "
            "Specialized Meteorological Centers and other international centers "
            "and individuals to create a global best track dataset, merging storm "
            "information from multiple centers into one product and archiving "
            "the data for public use.")

        df = self._read_cache(**kwds)
        if df is None:
            base_kwds = dict(keep_default_na=False)
            base_kwds.update(kwds)
            df = pd.read_csv(self._data_url, **base_kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        cols = [
            "BASIN",
            "NAME",
            "LAT",
            "LON",
            "ISO_TIME",
            "USA_WIND",
            "USA_PRES",
            "USA_SSHS",
            "USA_RMW",
            "USA_STATUS",
            "USA_RECORD",
            "LANDFALL",
        ]
        df = df[cols]
        df.columns = df.columns.str.lower()
        df = df.iloc[1:]
        df = df.set_index("iso_time")
        df.index = pd.to_datetime(df.index)
        numeric_cols = [
            "lat", "lon", "usa_wind", "usa_pres", "usa_sshs", "usa_rmw"
        ]
        for col in numeric_cols:
            df[col] = pd.to_numeric(df[col], errors="coerce")
        return df

    def _load_covid19_us_cases(self, **kwds):
        self._source = "JHU CSSE COVID-19"
        self._base_url = "https://github.com/CSSEGISandData/COVID-19"
        self._data_url = (
            "https://github.com/CSSEGISandData/COVID-19/raw/master/"
            "csse_covid_19_data/csse_covid_19_time_series/"
            "time_series_covid19_confirmed_US.csv")

        df = self._read_cache(**kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df = df.drop(
            [
                "UID", "iso2", "iso3", "code3", "FIPS", "Admin2",
                "Country_Region"
            ],
            axis=1,
        )
        df.columns = df.columns.str.lower().str.rstrip("_")
        df = df.melt(
            id_vars=["lat", "long", "combined_key", "province_state"],
            var_name="date",
            value_name="cases",
        )
        df["date"] = pd.to_datetime(df["date"])
        return df

    def _load_covid19_global_cases(self, **kwds):
        self._source = "JHU CSSE COVID-19"
        self._base_url = "https://github.com/CSSEGISandData/COVID-19"
        self._data_url = (
            "https://github.com/CSSEGISandData/COVID-19/raw/master/"
            "csse_covid_19_data/csse_covid_19_time_series/"
            "time_series_covid19_confirmed_global.csv")
        self._description = (
            "This is the data repository for the 2019 Novel Coronavirus "
            "Visual Dashboard operated by the Johns Hopkins University Center "
            "for Systems Science and Engineering (JHU CSSE). Also, Supported "
            "by ESRI Living Atlas Team and the Johns Hopkins University "
            "Applied Physics Lab (JHU APL).")

        df = self._read_cache(**kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df.columns = df.columns.str.lower().str.rstrip("_")
        df = df.melt(
            id_vars=["province/state", "country/region", "lat", "long"],
            var_name="date",
            value_name="cases",
        )
        df.columns = df.columns.str.replace("/", "_")
        df["date"] = pd.to_datetime(df["date"])
        return df

    def _load_covid19_population(self, **kwds):
        self._source = "JHU CSSE COVID-19"
        self._base_url = "https://github.com/CSSEGISandData/COVID-19"
        self._data_url = ("https://raw.githubusercontent.com/"
                          "CSSEGISandData/COVID-19/master/"
                          "csse_covid_19_data/UID_ISO_FIPS_LookUp_Table.csv")
        self._description = (
            "This is the data repository for the 2019 Novel Coronavirus "
            "Visual Dashboard operated by the Johns Hopkins University Center "
            "for Systems Science and Engineering (JHU CSSE). Also, Supported "
            "by ESRI Living Atlas Team and the Johns Hopkins University "
            "Applied Physics Lab (JHU APL).")

        df = self._read_cache(**kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df.columns = df.columns.str.lower().str.rstrip("_")
        return df

    def _load_gapminder_life_expectancy(self, **kwds):
        self._source = "World Bank Gapminder"
        self._base_url = (
            "https://github.com/open-numbers/ddf--gapminder--systema_globalis")
        self._data_url = ("https://raw.githubusercontent.com/open-numbers/"
                          "ddf--gapminder--systema_globalis/master/"
                          "countries-etc-datapoints/ddf--datapoints--"
                          "life_expectancy_years--by--geo--time.csv")
        self._description = (
            "This is the main dataset used in tools on the official Gapminder "
            "website. It contains local & global statistics combined from "
            "hundreds of sources.")

        df = self._read_cache(**kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df = df.rename(columns={"life_expectancy_years": "life_expectancy"})
        return df

    def _load_gapminder_income(self, **kwds):
        self._source = "World Bank Gapminder"
        self._base_url = (
            "https://github.com/open-numbers/ddf--gapminder--systema_globalis")
        self._data_url = (
            "https://raw.githubusercontent.com/open-numbers/"
            "ddf--gapminder--systema_globalis/master/"
            "countries-etc-datapoints/ddf--datapoints--"
            "income_per_person_gdppercapita_ppp_inflation_adjusted"
            "--by--geo--time.csv")
        self._description = (
            "This is the main dataset used in tools on the official Gapminder "
            "website. It contains local & global statistics combined from "
            "hundreds of sources.")

        df = self._read_cache(**kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df = df.rename(
            columns={
                "income_per_person_gdppercapita_ppp_inflation_adjusted":
                "income"  # noqa
            })
        return df

    def _load_gapminder_population(self, **kwds):
        self._source = "World Bank Gapminder"
        self._base_url = (
            "https://github.com/open-numbers/ddf--gapminder--systema_globalis")
        self._data_url = ("https://raw.githubusercontent.com/open-numbers/"
                          "ddf--gapminder--systema_globalis/master/"
                          "countries-etc-datapoints/ddf--datapoints--"
                          "population_total--by--geo--time.csv")
        self._description = (
            "This is the main dataset used in tools on the official Gapminder "
            "website. It contains local & global statistics combined from "
            "hundreds of sources.")

        df = self._read_cache(**kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df = df.rename(columns={"population_total": "population"})
        return df

    def _load_gapminder_country(self, **kwds):
        self._source = "World Bank Gapminder"
        self._base_url = (
            "https://github.com/open-numbers/ddf--gapminder--systema_globalis")
        self._data_url = ("https://raw.githubusercontent.com/open-numbers/"
                          "ddf--gapminder--systema_globalis/master/"
                          "ddf--entities--geo--country.csv")
        self._description = (
            "This is the main dataset used in tools on the official Gapminder "
            "website. It contains local & global statistics combined from "
            "hundreds of sources.")

        df = self._read_cache(**kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **kwds)

        if self.raw:
            return df

        df = df[["country", "name",
                 "world_6region"]].rename(columns={"world_6region": "region"})
        df["region"] = df["region"].str.replace("_", " ").str.title()
        return df

    def _load_iem_asos(
        self,
        ini="2020-01-01",
        end="2020-01-03",
        stn="CMI",
        tz="utc",
        data="all",
        latlon="no",
        elev="no",
        **kwds,
    ):
        if isinstance(stn, str):
            stn = [stn]
        stn = "&station=".join(stn)

        if isinstance(data, str):
            data = [data]

        valid_tzs = OPTIONS["iem_tz"]
        tz = valid_tzs.get(tz, tz)
        if tz not in valid_tzs.values():
            raise ValueError(
                f"tz must be one of the following: {valid_tzs}; got {tz}")

        valid_data = OPTIONS["iem_data"]
        cols = []
        for col in data:
            col = col.strip()
            if col not in valid_data and col != "all":
                raise ValueError(
                    f"data must be a subset of: {valid_data}; got {col}")
            cols.append(col)
        data = "&data=".join(cols)

        ini_dt = pd.to_datetime(ini)
        end_dt = pd.to_datetime(end)

        self._source = "Iowa Environment Mesonet ASOS"
        self._base_url = "https://mesonet.agron.iastate.edu/ASOS/"
        self._data_url = (
            f"https://mesonet.agron.iastate.edu/cgi-bin/request/asos.py?"
            f"station={stn}&data={data}&latlon={latlon}&elev={elev}&"
            f"year1={ini_dt:%Y}&month1={ini_dt:%m}&day1={ini_dt:%d}&"
            f"year2={end_dt:%Y}&month2={end_dt:%m}&day2={end_dt:%d}&"
            f"tz={tz}&format=onlycomma&"
            f"missing=empty&trace=empty&"
            f"direct=no&report_type=1&report_type=2")
        self._description = (
            "The IEM maintains an ever growing archive of automated airport "
            "weather observations from around the world! These observations "
            "are typically called 'ASOS' or sometimes 'AWOS' sensors. "
            "A more generic term may be METAR data, which is a term that "
            "describes the format the data is transmitted as. If you don't "
            "get data for a request, please feel free to contact us for help. "
            "The IEM also has a one minute interval dataset for US ASOS (2000-) "
            "and Iowa AWOS (1995-2011) sites. This archive simply provides the "
            "as-is collection of historical observations, very little "
            "quality control is done.")

        cache_kwds = kwds.copy()
        cache_kwds.update(ini=ini,
                          end=end,
                          stn=stn,
                          tz=tz,
                          data=data,
                          latlon=latlon,
                          elev=elev)
        df = self._read_cache(**cache_kwds)
        if df is None:
            df = pd.read_csv(self._data_url, **kwds)
            self._cache_dataset(df, **cache_kwds)

        if self.raw:
            return df

        df["valid"] = pd.to_datetime(df["valid"])
        df = df.set_index("valid")
        return df

    def open_dataset(self, **kwds):
        if self.label is None or self.label not in self._dataset_options:
            self.list_datasets()
            raise ValueError("Select a valid dataset listed above")

        if self.label.startswith("owid_"):
            data = getattr(self, "_load_owid")(**kwds)
        else:
            data = getattr(self, f"_load_{self.label}")(**kwds)

        label = self.label.replace("_", " ").upper()
        attr = f"{label}\n\nSource: {self._source}\n{self._base_url}\n"
        if self.verbose:
            attr = (
                f"{attr}\nDescription: {self._description}\n\nData: {self._data_url}\n"
            )

        if self.return_meta:
            meta = {}
            meta["label"] = self.label
            meta["source"] = self._source
            meta["base_url"] = self._base_url
            meta["description"] = self._description
            meta["data_url"] = self._data_url
            return data, meta
        else:
            print(attr)
            return data

    def list_datasets(self, pattern=None, sample=None):
        signatures = {}
        for option in self._dataset_options:
            if "owid" in option:
                signatures[option] = {}
                continue
            signature = inspect.signature(getattr(self, f"_load_{option}"))
            signatures[option] = {
                k: v.default
                for k, v in signature.parameters.items()
                if v.default is not inspect.Parameter.empty
            }

        keys = signatures.keys()
        if pattern is not None:
            if "*" not in pattern:
                pattern = f"*{pattern}*"
            keys = [key for key in keys if fnmatch.fnmatch(key, pattern)]

        if sample is not None:
            num_keys = len(keys)
            if num_keys < sample:
                sample = num_keys
            keys = random.sample(keys, sample)

        for key in keys:
            val = signatures[key]
            print(f"- {key}")
            if val:
                print("    adjustable keywords")
                for k, v in val.items():
                    print(f"    {k}: {v}")
Code Example #3
File: __init__.py Project: ioam/parambokeh
class JSONInit(param.Parameterized):
    """
    Callable that can be passed to Widgets.initializer to set Parameter
    values using JSON. There are three approaches that may be used:
    1. If the json_file argument is specified, this takes precedence.
    2. The JSON file path can be specified via an environment variable.
    3. The JSON can be read directly from an environment variable.
    Here is an easy example of setting such an environment variable on
    the command line:
    PARAM_JSON_INIT='{"p1":5}' jupyter notebook
    This is picked up by any JSONInit instance inspecting the default
    PARAM_JSON_INIT environment variable, instructing it to set the
    'p1' parameter to 5.
    """

    varname = param.String(default='PARAM_JSON_INIT', doc="""
        The name of the environment variable containing the JSON
        specification.""")

    target = param.String(default=None, doc="""
        Optional key in the JSON specification dictionary containing the
        desired parameter values.""")

    json_file = param.String(default=None, doc="""
        Optional path to a JSON file containing the parameter settings.""")


    def __call__(self, parameterized):

        warnobj = param.main if isinstance(parameterized, type) else parameterized
        param_class = (parameterized if isinstance(parameterized, type)
                       else parameterized.__class__)


        target = self.target if self.target is not None else param_class.__name__

        env_var = os.environ.get(self.varname, None)
        if env_var is None and self.json_file is None: return

        if self.json_file or env_var.endswith('.json'):
            try:
                fname = self.json_file if self.json_file else env_var
                spec = json.load(open(os.path.abspath(fname), 'r'))
            except Exception:
                # Reference fname (not spec) to avoid a NameError when loading fails.
                warnobj.warning('Could not load JSON file %r' % fname)
                return
        else:
            spec = json.loads(env_var)

        if not isinstance(spec, dict):
            warnobj.warning('JSON parameter specification must be a dictionary.')
            return

        if target in spec:
            params = spec[target]
        else:
            params = spec

        for name, value in params.items():
            try:
                parameterized.set_param(**{name: value})
            except ValueError as e:
                warnobj.warning(str(e))
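
A minimal sketch of the environment-variable path, using a hypothetical Parameterized class named Example; it assumes os, json and param are imported as in the module above:

class Example(param.Parameterized):
    p1 = param.Number(default=1)

# Supply the JSON spec through the default environment variable and apply it.
os.environ['PARAM_JSON_INIT'] = '{"p1": 5}'
example = Example()
JSONInit()(example)  # example.p1 is now 5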
Code Example #4
class TestString(param.Parameterized):
    a = param.String()
    b = param.String(default='', allow_None=True)
    c = param.String(default=None)
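
A quick check of the behavior this test class appears to exercise, assuming param's usual String semantics (a default of None implicitly enables allow_None):

t = TestString()
t.b = None  # allowed: allow_None=True was passed explicitly
t.c = None  # also allowed: default=None implies allow_None=True
# t.a = None would raise ValueError, since allow_None defaults to False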
Code Example #5
File: text_to_speech.py Project: syamajala/panel
class Utterance(param.Parameterized):
    """
    An *utterance* is the smallest unit of speech in spoken language analysis.

    The Utterance Model wraps the HTML5 SpeechSynthesisUtterance API

    See https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance
    """

    value = param.String(default="",
                         doc="""
        The text that will be synthesised when the utterance is
        spoken. The text may be provided as plain text, or a
        well-formed SSML document.""")

    lang = param.ObjectSelector(default="",
                                doc="""
        The language of the utterance.""")

    pitch = param.Number(default=1.0,
                         bounds=(0.0, 2.0),
                         doc="""
        The pitch at which the utterance will be spoken, expressed
        as a number between 0 and 2.""")

    rate = param.Number(default=1.0,
                        bounds=(0.1, 10.0),
                        doc="""
        The speed at which the utterance will be spoken, expressed
        as a number between 0.1 and 10.""")

    voice = param.ObjectSelector(doc="""
        The voice that will be used to speak the utterance.""")

    volume = param.Number(default=1.0,
                          bounds=(0.0, 1.0),
                          doc=""" The
        volume that the utterance will be spoken at expressed as a
        number between 0 and 1.""")

    def __init__(self, **params):
        voices = params.pop('voices', [])
        super().__init__(**params)
        self._voices_by_language = {}
        self.set_voices(voices)

    def to_dict(self, include_uuid=True):
        """Returns the object parameter values in a dictionary

        Returns:
            Dict: [description]
        """
        result = {
            "lang": self.lang,
            "pitch": self.pitch,
            "rate": self.rate,
            "text": self.value,
            "volume": self.volume,
        }
        if self.voice and self.voice.name:
            result["voice"] = self.voice.name
        if include_uuid:
            result["uuid"] = str(uuid.uuid4())
        return result

    def set_voices(self, voices):
        """Updates the `lang` and `voice` parameter objects, default and value"""
        if not voices:
            self.param.lang.objects = ["en-US"]
            self.param.lang.default = "en-US"
            self.lang = "en-US"
            return

        self._voices_by_language = Voice.group_by_lang(voices)
        self.param.lang.objects = list(self._voices_by_language.keys())
        if "en-US" in self._voices_by_language:
            default_lang = "en-US"
        else:
            default_lang = list(self._voices_by_language.keys())[0]
        self.param.lang.default = default_lang
        self.lang = default_lang
        self.param.trigger("lang")

    @param.depends("lang", watch=True)
    def _handle_lang_changed(self):
        if not self._voices_by_language or not self.lang:
            self.param.voice.default = None
            self.voice = None
            self.param.voice.objects = []
            return

        voices = self._voices_by_language[self.lang]
        if self.voice and self.voice in voices:
            default_voice = self.voice
        else:
            default_voice = voices[0]
            for voice in voices:
                if voice.default:
                    default_voice = voice

        self.param.voice.objects = voices
        self.param.voice.default = default_voice
        self.voice = default_voice
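
A minimal usage sketch; the text and rate are arbitrary, and without a browser-provided list of Voice objects the language falls back to "en-US" as set in set_voices above:

utterance = Utterance(value="Hello world", rate=1.2)
payload = utterance.to_dict(include_uuid=False)
# payload == {"lang": "en-US", "pitch": 1.0, "rate": 1.2,
#             "text": "Hello world", "volume": 1.0}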
Code Example #6
class SelectFile(param.Parameterized):
    file_path = param.String(default='')
    show_browser = param.Boolean(default=False)
    browse_toggle = param.Action(lambda self: self.toggle(), label='Browse')
    file_browser = param.ClassSelector(FileBrowser)
    title = param.String(default='File Path')
    help_text = param.String()

    def __init__(self, **params):
        super().__init__(**params)
        self.file_browser = self.file_browser or FileBrowser(delayed_init=True)

    @param.depends('file_browser', watch=True)
    def update_callback(self):
        self.file_browser.callback = self.update_file

    def update_file(self, new_selection):
        if new_selection:
            self.file_path = self.file_browser.value[0]

    def toggle(self):
        self.show_browser = not self.show_browser
        self.param.browse_toggle.label = 'Hide' if self.show_browser else 'Browse'

    @param.depends('show_browser')
    def file_browser_panel(self):
        if self.show_browser:
            self.file_browser.path_text = self.file_path
            self.file_browser.init()
            return self.file_browser.panel

    def input_row(self):
        return pn.Param(
            self,
            parameters=['file_path', 'browse_toggle'],
            widgets={
                'file_path': {
                    'width_policy': 'max',
                    'show_name': False
                },
                'browse_toggle': {
                    'button_type': 'primary',
                    'width': 100,
                    'align': 'end'
                }
            },
            default_layout=pn.Row,
            show_name=False,
            width_policy='max',
            margin=0,
        )

    @property
    def panel(self):
        self.param.file_path.label = self.title
        return pn.Column(
            self.input_row,
            pn.pane.HTML(
                f'<span style="font-style: italic;">{self.help_text}</span>'),
            self.file_browser_panel,
            width_policy='max')
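
A minimal usage sketch, assuming panel is imported as pn and a FileBrowser implementation (as referenced above) is available in this module:

select_file = SelectFile(title='Input File', help_text='Pick a file to load')
select_file.panel.servable()  # text input, Browse toggle and help text
# select_file.file_path holds the chosen path once a selection is made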
Code Example #7
class FileTransfer(param.Parameterized):
    uit_client = param.ClassSelector(Client, precedence=-1)

    from_location = param.ObjectSelector(default='onyx',
                                         objects=['jim', 'onyx', 'local'],
                                         precedence=0.21)
    from_directory = param.String(precedence=0.22)
    to_location = param.ObjectSelector(default='onyx',
                                       objects=['jim', 'onyx', 'local'],
                                       precedence=0.31)
    to_directory = param.String(precedence=0.32)
    file_manager = param.ClassSelector(class_=FileManager, precedence=0.4)
    transfer_button = param.Action(
        lambda self: self.param.trigger('transfer_button'),
        label='Transfer',
        precedence=1.0)

    def __init__(self, uit_client, **params):

        super().__init__(**params)
        self.uit_client = uit_client or Client()
        self.file_manager = FileManagerHPC(uit_client=self.uit_client)

        # adjust to/from based on uit_client
        self.param.from_location.objects = [self.uit_client.system, 'local']
        self.from_location = self.uit_client.system
        self.param.to_location.objects = [self.uit_client.system, 'local']
        self.to_location = self.uit_client.system

    @param.depends('transfer_button', watch=True)
    def transfer(self):
        if self.from_location == 'local':
            for local_file in self.file_manager.cross_selector.value:
                self.uit_client.put_file(local_file, self.to_directory)
        elif self.to_location == 'local':
            for remote_file in self.file_manager.cross_selector.value:
                log.info('transferring {}'.format(remote_file))
                self.uit_client.get_file(remote_file,
                                         local_path=os.path.join(
                                             self.to_directory,
                                             os.path.basename(remote_file)))

        else:
            log.warning('HPC to HPC transfers are not supported.')

    @param.depends('from_directory', watch=True)
    def _update_file_manager(self):
        """
        """
        self.file_manager.directory = self.from_directory

    def _from_location(self):
        return pn.Column(self.param.from_location, self.param.from_directory)

    @param.depends('from_location', watch=True)
    def _to_location(self):
        remote_dir = str(self.uit_client.WORKDIR)
        local_dir = os.getcwd()

        if self.from_location == 'local':
            # set from location and dir
            self.from_directory = local_dir

            # set to location and dir
            self.to_location = self.uit_client.system
            self.to_directory = remote_dir

            # set file manager to local manager
            self.file_manager = FileManager()
        else:
            # set to location and dir
            self.to_location = 'local'
            self.to_directory = local_dir
            self.from_directory = remote_dir

            # set file manager to hpc manager
            self.file_manager = FileManagerHPC(uit_client=self.uit_client)

        # set cross selector directory
        self.file_manager._update_files()

    @param.depends('from_directory', watch=True)
    def panel(self):
        from_box = pn.WidgetBox(
            pn.Column(self._from_location,
                      pn.Column(self.file_manager.cross_selector)))

        to_box = pn.WidgetBox(pn.Column(self.param.to_location,
                                        self.param.to_directory),
                              width=900,
                              width_policy='max')

        return pn.Column(from_box, to_box,
                         pn.panel(self.param.transfer_button))
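
A minimal usage sketch, assuming an already-authenticated pyuit Client instance named client (authentication is environment-specific and omitted here):

transfer = FileTransfer(uit_client=client)
transfer.panel().servable()
# Selecting 'local' as the from_location flips the direction and swaps in a
# local FileManager, as handled by _to_location above.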
Code Example #8
File: geo.py Project: shiwalimohan/geoviews
class Shape(Dataset):
    """
    Shape wraps any shapely geometry type.
    """

    group = param.String(default='Shape')

    datatype = param.List(default=['geom_dictionary'])

    level = param.Number(default=None,
                         doc="""
        Optional level associated with the set of Contours.""")

    vdims = param.List(default=[],
                       doc="""
        Shape optionally accepts a value dimension, corresponding
        to the supplied values.""",
                       bounds=(0, None))

    def __init__(self, data, kdims=None, vdims=None, **params):
        if params.get('level') is not None:
            if vdims is None:
                vdims = [Dimension('Level')]
            self.warning('Supplying a level to a Shape is deprecated; '
                         'provide the value as part of a dictionary of '
                         'the form {\'geometry\': <shapely.Geometry>, '
                         '\'level\': %s} instead' % params['level'])
        super(Shape, self).__init__(data, kdims=kdims, vdims=vdims, **params)

    @classmethod
    def from_shapefile(cls, shapefile, *args, **kwargs):
        """
        Loads a shapefile from disk and optionally merges
        it with a dataset. See ``from_records`` for full
        signature.
        """
        reader = Reader(shapefile)
        return cls.from_records(reader.records(), *args, **kwargs)

    @classmethod
    def from_records(cls,
                     records,
                     dataset=None,
                     on=None,
                     value=None,
                     index=[],
                     drop_missing=False,
                     element=None,
                     **kwargs):
        """
        Load data from a collection of
        ``cartopy.io.shapereader.Record`` objects and optionally merge
        it with a dataset to assign values to each polygon and form a
        choropleth. Supplying just records will return an NdOverlay
        of Shape Elements with a numeric index. If a dataset is
        supplied, a mapping between the attribute names in the records
        and the dimension names in the dataset must be supplied. The
        values assigned to each shape can then be drawn from the
        dataset by supplying a ``value``, while the keys the Shapes are
        indexed by are specified as one or more ``index`` dimensions.

        * records - An iterator of cartopy.io.shapereader.Record
                    objects.
        * dataset - Any HoloViews Dataset type.
        * on      - A mapping between the attribute names in
                    the records and the dimensions in the dataset.
        * value   - The value dimension in the dataset the
                    values will be drawn from.
        * index   - One or more dimensions in the dataset
                    the Shapes will be indexed by.
        * drop_missing - Whether to drop shapes which are missing from
                         the provided dataset.

        Returns an NdOverlay of Shapes.
        """
        if dataset is not None and not on:
            raise ValueError('To merge dataset with shapes mapping '
                             'must define attribute(s) to merge on.')

        if util.pd and isinstance(dataset, util.pd.DataFrame):
            dataset = Dataset(dataset)

        if not isinstance(on, (dict, list)):
            on = [on]
        if on and not isinstance(on, dict):
            on = {o: o for o in on}
        if not isinstance(index, list):
            index = [index]

        kdims = []
        for ind in index:
            if dataset and dataset.get_dimension(ind):
                dim = dataset.get_dimension(ind)
            else:
                dim = Dimension(ind)
            kdims.append(dim)

        ddims = []
        if dataset:
            if value:
                vdims = [dataset.get_dimension(value)]
            else:
                vdims = dataset.vdims
            if None in vdims:
                raise ValueError('Value dimension not found '
                                 'in dataset: {}'.format(value))
            ddims = dataset.dimensions()
        else:
            vdims = []

        data = []
        for i, rec in enumerate(records):
            geom = {}
            if dataset:
                selection = {
                    dim: rec.attributes.get(attr, None)
                    for attr, dim in on.items()
                }
                row = dataset.select(**selection)
                if len(row):
                    values = {
                        k: v[0]
                        for k, v in row.iloc[0].columns().items()
                    }
                elif drop_missing:
                    continue
                else:
                    values = {vd.name: np.nan for vd in vdims}
                geom.update(values)

            if index:
                for kdim in kdims:
                    if kdim in ddims and len(row):
                        k = row[kdim.name][0]
                    elif kdim.name in rec.attributes:
                        k = rec.attributes[kdim.name]
                    else:
                        k = None
                    geom[kdim.name] = k
            geom['geometry'] = rec.geometry
            data.append(geom)

        if element is not None:
            pass
        elif data and data[0]:
            if isinstance(data[0]['geometry'], poly_types):
                element = Polygons
            else:
                element = Path
        else:
            element = Polygons

        return element(data, vdims=kdims + vdims,
                       **kwargs).options(color_index=value)

    def geom(self):
        """
        Returns Shape as a shapely geometry
        """
        return self.data['geometry']
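
A minimal usage sketch of from_shapefile, assuming cartopy can download the Natural Earth data; 'NAME' is one of the attributes on the admin_0_countries records:

import cartopy.io.shapereader as shpreader

shp_path = shpreader.natural_earth(resolution='110m', category='cultural',
                                   name='admin_0_countries')
countries = Shape.from_shapefile(shp_path, index=['NAME'])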
Code Example #9
class QuickLookComponent(Component):

    data_repository = param.String(default=sample_data_directory,
                                   label=None,
                                   allow_None=True)

    query_filter = param.String(label="Query Expression")

    query_filter_active = param.String(label="Active Query Filter", default="")

    active_query_by_filter = param.Dict(
        default={f: ""
                 for f in store.active_dataset.filters})

    new_column_expr = param.String(label="Data Column Expression")

    tract_count = param.Number(default=0)

    status_message_queue = param.List(default=[])

    patch_count = param.Number(default=0)

    visit_count = param.Number(default=0)

    filter_count = param.Number(default=0)

    unique_object_count = param.Number(default=0)

    comparison = param.String()

    selected = param.Tuple(default=(None, None, None, None), length=4)

    selected_metrics_by_filter = param.Dict(
        default={f: []
                 for f in store.active_dataset.filters})

    selected_flag_filters = param.Dict(default={})

    view_mode = ["Overview", "Skyplot View", "Detail View"]
    data_stack = ["Forced Coadd", "Unforced Coadd"]

    plot_top = None
    plots_list = []
    skyplot_list = []
    detail_plots = {}

    label = param.String(default="Quick Look")

    def __init__(self, store, **param):

        super().__init__(**param)

        self.store = store

        self.overview = create_overview(self.on_tracts_updated)

        self._clear_metrics_button = pn.widgets.Button(name="Clear",
                                                       width=30,
                                                       align="end")
        self._clear_metrics_button.on_click(self._on_clear_metrics)

        self._submit_repository = pn.widgets.Button(name="Load Data",
                                                    width=50,
                                                    align="end")
        self._submit_repository.on_click(self._on_load_data_repository)

        self._submit_comparison = pn.widgets.Button(name="Submit",
                                                    width=50,
                                                    align="end")
        self._submit_comparison.on_click(self._update)

        self.flag_filter_select = pn.widgets.Select(
            name="Add Flag Filter",
            width=160,
            options=self.store.active_dataset.flags)

        self.flag_state_select = pn.widgets.Select(name="Flag State",
                                                   width=75,
                                                   options=["True", "False"])

        self.flag_submit = pn.widgets.Button(name="Add Flag Filter",
                                             width=10,
                                             height=30,
                                             align="end")
        self.flag_submit.on_click(self.on_flag_submit_click)

        self.flag_filter_selected = pn.widgets.Select(
            name="Active Flag Filters", width=250)

        self.flag_remove = pn.widgets.Button(name="Remove Flag Filter",
                                             width=50,
                                             height=30,
                                             align="end")
        self.flag_remove.on_click(self.on_flag_remove_click)

        self.query_filter_submit = pn.widgets.Button(name="Run Query Filter",
                                                     width=100,
                                                     align="end")
        self.query_filter_submit.on_click(self.on_run_query_filter_click)

        self.query_filter_clear = pn.widgets.Button(name="Clear",
                                                    width=50,
                                                    align="end")
        self.query_filter_clear.on_click(self.on_query_filter_clear)

        self.new_column_submit = pn.widgets.Button(name="Define New Column",
                                                   width=100,
                                                   align="end")
        self.new_column_submit.on_click(self.on_define_new_column_click)

        self.status_message = pn.pane.HTML(sizing_mode="stretch_width",
                                           max_height=10)
        self.adhoc_js = pn.pane.HTML(sizing_mode="stretch_width",
                                     max_height=10)
        self._info = pn.pane.HTML(sizing_mode="stretch_width", max_height=10)
        self._flags = pn.pane.HTML(sizing_mode="stretch_width", max_height=10)
        self._metric_panels = []
        self._metric_layout = pn.Column()
        self._switch_view = self._create_switch_view_buttons()
        self._switch_stack = self._create_switch_datastack_buttons()
        self._plot_top = pn.Row(sizing_mode="stretch_width",
                                margin=(10, 10, 10, 10))

        self._plot_layout = pn.Column(sizing_mode="stretch_width",
                                      margin=(10, 10, 10, 10))

        self._skyplot_tabs = pn.Tabs(sizing_mode="stretch_both")
        self.skyplot_layout = pn.Column(sizing_mode="stretch_width",
                                        margin=(10, 10, 10, 10))

        self._detail_tabs = pn.Tabs(sizing_mode="stretch_both")
        self.list_layout = pn.Column(sizing_mode="stretch_width")
        self.detail_plots_layout = pn.Column(sizing_mode="stretch_width")

        self._filter_streams = {}
        self._skyplot_range_stream = RangeXY()
        self._scatter_range_stream = RangeXY()

        self._update(None)

    def _on_load_data_repository(self, event, load_metrics=True):

        # Setup Variables
        global datasets
        global filtered_datasets
        # global datavisits
        # global filtered_datavisits

        self.store.active_dataset = Dataset("")
        self.skyplot_list = []
        self.plots_list = []
        self.plot_top = None

        datasets = {}
        filtered_datasets = {}
        # datavisits = {}
        # filtered_datavisits = {}

        # Setup UI
        self._switch_view_mode()
        self.update_display()

        # Load Data
        self.add_status_message("Load Data Start...",
                                self.data_repository,
                                level="info")

        dstack_switch_val = self._switch_stack.value.lower()
        datastack = "unforced" if "unforced" in dstack_switch_val else "forced"
        try:
            self.store.active_dataset = load_data(self.data_repository,
                                                  datastack)

        except Exception as e:
            self.update_display()
            self.add_message_from_error("Data Loading Error",
                                        self.data_repository, e)
            raise

        self.add_status_message("Data Ready",
                                self.data_repository,
                                level="success",
                                duration=3)

        # Update UI
        self.flag_filter_select.options = self.store.active_dataset.flags

        for f in self.store.active_dataset.filters:
            self.selected_metrics_by_filter[f] = []
            self.active_query_by_filter[f] = ""

        if load_metrics:
            self._load_metrics()

        self._switch_view_mode()
        self.update_display()

    def update_display(self):
        self.set_checkbox_style()

    def set_checkbox_style(self):
        code = """$("input[type='checkbox']").addClass("metric-checkbox");"""
        self.execute_js_script(code)

        global store
        for filter_type, fails in store.active_dataset.failures.items():
            error_metrics = json.dumps(fails)
            code = (
                '$(".' + filter_type +
                '-checkboxes .metric-checkbox").siblings().filter(function () { return '
                + error_metrics +
                '.indexOf($(this).text()) > -1;}).css("color", "orange");')
            self.execute_js_script(code)

    def add_status_message(self, title, body, level="info", duration=5):
        msg = {"title": title, "body": body}
        msg_args = dict(msg=msg, level=level, duration=duration)
        self.status_message_queue.append(msg_args)
        self.param.trigger("status_message_queue")  # to work with panel 0.7
        # Drop message in terminal/logger too
        try:
            # temporary try/except until 'level' values are all checked
            getattr(logger, level)(msg)
        except:
            pass

    def on_flag_submit_click(self, event):
        flag_name = self.flag_filter_select.value
        flag_state = self.flag_state_select.value == "True"
        self.selected_flag_filters.update({flag_name: flag_state})
        self.param.trigger("selected_flag_filters")
        self.add_status_message("Added Flag Filter",
                                "{} : {}".format(flag_name, flag_state),
                                level="info")

    def on_flag_remove_click(self, event):
        flag_name = self.flag_filter_selected.value.split()[0]
        del self.selected_flag_filters[flag_name]
        self.param.trigger("selected_flag_filters")
        self.add_status_message("Removed Flag Filter", flag_name, level="info")

    def on_run_query_filter_click(self, event):
        self.query_filter_active = self.query_filter
        self.query_filter = ""

    def on_query_filter_clear(self, event):
        self.query_filter = ""
        self.query_filter_active = ""

    def _on_clear_metrics(self, event):
        for k in self.selected_metrics_by_filter.keys():
            self.selected_metrics_by_filter[k] = []
        self.param.trigger("selected_metrics_by_filter")
        code = """$("input[type='checkbox']").prop("checked", false);"""
        self.execute_js_script(code)

    def on_define_new_column_click(self, event):
        new_column_expr = self.new_column_expr

    def _create_switch_view_buttons(self):
        radio_group = pn.widgets.RadioBoxGroup(name="SwitchView",
                                               options=self.view_mode,
                                               align="center",
                                               value=self.view_mode[0],
                                               inline=True)
        radio_group.param.watch(self._switch_view_mode, ["value"])
        return radio_group

    def _create_switch_datastack_buttons(self):
        radio_group = pn.widgets.RadioBoxGroup(
            name="SwitchDataStack",
            options=self.data_stack,
            align="center",
            value=self.data_stack[0],
            inline=True,
        )
        radio_group.param.watch(self._switch_data_stack, ["value"])
        return radio_group

    def update_selected_by_filter(self, filter_type, selected_values):
        self.selected_metrics_by_filter.update({filter_type: selected_values})
        self.param.trigger("selected_metrics_by_filter")

    def _update(self, event):
        self._update_info()
        self._load_metrics()

    def create_info_element(self, name, value):
        box_css = """
        background-color: #EEEEEE;
        border: 1px solid #777777;
        display: inline-block;
        padding-left: 5px;
        padding-right: 5px;
        margin-left:7px;
        """

        fval = format(value, ",")
        outel = '<li><span style="{}"><b>{}</b> {}</span></li>'
        return outel.format(box_css, fval, name)

    @param.depends("tract_count",
                   "patch_count",
                   "visit_count",
                   "filter_count",
                   "unique_object_count",
                   watch=True)
    def _update_info(self):
        """
        Updates the _info HTML pane with info loaded
        from the current repository.
        """
        html = ""
        html += self.create_info_element("Tracts", self.tract_count)
        html += self.create_info_element("Patches", self.patch_count)
        html += self.create_info_element("Visits", self.visit_count)
        # html += self.create_info_element('Unique Objects',
        #                                  self.unique_object_count)
        self._info.object = '<ul class="list-group list-group-horizontal" style="list-style: none;">{}</ul>'.format(
            html)

    def create_status_message(self, msg, level="info", duration=5):
        import uuid

        msg_id = str(uuid.uuid1())
        color_levels = dict(
            info="rgba(0,191,255, .8)",
            error="rgba(249, 180, 45, .8)",
            warning="rgba(240, 255, 0, .8)",
            success="rgba(3, 201, 169, .8)",
        )

        box_css = """
        width: 15rem;
        background-color: {};
        border: 1px solid #CCCCCC;
        display: inline-block;
        color: white;
        padding: 5px;
        margin-top: 1rem;
        """.format(color_levels.get(level, "rgba(0,0,0,0)"))

        remove_msg_func = ("<script>(function() { "
                           'setTimeout(function(){ document.getElementById("' +
                           msg_id + '").outerHTML = ""; }, ' +
                           str(duration * 1000) + ")})()"
                           "</script>")

        text = '<span style="{}"><h5>{}</h5><hr/><p>{}</p></span>'.format(
            box_css, msg.get("title"), msg.get("body"))

        return ('<li id="{}" class="status-message nav-item">'
                "{}"
                "{}"
                "</li>").format(msg_id, remove_msg_func, text)

    def gen_clear_func(self, msg):
        async def clear_message():
            try:
                if msg in self.status_message_queue:
                    self.status_message_queue.remove(msg)
            except ValueError:
                pass

        return clear_message

    @param.depends("status_message_queue", watch=True)
    def _update_status_message(self):

        queue_css = """
        list-style-type: none;
        position: fixed;
        bottom: 2rem;
        right: 2rem;
        background-color: rgba(0,0,0,0);
        border: none;
        display: inline-block;
        margin-left: 7px;
        """

        html = ""

        for msg in self.status_message_queue:
            html += self.create_status_message(**msg)
            set_timeout(msg.get("duration", 5), self.gen_clear_func(msg))

        self.status_message.object = '<ul style="{}">{}</ul>'.format(
            queue_css, html)

    def execute_js_script(self, js_body):
        script = "<script>(function() { " + js_body + "})()</script>"  # to work with panel 0.7
        self.adhoc_js.object = script

    def get_patch_count(self):
        filters = self.selected_metrics_by_filter.keys()
        return self.store.active_dataset.get_patch_count(
            filters, self.store.active_tracts)

    def get_tract_count(self):
        return len(self.store.active_tracts)

    def get_visit_count(self):
        # NOTE: stubbed to return a constant; the counting logic below is unreachable.
        return 1
        dvisits = self.get_datavisits()
        visits = set()
        for filt, metrics in self.selected_metrics_by_filter.items():
            for metric in metrics:
                df = dvisits[filt][metric]
                visits = visits.union(set(df["visit"]))
        return len(visits)

    def update_info_counts(self):
        self.tract_count = self.get_tract_count()
        self.patch_count = self.get_patch_count()
        self.visit_count = self.get_visit_count()
        self.unique_object_count = get_unique_object_count()

    def _load_metrics(self):
        """
        Populates the _metrics Row with metrics loaded from the repository
        """
        panels = [
            MetricPanel(metric="LSST",
                        filters=self.store.active_dataset.filters,
                        parent=self)
        ]
        self._metric_panels = panels

        self._metric_layout.objects = [p.panel() for p in panels]
        self.update_display()

    @param.depends("query_filter_active", watch=True)
    def _update_query_filter(self):
        self.filter_main_dataframe()

    @param.depends("selected_flag_filters", watch=True)
    def _update_selected_flags(self):
        selected_flags = [
            "{} : {}".format(f, v)
            for f, v in self.selected_flag_filters.items()
        ]
        self.flag_filter_selected.options = selected_flags
        self.filter_main_dataframe()

    def filter_main_dataframe(self):
        global filtered_datasets
        global datasets
        for filt, qa_dataset in datasets.items():
            try:
                query_expr = self._assemble_query_expression()
                if query_expr:
                    filtered_datasets[filt] = datasets[filt].query(query_expr)
            except Exception as e:
                self.add_message_from_error("Filtering Error", "", e)
                raise
        self._update_selected_metrics_by_filter()

    def _assemble_query_expression(self, ignore_query_expr=False):
        query_expr = ""

        flags_query = []
        for flag, state in self.selected_flag_filters.items():
            flags_query.append("{}=={}".format(flag, state))
        if flags_query:
            query_expr += " & ".join(flags_query)

        if ignore_query_expr:
            return query_expr

        query_filter = self.query_filter.strip()
        if query_filter:
            if query_expr:
                query_expr += " & {!s}".format(query_filter)
            else:
                query_expr = "{!s}".format(query_filter)

        return query_expr
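    # Illustrative note added by the editor (not part of the original code):
    # the flag filters and the free-form query filter are joined into one
    # pandas query expression. For example (flag/column names are made up):
    #
    #   selected_flag_filters = {"base_PixelFlags_flag_saturated": False}
    #   query_filter          = "psfMag < 22"
    #
    # _assemble_query_expression() would then return
    #   "base_PixelFlags_flag_saturated==False & psfMag < 22"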

    def get_dataset_by_filter(self, filter_type, metrics):
        global datasets
        global filtered_datasets

        warnings = []
        df = self.store.active_dataset.get_coadd_ddf_by_filter_metric(
            filter_type,
            metrics=metrics,
            tracts=self.store.active_tracts,
            coadd_version=self.store.active_dataset.coadd_version,
            warnings=warnings,
        )
        if warnings:
            msg = ";".join(warnings)
            self.add_status_message("Selected Tracts Warning",
                                    msg,
                                    level="error")

        datasets[filter_type] = df
        filtered_datasets[filter_type] = df

        if self.query_filter or len(self.selected_flag_filters) > 0:

            query_expr = self._assemble_query_expression()

            if query_expr:
                df = df.query(query_expr)
                filtered_datasets[filter_type] = df

        stats = self.store.active_dataset.stats[
            f"coadd_{self.store.active_dataset.coadd_version}"]
        if self.store.active_tracts:
            stats = stats.loc[filter_type,
                              self.store.active_tracts, :].reset_index(
                                  ["filter", "tract"], drop=True)
        else:
            stats = stats.loc[filter_type, :, :].reset_index(
                ["filter", "tract"], drop=True)

        return create_hv_dataset(df, stats=stats)

    def get_datavisits(self):
        return self.store.active_dataset.stats["visit"]

    def add_message_from_error(self,
                               title,
                               info,
                               exception_obj,
                               level="error"):

        tb = traceback.format_exception_only(type(exception_obj),
                                             exception_obj)[0]
        msg_body = "<b>Info:</b> " + info + "<br />"
        msg_body += "<b>Cause:</b> " + tb
        logger.error(title)
        logger.error(msg_body)
        self.add_status_message(title, msg_body, level=level, duration=10)

    @param.depends("selected_metrics_by_filter", watch=True)
    # @profile(immediate=True)
    def _update_selected_metrics_by_filter(self):
        skyplot_list = []
        detail_plots = {}
        existing_skyplots = {}

        dvisits = self.get_datavisits()
        for filt, metrics in self.selected_metrics_by_filter.items():
            plots_list = []
            if not metrics:
                continue
            top_plot = None
            try:
                errors = []
                top_plot = visits_plot(dvisits,
                                       self.selected_metrics_by_filter, filt,
                                       errors)
                if errors:
                    msg = "exhibiting metrics {} failed"
                    msg = msg.format(" ".join(errors))
                    self.add_status_message("Visits Plot Warning",
                                            msg,
                                            level="error",
                                            duration=10)
            except Exception as e:
                self.add_message_from_error("Visits Plot Error", "", e)

            self.plot_top = top_plot
            detail_plots[filt] = [top_plot]

            if filt in self._filter_streams:
                filter_stream = self._filter_streams[filt]
            else:
                self._filter_streams[filt] = filter_stream = FilterStream()
            dset = self.get_dataset_by_filter(filt, metrics=metrics)
            for i, metric in enumerate(metrics):
                # Sky plots
                skyplot_name = filt + " - " + metric
                plot_sky = skyplot(dset,
                                   filter_stream=filter_stream,
                                   range_stream=self._skyplot_range_stream,
                                   vdim=metric)
                if skyplot_name in existing_skyplots:
                    sky_panel = existing_skyplots[skyplot_name]
                    sky_panel.object = plot_sky
                else:
                    sky_panel = pn.panel(plot_sky)
                skyplot_list.append((skyplot_name, sky_panel))

                # Detail plots
                plots_ss = scattersky(
                    dset,
                    xdim="psfMag",
                    ydim=metric,
                    sky_range_stream=self._skyplot_range_stream,
                    scatter_range_stream=self._scatter_range_stream,
                    filter_stream=filter_stream,
                )
                plots_list.append((metric, plots_ss))
            detail_plots[filt].extend([p for m, p in plots_list])

        self.skyplot_list = skyplot_list
        self.plots_list = plots_list
        self.detail_plots = detail_plots

        self.update_display()
        self._switch_view_mode()

    def _update_detail_plots(self):
        tabs = []
        for filt, plots in self.detail_plots.items():
            plots_list = pn.Column(*plots, sizing_mode="stretch_width")
            tabs.append((filt, plots_list))
        self._detail_tabs[:] = tabs

    def attempt_to_clear(self, obj):
        try:
            obj.clear()
        except Exception:
            pass

    def _switch_data_stack(self, *events):
        # clear existing plot layouts
        self.attempt_to_clear(self._plot_top)
        self.attempt_to_clear(self._plot_layout)
        self.attempt_to_clear(self.list_layout)
        self.attempt_to_clear(self.detail_plots_layout)

        self._on_clear_metrics(event=None)
        self._on_load_data_repository(None)

    def _switch_view_mode(self, *events):

        # clear existing plot layouts
        self.attempt_to_clear(self._plot_top)
        self.attempt_to_clear(self._plot_layout)
        self.attempt_to_clear(self.list_layout)
        self.attempt_to_clear(self.detail_plots_layout)
        self.attempt_to_clear(self.skyplot_layout)

        if self._switch_view.value == "Skyplot View":
            cmd = """$( ".skyplot-plot-area" ).show();""" """$( ".metrics-plot-area" ).hide();"""
            self.execute_js_script(cmd)
            clear_dynamicmaps(self._skyplot_tabs)
            self._skyplot_tabs[:] = self.skyplot_list
            self.skyplot_layout[:] = [self._skyplot_tabs]
        elif self._switch_view.value == "Overview":
            cmd = ("""$( ".skyplot-plot-area" ).hide();"""
                   """$( ".metrics-plot-area" ).hide();"""
                   """$( ".overview-plot-area" ).show();""")
            self.execute_js_script(cmd)
        else:
            cmd = ("""$( ".skyplot-plot-area" ).hide();"""
                   """$( ".metrics-plot-area" ).show();"""
                   """$( ".overview-plot-area" ).hide();""")
            self.execute_js_script(cmd)

            self._update_detail_plots()
            clear_dynamicmaps(self._detail_tabs)
            self._plot_top[:] = [self.plot_top]
            self.list_layout[:] = [p for _, p in self.plots_list]
            self._plot_layout[:] = [self.list_layout]
            self.detail_plots_layout[:] = [self._detail_tabs]

    def on_tracts_updated(self, tracts):

        if self.store.active_tracts != tracts:
            self.store.active_tracts = tracts
            self.attempt_to_clear(self._plot_top)
            self.attempt_to_clear(self._plot_layout)
            self.attempt_to_clear(self.skyplot_layout)
            self.attempt_to_clear(self.list_layout)

            self._on_load_data_repository(None, load_metrics=False)
            self.update_info_counts()
            print("TRACTS UPDATED!!!!!! {}".format(tracts))

    def jinja(self):

        tmpl = pn.Template(dashboard_html_template)

        data_repo_widget = pn.panel(self.param.data_repository,
                                    show_labels=False)
        data_repo_widget.width = 300
        data_repo_row = pn.Row(data_repo_widget, self._submit_repository)
        data_repo_row.css_classes = ["data-repo-input"]

        query_filter_widget = pn.panel(self.param.query_filter)
        query_filter_widget.width = 260

        query_filter_active_widget = pn.panel(self.param.query_filter_active)
        query_filter_active_widget.width = 260
        query_filter_active_widget.disabled = True

        new_column_widget = pn.panel(self.param.new_column_expr)
        new_column_widget.width = 260

        datastack_switcher = pn.Row(self._switch_stack)
        datastack_switcher.css_classes = ["stack-switcher"]

        view_switcher = pn.Row(self._switch_view)
        view_switcher.css_classes = ["view-switcher"]

        clear_button_row = pn.Row(self._clear_metrics_button)

        components = [
            ("metrics_clear_button", clear_button_row),
            ("data_repo_path", data_repo_row),
            ("status_message_queue", self.status_message),
            ("adhoc_js", self.adhoc_js),
            ("infobar", self._info),
            ("stack_switcher", datastack_switcher),
            ("view_switcher", view_switcher),
            ("metrics_selectors", self._metric_layout),
            # ('metrics_plots', self._plot_layout),
            # ('plot_top', self._plot_top),
            ("plot_top", None),
            ("metrics_plots", self.detail_plots_layout),
            ("skyplot_metrics_plots", self.skyplot_layout),
            ("overview_plots", self.overview),
            (
                "flags",
                pn.Column(
                    pn.Row(self.flag_filter_select, self.flag_state_select),
                    pn.Row(self.flag_submit),
                    pn.Row(self.flag_filter_selected),
                    pn.Row(self.flag_remove),
                ),
            ),
            (
                "query_filter",
                pn.Column(
                    query_filter_widget,
                    query_filter_active_widget,
                    pn.Row(self.query_filter_clear, self.query_filter_submit),
                ),
            ),
            (
                "new_column",
                pn.Column(new_column_widget, pn.Row(self.new_column_submit)),
            ),
        ]

        for l, c in components:
            tmpl.add_panel(l, c)

        return tmpl
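
# --- Editor's illustrative sketch (not part of the original example) ---
# A minimal, self-contained version of the pattern jinja() relies on:
# panels are registered with pn.Template.add_panel() under names that the
# Jinja2 template embeds via {{ embed(roots.<name>) }}. The template string
# and panel names below are placeholders, not the dashboard's real template.
import panel as pn

_sketch_template = """
{% extends base %}
{% block contents %}
<div class="sidebar">{{ embed(roots.controls) }}</div>
<div class="main">{{ embed(roots.plots) }}</div>
{% endblock %}
"""

_sketch_tmpl = pn.Template(_sketch_template)
_sketch_tmpl.add_panel("controls", pn.widgets.TextInput(name="Query"))
_sketch_tmpl.add_panel("plots", pn.pane.Markdown("plots go here"))
# _sketch_tmpl.servable()  # serve with: panel serve app.py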
Code example #10
0
File: dimension.py Project: acrosby/holoviews
class Dimensioned(LabelledData):
    """
    Dimensioned is a base class that allows the data contents of a
    class to be associated with dimensions. The contents associated
    with dimensions may be partitioned into one of three types

    * key dimensions: These are the dimensions that can be indexed via
                      the __getitem__ method. Dimension objects
                      supporting key dimensions must support indexing
                      over these dimensions and may also support
                      slicing. This list ordering of dimensions
                      describes the positional components of each
                      multi-dimensional indexing operation.

                      For instance, if the key dimension names are
                      'weight' followed by 'height' for Dimensioned
                      object 'obj', then obj[80,175] indexes a weight
                      of 80 and height of 175.

                      Accessed using either kdims or key_dimensions.

    * value dimensions: These dimensions correspond to any data held
                        on the Dimensioned object not in the key
                        dimensions. Indexing by value dimension is
                        supported by dimension name (when there are
                        multiple possible value dimensions); no
                        slicing semantics is supported and all the
                        data associated with that dimension will be
                        returned at once. Note that it is not possible
                        to mix value dimensions and deep dimensions.

                        Accessed using either vdims or value_dimensions.


    * deep dimensions: These are dynamically computed dimensions that
                       belong to other Dimensioned objects that are
                       nested in the data. Objects that support this
                       should enable the _deep_indexable flag. Note
                       that it is not possible to mix value dimensions
                       and deep dimensions.

                       Accessed using either ddims or deep_dimensions.

    The Dimensioned class supports generalized methods for finding the
    range and type of values along a particular Dimension. The range
    method relies on the appropriate implementation of the
    dimension_values methods on subclasses.

    The index of an arbitrary dimension is its positional index in the
    list of all dimensions, starting with the key dimensions, followed
    by the value dimensions and ending with the deep dimensions.
    """

    cdims = param.Dict(default=OrderedDict(),
                       doc="""
       The constant dimensions defined as a dictionary of Dimension:value
       pairs providing additional dimension information about the object.

       Aliased with constant_dimensions.""")

    kdims = param.List(bounds=(0, None),
                       constant=True,
                       doc="""
       The key dimensions defined as list of dimensions that may be
       used in indexing (and potential slicing) semantics. The order
       of the dimensions listed here determines the semantics of each
       component of a multi-dimensional indexing operation.

       Aliased with key_dimensions.""")

    vdims = param.List(bounds=(0, None),
                       constant=True,
                       doc="""
       The value dimensions defined as the list of dimensions used to
       describe the components of the data. If multiple value
       dimensions are supplied, a particular value dimension may be
       indexed by name after the key dimensions.

       Aliased with value_dimensions.""")

    group = param.String(default='Dimensioned',
                         constant=True,
                         doc="""
       A string describing the data wrapped by the object.""")

    __abstract = True
    _dim_groups = ['kdims', 'vdims', 'cdims', 'ddims']
    _dim_aliases = dict(key_dimensions='kdims',
                        value_dimensions='vdims',
                        constant_dimensions='cdims',
                        deep_dimensions='ddims')

    # Long-name aliases

    @property
    def key_dimensions(self):
        return self.kdims

    @property
    def value_dimensions(self):
        return self.vdims

    @property
    def constant_dimensions(self):
        return self.cdims

    @property
    def deep_dimensions(self):
        return self.ddims

    def __init__(self, data, **params):
        for group in self._dim_groups + list(self._dim_aliases.keys()):
            if group in ['deep_dimensions', 'ddims']: continue
            if group in params:
                if group in self._dim_aliases:
                    params[self._dim_aliases[group]] = params.pop(group)
                    group = self._dim_aliases[group]
                if group == 'cdims':
                    dimensions = {
                        d if isinstance(d, Dimension) else Dimension(d): val
                        for d, val in params.pop(group).items()
                    }
                else:
                    dimensions = [
                        d if isinstance(d, Dimension) else Dimension(d)
                        for d in params.pop(group)
                    ]
                params[group] = dimensions
        super(Dimensioned, self).__init__(data, **params)
        self.ndims = len(self.kdims)
        cdims = [(d.name, val) for d, val in self.cdims.items()]
        self._cached_constants = OrderedDict(cdims)
        self._settings = None
        self.redim = redim(self)

    def _valid_dimensions(self, dimensions):
        """Validates key dimension input

        Returns kdims if no dimensions are specified"""
        if dimensions is None:
            dimensions = self.kdims
        elif not isinstance(dimensions, list):
            dimensions = [dimensions]

        valid_dimensions = []
        for dim in dimensions:
            if isinstance(dim, Dimension): dim = dim.name
            if dim not in self.kdims:
                raise Exception("Supplied dimensions %s not found." % dim)
            valid_dimensions.append(dim)
        return valid_dimensions

    @property
    def ddims(self):
        "The list of deep dimensions"
        if self._deep_indexable and self:
            return self.values()[0].dimensions()
        else:
            return []

    def dimensions(self, selection='all', label=False):
        """
        Provides convenient access to Dimensions on nested
        Dimensioned objects. Dimensions can be selected
        by their type, i.e. 'key' or 'value' dimensions.
        By default 'all' dimensions are returned.
        """
        if label in ['name', True]:
            label = 'short'
        elif label == 'label':
            label = 'long'
        elif label:
            raise ValueError(
                "label needs to be one of True, False, 'name' or 'label'")

        lambdas = {
            'k': (lambda x: x.kdims, {
                'full_breadth': False
            }),
            'v': (lambda x: x.vdims, {}),
            'c': (lambda x: x.cdims, {})
        }
        aliases = {'key': 'k', 'value': 'v', 'constant': 'c'}
        if selection == 'all':
            groups = [d for d in self._dim_groups if d != 'cdims']
            dims = [dim for group in groups for dim in getattr(self, group)]
        elif isinstance(selection, list):
            dims = [
                dim for group in selection
                for dim in getattr(self, '%sdims' % aliases.get(group))
            ]
        elif aliases.get(selection) in lambdas:
            selection = aliases.get(selection, selection)
            lmbd, kwargs = lambdas[selection]
            key_traversal = self.traverse(lmbd, **kwargs)
            dims = [dim for keydims in key_traversal for dim in keydims]
        else:
            raise KeyError("Invalid selection %r, valid selections include"
                           "'all', 'value' and 'key' dimensions" %
                           repr(selection))
        return [(dim.label if label == 'long' else dim.name) if label else dim
                for dim in dims]

    def get_dimension(self, dimension, default=None, strict=False):
        """
        Access a Dimension object by name or index.
        Returns the default value if the dimension is not found and
        strict is False. If strict is True, a KeyError is raised
        instead.
        """
        all_dims = self.dimensions()
        if isinstance(dimension, Dimension):
            dimension = dimension.name
        if isinstance(dimension, int):
            if 0 <= dimension < len(all_dims):
                return all_dims[dimension]
            elif strict:
                raise KeyError("Dimension %s not found" % dimension)
            else:
                return default
        name_map = {dim.name: dim for dim in all_dims}
        name_map.update({dim.label: dim for dim in all_dims})
        name_map.update(
            {dimension_sanitizer(dim.name): dim
             for dim in all_dims})
        if strict and dimension not in name_map:
            raise KeyError("Dimension %s not found" % dimension)
        else:
            return name_map.get(dimension, default)

    def get_dimension_index(self, dim):
        """
        Returns the index of the requested dimension.
        """
        if isinstance(dim, Dimension): dim = dim.name
        if isinstance(dim, int):
            if (dim < (self.ndims + len(self.vdims))
                    or dim < len(self.dimensions())):
                return dim
            else:
                raise IndexError('Dimension index out of bounds')
        try:
            dimensions = self.kdims + self.vdims
            return [i for i, d in enumerate(dimensions) if d == dim][0]
        except IndexError:
            raise Exception("Dimension %s not found in %s." %
                            (dim, self.__class__.__name__))

    def get_dimension_type(self, dim):
        """
        Returns the Dimension type if explicitly declared, or the common
        type of the dimension_values if they are consistent; otherwise
        None is returned.
        """
        dim_obj = self.get_dimension(dim)
        if dim_obj and dim_obj.type is not None:
            return dim_obj.type
        dim_vals = [type(v) for v in self.dimension_values(dim)]
        if len(set(dim_vals)) == 1:
            return dim_vals[0]
        else:
            return None

    def __getitem__(self, key):
        """
        Multi-dimensional indexing semantics is determined by the list
        of key dimensions. For instance, the first indexing component
        will index the first key dimension.

        After the key dimensions are given, *either* a value dimension
        name may follow (if there are multiple value dimensions) *or*
        deep dimensions may then be listed (for applicable deep
        dimensions).
        """
        return self

    def select(self, selection_specs=None, **kwargs):
        """
        Allows slicing or indexing into the Dimensioned object
        by supplying the dimension and index/slice as key
        value pairs. Select descends recursively through the
        data structure applying the key dimension selection.
        The 'value' keyword allows selecting the
        value dimensions on objects which have any declared.

        The selection may also be selectively applied to
        specific objects by supplying the selection_specs
        as an iterable of type.group.label specs, types or
        functions.
        """

        # Apply all indexes applying on this object
        vdims = self.vdims + ['value'] if self.vdims else []
        kdims = self.kdims
        local_kwargs = {k: v for k, v in kwargs.items() if k in kdims + vdims}

        # Check selection_spec applies
        if selection_specs is not None:
            if not isinstance(selection_specs, (list, tuple)):
                selection_specs = [selection_specs]
            matches = any(self.matches(spec) for spec in selection_specs)
        else:
            matches = True

        # Apply selection to self
        if local_kwargs and matches:
            ndims = self.ndims
            if any(d in self.vdims for d in kwargs):
                ndims = len(self.kdims + self.vdims)
            select = [slice(None) for _ in range(ndims)]
            for dim, val in local_kwargs.items():
                if dim == 'value':
                    select += [val]
                else:
                    if isinstance(val, tuple): val = slice(*val)
                    select[self.get_dimension_index(dim)] = val
            if self._deep_indexable:
                selection = self.get(tuple(select), None)
                if selection is None:
                    selection = self.clone(shared_data=False)
            else:
                selection = self[tuple(select)]
        else:
            selection = self

        if not isinstance(selection, Dimensioned):
            return selection
        elif type(selection) is not type(self) and isinstance(
                selection, Dimensioned):
            # Apply the selection on the selected object of a different type
            dimensions = selection.dimensions() + ['value']
            if any(kw in dimensions for kw in kwargs):
                selection = selection.select(selection_specs, **kwargs)
        elif isinstance(selection, Dimensioned) and selection._deep_indexable:
            # Apply the deep selection on each item in local selection
            items = []
            for k, v in selection.items():
                dimensions = v.dimensions() + ['value']
                if any(kw in dimensions for kw in kwargs):
                    items.append((k, v.select(selection_specs, **kwargs)))
                else:
                    items.append((k, v))
            selection = selection.clone(items)
        return selection

    def dimension_values(self, dimension, expanded=True, flat=True):
        """
        Returns the values along the specified dimension. This method
        must be implemented for all Dimensioned types.
        """
        val = self._cached_constants.get(dimension, None)
        if val:
            return np.array([val])
        else:
            raise Exception("Dimension %s not found in %s." %
                            (dimension, self.__class__.__name__))

    def range(self, dimension, data_range=True):
        """
        Returns the range of values along the specified dimension.

        If data_range is True, the data may be used to try and infer
        the appropriate range. Otherwise, (None,None) is returned to
        indicate that no range is defined.
        """
        dimension = self.get_dimension(dimension)
        if dimension is None:
            return (None, None)
        if None not in dimension.range:
            return dimension.range
        elif data_range:
            if dimension in self.kdims + self.vdims:
                dim_vals = self.dimension_values(dimension.name)
                drange = find_range(dim_vals)
            else:
                dname = dimension.name
                match_fn = lambda x: dname in x.kdims + x.vdims
                range_fn = lambda x: x.range(dname)
                ranges = self.traverse(range_fn, [match_fn])
                drange = max_range(ranges)
            soft_range = [r for r in dimension.soft_range if r is not None]
            if soft_range:
                drange = max_range([drange, soft_range])
        else:
            drange = dimension.soft_range
        if dimension.range[0] is not None:
            return (dimension.range[0], drange[1])
        elif dimension.range[1] is not None:
            return (drange[0], dimension.range[1])
        else:
            return drange

    def __repr__(self):
        return PrettyPrinter.pprint(self)

    def __str__(self):
        return repr(self)

    def __unicode__(self):
        return unicode(PrettyPrinter.pprint(self))

    def __call__(self, options=None, **kwargs):
        return self.opts(options, **kwargs)

    def opts(self, options=None, **kwargs):
        """
        Apply the supplied options to a clone of the object which is
        then returned. Note that if no options are supplied at all,
        all ids are reset.
        """
        from ..util.parser import OptsSpec
        if isinstance(options, basestring):
            try:
                options = OptsSpec.parse(options)
            except SyntaxError:
                options = OptsSpec.parse('{clsname} {options}'.format(
                    clsname=self.__class__.__name__, options=options))

        groups = set(Store.options().groups.keys())
        if kwargs and set(kwargs) <= groups:
            if not all(isinstance(v, dict) for v in kwargs.values()):
                raise Exception(
                    "The %s options must be specified using dictionary groups"
                    % ','.join(repr(k) for k in kwargs.keys()))

            # Check whether the user is specifying targets (such as 'Image.Foo')
            entries = Store.options().children
            targets = [
                k.split('.')[0] in entries for grp in kwargs.values()
                for k in grp
            ]
            if any(targets) and not all(targets):
                raise Exception(
                    "Cannot mix target specification keys such as 'Image' with non-target keywords."
                )
            elif not any(targets):
                # Not targets specified - add current object as target
                sanitized_group = group_sanitizer(self.group)
                if self.label:
                    identifier = ('%s.%s.%s' %
                                  (self.__class__.__name__, sanitized_group,
                                   label_sanitizer(self.label)))
                elif sanitized_group != self.__class__.__name__:
                    identifier = '%s.%s' % (self.__class__.__name__,
                                            sanitized_group)
                else:
                    identifier = self.__class__.__name__

                kwargs = {k: {identifier: v} for k, v in kwargs.items()}

        if options is None and kwargs == {}:
            deep_clone = self.map(lambda x: x.clone(id=None))
        else:
            deep_clone = self.map(lambda x: x.clone(id=x.id))
        StoreOptions.set_options(deep_clone, options, **kwargs)
        return deep_clone
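
# --- Editor's illustrative sketch (not part of the original example) ---
# Minimal usage of the Dimensioned interface through a concrete element,
# assuming a standard HoloViews installation.
import holoviews as hv

curve = hv.Curve([(0, 1), (1, 3), (2, 2)], kdims=['time'], vdims=['amplitude'])
print(curve.kdims, curve.vdims)       # [Dimension('time')] [Dimension('amplitude')]
print(curve.dimensions(label=True))   # ['time', 'amplitude']
print(curve.get_dimension('amplitude'))
print(curve.range('time'))            # (0, 2)
subset = curve.select(time=(0, 1))    # selection along a key dimension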
Code example #11
0
class TemplateGenerator(param.Parameterized):
    """The TemplateGenerator can create templates based on the url query arguments"""

    css_path = param.ClassSelector(doc="A path to custom css", class_=pathlib.Path)
    js_path = param.ClassSelector(doc="A path to custom js", class_=pathlib.Path)

    default_template = param.String(doc="The default template to use", default=DEFAULT_TEMPLATE)
    default_theme = param.String(doc="The default theme to use", default=DEFAULT_THEME)
    templates = param.Dict(doc="A dictionary of template name: template class", default=TEMPLATES)
    themes = param.Dict(
        doc="A dictionary of template name: a dictionary of theme name: theme class", default=THEMES
    )

    def _set_template_css(self, template, theme):
        # remove other site css
        pn.config.raw_css = [
            css for css in pn.config.raw_css if not css.startswith(_TEMPLATE_CSS_ID)
        ]
        if not self.css_path:
            return

        files = [
            "all.css",
            f"all_{theme}.css",
            f"{template}.css",
            f"{template}_{theme}.css",
        ]
        text = ""
        for file in files:
            text = ""
            if not file in pn.state.cache:
                file_css_id = f"/* {file} */\n"
                css_file = self.css_path / file
                if css_file.exists():
                    text = _TEMPLATE_CSS_ID + file_css_id + css_file.read_text()
            else:
                text = pn.state.cache[file]
                pn.state.cache.pop(file)
            if text:
                pn.config.raw_css.append(text)

    def _get_template_js(self, template):
        if not self.js_path:
            return ""
        jss = []
        files = [
            f"{template}.js",
        ]
        for file in files:
            if file not in pn.state.cache:
                file_js_id = f" {file} \n"
                text = _TEMPLATE_JS_ID + file_js_id + (self.js_path / file).read_text()
            else:
                text = pn.state.cache[file]
                pn.state.cache.pop(file)
            jss.append(text)
        return "\n".join(jss)

    @staticmethod
    def _get_params(value, class_):
        logger.debug("_get_params %s %s", value, class_)
        if isinstance(value, class_):
            return value
        if isinstance(value, tuple):
            value = [*value]
        elif not isinstance(value, list):
            value = [value]
        # Important to fx. convert @param.depends functions
        value = [pn.panel(item) for item in value]

        if class_ == pn.layout.ListLike:
            return class_(objects=value)
        if class_ == pn.layout.GridSpec:
            logger.debug(
                "grid %s",
                value,
            )
            grid = class_()
            for index, item in enumerate(value):
                logger.debug("grid item %s: %s", index, item)
                grid[index, :] = item
            return grid

        return value

    def get_template(  # pylint: disable=too-many-arguments, too-complex
        self,
        template: Optional[str] = None,
        theme: Optional[str] = None,
        **params,
    ) -> BasicTemplate:
        """Returns the specified BasicTemplate

        Args:
            template (str, optional): The name of the template. Defaults to TEMPLATE.
            theme (str, optional): The name of the theme. Defaults to THEME.
            **params: Optional parameters

        Returns:
            BasicTemplate: The specified Template
        """
        logger.info("Getting Template")
        logger.info(pn.state.session_args)
        site_parameters = []
        if not template:
            site_parameters.append("template")
        if not theme:
            site_parameters.append("theme")

        if not template:
            template = pn.state.session_args.get("template", self.default_template)
            if isinstance(template, list):
                template = template[0].decode("utf-8")
                template = template.strip("'").strip('"')
        if not theme:
            theme = pn.state.session_args.get("theme", self.default_theme)
            if isinstance(theme, list):
                theme = theme[0].decode("utf-8")
                theme = theme.strip("'").strip('"')
        # pylint: disable=unsubscriptable-object
        template_class = self.templates.get(str(template), self.templates[self.default_template])
        # pylint: enable=unsubscriptable-object
        # To be fixed with PR https://github.com/holoviz/panel/pull/1694
        if "header" in params:
            params["header"] = self._get_params(
                params["header"], template_class.param.header.class_
            )
        if "main" in params:
            params["main"] = self._get_params(params["main"], template_class.param.main.class_)
        if "sidebar" in params:
            params["sidebar"] = self._get_params(
                params["sidebar"], template_class.param.sidebar.class_
            )
        if "modal" in params:
            params["modal"] = self._get_params(params["modal"], template_class.param.modal.class_)

        self._set_template_css(template, theme)
        # pylint: disable=unsubscriptable-object
        template_instance = template_class(
            theme=self.themes.get(str(template), self.themes[self.default_template]).get(
                str(theme), self.default_theme
            ),
            **params,
        )
        # pylint: enable=unsubscriptable-object

        if site_parameters and "fast" not in str(template_class).lower():
            site_settings = TemplateSettings(parameters=site_parameters)
            header = pn.Row(pn.layout.HSpacer(), site_settings.view, sizing_mode="stretch_width")
            template_instance.header.append(header)

        return template_instance
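
# --- Editor's illustrative sketch (not part of the original example) ---
# Hypothetical usage, assuming DEFAULT_TEMPLATE/DEFAULT_THEME, TEMPLATES and
# THEMES are defined elsewhere in this module:
#
#   generator = TemplateGenerator()
#   template = generator.get_template(main=[my_plot], sidebar=[my_widgets])
#   template.servable()
#
# With no explicit template/theme arguments, both are read from the URL
# query string, e.g. ?template=material&theme=dark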
Code example #12
0
File: dimension.py Project: acrosby/holoviews
class LabelledData(param.Parameterized):
    """
    LabelledData is a mix-in class designed to introduce the group and
    label parameters (and corresponding methods) to any class
    containing data. This class assumes that the core data contents
    will be held in the attribute called 'data'.

    Used together, group and label are designed to allow a simple and
    flexible means of addressing data. For instance, if you are
    collecting the heights of people in different demographics, you
    could specify the values of your objects as 'Height' and then use
    the label to specify the (sub)population.

    In this scheme, one object may have the parameters set to
    [group='Height', label='Children'] and another may use
    [group='Height', label='Adults'].

    Note: Another level of specification is implicit in the type (i.e.
    class) of the LabelledData object. A full specification of a
    LabelledData object is therefore given by the tuple
    (<type>, <group>, <label>). This additional level of specification is
    used in the traverse method.

    Any strings can be used for the group and label, but it can be
    convenient to use a capitalized string of alphanumeric characters,
    in which case the keys used for matching in the matches and
    traverse methods will correspond exactly to {type}.{group}.{label}.
    Otherwise the strings provided will be sanitized to be valid
    capitalized Python identifiers, which works fine but can sometimes
    be confusing.
    """

    group = param.String(default='LabelledData',
                         constant=True,
                         doc="""
       A string describing the type of data contained by the object.
       By default this will typically mirror the class name.""")

    label = param.String(default='',
                         constant=True,
                         doc="""
       Optional label describing the data, typically reflecting where
       or how it was measured. The label should allow a specific
       measurement or dataset to be referenced for a given group.""")

    _deep_indexable = False

    def __init__(self, data, id=None, plot_id=None, **params):
        """
        All LabelledData subclasses must supply data to the
        constructor, which will be held on the .data attribute.
        This class also has an id instance attribute, which
        may be set to associate some custom options with the object.
        """
        self.data = data
        self.id = id
        self._plot_id = plot_id or builtins.id(self)
        if isinstance(params.get('label', None), tuple):
            (alias, long_name) = params['label']
            label_sanitizer.add_aliases(**{alias: long_name})
            params['label'] = long_name

        if isinstance(params.get('group', None), tuple):
            (alias, long_name) = params['group']
            group_sanitizer.add_aliases(**{alias: long_name})
            params['group'] = long_name

        super(LabelledData, self).__init__(**params)
        if not group_sanitizer.allowable(self.group):
            raise ValueError("Supplied group %r contains invalid characters." %
                             self.group)
        elif not label_sanitizer.allowable(self.label):
            raise ValueError("Supplied label %r contains invalid characters." %
                             self.label)

    def clone(self,
              data=None,
              shared_data=True,
              new_type=None,
              *args,
              **overrides):
        """
        Returns a clone of the object with matching parameter values
        containing the specified args and kwargs.

        If shared_data is set to True and no data explicitly supplied,
        the clone will share data with the original. May also supply
        a new_type, which will inherit all shared parameters.
        """
        params = dict(self.get_param_values())
        if new_type is None:
            clone_type = self.__class__
        else:
            clone_type = new_type
            new_params = new_type.params()
            params = {k: v for k, v in params.items() if k in new_params}
            if params.get('group') == self.params()['group'].default:
                params.pop('group')
        settings = dict(params, **overrides)
        if 'id' not in settings:
            settings['id'] = self.id

        if data is None and shared_data:
            data = self.data
            settings['plot_id'] = self._plot_id
        # Apply name mangling for __ attribute
        pos_args = getattr(self, '_' + type(self).__name__ + '__pos_params',
                           [])
        return clone_type(
            data, *args,
            **{k: v
               for k, v in settings.items() if k not in pos_args})

    def relabel(self, label=None, group=None, depth=0):
        """
        Assign a new label and/or group to an existing LabelledData
        object, creating a clone of the object with the new settings.
        """
        new_data = self.data
        if (depth > 0) and getattr(self, '_deep_indexable', False):
            new_data = []
            for k, v in self.data.items():
                relabelled = v.relabel(group=group,
                                       label=label,
                                       depth=depth - 1)
                new_data.append((k, relabelled))
        keywords = [('label', label), ('group', group)]
        kwargs = {k: v for k, v in keywords if v is not None}
        return self.clone(new_data, **kwargs)

    def matches(self, spec):
        """
        A specification may be a class, a tuple or a string.
        Equivalent to isinstance if a class is supplied, otherwise
        matching occurs on type, group and label. These may be supplied
        as a tuple of strings or as a single string of the
        form "{type}.{group}.{label}". Matching may be done on {type}
        alone, {type}.{group}, or {type}.{group}.{label}.  The strings
        for the type, group, and label will each be sanitized before
        the match, and so the sanitized versions of those values will
        need to be provided if the match is to succeed.
        """
        if callable(spec) and not isinstance(spec, type): return spec(self)
        elif isinstance(spec, type): return isinstance(self, spec)
        specification = (self.__class__.__name__, self.group, self.label)
        split_spec = tuple(
            spec.split('.')) if not isinstance(spec, tuple) else spec
        split_spec, nocompare = zip(
            *((None, True) if s == '*' or s is None else (s, False)
              for s in split_spec))
        if all(nocompare): return True
        match_fn = itemgetter(*(idx for idx, nc in enumerate(nocompare)
                                if not nc))
        self_spec = match_fn(split_spec)
        unescaped_match = match_fn(
            specification[:len(split_spec)]) == self_spec
        if unescaped_match: return True
        sanitizers = [sanitize_identifier, group_sanitizer, label_sanitizer]
        identifier_specification = tuple(
            fn(ident, escape=False)
            for ident, fn in zip(specification, sanitizers))
        identifier_match = match_fn(
            identifier_specification[:len(split_spec)]) == self_spec
        return identifier_match

    def traverse(self, fn, specs=None, full_breadth=True):
        """
        Traverses any nested LabelledData object (i.e. LabelledData
        objects containing LabelledData objects), applying the
        supplied function to each constituent element that matches the
        supplied specifications. The outputs of these function calls
        are collected and returned in the accumulator list.

        If specs is None, all constituent elements are
        processed. Otherwise, specs must be a list of
        type.group.label specs, types, and functions.
        """
        accumulator = []
        matches = specs is None
        if not matches:
            for spec in specs:
                matches = self.matches(spec)
                if matches: break
        if matches:
            accumulator.append(fn(self))

        # Assumes composite objects are iterables
        if self._deep_indexable:
            for el in self:
                if el is None:
                    continue
                accumulator += el.traverse(fn, specs, full_breadth)
                if not full_breadth: break
        return accumulator

    def map(self, map_fn, specs=None, clone=True):
        """
        Recursively replaces elements using a map function when the
        specification applies.
        """
        if specs and not isinstance(specs, list): specs = [specs]
        applies = specs is None or any(self.matches(spec) for spec in specs)

        if self._deep_indexable:
            deep_mapped = self.clone(shared_data=False) if clone else self
            for k, v in self.items():
                deep_mapped[k] = v.map(map_fn, specs, clone)
            if applies: deep_mapped = map_fn(deep_mapped)
            return deep_mapped
        else:
            return map_fn(self) if applies else self

    def __getstate__(self):
        """
        When pickling, make sure to save the relevant style and
        plotting options as well.
        """
        obj_dict = self.__dict__.copy()
        try:
            if Store.save_option_state and (obj_dict.get('id', None)
                                            is not None):
                custom_key = '_custom_option_%d' % obj_dict['id']
                if custom_key not in obj_dict:
                    obj_dict[custom_key] = {
                        backend: s[obj_dict['id']]
                        for backend, s in Store._custom_options.items()
                        if obj_dict['id'] in s
                    }
            else:
                obj_dict['id'] = None
        except:
            self.warning("Could not pickle custom style information.")
        return obj_dict

    def __setstate__(self, d):
        """
        When unpickled, restore the saved style and plotting options
        to ViewableElement.options.
        """
        d = param_aliases(d)
        try:
            load_options = Store.load_counter_offset is not None
            if load_options:
                matches = [k for k in d if k.startswith('_custom_option')]
                for match in matches:
                    custom_id = int(match.split('_')[-1])
                    if not isinstance(d[match], dict):
                        # Backward compatibility before multiple backends
                        backend_info = {'matplotlib': d[match]}
                    else:
                        backend_info = d[match]
                    for backend, info in backend_info.items():
                        if backend not in Store._custom_options:
                            Store._custom_options[backend] = {}
                        Store._custom_options[backend][
                            Store.load_counter_offset + custom_id] = info

                    d.pop(match)

                if d['id'] is not None:
                    d['id'] += Store.load_counter_offset
                else:
                    d['id'] = None
        except:
            self.warning("Could not unpickle custom style information.")
        self.__dict__.update(d)
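
# --- Editor's illustrative sketch (not part of the original example) ---
# How group/label and the {type}.{group}.{label} matching described above
# behave on a concrete element, assuming a standard HoloViews installation.
import holoviews as hv

adults = hv.Curve([160, 170, 180], group='Height', label='Adults')
print(adults.matches('Curve'))                  # True: matches on type alone
print(adults.matches('Curve.Height'))           # True: type.group
print(adults.matches('Curve.Height.Children'))  # False: label differs
children = adults.relabel(label='Children')     # clone with a new label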
Code example #13
0
File: dimension.py Project: acrosby/holoviews
class Dimension(param.Parameterized):
    """
    Dimension objects are used to specify some important general
    features that may be associated with a collection of values.

    For instance, a Dimension may specify that a set of numeric values
    actually correspond to 'Height' (dimension name), in units of
    meters, with a descriptive label 'Height of adult males'.

    All Dimension objects have a name that identifies them and a label
    containing a suitable description. If the label is not explicitly
    specified it matches the name.

    These two parameters define the core identity of the dimension
    object and must match if two dimension objects are to be considered
    equivalent. All other parameters are considered optional metadata
    and are not used when testing for equality.

    Unlike all the other parameters, these core parameters can be used
    to construct a Dimension object from a tuple. This format is
    sufficient to define an identical Dimension:

    Dimension('a', label='Dimension A') == Dimension(('a', 'Dimension A'))

    Everything else about a dimension is considered to reflect
    non-semantic preferences. Examples include the default value (which
    may be used in a visualization to set an initial slider position),
    how the value is to be rendered as text (which may be used to specify
    the printed floating point precision) or a suitable range of values
    to consider for a particular analysis.

    Units
    -----

    Full unit support with automated conversions is on the HoloViews
    roadmap. Once rich unit objects are supported, the unit (or more
    specifically the type of unit) will be part of the core dimension
    specification used to establish equality.

    Until this feature is implemented, there are two auxiliary
    parameters that hold some partial information about the unit: the
    name of the unit and whether or not it is cyclic. The name of the
    unit is used as part of the pretty-printed representation and
    knowing whether it is cyclic is important for certain operations.
    """

    name = param.String(doc="""
       Short name associated with the Dimension, such as 'height' or
       'weight'. Valid Python identifiers make good names, because they
       can be used conveniently as a keyword in many contexts.""")

    label = param.String(default=None,
                         doc="""
        Unrestricted label used to describe the dimension. A label
        should succinctly describe the dimension and may contain any
        characters, including Unicode and LaTeX expressions.""")

    cyclic = param.Boolean(default=False,
                           doc="""
        Whether the range of this feature is cyclic such that the
        maximum allowed value (defined by the range parameter) is
        continuous with the minimum allowed value.""")

    value_format = param.Callable(default=None,
                                  doc="""
        Formatting function applied to each value before display.""")

    range = param.Tuple(default=(None, None),
                        doc="""
        Specifies the minimum and maximum allowed values for a
        Dimension. None is used to represent an unlimited bound.""")

    soft_range = param.Tuple(default=(None, None),
                             doc="""
        Specifies a minimum and maximum reference value, which
        may be overridden by the data.""")

    type = param.Parameter(default=None,
                           doc="""
        Optional type associated with the Dimension values. The type
        may be an inbuilt constructor (such as int, str, float) or a
        custom class object.""")

    step = param.Number(default=None,
                        doc="""
        Optional floating point step specifying how frequently the
        underlying space should be sampled. May be used to define a
        discrete sampling over the range.""")

    unit = param.String(default=None,
                        allow_None=True,
                        doc="""
        Optional unit string associated with the Dimension. For
        instance, the string 'm' may be used to represent units of meters
        and 's' to represent units of seconds.""")

    values = param.List(default=[],
                        doc="""
        Optional specification of the allowed value set for the
        dimension that may also be used to retain a categorical
        ordering.""")

    # Defines default formatting by type
    type_formatters = {}
    unit_format = ' ({unit})'
    # A dictionary-like mapping of name, (name,) or (name, unit)
    # to a preset Dimension object
    presets = {}

    def __init__(self, spec, **params):
        """
        Initializes the Dimension object with the given name.
        """
        if 'name' in params:
            raise KeyError(
                'Dimension name must only be passed as the positional argument'
            )

        if isinstance(spec, Dimension):
            existing_params = dict(spec.get_param_values())
        elif (spec, params.get('unit', None)) in self.presets.keys():
            preset = self.presets[(str(spec), str(params['unit']))]
            existing_params = dict(preset.get_param_values())
        elif spec in self.presets:
            existing_params = dict(self.presets[spec].get_param_values())
        elif (spec, ) in self.presets:
            existing_params = dict(self.presets[(spec, )].get_param_values())
        else:
            existing_params = {}

        all_params = dict(existing_params, **params)
        if isinstance(spec, tuple):
            name, label = spec
            all_params['name'] = name
            all_params['label'] = label
            if 'label' in params and (label != params['label']):
                self.warning(
                    'Using label as supplied by keyword ({!r}), ignoring '
                    'tuple value {!r}'.format(params['label'], label))
                all_params['label'] = params['label']
        elif isinstance(spec, basestring):
            all_params['name'] = spec
            all_params['label'] = params.get('label', spec)

        if all_params['name'] == '':
            raise ValueError('Dimension name cannot be the empty string')
        if all_params['label'] in ['', None]:
            raise ValueError(
                'Dimension label cannot be None or the empty string')

        values = params.get('values', [])
        if isinstance(values, basestring) and values == 'initial':
            self.warning(
                "The 'initial' string for dimension values is no longer supported."
            )
            values = []

        all_params['values'] = list(unique_array(values))
        super(Dimension, self).__init__(**all_params)

    @property
    def spec(self):
        "Returns the corresponding tuple specification"
        return (self.name, self.label)

    def __call__(self, spec=None, **overrides):
        "Aliased to clone method. To be deprecated in 2.0"
        return self.clone(spec=spec, **overrides)

    def clone(self, spec=None, **overrides):
        """
        Derive a new Dimension that inherits existing parameters
        except for the supplied, explicit overrides
        """
        settings = dict(self.get_param_values(onlychanged=True), **overrides)

        if spec is None:
            spec = (self.name, overrides.get('label', self.label))
        if 'label' in overrides and isinstance(spec, basestring):
            spec = (spec, overrides['label'])
        elif 'label' in overrides and isinstance(spec, tuple):
            self.warning('Using label as supplied by keyword ({!r}), ignoring '
                         'tuple value {!r}'.format(overrides['label'],
                                                   spec[1]))
            spec = (spec[0], overrides['label'])

        return self.__class__(
            spec, **{
                k: v
                for k, v in settings.items() if k not in ['name', 'label']
            })

    def __hash__(self):
        """
        The hash allows Dimension objects to be used as dictionary keys in Python 3.
        """
        return hash(self.spec)

    def __setstate__(self, d):
        """
        Compatibility for pickles before alias attribute was introduced.
        """
        super(Dimension, self).__setstate__(d)
        self.label = self.name

    def __eq__(self, other):
        "Implements equals operator including sanitized comparison."

        if isinstance(other, Dimension):
            return self.spec == other.spec

        # For comparison to strings. Name may be sanitized.
        return other in [self.name, self.label, dimension_sanitizer(self.name)]

    def __ne__(self, other):
        "Implements not equal operator including sanitized comparison."
        return not self.__eq__(other)

    def __lt__(self, other):
        "Dimensions are sorted alphanumerically by name"
        return self.name < other.name if isinstance(
            other, Dimension) else self.name < other

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.pprint()

    @property
    def pprint_label(self):
        "The pretty-printed label string for the Dimension"
        unit = ('' if self.unit is None else type(self.unit)(
            self.unit_format).format(unit=self.unit))
        return bytes_to_unicode(self.label) + bytes_to_unicode(unit)

    def pprint(self):
        changed = dict(self.get_param_values(onlychanged=True))
        if len(set([changed.get(k, k) for k in ['name', 'label']])) == 1:
            return 'Dimension({spec})'.format(spec=repr(self.name))

        ordering = sorted(sorted(changed.keys()),
                          key=lambda k: (-float('inf')
                                         if self.params(k).precedence is None
                                         else self.params(k).precedence))
        kws = ", ".join('%s=%r' % (k, changed[k]) for k in ordering
                        if k != 'name')
        return 'Dimension({spec}, {kws})'.format(spec=repr(self.name), kws=kws)

    def pprint_value(self, value):
        """
        Applies the defined formatting to the value.
        """
        own_type = type(value) if self.type is None else self.type
        formatter = (self.value_format if self.value_format else
                     self.type_formatters.get(own_type))
        if formatter:
            if callable(formatter):
                return formatter(value)
            elif isinstance(formatter, basestring):
                if isinstance(value, dt.datetime):
                    return value.strftime(formatter)
                elif isinstance(value, np.datetime64):
                    return dt64_to_dt(value).strftime(formatter)
                elif re.findall(r"\{(\w+)\}", formatter):
                    return formatter.format(value)
                else:
                    return formatter % value
        return unicode(bytes_to_unicode(value))

    def pprint_value_string(self, value):
        """
        Pretty prints the dimension name and value using the global
        title_format variable, including the unit string (if
        set). Numeric types are printed to the stated rounding level.
        """
        unit = '' if self.unit is None else ' ' + bytes_to_unicode(self.unit)
        value = self.pprint_value(value)
        return title_format.format(name=bytes_to_unicode(self.label),
                                   val=value,
                                   unit=unit)
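A brief, hedged usage sketch of the Dimension API defined above. It assumes the standard holoviews import; the exact pretty-printed output may differ slightly between versions.

import holoviews as hv

# Construct a Dimension from a (name, label) tuple with a unit and a value list.
height = hv.Dimension(('height', 'Height'), unit='m', values=[1.6, 1.7, 1.8])
print(height.spec)                        # ('height', 'Height')
print(height.pprint_value_string(1.75))   # expected to print something like 'Height: 1.75 m'
# clone() derives a new Dimension, overriding only what is supplied.
renamed = height.clone(('height', 'Body height'))
print(renamed.label)                      # 'Body height'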
Code example #14
File: element.py  Project: eddy-ojb/holoviews
class histogram(ElementOperation):
    """
    Returns a Histogram of the input element data, binned into
    num_bins over the bin_range (if specified) along the specified
    dimension.
    """

    bin_range = param.NumericTuple(default=None,
                                   length=2,
                                   doc="""
      Specifies the range within which to compute the bins.""")

    dimension = param.String(default=None,
                             doc="""
      Along which dimension of the Element to compute the histogram.""")

    individually = param.Boolean(default=True,
                                 doc="""
      Specifies whether the histogram will be rescaled for each Element in a UniformNdMapping."""
                                 )

    log = param.Boolean(default=False,
                        doc="""
      Whether to use base 10 logarithmic samples for the bin edges.""")

    mean_weighted = param.Boolean(default=False,
                                  doc="""
      Whether the weighted frequencies are averaged.""")

    normed = param.Boolean(default=True,
                           doc="""
      Whether the histogram frequencies are normalized.""")

    nonzero = param.Boolean(default=False,
                            doc="""
      Whether to use only nonzero values when computing the histogram""")

    num_bins = param.Integer(default=20,
                             doc="""
      Number of bins in the histogram.""")

    weight_dimension = param.String(default=None,
                                    doc="""
       Name of the dimension the weighting should be drawn from""")

    style_prefix = param.String(default=None,
                                allow_None=True,
                                doc="""
      Used for setting a common style for histograms in a HoloMap or AdjointLayout."""
                                )

    def _process(self, view, key=None):
        if self.p.dimension:
            selected_dim = self.p.dimension
        else:
            selected_dim = [d.name for d in view.vdims + view.kdims][0]
        data = np.array(view.dimension_values(selected_dim))
        if self.p.nonzero:
            mask = data > 0
            data = data[mask]
        if self.p.weight_dimension:
            weights = np.array(view.dimension_values(self.p.weight_dimension))
            if self.p.nonzero:
                weights = weights[mask]
        else:
            weights = None
        hist_range = find_minmax((np.nanmin(data), np.nanmax(data)), (0, -float('inf')))\
            if self.p.bin_range is None else self.p.bin_range

        # Avoids range issues including zero bin range and empty bins
        if hist_range == (0, 0):
            hist_range = (0, 1)
        data = data[np.invert(np.isnan(data))]
        if self.p.log:
            bin_min = max([abs(hist_range[0]), data[data > 0].min()])
            edges = np.logspace(np.log10(bin_min), np.log10(hist_range[1]),
                                self.p.num_bins + 1)
        else:
            edges = np.linspace(hist_range[0], hist_range[1],
                                self.p.num_bins + 1)
        normed = False if self.p.mean_weighted and self.p.weight_dimension else self.p.normed
        try:
            hist, edges = np.histogram(data[np.isfinite(data)],
                                       normed=normed,
                                       range=hist_range,
                                       weights=weights,
                                       bins=edges)
            if not normed and self.p.weight_dimension and self.p.mean_weighted:
                hist_mean, _ = np.histogram(data[np.isfinite(data)],
                                            normed=normed,
                                            range=hist_range,
                                            bins=self.p.num_bins)
                hist /= hist_mean
        except:
            hist = np.zeros(self.p.num_bins)

        hist[np.isnan(hist)] = 0

        params = {}
        if self.p.weight_dimension:
            params['vdims'] = [view.get_dimension(self.p.weight_dimension)]
        if view.group != view.__class__.__name__:
            params['group'] = view.group

        return Histogram(hist,
                         edges,
                         kdims=[view.get_dimension(selected_dim)],
                         label=view.label,
                         **params)
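A minimal, hedged usage sketch for the histogram operation above, assuming it is exposed as holoviews.operation.histogram (as in recent HoloViews releases):

import numpy as np
import holoviews as hv
from holoviews.operation import histogram

points = hv.Points(np.random.randn(1000, 2))
# Bin the 'y' dimension into 30 bins over an explicit range.
hist = histogram(points, dimension='y', num_bins=30, bin_range=(-3.0, 3.0))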
Code example #15
class Ellipse(BaseShape):
    """
    Draw an axis-aligned ellipse at the specified x,y position with
    the given orientation.

    The simplest (default) Ellipse is a circle, specified using:

    Ellipse(x,y, diameter)

    A circle is a degenerate ellipse where the width and height are
    equal. To specify these explicitly, you can use:

    Ellipse(x,y, (width, height))

    There is also an aspect parameter allowing you to generate an ellipse
    by specifying a multiplying factor that will be applied to the
    width only.

    Note that as a subclass of Path, internally an Ellipse is a
    sequence of (x,y) sample positions. Ellipse could also be
    implemented as an annotation that uses a dedicated ellipse artist.
    """
    x = param.Number(default=0, doc="The x-position of the ellipse center.")

    y = param.Number(default=0, doc="The y-position of the ellipse center.")

    width = param.Number(default=1, doc="The width of the ellipse.")

    height = param.Number(default=1, doc="The height of the ellipse.")

    orientation = param.Number(default=0,
                               doc="""
       Orientation in the Cartesian coordinate system, the
       counterclockwise angle in radians between the first axis and the
       horizontal.""")

    aspect = param.Number(default=1.0,
                          doc="""
       Optional multiplier applied to the diameter to compute the width
       in cases where only the diameter value is set.""")

    samples = param.Number(default=100,
                           doc="The sample count used to draw the ellipse.")

    group = param.String(default='Ellipse',
                         constant=True,
                         doc="The assigned group name.")

    __pos_params = ['x', 'y', 'height']

    def __init__(self, x, y, spec, **params):

        if isinstance(spec, tuple):
            if 'aspect' in params:
                raise ValueError(
                    'Aspect parameter not supported when supplying '
                    '(width, height) specification.')
            (width, height) = spec
        else:
            width, height = params.get('width', spec), spec

        params['width'] = params.get('width', width)
        super(Ellipse, self).__init__(x=x, y=y, height=height, **params)
        angles = np.linspace(0, 2 * np.pi, self.samples)
        half_width = (self.width * self.aspect) / 2.0
        half_height = self.height / 2.0
        #create points
        ellipse = np.array(
            list(zip(half_width * np.sin(angles),
                     half_height * np.cos(angles))))
        #rotate ellipse and add offset
        rot = np.array([[np.cos(self.orientation), -np.sin(self.orientation)],
                        [np.sin(self.orientation),
                         np.cos(self.orientation)]])
        self.data = [
            np.tensordot(rot, ellipse.T, axes=[1, 0]).T + np.array([x, y])
        ]
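A short, hedged usage sketch for the Ellipse element above (assuming the standard holoviews namespace); note that in this implementation the aspect multiplier scales the width:

import numpy as np
import holoviews as hv

circle = hv.Ellipse(0, 0, 1)                # circle with diameter 1 centred at the origin
ellipse = hv.Ellipse(0, 0, (2, 1))          # explicit (width, height) specification
tilted = hv.Ellipse(0, 0, 2, aspect=0.5,    # diameter plus an aspect multiplier on the width
                    orientation=np.pi / 4)  # rotated 45 degrees counterclockwise
overlay = circle * ellipse * tilted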
Code example #16
File: annotation.py  Project: zzwei1/holoviews
class Annotation(Element2D):
    """
    An Annotation is a special type of element that is designed to be
    overlaid on top of any arbitrary 2D element. Annotations have
    neither key nor value dimensions allowing them to be overlaid over
    any type of data.

    Note that one or more Annotations *can* be displayed without being
    overlaid on top of any other data. In such instances (by default)
    they will be displayed using the unit axis limits (0.0-1.0 in both
    directions) unless an explicit 'extents' parameter is
    supplied. The extents of the bottom Annotation in the Overlay are
    used when multiple Annotations are displayed together.
    """

    kdims = param.List(default=[Dimension('x'), Dimension('y')],
                       bounds=(2, 2))

    group = param.String(default='Annotation', constant=True)

    _auxiliary_component = True

    def __init__(self, data, **params):
        super(Annotation, self).__init__(data, **params)

    def __len__(self):
        return 1

    def __getitem__(self, key):
        if key in self.dimensions():
            return self.dimension_values(key)
        if not isinstance(key, tuple) or len(key) == 1:
            key = (key, slice(None))
        elif len(key) == 0:
            return self.clone()
        if not all(isinstance(k, slice) for k in key):
            raise KeyError("%s only supports slice indexing" %
                           self.__class__.__name__)
        xkey, ykey = tuple(key[:len(self.kdims)])
        xstart, xstop = xkey.start, xkey.stop
        ystart, ystop = ykey.start, ykey.stop
        return self.clone(self.data, extents=(xstart, ystart, xstop, ystop))


    def dimension_values(self, dimension, expanded=True, flat=True):
        index = self.get_dimension_index(dimension)
        if index == 0:
            return np.array([self.data if np.isscalar(self.data) else self.data[index]])
        elif index == 1:
            return [] if np.isscalar(self.data) else np.array([self.data[1]])
        else:
            return super(Annotation, self).dimension_values(dimension)

    # Note: This version of clone is identical in path.BaseShape
    # Consider implementing a mix-in class if it is needed again.
    def clone(self, *args, **overrides):
        if len(args) == 1 and isinstance(args[0], tuple):
            args = args[0]
        # Apply name mangling for __ attribute
        pos_args = getattr(self, '_' + type(self).__name__ + '__pos_params', [])
        settings = {k: v for k, v in dict(self.get_param_values(), **overrides).items()
                    if k not in pos_args[:len(args)]}
        if 'id' not in settings:
            settings['id'] = self.id
        return self.__class__(*args, **settings)
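Concrete Annotation subclasses are typically overlaid on other elements; a hedged sketch using the standard VLine, HLine and Text subclasses:

import numpy as np
import holoviews as hv

xs = np.linspace(0, 1, 100)
curve = hv.Curve((xs, np.sin(10 * xs)))
# Annotations carry no data dimensions of their own, so they adopt the axes of the overlay.
annotated = curve * hv.VLine(0.5) * hv.HLine(0.0) * hv.Text(0.5, 0.9, 'region of interest')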
Code example #17
class FileBrowser(param.Parameterized):
    """
    """
    path = param.ClassSelector(Path, precedence=-1)
    path_text = param.String(label='', precedence=0.3)
    home = param.Action(lambda self: self.go_home(), label='🏠', precedence=0.1)
    up = param.Action(lambda self: self.move_up(), label='⬆️', precedence=0.2)
    refresh_control = param.Action(lambda self: self.refresh(),
                                   label='🔄',
                                   precedence=0.25)
    callback = param.Action(lambda x: None, label='Select', precedence=0.4)
    file_listing = param.ListSelector(default=[], label='', precedence=0.5)
    patterns = param.List(precedence=-1, default=['*'])
    show_hidden = param.Boolean(default=False,
                                label='Show Hidden Files',
                                precedence=0.35)

    def __init__(self, delayed_init=False, **params):
        self.delayed_init = delayed_init
        super().__init__(**params)
        self._initialize_path()

    def init(self):
        self.delayed_init = False
        self._initialize_path()

    def _initialize_path(self):
        if self.delayed_init:
            return

        if self.path_text:
            self.validate()

        if not self.path:
            self.go_home()
        else:
            self.make_options()

    def _new_path(self, path):
        return Path(path)

    @property
    def controls(self):
        return ['home', 'up', 'refresh_control']

    @property
    def control_styles(self):
        styles = {c: {'width': 25} for c in self.controls}

        styles.update(
            path_text={'width_policy': 'max'},
            callback={
                'width': 100,
                'button_type': 'success'
            },
        )
        return styles

    @property
    def panel(self):
        return pn.Column(
            pn.Param(
                self,
                parameters=self.controls + ['path_text', 'callback'],
                widgets=self.control_styles,
                default_layout=pn.Row,
                width_policy='max',
                show_name=False,
                margin=0,
            ),
            self.param.show_hidden,
            pn.Param(self.param.file_listing,
                     widgets={'file_listing': {
                         'height': 200
                     }},
                     width_policy='max'),
            width_policy='max',
            margin=0,
        )

    @property
    def value(self):
        if self.file_listing:
            return [str(self.path / v) for v in self.file_listing]
        else:
            return [self.path.as_posix()]

    def go_home(self):
        self.path = Path.cwd()

    def move_up(self):
        self.path = self.path.parent

    @param.depends('file_listing', watch=True)
    def move_down(self):
        for filename in self.file_listing:
            fn = self.path / filename
            if fn.is_dir():
                self.path = fn
                self.make_options()
            if self.callback:
                self.callback(True)

    def refresh(self):
        self.file_listing = ['.']

    @param.depends('path_text', watch=True)
    def validate(self):
        """Check that inputted path is valid - set validator accordingly"""
        path = self._new_path(self.path_text)
        if path and path.is_dir():
            self.path = path
        elif path and path.is_file():
            self.path = path.parent
        else:
            log.warning(f'Invalid Directory: {path}')

    @param.depends('path', 'show_hidden', watch=True)
    def make_options(self):
        self.path_text = self.path.as_posix()
        selected = []
        try:
            selected = [
                p.name + '/' for p in self.path.glob('*') if p.is_dir()
            ]
            for pattern in self.patterns:
                selected.extend([
                    p.name for p in self.path.glob(pattern) if not p.is_dir()
                ])
            if not self.show_hidden:
                selected = [p for p in selected if not str(p).startswith('.')]
        except Exception as e:
            log.exception(str(e))

        self.file_listing = []
        self.param.file_listing.objects = sorted(selected)
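A hedged usage sketch of the FileBrowser class above, assuming Panel is installed and the class is importable from the surrounding module:

import panel as pn
pn.extension()

browser = FileBrowser(patterns=['*.py', '*.csv'])  # starts in the current working directory
layout = browser.panel                             # the Panel layout assembled above
# browser.value holds the selected paths, or the current directory if nothing is selected.
# pn.serve(layout)                                 # uncomment to serve as a standalone app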
Code example #18
class ElementPlot(PlotlyPlot, GenericElementPlot):

    aspect = param.Parameter(default='cube',
                             doc="""
        The aspect ratio mode of the plot. By default, a plot may
        select its own appropriate aspect ratio but sometimes it may
        be necessary to force a square aspect ratio (e.g. to display
        the plot as an element of a grid). The modes 'auto' and
        'equal' correspond to the axis modes of the same name in
        matplotlib; a numeric value may also be passed.""")

    bgcolor = param.ClassSelector(class_=(str, tuple),
                                  default=None,
                                  doc="""
        If set bgcolor overrides the background color of the axis.""")

    invert_axes = param.ObjectSelector(default=False,
                                       doc="""
        Inverts the axes of the plot. Note that this parameter may not
        always be respected by all plots but should be respected by
        adjoined plots when appropriate.""")

    invert_xaxis = param.Boolean(default=False,
                                 doc="""
        Whether to invert the plot x-axis.""")

    invert_yaxis = param.Boolean(default=False,
                                 doc="""
        Whether to invert the plot y-axis.""")

    invert_zaxis = param.Boolean(default=False,
                                 doc="""
        Whether to invert the plot z-axis.""")

    labelled = param.List(default=['x', 'y', 'z'],
                          doc="""
        Whether to label the 'x', 'y' and 'z' axes.""")

    logx = param.Boolean(default=False,
                         doc="""
         Whether to apply log scaling to the x-axis of the Chart.""")

    logy = param.Boolean(default=False,
                         doc="""
         Whether to apply log scaling to the y-axis of the Chart.""")

    logz = param.Boolean(default=False,
                         doc="""
         Whether to apply log scaling to the z-axis of the Chart.""")

    margins = param.NumericTuple(default=(50, 50, 50, 50),
                                 doc="""
         Margins in pixel values specified as a tuple of the form
         (left, bottom, right, top).""")

    show_legend = param.Boolean(default=False,
                                doc="""
        Whether to show legend for the plot.""")

    xaxis = param.ObjectSelector(
        default='bottom',
        objects=['top', 'bottom', 'bare', 'top-bare', 'bottom-bare', None],
        doc="""
        Whether and where to display the xaxis, bare options allow suppressing
        all axis labels including ticks and xlabel. Valid options are 'top',
        'bottom', 'bare', 'top-bare' and 'bottom-bare'.""")

    xticks = param.Parameter(default=None,
                             doc="""
        Ticks along x-axis specified as an integer, explicit list of
        tick locations, or a list of tuples containing the locations and labels.""")

    yaxis = param.ObjectSelector(
        default='left',
        objects=['left', 'right', 'bare', 'left-bare', 'right-bare', None],
        doc="""
        Whether and where to display the yaxis, bare options allow suppressing
        all axis labels including ticks and ylabel. Valid options are 'left',
        'right', 'bare', 'left-bare' and 'right-bare'.""")

    yticks = param.Parameter(default=None,
                             doc="""
        Ticks along y-axis specified as an integer, explicit list of
        tick locations, or a list of tuples containing the locations and labels.""")

    zlabel = param.String(default=None,
                          doc="""
        An explicit override of the z-axis label, if set takes precedence
        over the dimension label.""")

    zticks = param.Parameter(default=None,
                             doc="""
        Ticks along z-axis specified as an integer, explicit list of
        tick locations, or a list of tuples containing the locations and labels.""")

    trace_kwargs = {}

    _style_key = None

    # Whether vectorized styles are applied per trace
    _per_trace = False

    # Declare which styles cannot be mapped to a non-scalar dimension
    _nonvectorized_styles = []

    def initialize_plot(self, ranges=None):
        """
        Initializes a new plot object with the last available frame.
        """
        # Get element key and ranges for frame
        fig = self.generate_plot(self.keys[-1], ranges)
        self.drawn = True
        return fig

    def generate_plot(self, key, ranges, element=None):
        if element is None:
            element = self._get_frame(key)

        if element is None:
            return self.handles['fig']

        # Set plot options
        plot_opts = self.lookup_options(element, 'plot').options
        self.set_param(
            **{k: v
               for k, v in plot_opts.items() if k in self.params()})

        # Get ranges
        ranges = self.compute_ranges(self.hmap, key, ranges)
        ranges = util.match_spec(element, ranges)

        # Get style
        self.style = self.lookup_options(element, 'style')
        style = self.style[self.cyclic_index]

        # Get data and options and merge them
        data = self.get_data(element, ranges, style)
        opts = self.graph_options(element, ranges, style)
        graphs = []
        for i, d in enumerate(data):
            # Initialize graph
            graph = self.init_graph(d, opts, index=i)
            graphs.append(graph)
        self.handles['graphs'] = graphs

        # Initialize layout
        layout = self.init_layout(key, element, ranges)
        self.handles['layout'] = layout

        # Create figure and return it
        self.drawn = True
        fig = dict(data=graphs, layout=layout)
        self.handles['fig'] = fig
        return fig

    def graph_options(self, element, ranges, style):
        if self.overlay_dims:
            legend = ', '.join([
                d.pprint_value_string(v) for d, v in self.overlay_dims.items()
            ])
        else:
            legend = element.label

        opts = dict(showlegend=self.show_legend,
                    legendgroup=element.group,
                    name=legend,
                    **self.trace_kwargs)

        if self._style_key is not None:
            styles = self._apply_transforms(element, ranges, style)
            opts[self._style_key] = {
                STYLE_ALIASES.get(k, k): v
                for k, v in styles.items()
            }
        else:
            opts.update({
                STYLE_ALIASES.get(k, k): v
                for k, v in style.items() if k != 'cmap'
            })

        return opts

    def init_graph(self, data, options, index=0):
        trace = dict(options)
        for k, v in data.items():
            if k in trace and isinstance(trace[k], dict):
                trace[k].update(v)
            else:
                trace[k] = v

        if self._style_key and self._per_trace:
            vectorized = {
                k: v
                for k, v in options[self._style_key].items()
                if isinstance(v, np.ndarray)
            }
            trace[self._style_key] = dict(trace[self._style_key])
            for s, val in vectorized.items():
                trace[self._style_key][s] = val[index]
        return trace

    def get_data(self, element, ranges, style):
        return []

    def get_aspect(self, xspan, yspan):
        """
        Computes the aspect ratio of the plot
        """
        return self.width / self.height

    def _get_axis_dims(self, element):
        """Returns the dimensions corresponding to each axis.

        Should return a list of dimensions or list of lists of
        dimensions, which will be formatted to label the axis
        and to link axes.
        """
        dims = element.dimensions()[:3]
        pad = [None] * max(3 - len(dims), 0)
        return dims + pad

    def _apply_transforms(self, element, ranges, style):
        new_style = dict(style)
        for k, v in dict(style).items():
            if isinstance(v, util.basestring):
                if k == 'marker' and v in 'xsdo':
                    continue
                elif v in element:
                    v = dim(v)
                elif any(d == v for d in self.overlay_dims):
                    v = dim([d for d in self.overlay_dims if d == v][0])

            if not isinstance(v, dim):
                continue
            elif (not v.applies(element)
                  and v.dimension not in self.overlay_dims):
                new_style.pop(k)
                self.warning(
                    'Specified %s dim transform %r could not be applied, as not all '
                    'dimensions could be resolved.' % (k, v))
                continue

            if len(v.ops) == 0 and v.dimension in self.overlay_dims:
                val = self.overlay_dims[v.dimension]
            else:
                val = v.apply(element, ranges=ranges, flat=True)

            if (not util.isscalar(val) and len(util.unique_array(val)) == 1
                    and not 'color' in k):
                val = val[0]

            if not util.isscalar(val):
                if k in self._nonvectorized_styles:
                    element = type(element).__name__
                    raise ValueError(
                        'Mapping a dimension to the "{style}" '
                        'style option is not supported by the '
                        '{element} element using the {backend} '
                        'backend. To map the "{dim}" dimension '
                        'to the {style} use a groupby operation '
                        'to overlay your data along the dimension.'.format(
                            style=k,
                            dim=v.dimension,
                            element=element,
                            backend=self.renderer.backend))

            # If color is not valid colorspec add colormapper
            numeric = isinstance(val, np.ndarray) and val.dtype.kind in 'uifMm'
            if ('color' in k and isinstance(val, np.ndarray) and numeric):
                copts = self.get_color_opts(v, element, ranges, style)
                new_style.pop('cmap', None)
                new_style.update(copts)
            new_style[k] = val
        return new_style

    def init_layout(self, key, element, ranges):
        el = element.traverse(lambda x: x, [Element])
        el = el[0] if el else element

        extent = self.get_extents(element, ranges)

        if len(extent) == 4:
            l, b, r, t = extent
        else:
            l, b, z0, r, t, z1 = extent

        options = {}

        dims = self._get_axis_dims(el)
        if len(dims) > 2:
            xdim, ydim, zdim = dims
        else:
            xdim, ydim = dims
            zdim = None
        xlabel, ylabel, zlabel = self._get_axis_labels(dims)

        if self.invert_axes:
            xlabel, ylabel = ylabel, xlabel
            ydim, xdim = xdim, ydim
            l, b, r, t = b, l, t, r

        if 'x' not in self.labelled:
            xlabel = ''
        if 'y' not in self.labelled:
            ylabel = ''
        if 'z' not in self.labelled:
            zlabel = ''

        if xdim:
            xrange = [r, l] if self.invert_xaxis else [l, r]
            xaxis = dict(range=xrange, title=xlabel)
            if self.logx:
                xaxis['type'] = 'log'
            self._get_ticks(xaxis, self.xticks)
        else:
            xaxis = {}

        if ydim:
            yrange = [t, b] if self.invert_yaxis else [b, t]
            yaxis = dict(range=yrange, title=ylabel)
            if self.logy:
                yaxis['type'] = 'log'
            self._get_ticks(yaxis, self.yticks)
        else:
            yaxis = {}

        if self.projection == '3d':
            scene = dict(xaxis=xaxis, yaxis=yaxis)
            if zdim:
                zrange = [z1, z0] if self.invert_zaxis else [z0, z1]
                zaxis = dict(range=zrange, title=zlabel)
                if self.logz:
                    zaxis['type'] = 'log'
                self._get_ticks(zaxis, self.zticks)
                scene['zaxis'] = zaxis
            if self.aspect == 'cube':
                scene['aspectmode'] = 'cube'
            else:
                scene['aspectmode'] = 'manual'
                scene['aspectratio'] = self.aspect
            options['scene'] = scene
        else:
            l, b, r, t = self.margins
            options['xaxis'] = xaxis
            options['yaxis'] = yaxis
            options['margin'] = dict(l=l, r=r, b=b, t=t, pad=4)

        return dict(width=self.width,
                    height=self.height,
                    title=self._format_title(key, separator=' '),
                    plot_bgcolor=self.bgcolor,
                    **options)

    def _get_ticks(self, axis, ticker):
        axis_props = {}
        if isinstance(ticker, (tuple, list)):
            if all(isinstance(t, tuple) for t in ticker):
                ticks, labels = zip(*ticker)
                labels = [
                    l if isinstance(l, util.basestring) else str(l)
                    for l in labels
                ]
                axis_props['tickvals'] = ticks
                axis_props['ticktext'] = labels
            else:
                axis_props['tickvals'] = ticker
            axis.update(axis_props)

    def update_frame(self, key, ranges=None, element=None):
        """
        Updates an existing plot with data corresponding
        to the key.
        """
        self.generate_plot(key, ranges, element)
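A hedged sketch of how a concrete Plotly element plot could hook into this base class; the class name, trace type and style key below are illustrative assumptions rather than part of the original code:

class ScatterSketchPlot(ElementPlot):
    # Hypothetical example subclass; the names and options are assumptions.

    trace_kwargs = {'type': 'scatter', 'mode': 'markers'}  # merged into every trace by graph_options
    _style_key = 'marker'                                  # style options are nested under 'marker'
    _nonvectorized_styles = ['alpha']                      # styles that may not map to a dimension

    def get_data(self, element, ranges, style):
        # Return one dict per trace; generate_plot merges it with the shared options.
        x, y = (element.dimension_values(i) for i in range(2))
        return [{'x': x, 'y': y}]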
Code example #19
class FileViewer(param.Parameterized):
    update_btn = param.Action(lambda self: self.get_file_contents(),
                              label='Update',
                              precedence=3)
    n = param.Integer(default=500, bounds=(0, 10_000), precedence=2)
    cmd = param.ObjectSelector(default='head',
                               objects=['head', 'tail'],
                               label='Command',
                               precedence=1)
    file_select = param.ClassSelector(SelectFile, default=SelectFile())
    file_path = param.String()
    file_contents = param.String()
    uit_client = param.ClassSelector(Client)

    @param.depends('uit_client', watch=True)
    def configure_file_selector(self):
        if self.uit_client.connected:
            file_browser = HpcFileBrowser(uit_client=self.uit_client,
                                          delayed_init=True)
            self.file_select = SelectFile(file_browser=file_browser)
            self.file_select.toggle()
            self.configure_path()

            self.file_select.param.watch(self.get_file_contents, 'file_path')

    @param.depends('file_path', watch=True)
    def configure_path(self):
        self.file_path = self.file_path or str(self.uit_client.WORKDIR)
        self.file_select.file_browser.path_text = self.file_path
        self.file_select.update_file(True)

    def get_file_contents(self, event=None):
        if self.uit_client.connected:
            try:
                self.file_contents = self.uit_client.call(
                    f'{self.cmd} -n {self.n} {self.file_select.file_path}')
            except Exception as e:
                log.debug(e)
                # self.file_contents = f'ERROR!: {e}'
                self.file_contents = ''
            self.param.trigger('update_btn')

    @param.depends('update_btn')
    def view(self):
        file_path = self.file_select.file_path
        return pn.Column(
            pn.widgets.TextInput(value=file_path, disabled=True),
            pn.widgets.Ace(value=self.file_contents,
                           width_policy='max',
                           height_policy='max',
                           height=1000,
                           readonly=True,
                           filename=file_path),
            width_policy='max',
            height_policy='max',
            max_height=1000,
        )

    def panel(self):
        return pn.Column(
            self.file_select.panel,
            pn.WidgetBox(
                pn.Param(
                    self,
                    parameters=['cmd', 'n', 'update_btn'],
                    widgets={
                        'cmd': {
                            'width': 100
                        },
                        'n':
                        pn.widgets.Spinner(value=self.n,
                                           width=100,
                                           name=self.param.n.label),
                        'update_btn': {
                            'button_type': 'primary',
                            'width': 100,
                            'align': 'end'
                        }
                    },
                    default_layout=pn.Row,
                    show_name=False,
                ),
                width=400,
            ),
            self.view,
            width_policy='max',
            height_policy='max',
            max_height=1000,
        )
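A hedged usage sketch of FileViewer; it assumes an already-connected uit Client instance (here called client), whose setup is outside the scope of this snippet:

import panel as pn
pn.extension('ace')

viewer = FileViewer(uit_client=client)  # `client` is an authenticated, connected Client (assumption)
viewer.cmd = 'tail'                     # show the last n lines instead of the first
viewer.n = 200
app = viewer.panel()
# pn.serve(app)                         # uncomment to serve the viewer as a standalone app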
Code example #20
class interactive(PaneBase):

    default_layout = param.ClassSelector(default=Column,
                                         class_=(Panel),
                                         is_instance=False)

    manual_update = param.Boolean(default=False,
                                  doc="""
        Whether to update manually by clicking on button.""")

    manual_name = param.String(default='Run Interact')

    def __init__(self, object, params={}, **kwargs):
        if signature is None:
            raise ImportError(
                'interact requires either a recent Python version '
                '(>=3.3) or IPython to inspect function signatures.')

        super(interactive, self).__init__(object, **params)

        new_kwargs = self.find_abbreviations(kwargs)
        # Before we proceed, let's make sure that the user has passed a set of args+kwargs
        # that will lead to a valid call of the function. This protects against unspecified
        # and doubly-specified arguments.
        try:
            check_argspec(object)
        except TypeError:
            # if we can't inspect, we can't validate
            pass
        else:
            getcallargs(object, **{n: v for n, v, _ in new_kwargs})

        widgets = self.widgets_from_abbreviations(new_kwargs)
        if self.manual_update:
            widgets.append(('manual', Button(name=self.manual_name)))
        self._widgets = OrderedDict(widgets)
        self._pane = panel(self.object(**self.kwargs), name=self.name)
        self._inner_layout = Row(self._pane)
        widgets = [
            widget for _, widget in widgets if isinstance(widget, Widget)
        ]
        if 'name' in params:
            widgets.insert(0, HTML('<h2>%s</h2>' % self.name))
        self.widget_box = Column(*widgets)
        self.layout.objects = [self.widget_box, self._inner_layout]
        self._link_widgets()

    #----------------------------------------------------------------
    # Model API
    #----------------------------------------------------------------

    def _get_model(self, doc, root=None, parent=None, comm=None):
        return self._inner_layout._get_model(doc, root, parent, comm)

    #----------------------------------------------------------------
    # Callback API
    #----------------------------------------------------------------

    def _synced_params(self):
        return []

    def _link_widgets(self):
        if self.manual_update:
            widgets = [('manual', self._widgets['manual'])]
        else:
            widgets = self._widgets.items()

        for name, widget in widgets:

            def update_pane(change):
                # Try updating existing pane
                new_object = self.object(**self.kwargs)
                pane_type = self.get_pane_type(new_object)
                if type(self._pane) is pane_type:
                    if isinstance(new_object, (PaneBase, Panel)):
                        new_params = {
                            k: v
                            for k, v in new_object.get_param_values()
                            if k != 'name'
                        }
                        self._pane.param.set_param(**new_params)
                    else:
                        self._pane.object = new_object
                    return

                # Replace pane entirely
                self._pane = panel(new_object)
                self._inner_layout[0] = self._pane

            pname = 'clicks' if name == 'manual' else 'value'
            watcher = widget.param.watch(update_pane, pname)
            self._callbacks.append(watcher)

    def _cleanup(self, root):
        self._inner_layout._cleanup(root)
        super(interactive, self)._cleanup(root)

    #----------------------------------------------------------------
    # Public API
    #----------------------------------------------------------------

    @property
    def kwargs(self):
        return {
            k: widget.value
            for k, widget in self._widgets.items() if k != 'manual'
        }

    def signature(self):
        return signature(self.object)

    def find_abbreviations(self, kwargs):
        """Find the abbreviations for the given function and kwargs.
        Return (name, abbrev, default) tuples.
        """
        new_kwargs = []
        try:
            sig = self.signature()
        except (ValueError, TypeError):
            # can't inspect, no info from function; only use kwargs
            return [(key, value, value) for key, value in kwargs.items()]

        for parameter in sig.parameters.values():
            for name, value, default in _yield_abbreviations_for_parameter(
                    parameter, kwargs):
                if value is empty:
                    raise ValueError(
                        'cannot find widget or abbreviation for argument: {!r}'
                        .format(name))
                new_kwargs.append((name, value, default))
        return new_kwargs

    def widgets_from_abbreviations(self, seq):
        """Given a sequence of (name, abbrev, default) tuples, return a sequence of Widgets."""
        result = []
        for name, abbrev, default in seq:
            if isinstance(abbrev, fixed):
                widget = abbrev
            else:
                widget = self.widget_from_abbrev(abbrev, name, default)
            if not (isinstance(widget, Widget) or isinstance(widget, fixed)):
                if widget is None:
                    continue
                else:
                    raise TypeError("{!r} is not a ValueWidget".format(widget))
            result.append((name, widget))
        return result

    @classmethod
    def applies(cls, object):
        return isinstance(object, types.FunctionType)

    @classmethod
    def widget_from_abbrev(cls, abbrev, name, default=empty):
        """Build a ValueWidget instance given an abbreviation or Widget."""
        if isinstance(abbrev, Widget):
            return abbrev

        if isinstance(abbrev, tuple):
            widget = cls.widget_from_tuple(abbrev, name, default)
            if default is not empty:
                try:
                    widget.value = default
                except Exception:
                    # ignore failure to set default
                    pass
            return widget

        # Try single value
        widget = cls.widget_from_single_value(abbrev, name)
        if widget is not None:
            return widget

        # Something iterable (list, dict, generator, ...). Note that str and
        # tuple should be handled before, that is why we check this case last.
        if isinstance(abbrev, Iterable):
            widget = cls.widget_from_iterable(abbrev, name)
            if default is not empty:
                try:
                    widget.value = default
                except Exception:
                    # ignore failure to set default
                    pass
            return widget

        # No idea...
        return fixed(abbrev)

    @staticmethod
    def widget_from_single_value(o, name):
        """Make widgets from single values, which can be used as parameter defaults."""
        if isinstance(o, string_types):
            return TextInput(value=as_unicode(o), name=name)
        elif isinstance(o, bool):
            return Checkbox(value=o, name=name)
        elif isinstance(o, Integral):
            min, max, value = _get_min_max_value(None, None, o)
            return IntSlider(value=o, start=min, end=max, name=name)
        elif isinstance(o, Real):
            min, max, value = _get_min_max_value(None, None, o)
            return FloatSlider(value=o, start=min, end=max, name=name)
        else:
            return None

    @staticmethod
    def widget_from_tuple(o, name, default=empty):
        """Make widgets from a tuple abbreviation."""
        int_default = (default is empty or isinstance(default, int))
        if _matches(o, (Real, Real)):
            min, max, value = _get_min_max_value(o[0], o[1])
            if all(isinstance(_, Integral) for _ in o) and int_default:
                cls = IntSlider
            else:
                cls = FloatSlider
            return cls(value=value, start=min, end=max, name=name)
        elif _matches(o, (Real, Real, Real)):
            step = o[2]
            if step <= 0:
                raise ValueError("step must be >= 0, not %r" % step)
            min, max, value = _get_min_max_value(o[0], o[1], step=step)
            if all(isinstance(_, Integral) for _ in o) and int_default:
                cls = IntSlider
            else:
                cls = FloatSlider
            return cls(value=value, start=min, end=max, step=step, name=name)
        elif _matches(o, (Real, Real, Real, Real)):
            step = o[2]
            if step <= 0:
                raise ValueError("step must be >= 0, not %r" % step)
            min, max, value = _get_min_max_value(o[0],
                                                 o[1],
                                                 value=o[3],
                                                 step=step)
            if all(isinstance(_, Integral) for _ in o):
                cls = IntSlider
            else:
                cls = FloatSlider
            return cls(value=value, start=min, end=max, step=step, name=name)
        elif len(o) == 4:
            min, max, value = _get_min_max_value(o[0], o[1], value=o[3])
            if all(isinstance(_, Integral) for _ in [o[0], o[1], o[3]]):
                cls = IntSlider
            else:
                cls = FloatSlider
            return cls(value=value, start=min, end=max, name=name)

    @staticmethod
    def widget_from_iterable(o, name):
        """Make widgets from an iterable. This should not be done for
        a string or tuple."""
        # Select expects a dict or list, so we convert an arbitrary
        # iterable to either of those.
        values = list(o.values()) if isinstance(o, Mapping) else list(o)
        widget_type = DiscreteSlider if all(
            param._is_number(v) for v in values) else Select
        if isinstance(o, (list, dict)):
            return widget_type(options=o, name=name)
        elif isinstance(o, Mapping):
            return widget_type(options=list(o.items()), name=name)
        else:
            return widget_type(options=list(o), name=name)

    # Return a factory for interactive functions
    @classmethod
    def factory(cls):
        options = dict(manual_update=False, manual_name="Run Interact")
        return _InteractFactory(cls, options)
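In practice the interactive pane is constructed through panel.interact, which infers widgets from the function's argument abbreviations; a minimal hedged sketch:

import panel as pn
pn.extension()

def model(amplitude=1.0, steps=10, title='sine'):
    return f'{title}: amplitude={amplitude}, steps={steps}'

# A FloatSlider, an IntSlider and a TextInput are inferred from the defaults.
layout = pn.interact(model)
# layout contains the widget box and the live output pane managed by the class above.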
Code example #21
File: pandas.py  Project: laurentperrinet/holoviews
class DataFrameView(Element):
    """
    DataFrameView provides a convenient compatibility wrapper around
    Pandas DataFrames. It provides several core functions:

        * Allows integrating several Pandas plot types with the
          HoloViews plotting system (includes plot, boxplot, histogram
          and scatter_matrix).

        * Provides several convenient wrapper methods to apply
          DataFrame methods and slice data. This includes:

              1) The apply method, which takes the DataFrame method to
                 be applied as the first argument and passes any
                 supplied args or kwargs along.

              2) The select and __getitem__ method which allow for
                 selecting and slicing the data using NdMapping.
    """

    plot_type = param.ObjectSelector(default=None,
                                     objects=['plot', 'boxplot',
                                              'hist', 'scatter_matrix',
                                              'autocorrelation_plot',
                                              None],
                                     doc="""Selects which Pandas plot type to use,
                                            when visualizing the ViewableElement.""")

    x = param.String(doc="""Dimension to visualize along the x-axis.""")

    x2 = param.String(doc="""Dimension to visualize along a second
                             dependent axis.""")

    y = param.String(doc="""Dimension to visualize along the y-axis.""")

    group = param.String(default='DFrame')

    value_dimensions = param.List(doc="DataFrameView has no value dimension.")

    def __init__(self, data, dimensions={}, key_dimensions=None, **params):
        if pd is None:
            raise Exception("Pandas is required for the Pandas interface.")
        if not isinstance(data, pd.DataFrame):
            raise Exception('DataFrame ViewableElement type requires Pandas dataframe as data.')
        if key_dimensions:
            if len(key_dimensions) != len(data.columns):
                raise ValueError("Supplied key dimensions do not match data columns")
            dims = key_dimensions
        else:
            dims = list(data.columns)
        for name, dim in dimensions.items():
            if name in data.columns:
                dims[list(data.columns).index(name)] = dim

        self._xlim = None
        self._ylim = None
        ViewableElement.__init__(self, data, key_dimensions=dims, **params)
        self.data.columns = self._cached_index_names


    def __getitem__(self, key):
        """
        Allows slicing and selecting along the DataFrameView dimensions.
        """
        if key is ():
            return self
        else:
            if len(key) <= self.ndims:
                return self.select(**dict(zip(self._cached_index_names, key)))
            else:
                raise Exception('Selection contains %d dimensions, DataFrameView '
                                'only has %d index dimensions.' % (len(key), self.ndims))


    def select(self, **select):
        """
        Allows slice and select individual values along the DataFrameView
        dimensions. Supply the dimensions and values or slices as
        keyword arguments.
        """
        df = self.data
        for dim, k in select.items():
            if isinstance(k, slice):
                df = df[(k.start < df[dim]) & (df[dim] < k.stop)]
            else:
                df = df[df[dim] == k]
        return self.clone(df)


    def dimension_values(self, dim):
        return np.array(self.data[dim])


    def apply(self, name, *args, **kwargs):
        """
        Applies the Pandas dframe method corresponding to the supplied
        name with the supplied args and kwargs.
        """
        return self.clone(getattr(self.data, name)(*args, **kwargs))


    def dframe(self):
        """
        Returns a copy of the internal dframe.
        """
        return self.data.copy()


    def reduce(self, dimensions=[], function=None, **reductions):
        """
        The reduce function accepts either a list of Dimensions
        and a function to apply to find the aggregate across
        those Dimensions or a list of dimension/function pairs
        to apply one by one.
        """
        if not dimensions and not reductions:
            raise Exception("Supply either a list of Dimensions or"
                            "reductions as keyword arguments")
        reduced = self.data
        if dimensions:
            if not function:
                raise Exception("Supply a function to reduce the Dimensions with")
            reduced = reduced.groupby(dimensions, as_index=True).aggregate(function)
        if reductions:
            for dim, fn in reductions.items():
                reduced = reduced.groupby(dim, as_index=True).aggregate(fn)
        key_dimensions = [d for d in self.dimensions('key') if d.name in reduced.columns]
        return self.clone(reduced, key_dimensions=key_dimensions)


    def groupby(self, dimensions, container_type=NdMapping):
        invalid_dims = list(set(dimensions) - set(self._cached_index_names))
        if invalid_dims:
            raise Exception('Following dimensions could not be found %s.'
                            % invalid_dims)

        index_dims = [self.get_dimension(d) for d in dimensions]
        mapping = container_type(None, key_dimensions=index_dims)
        view_dims = set(self._cached_index_names) - set(dimensions)
        view_dims = [self.get_dimension(d) for d in view_dims]
        for k, v in self.data.groupby(dimensions):
            mapping[k] = self.clone(v.drop(dimensions, axis=1),
                                    key_dimensions=view_dims)
        return mapping


    def overlay(self, dimensions):
        return self.groupby(dimensions, NdOverlay)


    def layout(self, dimensions=[], cols=4):
        return self.groupby(dimensions, NdLayout).cols(cols)


    def grid(self, dimensions):
        """
        Splits the supplied dimensions out into a GridSpace.
        """
        if len(dimensions) > 2:
            raise Exception('Grids hold a maximum of two dimensions.')
        return self.groupby(dimensions, GridSpace)


    def holomap(self, key_dimensions=[]):
        """
        Splits the supplied dimensions out into a HoloMap.
        """
        return self.groupby(key_dimensions, HoloMap)

    @property
    def xlabel(self):
        return self.x

    @property
    def ylabel(self):
        return self.y

    @property
    def xlim(self):
        if self._xlim:
            return self._xlim
        if self.x:
            xdata = self.data[self.x]
            return min(xdata), max(xdata)
        else:
            return None

    @property
    def ylim(self):
        if self._ylim:
            return self._ylim
        elif self.y:
            ydata = self.data[self.y]
            return min(ydata), max(ydata)
        else:
            return None
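A hedged usage sketch of the DataFrameView wrapper above; it assumes pandas and numpy are available, that the class is importable from the surrounding module, and an older HoloViews release that still uses the key_dimensions keyword API:

import numpy as np
import pandas as pd

df = pd.DataFrame({'year': [2000, 2000, 2001, 2001],
                   'region': ['north', 'south', 'north', 'south'],
                   'sales': [1.0, 2.0, 1.5, 2.5]})
view = DataFrameView(df)
subset = view.select(region='north')       # boolean selection along a column dimension
by_region = view.groupby(['region'])       # NdMapping keyed by region
totals = view.reduce(['region'], np.sum)   # aggregate across the region dimension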
Code example #22
class weighted_regrid(regrid):
    """
    Implements weighted regridding of rectilinear and curvilinear
    grids using the xESMF library, supporting all the ESMF regridding
    algorithms including bilinear, conservative and nearest neighbour
    regridding. The operation will always store the sparse weight
    matrix to disk and reuse the weights for later aggregations. To
    delete the weight files call the clean_weight_files method on the
    operation.
    """

    interpolation = param.ObjectSelector(default='bilinear',
        objects=['bilinear', 'conservative', 'nearest_s2d', 'nearest_d2s'], doc="""
        Interpolation method""")

    reuse_weights = param.Boolean(default=True, doc="""
        Whether the sparse regridding weights should be cached as a local
        NetCDF file in the path defined by the file_pattern parameter.
        Can provide considerable speedups when exploring a larger dataset.""")

    file_pattern = param.String(default='{method}_{x_range}_{y_range}_{width}x{height}.nc',
                                doc="""
        The file pattern used to store the regridding weights when the
        reuse_weights parameter is enabled. Note the files are not
        cleared automatically so make sure you clean up the cached
        files when you are done.""")

    _files = []

    _per_element = True

    def _get_regridder(self, element):
        try:
            import xesmf as xe
        except ImportError:
            raise ImportError("xESMF library required for weighted regridding.")
        x, y = element.kdims
        if self.p.target:
            tx, ty = self.p.target.kdims[:2]
            if issubclass(self.p.target.interface, XArrayInterface):
                ds_out = self.p.target.data
                ds_out = ds_out.rename({tx.name: 'lon', ty.name: 'lat'})
                height, width = ds_out[tx.name].shape
            else:
                xs = self.p.target.dimension_values(0, expanded=False)
                ys = self.p.target.dimension_values(1, expanded=False)
                ds_out = xr.Dataset({'lat': ys, 'lon': xs})
                height, width = len(ys), len(xs)
            x_range = ds_out[tx.name].min(), ds_out[tx.name].max()
            y_range = ds_out[ty.name].min(), ds_out[ty.name].max()
            xtype, ytype = 'numeric', 'numeric'
        else:
            info = self._get_sampling(element, x, y)
            (x_range, y_range), _, (width, height), (xtype, ytype) = info
            if x_range[0] > x_range[1]:
                x_range = x_range[::-1]
            element = element.select(**{x.name: x_range, y.name: y_range})
            ys = np.linspace(y_range[0], y_range[1], height)
            xs = np.linspace(x_range[0], x_range[1], width)
            ds_out = xr.Dataset({'lat': ys, 'lon': xs})

        irregular = any(element.interface.irregular(element, d)
                        for d in [x, y])
        coord_opts = {'flat': False} if irregular else {'expanded': False}
        coords = tuple(element.dimension_values(d, **coord_opts)
                       for d in [x, y])
        arrays = self._get_xarrays(element, coords, xtype, ytype)
        ds = xr.Dataset(arrays)
        ds = ds.rename({x.name: 'lon', y.name: 'lat'})

        x_range = str(tuple('%.3f' % r for r in x_range)).replace("'", '')
        y_range = str(tuple('%.3f' % r for r in y_range)).replace("'", '')
        filename = self.file_pattern.format(method=self.p.interpolation,
                                            width=width, height=height,
                                            x_range=x_range, y_range=y_range)
        self._files.append(os.path.abspath(filename))
        regridder = xe.Regridder(ds, ds_out, self.p.interpolation,
                                 reuse_weights=self.p.reuse_weights,
                                 filename=filename)
        return regridder, arrays


    def _process(self, element, key=None):
        regridder, arrays = self._get_regridder(element)
        x, y = element.kdims
        ds = xr.Dataset({vd: regridder(arr) for vd, arr in arrays.items()})
        ds = ds.rename({'lon': x.name, 'lat': y.name})
        params = get_param_values(element)
        if is_geographic(element):
            try:
                return Image(ds, crs=element.crs, **params)
            except:
                return QuadMesh(ds, crs=element.crs, **params)
        try:
            return HvImage(ds, **params)
        except:
            return HvQuadMesh(ds, **params)


    @classmethod
    def clean_weight_files(cls):
        """
        Cleans existing weight files.
        """
        deleted = []
        for f in cls._files:
            try:
                os.remove(f)
                deleted.append(f)
            except FileNotFoundError:
                pass
        print('Deleted %d weight files' % len(deleted))
        cls._files = []
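
A minimal usage sketch for the regridding operation above. The operation name weighted_regrid, its import path and the xarray tutorial dataset are assumptions based on the GeoViews API, not taken from the listing itself:

import xarray as xr
import geoviews as gv
from geoviews.operation.regrid import weighted_regrid  # assumed location

# Example dataset with lat/lon coordinates (assumption: xarray tutorial data)
air = xr.tutorial.open_dataset('air_temperature').isel(time=0)
img = gv.Image(air, kdims=['lon', 'lat'], vdims=['air'])

# Regrid using bilinear weights; the weight file written by xESMF is
# tracked on the class so it can be cleaned up later
regridded = weighted_regrid(img, interpolation='bilinear')

# Remove any cached weight files once they are no longer needed
weighted_regrid.clean_weight_files()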
Code example #23
File: widgets.py  Project: ldsalomone/holoviews
class ProgressBar(ProgressIndicator):
    """
    A simple text progress bar suitable for both the IPython notebook
    and the IPython interactive prompt.

    ProgressBars are automatically nested if a previously instantiated
    progress bar has not yet reached 100% completion.
    """

    display = param.ObjectSelector(default='stdout',
                                   objects=['stdout', 'disabled', 'broadcast'],
                                   doc="""
        Parameter to control display of the progress bar. By default,
        progress is shown on stdout but this may be disabled e.g. for
        jobs that log standard output to file.

        If the output mode is set to 'broadcast', a socket is opened on
        a stated port to broadcast the completion percentage. The
        RemoteProgress class may then be used to view the progress from
        a different process.""")

    width = param.Integer(default=70,
                          doc="""
        The width of the progress bar as the number of characters""")

    fill_char = param.String(default='#',
                             doc="""
        The character used to fill the progress bar.""")

    blank_char = param.String(default=' ',
                              doc="""
        The character for the blank portion of the progress bar.""")

    elapsed_time = param.Boolean(default=True,
                                 doc="""
        If enabled, the progress bar will disappear and display the
        total elapsed time once 100% completion is reached.""")

    cache = {}

    current_progress = []

    def __init__(self, **params):
        self.start_time = None
        self._stdout_display(0, False)
        ProgressBar.current_progress.append(self)
        super().__init__(**params)

    def __call__(self, percentage):
        " Update the progress bar within the specified percent_range"

        if self.start_time is None: self.start_time = time.time()
        span = (self.percent_range[1] - self.percent_range[0])
        percentage = self.percent_range[0] + ((percentage / 100.0) * span)

        if self.display == 'disabled': return
        elif self.display == 'stdout':
            if percentage == 100 and self.elapsed_time:
                elapsed = time.time() - self.start_time
                if clear_output and not ipython2: clear_output()
                if clear_output and ipython2: clear_output(wait=True)
                self.out = '\r' + ('100%% %s %02d:%02d:%02d' %
                                   (self.label.lower(), elapsed // 3600,
                                    (elapsed // 60) % 60, elapsed % 60))
                output = ''.join([pg.out for pg in self.current_progress])
                sys.stdout.write(output)
            else:
                self._stdout_display(percentage)
            if percentage == 100 and ProgressBar.current_progress:
                ProgressBar.current_progress.pop()
            return

        if 'socket' not in self.cache:
            self.cache['socket'] = self._get_socket()

        if self.cache['socket'] is not None:
            self.cache['socket'].send('%s|%s' % (percentage, self.label))

    def _stdout_display(self, percentage, display=True):
        if clear_output and not ipython2: clear_output()
        if clear_output and ipython2: clear_output(wait=True)
        percent_per_char = 100.0 / self.width
        char_count = int(
            math.floor(percentage /
                       percent_per_char) if percentage < 100.0 else self.width)
        blank_count = self.width - char_count
        prefix = '\n' if len(self.current_progress) > 1 else ''
        self.out = prefix + (
            "%s[%s%s] %0.1f%%" %
            (self.label + ':\n' if self.label else '', self.fill_char *
             char_count, self.blank_char * len(self.fill_char) * blank_count,
             percentage))
        if display:
            sys.stdout.write(''.join([pg.out for pg in self.current_progress]))
            sys.stdout.flush()
            time.sleep(0.0001)

    def _get_socket(self, min_port=8080, max_port=8100, max_tries=20):
        import zmq
        context = zmq.Context()
        sock = context.socket(zmq.PUB)
        try:
            port = sock.bind_to_random_port('tcp://*',
                                            min_port=min_port,
                                            max_port=max_port,
                                            max_tries=max_tries)
            self.param.message("Progress broadcast bound to port %d" % port)
            return sock
        except Exception:
            self.param.message(
                "No suitable port found for progress broadcast.")
            return None
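
A minimal sketch of driving the progress bar from a plain loop. The import path is an assumption and may differ between HoloViews versions:

import time
from holoviews.ipython.widgets import ProgressBar  # assumed location

progress = ProgressBar(label='Processing')
for i in range(0, 101, 10):
    time.sleep(0.1)   # stand-in for real work
    progress(i)       # update the bar to i percent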
Code example #24
class Contours(Path):
    """
    The Contours element is a subtype of a Path which is characterized
    by the fact that each path geometry may only be associated with
    scalar values. It supports all the same data formats as a `Path`
    but does not allow continuously varying values along the path
    geometry's coordinates. Conceptually Contours therefore represent
    iso-contours or isoclines, i.e. a function of two variables which
    describes a curve along which the function has a constant value.

    The canonical representation is a list of dictionaries storing the
    x- and y-coordinates along with any other (scalar) values:

        [{'x': 1d-array, 'y': 1d-array, 'value': scalar}, ...]

    Since not all formats allow storing scalar values as actual
    scalars, arrays which are the same length as the coordinates but
    have only one unique value are also considered scalar. This is
    strictly enforced, ensuring that each path geometry represents
    a valid iso-contour.

    The easiest way of accessing the individual geometries is using
    the `Contours.split` method, which returns each path geometry as a
    separate entity, while the other methods assume a flattened
    representation where all paths are separated by NaN values.
    """

    level = param.Number(default=None,
                         doc="""
        Optional level associated with the set of Contours.""")

    vdims = param.List(default=[],
                       constant=True,
                       doc="""
        Contours optionally accept a value dimension, corresponding
        to the supplied values.""")

    group = param.String(default='Contours', constant=True)

    _level_vdim = Dimension('Level')  # For backward compatibility

    def __init__(self, data, kdims=None, vdims=None, **params):
        data = [] if data is None else data
        if params.get('level') is not None:
            self.warning(
                "The level parameter on %s elements is deprecated, "
                "supply the value dimension(s) as columns in the data.",
                type(self).__name__)
            vdims = vdims or [self._level_vdim]
            params['vdims'] = []
        else:
            params['vdims'] = vdims
        super(Contours, self).__init__(data, kdims=kdims, **params)
        if params.get('level') is not None:
            with disable_constant(self):
                self.vdims = [asdim(d) for d in vdims]
        else:
            all_scalar = all(
                self.interface.isscalar(self, vdim) for vdim in self.vdims)
            if not all_scalar:
                raise ValueError(
                    "All value dimensions on a Contours element must be scalar"
                )

    def dimension_values(self, dim, expanded=True, flat=True):
        dimension = self.get_dimension(dim, strict=True)
        if dimension in self.vdims and self.level is not None:
            if expanded:
                return np.full(len(self), self.level)
            return np.array([self.level])
        return super(Contours, self).dimension_values(dim, expanded, flat)
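
A minimal sketch (assuming HoloViews and NumPy are installed) of the canonical list-of-dicts format and the split method described in the Contours docstring above:

import numpy as np
import holoviews as hv

theta = np.linspace(0, 2 * np.pi, 100)
# Two circular iso-contours, each carrying a single scalar 'level' value
contours = hv.Contours(
    [{'x': r * np.cos(theta), 'y': r * np.sin(theta), 'level': r} for r in (1, 2)],
    vdims='level')
geometries = contours.split()   # one Contours element per path geometry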
Code example #25
File: renderer.py  Project: roxyboy/holoviews
class BokehRenderer(Renderer):

    theme = param.ClassSelector(default=None,
                                class_=Theme,
                                allow_None=True,
                                doc="""
       The applicable Bokeh Theme object (if any).""")

    backend = param.String(default='bokeh', doc="The backend name.")

    fig = param.ObjectSelector(default='auto',
                               objects=['html', 'json', 'auto', 'png'],
                               doc="""
        Output render format for static figures. If None, no figure
        rendering will occur. """)

    holomap = param.ObjectSelector(
        default='auto',
        objects=['widgets', 'scrubber', 'server', None, 'auto'],
        doc="""
        Output render multi-frame (typically animated) format. If
        None, no multi-frame rendering will occur.""")

    mode = param.ObjectSelector(default='default',
                                objects=['default', 'server'],
                                doc="""
        Whether to render the object in regular or server mode. In server
        mode a bokeh Document will be returned which can be served as a
        bokeh server app. By default all output is rendered to HTML."""
                                )

    # Defines the valid output formats for each mode.
    mode_formats = {
        'fig': {
            'default': ['html', 'json', 'auto', 'png'],
            'server': ['html', 'json', 'auto']
        },
        'holomap': {
            'default': ['widgets', 'scrubber', 'auto', None],
            'server': ['server', 'auto', None]
        }
    }

    webgl = param.Boolean(default=False,
                          doc="""Whether to render plots with WebGL
        if bokeh version >=0.10""")

    widgets = {
        'scrubber': BokehScrubberWidget,
        'widgets': BokehSelectionWidget,
        'server': BokehServerWidgets
    }

    backend_dependencies = {
        'js': CDN.js_files if CDN.js_files else tuple(INLINE.js_raw),
        'css': CDN.css_files if CDN.css_files else tuple(INLINE.css_raw)
    }

    _loaded = False

    # Define the handler for updating bokeh plots
    comm_msg_handler = bokeh_msg_handler if bokeh_version > '0.12.14' else None

    def __call__(self, obj, fmt=None, doc=None):
        """
        Render the supplied HoloViews component using the appropriate
        backend. The output is not a file format but a suitable,
        in-memory byte stream together with any suitable metadata.
        """
        plot, fmt = self._validate(obj, fmt, doc=doc)
        info = {'file-ext': fmt, 'mime_type': MIME_TYPES[fmt]}

        if self.mode == 'server':
            return self.server_doc(plot, doc), info
        elif isinstance(plot, tuple(self.widgets.values())):
            return plot(), info
        elif fmt == 'png':
            from bokeh.io.export import get_screenshot_as_png
            img = get_screenshot_as_png(plot.state, None)
            imgByteArr = BytesIO()
            img.save(imgByteArr, format='PNG')
            return imgByteArr.getvalue(), info
        elif fmt == 'html':
            html = self._figure_data(plot, doc=doc)
            html = "<div style='display: table; margin: 0 auto;'>%s</div>" % html
            return self._apply_post_render_hooks(html, obj, fmt), info
        elif fmt == 'json':
            return self.diff(plot), info

    @bothmethod
    def _save_prefix(self_or_cls, ext):
        "Hook to prefix content for instance JS when saving HTML"
        if ext == 'html':
            return '\n'.join(self_or_cls.html_assets()).encode('utf8')
        return

    @bothmethod
    def get_plot(self_or_cls, obj, doc=None, renderer=None):
        """
        Given a HoloViews Viewable return a corresponding plot instance.
        Allows supplying a document to attach the plot to, which is
        useful when combining the bokeh model with another plot.
        """
        doc = curdoc() if doc is None else doc
        doc.theme = self_or_cls.theme
        plot = super(BokehRenderer, self_or_cls).get_plot(obj, renderer)
        plot.document = doc
        return plot

    @bothmethod
    def get_widget(self_or_cls, plot, widget_type, doc=None, **kwargs):
        if not isinstance(plot, Plot):
            plot = self_or_cls.get_plot(plot, doc)
        if self_or_cls.mode == 'server':
            return BokehServerWidgets(plot,
                                      renderer=self_or_cls.instance(),
                                      **kwargs)
        else:
            return super(BokehRenderer,
                         self_or_cls).get_widget(plot, widget_type, **kwargs)

    @bothmethod
    def app(self_or_cls,
            plot,
            show=False,
            new_window=False,
            websocket_origin=None):
        """
        Creates a bokeh app from a HoloViews object or plot. By default
        it simply attaches the plot to bokeh's curdoc and returns the
        Document. If the show option is supplied, it creates an
        Application instance and displays it either in a browser window
        or inline if the notebook extension has been loaded. Using the
        new_window option the app may be displayed in a new browser tab
        once the notebook extension has been loaded. A websocket origin
        is required when launching from an existing tornado server (such
        as the notebook) if it is not on the default port
        ('localhost:8888').
        """
        if not isinstance(self_or_cls,
                          BokehRenderer) or self_or_cls.mode != 'server':
            renderer = self_or_cls.instance(mode='server')
        else:
            renderer = self_or_cls

        def modify_doc(doc):
            renderer(plot, doc=doc)

        handler = FunctionHandler(modify_doc)
        app = Application(handler)

        if not show:
            # If not showing and in notebook context return app
            return app
        elif self_or_cls.notebook_context and not new_window:
            # If in notebook, show=True and no new window requested
            # display app inline
            opts = dict(
                notebook_url=websocket_origin) if websocket_origin else {}
            return bkshow(app, **opts)

        # If app shown outside notebook or new_window requested
        # start server and open in new browser tab
        from tornado.ioloop import IOLoop
        loop = IOLoop.current()
        opts = dict(allow_websocket_origin=[websocket_origin
                                            ]) if websocket_origin else {}
        opts['io_loop'] = loop
        server = Server({'/': app}, port=0, **opts)

        def show_callback():
            server.show('/')

        server.io_loop.add_callback(show_callback)
        server.start()
        try:
            loop.start()
        except RuntimeError:
            pass
        return server

    @bothmethod
    def server_doc(self_or_cls, obj, doc=None):
        """
        Get a bokeh Document with the plot attached. May supply
        an existing doc, otherwise bokeh.io.curdoc() is used to
        attach the plot to the global document instance.
        """
        if not isinstance(obj, (Plot, BokehServerWidgets)):
            if not isinstance(self_or_cls,
                              BokehRenderer) or self_or_cls.mode != 'server':
                renderer = self_or_cls.instance(mode='server')
            else:
                renderer = self_or_cls
            plot, _ = renderer._validate(obj, 'auto')
        else:
            plot = obj

        root = plot.state
        if isinstance(plot, BokehServerWidgets):
            plot = plot.plot

        if doc is None:
            doc = plot.document
        else:
            plot.document = doc

        plot.traverse(lambda x: attach_periodic(x), [GenericElementPlot])
        doc.add_root(root)
        return doc

    def _figure_data(self,
                     plot,
                     fmt='html',
                     doc=None,
                     as_script=False,
                     **kwargs):
        """
        Given a plot instance, an output format and an optional bokeh
        document, return the corresponding data. If as_script is True,
        the content will be split in an HTML and a JS component.
        """
        model = plot.state
        if doc is None:
            doc = plot.document
        else:
            plot.document = doc

        for m in model.references():
            m._document = None

        doc.theme = self.theme
        doc.add_root(model)

        comm_id = plot.comm.id if plot.comm else None
        # Bokeh raises warnings about duplicate tools and empty subplots
        # but at the holoviews level these are not issues
        logger = logging.getLogger(bokeh.core.validation.check.__file__)
        logger.disabled = True
        try:
            js, div, _ = notebook_content(model, comm_id)
            html = NOTEBOOK_DIV.format(plot_script=js, plot_div=div)
            html = encode_utf8(html)
            doc.hold()
        except:
            logger.disabled = False
            raise
        logger.disabled = False
        plot.document = doc
        if as_script:
            return div, js
        return html

    def diff(self, plot, binary=True):
        """
        Returns a json diff required to update an existing plot with
        the latest plot data.
        """
        events = list(plot.document._held_events)
        if not events:
            return None
        msg = Protocol("1.0").create("PATCH-DOC", events, use_buffers=binary)
        plot.document._held_events = []
        return msg

    @classmethod
    def plot_options(cls, obj, percent_size):
        """
        Given a holoviews object and a percentage size, apply heuristics
        to compute a suitable figure size. For instance, scaling layouts
        and grids linearly can result in unwieldy figure sizes when there
        are a large number of elements. As ad hoc heuristics are used,
        this functionality is kept separate from the plotting classes
        themselves.

        Used by the IPython Notebook display hooks and the save
        utility. Note that this can be overridden explicitly per object
        using the fig_size and size plot options.
        """
        obj = obj.last if isinstance(obj, HoloMap) else obj
        plot = Store.registry[cls.backend].get(type(obj), None)
        if not hasattr(plot, 'width') or not hasattr(plot, 'height'):
            from .plot import BokehPlot
            plot = BokehPlot
        options = plot.lookup_options(obj, 'plot').options
        width = options.get('width', plot.width)
        height = options.get('height', plot.height)
        return dict(options, **{'width': int(width), 'height': int(height)})

    @bothmethod
    def get_size(self_or_cls, plot):
        """
        Return the display size associated with a plot before
        rendering to any particular format. Used to generate
        appropriate HTML display.

        Returns a tuple of (width, height) in pixels.
        """
        if isinstance(plot, Plot):
            plot = plot.state
        elif not isinstance(plot, Model):
            raise ValueError('Can only compute sizes for HoloViews '
                             'and bokeh plot objects.')
        return compute_plot_size(plot)

    @classmethod
    def load_nb(cls, inline=True):
        """
        Loads the bokeh notebook resources.
        """
        LOAD_MIME_TYPE = bokeh.io.notebook.LOAD_MIME_TYPE
        bokeh.io.notebook.LOAD_MIME_TYPE = MIME_TYPES['jlab-hv-load']
        load_notebook(hide_banner=True, resources=INLINE if inline else CDN)
        bokeh.io.notebook.LOAD_MIME_TYPE = LOAD_MIME_TYPE
        bokeh.io.notebook.curstate().output_notebook()
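
A minimal usage sketch (assuming HoloViews with the bokeh backend is installed) showing how the renderer turns an element into an in-memory HTML payload or a saved file:

import numpy as np
import holoviews as hv
hv.extension('bokeh')

renderer = hv.renderer('bokeh')
curve = hv.Curve(np.random.randn(100).cumsum())

html, info = renderer(curve, fmt='html')  # HTML payload plus metadata dict
renderer.save(curve, 'curve')             # or write curve.html to disk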
Code example #26
class Path(Geometry):
    """
    The Path element represents a collection of path geometries with
    associated values. Each path geometry may be split into
    sub-geometries on NaN-values and may be associated with scalar
    values or array values varying along its length. In analogy to
    GEOS geometry types a Path is a collection of LineString and
    MultiLineString geometries with associated values.

    Like all other elements a Path may be defined through an
    extensible list of interfaces. Natively, HoloViews provides the
    MultiInterface which allows representing paths as lists of regular
    columnar data objects including arrays, dataframes and
    dictionaries of column arrays and scalars.

    The canonical representation is a list of dictionaries storing the
    x- and y-coordinates along with any other values:

        [{'x': 1d-array, 'y': 1d-array, 'value': scalar, 'continuous': 1d-array}, ...]

    A Path may be colored by either scalar values or by values varying
    continuously along the geometry's coordinates. Since not all
    formats allow storing scalar values as actual scalars, arrays
    which are the same length as the coordinates but have only one
    unique value are also considered scalar.

    The easiest way of accessing the individual geometries is using
    the `Path.split` method, which returns each path geometry as a
    separate entity, while the other methods assume a flattened
    representation where all paths are separated by NaN values.
    """

    group = param.String(default="Path", constant=True)

    datatype = param.ObjectSelector(default=['multitabular'])

    def __init__(self, data, kdims=None, vdims=None, **params):
        if isinstance(data, tuple) and len(data) == 2:
            x, y = map(np.asarray, data)
            if y.ndim == 1:
                y = np.atleast_2d(y).T
            if len(x) != y.shape[0]:
                raise ValueError(
                    "Path x and y values must be the same length.")
            data = [np.column_stack((x, y[:, i])) for i in range(y.shape[1])]
        elif isinstance(data, list) and all(
                isinstance(path, Path) for path in data):
            data = [p for path in data for p in path.data]
        super(Path, self).__init__(data, kdims=kdims, vdims=vdims, **params)

    def __getitem__(self, key):
        if key in self.dimensions(): return self.dimension_values(key)
        if not isinstance(key, tuple) or len(key) == 1:
            key = (key, slice(None))
        elif len(key) == 0:
            return self.clone()
        if not all(isinstance(k, slice) for k in key):
            raise KeyError("%s only support slice indexing" %
                           self.__class__.__name__)
        xkey, ykey = key
        xstart, xstop = xkey.start, xkey.stop
        ystart, ystop = ykey.start, ykey.stop
        return self.clone(extents=(xstart, ystart, xstop, ystop))

    def select(self, selection_specs=None, **kwargs):
        """
        Bypasses selection on data and sets extents based on selection.
        """
        return super(Element2D, self).select(selection_specs, **kwargs)

    def split(self, start=None, end=None, datatype=None, **kwargs):
        """
        The split method allows splitting a Path type into a list of
        subpaths of the same type. A start and/or end may be supplied
        to select a subset of paths.
        """
        if not self.interface.multi:
            if datatype == 'array':
                obj = self.array(**kwargs)
            elif datatype == 'dataframe':
                obj = self.dframe(**kwargs)
            elif datatype == 'columns':
                obj = self.columns(**kwargs)
            elif datatype is None:
                obj = self
            else:
                raise ValueError("%s datatype not support" % datatype)
            return [obj]
        return self.interface.split(self, start, end, datatype, **kwargs)

    # Deprecated methods

    @classmethod
    def collapse_data(cls, data_list, function=None, kdims=None, **kwargs):
        if config.future_deprecations:
            param.main.warning('Path.collapse_data is deprecated, collapsing '
                               'may now be performed through concatenation '
                               'and aggregation.')
        if function is None:
            return [path for paths in data_list for path in paths]
        else:
            raise Exception("Path types are not uniformly sampled and"
                            "therefore cannot be collapsed with a function.")

    def __setstate__(self, state):
        """
        Ensures old-style unpickled Path types without an interface
        use the MultiInterface.

        Note: Deprecate as part of 2.0
        """
        self.__dict__ = state
        if 'interface' not in state:
            self.interface = MultiInterface
        super(Dataset, self).__setstate__(state)
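
A minimal sketch (assuming HoloViews and NumPy are installed) of the (x, y-columns) constructor handled in __init__ above and of Path.split:

import numpy as np
import holoviews as hv

xs = np.linspace(0, 1, 50)
ys = np.column_stack([np.sin(2 * np.pi * xs), np.cos(2 * np.pi * xs)])

path = hv.Path((xs, ys))   # one sub-path per column of ys
subpaths = path.split()    # list of two separate Path elements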
Code example #27
File: __init__.py  Project: ioam/parambokeh
class Widgets(param.ParameterizedFunction):

    callback = param.Callable(default=None, doc="""
        Custom callable to execute on button press
        (if `button`), otherwise whenever a widget is changed.
        Should accept a Parameterized object argument.""")

    view_position = param.ObjectSelector(default='below',
                                         objects=['below', 'right', 'left', 'above'],
                                         doc="""
        Layout position of any View parameter widgets.""")

    next_n = param.Parameter(default=0, doc="""
        When executing cells, integer number to execute (or 'all').
        A value of zero means not to control cell execution.""")

    on_init = param.Boolean(default=False, doc="""
        Whether to do the action normally taken (executing cells
        and/or calling a callable) when first instantiating this
        object.""")

    button = param.Boolean(default=False, doc="""
        Whether to show a button to control cell execution.
        If false, will execute `next` cells on any widget
        value change.""")

    button_text = param.String(default="Run", doc="""
        Text to show on the 'next_n'/run button.""")

    show_labels = param.Boolean(default=True)

    display_threshold = param.Number(default=0,precedence=-10,doc="""
        Parameters with precedence below this value are not displayed.""")

    default_precedence = param.Number(default=1e-8,precedence=-10,doc="""
        Precedence value to use for parameters with no declared precedence.
        By default, zero precedence is available for forcing some parameters
        to the top of the list, and other values above the default_precedence
        value can be used to sort or group parameters arbitrarily.""")

    initializer = param.Callable(default=None, doc="""
        User-supplied function that will be called on initialization,
        usually to update the default Parameter values of the
        underlying parameterized object.""")

    layout = param.ObjectSelector(default='column',
                                  objects=['row','column'],doc="""
        Whether to lay out the buttons as a row or a column.""")

    continuous_update = param.Boolean(default=False, doc="""
        If true, will continuously update the next_n and/or callback,
        if any, as a slider widget is dragged.""")

    mode = param.ObjectSelector(default='notebook', objects=['server', 'raw', 'notebook'], doc="""
        Whether to use the widgets in server or notebook mode. In raw mode
        the widgets container will simply be returned.""")

    push = param.Boolean(default=True, doc="""
        Whether to push data in notebook mode. Allows disabling pushing
        of data if the callback handles this itself.""")

    width = param.Integer(default=300, bounds=(0, None), doc="""
        Width of widgetbox the parameter widgets are displayed in.""")

    label_formatter = param.Callable(default=default_label_formatter, allow_None=True,
        doc="Callable used to format the parameter names into widget labels.")

    # Timeout if a notebook comm message is swallowed
    timeout = 20000

    # Timeout before the first event is processed
    debounce = 20

    def __call__(self, parameterized, doc=None, plots=[], **params):
        self.p = param.ParamOverrides(self, params)
        if self.p.initializer:
            self.p.initializer(parameterized)

        self._widgets = {}
        self.parameterized = parameterized
        self.document = None
        if self.p.mode == 'notebook':
            if not IPYTHON_AVAILABLE:
                raise ImportError('IPython is not available, cannot use '
                                  'Widgets in notebook mode.')
            self.comm = JupyterCommManager.get_client_comm(on_msg=self.on_msg)
            # HACK: Detects HoloViews plots and lets them handle the comms
            hv_plots = [plot for plot in plots if hasattr(plot, 'comm')]
            self.server_comm = JupyterCommManager.get_server_comm()
            if hv_plots:
                self.document = [p.document for p in hv_plots][0]
                self.p.push = False
            else:
                self.document = doc or Document()
        else:
            self.document = doc or curdoc()
            self.server_comm = None
            self.comm = None

        self._queue = []
        self._active = False
        self._widget_options = {}
        self.shown = False

        # Initialize root container
        widget_box = widgetbox(width=self.p.width)
        view_params = any(isinstance(p, _View) for p in parameterized.params().values())
        layout = self.p.view_position
        container_type = column if layout in ['below', 'above'] else row
        container = container_type() if plots or view_params else widget_box
        self.plot_id = container.ref['id']

        # Initialize widgets and populate container
        widgets, views = self.widgets()
        plots = views + plots
        widget_box.children = widgets

        plots = process_hv_plots(self, plots)

        if plots:
            view_box = column(plots)
            if layout in ['below', 'right']:
                children = [widget_box, view_box]
            else:
                children = [view_box, widget_box]
            container.children = children

        # Initialize view parameters
        for view in views:
            p_obj = self.parameterized.params(view.name)
            value = getattr(self.parameterized, view.name)
            if value is not None:
                rendered = p_obj.renderer(value, p_obj)
                self._update_trait(view.name, rendered)

        # Keeps track of changes between button presses
        self._changed = {}

        if self.p.on_init:
            self.execute()

        if self.p.mode == 'raw':
            return container

        self.document.add_root(container)
        if self.p.mode == 'notebook':
            notebook_show(container, self.document, self.server_comm)
            if self.document._hold is None:
                self.document.hold()
            self.shown = True
            return
        return self.document


    def on_msg(self, msg):
        p_name = msg['p_name']
        p_obj = self.parameterized.params(p_name)
        if isinstance(p_obj, param.Action):
            getattr(self.parameterized, p_name)(self.parameterized)
            return
        w = self._widgets[p_name]
        self._queue.append((w, p_obj, p_name, None, None, msg['value']))
        self.change_event()


    def on_change(self, w, p_obj, p_name, attr, old, new):
        self._queue.append((w, p_obj, p_name, attr, old, new))
        if not self._active:
            self.document.add_timeout_callback(self.change_event, 50)
            self._active = True


    def change_event(self):
        if not self._queue:
            self._active = False
            return
        w, p_obj, p_name, attr, old, new_values = self._queue[-1]
        self._queue = []

        error = False
        # Apply literal evaluation to values
        if (isinstance(w, TextInput) and isinstance(p_obj, literal_params)):
            try:
                new_values = ast.literal_eval(new_values)
            except Exception:
                error = 'eval'

        if p_name in self._widget_options:
            mapping = self._widget_options[p_name]
            if isinstance(new_values, list):
                new_values = [mapping[el] for el in new_values]
            else:
                new_values = mapping.get(new_values, new_values)

        if isinstance(p_obj, param.Range):
            new_values = tuple(new_values)

        if isinstance(w, CheckboxGroup):
            new_values = True if (len(new_values)>0 and new_values[0]==0) else False

        # If no error during evaluation try to set parameter
        if not error:
            try:
                setattr(self.parameterized, p_name, new_values)
            except ValueError:
                error = 'validation'

        # Style widget to denote error state
        # apply_error_style(w, error)

        if not error and not self.p.button:
            self.execute({p_name: new_values})
        else:
            self._changed[p_name] = new_values

        # document.hold() must have been done already? because this seems to work
        if self.p.mode == 'notebook' and self.p.push and self.document._held_events:
            self._send_notebook_diff()
        self._active = False


    def _send_notebook_diff(self):
        events = list(self.document._held_events)
        msg = Protocol("1.0").create("PATCH-DOC", events, use_buffers=True)
        self.document._held_events = []
        if msg is None:
            return
        self.server_comm.send(msg.header_json)
        self.server_comm.send(msg.metadata_json)
        self.server_comm.send(msg.content_json)
        for header, payload in msg.buffers:
            self.server_comm.send(json.dumps(header))
            self.server_comm.send(buffers=[payload])

    def _update_trait(self, p_name, p_value, widget=None):
        widget = self._widgets[p_name] if widget is None else widget
        if isinstance(p_value, tuple):
            p_value, size = p_value
        if isinstance(widget, Div):
            widget.text = p_value
        else:
            if widget.children:
                widget.children.remove(widget.children[0])
            widget.children.append(p_value)


    def _make_widget(self, p_name):
        p_obj = self.parameterized.params(p_name)

        if isinstance(p_obj, _View):
            p_obj._comm = self.server_comm
            p_obj._document = self.document
            p_obj._notebook = self.p.mode == 'notebook'

        widget_class = wtype(p_obj)
        value = getattr(self.parameterized, p_name)

        kw = dict(value=value)

        if self.p.label_formatter is not None:
            kw['title'] = self.p.label_formatter(p_name)
        else:
            kw['title'] = p_name

        kw['name'] = p_name

        if hasattr(p_obj, 'get_range') and not isinstance(kw['value'], dict):
            options = named_objs(p_obj.get_range().items())
            value = kw['value']
            lookup = {v: k for k, v in options}
            if isinstance(value, list):
                kw['value'] = [lookup[v] for v in value]
            elif isinstance(p_obj, param.FileSelector) and value is None:
                kw['value'] = ''
            else:
                kw['value'] = lookup[value]
            opt_lookup = {k: v for k, v in options}
            self._widget_options[p_name] = opt_lookup
            options = [(k, k) for k, v in options]
            kw['options'] = options

        if hasattr(p_obj, 'get_soft_bounds'):
            kw['start'], kw['end'] = p_obj.get_soft_bounds()

        w = widget_class(**kw)

        if hasattr(p_obj, 'callbacks') and value is not None:
            rendered = p_obj.renderer(value, p_obj)
            self._update_trait(p_name, rendered, w)

        if hasattr(p_obj, 'callbacks'):
            p_obj.callbacks[id(self.parameterized)] = functools.partial(self._update_trait, p_name)
        elif isinstance(w, CheckboxGroup):
            if self.p.mode in ['server', 'raw']:
                w.on_change('active', functools.partial(self.on_change, w, p_obj, p_name))
            else:
                js_callback = self._get_customjs('active', p_name)
                w.js_on_change('active', js_callback)
        elif isinstance(w, Button):
            if self.p.mode in ['server', 'raw']:
                w.on_click(functools.partial(value,self.parameterized))
            else:
                w.js_on_click(self._get_customjs('active', p_name))
        elif not p_obj.constant:
            if self.p.mode in ['server', 'raw']:
                cb = functools.partial(self.on_change, w, p_obj, p_name)
                if 'value' in w.properties():
                    w.on_change('value', cb)
                elif 'range' in w.properties():
                    w.on_change('range', cb)
            else:
                if 'value' in w.properties():
                    change = 'value'
                elif 'range' in w.properties():
                    change = 'range'
                customjs = self._get_customjs(change, p_name)
                w.js_on_change(change, customjs)

        return w


    def _get_customjs(self, change, p_name):
        """
        Returns a CustomJS callback that can be attached to send the
        widget state across the notebook comms.
        """
        data_template = "data = {{p_name: '{p_name}', value: cb_obj['{change}']}};"
        fetch_data = data_template.format(change=change, p_name=p_name)
        self_callback = JS_CALLBACK.format(comm_id=self.comm.id,
                                           timeout=self.timeout,
                                           debounce=self.debounce,
                                           plot_id=self.plot_id)
        js_callback = CustomJS(code='\n'.join([fetch_data,
                                               self_callback]))
        return js_callback


    def widget(self, param_name):
        """Get widget for param_name"""
        if param_name not in self._widgets:
            self._widgets[param_name] = self._make_widget(param_name)
        return self._widgets[param_name]


    def execute(self, changed={}):
        if self.p.callback is not None:
            if get_method_owner(self.p.callback) is self.parameterized:
                self.p.callback(**changed)
            else:
                self.p.callback(self.parameterized, **changed)

    def widgets(self):
        """Return name,widget boxes for all parameters (i.e., a property sheet)"""

        params = self.parameterized.params().items()
        key_fn = lambda x: x[1].precedence if x[1].precedence is not None else self.p.default_precedence
        sorted_precedence = sorted(params, key=key_fn)
        outputs = [k for k, p in sorted_precedence if isinstance(p, _View)]
        filtered = [(k,p) for (k,p) in sorted_precedence
                    if ((p.precedence is None) or (p.precedence >= self.p.display_threshold))
                    and k not in outputs]
        groups = itertools.groupby(filtered, key=key_fn)
        sorted_groups = [sorted(grp) for (k,grp) in groups]
        ordered_params = [el[0] for group in sorted_groups for el in group]

        # Format name specially
        ordered_params.pop(ordered_params.index('name'))
        widgets = [Div(text='<b>{0}</b>'.format(self.parameterized.name))]

        def format_name(pname):
            p = self.parameterized.params(pname)
            # omit name for buttons, which already show the name on the button
            name = "" if issubclass(type(p),param.Action) else pname
            return Div(text=name)

        if self.p.show_labels:
            # Interleave a label Div (blank for buttons) with each widget
            widgets += [item for pname in ordered_params
                        for item in (format_name(pname), self.widget(pname))]
        else:
            widgets += [self.widget(pname) for pname in ordered_params]

        if self.p.button and not (self.p.callback is None and self.p.next_n==0):
            display_button = Button(label=self.p.button_text)
            def click_cb():
                # Execute and clear changes since last button press
                try:
                    self.execute(self._changed)
                except Exception as e:
                    self._changed.clear()
                    raise e
                self._changed.clear()
            display_button.on_click(click_cb)
            widgets.append(display_button)

        outputs = [self.widget(pname) for pname in outputs]
        return widgets, outputs
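
A minimal sketch (assuming parambokeh and param are installed) of wiring Widgets up to a Parameterized object in a Jupyter notebook (the default 'notebook' mode); Settings and report are hypothetical names introduced only for illustration:

import param
import parambokeh

class Settings(param.Parameterized):
    amplitude = param.Number(default=1.0, bounds=(0, 5))
    title = param.String(default='sine')

def report(obj, **changed):
    # Called with the Parameterized object and the changed values
    print('changed:', changed)

# Displays a widget box; every widget change invokes the callback
parambokeh.Widgets(Settings(), callback=report)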
Code example #28
class Polygons(Contours):
    """
    The Polygons element represents a collection of polygon geometries
    with associated scalar values. Each polygon geometry may be split
    into sub-geometries on NaN-values and may be associated with
    scalar values. In analogy to GEOS geometry types a Polygons
    element is a collection of Polygon and MultiPolygon
    geometries. Polygon geometries are defined as a set of coordinates
    describing the exterior bounding ring and any number of interior
    holes.

    Like all other elements a Polygons element may be defined through
    an extensible list of interfaces. Natively HoloViews provides the
    MultiInterface which allows representing paths as lists of regular
    columnar data objects including arrays, dataframes and
    dictionaries of column arrays and scalars.

    The canonical representation is a list of dictionaries storing the
    x- and y-coordinates, a list-of-lists of arrays representing the
    holes, along with any other values:

        [{'x': 1d-array, 'y': 1d-array, 'holes': list-of-lists-of-arrays, 'value': scalar}, ...]

    The list-of-lists format of the holes corresponds to the potential
    for each coordinate array to be split into a multi-geometry
    through NaN-separators. Each sub-geometry separated by the NaNs
    therefore has an unambiguous mapping to a list of holes. If a
    (multi-)polygon has no holes, the 'holes' key may be omitted.

    Any value dimensions stored on a Polygons geometry must be scalar,
    just like the Contours element. Since not all formats allow
    storing scalar values as actual scalars, arrays which are the same
    length as the coordinates but have only one unique value are also
    considered scalar.

    The easiest way of accessing the individual geometries is using
    the `Polygons.split` method, which returns each path geometry as a
    separate entity, while the other methods assume a flattened
    representation where all paths are separated by NaN values.
    """

    group = param.String(default="Polygons", constant=True)

    vdims = param.List(default=[],
                       doc="""
        Polygons optionally accept a value dimension, corresponding
        to the supplied value.""")

    _level_vdim = Dimension('Value')

    # Defines which key the DictInterface uses to look for holes
    _hole_key = 'holes'

    @property
    def has_holes(self):
        """
        Detects whether any polygon in the Polygons element defines
        holes. Useful to avoid expanding Polygons unless necessary.
        """
        return self.interface.has_holes(self)

    def holes(self):
        """
        Returns a list-of-lists-of-lists of hole arrays. The three levels
        of nesting reflects the structure of the polygons:

          1. The first level of nesting corresponds to the list of geometries
          2. The second level corresponds to each Polygon in a MultiPolygon
          3. The third level of nesting allows for multiple holes per Polygon
        """
        return self.interface.holes(self)
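
A minimal sketch (assuming HoloViews and NumPy are installed) of the 'holes' format documented above: a single square polygon with one triangular hole:

import numpy as np
import holoviews as hv

square = {'x': [0, 4, 4, 0], 'y': [0, 0, 4, 4],
          'holes': [[np.array([(1, 1), (3, 1), (2, 3)])]],
          'value': 1}
polys = hv.Polygons([square], vdims='value')

polys.has_holes   # True
polys.holes()     # list-of-lists-of-lists of hole arrays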
Code example #29
class NdMapping(MultiDimensionalMapping):
    """
    NdMapping supports the same indexing semantics as
    MultiDimensionalMapping but also supports slicing semantics.

    Slicing semantics on an NdMapping depend on the ordering of the
    keys. As MultiDimensionalMapping sorts the keys, a slice on an
    NdMapping is effectively a way of filtering out the keys that fall
    outside the slice range.
    """

    group = param.String(default='NdMapping', constant=True)

    def __getitem__(self, indexslice):
        """
        Allows slicing operations along the key and data
        dimensions. If no data slice is supplied it will return all
        data elements, otherwise it will return the requested slice of
        the data.
        """
        if indexslice in [Ellipsis, ()]:
            return self

        map_slice, data_slice = self._split_index(indexslice)
        map_slice = self._transform_indices(map_slice)
        map_slice = self._expand_slice(map_slice)

        if all(not isinstance(el, (slice, set, list, tuple)) for el in map_slice):
            return self._dataslice(self.data[map_slice], data_slice)
        else:
            conditions = self._generate_conditions(map_slice)
            items = self.data.items()
            for cidx, (condition, dim) in enumerate(zip(conditions, self.kdims)):
                values = self._cached_index_values.get(dim.name, None)
                items = [(k, v) for k, v in items
                         if condition(values.index(k[cidx]) if values else k[cidx])]
            items = [(k, self._dataslice(v, data_slice)) for k, v in items]
            if len(items) == 0:
                raise KeyError('No items within specified slice.')
            with item_check(False):
                return self.clone(items)


    def _expand_slice(self, indices):
        """
        Expands slices containing steps into a list.
        """
        keys = list(self.data.keys())
        expanded = []
        for idx, ind in enumerate(indices):
            if isinstance(ind, slice) and ind.step is not None:
                dim_ind = slice(ind.start, ind.stop)
                if dim_ind == slice(None):
                    condition = self._all_condition()
                elif dim_ind.start is None:
                    condition = self._upto_condition(dim_ind)
                elif dim_ind.stop is None:
                    condition = self._from_condition(dim_ind)
                else:
                    condition = self._range_condition(dim_ind)
                dim_vals = unique_iterator(k[idx] for k in keys)
                expanded.append(set([k for k in dim_vals if condition(k)][::int(ind.step)]))
            else:
                expanded.append(ind)
        return tuple(expanded)


    def _transform_indices(self, indices):
        """
        Identity function here but subclasses can implement transforms
        of the dimension indices from one coordinate system to another.
        """
        return indices


    def _generate_conditions(self, map_slice):
        """
        Generates filter conditions used for slicing the data structure.
        """
        conditions = []
        for dim, dim_slice in zip(self.kdims, map_slice):
            if isinstance(dim_slice, slice):
                start, stop = dim_slice.start, dim_slice.stop
                if dim.values:
                    values = self._cached_index_values[dim.name]
                    dim_slice = slice(None if start is None else values.index(start),
                                      None if stop is None else values.index(stop))
                if dim_slice == slice(None):
                    conditions.append(self._all_condition())
                elif start is None:
                    conditions.append(self._upto_condition(dim_slice))
                elif stop is None:
                    conditions.append(self._from_condition(dim_slice))
                else:
                    conditions.append(self._range_condition(dim_slice))
            elif isinstance(dim_slice, set):
                if dim.values:
                    dim_slice = [self._cached_index_values[dim.name].index(dim_val)
                                 for dim_val in dim_slice]
                conditions.append(self._values_condition(dim_slice))
            elif dim_slice is Ellipsis:
                conditions.append(self._all_condition())
            elif isinstance(dim_slice, (list, tuple)):
                raise ValueError("Keys may only be selected with sets, not lists or tuples.")
            else:
                if dim.values:
                    dim_slice = self._cached_index_values[dim.name].index(dim_slice)
                conditions.append(self._value_condition(dim_slice))
        return conditions


    def _value_condition(self, value):
        return lambda x: x == value


    def _values_condition(self, values):
        return lambda x: x in values


    def _range_condition(self, slice):
        if slice.step is None:
            lmbd = lambda x: slice.start <= x < slice.stop
        else:
            lmbd = lambda x: slice.start <= x < slice.stop and not (
                (x-slice.start) % slice.step)
        return lmbd


    def _upto_condition(self, slice):
        if slice.step is None:
            lmbd = lambda x: x < slice.stop
        else:
            lmbd = lambda x: x < slice.stop and not (x % slice.step)
        return lmbd


    def _from_condition(self, slice):
        if slice.step is None:
            lmbd = lambda x: x > slice.start
        else:
            lmbd = lambda x: x > slice.start and not ((x-slice.start) % slice.step)
        return lmbd

    def _all_condition(self):
        return lambda x: True
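
A minimal sketch (assuming HoloViews is installed) of the slicing semantics implemented above; keys outside the slice range are simply filtered out:

from holoviews.core.ndmapping import NdMapping

nd = NdMapping({i: i ** 2 for i in range(10)}, kdims=['frequency'])
subset = nd[2:5]      # keeps keys 2, 3 and 4 (the stop value is exclusive)
list(subset.keys())   # [2, 3, 4]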
Code example #30
File: element.py  Project: eddy-ojb/holoviews
class image_overlay(ElementOperation):
    """
    Operation to build an overlay of images matching a specification
    from a subset of the required elements.

    This is useful for reordering the elements of an overlay,
    duplicating layers of an overlay or creating blank image elements
    in the appropriate positions.

    For instance, image_overlay may build a three layered input
    suitable for the RGB factory operation even if supplied with one
    or two of the required channels (creating blank channels for the
    missing elements).

    Note that if there is any ambiguity regarding the match, the
    strongest match will be used. In the case of a tie in match
    strength, the first layer in the input is used. One successful
    match is always required.
    """

    output_type = Overlay

    spec = param.String(doc="""
       Specification of the output Overlay structure. For instance:

       Image.R * Image.G * Image.B

       Will ensure an overlay of this structure is created even if
       (for instance) only (Image.R * Image.B) is supplied.

       Elements in the input overlay that match are placed in the
       appropriate positions and unavailable specification elements
       are created with the specified fill group.""")

    fill = param.Number(default=0)

    default_range = param.Tuple(default=(0, 1),
                                doc="""
        The default range that will be set on the value_dimension of
        any automatically created blank image elements.""")

    group = param.String(default='Transform',
                         doc="""
        The group assigned to the resulting overlay.""")

    @classmethod
    def _match(cls, el, spec):
        "Return the strength of the match (None if no match)"
        spec_dict = dict(zip(['type', 'group', 'label'], spec.split('.')))
        if not isinstance(el, Image) or spec_dict['type'] != 'Image':
            raise NotImplementedError("Only Image currently supported")

        sanitizers = {'group': group_sanitizer, 'label': label_sanitizer}
        strength = 1
        for key in ['group', 'label']:
            attr_value = sanitizers[key](getattr(el, key))
            if key in spec_dict:
                if spec_dict[key] != attr_value: return None
                strength += 1
        return strength

    def _match_overlay(self, raster, overlay_spec):
        """
        Given a raster or input overlay, generate a list of matched
        elements (None if no match) and corresponding tuple of match
        strength values.
        """
        ordering = [None] * len(overlay_spec)  # Elements to overlay
        strengths = [0] * len(overlay_spec)  # Match strengths

        elements = raster.values() if isinstance(raster, Overlay) else [raster]

        for el in elements:
            for pos in range(len(overlay_spec)):
                strength = self._match(el, overlay_spec[pos])
                if strength is None: continue  # No match
                elif (strength <= strengths[pos]): continue  # Weaker match
                else:  # Stronger match
                    ordering[pos] = el
                    strengths[pos] = strength
        return ordering, strengths

    def _process(self, raster, key=None):
        specs = tuple(el.strip() for el in self.p.spec.split('*'))
        ordering, strengths = self._match_overlay(raster, specs)
        if all(el is None for el in ordering):
            raise Exception(
                "The image_overlay operation requires at least one match")

        completed = []
        strongest = ordering[np.argmax(strengths)]
        for el, spec in zip(ordering, specs):
            if el is None:
                spec_dict = dict(
                    zip(['type', 'group', 'label'], spec.split('.')))
                el = Image(np.ones(strongest.data.shape) * self.p.fill,
                           group=spec_dict.get('group', 'Image'),
                           label=spec_dict.get('label', ''))
                el.vdims[0].range = self.p.default_range
            completed.append(el)
        return np.prod(completed)
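
A minimal sketch (assuming HoloViews and NumPy are installed) of padding a partial overlay out to a full R*G*B specification; the import path is an assumption based on where the operation typically lives in HoloViews:

import numpy as np
import holoviews as hv
from holoviews.operation import image_overlay  # assumed import path

red = hv.Image(np.random.rand(10, 10), group='R')
blue = hv.Image(np.random.rand(10, 10), group='B')

# A blank 'G' layer (filled with zeros) is created to satisfy the spec
rgb_ready = image_overlay(red * blue, spec='Image.R * Image.G * Image.B')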