Example 1
    def reload(self):
        """
        Re-read files, and update data
        """
        try:
            d = load_data(self.path, factory=self.factory, **self.kwargs)
        except (OSError, IOError) as exc:
            warnings.warn("Could not reload %s.\n%s" % (self.path, exc))
            if self.watcher is not None:
                self.watcher.stop()
            return

        log = as_list(d)[0]._load_log

        for dold, dnew in zip(self.data, as_list(d)):
            if dold.shape != dnew.shape:
                warnings.warn("Cannot refresh data -- data shape changed")
                return

            mapping = dict((c, log.component(self.id(c)).data)
                           for c in dold._components.values()
                           if c in self.components
                           and type(c) == Component)
            dold.coords = dnew.coords
            dold.update_components(mapping)
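
Every snippet in this collection passes possibly-scalar values through as_list before iterating over them. A minimal sketch of such a helper, assuming it only needs to wrap non-list values (glue's actual implementation may differ in detail):

def as_list(x):
    # pass lists through unchanged, wrap anything else in a single-element list
    if isinstance(x, list):
        return x
    return [x]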
Example 2
    def _combine_data(self, data, new_state, add_if_empty=False):
        """ Dispatches to the combine method of mode attribute.

        The behavior is dependent on the mode it dispatches to.
        By default, the method uses ReplaceMode, which overwrites
        the edit_subsets' subset_state with new_state

        :param edit_subset: The current edit_subset
        :param new_state: The new SubsetState
        :param add_if_empty: If True and a data set has no subsets,
                             a new one will be added and assigned
                             using new-state
        """
        empty = data.edit_subset is None or data.edit_subset == []
        if add_if_empty and empty:
            if self.data_collection is None:
                raise RuntimeError("Must set data_collection before "
                                   "calling update")
            data.edit_subset = self.data_collection.new_subset_group()
        if empty and not add_if_empty:
            logging.getLogger(__name__).info("Ignoring subset update")
            return
        subs = data.edit_subset
        for s in as_list(subs):
            self.mode(s, new_state)
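
Here self.mode is expected to be a callable taking (subset, new_state). A hedged sketch of what two such combine modes could look like (illustrative only, assuming SubsetState objects support the | operator; glue's real modes live in glue.core.edit_subset_mode and may differ):

def ReplaceMode(edit_subset, new_state):
    # discard the existing state and use the new one
    edit_subset.subset_state = new_state

def OrMode(edit_subset, new_state):
    # combine the new state with the existing one via logical OR
    edit_subset.subset_state = new_state | edit_subset.subset_state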
Example 3
    def update(self, view=None):
        """
        Redraw the layer
        """
        if not self._visible:
            return

        self.clear()

        if self._coordinator.remove_artists:
            old = all_artists(self._axes.figure)

        if isinstance(self._layer, Data):
            a = self._coordinator.plot_data(layer=self._layer)
        else:
            a = self._coordinator.plot_subset(layer=self._layer, subset=self._layer)

        # if user explicitly returns the newly-created artists,
        # then use them. Otherwise, introspect to find the new artists
        if a is None:
            if self._coordinator.remove_artists:
                self.artists = list(new_artists(self._axes.figure, old))
            else:
                self.artists = []
        else:
            self.artists = as_list(a)

        for a in self.artists:
            a.set_zorder(self.zorder)
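
The introspection branch relies on all_artists and new_artists. A plausible sketch of these helpers, inferred from how they are used here rather than copied from glue:

def all_artists(figure):
    # snapshot every artist currently attached to the figure's axes
    artists = set()
    for ax in figure.axes:
        for group in (ax.lines, ax.patches, ax.collections, ax.images, ax.texts):
            artists.update(group)
    return artists

def new_artists(figure, old_artists):
    # artists that appeared since old_artists was snapshotted
    return all_artists(figure) - old_artists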
Example 4
def load_data(path, factory=None, **kwargs):
    """Use a factory to load a file and assign a label.

    This is the preferred interface for loading data into Glue,
    as it logs metadata about how data objects relate to files
    on disk.

    :param path: Path to a file
    :param factory: factory function to use. Defaults to :func:`auto_data`

    Extra keywords are passed through to factory functions
    """
    from glue.qglue import parse_data

    coord_first = kwargs.pop('coord_first', True)

    def as_data_objects(ds, lbl):
        # pack other container types like astropy tables
        # into glue data objects
        for d in ds:
            if isinstance(d, Data):
                yield d
                continue
            for item in parse_data(d, lbl):
                yield item

    factory = factory or auto_data
    lbl = data_label(path)

    d = as_list(factory(path, **kwargs))
    d = list(as_data_objects(d, lbl))
    log = LoadLog(path, factory, kwargs)
    for item in d:
        if not item.label:
            item.label = lbl
        log.log(item)  # attaches log metadata to item

        if coord_first:
            # We just follow the order in which the components are now loaded,
            # which is coordinate components first, followed by all other
            # components
            for cid in item.primary_components:
                log.log(item.get_component(cid))
        else:
            # In this case the first component logged was the first
            # non-coordinate component, followed by the coordinate components
            # and then the remaining components.
            cid = item.primary_components[item.ndim * 2]
            log.log(item.get_component(cid))
            for icid, cid in enumerate(item.primary_components):
                if icid != item.ndim * 2:
                    log.log(item.get_component(cid))

    if len(d) == 1:
        # unpack single-length lists for user convenience
        return d[0]

    return d
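
A short usage sketch (the file name is hypothetical; glue must be installed and have a factory that recognizes the file):

from glue.core.data_factories import load_data

d = load_data('observations.fits')  # hypothetical file
print(d.label)                      # defaults to a label derived from the file name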
Example 5
    def __init__(self, data=None):
        """
        :param data: :class:`~glue.core.data.Data` object, or list of such objects
        """
        super(DataCollection, self).__init__()
        self._link_manager = LinkManager()
        self._data = []

        self.hub = None

        self._subset_groups = []
        self.register_to_hub(Hub())
        self.extend(as_list(data or []))
        self._sg_count = 0
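
A minimal usage sketch of the constructor (assuming glue is installed):

from glue.core import Data, DataCollection

d = Data(x=[1, 2, 3], label='example')
dc = DataCollection(d)   # a single Data object is accepted thanks to as_list
print(len(dc))           # -> 1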
Example 6
    def add_datasets(cls, data_collection, datasets):
        """
        Utility method to interactively add datasets to a
        data_collection

        :param data_collection: :class:`~glue.core.data_collection.DataCollection`
        :param datasets: one or more :class:`~glue.core.data.Data` instances

        Adds datasets to the collection
        """

        datasets = as_list(datasets)
        data_collection.extend(datasets)
        list(map(partial(cls._suggest_mergers, data_collection), datasets))
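
The list(map(partial(...))) idiom on the last line simply applies the merger suggestion to each dataset in turn. A standalone illustration of the equivalence (suggest and its arguments are made-up stand-ins):

from functools import partial

def suggest(collection, dataset):
    print('checking', dataset, 'against', collection)

datasets = ['d1', 'd2']

# the idiom used above ...
list(map(partial(suggest, 'dc'), datasets))

# ... behaves like the plain loop:
for ds in datasets:
    suggest('dc', ds)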
Example 7
    def _combine_data(self, new_state):
        """ Dispatches to the combine method of mode attribute.

        The behavior is dependent on the mode it dispatches to.
        By default, the method uses ReplaceMode, which overwrites
        the edit_subsets' subset_state with new_state

        :param edit_subset: The current edit_subset
        :param new_state: The new SubsetState
        """
        if not self.edit_subset:
            if self.data_collection is None:
                raise RuntimeError("Must set data_collection before "
                                   "calling update")
            self.edit_subset = self.data_collection.new_subset_group()
        subs = self.edit_subset
        for s in as_list(subs):
            self.mode(s, new_state)
Example 8
def load_data(path, factory=None, **kwargs):
    """Use a factory to load a file and assign a label.

    This is the preferred interface for loading data into Glue,
    as it logs metadata about how data objects relate to files
    on disk.

    :param path: Path to a file
    :param factory: factory function to use. Defaults to :func:`auto_data`

    Extra keywords are passed through to factory functions
    """
    from glue.qglue import parse_data

    def as_data_objects(ds, lbl):
        # pack other container types like astropy tables
        # into glue data objects
        for d in ds:
            if isinstance(d, Data):
                yield d
                continue
            for item in parse_data(d, lbl):
                yield item

    factory = factory or auto_data
    lbl = data_label(path)

    d = as_list(factory(path, **kwargs))
    d = list(as_data_objects(d, lbl))
    log = LoadLog(path, factory, kwargs)
    for item in d:
        if not item.label:
            item.label = lbl
        log.log(item)  # attaches log metadata to item
        for cid in item.primary_components:
            log.log(item.get_component(cid))

    if len(d) == 1:
        # unpack single-length lists for user convenience
        return d[0]

    return d
Example 9
    def _combine_data(self, new_state, override_mode=None):
        """ Dispatches to the combine method of mode attribute.

        The behavior is dependent on the mode it dispatches to.
        By default, the method uses ReplaceMode, which overwrites
        the edit_subsets' subset_state with new_state

        :param edit_subset: The current edit_subset
        :param new_state: The new SubsetState
        :param override_mode: Mode to use instead of EditSubsetMode.mode
        """
        mode = override_mode or self.mode
        if not self._edit_subset or mode is NewMode:
            if self.data_collection is None:
                raise RuntimeError("Must set data_collection before "
                                   "calling update")
            self.edit_subset = [self.data_collection.new_subset_group()]
        subs = self._edit_subset
        for s in as_list(subs):
            mode(s, new_state)
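
A toy sketch of the dispatch-with-override pattern used here; the names and the dict-based "subset" are illustrative, not glue's API:

def replace_mode(subset, new_state):
    # overwrite whatever state the subset had
    subset['state'] = new_state

def or_mode(subset, new_state):
    # record a combination of the old and new states
    subset['state'] = ('or', subset['state'], new_state)

class Dispatcher:
    def __init__(self, mode=replace_mode):
        self.mode = mode

    def combine(self, subset, new_state, override_mode=None):
        # the override wins for this call only; self.mode stays unchanged
        mode = override_mode or self.mode
        mode(subset, new_state)

d = Dispatcher()
s = {'state': 'a'}
d.combine(s, 'b')                          # replace: state == 'b'
d.combine(s, 'c', override_mode=or_mode)   # one-off OR: ('or', 'b', 'c')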
Example 10
    def update(self, *args, **kwargs):

        if not self._visible:
            return

        self.clear()

        old = all_artists(self.axes.figure)

        if isinstance(self.state.layer, Data):
            a = self._coordinator.plot_data(layer=self.state.layer)
        else:
            a = self._coordinator.plot_subset(layer=self.state.layer, subset=self.state.layer)

        # if user explicitly returns the newly-created artists,
        # then use them. Otherwise, introspect to find the new artists
        if a is None:
            self.mpl_artists = list(new_artists(self.axes.figure, old))
        else:
            self.mpl_artists = as_list(a)

        for a in self.mpl_artists:
            a.set_zorder(self.state.zorder)
Example 11
    def update(self, *args, **kwargs):

        if not self._visible:
            return

        self.clear()

        old = all_artists(self.axes.figure)

        if isinstance(self.state.layer, BaseData):
            a = self._coordinator.plot_data(layer=self.state.layer)
        else:
            a = self._coordinator.plot_subset(layer=self.state.layer,
                                              subset=self.state.layer)

        # if user explicitly returns the newly-created artists,
        # then use them. Otherwise, introspect to find the new artists
        if a is None:
            self.mpl_artists = list(new_artists(self.axes.figure, old))
        else:
            self.mpl_artists = as_list(a)

        for a in self.mpl_artists:
            a.set_zorder(self.state.zorder)
Example 12
def load_data(path, factory=None, **kwargs):
    """
    Use a factory to load a file and assign a label.

    This is the preferred interface for loading data into Glue,
    as it logs metadata about how data objects relate to files
    on disk.

    Parameters
    ----------
    path : str
        Path to the file.

    factory : callable, optional
        Factory function to use. If not specified, a suitable factory is
        looked up with :func:`find_factory`.

    Extra keywords are passed through to the factory function.
    """
    from glue.qglue import parse_data

    coord_first = kwargs.pop('coord_first', True)
    force_coords = kwargs.pop('force_coords', False)

    def as_data_objects(ds, lbl):
        # pack other container types like astropy tables
        # into glue data objects
        for d in ds:
            if isinstance(d, BaseData):
                yield d
                continue
            for item in parse_data(d, lbl):
                yield item

    if factory is None:
        factory = find_factory(path, **kwargs)
        if factory is None:
            raise KeyError("Don't know how to open file: %s" % path)
    lbl = data_label(path)

    d = as_list(factory(path, **kwargs))
    d = list(as_data_objects(d, lbl))

    log = LoadLog(path, factory, kwargs)
    for item in d:

        # NOTE: The LoadLog infrastructure is designed with Data objects in
        # mind, not more general data classes.
        if not isinstance(item, Data):
            continue

        if item.coords is None and force_coords:
            item.coords = IdentityCoordinates(n_dim=item.ndim)

        if not item.label:
            item.label = lbl
        log.log(item)  # attaches log metadata to item

        if coord_first:
            # We just follow the order in which the components are now loaded,
            # which is coordinate components first, followed by all other
            # components
            for cid in item.coordinate_components + item.main_components:
                log.log(item.get_component(cid))
        else:
            # In this case the first component logged was the first
            # non-coordinate component, followed by the coordinate components
            # and then the remaining components.
            cid = item.main_components[0]
            log.log(item.get_component(cid))
            for icid, cid in enumerate(item.coordinate_components):
                log.log(item.get_component(cid))
            for icid, cid in enumerate(item.main_components[1:]):
                log.log(item.get_component(cid))

    if len(d) == 1:
        # unpack single-length lists for user convenience
        return d[0]

    return d
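
A hypothetical call exercising the two extra keywords popped at the top of the function; both are consumed here and never reach the factory (the file name is a placeholder):

from glue.core.data_factories import load_data

# force_coords attaches IdentityCoordinates when the factory returns data
# without coords; coord_first controls the component-logging order above
cube = load_data('cube.fits', force_coords=True, coord_first=True)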
Example 13
    def __setgluestate__(cls, rec, context):
        fac = context.object(rec['factory'])
        kwargs = dict(*rec['kwargs'])
        d = load_data(rec['path'], factory=fac, **kwargs)
        return as_list(d)[0]._load_log
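
dict(*rec['kwargs']) only works if the saved state stores the keyword arguments as a single-element list of key/value pairs. A standalone illustration (the rec layout shown is an assumption inferred from this call):

rec = {'kwargs': [[('coord_first', True), ('format', 'fits')]]}  # assumed layout
kwargs = dict(*rec['kwargs'])  # unpack the outer list, build the dict
print(kwargs)                  # {'coord_first': True, 'format': 'fits'}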
Example 14
    def __setgluestate__(cls, rec, context):
        fac = context.object(rec['factory'])
        kwargs = dict(*rec['kwargs'])
        kwargs['coord_first'] = rec.get('_protocol', 0) >= 1
        d = load_data(rec['path'], factory=fac, **kwargs)
        return as_list(d)[0]._load_log
Example 15
def load_data(path, factory=None, **kwargs):
    """Use a factory to load a file and assign a label.

    This is the preferred interface for loading data into Glue,
    as it logs metadata about how data objects relate to files
    on disk.

    :param path: Path to a file
    :param factory: factory function to use. If not specified, a suitable
        factory is looked up with :func:`find_factory`

    Extra keywords are passed through to factory functions
    """
    from glue.qglue import parse_data

    coord_first = kwargs.pop('coord_first', True)

    def as_data_objects(ds, lbl):
        # pack other container types like astropy tables
        # into glue data objects
        for d in ds:
            if isinstance(d, Data):
                yield d
                continue
            for item in parse_data(d, lbl):
                yield item

    if factory is None:
        factory = find_factory(path, **kwargs)
        if factory is None:
            raise KeyError("Don't know how to open file: %s" % path)

    lbl = data_label(path)

    d = as_list(factory(path, **kwargs))
    d = list(as_data_objects(d, lbl))
    log = LoadLog(path, factory, kwargs)
    for item in d:
        if not item.label:
            item.label = lbl
        log.log(item)  # attaches log metadata to item

        if coord_first:
            # We just follow the order in which the components are now loaded,
            # which is coordinate components first, followed by all other
            # components
            for cid in item.primary_components:
                log.log(item.get_component(cid))
        else:
            # In this case the first component logged was the first
            # non-coordinate component, followed by the coordinate components
            # and then the remaining components.
            cid = item.primary_components[item.ndim * 2]
            log.log(item.get_component(cid))
            for icid, cid in enumerate(item.primary_components):
                if icid != item.ndim * 2:
                    log.log(item.get_component(cid))

    if len(d) == 1:
        # unpack single-length lists for user convenience
        return d[0]

    return d
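
data_label appears throughout these examples to turn a path into a default label. A plausible sketch, inferred from that usage rather than copied from glue:

import os

def data_label(path):
    # keep the bare file name, drop the directory and the extension
    return os.path.splitext(os.path.basename(path))[0]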