Example 1
0
    def show_leaks(self, output: str = 'counts', fail: bool = True):
        """Report resources/memory still held by the current facade.

        The facade is deallocated first, after which any lingering objects
        are reported using :class:`~zensols.persist.Deallocatable`.

        **Important**: :obj:`allocation_tracking` must be set to ``True`` for
        this to work.

        :param output: one of ``stack``, ``counts``, or ``tensors``

        :param fail: if ``True``, raise an exception if there are any
                     unallocated references found

        """
        if self._facade is None:
            raise DeepLearnError('No facade created yet')
        if not self.allocation_tracking:
            # nothing was tracked, so there is nothing to report
            return
        self._facade.deallocate()
        if output == 'counts':
            Deallocatable._print_undeallocated(only_counts=True, fail=fail)
        elif output == 'stack':
            Deallocatable._print_undeallocated(include_stack=True, fail=fail)
        elif output == 'tensors':
            TorchConfig.write_in_memory_tensors()
        else:
            raise DeepLearnError(f'Unknown output type: {output}')
        # the facade is now dead; drop the reference so it is not reused
        self._facade = None
Example 2
0
    def create_facade(self, *args) -> ModelFacade:
        """Create and return a facade.  This deallocates and cleans up state
        from any previous facade creation as a side effect.

        :param args: given to the :obj:`cli_args_fn` function to create
                     arguments passed to the CLI

        :return: the newly created facade, which is also kept in
                 :obj:`_facade`

        :raises DeepLearnError: if the facade could not be created

        """
        # only pass configuration overrides when any were given (idiomatic
        # truthiness test rather than ``len(...) > 0``)
        if self.config_overwrites:
            dconf = DictionaryConfig(self.config_overwrites)
            app_args = {'config_overwrites': dconf}
        else:
            app_args = None
        self.deallocate()
        # reclaim memory running GC and GPU cache clear
        self.cleanup()
        try:
            # reset random state for consistency of each new test
            if self.reset_torch:
                TorchConfig.init()
            # create a factory that instantiates Python objects
            cli_args_fn = self.cli_args_fn(*args)
            # create the facade used for this instance
            self._facade: ModelFacade = self._create_facade(
                cli_args_fn, app_args)
            return self._facade
        except Exception as e:
            try:
                # recover the best we can
                self.cleanup(quiet=True)
                self._facade = None
            except Exception:
                # best effort only; the original error is re-raised below
                pass
            raise DeepLearnError(f'Could not create facade: {e}') from e
Example 3
0
    def print_information(self, info_item: InfoItem = None):
        """Output facade data set, vectorizer and other configuration information.

        :param info_item: what to print

        """
        # see :class:`.FacadeApplicationFactory'
        if hasattr(self, '_no_op'):
            # a no-op marker suppresses all output
            return
        with dealloc(self.create_facade()) as facade:
            def write_batch():
                # show the first couple of batches as a sample
                for batch in it.islice(facade.batch_stash.values(), 2):
                    batch.write()

            print(f'{facade.model_settings.model_name}:')
            # dispatch table keyed by the requested info item (None = all)
            writers = {
                None: facade.write,
                InfoItem.meta: facade.batch_metadata.write,
                InfoItem.param: facade.executor.write_settings,
                InfoItem.model: facade.executor.write_model,
                InfoItem.config: facade.config.write,
                InfoItem.batch: write_batch,
            }
            writer = writers.get(info_item)
            if writer is None:
                raise DeepLearnError(f'No such info item: {info_item}')
            writer()
Example 4
0
    def get_labels(self) -> torch.Tensor:
        """Return the label tensor for this batch.

        :raises DeepLearnError: if no batch feature mapping has been set

        """
        mapping: BatchFeatureMapping = self._get_batch_feature_mappings()
        if mapping is None:
            raise DeepLearnError('No batch feature mapping set')
        # look up the label tensor by its configured attribute name
        return self.attributes[mapping.label_attribute_name]
Example 5
0
 def prime(self):
     """Prime the batch data point sets and vectorizers, then delegate to
     the superclass.  Re-entrant priming is an error; :obj:`priming` acts
     as the guard flag and is always cleared, even on failure.

     """
     if logger.isEnabledFor(logging.DEBUG):
         logger.debug(
             f'priming {self.__class__}, is child: {self.is_child}, '
             f'currently priming: {self.priming}')
     if self.priming:
         raise DeepLearnError('Already priming')
     self.priming = True
     try:
         # touching the property forces creation/caching of the point sets
         self.batch_data_point_sets
         self._prime_vectorizers()
         super().prime()
     finally:
         # always clear the guard so a failed prime can be retried
         self.priming = False
Example 6
0
 def __post_init__(self, decoded_attributes):
     """Finish initialization after dataclass field assignment.

     :param decoded_attributes: init-only value assigned to
                                :obj:`decoded_attributes`
     """
     super().__post_init__()
     Deallocatable.__init__(self)
     # TODO: this class conflates key split and delegate stash functionality
     # in the `split_stash_container`.  An instance of this type serves the
     # purpose, but it need not be.  Instead it just needs to be both a
     # SplitKeyContainer and a Stash.  This probably should be split out in
     # to two different fields.
     cont = self.split_stash_container
     # accept either a full SplitStashContainer, or anything that is both a
     # SplitKeyContainer and a Stash
     ok = isinstance(cont, SplitStashContainer) or \
         (isinstance(cont, SplitKeyContainer) and isinstance(cont, Stash))
     if not ok:
         raise DeepLearnError('Expecting SplitStashContainer but got ' +
                              f'{self.split_stash_container.__class__}')
     self.data_point_id_sets_path.parent.mkdir(parents=True, exist_ok=True)
     self._batch_data_point_sets = PersistedWork(
         self.data_point_id_sets_path, self)
     self.priming = False
     self.decoded_attributes = decoded_attributes
     self._update_comp_stash_attribs()
Example 7
0
    def run(self, display_results: bool = True):
        """Train, test and optionally show results.

        :param display_results: if ``True``, write and plot the results

        :raises DeepLearnError: wrapping any error raised while running

        """
        try:
            facade = self.facade
            facade.train()
            facade.test()
            if not display_results:
                return
            facade.write_result()
            facade.plot_result()
        except Exception as e:
            try:
                # drop our reference, then reclaim what memory we can
                facade = None
                self.cleanup(quiet=True)
            except Exception:
                # best effort only; the original error is re-raised below
                pass
            raise DeepLearnError('Could not run the model') from e
Example 8
0
 def __getstate__(self):
     """Disallow pickling of data point instances.

     :raises DeepLearnError: always
     """
     # fix typo in the error message ("pickeled" -> "pickled")
     raise DeepLearnError('Data points should not be pickled')