def _update_evaluation(self):
    """Calculate evaluation function of system."""
    now = time.time()

    if not self._buffer['continue']:
        func = self._get_evaluation_algorithm()
        value = self._get_evaluation_value()
        self._config['tracker_eval_enable'] = False
        return ui.info('found optimum with: %s = %s' % (
            func['name'], func['formater'](value)))

    if ((now - self._buffer['eval_prev_time'])
            > self._config['tracker_eval_time_interval']):
        func = self._get_evaluation_algorithm()
        value = self._get_evaluation_value()
        progress = self._get_progress()

        # update time of last evaluation
        self._buffer['eval_prev_time'] = now

        # add evaluation to array
        if not isinstance(self._buffer['eval_values'], numpy.ndarray):
            self._buffer['eval_values'] = \
                numpy.array([[progress, value]])
        else:
            self._buffer['eval_values'] = \
                numpy.vstack((self._buffer['eval_values'],
                numpy.array([[progress, value]])))

        return ui.info('finished %.1f%%: %s = %s' % (
            progress * 100., func['name'], func['formater'](value)))

    return False
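# Illustrative sketch (not part of the tracker): _buffer['eval_values'] grows
# into a 2-column numpy array of (progress, value) rows. Assuming such an
# array, the best evaluation recorded so far can be read off as below; whether
# "best" means maximum or minimum depends on the evaluation function, so the
# direction is passed in explicitly.
def _best_evaluation(eval_values, maximize=True):
    """Return (progress, value) of the best evaluation recorded so far."""
    column = eval_values[:, 1]
    index = int(numpy.argmax(column) if maximize else numpy.argmin(column))
    return tuple(eval_values[index])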
def _install_pkg(self, pkg=None):
    """Install Bioconductor base or a given Bioconductor package."""
    if not pkg:
        ui.info("trying to install bioconductor base")
    else:
        ui.info("trying to install "
            f"bioconductor package: '{pkg}'")

    # try to evaluate the remote R script biocLite()
    bioclite = "https://bioconductor.org/biocLite.R"
    sysstout = sys.stdout
    try:
        sys.stdout = NullDevice()
        from rpy2.robjects.packages import importr
        base = importr('base')
        base.source(bioclite)
        base.require('biocLite')
        sys.stdout = sysstout
    except Exception as err:
        sys.stdout = sysstout
        raise ValueError(
            f"could not evaluate remote R script: '{bioclite}'") from err

    # try to install bioconductor packages with biocLite()
    if not pkg:
        return self._exec_rcmd("biocLite()")
    return self._exec_rcmd("biocLite('%s')" % pkg)
def optimize(self, config=None, **kwds):
    """Optimize the model with the configured algorithm."""
    if not self._set_config(config, **kwds):
        return None
    if not self._set_buffer_reset():
        return None

    # get name of optimization algorithm
    name = self._config.get('algorithm', None)
    if not name:
        raise ValueError(
            "could not optimize '%s' (%s): no optimization algorithm "
            "has been set." % (self.model.name, self.model.system.type))

    # get instance of optimization algorithm
    algorithm = self._get_algorithm(name, category='optimization')
    if not algorithm:
        raise ValueError(
            "could not optimize '%s': unsupported optimization "
            "algorithm '%s'." % (self.model.name, name))

    # start optimization
    if algorithm.get('type', None) == 'algorithm':
        ui.info("optimize '%s' (%s) using %s." % (
            self.model.name, self.model.system.type, name))

    # start key events
    if not self._buffer['key_events_started']:
        ui.info("press 'h' for help or 'q' to quit.")
        self._buffer['key_events_started'] = True
        rian.set('shell', 'buffmode', 'key')

    # TODO: retval, try / except etc.
    run_optimizer = algorithm.get('reference', None)
    if not run_optimizer:
        return None

    retval = True
    try:
        retval &= run_optimizer()
        retval &= self.model.network.initialize(self.model.system)
    except KeyboardInterrupt:
        retval = False

    rian.set('shell', 'buffmode', 'line')

    return retval
def print_usage() -> None:
    """Print script usage to standard output."""
    ui.info(
        "Usage: rian [options]\n\n"
        "Options:\n\n"
        "    -h --help          Print this help\n"
        "    -s --shell         Start rian session in IPython interactive shell\n"
        "    -l --list          List workspaces\n"
        "    -w --workspace     List scripts in workspace\n"
        "    -r --run-script    Open workspace and execute script\n"
        "    -a --arguments     Arguments passed to script\n"
        "    -v --version       Print version")
def print_workspaces() -> None:
    """Print list of workspaces to standard output."""
    rian.set('mode', 'silent')
    workspaces = rian.list('workspaces', base='user')
    ui.info('Workspaces:\n')
    for workspace in workspaces:
        ui.info('    %s' % (workspace))
    ui.info('')
def print_scripts(workspace: str) -> None:
    """Print list of scripts to standard output."""
    rian.set('mode', 'silent')
    if rian.open(workspace):
        ui.info('Scripts in workspace %s:\n' % (rian.get('workspace')))
        for script in rian.list('scripts'):
            ui.info('    %s' % (script))
        ui.info('')
def _update_keypress(self):
    """Check keyboard for shortcut keys."""
    char = rian.get('shell', 'inkey')
    if not char:
        return True

    if char == 'e':
        pass
    elif char == 'h':
        ui.info(
            "Keyboard Shortcuts\n"
            "'e' -- calculate evaluation function\n"
            "'h' -- show this\n"
            "'q' -- quit optimization\n"
            "'t' -- estimate finishing time")
    elif char == 'q':
        ui.info('aborting optimization')
        self._buffer['continue'] = False
    elif char == 't':
        ftime = self._get_estimatetime()
        ui.info('estimated finishing time %s' % ftime)

    return True
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, path)
package = importlib.import_module(package_name)
tests = importlib.import_module('tests')

# Search and filter TestCases within tests
loader = unittest.TestLoader()
suite = unittest.TestSuite()
cases = pkg.search(
    module=tests, classinfo=unittest.TestCase, val='reference',
    errors=True)
for ref in cases.values():
    if module:
        if not hasattr(ref, 'module'):
            continue
        if not hasattr(ref.module, '__name__'):
            continue
        if not fnmatch.fnmatch(ref.module.__name__, f'*{module}*'):
            continue
    suite.addTests(loader.loadTestsFromTestCase(ref))

# Initialize TestRunner and run TestCases
package_version = getattr(package, '__version__', '')
ui.info(f"testing {package_name} {package_version}")
cur_level = ui.get_notification_level()
ui.set_notification_level('CRITICAL')
runner = unittest.TextTestRunner(stream=sys.stderr, verbosity=2)
try:
    runner.run(suite)
finally:
    ui.set_notification_level(cur_level)
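# Illustrative sketch (not part of the runner): the filter above assumes that
# a collected TestCase class may carry a 'module' attribute referencing the
# module it covers, so that the optional 'module' pattern can be matched via
# fnmatch against ref.module.__name__. A hypothetical case could look like
# this; the stand-in for the covered module is an assumption, not part of
# this repo.
import unittest
import fnmatch as covered_module  # stand-in for the module under test

class HypotheticalTest(unittest.TestCase):
    module = covered_module  # matched against the '*<module>*' pattern above

    def test_module_has_name(self):
        self.assertTrue(hasattr(self.module, '__name__'))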
def _cdiv(self):
    """Contrastive divergence parameter optimization."""
    system = self.model.system
    config = self._config

    # set enable flags for restriction extensions
    config['con_klpt_enable'] = False
    if config['con_module']:
        found = False
        if config['con_module'] == 'klpt':
            config['con_klpt_enable'] = True
            about = "Kullback-Leibler penalty (expectation value %.2f)" \
                % config['con_klpt_expect']
            found = True
        if found:
            ui.info('using restriction: %s' % about)

    # set enable flags for denoising extensions
    if config['denoising']:
        found = False
        if config['denoising'].lower() == 'noise':
            config['noise_enable'] = True
            about = "data corruption (noise model '%s', factor %.2f)" \
                % (config['noise_type'], config['noise_factor'])
            found = True
        if found:
            ui.info('using denoising: %s' % (about))

    # set enable flags for acceleration extensions
    config['acc_vmra_enable'] = False
    if config['acc_module']:
        found = False
        if config['acc_module'].lower() == 'vmra':
            config['acc_vmra_enable'] = True
            about = "variance maximizing rate adaption (tail length %i)" \
                % config['acc_vmra_length']
            found = True
        if found:
            ui.info('using acceleration: %s' % about)

    # set enable flags for globalization extensions
    config['gen_rasa_enable'] = False
    if config['gen_module']:
        found = False
        if config['gen_module'].lower() == 'rasa':
            config['gen_rasa_enable'] = True
            about = "rate adaptive annealing (temperature %.1f, " \
                "annealing %.1f)" % (config['gen_rasa_init_temperature'],
                config['gen_rasa_annealing_factor'])
            found = True
        if found:
            ui.info('using generalization: %s' % (about))

    # init rasa
    self.write('sa', init_rate=config['update_rate'])

    while self.update():
        # get training data (sample from stratified minibatches)
        data = self._get_data_training()[0]

        # update parameters
        self._cdiv_update(data)

    return True
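# Illustrative sketch (not part of the module): a textbook CD-1 weight update
# for a Bernoulli-Bernoulli RBM, shown only to make the contrastive divergence
# step that _cdiv_update() applies to each minibatch concrete; the actual
# update rule, rates and extensions (klpt, vmra, rasa) live in the system.
def _cd1_weight_step(data, weights, bias_visible, bias_hidden, rate=0.1):
    """Return weights after one mean-field CD-1 gradient step."""
    sigmoid = lambda x: 1.0 / (1.0 + numpy.exp(-x))
    # positive phase: hidden expectations driven by the data
    h_data = sigmoid(data @ weights + bias_hidden)
    # negative phase: one Gibbs step (reconstruct visibles, re-infer hiddens)
    v_model = sigmoid(h_data @ weights.T + bias_visible)
    h_model = sigmoid(v_model @ weights + bias_hidden)
    # contrastive divergence: data statistics minus model statistics
    grad = (data.T @ h_data - v_model.T @ h_model) / data.shape[0]
    return weights + rate * grad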
def print_version() -> None:
    """Print rian version to standard output."""
    version = env.get_var('version') or ''
    ui.info('rian ' + version)
def _dbn_pretraining(self):
    """Deep belief network pretraining.

    Deep belief network pretraining is a meta algorithm that wraps
    unit-type specific optimization schedules, intended to perform
    system local optimization from outer layers to inner layers.
    The default optimization schedule uses restricted Boltzmann
    machines and contrastive divergence optimization.
    """
    system = self.model.system
    config = self._config

    if 'units' not in system._params:
        raise ValueError(
            "could not configure subsystems: no layers have been defined!")

    # create backup of dataset (before transformation)
    dataset = self.model.dataset
    dataset_backup = dataset.get('copy')

    # create layerwise subsystems for RBM pretraining
    cid = int((len(system._units) - 1) / 2)
    rbmparams = {'units': [], 'links': []}

    for lid in range(cid):
        src = system._params['units'][lid]
        srcnodes = src['id'] + system._params['units'][-1]['id'] \
            if src['visible'] else src['id']
        tgt = system._params['units'][lid + 1]
        tgtnodes = tgt['id']
        cpy = system._params['units'][-(lid + 1)]
        links = system._params['links'][(lid, lid + 1)]
        linkclass = (src['class'], tgt['class'])
        name = '%s <-> %s <-> %s' % (
            src['layer'], tgt['layer'], cpy['layer'])
        systype = {
            ('gauss', 'sigmoid'): 'rbm.GRBM',
            ('sigmoid', 'sigmoid'): 'rbm.RBM'
        }.get(linkclass, None)
        if not systype:
            raise ValueError(
                "could not create rbm: unsupported pair of unit "
                "classes '%s <-> %s'" % linkclass)

        # create subsystem
        subsystem = rian.system.new(
            config={
                'name': name,
                'type': systype,
                'init': {'ignore_units': ['visible'] if lid else []}})

        # create subnetwork and configure subsystem with network
        network = rian.network.create('factor', name=name,
            visible_nodes=srcnodes, visible_type=src['class'],
            hidden_nodes=tgtnodes, hidden_type=tgt['class'])
        subsystem.configure(network)

        # transform dataset with previous system and initialize
        # subsystem with dataset
        if lid:
            vlayer = prevsys._params['units'][0]['layer']
            hlayer = prevsys._params['units'][1]['layer']
            dataset._initialize_transform_system(system=prevsys,
                mapping=(vlayer, hlayer), func='expect')
        dataset.set('colfilter', visible=srcnodes)

        # create model
        model = rian.model.new(
            config={'type': 'base.Model', 'name': name},
            dataset=dataset, network=network, system=subsystem)

        # copy parameters from parental subsystem's hidden units
        # to current subsystem's visible units
        if lid:
            dsrc = rbmparams['units'][-1]
            dtgt = model.system._params['units'][0]
            lkeep = ['id', 'layer', 'layer_id', 'visible', 'class']
            lcopy = [key for key in list(dsrc.keys()) if key not in lkeep]
            for key in lcopy:
                dtgt[key] = dsrc[key]

        # reference parameters of current subsystem
        # in first layer reference visible, links and hidden
        # in other layers only reference links and hidden
        links['init'] = model.system._params['links'][(0, 1)]
        if lid == 0:
            src['init'] = model.system._units['visible'].params
        tgt['init'] = model.system._units['hidden'].params

        # optimize model
        schedule = self._get_schedule(
            self._config.get('schedule_%s' % systype.lower(), 'default'))
        if systype in schedule:
            model.optimize(schedule[systype])
        else:
            model.optimize()

        if not lid:
            rbmparams['units'].append(
                model.system.get('layer', 'visible'))
        rbmparams['links'].append(model.system._params['links'][(0, 1)])
        rbmparams['units'].append(model.system.get('layer', 'hidden'))

        prevsys = model.system

    # reset data to initial state (before transformation)
    dataset.set('copy', **dataset_backup)

    # keep original inputs and outputs
    mapping = system._get_mapping()
    inputs = system._units[mapping[0]].params['id']
    outputs = system._units[mapping[-1]].params['id']

    # initialize ann with rbm optimized parameters
    units = system._params['units']
    links = system._params['links']

    # initialize units and links until central unit layer
    cid = int((len(units) - 1) / 2)
    for id in range(cid):

        # copy unit parameters
        for attrib in list(units[id]['init'].keys()):
            # keep name and visibility of layers
            if attrib in ['layer', 'layer_id', 'visible', 'class']:
                continue
            # keep labels of hidden layers
            if attrib == 'id' and not units[id]['visible']:
                continue
            units[id][attrib] = units[id]['init'][attrib]
            units[-(id + 1)][attrib] = units[id][attrib]
        del units[id]['init']

        # copy link parameters and transpose numpy arrays
        for attrib in list(links[(id, id + 1)]['init'].keys()):
            if attrib in ['source', 'target']:
                continue
            links[(id, id + 1)][attrib] = \
                links[(id, id + 1)]['init'][attrib]
            links[(len(units) - id - 2, len(units) - id - 1)][attrib] = \
                links[(id, id + 1)]['init'][attrib].T
        del links[(id, id + 1)]['init']

    # initialize central unit layer
    for attrib in list(units[cid]['init'].keys()):
        # keep name and visibility of layers
        if attrib in ['id', 'layer', 'layer_id', 'visible', 'class']:
            continue
        units[cid][attrib] = units[cid]['init'][attrib]
    del units[cid]['init']

    # remove output units from input layer, and vice versa
    ui.info('cleanup unit and linkage parameter arrays.')
    system._remove_units(mapping[0], outputs)
    system._remove_units(mapping[-1], inputs)

    return True
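# Illustrative sketch (not part of the module): stripped of the rian-specific
# bookkeeping, the pretraining loop above follows the usual greedy layer-wise
# scheme: train an RBM on the current data representation, then push the data
# through its hidden expectations and train the next RBM on the result. The
# train_rbm() and transform() helpers below are hypothetical placeholders.
def greedy_pretrain(data, layer_sizes, train_rbm, transform):
    """Train a stack of RBMs layer by layer and return their parameters."""
    stack, current = [], data
    for n_hidden in layer_sizes:
        rbm = train_rbm(current, n_hidden)   # e.g. contrastive divergence
        stack.append(rbm)
        current = transform(rbm, current)    # hidden expectations as new data
    return stack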