def fromconfig(data_=None, prior_=None):
    """Create a learner instance based on configuration parameters.

    Reads ``learner.type`` from the global config.  If it contains a
    ':' it is treated as a custom-learner URL spec and a CustomLearner
    is returned; otherwise it is interpreted as
    ``<module>.<class>`` inside the ``pebl.learner`` package, which is
    imported and instantiated.

    :param data_: dataset to learn from (defaults to ``data.fromconfig()``)
    :param prior_: prior model (defaults to ``prior.fromconfig()``)
    :return: a learner instance of the configured type
    """
    learnertype = config.get('learner.type')
    if ':' in learnertype:
        # BUG FIX: the original built the CustomLearner but never
        # returned it, so this branch always yielded None to callers.
        return CustomLearner(
            data_ or data.fromconfig(),
            prior_ or prior.fromconfig(),
            learnerurl=learnertype
        )
    else:
        learnermodule, learnerclass = learnertype.split('.')
        mymod = __import__("pebl.learner.%s" % learnermodule,
                           fromlist=['pebl.learner'])
        mylearner = getattr(mymod, learnerclass)
        return mylearner(data_ or data.fromconfig(),
                         prior_ or prior.fromconfig())
def run(self):
    """Re-create the custom learner from its saved source, run it, and
    return its result.

    Writes ``self.learner_source`` to a module file in a fresh temp
    directory, imports the configured class from it, instantiates it
    with this task's data/prior/kwargs, and runs it.  The temp
    directory and its ``sys.path`` entry are always removed, even when
    the learner raises.
    """
    # re-create the custom learner in an isolated temp directory
    tempdir = tempfile.mkdtemp()
    sys.path.insert(0, tempdir)
    try:
        # `open` instead of the Python-2-only `file` builtin
        with open(os.path.join(tempdir, self.learner_filename), 'w') as f:
            f.write(self.learner_source)

        modname = self.learner_filename.split('.')[0]
        mod = __import__(modname, fromlist=['*'])
        reload(mod)  # to load the latest if an older version exists
        custlearner = getattr(mod, self.learner_class)

        # run the custom learner
        clearn = custlearner(
            self.data or data.fromconfig(),
            self.prior or prior.fromconfig(),
            **self.kw
        )
        self.result = clearn.run()
    finally:
        # BUG FIX: the original only cleaned up on success, leaking the
        # temp dir and a stale sys.path entry when the learner raised.
        sys.path.remove(tempdir)
        shutil.rmtree(tempdir, ignore_errors=True)

    return self.result
def __init__(self, data_=None, prior_=None, whitelist=tuple(), blacklist=tuple(), **kw):
    """Initialize the learner.

    :param data_: dataset (defaults to ``data.fromconfig()``)
    :param prior_: prior model (defaults to ``prior.fromconfig()``)
    :param whitelist: edges that must be present in learned networks
    :param blacklist: edges that must not be present in learned networks
    :param kw: extra attributes stored directly on the instance
    """
    self.data = data_ or data.fromconfig()
    self.prior = prior_ or prior.fromconfig()

    # BUG FIX: whitelist/blacklist are explicit parameters, so they can
    # never appear in **kw; the original kw.pop('blacklist'/'whitelist')
    # calls always returned the default and silently discarded the
    # caller-supplied edge lists.
    self.black_edges = blacklist
    self.white_edges = whitelist

    self.__dict__.update(kw)

    # parameters
    self.numtasks = config.get('learner.numtasks')

    # stats: counts of edge operations performed during learning
    self.reverse = 0
    self.add = 0
    self.remove = 0
def fromconfig(data_=None, network_=None, prior_=None):
    """Create an evaluator based on configuration parameters.

    Any argument not supplied is constructed from the global
    configuration (the network is derived from the dataset).  Datasets
    containing missing values get the configured missing-data
    evaluator; complete datasets get a SmartNetworkEvaluator.
    """
    dataset = data_ or data.fromconfig()
    net = network_ or network.fromdata(dataset)
    pr = prior_ or prior.fromconfig()

    if not dataset.missing.any():
        return SmartNetworkEvaluator(dataset, net, pr)

    evaluator_cls = _missingdata_evaluators[
        config.get('evaluator.missingdata_evaluator')]
    return evaluator_cls(dataset, net, pr)