def config(self, cfgdata=None):
    """Parse the configuration data and cache the output-file settings.

    Parameters:
        cfgdata: configuration source handed to Daisy.CfgParser; the parsed
            values are merged into self.cfg.

    Returns:
        True on success, False when no configuration data was supplied.
    """
    if cfgdata is None:
        self.LogError('Please provide configuration file!')
        # Fix: bail out instead of handing None to CfgParser, which would
        # fail with an unrelated error right after the log message.
        return False
    Daisy.CfgParser(self, cfgdata, self.cfg)
    self.foname = self.cfg['outputfile_name']   # output file name
    self.dspath = self.cfg['hdf5dataset_path']  # dataset path inside the HDF5 file
    return True
def initialize(self, default_init=None):
    """Create the DataStore service, start the Snoopy helper, and build the
    algorithms described by *default_init*.

    Parameters:
        default_init: optional configuration mapping; each key names an
            algorithm and maps to a dict with 'class_name' (required) and
            'init_paras' (optional) entries.
    """
    super().createSvc("PyDataStoreSvc/DataStore")
    self.snoopy = self.Snoopy()
    self.snoopy.config()
    self.snoopy.initialize()
    self.datastore = self.find("DataMemSvc").find('DataStore').data()
    if default_init is not None:
        self.LogDebug(
            str(type(default_init)) + ' value ' + str(default_init))
        init_cfg = Daisy.CfgParser(self, default_init)
        for key in init_cfg.keys():
            alg_name = key
            alg_class = init_cfg[key]['class_name']
            alg = self.createAlg(alg_class + '/' + alg_name)
            # Fix: only the 'init_paras' lookup belongs in the try block.
            # Previously the algorithm's initialize(default_init=...) call
            # was inside it too, so a KeyError raised by the algorithm's own
            # initialization was silently swallowed and the algorithm was
            # re-initialized without parameters.
            try:
                algParas = init_cfg[key]['init_paras']
            except KeyError as error:
                self.LogDebug(error)
                self[alg_name].initialize()
            else:
                self.LogDebug('name: ' + str(alg_name) + ' class: ' +
                              str(alg_class) + " parameters: " +
                              str(algParas) + ' type of ' +
                              str(type(algParas)))
                self[alg_name].initialize(default_init=algParas)
def config(self, cfgdata=None):
    """Parse the configuration data and push the Fit2D detector geometry
    onto the azimuthal integrator (self.ai)."""
    if cfgdata is None:
        self.LogError('Please provide configuration file!')
    Daisy.CfgParser(self, cfgdata, self.cfg)
    # Collect the geometry parameters in one place, then apply them.
    geometry = dict(
        directDist=self.cfg['directDist'],
        centerX=self.cfg['centerX'],
        centerY=self.cfg['centerY'],
        tilt=self.cfg['tilt'],
        tiltPlanRotation=self.cfg['PlanRotation'],
        pixelX=self.cfg['pixelX'],
        pixelY=self.cfg['pixelY'],
        splineFile=None,
    )
    self.ai.setFit2D(**geometry)
    return True
def config(self, cfgdata=None):
    """Parse the configuration data and validate the TIF input directory.

    Returns:
        True when the configured input path exists, False otherwise.
    """
    if cfgdata is None:
        self.LogError('Please provide configuration file!')
    Daisy.CfgParser(self, cfgdata, self.cfg)
    self.ipath = self.cfg['input_path']
    # Guard clause: report and fail fast when the directory is missing.
    if not os.path.isdir(self.ipath):
        self.LogError('initiallized, Path ' + self.ipath + ' does not exist')
        return False
    self.LogInfo("initialized, read TIF Files in Path: " + self.ipath)
    return True
def initialize(self, default_init=None):
    """Start the Snoopy helper, register the MQ-backed DataStore operator,
    and create/initialize any algorithms listed in *default_init*.

    Parameters:
        default_init: optional configuration mapping; each key names an
            algorithm and maps to a dict with 'class_name' and 'init_paras'.
    """
    self.snoopy = self.Snoopy()
    self.snoopy.config()
    self.snoopy.initialize()
    store = Daisy.Base.PyMQDataStore.PyMQDataStore('DataStore')
    self.operators['DataStore'] = store
    self.datastore = store.data()
    if default_init is None:
        return
    init_cfg = Daisy.CfgParser(self, default_init)
    for alg_name in init_cfg.keys():
        entry = init_cfg[alg_name]
        alg = self.createAlg(entry['class_name'] + '/' + alg_name)
        self[alg_name].initialize(default_init=entry['init_paras'])
def initialize(self, workflow_engine=None, default_init=None,
               workflow_environment=None, algorithms_cfg=None):
    """Create and initialize the underlying workflow engine.

    Parameters:
        workflow_engine: class name of the engine to create; may instead be
            supplied via default_init['workflow_engine'].
        default_init: optional mapping that can carry 'workflow_engine'
            and 'workflow_environment' entries.
        workflow_environment: mapping forwarded to the engine's initialize().
        algorithms_cfg: mapping forwarded to the engine's config().
    """
    # Fix: use None sentinels instead of mutable {} defaults — a shared
    # default dict persists across calls and can leak state between them.
    if workflow_environment is None:
        workflow_environment = {}
    if algorithms_cfg is None:
        algorithms_cfg = {}
    if default_init is not None:
        if workflow_engine is None:
            if 'workflow_engine' in default_init:
                workflow_engine = default_init['workflow_engine']
            else:
                self.LogError('Please enter Workflow Engine name!')
        if 'workflow_environment' in default_init:
            workflow_environment = default_init['workflow_environment']
    elif workflow_engine is None:
        # Fix: corrected misspelled error message ('Engin' -> 'Engine').
        self.LogError('Please enter Workflow Engine name!')
    self.engine = Daisy.CreateWorkflowEngine(class_name=workflow_engine,
                                             name=self.objName())
    self.engine.initialize(default_init=workflow_environment)
    self.engine.config(default_cfg=algorithms_cfg)
def config(self, cfgdata=None):
    """Parse the configuration data and open the HDF5 input file for reading.

    The open handle is kept on self.hdf5 for use by later calls, so a
    context manager is deliberately not used here.
    """
    if cfgdata is None:
        self.LogError('Please provide configuration file!')
    Daisy.CfgParser(self, cfgdata, self.cfg)
    self.finame = self.cfg['inputfile_name']
    self.hdf5 = h5py.File(self.finame, 'r')
    self.LogInfo("initialized, read HDF5 File: "+self.finame)
    return True
def config(self, cfgdata=None):
    """Parse the configuration data and verify that the configured shell
    command exists on the PATH (via ``which``).

    Returns:
        True when the command was found, False otherwise.
    """
    if cfgdata is None:
        self.LogError('Please provide configuration file!')
    Daisy.CfgParser(self, cfgdata, self.cfg)
    self.cmd = self.cfg['shell_command']
    # Fix: catch only the expected failures ('shell' key absent, or its
    # value not a string) instead of a bare except that hides every error.
    try:
        if self.cfg['shell'].upper() == 'TRUE':
            self.shell = True
    except (KeyError, AttributeError):
        pass
    ret = self.__shell.run(['which', self.cmd],
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if ret.returncode == 0:
        # [:-1] strips the trailing newline from the command output.
        self.LogInfo("Find " + self.cmd + ' at ' + ret.stdout.decode()[:-1])
        return True
    self.LogError("Can not find " + self.cmd + ' error message: ' +
                  ret.stderr.decode()[:-1])
    return False
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# author: tianhl
# using the new Workflow Engine for data analysis.
import Daisy

# Algorithm instances to create: name -> implementing class.
init_dict = {
    'ls': {'class_name': 'PyAlgorithms.AlgCommandline'},
    'pwd': {'class_name': 'PyAlgorithms.AlgCommandline'},
}

# Per-algorithm configuration: name -> shell command it wraps.
cfg_dict = {
    'ls': {'shell_command': 'ls'},
    'pwd': {'shell_command': 'pwd'},
}

if __name__ == "__main__":
    wf = Daisy.CreateWorkflowEngine(class_name='PyWorkflowEngine',
                                    name='workflow')
    wf.initialize(default_init=init_dict)
    wf.config(default_cfg=cfg_dict)
    wf.setLogLevel(3)
    wf['pwd'].execute()
    wf['ls'].execute(parameters=['-al'])
    wf.finalize()
def config(self, default_cfg=None):
    """Forward each per-algorithm configuration dict to the named algorithm.

    Parameters:
        default_cfg: optional mapping of algorithm name -> configuration
            dict; each entry is passed to that algorithm's config().
    """
    if default_cfg is None:
        return
    cfg = Daisy.CfgParser(self, default_cfg)
    for name in cfg.keys():
        self.LogTest(name + ' configure with dict ' + str(cfg[name]))
        self[name].config(cfg[name])