def process_dataset(options, _dataset, processors, *args, **kw):
    """Given options, an Information Set and a list of processor config
    filenames, execute each processor over the Information Set.
    """
    for pr in processors:
        # resolve the processor config file, falling back to the process path
        if not exists(pr):
            pr = join(options.process_path, pr)
        if not exists(pr):
            logger.error('E01#001: Processor config file does not exist (%s)', pr)
            raise ValueError('E:MKEL:001')
        cfg = udict(cfg2hash(pr))
        if 'PROCESSOR' in cfg:
            pcfg = udict(cfg.xget('PROCESSOR'))
            pname = pcfg.xget('NAME')
            if not pname:
                logger.error('E01#002: Key NAME missing in processor config (%s)', pr)
                continue
            processor = getProcessor(pname, _dataset, config=pcfg)
            if processor:
                # each processor receives the dataset as left by the previous one
                _dataset = processor.execute()
            else:
                logger.error('E01#003: Processor does not exist (%s)', pname)
    return _dataset
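
# Illustrative sketch only: the config layout and the call below are assumptions
# inferred from how process_dataset() reads the file (cfg2hash plus the PROCESSOR
# section); key names other than PROCESSOR/NAME and the file name are hypothetical.
#
#   [PROCESSOR]
#   NAME = SOME_PROCESSOR   ; must match a processor known to getProcessor()
#   ; any other keys in this section reach the processor through config=pcfg
#
#   _dataset = process_dataset(options, _dataset, ['smooth.cfg'])
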
    def _inline_processor(self, _ts, name, kw):
        """Inline processor: run INLINE_PROCESSOR over a single series or,
        when no name is given, over every series of the Information Set.
        """
        if kw['PROC']:
            # logger.debug('calling processor functions: %s on %s', kw['PROC'], name)
            names = (name,) if name else _ts.keys()
            for name in names:
                # re-create the processor on every pass so it receives the
                # Information Set as modified by the previous execution
                proc = getProcessor('INLINE_PROCESSOR', _ts, config=kw)
                _ts = proc.execute(name)  # returns the full results
        return _ts
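
    # Sketch of the expected call shape, assuming kw is the keyword mapping built
    # by the caller (its PROC entry selects the inline processing to apply); the
    # series name below is purely illustrative:
    #
    #   _ts = self._inline_processor(_ts, 'AGINDEMIF', kw)   # a single series
    #   _ts = self._inline_processor(_ts, None, kw)          # every series in _ts
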
        else:
            # record the requested instruments as missing
            if 'NAME' in kw and kw['NAME']:
                self._missing.extend(kw['NAME'].split(','))
            else:
                self._missing.extend([r['Instrument'] for r in reqs])
            return

        # ...if post-process
        if kw['PROC']:
            # logger.debug('calling inline processor functions: %s', kw['PROC'])
            for name in _ts.keys():
                logger.debug('calling processor functions: %s for %s', kw['PROC'], name)
                # re-take the processor on every pass so it receives the
                # Information Set as modified by the previous operation
                proc = getProcessor('INLINE_PROCESSOR', self._res, config=kw)
                self._res = proc.execute(name)  # returns the full results
        return self._res


class DatastreamProvider(WithSourceProvider):
    """Data provider facade for the Thomson Datastream (DWE) source.

    >>> dwe = DatastreamProvider()
    >>> dwe.append('dstream://Datastream/AGINDEMIF')
    >>> dwe.request()
    <DataSet of len 1: AGINDEMIF(13)>
    """