Code Example #1
def start_workers(data_queues, settings, job_queue):
    global threads
    main_logger.info("Starting Workers")
    # Spawn a single daemon Worker thread and register it in the
    # module-level `threads` list so the service can track it later.
    worker = Worker(data_queues, settings, job_queue)
    worker.daemon = True
    worker.start()
    threads.append({'name': 'Workers', 'thread': worker})
    return worker
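
A minimal usage sketch, assuming hypothetical stand-ins for the `Worker` class, `main_logger`, and the global `threads` list that the excerpt relies on (none of these definitions appear in the snippet itself):

import logging
import queue
import threading

main_logger = logging.getLogger("service")   # stand-in logger (assumption)
threads = []                                  # module-level registry used by start_workers

class Worker(threading.Thread):
    # Placeholder worker: drains job_queue until it sees a None sentinel.
    def __init__(self, data_queues, settings, job_queue):
        super().__init__()
        self.job_queue = job_queue
    def run(self):
        while True:
            if self.job_queue.get() is None:
                break

job_queue = queue.Queue()
worker = start_workers(data_queues={}, settings={}, job_queue=job_queue)
job_queue.put(None)        # ask the placeholder worker to exit
worker.join(timeout=5)     # daemon threads would also die with the process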
Code Example #2
File: tabzilla.py  Project: alexbuz/TabZilla
	def runWorker(self, w):
		# Build a composite Worker from the sub-workers declared under w['node'],
		# then run it synchronously or asynchronously depending on FLOW_TYPE.
		import copy
		from lib.worker import Worker

		wo = Worker(self._environment, self._logger)
		for subworker_id in sorted(w['node'].keys()):
			subworker_type = w['node'][subworker_id]['type']
			confirm(subworker_type in self._pipeline_meta['worker'],
					"Specified worker type <%s> is not listed in pipeline meta." % subworker_type)
			confirm(subworker_type in self._workers,
					"Specified worker type <%s> is not defined in pipeline workers." % subworker_type)

			subworker_name = list(w['node'][subworker_id]['node'].keys())[0]
			confirm(subworker_name in self._pipeline_meta['worker'][subworker_type],
					"Specified worker <%s> of type <%s> is not defined in pipeline meta." % (subworker_name, subworker_type))

			subworker_attr = self._pipeline_meta['worker'][subworker_type][subworker_name]['attr']
			subworker_key = '%s.%s' % (subworker_attr['module_name'], subworker_attr['name'])
			confirm(subworker_key in self._workers[subworker_type],
					"Specified worker <%s> of type <%s> is not defined in pipeline workers." % (subworker_key, subworker_type))

			subworker_obj = self._workers[subworker_type][subworker_key]
			confirm(subworker_obj is not None, 'Worker object <%s> is not set.' % subworker_key)

			# Copy the registered sub-worker, attach its ETL definition and
			# add it to the composite worker.
			subwo = copy.copy(subworker_obj)
			etl_object = w['node'][subworker_id]['node'][subworker_attr['name']]
			etl_object['name'] = w['attr']['name']
			subwo._etl_object = etl_object
			wo.add(subwo)

		wo.set(w)

		# FLOW_TYPE decides whether the worker runs in its own thread (ASYNC,
		# via start()) or inline in the caller's thread (SYNC, via run()).
		ft = wo.get_p('FLOW_TYPE', 'ASYNC')
		if ft == 'ASYNC':
			wo.start()
		elif ft == 'SYNC':
			wo.run()
		else:
			self._logger.error('Unknown FLOW_TYPE %s.' % ft)
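
The ASYNC/SYNC branch above maps directly onto threading.Thread semantics: start() runs run() in a new thread, while calling run() directly executes the work inline in the caller's thread. A minimal, self-contained sketch of that dispatch; the Worker class and get_p helper below are hypothetical stand-ins, not the TabZilla classes:

import threading

class Worker(threading.Thread):
    # Hypothetical stand-in: a thread whose run() does the pipeline work.
    def __init__(self, params=None):
        super().__init__()
        self._pp = params or {}

    def get_p(self, key, default=None):
        # Stand-in for the parameter lookup used in the excerpt.
        return self._pp.get(key, default)

    def run(self):
        print("working in thread:", threading.current_thread().name)

def dispatch(wo):
    ft = wo.get_p('FLOW_TYPE', 'ASYNC')
    if ft == 'ASYNC':
        wo.start()   # run() executes in a new thread
    elif ft == 'SYNC':
        wo.run()     # run() executes inline, blocking the caller
    else:
        raise ValueError('Unknown FLOW_TYPE %s' % ft)

dispatch(Worker({'FLOW_TYPE': 'SYNC'}))
dispatch(Worker({'FLOW_TYPE': 'ASYNC'}))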
Code Example #3
File: service.py  Project: williamkunz/unmanic
def start_workers(data_queues, settings, job_queue):
    main_logger.info("Starting Workers")
    # Same pattern as Example #1, but the worker is not registered in a
    # module-level threads list; the caller just receives the thread back.
    worker = Worker(data_queues, settings, job_queue)
    worker.daemon = True
    worker.start()
    return worker
Code Example #4
File: main.py  Project: DSHQ/redis-submitter


from core.model import Model
from lib.worker import Worker
from conf.database import redis
import json
import logging

from data.default import template

db = Model()

# Start a single background worker thread.
worker = Worker()
worker.name = 'Thread 1'
worker.start()
# worker.join()

pattern = template()

# Subscribe to every configured Redis pub/sub pattern.
subs = db.redis.pubsub()
for pat in pattern.pattern_subscriber:
    subs.psubscribe(pat)

logging.info("start listening")

# Block on the subscription and handle incoming messages.
for item in subs.listen():
    # print(item)
    if item['pattern'] is not None: