from queue import Queue, Empty
from threading import Thread


class WorkerFeeder():
    def __init__(self, args, log, dm, end_event, name):
        self._args, self._dm = args, dm
        self._log = log
        self._end_event = end_event
        self._name = name
        self.input = Queue(maxsize=1)
        self.thread = Thread(target=self._enter)
        self.thread.name = self._name
        self.thread.start()

    def wait(self):
        assert self.thread is not None
        self.thread.join()

    def _enter(self):
        self._log('Starting WorkerFeeder', self._name)
        self._worker = Worker(self._args, self._log, self._dm)
        # Drain the input queue until the end event is set and no items remain.
        while not self.input.empty() or not self._end_event.is_set():
            item = None
            try:
                item = self.input.get(timeout=0.01)
            except Empty:
                continue
            if item:
                direction, item = item
                if direction == 'enc':
                    self._worker.process_enc(*item)
                else:
                    self._worker.process_clear(*item)
        self._log('Ending WorkerFeeder', self._name)

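A minimal usage sketch for WorkerFeeder, assuming the surrounding module provides the Worker class used in _enter; the args, dm, and payload values below are hypothetical stand-ins:

import threading

end_event = threading.Event()
feeder = WorkerFeeder(args={}, log=print, dm=None, end_event=end_event, name='feeder-0')
feeder.input.put(('enc', ('payload',)))  # queued items are (direction, argument-tuple) pairs
end_event.set()                          # signal shutdown once everything has been queued
feeder.wait()                            # join the feeder thread
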
def start_workers(data_queues, settings, job_queue):
    global threads
    main_logger.info("Starting Workers")
    worker = Worker(data_queues, settings, job_queue)
    worker.daemon = True
    worker.start()
    threads.append({'name': 'Workers', 'thread': worker})
    return worker

def addbn(pt='../resnet-cifar10-caffe/resnet-56/prb_mem_bn_trainval.prototxt',
          model="../resnet-cifar10-caffe/resnet-56/snapshot/prb_VH_bn__iter_64000.caffemodel"):
    worker = Worker()

    def ad(pt, model):
        net = Net(pt, model=model, noTF=1)
        #net.computation()
        pt, WPQ = net.add_bn()
        return {'new_pt': pt, 'model': model, 'WPQ': WPQ}

    outs = worker.do(ad, pt=pt, model=model)
    worker.do(stepend, **outs)

def runWorker(self, w):
    from lib.worker import Worker
    wo = Worker(self._environment, self._logger)
    for subworker_id in sorted(w['node'].keys()):
        subworker_type = w['node'][subworker_id]['type']
        confirm(self._pipeline_meta['worker'].has_key(subworker_type),
                "Specified worker type <%s> is not listed in pipeline meta." % subworker_type)
        confirm(self._workers.has_key(subworker_type),
                "Specified worker type <%s> is not defined in pipeline workers." % subworker_type)
        subworker_name = w['node'][subworker_id]['node'].keys()[0]
        print subworker_name
        confirm(self._pipeline_meta['worker'][subworker_type].has_key(subworker_name),
                "Specified worker <%s> of type <%s> is not defined in pipeline meta." % (subworker_name, subworker_type))
        subworker_attr = self._pipeline_meta['worker'][subworker_type][subworker_name]['attr']
        subworker_key = '%s.%s' % (subworker_attr['module_name'], subworker_attr['name'])
        confirm(self._workers[subworker_type].has_key(subworker_key),
                "Specified worker <%s> of type <%s> is not defined in pipeline workers." % (subworker_key, subworker_type))
        subworker_obj = self._workers[subworker_type][subworker_key]
        # The original check compared type() against the string 'instance', which is always
        # False; the intent is to confirm the worker object has actually been set.
        confirm(subworker_obj is not None, 'Worker object <%s> is not set.' % subworker_key)
        subwo = copy.copy(subworker_obj)
        # Attach the ETL object for this subworker.
        etl_object = w['node'][subworker_id]['node'][subworker_attr['name']]
        etl_object['name'] = w['attr']['name']
        subwo._etl_object = etl_object
        wo.add(subwo)
    wo.set(w)
    ft = wo.get_p('FLOW_TYPE', 'ASYNC')
    if ft == 'ASYNC':
        wo.start()
    elif ft == 'SYNC':
        wo.run()
    else:
        self._logger.error('Unknown FLOW_TYPE %s.' % ft)

def run_worker(self, count=None):
    try:
        self.log.debug("Manager starts running %s workers." % self.worker_count)
        if self.worker_log is None:
            self.log.debug("Set worker logger to manager's logger.")
            self.worker_log = self.log
        if count is not None:
            self.worker_count = int(count)
        workers = [Worker(self.worker_log, self.task_queue, self.finished_task_queue,
                          self.pid_queue, self.rest_time, self.stop_signal)
                   for i in xrange(self.worker_count)]
        for worker in workers:
            worker.start()
            self.workers[worker.name] = worker
            self.workers_pid[worker.name] = worker.pid
            self.log.debug("%s PID = %s" % (worker.name, worker.pid))
        time.sleep(1)
        return True
    except Exception as e:
        self.log.error("Manager failed to run workers. %s" % e)
        return False

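A minimal setup sketch for the objects run_worker expects, assuming the queues are multiprocessing queues and stop_signal is an Event; the snippet does not show how they are constructed, so the names below are illustrative:

import multiprocessing

task_queue = multiprocessing.Queue()            # tasks to be consumed by the workers
finished_task_queue = multiprocessing.Queue()   # results reported back by the workers
pid_queue = multiprocessing.Queue()             # worker PIDs, as logged in the snippet above
stop_signal = multiprocessing.Event()           # set this to ask the workers to exit

for task in ['job-1', 'job-2']:
    task_queue.put(task)
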
def main():
    """ main """
    appname = "winwin"
    parser = get_option_parser(appname)
    options, args = parser.parse_args()
    conf_file = os.path.abspath(options.config)
    conf = get_conf(conf_file)
    setup_logger(conf['logging'])
    log.debug("start...")
    worker = Worker(conf)
    worker.run()

def c3(pt=cfgs.vgg.model, model=cfgs.vgg.weights):
    # TODO: Consider changing cfgs.vgg.model and cfgs.vgg.weights (paths to the
    # .prototxt and .caffemodel files) for a generic model reference -by Mario
    dcfgs.splitconvrelu = True
    cfgs.accname = 'accuracy@5'  # name of layer in the prototxt -by Mario

    def solve(pt, model):
        net = Net(pt, model=model)
        net.load_frozen()  # this method can load images from memory if we pass a feats_dic. For what? -by Mario
        WPQ, new_pt = net.R3()
        return {"WPQ": WPQ, "new_pt": new_pt}

    def stepend(new_pt, model, WPQ):
        net = Net(new_pt, model=model)
        net.WPQ = WPQ
        net.finalmodel(save=False)  # load weights into the caffemodel -by Mario
        net.dis_memory()
        #final = net.finalmodel(WPQ, prefix='3r')
        new_pt, new_model = net.save(prefix='3c')
        print('caffe test -model', new_pt, '-weights', new_model)
        return {"final": None}

    worker = Worker()
    outputs = worker.do(step0, pt=pt, model=model)
    printstage("freeze")
    pt = outputs['pt']
    outputs = worker.do(step1, **outputs)
    printstage("speed", dcfgs.dic.keep)
    outputs['pt'] = mem_pt(pt)
    if 0:
        outputs = solve(**outputs)
    else:
        outputs = worker.do(solve, **outputs)
    printstage("saving")
    outputs = worker.do(stepend, model=model, **outputs)

import signal


class Controller(object):
    NEED_EXIT = False
    SIGNAL_HANDLE = None

    def __init__(self, func, data):
        if not Controller.SIGNAL_HANDLE:
            Controller.signal_init()
            Controller.SIGNAL_HANDLE = Controller.interrupt_handler
        self.cmdMgr = GetCmdMgr()
        self.worker = Worker(func, data)
        self.worker.set_checkFunc(self.get_state)

    def get_state(self):
        return Controller.NEED_EXIT

    def run_cmd(self, cmd):
        func = self.cmdMgr.get_cmd(cmd[0])
        if not func:
            print('Invalid command')
            return
        if self.cmdMgr.is_builtin_cmd(cmd[0]):
            Execute(func, cmd[1:], None)
        else:
            self.worker.run(func, cmd[1:])

    def get_output(self):
        pass

    @classmethod
    def interrupt_handler(cls, signum, frame):
        cls.NEED_EXIT = True

    @classmethod
    def signal_init(cls):
        signal.signal(signal.SIGINT, cls.interrupt_handler)
        signal.signal(signal.SIGTERM, cls.interrupt_handler)

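A minimal usage sketch for Controller, assuming GetCmdMgr, Worker, and Execute come from the surrounding project; the handler function and command below are hypothetical:

def handler(data, argv):
    # hypothetical entry point the Worker would invoke
    print('running with', argv)

controller = Controller(handler, data={})
controller.run_cmd(['status'])   # resolved via the command manager, then run built-in or on the worker
# SIGINT/SIGTERM set Controller.NEED_EXIT, which the worker polls through get_state()
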
from Queue import Queue
from threading import Lock

import requests

# headers, cookies, parser_product and Worker are defined elsewhere in the original module.


def get_product(start_url):
    product_list = []
    for url in start_url:
        resp = requests.get(url, headers=headers, cookies=cookies)
        product_list += parser_product(resp.content)
    print len(product_list)
    return product_list


if __name__ == '__main__':
    start_url = [
        'https://list.tmall.com/search_product.htm?q=%C5%AE%D7%B0&type=p&vmarket=&spm=875.7931836%2FB.a2227oh.d100&from=mallfp..pc_1_searchbutton',
        'https://list.tmall.com/search_product.htm?q=%C4%D0%D7%B0&type=p&spm=875.7931836%2FB.a2227oh.d100&from=mallfp..pc_1_searchbutton',
        'https://list.tmall.com/search_product.htm?q=%C5%AE%D0%AC&type=p&spm=a220m.1000858.a2227oh.d100&from=.list.pc_1_searchbutton',
        'https://list.tmall.com/search_product.htm?q=%C4%D0%D0%AC&type=p&spm=a220m.1000858.a2227oh.d100&from=.list.pc_1_searchbutton',
    ]
    file_lock = Lock()
    for i in start_url:
        product_list = get_product(i)
    task_queue = Queue()
    for p in product_list:
        task_queue.put(p)
    thread = []
    for i in range(4):
        thread.append(Worker(file_lock, task_queue))
    for i in thread:
        i.start()
    for i in thread:
        i.join()

def worker(self):
    worker_instance = Worker()
    worker_instance.set_config(self.config)
    worker_instance.set_redis_connection(self.get_redis_connection())
    return worker_instance

__author__ = 'ob3'

from core.model import Model
from lib.worker import Worker
from conf.database import redis
import json
import logging

from data.default import template

db = Model()

worker = Worker()
worker.setName('Thread 1')
worker.start()
# worker.join()

pattern = template()
subs = db.redis.pubsub()
for pat in pattern.pattern_subscriber:
    subs.psubscribe(pat)

logging.info("start listening")
for item in subs.listen():
    # print item
    pass

from lib.worker import Worker
from doubles import ObjectDouble, allow
from lib.assembly_line import Assembly_Line

al = ObjectDouble(Assembly_Line)
w = Worker()


def test_worker_initializes_with_no_components():
    assert w.components == []


def test_worker_initializes_in_collecting_mode():
    assert w.collecting_mode


def test_worker_initializes_without_a_completed_widget():
    assert w.widgets is None


def test_worker_stores_new_component():
    w.store_component('A')
    assert w.components == ['A']


def test_worker_with_two_components_not_in_collecting_mode():
    w.store_component('B')
    assert not w.collecting_mode


w2 = Worker()

def magicExecutor(self):
    self.threadpool.start(Worker(lambda: startSensor(commandport)))

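A minimal sketch of the kind of Worker this call assumes: a QRunnable wrapping a callable that QThreadPool runs on a pool thread (PyQt5 is an assumption; the project's actual Worker class is not shown):

from PyQt5.QtCore import QRunnable


class Worker(QRunnable):
    def __init__(self, fn):
        super().__init__()
        self.fn = fn   # the callable to execute

    def run(self):
        # called by QThreadPool on a pool thread
        self.fn()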