class ExecutionCache:
    """Cache of per-host results computed by a background pipeline.

    Targets are scheduled through ``SchedulerClass``; a fetcher thread drains
    the pipeline and stores each result under ``target.host``.  Results for
    targets that have since been removed are dropped.

    Thread-safety fix: the fetcher now takes ``_update_lock`` around the
    membership check and the write, so ``update_targets`` cannot remove a
    target (and purge its cached result) between the two.
    """

    def __init__(self, function, jobs, SchedulerClass):
        self._results = dict()          # host -> latest result
        self._targets = set()           # currently active targets
        self._update_lock = Lock()      # guards _targets and _results
        self._runner = Pipeline(UnorderedStage(function, jobs))

        def fetch():
            # Drain pipeline results until the pipeline is shut down.
            for target, result in self._runner.results():
                with self._update_lock:
                    if target in self._targets:
                        self._results[target.host] = result
                    else:
                        print(f'dropping obsolete result for {target}')

        self._fetcher = Thread(target=fetch)
        self._fetcher.start()
        self.scheduler = SchedulerClass(self._runner)

    def teardown(self):
        """Cancel all scheduled work, stop the pipeline and join the fetcher."""
        self.scheduler.cancel_all()
        self._runner.put(None)  # end-of-input: lets fetch()'s loop terminate
        self._fetcher.join()

    def update_targets(self, targets):
        """Replace the active target set, scheduling/unscheduling the diff.

        Cached results are purged only for hosts that no longer appear in any
        target (a host may be reachable via several targets).
        """
        with self._update_lock:
            new_targets = set(targets)
            old_targets = self._targets
            # Derive host sets from the materialized sets, not from the
            # `targets` argument: if `targets` were an iterator it would
            # already be exhausted by set(targets) above.
            removed_hosts = ({t.host for t in old_targets}
                             - {t.host for t in new_targets})
            self._targets = new_targets
            for target in old_targets - new_targets:
                self.scheduler.remove(target)
                if target.host in removed_hosts:
                    self._results.pop(target.host, None)
            for target in new_targets - old_targets:
                self.scheduler.add(target)

    def __getitem__(self, key):
        """Return the cached result for host ``key`` (raises KeyError if absent)."""
        return self._results[key]
def __init__(self, function, jobs, SchedulerClass):
    """Set up the result cache, the worker pipeline and the fetcher thread.

    ``function`` is executed by ``jobs`` unordered workers; a daemon-style
    helper thread copies each still-wanted result into the per-host cache.
    """
    self._results = {}
    self._targets = set()
    self._update_lock = Lock()
    self._runner = Pipeline(UnorderedStage(function, jobs))

    def drain():
        # Keep only results whose target is still registered.
        for target, result in self._runner.results():
            if target not in self._targets:
                print(f'dropping obsolete result for {target}')
            else:
                self._results[target.host] = result

    self._fetcher = Thread(target=drain)
    self._fetcher.start()
    self.scheduler = SchedulerClass(self._runner)
def sub2Pdf(path, backup, remove_dir, debug):
    """Convert each sub-directory of *path* into a PDF via a pipeline.

    path: root directory; each sub-directory holds one document's images.
    backup: when true, also route images through a MakeBackup stage and
        ensure a BACKUP directory exists under *path*.
    remove_dir: when true, delete the processed sub-directories afterwards.
    debug: when true, print progress information.
    """
    stage1 = Stage(GetImages, 2)
    stage2 = Stage(MakePdf, 7)
    if backup:
        stage3 = Stage(MakeBackup, 7)
        stage1.link(stage3)
        # os.path.join yields the same path as the old path + '\\BACKUP' on
        # Windows while staying portable; exist_ok avoids the racy
        # exists()-then-makedirs() pattern.
        os.makedirs(os.path.join(path, 'BACKUP'), exist_ok=True)
    stage1.link(stage2)
    pipe = Pipeline(stage1)
    lt_dir = get_subdiretories(path)
    for folder in lt_dir:
        pipe.put(folder)
    pipe.put(None)
    if debug:
        print("Converting to pdf :\n%s\n" % ('Backup activated' if backup else 'Backup deactivated'))
    # Draining results() waits until every task has completed.
    for res in pipe.results():
        if debug:
            try:
                print(' Done :=> ' + res)
            except TypeError:
                # Best-effort progress output: skip non-string results
                # instead of swallowing every possible exception.
                pass
    if remove_dir:
        delete_dirs(lt_dir)
    print('\nFinished treating : %s' % get_leaf_from_path(path))
def main():
    """Feed 0..9 through a stage whose results are disabled, then print
    whatever (if anything) comes out of results()."""
    pipe = Pipeline(OrderedStage(yes, disable_result=True))
    for task in list(range(10)) + [None]:
        pipe.put(task)
    for item in pipe.results():
        print(item)
def main():
    """Build a two-stage pipeline (f2 -> f3) and feed it five tasks."""
    first = OrderedStage(f2, size=2)
    second = OrderedStage(f3)
    first.link(second)
    pipeline = Pipeline(first)

    def feed():
        # The trailing None marks end-of-input for the pipeline.
        for item in (1, 2, 3, 4, 5, None):
            pipeline.put(item)

    feed()
def evaluate_workflow(stages, inp):
    """Run *inp* through a pipeline rooted at stages[0].

    Returns the pipeline's results iterator (lazy; caller drains it).
    """
    workflow = Pipeline(stages[0])
    workflow.put(inp)
    workflow.put(None)  # end-of-input marker
    return workflow.results()
def main():
    """Trickle twelve numbers into a two-worker echo stage, 10 ms apart."""
    pipeline = Pipeline(OrderedStage(echo, 2))
    for value in range(12):
        pipeline.put(value)
        time.sleep(0.010)
    pipeline.put(None)
def main():
    """Push 0..9 into an Echo stage that also receives the stop task."""
    pipeline = Pipeline(Stage(Echo, do_stop_task=True))
    for task in list(range(10)) + [None]:
        pipeline.put(task)
def main():
    """Run 0..9 through a single Adder worker (number=5) and print results."""
    pipeline = Pipeline(Stage(Adder, 1, number=5))
    for task in range(10):
        pipeline.put(task)
    pipeline.put(None)
    for result in pipeline.results():
        print(result)
def main():
    """Throttle an echo stage to at most two in-flight tasks."""
    throttled = FilterStage((OrderedStage(echo),), max_tasks=2)
    pipeline = Pipeline(throttled)
    for value in range(12):
        pipeline.put(value)
        time.sleep(0.010)
    pipeline.put(None)
def main():
    """Increment 10000 tasks in an unordered stage and print each result."""
    pipeline = Pipeline(UnorderedStage(increment))
    for task in range(10000):
        pipeline.put(task)
    pipeline.put(None)
    for result in pipeline.results():
        print(result)
def main():
    """increment (unordered, 3 workers) feeding double (ordered, 3 workers)."""
    head = UnorderedStage(increment, 3)
    tail = OrderedStage(double, 3)
    head.link(tail)
    pipeline = Pipeline(head)
    for task in range(10):
        pipeline.put(task)
    pipeline.put(None)
    for result in pipeline.results():
        print(result)
def main():
    """Count how many results a disabled-result Yes stage still emits."""
    pipeline = Pipeline(Stage(Yes, 4, disable_result=True))
    for task in range(10):
        pipeline.put(task)
    pipeline.put(None)
    # Drain results() and count the items (expected to be few/none since
    # results are disabled).
    print(sum(1 for _ in pipeline.results()))
def main():
    """Echo stage wrapped in a filter; both stages receive the stop task."""
    echo_stage = Stage(Echo, do_stop_task=True)
    filtered = FilterStage(
        (echo_stage,),
        max_tasks=999,
        do_stop_task=True,
    )
    pipeline = Pipeline(filtered)
    for task in list(range(10)) + [None]:
        pipeline.put(task)
def main():
    """Three chained ordered stages: increment -> double -> echo."""
    head = OrderedStage(increment)
    middle = OrderedStage(double)
    tail = OrderedStage(echo)
    head.link(middle)
    middle.link(tail)
    pipeline = Pipeline(head)
    for task in range(10):
        pipeline.put(task)
    pipeline.put(None)
def main():
    """Stream a sine-wave price series into an Accumulator -> echo pipeline."""
    head = Stage(Accumulator)
    head.link(OrderedStage(echo, 50))
    pipeline = Pipeline(head)
    size = 1000
    # Shifted sine wave in [0, 2] over ten half-periods.
    prices = np.sin(np.linspace(0, np.pi * 10, size)) + 1
    for price in prices:
        pipeline.put(price)
    pipeline.put(None)
def extract_feat_from_FCL():
    """Extract VGG16 fc2-layer features for image windows under img_cut.

    Images are batched through a send_batch -> create_bag_of_window pipeline;
    each resulting bag of windows is stacked and pushed through the fc2
    sub-model.  Returns the feature tensor of the last processed batch.
    """
    input_shape = (224, 224, 3)
    # Pass the tuple directly instead of re-building it element by element.
    model = VGG16(weights='imagenet', input_shape=input_shape,
                  pooling='max', include_top=True)
    t1 = time.time()
    layer_name = "fc2"
    intermediate_layer_model = Model(
        inputs=model.input, outputs=model.get_layer(layer_name).output)
    # Renamed from 'list' to stop shadowing the builtin.
    image_paths = get_imlist("img_cut")
    stage1 = OrderedStage(send_batch)
    stage2 = OrderedStage(create_bag_of_window)
    stage1.link(stage2)
    pipe = Pipeline(stage1)
    batch_size = 1
    total_batch = math.ceil(len(image_paths) / batch_size)
    for p in range(total_batch):
        batch = image_paths[p * batch_size:(p + 1) * batch_size]
        print("Batch number %s" % p)
        pipe.put(batch)
    pipe.put(None)
    for result in pipe.results():
        t0 = time.time()
        print("Predicting...")
        # np.vstack requires a real sequence; passing a generator raises on
        # modern NumPy.
        feature_tensor = intermediate_layer_model.predict(np.vstack(list(result)))
        t1 = time.time()
        print("time to predict : %ss" % (t1 - t0))
        print(feature_tensor.shape)
        del result  # free the window bag before the next batch
    t2 = time.time()
    # NOTE(review): t1 was overwritten inside the loop, so this "total" is
    # really the time since the last per-batch prediction started — kept
    # as-is to preserve the original output.
    print("Total time to predict: " + str(t2 - t1))
    print(feature_tensor.shape)
    return feature_tensor
def main():
    """Run the exchange-loading workflow, then close all repository sessions.

    Fixes: Python 2 `print` statements converted to Python 3 calls (the rest
    of this file is Python 3); `True if ... else False` simplified; the
    repeated Close*RepoSession boilerplate collapsed into a loop.
    """
    config = load_config()
    # Direct boolean: 'True if cond else False' is just 'cond'.
    clean_data = "True" in config["CLEAN_DATA"]

    # Create stages.
    stage_setup = Stage(SetupDatabase, 1)
    stage_load_ex = Stage(LoadExchange, 1)
    save_exchange = Stage(SaveGetExchange)
    get_symbols = Stage(GetSymbols)
    save_symbols = Stage(SaveSymbol, 1)  # created but deliberately unlinked
    save_currency_pair = Stage(SaveCurrencyPair)
    trade_dates = Stage(GetTradeRanges, 1)
    add_price = Stage(AddPriceData, 1)

    # Link stages into a linear workflow.
    stage_setup.link(stage_load_ex)
    stage_load_ex.link(save_exchange)
    save_exchange.link(get_symbols)
    # get_symbols.link(save_symbols)  # disabled in the original workflow
    get_symbols.link(save_currency_pair)
    save_currency_pair.link(trade_dates)
    trade_dates.link(add_price)

    # Set up and drive the pipeline.
    pipe = Pipeline(stage_setup)
    pipe.put(clean_data)
    pipe.put(None)

    insert_count = []
    for result in pipe.results():
        print('pipe result %s' % (result))
        insert_count.append(result)
    print("inserted %s records" % (sum(insert_count)))

    # Close every repository session the workflow opened.
    for closer_cls in (CloseSymbolRepoSession, CloseCurrencyPairRepoSession,
                       CloseDateRepoSession, ClosePriceRepoSession):
        closer_cls().doTask("")
def process():
    """Batch the images under img_cut/ through a three-stage pipeline.

    Fix: the local variable no longer shadows the builtin `list`.
    """
    image_paths = get_imlist("img_cut/")
    stage1 = OrderedStage(send_batch)
    stage2 = OrderedStage(create_bag_of_window)
    stage3 = OrderedStage(last_stage)
    stage1.link(stage2)
    stage2.link(stage3)
    pipe = Pipeline(stage1)
    batch_size = 20
    total_batch = math.ceil(len(image_paths) / batch_size)
    for p in range(total_batch):
        batch = image_paths[p * batch_size:(p + 1) * batch_size]
        print("Batch number %s" % p)
        pipe.put(batch)
    pipe.put(None)  # end-of-input marker
import sys
from mpipe import UnorderedStage, Pipeline


def increment(value):
    """Return value + 1."""
    return value + 1


stage = UnorderedStage(increment)
pipe = Pipeline(stage)


def pull(value):
    """Drain and print every result from the main pipeline."""
    for result in pipe.results():
        print(result)


pipe2 = Pipeline(UnorderedStage(pull))
pipe2.put(True)

# Fix: xrange/sys.maxint are Python 2 only; range(sys.maxsize) is the lazy
# Python 3 equivalent of an effectively unbounded task stream.
for task in range(sys.maxsize):
    pipe.put(task)
pipe.put(None)
pipe2.put(None)
from mpipe import OrderedStage, Pipeline


def increment(value):
    """Return value + 1."""
    return value + 1


def double(value):
    """Return value * 2."""
    return value * 2


def echo(value):
    """Print the value (terminal stage, produces no result)."""
    print(value)


# increment -> double -> echo, chained in order.
first = OrderedStage(increment)
second = OrderedStage(double)
third = OrderedStage(echo)
first.link(second)
second.link(third)

pipe = Pipeline(first)
for task in list(range(10)) + [None]:
    pipe.put(task)
import time
from mpipe import OrderedStage, FilterStage, Pipeline


def passthru(value):
    """Simulate ~13 ms of work, then forward the value unchanged."""
    time.sleep(0.013)
    return value


# At most one task in flight; overflow results are dropped, not queued.
s1 = FilterStage(
    (OrderedStage(passthru),),
    max_tasks=1,
    drop_results=True,
)
p1 = Pipeline(s1)


def pull(task):
    """Print every non-empty result coming out of p1."""
    for result in p1.results():
        if result:
            print(result)


p2 = Pipeline(OrderedStage(pull))
p2.put(True)

for number in range(10):
    p1.put(number)
    time.sleep(0.010)
p1.put(None)
p2.put(None)
def main():
    """Feed 10000 tasks to an increment pipeline while a second pipeline
    (driven by a Pull worker) drains its results."""
    work = Pipeline(UnorderedStage(increment))
    drain = Pipeline(UnorderedStage(Pull(work)))
    drain.put(True)
    for task in range(10000):
        work.put(task)
    work.put(None)
    drain.put(None)
from mpipe import OrderedStage, FilterStage, Pipeline
import time


def echo(value):
    """Print the value, simulate ~12.5 ms of work, and forward it."""
    print(value)
    time.sleep(0.0125)
    return value


# At most two echo tasks in flight at once.
pipe1 = Pipeline(
    FilterStage(
        (OrderedStage(echo),),
        max_tasks=2
    )
)


def pull(task):
    """Silently drain pipe1's results."""
    for _ in pipe1.results():
        pass


pipe2 = Pipeline(OrderedStage(pull))
pipe2.put(True)
pipe2.put(None)

for number in range(10):
    pipe1.put(number)
    time.sleep(0.0100)
pipe1.put(None)
from mpipe import OrderedStage, Pipeline


def increment(value):
    """Return value + 1."""
    return value + 1


def double(value):
    """Return value * 2."""
    return value * 2


# increment feeds double; the doubled values come out of results().
first = OrderedStage(increment)
second = OrderedStage(double)
first.link(second)

pipe = Pipeline(first)
for task in list(range(10)) + [None]:
    pipe.put(task)

for result in pipe.results():
    print(result)
from mpipe import OrderedStage, Pipeline


def yes(value):
    """Identity worker; the stage discards its result."""
    return value


pipe = Pipeline(OrderedStage(yes, disable_result=True))
for task in list(range(10)) + [None]:
    pipe.put(task)

# With results disabled this loop is expected to print little or nothing.
for result in pipe.results():
    print(result)
import time
from mpipe import OrderedStage, FilterStage, Pipeline


def passthru(value):
    """Simulate ~13 ms of work and forward the value."""
    time.sleep(0.013)
    return value


# One task in flight; results of skipped tasks are cached for later delivery.
stage = FilterStage(
    (OrderedStage(passthru),),
    max_tasks=1,
    cache_results=True,
)
p1 = Pipeline(stage)


def pull(trigger):
    """Print each (task, first-cached-result) pair coming out of p1."""
    for task, result in p1.results():
        if result:
            print('{0} {1}'.format(task, result[0]))


p2 = Pipeline(OrderedStage(pull))
p2.put(True)

for number in range(10):
    p1.put(number)
    time.sleep(0.010)
p1.put(None)
from mpipe import OrderedStage as OStage, Pipeline


def magnify(value):
    """Multiply the value by ten."""
    return value * 10


# Four magnify stages chained in-line: each task is multiplied by 10**4.
p1 = Pipeline(
    OStage(magnify).link(
        OStage(magnify).link(OStage(magnify).link(OStage(magnify)))
    )
)

for val in list(range(10)) + [None]:
    p1.put(val)

for result in p1.results():
    print(result)
from mpipe import OrderedWorker, Stage, OrderedStage, Pipeline

# Sliding window of the last ten prices seen.
last10 = deque()

# Leftover sample quote URLs (unused by the pipeline below).
junk = 'http://ws.cdyne.com/delayedstockquote/delayedstockquote.asmx/GetQuote?StockSymbol=fac&LicenseKey=0'
j = 'http://www.google.com/ig/api?stock=AAPL'


class Accumulator(OrderedWorker):
    """Emit a price only when it undercuts the minimum of the last ten."""

    def doTask(self, price):
        if last10 and price < min(last10):
            self.putResult(price)
        last10.append(price)
        if len(last10) > 10:
            last10.popleft()


def echo(value):
    print('value = {0}'.format(value))


# Accumulator feeds a 50-worker echo stage.
stage1 = Stage(Accumulator)
stage1.link(OrderedStage(echo, 50))
pipe = Pipeline(stage1)

SIZE = 1000
# Shifted sine wave in [0, 2] over ten half-periods.
prices = np.sin(np.linspace(0, np.pi * 10, SIZE)) + 1
for price in prices:
    pipe.put(price)
pipe.put(None)
import sys
from mpipe import UnorderedStage, Pipeline


def increment(value):
    """Return value + 1."""
    return value + 1


stage = UnorderedStage(increment)
pipe = Pipeline(stage)

# Fix: xrange/sys.maxint are Python 2 only; range(sys.maxsize) is the lazy
# Python 3 equivalent of an effectively unbounded task stream.
for task in range(sys.maxsize):
    pipe.put(task)
pipe.put(None)

for result in pipe.results():
    print(result)
def main():
    """Rate-limited pass-through pipeline drained by a Pull worker running
    in a second pipeline."""
    limited = Pipeline(
        FilterStage(
            (OrderedStage(pass_thru),),
            max_tasks=1,
        )
    )
    drainer = Pipeline(OrderedStage(Pull(limited)))
    drainer.put(True)
    for number in range(10):
        limited.put(number)
        time.sleep(0.010)
    limited.put(None)
    drainer.put(None)
import time
from mpipe import OrderedStage, Pipeline


def echo(value):
    """Print the value, sleep ~13 ms, and forward it."""
    print(value)
    time.sleep(0.013)
    return value


pipe = Pipeline(OrderedStage(echo))

# Producer is slightly faster (10 ms) than the worker (13 ms).
for number in range(12):
    pipe.put(number)
    time.sleep(0.010)
pipe.put(None)
from mpipe import Stage, OrderedWorker, FilterStage, Pipeline


class Echo(OrderedWorker):
    """Worker that prints each value it receives."""

    def doTask(self, value):
        print(value)


# Echo stage wrapped in a filter; both receive the stop task.
inner = Stage(Echo, do_stop_task=True)
outer = FilterStage(
    (inner,),
    max_tasks=999,
    do_stop_task=True,
)

pipe = Pipeline(outer)
for task in list(range(10)) + [None]:
    pipe.put(task)