Example #1
def sub2Pdf(path, backup, remove_dir, debug):
    stage1 = Stage(GetImages, 2)
    stage2 = Stage(MakePdf, 7)

    if backup:
        stage3 = Stage(MakeBackup, 7)
        stage1.link(stage3)
        if not os.path.exists(path + '\\BACKUP'):
            os.makedirs(path + '\\BACKUP')

    stage1.link(stage2)

    pipe = Pipeline(stage1)
    lt_dir = get_subdiretories(path)
    for folder in lt_dir:
        pipe.put(folder)

    pipe.put(None)

    if debug:
        print("Converting to pdf :\n%s\n" %
              ('Backup activated' if backup else 'Backup deactivated'))

    # Iterating over the results waits until all tasks are done
    for res in pipe.results():
        try:
            if debug:
                print('    Done :=> ' + res)
        except Exception:
            pass

    if remove_dir:
        delete_dirs(lt_dir)

    print('\nFinished treating : %s' % get_leaf_from_path(path))
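The worker classes wired into the stages above (GetImages, MakePdf, MakeBackup) are project-specific and not shown here. A minimal sketch of what one of them might look like, assuming the usual mpipe pattern of subclassing OrderedWorker and implementing doTask (the class body below is illustrative only):

from mpipe import OrderedWorker

class MakePdf(OrderedWorker):
    # Hypothetical sketch: the real worker is project-specific.
    # Each task is one folder path fed in via pipe.put(folder).
    def doTask(self, folder):
        # ... convert the images found in `folder` into a PDF ...
        return folder  # becomes the `res` value printed as 'Done :=> <folder>'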
Example #2
def main():
    pipe = Pipeline(OrderedStage(yes, disable_result=True))

    for number in range(10):
        pipe.put(number)
    pipe.put(None)

    for result in pipe.results():
        print(result)
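The yes function is not included in this snippet. Because the stage is created with disable_result=True, its return value is never propagated, so any side-effecting callable would do; a hypothetical stand-in:

def yes(value):
    # Hypothetical stand-in: acknowledge the task; the return value is
    # discarded because the stage was built with disable_result=True.
    print('processed', value)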
Example #3
def evaluate_workflow(stages, inp):
    # if __name__ == '__main__':
    #     freeze_support()
    pipe = Pipeline(stages[0])

    pipe.put(inp)

    pipe.put(None)

    return pipe.results()
Example #4
def main():
    stage1 = Stage(Adder, 1, number=5)
    pipe = Pipeline(stage1)

    for number in range(10):
        pipe.put(number)

    pipe.put(None)

    for result in pipe.results():
        print(result)
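Adder is not defined in this snippet. A plausible sketch, assuming (as the call Stage(Adder, 1, number=5) implies) that the extra keyword arguments given to Stage are forwarded to the worker's constructor:

from mpipe import OrderedWorker

class Adder(OrderedWorker):
    # Hypothetical worker: adds a fixed offset to every task.
    def __init__(self, number):
        self.number = number

    def doTask(self, value):
        return value + self.number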
Example #5
def main():
    stage1 = UnorderedStage(increment, 3)
    stage2 = OrderedStage(double, 3)
    stage1.link(stage2)
    pipe = Pipeline(stage1)

    for number in range(10):
        pipe.put(number)
    pipe.put(None)

    for result in pipe.results():
        print(result)
Example #6
def main():
    stage = UnorderedStage(increment)
    pipe = Pipeline(stage)

    # for task in range(sys.maxint if sys.version_info.major <= 2 else sys.maxsize):
    for task in range(10000):
        pipe.put(task)

    pipe.put(None)

    for result in pipe.results():
        print(result)
Example #7
def main():
    stage = Stage(Yes, 4, disable_result=True)
    pipe = Pipeline(stage)

    for number in range(10):
        pipe.put(number)
    pipe.put(None)

    count = 0
    for _ in pipe.results():
        count += 1

    print(count)
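Unlike OrderedStage and UnorderedStage, which wrap plain functions, Stage takes a worker class, so Yes is presumably derived from one of mpipe's worker base classes. A minimal hypothetical version:

from mpipe import OrderedWorker

class Yes(OrderedWorker):
    # Hypothetical sketch: with disable_result=True the return value of
    # doTask is not propagated, so the worker only needs its side effect.
    def doTask(self, value):
        print('yes:', value)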
Example #8
class ExecutionCache():
    def __init__(self, function, jobs, SchedulerClass):
        self._results = dict()
        self._targets = set()
        self._update_lock = Lock()

        self._runner = Pipeline(UnorderedStage(function, jobs))

        def fetch():
            for target, result in self._runner.results():
                if target in self._targets:
                    self._results[target.host] = result
                else:
                    print(f'dropping obsolete result for {target}')

        self._fetcher = Thread(target=fetch)
        self._fetcher.start()
        self.scheduler = SchedulerClass(self._runner)

    def teardown(self):
        self.scheduler.cancel_all()
        self._runner.put(None)
        self._fetcher.join()

    def update_targets(self, targets):
        with self._update_lock:
            new_targets = set(targets)
            old_targets = self._targets

            new_hosts = {target.host for target in targets}
            old_hosts = {target.host for target in self._targets}
            removed_hosts = old_hosts - new_hosts

            self._targets = new_targets

            for target in old_targets - new_targets:
                self.scheduler.remove(target)

                if target.host in removed_hosts:
                    try:
                        del self._results[target.host]
                    except KeyError:
                        pass

            for target in new_targets - old_targets:
                self.scheduler.add(target)

    def __getitem__(self, key):
        return self._results[key]
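ExecutionCache depends on two collaborators that are not shown: a worker function whose results come back as (target, result) pairs, and a SchedulerClass exposing add(), remove() and cancel_all(). A minimal, hypothetical sketch of how it might be wired up, with stand-ins for both:

from collections import namedtuple

# Hypothetical stand-ins, inferred only from the calls ExecutionCache makes.
Target = namedtuple('Target', 'host port')

def probe(target):
    # Worker function: must return a (target, result) pair, because the
    # fetcher thread unpacks self._runner.results() that way.
    return target, 'reached %s:%s' % (target.host, target.port)

class SimpleScheduler:
    # Stand-in scheduler exposing only the interface ExecutionCache calls.
    def __init__(self, runner):
        self._runner = runner

    def add(self, target):
        self._runner.put(target)  # schedule one probe per newly added target

    def remove(self, target):
        pass  # a real scheduler would stop re-probing this target

    def cancel_all(self):
        pass

cache = ExecutionCache(probe, jobs=4, SchedulerClass=SimpleScheduler)
cache.update_targets({Target('db1', 5432), Target('web1', 80)})
cache.teardown()          # waits for the fetcher thread to drain all results
print(cache['db1'])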
Example #9
def extract_feat_from_FCL():
    input_shape = (224, 224, 3)
    model = VGG16(weights='imagenet',
                  input_shape=(input_shape[0], input_shape[1], input_shape[2]),
                  pooling='max',
                  include_top=True)
    t_start = time.time()
    layer_name = "fc2"
    intermediate_layer_model = Model(
        inputs=model.input, outputs=model.get_layer(layer_name).output)

    img_list = get_imlist("img_cut")

    stage1 = OrderedStage(send_batch)
    stage2 = OrderedStage(create_bag_of_window)
    stage1.link(stage2)
    pipe = Pipeline(stage1)

    batch_size = 1
    total_batch = math.ceil(len(img_list) / batch_size)

    for p in range(total_batch):
        v = img_list[p * batch_size:(p + 1) * batch_size]
        print("Batch number %s" % p)
        pipe.put(v)

    pipe.put(None)

    for result in pipe.results():
        t0 = time.time()
        print("Predicting...")
        feature_tensor = intermediate_layer_model.predict(np.vstack(result))
        t1 = time.time()
        print("time to predict : %ss" % (t1 - t0))
        # gc.collect()
        print(feature_tensor.shape)
        del result

    # feature_tensor = intermediate_layer_model.predict_generator(generator=generator_, steps=1, max_queue_size=1)
    t2 = time.time()
    print("Total time to predict: " + str(t2 - t1))
    print(feature_tensor.shape)
    return feature_tensor
Example #10
def main():
    config = load_config()
    clean_data = "True" in config["CLEAN_DATA"]

    # create stages
    stage_setup = Stage(SetupDatabase, 1)
    stage_load_ex = Stage(LoadExchange, 1)
    save_exchange = Stage(SaveGetExchange)
    get_symbols = Stage(GetSymbols)
    save_symbols = Stage(SaveSymbol, 1)
    save_curency_pair = Stage(SaveCurrencyPair)
    trade_dates = Stage(GetTradeRanges, 1)
    add_price = Stage(AddPriceData, 1)
    # link stages
    stage_setup.link(stage_load_ex)
    stage_load_ex.link(save_exchange)
    save_exchange.link(get_symbols)
    #get_symbols.link(save_symbols)
    get_symbols.link(save_curency_pair)
    save_curency_pair.link(trade_dates)
    trade_dates.link(add_price)
    # setup pipeline
    pipe = Pipeline(stage_setup)
    pipe.put(clean_data)
    pipe.put(None)
    insert_count = []
    for result in pipe.results():
        print('pipe result %s' % result)
        insert_count.append(result)
    print("inserted %s records" % sum(insert_count))
    close1 = CloseSymbolRepoSession()
    close1.doTask("")
    close2 = CloseCurrencyPairRepoSession()
    close2.doTask("")
    close3 = CloseDateRepoSession()
    close3.doTask("")
    close4 = ClosePriceRepoSession()
    close4.doTask("")
Example #11
from mpipe import OrderedStage, Pipeline


def increment(value):
    return value + 1


def double(value):
    return value * 2


stage1 = OrderedStage(increment)
stage2 = OrderedStage(double)
stage1.link(stage2)
pipe = Pipeline(stage1)

for number in range(10):
    pipe.put(number)

pipe.put(None)

for result in pipe.results():
    print(result)
Example #12
from mpipe import OrderedStage as OStage, Pipeline

def magnify(value):
    return value*10

p1 = Pipeline(
    OStage(magnify).link(
        OStage(magnify).link(
            OStage(magnify).link(
                OStage(magnify)
                )
            )
        )
    )
for val in list(range(10)) + [None]:
    p1.put(val)

for result in p1.results():
    print(result)
Example #13
from mpipe import OrderedStage as OStage, Pipeline


def magnify(value):
    return value * 10


p1 = Pipeline(
    OStage(magnify).link(
        OStage(magnify).link(OStage(magnify).link(OStage(magnify)))))
for val in list(range(10)) + [None]:
    p1.put(val)

for result in p1.results():
    print(result)