Example no. 1
def test_with(kwargs):
    """Submitting three tasks inside the with-block yields the results in submission order."""
    with mantichora(**kwargs) as mcore:
        for delay, label in ((0.05, 'result 1'),
                             (0.01, 'result 2'),
                             (0.02, 'result 3')):
            mcore.run(task, delay, label)
        results = mcore.returns()
        assert results == ['result 1', 'result 2', 'result 3']
Example no. 2
def test_with_raise(kwargs):
    """An exception raised inside the with-block propagates out of the context manager."""
    with pytest.raises(MyException):
        with mantichora(**kwargs) as mcore:
            for delay, label in ((0.05, 'result 1'),
                                 (0.01, 'result 2'),
                                 (0.02, 'result 3')):
                mcore.run(task, delay, label)
            raise MyException
Example no. 3
def test_end(kwargs):
    """Explicit end() shuts the workers down when mantichora is used without a with-block.

    Fix: end() is now in a ``finally`` clause so the worker processes are
    released even when ``returns()`` raises or the assertion fails; previously
    a failure would leak the pool.
    """
    mcore = mantichora(**kwargs)
    try:
        mcore.run(task, 0.05, 'result 1')
        mcore.run(task, 0.01, 'result 2')
        mcore.run(task, 0.02, 'result 3')
        returns = mcore.returns()
        assert ['result 1', 'result 2', 'result 3'] == returns
    finally:
        mcore.end()
Example no. 4
def run_with_mantichora(nprocesses, ntasks, niterations):
    """Run ``ntasks`` tasks over ``nprocesses`` workers.

    Fix: the collected results were assigned but then discarded; they are now
    returned (backward-compatible — callers that ignored the previous ``None``
    return are unaffected).

    :param nprocesses: number of worker processes
    :param ntasks: number of tasks to submit
    :param niterations: per-task iteration counts, indexable by task number
    :return: list of task results in submission order
    """
    with mantichora(nworkers=nprocesses) as mcore:
        for i in range(ntasks):
            mcore.run(task, niterations[i], 'task {}'.format(i))
        returns = mcore.returns()
    return returns
Example no. 5
 def get_sparse_vecs(self,
                     sp_out_file,
                     vocab_out_file,
                     data_dir,
                     vocab_size=2000,
                     i_vocab=None,
                     full_histogram_file=None,
                     pat='*.json'):
     """Vectorize the files matching ``pat`` under ``data_dir`` into sparse
     count vectors and write them to ``sp_out_file``.

     :param sp_out_file: output path; one document per line as
         ``<label> <idx>:<count> ...``
     :param vocab_out_file: output path for the vocabulary (written only when
         the vocabulary is built here, i.e. ``i_vocab is None``)
     :param data_dir: directory containing the input files
     :param vocab_size: maximum vocabulary size when building the vocabulary
     :param i_vocab: pre-built vocabulary; when given, no counting is done
     :param full_histogram_file: optional path for the full term histogram;
         only written when the counter was built here (see fix note below)
     :param pat: glob pattern selecting the input files
     :return: the vocabulary used for vectorization
     """
     files = glob.glob(data_dir + '/' + pat)
     counter = None  # only populated when we must build the vocabulary
     if i_vocab is None:
         counter = self.get_counter_dir_parallel(data_dir, pat)
         vocab = self.get_vocab(counter, vocab_size)
     else:
         vocab = i_vocab
     files_and_vocab = [(f, vocab) for f in files]
     if len(files_and_vocab) > 2:
         # enough files to be worth parallelizing: one batch per CPU
         file_batches = list(
             self.chunks(files_and_vocab,
                         max(1,
                             len(files_and_vocab) // cpu_count())))
         with mantichora() as mcore:
             for i, batch in enumerate(file_batches):
                 mcore.run(self.task_vec_fn,
                           "Vectorizing Batch {}".format(i),
                           batch)
             sp_vecs = mcore.returns()
         # flatten the per-batch lists of vector blocks
         sp_vecs = [item for sl in sp_vecs for item in sl]
     else:
         sp_vecs = map(self.vectorize_fn, files_and_vocab)
     sp_list = list(sp_vecs)
     with io.open(sp_out_file, 'w', encoding=self.encoding) as fp:
         for block in sp_list:
             for (v, l) in block:
                 fp.write(str(l))
                 for (i, c) in v:
                     fp.write(' ')
                     fp.write(str(i))
                     fp.write(':')
                     fp.write(str(c))
                 fp.write('\n')
     if i_vocab is None:  ## print out vocab if we had to create it
         with io.open(vocab_out_file, 'w', encoding=self.encoding) as fp:
             for i in range(len(vocab.idx_to_token)):
                 fp.write(vocab.idx_to_token[i])
                 fp.write('\n')
     # FIX: previously `counter` was referenced unconditionally here, raising
     # NameError when a pre-built vocabulary (i_vocab) was supplied together
     # with full_histogram_file; now the histogram is written only when the
     # counter actually exists.
     if full_histogram_file and counter is not None:
         with io.open(full_histogram_file, 'w',
                      encoding=self.encoding) as fp:
             items = list(counter.items())
             items.sort(key=lambda x: -x[1])
             for k, v in items:
                 fp.write(str(k))
                 fp.write(' ')
                 fp.write(str(v))
                 fp.write('\n')
     return vocab
Example no. 6
def test_logging(caplog):
    """Each task logs one INFO message, all of which are captured by caplog."""
    with caplog.at_level(logging.INFO):
        with mantichora() as mcore:
            for delay, label, message in ((0.05, 'result 1', 'message 1'),
                                          (0.01, 'result 2', 'message 2'),
                                          (0.02, 'result 3', 'message 3')):
                mcore.run(task, delay, label, message)
            results = mcore.returns()
            assert results == ['result 1', 'result 2', 'result 3']

    assert len(caplog.records) == 3
    assert all(record.levelname == 'INFO' for record in caplog.records)
    actual = sorted(record.msg for record in caplog.records)
    assert actual == sorted(['message 1', 'message 2', 'message 3'])
Example no. 7
 def get_counter_dir_parallel(self, data_dir, pat):
     """Count vocabulary items over all files matching ``pat`` under
     ``data_dir``, fanning the file batches out to parallel workers and
     merging the per-batch Counters."""
     files = glob.glob(data_dir + '/' + pat)
     batch_size = max(1, len(files) // cpu_count())
     file_batches = list(self.chunks(files, batch_size))
     logging.info(
         "Counting vocabulary over {} text files with {} batches".format(
             len(files), len(file_batches)))
     with mantichora() as mcore:
         for batch_index, batch in enumerate(file_batches):
             mcore.run(self.task,
                       "Counting Vocab Items - Batch {}".format(batch_index),
                       batch)
         counters = mcore.returns()
     return sum(counters, Counter())
Example no. 8
def test_receive_finished(kwargs):
    """receive_finished() eventually delivers every (runid, result) pair."""
    specs = ((0.05, 'result 1'), (0.01, 'result 2'), (0.02, 'result 3'))
    with mantichora(**kwargs) as mcore:
        runids = [mcore.run(task, delay, label) for delay, label in specs]
        pairs = []
        while len(pairs) < 3:
            pairs.extend(mcore.receive_finished())
    expected = list(zip(runids, ['result 1', 'result 2', 'result 3']))
    assert sorted(pairs) == sorted(expected)
Example no. 9
    def get_counter_dir_parallel(self, txt_dir, pat='*.txt'):
        """Count vocabulary items across the text files in ``txt_dir``.

        Files are grouped into small batches, the batches are chunked across
        CPUs, counted in parallel workers, and the resulting Counters merged.
        """
        files = glob.glob(txt_dir + '/' + pat)
        batch_size = max(1, int(len(files) / 20))
        # slice the file list into consecutive batches of batch_size
        file_batches = [files[start:start + batch_size]
                        for start in range(0, len(files), batch_size)]
        file_batch_batches = list(
            self.chunks(file_batches, max(1,
                                          len(files) // cpu_count())))
        with mantichora() as mcore:
            for index, chunk in enumerate(file_batch_batches):
                mcore.run(self.task,
                          "Counting Vocab Items - Batch {}".format(index),
                          chunk)
            counter_cs = mcore.returns()
        # flatten the per-worker lists of Counters before merging
        counters = [c for group in counter_cs for c in group]
        return sum(counters, Counter())
Example no. 10
def test_receive_one(kwargs):
    """receive_one() returns pairs one at a time and None once all are done."""
    specs = ((0.05, 'result 1'), (0.01, 'result 2'), (0.02, 'result 3'))
    with mantichora(**kwargs) as mcore:
        runids = [mcore.run(task, delay, label) for delay, label in specs]
        # iter(callable, sentinel) keeps calling receive_one() until it
        # returns None
        pairs = list(iter(mcore.receive_one, None))
    expected = list(zip(runids, ['result 1', 'result 2', 'result 3']))
    assert sorted(pairs) == sorted(expected)
Example no. 11
 def open(self):
     """Start the worker pool; a second call while already open is a no-op."""
     if self.mcore is None:
         self.mcore = mantichora.mantichora(nworkers=self.nprocesses)
Example no. 12
def multi(video, processes):
    """
    Splitting the points to multiple processes and creating a
    pool of workers.

    :param video: the video object with defined attributes
    :type video: object
    :param processes: number of processes. If negative, the number
        of processes is set to `psutil.cpu_count + processes`.
    :type processes: int
    :return: displacements
    :rtype: ndarray
    :raises ValueError: if ``processes`` is zero, or if
        ``video.method.multi_type`` is not a supported backend
        (fix: previously an unknown backend fell through to an
        undefined variable and raised ``NameError``).
    """
    if processes < 0:
        processes = cpu_count() + processes
    elif processes == 0:
        raise ValueError('Number of processes must not be zero.')

    points = video.points
    points_split = tools.split_points(points, processes=processes)

    idi_kwargs = {
        'cih_file': video.cih_file,
    }

    method_kwargs = {
        'roi_size': video.method.roi_size,
        'pad': video.method.pad,
        'max_nfev': video.method.max_nfev,
        'tol': video.method.tol,
        'verbose': video.method.verbose,
        'show_pbar': video.method.show_pbar,
        'int_order': video.method.int_order,
        'pbar_type': video.method.pbar_type,
        'resume_analysis': video.method.resume_analysis,
        'reference_image': video.method.reference_image
    }
    if video.method.pbar_type == 'atpbar':
        print(f'Computation start: {datetime.datetime.now()}')
    t_start = time.time()

    if video.method.multi_type == 'multiprocessing':
        # atpbar cannot render across multiprocessing workers
        if method_kwargs['pbar_type'] == 'atpbar':
            method_kwargs['pbar_type'] = 'tqdm'
            warnings.warn(
                '"atpbar" pbar_type was used with "multiprocessing". This is not supported. Changed pbar_type to "tqdm"'
            )

        pool = Pool(processes=processes)
        results = [
            pool.apply_async(worker, args=(p, idi_kwargs, method_kwargs, i))
            for i, p in enumerate(points_split)
        ]
        pool.close()
        pool.join()

        out = [r.get() for r in results]

    elif video.method.multi_type == 'mantichora':
        with mantichora.mantichora(nworkers=processes) as mcore:
            for i, p in enumerate(points_split):
                mcore.run(worker, p, idi_kwargs, method_kwargs, i)
            out = mcore.returns()

    else:
        raise ValueError(
            'Unsupported multi_type: {}'.format(video.method.multi_type))

    # each worker returns (displacements, worker_index); restore submission
    # order, then stitch the displacement arrays together
    out1 = sorted(out, key=lambda x: x[1])
    out1 = np.concatenate([d[0] for d in out1])

    t = time.time() - t_start
    minutes = t // 60
    seconds = t % 60
    hours = minutes // 60
    minutes = minutes % 60
    print(
        f'Computation duration: {hours:0>2.0f}:{minutes:0>2.0f}:{seconds:.2f}')

    return out1
Example no. 13
# Tai Sakuma <*****@*****.**>
import time, random
from atpbar import atpbar
from mantichora import mantichora


##__________________________________________________________________||
def task_loop(name, ret=None):
    """Spin for a random number of short iterations under an atpbar progress
    bar labelled *name*, then return *ret*."""
    total = random.randint(1000, 10000)
    for _ in atpbar(range(total), name=name):
        time.sleep(0.0001)
    return ret


result = task_loop('task', 'result')
print(repr(result))

##__________________________________________________________________||
# Submit six tasks to three workers; returns() preserves submission order.
with mantichora(nworkers=3) as mcore:
    for task_name, task_ret in (('task', 'result1'),
                                ('another task', 'result2'),
                                ('still another task', 'result3'),
                                ('yet another task', 'result4'),
                                ('task again', 'result5'),
                                ('more task', 'result6')):
        mcore.run(task_loop, task_name, ret=task_ret)
    results = mcore.returns()

print(results)

##__________________________________________________________________||
Example no. 14
def test_init_mp_start_method_raise():
    """An unrecognized mp_start_method is rejected with ValueError."""
    bogus = 'no-such-method'
    with pytest.raises(ValueError):
        mantichora(mp_start_method=bogus)
Example no. 15
def test_init_mode(mode):
    """Each supported mode can be used to construct and close mantichora."""
    mcore = mantichora(mode=mode)
    with mcore:
        pass
Example no. 16
def test_init_mode_raise():
    """An unrecognized mode is rejected with ValueError."""
    bogus = 'no-such-mode'
    with pytest.raises(ValueError):
        mantichora(mode=bogus)
Example no. 17
def test_init_mp_start_method(mp_start_method):
    """Each supported mp_start_method can construct and close mantichora."""
    mcore = mantichora(mp_start_method=mp_start_method)
    with mcore:
        pass
Example no. 18
def test_with_terminate(kwargs):
    with mantichora(**kwargs) as mcore:
        mcore.run(task_perpetual, 'result 1')
        mcore.run(task_perpetual, 'result 2')
        mcore.run(task_perpetual, 'result 3')