Code example #1
File: test_pp.py Project: resurgo-genetics/pathos
def run_ppmap(obj):
    from pathos.pools import ParallelPool
    p = ParallelPool(2)
    x = [1,2,3]
    assert list(map(obj, x)) == p.map(obj, x)
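A minimal standalone sketch (not taken from any of the listed projects) of the same call with explicit cleanup, since scripts that keep running after the map should release the workers; the square function is introduced here purely for illustration:

from pathos.pools import ParallelPool

def square(x):
    return x * x

if __name__ == '__main__':
    p = ParallelPool(2)               # two workers, matching the test above
    print(p.map(square, [1, 2, 3]))   # -> [1, 4, 9]; pathos map returns a list
    p.close()                         # stop accepting new work
    p.join()                          # wait for the workers to finish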
Code example #2
File: pp_map.py Project: daodaoliang/pathos
    def add(x, y, z):
        """Add three values"""
        return x + y + z

    def busybeaver(x):
        """This can take a while"""
        for num in range(1000000):
            x = x + num
        return x

    # The excerpt assumes a worker pool was created earlier, e.g.:
    #   from pathos.pools import ParallelPool
    #   pool = ParallelPool()

    # Immediate evaluation example
    import time

    start = time.time()
    results = pool.map(busybeaver, range(10))
    print("Time to queue the jobs:", time.time() - start)
    start = time.time()
    # Casting the ppmap generator to a list forces each result to be
    # evaluated.  When done immediately after the jobs are submitted,
    # our program twiddles its thumbs while the work is finished.
    print(list(results))
    print("Time to get the results:", time.time() - start)

    # Delayed evaluation example
    start = time.time()
    results = pool.imap(busybeaver, range(10))
    print "Time to queue the jobs:", time.time() - start
    # In contrast with the above example, this time we're submitting a
    # batch of jobs then going off to do more work while they're
    # processing.  Maybe "time.sleep" isn't the most exciting example,
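The excerpt above is cut off mid-comment; as a rough sketch (not part of the original file), the delayed-evaluation pattern typically continues by doing other work and only then consuming the lazy iterator, with time.sleep standing in for that work:

    # do some other work while the imap jobs run in the background
    time.sleep(2)
    # block only now, when the lazy iterator is finally consumed
    print(list(results))
    print("Time to get the results:", time.time() - start)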
Code example #3
File: MLP_SAS.py Project: yanbin-wang/IAS
def simple_search(dataset,
                  params,
                  val_accuracy,
                  N_workers,
                  restart_num=3,
                  output_dim=10,
                  lr=0.001,
                  epoch_limit=20,
                  random_seed=29):

    print("--------------------------------------------------------------")
    print("SAS starts")
    print("--------------------------------------------------------------")
    print("run for at most " + str(epoch_limit) + " epochs each")
    print("running in parallel using " + str(N_workers) + " workers ")
    print(datetime.datetime.now())

    val_param = copy.deepcopy(params)
    architecture = params['architecture']
    layer = len(architecture)
    instructions = []
    '''
    instructions are guidelines for architecture transformations
    '''
    instructions.append({'Wider': [layer], 'Deeper': []})
    instructions.append({'Wider': [layer, layer], 'Deeper': []})
    instructions.append({'Wider': [], 'Deeper': [layer, layer + 1]})
    instructions.append({'Wider': [layer], 'Deeper': [layer]})
    instructions.append({'Wider': [], 'Deeper': [layer]})
    instructions.append({'Wider': [layer, layer], 'Deeper': [layer]})
    EveryLayer = np.arange(layer) + 1
    # widen all layers at once
    instructions.append({'Wider': EveryLayer, 'Deeper': []})

    print("instruction generation complete")
    pool = ParallelPool(N_workers)
    print(" creating Pool and setting up workers")
    num_instructions = len(instructions)

    l_dataset = [dataset] * num_instructions
    l_output_dim = [output_dim] * num_instructions
    l_params = [params] * num_instructions
    l_lr = [lr] * num_instructions
    l_epoch_limit = [epoch_limit] * num_instructions
    l_restart_num = [restart_num] * num_instructions
    l_seed = [random_seed] * num_instructions

    print("function call preparation complete ")
    '''
    train the candidates in parallel
    '''
    candidates = pool.map(Just_Train, l_dataset, l_output_dim, l_params,
                          instructions, l_lr, l_epoch_limit, l_restart_num,
                          l_seed)
    print("all candidates received")
    print(datetime.datetime.now())

    best_accu = 0
    best_param = 0
    '''
    identify the best candidate
    '''
    for candidate in candidates:
        if candidate is None:
            print("found a None candidate, skipping")
            continue
        accuracy = candidate['accuracy']
        architecture = candidate['params']['architecture']
        print("for architecture: " + str(architecture))
        print("achieved validation accuracy of " + str(accuracy))
        if (accuracy > best_accu):
            best_accu = accuracy
            best_param = candidate['params']

    if (val_accuracy > best_accu):
        best_accu = val_accuracy
        best_param = val_param

    print("best candidate has architecture")
    print(best_param['architecture'])

    return {'accuracy': best_accu, 'params': best_param}
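The pool.map call inside simple_search passes one sequence per positional argument of Just_Train, relying on pathos map accepting multiple iterables and zipping them element-wise, just like the builtin map. A toy sketch of that pattern with a hypothetical three-argument function:

from pathos.pools import ParallelPool

def scale_and_shift(x, factor, offset):
    # hypothetical stand-in for Just_Train: combines one element from each sequence
    return x * factor + offset

if __name__ == '__main__':
    pool = ParallelPool(2)
    xs = [1, 2, 3]
    factors = [10, 10, 10]
    offsets = [0, 1, 2]
    # the three sequences are consumed element-wise: [10, 21, 32]
    print(pool.map(scale_and_shift, xs, factors, offsets))
    pool.close()
    pool.join()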
Code example #4
File: test_pp.py Project: Michael0x2a/pathos
def test_ppmap(obj):
    from pathos.pools import ParallelPool
    p = ParallelPool(2)
    x = [1,2,3]
    assert list(map(obj, x)) == p.map(obj, x)
Code example #5
if __name__ == '__main__':

    def add(x, y, z):
        """Add three values"""
        return x + y + z

    def busybeaver(x):
        """This can take a while"""
        for num in range(1000000):
            x = x + num
        return x

    # Immediate evaluation example
    # (the `pool` below is assumed to have been created earlier, e.g.
    #  from pathos.pools import ParallelPool; pool = ParallelPool())
    import time
    start = time.time()
    results = pool.map(busybeaver, range(10))
    print('Time to queue the jobs: %s' % (time.time() - start))
    start = time.time()
    # Casting the ppmap generator to a list forces each result to be
    # evaluated.  When done immediately after the jobs are submitted,
    # our program twiddles its thumbs while the work is finished.
    print(list(results))
    print('Time to get the results: %s' % (time.time() - start))

    # Delayed evaluation example
    start = time.time()
    results = pool.imap(busybeaver, range(10))
    print('Time to queue the jobs: %s' % (time.time() - start))
    # In contrast with the above example, this time we're submitting a
    # batch of jobs then going off to do more work while they're
    # processing.  Maybe "time.sleep" isn't the most exciting example,
Code example #6
File: test_pp.py Project: uqfoundation/pathos
def run_ppmap(obj):
    from pathos.pools import ParallelPool
    p = ParallelPool(2)
    x = [1,2,3]
    assert list(map(obj, x)) == p.map(obj, x)
Code example #7
t1 = time()
T1 = t1 - t0
print(result)
print("in time = {0:.3f}".format(T1))
print('')

# parallel calculation
numbers = [
    3093215881333057, 3093215881333057, 3093215881333057, 3093215881333057
]
print("{} parallel calculations with {} out of {} CPUs".format(
    len(numbers), WORKERS, cpu_count()))

t0 = time()

# create the pool of workers
pool = ParallelPool(WORKERS)

# open the functions in their own threads and return the results
results = pool.map(function, numbers)
pool.close()
pool.join()

t1 = time()
T2 = t1 - t0

print(results)
print("in time = {0:.3f}".format(T2))
print('')
print("ratio =   {0:.2f}%".format(100. * T2 / T1))
Code example #8
def test_ppmap(obj):
    from pathos.pools import ParallelPool
    p = ParallelPool(2)
    x = [1, 2, 3]
    assert list(map(obj, x)) == p.map(obj, x)
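For reference, the list(...) around the builtin map in these tests matters under Python 3, where map returns a lazy iterator rather than a list, so comparing it directly to the list returned by ParallelPool.map would always fail. A minimal plain-Python illustration:

def square(x):
    return x * x

lazy = map(square, [1, 2, 3])    # a map object in Python 3, not a list
print(lazy == [1, 4, 9])         # False: an iterator never equals a list
print(list(lazy) == [1, 4, 9])   # True once the results are materialized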