Example #1
0
    def configure(self,
                  n_jobs=1,
                  parallel=None,
                  prefer=None,
                  require=None,
                  **memmappingpool_args):
        """Create the worker pool and return the effective worker count."""
        n_jobs = self.effective_n_jobs(n_jobs)

        # Guard against a spawned child re-importing this module and trying
        # to start a nested parallel run of its own.
        if int(os.environ.get(self.JOBLIB_SPAWNED_PROCESS, 0)):
            raise ImportError(
                '[joblib] Attempting to do parallel computing '
                'without protecting your import on a system that does '
                'not support forking. To use parallel-computing in a '
                'script, you must protect your main loop using "if '
                "__name__ == '__main__'"
                '". Please see the joblib documentation on Parallel '
                'for more information')

        # Flag the environment so child processes trip the guard above.
        os.environ[self.JOBLIB_SPAWNED_PROCESS] = '1'

        # Reclaim as much memory as possible before the pool is created.
        gc.collect()
        self._pool = Pool()
        self.parallel = parallel

        return n_jobs
Example #2
0
    for _ in range(100):
        v.value += 1


def sync_incr(proc_id):
    """Increment the shared value 100 times, holding its lock per update.

    The proc_id argument is accepted for pool.map compatibility and ignored.
    """
    remaining = 100
    while remaining:
        with v.get_lock():
            v.value += 1
        remaining -= 1


if __name__ == '__main__':
    # Raw shared value: no lock, so concurrent `+= 1` from several workers
    # can lose updates (read-modify-write is not atomic).
    v = RawValue('i', 0)
    print(v.value)

    with Pool() as p:
        # The str(i) arguments appear to be ignored by the workers
        # (sync_incr below discards its parameter) -- TODO confirm for incr.
        res = p.map_async(incr, [str(i) for i in range(4)])
        p.close()
        res.get()
        p.join()

    print(v.value)

    # Synchronized shared value: Value('i') carries a lock; sync_incr uses
    # it via get_lock(), so all 400 increments should survive.
    v = Value('i')
    print(v.value)

    with Pool() as p:
        res = p.map_async(sync_incr, [str(i) for i in range(4)])
        p.close()
        res.get()
Example #3
0

def square(x):
    """Return x multiplied by itself."""
    product = x * x
    return product


def divide(x, y):
    """Return the true-division quotient x / y.

    Raises ZeroDivisionError when y is zero.
    """
    quotient = x / y
    return quotient


def sleep_seconds(s):
    """Block the caller for s seconds; returns None."""
    duration = s
    time.sleep(duration)


if __name__ == '__main__':
    with Pool() as pool:

        # Synchronously execute function hello remotely; `hello` is not
        # defined in this snippet -- presumably earlier in the full example.
        res = pool.apply(hello, ('World', ))
        print(res)  # print "Hello World!"

        # Synchronously apply function square to every element of list
        res = pool.map(square, [1, 2, 3, 4, 5])
        print(res)  # print "[1, 4, 9, 16, 25]"

        # Asynchronously execute function square remotely; apply_async
        # returns immediately with an AsyncResult handle.
        res = pool.apply_async(square, (20, ))
        print(res.ready())  # prints "False"
        res.wait()
        print(res.ready())  # prints "True"
        print(res.get(timeout=5))  # prints "400"
Example #4
0
# from multiprocessing import Pool
from lithops.multiprocessing import Pool
import random


def is_inside(n):
    """Sample n uniform points in the unit square and count those that
    fall inside the quarter circle of radius 1 (Monte Carlo pi estimate).
    """
    hits = 0
    for _ in range(n):
        px = random.random()
        py = random.random()
        if px * px + py * py < 1:
            hits += 1
    return hits


if __name__ == '__main__':
    # 96 workers, 15e9 samples total -> ~156M samples per worker.
    np, n = 96, 15000000000
    # Floor division keeps the per-worker count exact; int(n / np) routes
    # through a float and can lose precision once n exceeds 2**53.
    part_count = [n // np] * np
    pool = Pool(processes=np)
    count = pool.map(is_inside, part_count)
    # pi ~= 4 * (points inside quarter circle) / (total points)
    pi = sum(count) / n * 4
    print("Estimated Pi: {}".format(pi))
Example #5
0
import time

from lithops.multiprocessing import Pool, Barrier, current_process
# from multiprocessing import Pool, Barrier, current_process


def f():
    """Wait on the shared barrier, then return this process's pid and a
    timestamp formatted as a single message string.
    """
    print('waiting...')
    barrier.wait()
    worker_pid = current_process().pid
    return 'process: {} - timestamp: {}'.format(worker_pid, time.time())


if __name__ == "__main__":
    n = 2
    barrier = Barrier(n)

    async_results = []
    with Pool(processes=2) as p:
        res = p.apply_async(f, ())
        async_results.append(res)
        time.sleep(3)
        res = p.apply_async(f, ())
        async_results.append(res)

    for res in async_results:
        print(res.get())
    psf_file = "./../resources/%s" % args[1]
    par_file = "./../resources/%s" % args[2]
    min_temp = int(args[3])
    max_temp = int(args[4])
    num_replicas = int(args[5])
    replica_list = []
    monte_carlo_steps = protomol_utils.DEFAULT_MONTE_CARLO_STEPS

    upload_data = True

    print("Clean old data from COS - start")
    file_utils.clean_remote_storage("%s/simfiles" %
                                    (protomol_utils.output_path))
    print("Clean previous data from COS - completed")

    pool_client = Pool()

    total_run_time = time.time()
    # Split up the temperature range for assigning to each replica.
    inc = float((max_temp - min_temp)) / float(num_replicas - 1)
    print("number of replicas {}".format(num_replicas))
    replica_list = []
    temp_list = []
    #Assign temperature to each replica.
    for x in range(num_replicas):
        #Quart split assigns closer temperature values
        #    to the top and bottom 25% of replicas.
        if quart_temp_split:
            if x < math.ceil(0.25 * num_replicas):
                replica_temp = min_temp + (x * inc / 3)
Example #7
0
# from multiprocessing import Pool
from lithops.multiprocessing import Pool
import random


def is_inside(n):
    """Sample n uniform points in the unit square and count those that
    fall inside the quarter circle of radius 1 (Monte Carlo pi estimate).
    """
    hits = 0
    for _ in range(n):
        px = random.random()
        py = random.random()
        if px * px + py * py < 1:
            hits += 1
    return hits


if __name__ == '__main__':
    # 96 workers, 15e9 samples total -> ~156M samples per worker.
    np, n = 96, 15000000000
    # Floor division keeps the per-worker count exact; int(n / np) routes
    # through a float and can lose precision once n exceeds 2**53.
    part_count = [n // np] * np
    # NOTE(review): stdlib multiprocessing expects initargs to be a tuple
    # for an initializer fn; a dict here is lithops-specific -- confirm.
    pool = Pool(processes=np, initargs={'runtime_memory': 4096})
    count = pool.map(is_inside, part_count)
    # pi ~= 4 * (points inside quarter circle) / (total points)
    pi = sum(count) / n * 4
    print("Estimated Pi: {}".format(pi))