Exemplo n.º 1
0
    def __init__(self, multiprocessing_library=None):
        """Select a multiprocessing backend and run its ``freeze_support``.

        Parameters
        ----------
        multiprocessing_library : str, optional
            One of ``'multiprocess'``, ``'multiprocessing_on_dill'`` or
            ``'multiprocessing'``.  When ``None``, falls back to
            ``DataConstants().multiprocessing_library``.
        """
        self._pool = None

        if multiprocessing_library is None:
            # Fall back to the project-wide default backend.
            multiprocessing_library = DataConstants().multiprocessing_library

        self._multiprocessing_library = multiprocessing_library
        self._thread_technique = 'na'

        # freeze_support() is needed on Windows when the program is frozen
        # into an executable; it is a no-op elsewhere.  Only the selected
        # backend is imported; a missing package is tolerated silently.
        if multiprocessing_library == 'multiprocess':
            try:
                import multiprocess
                multiprocess.freeze_support()
            except ImportError:
                pass
        elif multiprocessing_library == 'multiprocessing_on_dill':
            try:
                import multiprocessing_on_dill
                multiprocessing_on_dill.freeze_support()
            except ImportError:
                pass
        elif multiprocessing_library == 'multiprocessing':
            try:
                import multiprocessing
                multiprocessing.freeze_support()
            except ImportError:
                pass
Exemplo n.º 2
0
    def analyze(self, threshold_multiplier, parallel=False):
        """Track neurons across all image stacks and export survival data.

        Reads stacks from ``self.readin_stacks()``, runs ``Tracker.track``
        on each (serially or via a multiprocess pool), exports per-well
        results to CSV, and finally runs a Cox survival analysis.

        Parameters
        ----------
        threshold_multiplier
            Forwarded to ``Tracker``; presumably scales the detection
            threshold — TODO confirm against Tracker's signature.
        parallel : bool
            When True, stacks are processed ``offset`` at a time through
            ``pool.imap_unordered``; when False, one at a time in-process.
        """
        exp_name = self.config['experiment']['name']
        tr = Tracker(exp_name, self.outdir, threshold_multiplier, self.magnification, self.microscope, self.binning)
        gen = self.readin_stacks()
        # freeze_support() guards frozen Windows executables; harmless elsewhere.
        multiprocess.freeze_support()
        # NOTE(review): the pool is created even when parallel=False, and is
        # never closed/joined — worker processes leak for the lifetime of
        # this call.  Consider creating it lazily and using a context manager.
        pool = multiprocess.Pool()
        #This may need to be a function of memory
        offset = 10 
        self.resultdir = join(os.path.dirname(self.outdir), 'results')
        #Make directory in case it doesn't exist
        fileutils.mkdir(self.resultdir)
        self.exporter.prep_csv_file(self.resultdir, self.surv_fname)

        def output(well, neurons, crop_val):
            # Thin wrapper so both branches export results identically.
            self.exporter.export(well, neurons, crop_val)

        if not parallel:
            for data in gen:
                well, neurons = tr.track(data)
                output(well, neurons, 20)

        else:
            stacks_left = True
            while stacks_left:
                try: 
                    # Pull the next stack plus up to offset-1 more, so at most
                    # `offset` stacks are in flight at once (memory bound).
                    # next(gen) raises StopIteration when the generator is
                    # exhausted, which ends the while-loop below.
                    it = itertools.chain([next(gen)], itertools.islice(gen, 0, offset - 1))
                    for well, neurons in pool.imap_unordered(func=tr.track, iterable=it):
                        output(well, neurons, 20)
                except StopIteration:
                    stacks_left = False
        # NOTE(review): group_labels and group_control_label are read but
        # never used below — either dead code or an unfinished feature.
        group_labels = self.config['experiment']['imaging']['group_labels']
        group_control_label = self.config['experiment']['imaging']['group_control_label']
        run_cox_analysis(self.config, self.outdir)
Exemplo n.º 3
0
    def __init__(self, multiprocessing_library=None):
        """Select a multiprocessing backend and run its ``freeze_support``.

        Parameters
        ----------
        multiprocessing_library : str, optional
            One of ``'multiprocess'``, ``'multiprocessing_on_dill'`` or
            ``'multiprocessing'``.  When ``None``, falls back to
            ``DataConstants.multiprocessing_library``.
        """
        self._pool = None

        if multiprocessing_library is None:
            # Fall back to the project-wide default backend.
            multiprocessing_library = DataConstants.multiprocessing_library

        self._multiprocessing_library = multiprocessing_library

        # freeze_support() is needed on Windows when the program is frozen
        # into an executable; a missing backend package is tolerated.
        if multiprocessing_library == 'multiprocess':
            try:
                import multiprocess
                multiprocess.freeze_support()
            except ImportError:
                pass
        elif multiprocessing_library == 'multiprocessing_on_dill':
            try:
                import multiprocessing_on_dill
                multiprocessing_on_dill.freeze_support()
            except ImportError:
                pass
        elif multiprocessing_library == 'multiprocessing':
            try:
                import multiprocessing
                multiprocessing.freeze_support()
            except ImportError:
                pass
Exemplo n.º 4
0
 def start(self):
     """Create the task and result queues, then launch the consumers."""
     mp.freeze_support()
     self.tasks = mp.JoinableQueue()
     self.results = mp.Queue()
     # Build one consumer per available worker slot, then start them all.
     self.consumers = []
     for _ in range(self.getNConsumers()):
         self.consumers.append(Consumer(self.tasks, self.results))
     for consumer in self.consumers:
         consumer.start()
Exemplo n.º 5
0
    def __init__(self, parallel_library=None):
        """Select a parallelism backend and run its ``freeze_support``.

        Parameters
        ----------
        parallel_library : str, optional
            Either ``'multiprocess'`` or ``'pathos'``.  When ``None``,
            falls back to ``constants.parallel_library``.
        """
        self._pool = None

        if parallel_library is None:
            # Fall back to the project-wide default backend.
            parallel_library = constants.parallel_library

        self._parallel_library = parallel_library

        # freeze_support() is needed on Windows when the program is frozen
        # into an executable; a missing backend package is tolerated.
        if parallel_library == 'multiprocess':
            try:
                import multiprocess
                multiprocess.freeze_support()
            except ImportError:
                pass
        elif parallel_library == 'pathos':
            try:
                import pathos
                # AttributeError guarded too: pathos.helpers may not be
                # exposed on the top-level package in every version.
                pathos.helpers.freeze_support()
            except (ImportError, AttributeError):
                pass
Exemplo n.º 6
0
'''
Variation pathos, local method

Windows OS: Hangs / multiprocess error with newer versions of pathos
Mac OS:
Linux:
Debian (unclear because windows app but operated in same manner)

Cloud-based:
Repl.it: Works
Ideone.com: Fails - multiprocess error
'''

from multiprocess import freeze_support
from pathos.multiprocessing import ProcessPool


def f(vars):
    """Return base ** exponent for a (base, exponent) pair."""
    base, exponent = vars
    return base ** exponent


if __name__ == "__main__":
    freeze_support()

    pool = ProcessPool(4)

    print(list(pool.imap(f, [(1, 5), (2, 8), (3, 9)])))
Exemplo n.º 7
0
import multiprocess
multiprocess.freeze_support()

from osrsmath.apps.optimize.gui_single import Ui_MainWindow
from osrsmath.apps.optimize.logic.optimize import get_sets, get_best_sets
from osrsmath.combat.boosts import BoostingSchemes, Prayers, Potions
from osrsmath.combat.monsters import get_monster_data
from osrsmath.combat.monsters import Monster
from osrsmath.combat.fighter import Fighter
import osrsmath.apps.GUI.resources
import osrsmath.config as config

from PySide2 import QtCore, QtGui, QtWidgets
from pprint import pprint
from pathlib import Path
import textwrap
import time
import glob
import os

# Equipment slot names, in the order they are referenced by the optimizer.
slots = [
    'head', 'cape', 'neck', 'ammo', 'weapon', 'body', 'shield', 'legs',
    'hands', 'feet', 'ring'
]


class GUI(Ui_MainWindow):
    OVERVIEW_TEXT = textwrap.dedent("""\
		This app allows you to determine the optimal equipment to wear against a set of opponents.

		There are three main panels:
Exemplo n.º 8
0
script for generating training and testing data for the 
NMF portion of the SPS emulator 

Notes: 
    * 2021/10/12: modified redshift range to 0.3 < z < 1.5 to construct training 
        data for LRG 
    * 2021/06/21: lower upper limit on metalliicty history; wider wavelength range
        that extends to FUV 
'''
import os, sys
import numpy as np 
import multiprocess as mp
from provabgs import infer as Infer
from provabgs import models as Models

mp.freeze_support()
###########################################################################################
# input 
###########################################################################################
name    = 'nmf' 
version = 'lrg.0.1'
# First CLI argument selects the batch: an integer index for a training
# batch, or the literal string 'test' for the test set (anything else
# fails the assert below).
# NOTE(review): assert is stripped under `python -O`; an explicit raise
# would be safer for input validation.
try: 
    ibatch = int(sys.argv[1]) 
except ValueError: 
    ibatch = sys.argv[1]
    assert ibatch == 'test'
# Second CLI argument: number of CPUs to use.
ncpu    = int(sys.argv[2]) 

# hardcoded to NERSC directory  for LRG
#dat_dir='/global/cscratch1/sd/chahah/provabgs/emulator' # hardcoded to NERSC directory 
# for LRG