Example #1
def main():
    task = futures.submit(func0, 20)
    # You can wait for a result before continuing computing
    futures.wait([task], return_when=futures.ALL_COMPLETED)
    result = task.result()
    print(result)
    return result
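func0 is defined elsewhere in the source project; a minimal self-contained sketch of this example, with an illustrative stand-in for func0, might look like the following (SCOOP programs are launched with python -m scoop):

from scoop import futures

def func0(n):
    # Illustrative stand-in for the project's real func0.
    return n * 2

def main():
    task = futures.submit(func0, 20)
    # Wait for the result before continuing.
    futures.wait([task], return_when=futures.ALL_COMPLETED)
    result = task.result()
    print(result)
    return result

if __name__ == '__main__':
    main()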
Example #2
def multi_repeat(n, funcs):
    if USE_SCOOP:
        fs = [futures.submit(func) for _ in range(n) for func in funcs]
        futures.wait(fs)
        return [f.result() for f in fs]
    else:
        return [func() for _ in range(n) for func in funcs]
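A hedged usage sketch, appended to the multi_repeat definition above (USE_SCOOP and the two callables are illustrative stand-ins for names defined elsewhere in the source project):

from scoop import futures

USE_SCOOP = True  # assumed module-level flag

def one():
    return 1

def two():
    return 2

if __name__ == '__main__':
    # Runs each callable three times: as parallel futures under SCOOP,
    # or as plain serial calls when USE_SCOOP is False.
    print(multi_repeat(3, [one, two]))  # e.g. [1, 2, 1, 2, 1, 2]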
Example #3
def calc_states(state, cost_table, depth=0):
	"""
	Recursive function to generate the Finite State Machine with all possible states, among them, all valid products.

	@type state:  SPLAcris instance
	@param state: The SPLAcris instance

	@type cost_table:  Cost instance
	@param cost_table: The Cost instance used to calculate feature costs

	@rtype: list
	@return: The list of generated states
	"""
	states_list = []
	futures_list = []
	for feature in state.get_computables(state.term):
		new_state = state.compute_feature(state.term,feature,cost_table)
		if new_state is not None:
			if new_state.has_nil(new_state.term):
				#print "Terminal state"
				states_list.append(new_state)
			else:
				# Make a recursive call with SCOOP (testing parallelism with SCOOP).
				# Only the first recursion level is submitted as a future; deeper levels run serially.
				if depth < 1:
					#states_list.extend(futures.submit(calc_states,new_state,cost_table, depth+1).result())
					futures_list.append(futures.submit(calc_states,new_state,cost_table, depth+1))
				else:
					states_list.extend(calc_states(new_state,cost_table, depth+1))

	for fu in futures_list:
		states_list.extend(fu.result())

	return states_list
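The depth-cutoff idiom above (submit only the shallowest recursive calls as futures and recurse serially below the cutoff) can be sketched generically; Node, walk, and cutoff are illustrative names, not part of the original code:

from collections import namedtuple
from scoop import futures

Node = namedtuple('Node', ['value', 'children'])  # toy tree type for the sketch

def walk(node, depth=0, cutoff=1):
    # Fan out only the shallowest recursive calls as futures; below the
    # cutoff, recurse serially to avoid flooding the task queue.
    values, pending = [node.value], []
    for child in node.children:
        if depth < cutoff:
            pending.append(futures.submit(walk, child, depth + 1, cutoff))
        else:
            values.extend(walk(child, depth + 1, cutoff))
    for f in pending:
        values.extend(f.result())
    return values

if __name__ == '__main__':  # run with: python -m scoop
    tree = Node(1, [Node(2, [Node(4, [])]), Node(3, [])])
    print(walk(tree))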
Example #5
def preselect(sample,
              library,
              method='MI',
              number=10,
              mask=None,
              use_nl=False,
              flip=False,
              step=None,
              lib_add_n=0):
    '''Calculate the requested similarity function for each library entry and return the top number entries.'''
    results = []
    column = 0

    # TODO: use multiple modalities for preselection?
    if use_nl:
        column = 4 + lib_add_n

    for (i, j) in enumerate(library):
        results.append(
            futures.submit(calculate_similarity,
                           sample,
                           MriDataset(scan=j[column]),
                           method=method,
                           mask=mask,
                           flip=flip,
                           step=step))
    futures.wait(results, return_when=futures.ALL_COMPLETED)

    val = [(j.result(), library[i]) for (i, j) in enumerate(results)]

    val_sorted = sorted(val, key=lambda s: s[0])

    return [i[1] for i in val_sorted[0:number]]
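When no per-call bookkeeping is needed, the same scatter/gather can be written with SCOOP's futures.map, using functools.partial to carry the keyword arguments; everything below is an illustrative stand-in, not the project's real code:

from functools import partial
from scoop import futures

def similarity(sample, scan, method='MI'):
    # Toy stand-in for calculate_similarity.
    return abs(hash((sample, scan, method))) % 100

if __name__ == '__main__':  # run with: python -m scoop
    sample = 'subject01'
    library = ['scan%02d' % i for i in range(10)]
    sim = partial(similarity, sample, method='MI')
    # futures.map preserves input order, so the scores can be zipped
    # back against the library entries for ranking.
    scores = list(futures.map(sim, library))
    top = [entry for _, entry in sorted(zip(scores, library))[:3]]
    print(top)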
Example #7
def funcExcept(n):
    f = futures.submit(funcRaise, n)
    try:
        f.result()
    except:
        return True

    return False
Example #9
def funcSharedFunction():
    shared.setConst(myRemoteFunc=func4)
    result = True
    for _ in range(100):
        try:
            result &= futures.submit(funcUseSharedFunction).result()
        except AssertionError:
            result = False
    return result
Example #10
def main():
    path = 'tasks'
    task = futures.submit(get_data, path)
    futures.wait([task], return_when=futures.ALL_COMPLETED)
    result = task.result()
    sorted_result = sorted(result, key=result.__getitem__, reverse=True)
    for key in sorted_result:
        print("%s: %s" % (key, result[key]))
    return result
Example #11
def submit_get_queues_size(n):
    task = futures.submit(func4, n)
    result = task.result()
    return [
        len(_control.execQueue.inprogress),
        len(_control.execQueue.ready),
        len(_control.execQueue.movable),
        len(_control.futureDict) - 1, # - 1 because the current function is a future too
    ]
Example #12
def funcDone():
    f = futures.submit(func4, 100)
    futures.wait((f,))
    done = f.done()
    if done != True:
        return done
    res = f.result()
    done = f.done()
    return done
Example #13
def func2(n):
    if n > 10:
        # This exception is treated in func1
        raise Exception(10)
    launches = []
    for i in range(n):
        launches.append(futures.submit(func3, i + 1))
    result = futures.as_completed(launches)
    return sum(res.result() for res in result)
Example #15
File: scooptest.py Project: Mariovr/RG
def main(n):
  # Creates a tree of workers (each worker recursively spawns further workers).
  task = futures.submit(func0, n)
  # You can wait for a result before continuing computing
  futures.wait([task], return_when=futures.ALL_COMPLETED)
  result = task.result()
  print(result)
  return result
Example #19
def run(values, parameters):
    # Create launches

    launches = [futures.submit(func, values, parameters, i) for i in cycle]
    # Add a callback on every launch
    for launch in launches:
        launch.add_done_callback(doneElement)

    # Wait for the launches to complete.
    [completed for completed in futures.as_completed(launches)]
Example #20
def main():
    # Create launches
    launches = [futures.submit(myFunc, i + 1) for i in range(5)]

    # Add a callback on every launch
    for launch in launches:
        launch.add_done_callback(doneElement)

    # Wait for the launches to complete.
    [completed for completed in futures.as_completed(launches)]
Example #22
def resample_split_segmentations(input, output,xfm=None, like=None, order=4, invert_transform=False, symmetric=False):
    '''resample individual segmentations, using parallel execution'''
    results=[]
    base=input.seg.rsplit('.mnc',1)[0]
    for (i,j) in input.seg_split.items():
        if i not in output.seg_split:
            output.seg_split[i]='{}_{:03d}.mnc'.format(base,i)
            
        results.append(futures.submit(
            resample_file,j,output.seg_split[i],xfm=xfm,like=like,order=order,invert_transform=invert_transform
        ))
    if symmetric:
        base=input.seg_f.rsplit('.mnc',1)[0]
        for (i,j) in input.seg_f_split.items():
            if i not in output.seg_f_split:
                output.seg_f_split[i]='{}_{:03d}.mnc'.format(base,i)

            results.append(futures.submit(
                resample_file,j,output.seg_f_split[i],xfm=xfm,like=like,order=order,invert_transform=invert_transform
            ))
    futures.wait(results, return_when=futures.ALL_COMPLETED)
Example #23
def funcSharedConstant():
    shared.setConst(myVar={1: 'Example 1',
                                2: 'Example 2',
                                3: 'Example 3',
                               })
    shared.setConst(secondVar="Hello World!")
    result = True
    for _ in range(100):
        try:
            result &= futures.submit(funcUseSharedConstant).result()
        except AssertionError:
            result = False
    return result
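funcUseSharedConstant, the consumer side of these constants, is not shown in the listing; a minimal sketch of what it might look like, assuming SCOOP's shared.getConst API:

from scoop import shared

def funcUseSharedConstant():
    # Shared constants are write-once and readable from any worker.
    assert shared.getConst('myVar')[1] == 'Example 1'
    assert shared.getConst('secondVar') == "Hello World!"
    return True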
Example #25
def funcCallback():
    f = futures.submit(func4, 100)

    def callBack(future):
        future.was_callabacked = True

    f.add_done_callback(callBack)
    if len(f.callback) == 0:
        return False
    futures.wait((f,))
    try:
        return f.was_callabacked
    except:
        return False
Example #26
def funcDeleteSharedConstant():
    """Tests if shared constants can be deleted.

    First runs the test that creates constants and
    tests for *local* deletion afterwards.

    """
    result = funcSharedConstant()
    for _ in range(100):
        try:
            result &= futures.submit(funcUseDeletedSharedConstant).result()
        except AssertionError:
            result = False
    return result
Example #28
File: util.py Project: zach-youssef/RMG-Py
def submit_(func, *args, **kwargs):
    """
    Task submission of a function.

    returns the return value of the called function, or
    when SCOOP is loaded, the future object.
    """
    try:
        task = submit(WorkerWrapper(func), *args, **kwargs)  # returns immediately
        return task
    except Exception:
        # A NameError is raised when the SCOOP library is not imported properly.
        logger.debug('SCOOP not loaded. Submitting in serial mode.')
        return func(*args, **kwargs)
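Because submit_ returns either a future (SCOOP loaded) or a plain value (serial fallback), callers have to normalize the result; a hedged usage sketch with a hypothetical my_func:

task = submit_(my_func, 10)
# A SCOOP future exposes .result(); in the serial fallback the call
# above already returned the value itself.
result = task.result() if hasattr(task, 'result') else task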
Example #30
def preselect(sample,
              library,
              method='MI',
              number=10,
              mask=None,
              use_nl=False,
              flip=False,
              step=None,
              lib_add_n=0,
              groups=None):
    '''Calculate the requested similarity function for each library entry and return the top number entries.'''
    results = []
    column = 2  # skip over grading and group

    # TODO: use multiple modalities for preselection?
    if use_nl:
        column = 6 + lib_add_n

    for (i, j) in enumerate(library):
        results.append(
            futures.submit(calculate_similarity,
                           sample,
                           MriDataset(scan=j[column]),
                           method=method,
                           mask=mask,
                           flip=flip,
                           step=step))
    futures.wait(results, return_when=futures.ALL_COMPLETED)

    val = [(j.result(), int(library[i][0]), library[i])
           for (i, j) in enumerate(results)]

    if groups is None:
        val_sorted = sorted(val, key=lambda s: s[0])
        return [i[2] for i in val_sorted[0:number]]
    else:
        s_number = number // groups  # integer count per group
        res = []

        for i in range(groups):
            val_sorted = sorted([v for v in val if v[1] == i],
                                key=lambda s: s[0])
            res.extend(val_sorted[0:s_number])

        return [i[2] for i in res]
Example #31
def submit_(func, *args, **kwargs):
    """
    Task submission of a function.

    returns the return value of the called function, or
    when SCOOP is loaded, the future object.
    """
    warnings.warn("The option scoop is no longer supported "
                  "and may be removed after Version 2.4", DeprecationWarning)
    try:
        task = submit(WorkerWrapper(func), *args, **kwargs)  # returns immediately
        return task
    except Exception:
        # A NameError is raised when the SCOOP library is not imported properly.
        logger.debug('SCOOP not loaded. Submitting in serial mode.')
        return func(*args, **kwargs)
Example #32
def main():
    repeatNum = 5
    maxStrLen = 8
    tasksNum = 4
    for j in range(maxStrLen):
        for repeat in range(repeatNum):
            strToTest = 'H' * (10**j)
            n1 = time.time()
            # tasks = [futures.submit(sleepOneSec) for i in range(1)]
            tasks = [
                futures.submit(sleepOneSecWithString, strToTest)
                for i in range(tasksNum)
            ]
            results = [task.result() for task in tasks]
            n2 = time.time()
            t_sleep = (n2 - n1) * 10**3
            output = {'stringSize': j, 't_sleep/ms': t_sleep}
            with open('output.json', 'a') as outputFile:
                json.dump(output, outputFile)
                outputFile.write('\n')
Example #33
File: run_scf.py Project: FarnazH/hortonqa
def main():
    try:
        os.remove("scf_results.txt")
    except OSError:
        pass
#
#     res =  run(5, mixings, SCFOpt)
#     print [str(mix) +" " + str(iter) for mix,iter in zip(reversed(mixings[-len(res):]), res)]

    fseq = [futures.submit(run, i, mixings, SCFOpt) for i in xrange(nrandom)]

    not_done = ["dummy"]
    while not_done:
        done, not_done = futures.wait(fseq, None, "FIRST_COMPLETED")

        for i in done:
            with open('scf_results.txt', 'a') as f:
                line = [str(mix) +" "+ str(iter) for mix,iter in zip(reversed(mixings[-len(i.result()):]), i.result())]
                print >> f, '\n'.join(line)
            fseq.remove(i)
Example #35
def funcSub(n):
    f = futures.submit(func4, n)
    return f.result()
Example #36
if __name__ == '__main__':

    tasks = []

    print "HALFRINGS"
    survs = ["nominal", "full"]
    freqs = [30,44,70]
    for freq in freqs:
        smooth_combine_config = dict(fwhm=np.radians(1.), degraded_nside=128)
        chtags = [""]
        if freq == 70:
            chtags += ["18_23", "19_22", "20_21"]
        for chtag in chtags:
            for surv in survs:
                tasks.append(futures.submit(halfrings,freq, chtag, surv, pol='IQU', smooth_combine_config=smooth_combine_config, mapreader=mapreader, output_folder="dx9/halfrings2/",read_masks=read_dpc_masks))

    #print "SURVDIFF"
    #survs = [1,2,3,4,5]
    #freqs = [30, 44, 70]
    #for freq in freqs:
    #    ps_mask, gal_mask = read_dpc_masks(freq, NSIDE)
    #    smooth_combine_config = dict(fwhm=np.radians(1.), degraded_nside=128,smooth_mask=ps_mask, spectra_mask=gal_mask)
    #    chtags = [""]
    #    if freq == 70:
    #        chtags += ["18_23", "19_22", "20_21"]
    #    for chtag in chtags:
    #         surveydiff(freq, chtag, survs, pol='IQU', smooth_combine_config=smooth_combine_config, mapreader=mapreader, output_folder="dx9/surveydiff/",read_masks=read_dpc_masks)

    #print "SURVDIFF, CH"
    #survs = [1,2,3,4,5]
Example #37
def funcWait(timeout):
    fs = [futures.submit(func4, i) for i in range(1000)]
    done, not_done = futures.wait(fs, timeout=timeout)
    return done, not_done
Example #38
File: demc.py Project: compmem/RunDEMC
    def _fixed_like(self, pop, *args):
        # the args will contain the list of params in the other models
        # that use this param
        if len(args) == 0:
            # we are not likely
            return -np.ones(len(pop))*np.inf

        # init like to zero
        log_like = np.zeros(len(pop))

        # loop over models, calculating their likes with the proposed value
        # of this fixed param
        res = []
        jobs = []
        mods = []
        for m in args:
            #from IPython.core.debugger import Tracer ; Tracer()()
            # make sure the submodel has initialized
            if not hasattr(m['model'], '_particles'):
                return -np.ones(len(pop))*np.inf

            # get the current population and replace with this proposal
            mpop = m['model']._particles[-1].copy()

            # set all the fixed params
            for i, j in m['param_ind']:
                mpop[:, i] = pop[:, j]

            # see if we can simply reuse log_like for unchanged children
            if np.all((mpop - m['model']._particles[-1]) == 0.0):
                # it's the same params, so just pull the likes
                mprop_log_likes = m['model']._log_likes[-1]
                log_like += mprop_log_likes
                self._mprop_log_likes[m['model']] = mprop_log_likes
            else:
                # calc the log-likes from all the models using these params
                if not isinstance(m['model'], HyperPrior) and \
                   not isinstance(m['model'], FixedParams) and \
                   ((scoop and scoop.IS_RUNNING) or self._parallel):
                    if (scoop and scoop.IS_RUNNING):
                        # submit the like_fun call to scoop
                        margs = [m['model'].apply_param_transform(mpop)] + \
                                 list(m['model']._like_args)
                        res.append(futures.submit(m['model']._like_fun, *margs))
                    else:
                        # submit to joblib
                        jobs.append(delayed(m['model']._like_fun)(m['model'].apply_param_transform(mpop),
                                                                  *m['model']._like_args))

                    # append to list of mods we're processing
                    mods.append(m)
                else:
                    # calc log likes in serial
                    mprop_log_likes,mprop_posts = m['model']._calc_log_likes(mpop)

                    # save these model likes for updating the model with those
                    # that were kept when we call _post_evolve
                    self._mprop_log_likes[m['model']] = mprop_log_likes

                    # aggregate log_likes for each particle
                    log_like += mprop_log_likes

                if m['model']._use_priors:
                    mprop_log_prior = m['model'].calc_log_prior(mpop)

                # save the prior
                if m['model']._use_priors:
                    self._mprop_log_prior[m['model']] = mprop_log_prior

        if len(jobs) > 0 and \
           not (scoop and scoop.IS_RUNNING) and self._parallel:
            # submit the joblib jobs
            res = self._parallel(jobs)

        if len(res) > 0:
            # collect the results
            for mi, m in enumerate(mods):
                # wait for the result
                if (scoop and scoop.IS_RUNNING):
                    out = res[mi].result()
                else:
                    # pull results from joblib
                    out = res[mi]

                if isinstance(out, tuple):
                    # split into likes and posts
                    mprop_log_likes,mprop_posts = out
                else:
                    # just likes
                    mprop_log_likes = out
                    mprop_posts = None

                # add the log_likes
                log_like += mprop_log_likes

                # save these model likes for updating the model with those
                # that were kept when we call _post_evolve
                self._mprop_log_likes[m['model']] = mprop_log_likes
                
        return log_like
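The dual dispatch above (SCOOP futures when scoop.IS_RUNNING, joblib jobs otherwise) can be distilled into a small helper; a sketch under the assumption that both libraries are installed, with run_all and square as illustrative names:

import scoop
from joblib import Parallel, delayed

def square(x):
    return x * x

def run_all(func, args):
    # Prefer SCOOP when its runtime is up; otherwise fall back to joblib.
    if scoop.IS_RUNNING:
        from scoop import futures
        return [f.result() for f in [futures.submit(func, a) for a in args]]
    return Parallel(n_jobs=2)(delayed(func)(a) for a in args)

if __name__ == '__main__':
    print(run_all(square, range(5)))  # [0, 1, 4, 9, 16]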
Example #39
def func2(n):
    launches = []
    for i in range(n):
        launches.append(futures.submit(func3, i + 1))
    result = futures.as_completed(launches)
    return sum(r.result() for r in result)
Example #40
File: scooptest.py Project: Mariovr/RG
def func2(n):
  launches = [futures.submit(func3, i + 1) for i in range(n)]
  # Spawn a generator for each completion, unordered
  result = (a.result() for a in futures.as_completed(launches))
  return sum(result)
Example #41
def func0(n):
    task = futures.submit(func1, n)
    result = task.result()
    return result
Example #42
from scoop import futures
import urllib.error
import urllib.request
import socket
import time

def getSize(string):
    """ This functions opens a web sites and then calculate the total
    size of the page in bytes. This is for the sake of the example. Do
    not use this technique in real code as it is not a very bright way
    to do this."""
    try:
        # We open the web page
        with urllib.request.urlopen(string, None, 1) as f:
            return sum(len(line) for line in f)
    except (urllib.error.URLError, socket.timeout) as e:
        return 0

if __name__ == "__main__":
    # The pageurl variable contains a link to a list of web sites; a
    # shorter test URL is commented out below.
    pageurl = "http://httparchive.org/lists/Fortune%20500.txt"
    #pageurl  = "http://www.example.com"
    with urllib.request.urlopen(pageurl) as pagelist:
        pages = [page.decode() for page in pagelist][:30]

    # This will apply the getSize function on every item of the pages list
    # in parallel. The results will be treated as they are produced.
    fut = [futures.submit(getSize, page) for page in pages]
    for f in futures.as_completed(fut):
        time.sleep(0.1) # Work on the data
        print(f.result())
Example #43
def funcKeywords(n, **kwargs):
    task = futures.submit(funcWithKW, n, **kwargs)
    futures.wait([task], return_when=futures.ALL_COMPLETED)
    result = task.result()
    return result
Example #47
def mainSimple(n):
    task = futures.submit(func3, n)
    futures.wait([task], return_when=futures.ALL_COMPLETED)
    result = task.result()
    return result
Example #48
 masked=[]
 results=[]
 
 try:
     while True:
         queued=0
         
         masked=[]
         results=[]
         
         while len(results)<max_jobs_queue:
             if inp.value_mask():
                 masked.append(True)
             
                 # submit job for execution
                 results.append(futures.submit(run_nlme,inp.value()))
             else:
                 # we are passing-by voxels outside of the mask,
                 # assign default value
                 masked.append(False)
             # move to the next voxel
             inp.next()
         print "*"
         # now let's get results
         k=0
         for i in masked:
             if i:
                 # get result of processing (will wait for them to become available)
                 out.value(results[k].result())
                 k+=1
             else:
Example #52
def fusion_grading(input_scan,
                   library_description,
                   output_segment,
                   input_mask=None,
                   parameters={},
                   exclude=[],
                   work_dir=None,
                   debug=False,
                   ec_variant=None,
                   fuse_variant=None,
                   regularize_variant=None,
                   add=[],
                   cleanup=False,
                   cleanup_xfm=False,
                   exclude_re=None):
    """Apply fusion segmentation"""

    if debug:
        print("Segmentation parameters:")
        print(repr(parameters))

    out_variant = ''
    if fuse_variant is not None:
        out_variant += fuse_variant

    if regularize_variant is not None:
        out_variant += '_' + regularize_variant

    if ec_variant is not None:
        out_variant += '_' + ec_variant

    if work_dir is None:
        work_dir = output_segment + os.sep + 'work_segment'

    if not os.path.exists(work_dir):
        os.makedirs(work_dir)

    work_lib_dir = work_dir + os.sep + 'library'
    work_lib_dir_f = work_dir + os.sep + 'library_f'

    if not os.path.exists(work_lib_dir):
        os.makedirs(work_lib_dir)

    if not os.path.exists(work_lib_dir_f):
        os.makedirs(work_lib_dir_f)

    library_nl_samples_avail = library_description['nl_samples_avail']
    library_modalities = library_description.get('modalities', 1) - 1

    # perform symmetric segmentation
    segment_symmetric = parameters.get('segment_symmetric', False)

    # read filter parameters
    pre_filters = parameters.get('pre_filters', None)
    post_filters = parameters.get('post_filters',
                                  parameters.get('filters', None))

    # perform local linear registration
    do_initial_register = parameters.get('initial_register',
                                         parameters.get('linear_register', {}))

    if do_initial_register is not None and isinstance(do_initial_register,
                                                      dict):
        initial_register = do_initial_register
        do_initial_register = True
    else:
        initial_register = {}

    inital_reg_type = parameters.get(
        'initial_register_type',
        parameters.get('linear_register_type',
                       initial_register.get('type', '-lsq12')))

    inital_reg_ants = parameters.get(
        'initial_register_ants', parameters.get('linear_register_ants', False))

    inital_reg_options = parameters.get('initial_register_options',
                                        initial_register.get('options', None))

    inital_reg_downsample = parameters.get(
        'initial_register_downsample',
        initial_register.get('downsample', None))

    inital_reg_use_mask = parameters.get(
        'initial_register_use_mask', initial_register.get('use_mask', False))

    initial_reg_objective = initial_register.get('objective', '-xcorr')

    # perform local linear registration
    do_initial_local_register = parameters.get(
        'initial_local_register', parameters.get('local_linear_register', {}))
    if do_initial_local_register is not None and isinstance(
            do_initial_local_register, dict):
        initial_local_register = do_initial_local_register
        do_initial_local_register = True
    else:
        initial_local_register = {}

    local_reg_type = parameters.get(
        'local_register_type', initial_local_register.get('type', '-lsq12'))

    local_reg_ants = parameters.get('local_register_ants', False)

    local_reg_opts = parameters.get(
        'local_register_options', initial_local_register.get('options', None))

    local_reg_bbox = parameters.get('local_register_bbox',
                                    initial_local_register.get('bbox', False))

    local_reg_downsample = parameters.get(
        'local_register_downsample',
        initial_local_register.get('downsample', None))

    local_reg_use_mask = parameters.get(
        'local_register_use_mask',
        initial_local_register.get('use_mask', True))

    local_reg_objective = initial_local_register.get('objective', '-xcorr')
    # if non-linear registration should be performed for library creation
    do_nonlinear_register = parameters.get('non_linear_register', False)

    # if non-linear registration should be performed with ANTS
    do_nonlinear_register_ants = parameters.get('non_linear_register_ants',
                                                False)
    nonlinear_register_type = parameters.get('non_linear_register_type', None)
    if nonlinear_register_type is None:
        if do_nonlinear_register_ants:
            nonlinear_register_type = 'ants'

    # if non-linear registration should be performed pairwise
    do_pairwise = parameters.get('non_linear_pairwise', False)

    # if pairwise registration should be performed using ANTS
    do_pairwise_ants = parameters.get('non_linear_pairwise_ants', True)
    pairwise_register_type = parameters.get('non_linear_pairwise_type', None)
    if pairwise_register_type is None:
        if do_pairwise_ants:
            pairwise_register_type = 'ants'

    # library pre-selection parameters
    library_preselect = parameters.get('library_preselect', 10)
    library_preselect_step = parameters.get('library_preselect_step', None)
    library_preselect_method = parameters.get('library_preselect_method', 'MI')

    nlreg_level = parameters.get('non_linear_register_level', 2)
    nlreg_start = parameters.get('non_linear_register_start', 16)
    nlreg_options = parameters.get('non_linear_register_options', None)
    nlreg_downsample = parameters.get('non_linear_register_downsample', None)

    pairwise_level = parameters.get('pairwise_level', 2)
    pairwise_start = parameters.get('pairwise_start', 16)
    pairwise_options = parameters.get('pairwise_options', None)

    fuse_options = parameters.get('fuse_options', None)

    resample_order = parameters.get('resample_order', 2)
    label_resample_order = parameters.get('label_resample_order',
                                          resample_order)

    resample_baa = parameters.get('resample_baa', True)

    use_median = parameters.get('use_median', False)
    # QC image parameters
    qc_options = parameters.get('qc_options', None)

    # special case for training error correction, assume input scan is already pre-processed
    run_in_bbox = parameters.get('run_in_bbox', False)

    classes_number = library_description['classes_number']
    groups = library_description['groups']
    seg_datatype = 'byte'

    output_info = {}

    sample = MriDataset(scan=input_scan,
                        seg=None,
                        mask=input_mask,
                        protect=True,
                        add=add)
    # get parameters
    model = MriDataset(scan=library_description['model'],
                       mask=library_description['model_mask'],
                       add=library_description.get('model_add', []))

    local_model = MriDataset(
        scan=library_description['local_model'],
        mask=library_description['local_model_mask'],
        scan_f=library_description.get('local_model_flip', None),
        mask_f=library_description.get('local_model_mask_flip', None),
        seg=library_description.get('local_model_seg', None),
        seg_f=library_description.get('local_model_seg_flip', None),
        add=library_description.get('local_model_add', []),
        add_f=library_description.get('local_model_add_flip', []),
    )

    library = library_description['library']

    sample_modalities = len(add)

    print("\n\n")
    print("Sample modalities:{}".format(sample_modalities))
    print("\n\n")
    # apply the same steps as used in library creation to perform segmentation:

    # global
    initial_xfm = None
    nonlinear_xfm = None
    bbox_sample = None
    nl_sample = None
    bbox_linear_xfm = None

    sample_filtered = MriDataset(prefix=work_dir,
                                 name='flt_' + sample.name,
                                 add_n=sample_modalities)

    # QC file
    # TODO: allow for alternative location, extension
    sample_qc = work_dir + os.sep + 'qc_' + sample.name + '_' + out_variant + '.jpg'

    if run_in_bbox:
        segment_symmetric = False
        do_initial_register = False
        do_initial_local_register = False
        # assume filter already applied!
        pre_filters = None
        post_filters = None

    if segment_symmetric:
        # need to flip the inputs
        flipdir = work_dir + os.sep + 'flip'
        if not os.path.exists(flipdir):
            os.makedirs(flipdir)

        sample.scan_f = flipdir + os.sep + os.path.basename(sample.scan)
        sample.add_f = ['' for (i, j) in enumerate(sample.add)]

        for (i, j) in enumerate(sample.add):
            sample.add_f[i] = flipdir + os.sep + os.path.basename(
                sample.add[i])

        if sample.mask is not None:
            sample.mask_f = flipdir + os.sep + 'mask_' + os.path.basename(
                sample.scan)
        generate_flip_sample(sample)

    if pre_filters is not None:
        apply_filter(sample.scan,
                     sample_filtered.scan,
                     pre_filters,
                     model=model.scan,
                     model_mask=model.mask)

        if sample.mask is not None:
            shutil.copyfile(sample.mask, sample_filtered.mask)

        for i, j in enumerate(sample.add):
            shutil.copyfile(sample.add[i], sample_filtered.add[i])

        sample = sample_filtered
    else:
        sample_filtered = None

    output_info['sample_filtered'] = sample_filtered

    if do_initial_register:
        initial_xfm = MriTransform(prefix=work_dir, name='init_' + sample.name)

        if inital_reg_type == 'elx' or inital_reg_type == 'elastix':
            elastix_registration(sample,
                                 model,
                                 initial_xfm,
                                 symmetric=segment_symmetric,
                                 parameters=inital_reg_options,
                                 nl=False,
                                 downsample=inital_reg_downsample)
        elif inital_reg_type == 'ants' or inital_reg_ants:
            linear_registration(sample,
                                model,
                                initial_xfm,
                                symmetric=segment_symmetric,
                                reg_type=inital_reg_type,
                                linreg=inital_reg_options,
                                ants=True,
                                downsample=inital_reg_downsample)
        else:
            linear_registration(sample,
                                model,
                                initial_xfm,
                                symmetric=segment_symmetric,
                                reg_type=inital_reg_type,
                                linreg=inital_reg_options,
                                downsample=inital_reg_downsample,
                                objective=initial_reg_objective)

        output_info['initial_xfm'] = initial_xfm

    # local
    bbox_sample = MriDataset(prefix=work_dir,
                             name='bbox_init_' + sample.name,
                             add_n=sample_modalities)

    if do_initial_local_register:
        bbox_linear_xfm = MriTransform(prefix=work_dir,
                                       name='bbox_init_' + sample.name)

        if local_reg_type == 'elx' or local_reg_type == 'elastix':
            elastix_registration(sample,
                                 local_model,
                                 bbox_linear_xfm,
                                 symmetric=segment_symmetric,
                                 init_xfm=initial_xfm,
                                 resample_order=resample_order,
                                 parameters=local_reg_opts,
                                 bbox=local_reg_bbox,
                                 downsample=local_reg_downsample)
        elif local_reg_type == 'ants' or local_reg_ants:
            linear_registration(sample,
                                local_model,
                                bbox_linear_xfm,
                                init_xfm=initial_xfm,
                                symmetric=segment_symmetric,
                                reg_type=local_reg_type,
                                linreg=local_reg_opts,
                                resample_order=resample_order,
                                ants=True,
                                close=True,
                                bbox=local_reg_bbox,
                                downsample=local_reg_downsample)
        else:
            linear_registration(sample,
                                local_model,
                                bbox_linear_xfm,
                                init_xfm=initial_xfm,
                                symmetric=segment_symmetric,
                                reg_type=local_reg_type,
                                linreg=local_reg_opts,
                                resample_order=resample_order,
                                close=True,
                                bbox=local_reg_bbox,
                                downsample=local_reg_downsample,
                                objective=local_reg_objective)

    else:
        bbox_linear_xfm = initial_xfm

    output_info['bbox_initial_xfm'] = bbox_linear_xfm
    bbox_sample.mask = None
    bbox_sample.seg = None
    bbox_sample.seg_f = None

    warp_sample(
        sample,
        local_model,
        bbox_sample,
        transform=bbox_linear_xfm,
        symmetric=segment_symmetric,
        symmetric_flip=segment_symmetric,  # need to flip symmetric dataset
        resample_order=resample_order,
        filters=post_filters,
    )

    output_info['bbox_sample'] = bbox_sample

    # TODO: run local intensity normalization

    # 3. run non-linear registration if needed
    if do_nonlinear_register:
        nl_sample = MriDataset(prefix=work_dir,
                               name='nl_' + sample.name,
                               add_n=sample_modalities)
        nonlinear_xfm = MriTransform(prefix=work_dir, name='nl_' + sample.name)

        if nonlinear_register_type == 'elx' or nonlinear_register_type == 'elastix':
            elastix_registration(bbox_sample,
                                 local_model,
                                 nonlinear_xfm,
                                 symmetric=segment_symmetric,
                                 level=nlreg_level,
                                 start_level=nlreg_start,
                                 parameters=nlreg_options,
                                 nl=True,
                                 downsample=nlreg_downsample)
        elif nonlinear_register_type == 'ants' or do_nonlinear_register_ants:
            non_linear_registration(bbox_sample,
                                    local_model,
                                    nonlinear_xfm,
                                    symmetric=segment_symmetric,
                                    level=nlreg_level,
                                    start_level=nlreg_start,
                                    parameters=nlreg_options,
                                    ants=True,
                                    downsample=nlreg_downsample)
        else:
            non_linear_registration(bbox_sample,
                                    local_model,
                                    nonlinear_xfm,
                                    symmetric=segment_symmetric,
                                    level=nlreg_level,
                                    start_level=nlreg_start,
                                    parameters=nlreg_options,
                                    ants=False,
                                    downsample=nlreg_downsample)

        print("\n\n\nWarping the sample!:{}\n\n\n".format(bbox_sample))
        nl_sample.seg = None
        nl_sample.seg_f = None
        nl_sample.mask = None

        warp_sample(bbox_sample,
                    local_model,
                    nl_sample,
                    transform=nonlinear_xfm,
                    symmetric=segment_symmetric,
                    resample_order=resample_order)

        output_info['nl_sample'] = nl_sample
    else:
        nl_sample = bbox_sample

    output_info['nonlinear_xfm'] = nonlinear_xfm

    if exclude_re is not None:
        _exclude_re = re.compile(exclude_re)
        selected_library = [
            i for i in library
            if not _exclude_re.match(i[2]) and i[2] not in exclude
        ]
    else:
        selected_library = [i for i in library if i[2] not in exclude]

    selected_library_f = []

    if segment_symmetric:  # fill up with all entries
        selected_library_f = selected_library

    # library pre-selection if needed
    # we need balanced number of samples for each group
    if library_preselect > 0 and library_preselect < len(selected_library):
        loaded = False
        loaded_f = False

        if os.path.exists(work_lib_dir + os.sep + 'sel_library.json'):
            with open(work_lib_dir + os.sep + 'sel_library.json', 'r') as f:
                selected_library = json.load(f)
            loaded = True

        if segment_symmetric and os.path.exists(work_lib_dir_f + os.sep +
                                                'sel_library.json'):
            with open(work_lib_dir_f + os.sep + 'sel_library.json', 'r') as f:
                selected_library_f = json.load(f)
            loaded_f = True

        if do_nonlinear_register:
            if not loaded:
                selected_library = preselect(nl_sample,
                                             selected_library,
                                             method=library_preselect_method,
                                             number=library_preselect,
                                             use_nl=library_nl_samples_avail,
                                             step=library_preselect_step,
                                             lib_add_n=library_modalities,
                                             groups=groups)
            if segment_symmetric:
                if not loaded_f:
                    selected_library_f = preselect(
                        nl_sample,
                        selected_library,
                        method=library_preselect_method,
                        number=library_preselect,
                        use_nl=library_nl_samples_avail,
                        flip=True,
                        step=library_preselect_step,
                        lib_add_n=library_modalities,
                        groups=groups)
        else:
            if not loaded:
                selected_library = preselect(bbox_sample,
                                             selected_library,
                                             method=library_preselect_method,
                                             number=library_preselect,
                                             use_nl=False,
                                             step=library_preselect_step,
                                             lib_add_n=library_modalities,
                                             groups=groups)
            if segment_symmetric:
                if not loaded_f:
                    selected_library_f = preselect(
                        bbox_sample,
                        selected_library,
                        method=library_preselect_method,
                        number=library_preselect,
                        use_nl=False,
                        flip=True,
                        step=library_preselect_step,
                        lib_add_n=library_modalities,
                        groups=groups)

        if not loaded:
            with open(work_lib_dir + os.sep + 'sel_library.json', 'w') as f:
                json.dump(selected_library, f)

        if not loaded_f:
            if segment_symmetric:
                with open(work_lib_dir_f + os.sep + 'sel_library.json',
                          'w') as f:
                    json.dump(selected_library_f, f)

        output_info['selected_library'] = selected_library
        if segment_symmetric:
            output_info['selected_library_f'] = selected_library_f

    selected_library_scan = []
    selected_library_xfm = []
    selected_library_warped2 = []
    selected_library_xfm2 = []

    selected_library_scan_f = []
    selected_library_xfm_f = []
    selected_library_warped_f = []
    selected_library_warped2_f = []
    selected_library_xfm2_f = []

    for (i, j) in enumerate(selected_library):
        d = MriDataset(scan=j[2],
                       seg=j[3],
                       add=j[4:4 + library_modalities],
                       group=int(j[0]),
                       grading=float(j[1]))

        selected_library_scan.append(d)

        selected_library_warped2.append(
            MriDataset(name=d.name,
                       prefix=work_lib_dir,
                       add_n=sample_modalities,
                       group=int(j[0]),
                       grading=float(j[1])))
        selected_library_xfm2.append(
            MriTransform(name=d.name, prefix=work_lib_dir))

        if library_nl_samples_avail:
            selected_library_xfm.append(
                MriTransform(xfm=j[4 + library_modalities],
                             xfm_inv=j[5 + library_modalities]))

    output_info['selected_library_warped2'] = selected_library_warped2
    output_info['selected_library_xfm2'] = selected_library_xfm2
    if library_nl_samples_avail:
        output_info['selected_library_xfm'] = selected_library_xfm

    if segment_symmetric:
        for (i, j) in enumerate(selected_library_f):
            d = MriDataset(scan=j[2],
                           seg=j[3],
                           add=j[4:4 + library_modalities],
                           group=int(j[0]),
                           grading=float(j[1]))
            selected_library_scan_f.append(d)
            selected_library_warped2_f.append(
                MriDataset(name=d.name,
                           prefix=work_lib_dir_f,
                           add_n=sample_modalities))
            selected_library_xfm2_f.append(
                MriTransform(name=d.name, prefix=work_lib_dir_f))

            if library_nl_samples_avail:
                selected_library_xfm_f.append(
                    MriTransform(xfm=j[4 + library_modalities],
                                 xfm_inv=j[5 + library_modalities]))

        output_info['selected_library_warped2_f'] = selected_library_warped2_f
        output_info['selected_library_xfm2_f'] = selected_library_xfm2_f
        if library_nl_samples_avail:
            output_info['selected_library_xfm_f'] = selected_library_xfm_f

    # nonlinear registration to template or individual

    if do_pairwise:  # Right now ignore precomputed transformations
        results = []
        if debug:
            print("Performing pairwise registration")

        for (i, j) in enumerate(selected_library):
            # TODO: make clever usage of precomputed transform if available
            if pairwise_register_type == 'elx' or pairwise_register_type == 'elastix':
                results.append(
                    futures.submit(
                        elastix_registration,
                        bbox_sample,
                        selected_library_scan[i],
                        selected_library_xfm2[i],
                        level=pairwise_level,
                        start_level=pairwise_start,
                        parameters=pairwise_options,
                        nl=True,
                        output_inv_target=selected_library_warped2[i],
                        warp_seg=True,
                        resample_order=resample_order,
                        resample_baa=resample_baa))
            elif pairwise_register_type == 'ants' or do_pairwise_ants:
                results.append(
                    futures.submit(
                        non_linear_registration,
                        bbox_sample,
                        selected_library_scan[i],
                        selected_library_xfm2[i],
                        level=pairwise_level,
                        start_level=pairwise_start,
                        parameters=pairwise_options,
                        ants=True,
                        output_inv_target=selected_library_warped2[i],
                        warp_seg=True,
                        resample_order=resample_order,
                        resample_baa=resample_baa))
            else:
                results.append(
                    futures.submit(
                        non_linear_registration,
                        bbox_sample,
                        selected_library_scan[i],
                        selected_library_xfm2[i],
                        level=pairwise_level,
                        start_level=pairwise_start,
                        parameters=pairwise_options,
                        ants=False,
                        output_inv_target=selected_library_warped2[i],
                        warp_seg=True,
                        resample_order=resample_order,
                        resample_baa=resample_baa))

        if segment_symmetric:
            for (i, j) in enumerate(selected_library_f):
                # TODO: make clever usage of precomputed transform if available
                if pairwise_register_type == 'elx' or pairwise_register_type == 'elastix':
                    results.append(
                        futures.submit(
                            elastix_registration,
                            bbox_sample,
                            selected_library_scan_f[i],
                            selected_library_xfm2_f[i],
                            level=pairwise_level,
                            start_level=pairwise_start,
                            parameters=pairwise_options,
                            nl=True,
                            output_inv_target=selected_library_warped2_f[i],
                            warp_seg=True,
                            flip=True,
                            resample_order=resample_order,
                            resample_baa=resample_baa))
                elif pairwise_register_type == 'ants' or do_pairwise_ants:
                    results.append(
                        futures.submit(
                            non_linear_registration,
                            bbox_sample,
                            selected_library_scan_f[i],
                            selected_library_xfm2_f[i],
                            level=pairwise_level,
                            start_level=pairwise_start,
                            parameters=pairwise_options,
                            ants=True,
                            output_inv_target=selected_library_warped2_f[i],
                            warp_seg=True,
                            flip=True,
                            resample_order=resample_order,
                            resample_baa=resample_baa))
                else:
                    results.append(
                        futures.submit(
                            non_linear_registration,
                            bbox_sample,
                            selected_library_scan_f[i],
                            selected_library_xfm2_f[i],
                            level=pairwise_level,
                            start_level=pairwise_start,
                            parameters=pairwise_options,
                            ants=False,
                            output_inv_target=selected_library_warped2_f[i],
                            warp_seg=True,
                            flip=True,
                            resample_order=resample_order,
                            resample_baa=resample_baa))
        # TODO: do we really need to wait for result here?
        futures.wait(results, return_when=futures.ALL_COMPLETED)
    else:

        results = []

        for (i, j) in enumerate(selected_library):

            lib_xfm = None
            if library_nl_samples_avail:
                lib_xfm = selected_library_xfm[i]

            results.append(
                futures.submit(concat_resample,
                               selected_library_scan[i],
                               lib_xfm,
                               nonlinear_xfm,
                               selected_library_warped2[i],
                               resample_order=resample_order,
                               label_resample_order=label_resample_order,
                               resample_baa=resample_baa))

        if segment_symmetric:
            for (i, j) in enumerate(selected_library_f):
                lib_xfm = None
                if library_nl_samples_avail:
                    lib_xfm = selected_library_xfm_f[i]

                results.append(
                    futures.submit(concat_resample,
                                   selected_library_scan_f[i],
                                   lib_xfm,
                                   nonlinear_xfm,
                                   selected_library_warped2_f[i],
                                   resample_order=resample_order,
                                   label_resample_order=label_resample_order,
                                   resample_baa=resample_baa,
                                   flip=True))
        # TODO: do we really need to wait for result here?
        futures.wait(results, return_when=futures.ALL_COMPLETED)

    results = []

    sample_seg = MriDataset(name='bbox_seg_' + sample.name + out_variant,
                            prefix=work_dir)
    sample_grad = MriDataset(name='bbox_grad_' + sample.name + out_variant,
                             prefix=work_dir)

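    # label fusion: merge the grading maps propagated from the selected
    # library subjects into a single estimate for this sample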
    results.append(
        futures.submit(fuse_grading,
                       bbox_sample,
                       sample_seg,
                       selected_library_warped2,
                       flip=False,
                       classes_number=classes_number,
                       fuse_options=fuse_options,
                       model=local_model,
                       debug=debug,
                       fuse_variant=fuse_variant,
                       groups=groups))

    if segment_symmetric:
        results.append(
            futures.submit(fuse_grading,
                           bbox_sample,
                           sample_seg,
                           selected_library_warped2_f,
                           flip=True,
                           classes_number=classes_number,
                           fuse_options=fuse_options,
                           model=local_model,
                           debug=debug,
                           fuse_variant=fuse_variant,
                           groups=groups))

    futures.wait(results, return_when=futures.ALL_COMPLETED)

    output_info['fuse'] = results[0].result()
    if segment_symmetric:
        output_info['fuse_f'] = results[1].result()

    if qc_options:
        # generate QC images
        output_info['qc'] = generate_qc_image(
            sample_seg,
            bbox_sample,
            sample_qc,
            options=qc_options,
            model=local_model,
            symmetric=segment_symmetric,
            labels=library_description['classes_number'])
    # clean up if needed
    if cleanup:
        shutil.rmtree(work_lib_dir)
        shutil.rmtree(work_lib_dir_f)
        if nl_sample is not None:
            nl_sample.cleanup()

    if cleanup_xfm:
        if nonlinear_xfm is not None:
            nonlinear_xfm.cleanup()

    if not run_in_bbox:
        # TODO: apply error correction here
        # rename labels to final results
        sample_seg_native = MriDataset(name='seg_' + sample.name + out_variant,
                                       prefix=work_dir)

        warp_rename_seg(
            sample_seg,
            sample,
            sample_seg_native,
            transform=bbox_linear_xfm,
            invert_transform=True,
            lut=library_description['map'],
            symmetric=segment_symmetric,
            symmetric_flip=segment_symmetric,
            use_flipped=segment_symmetric,  # needed to flip .seg_f back to the right orientation
            flip_lut=library_description['flip_map'],
            resample_baa=resample_baa,
            resample_order=label_resample_order,
            datatype=seg_datatype)

        warp_sample(
            sample_seg,
            sample,
            sample_seg_native,
            transform=bbox_linear_xfm,
            invert_transform=True,
            symmetric=segment_symmetric,
            symmetric_flip=segment_symmetric,  # need to flip symmetric dataset
            resample_order=resample_order)

        output_info['sample_seg_native'] = sample_seg_native

        if segment_symmetric:
            join_left_right(sample_seg_native,
                            output_segment + '_seg.mnc',
                            output_segment + '_grad.mnc',
                            datatype=seg_datatype)
        else:
            shutil.copyfile(sample_seg_native.seg, output_segment + '_seg.mnc')
            shutil.copyfile(sample_seg_native.scan,
                            output_segment + '_grad.mnc')

        output_info['output_segment'] = output_segment + '_seg.mnc'
        output_info['output_grading'] = output_segment + '_grad.mnc'

        volumes = seg_to_volumes_grad(output_segment + '_seg.mnc',
                                      output_segment + '_vol.json',
                                      label_map=library_description.get(
                                          'label_map', None),
                                      grad=output_segment + '_grad.mnc',
                                      median=use_median)

        output_info['output_volumes'] = volumes
        output_info['output_volumes_json'] = output_segment + '_vol.json'

        # TODO: cleanup more here (?)

        return (output_segment + '_seg.mnc', output_segment + '_grad.mnc',
                volumes, output_info)
    else:  # special case, needed to train error correction; TODO: remove?
        volumes = seg_to_volumes_grad(sample_seg.seg,
                                      output_segment + '_vol.json',
                                      grad=sample_seg.scan,
                                      median=use_median)
        return (sample_seg.seg, sample_seg.scan, volumes, output_info)
Example #53
from scoop import futures

def func0(n):
    task = futures.submit(func1, n)
    result = task.result()
    return result
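A minimal func1 that this snippet could pair with (hypothetical; the original helper is not shown here):

def func1(n):
    # stand-in workload; any picklable, top-level callable will do
    return n * 2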
Example #54
def generate_nonlinear_average(
        samples,
        initial_model=None,
        output_model=None,
        output_model_sd=None,
        prefix='.',
        options=None,
        skip=0,
        stop_early=100000):
    """Iteratively build a non-linear average model (template) from samples.

    Behaviour is controlled through the options dict (protocol, symmetric,
    cleanup, refine, start_level, median, ...); see the .get() calls below.
    """
    # avoid the mutable-default-argument pitfall
    if options is None:
        options = {}

    # use first sample as initial model
    if not initial_model:
        initial_model = samples[0]

    # current estimate of template
    current_model = initial_model
    current_model_sd = None

    transforms=[]
    corr=[]

    corr_transforms=[]
    sd=[]
    corr_samples=[]

    protocol=options.get('protocol', [{'iter':4,'level':32},
                                      {'iter':4,'level':32}] )

    cleanup=       options.get('cleanup',False)
    symmetric=     options.get('symmetric',False)
    parameters=    options.get('parameters',None)
    refine=        options.get('refine',True)
    qc=            options.get('qc',False)
    downsample_=   options.get('downsample',None)
    use_dd=        options.get('use_dd',False)
    use_ants=      options.get('use_ants',False)
    use_elastix=   options.get('use_elastix',False)
    start_level=   options.get('start_level',None)
    use_median=    options.get('median',False)

    models=[]
    models_sd=[]

    if symmetric:
        flipdir=prefix+os.sep+'flip'
        if not os.path.exists(flipdir):
            os.makedirs(flipdir)

        flip_all=[]
        # generate flipped versions of all scans
        for (i, s) in enumerate(samples):
            _s_name=os.path.basename(s.scan).rsplit('.gz',1)[0]
            s.scan_f=prefix+os.sep+'flip'+os.sep+_s_name

            if s.mask is not None:
                s.mask_f=prefix+os.sep+'flip'+os.sep+'mask_'+_s_name

            flip_all.append( futures.submit( generate_flip_sample,s )  )

        futures.wait(flip_all, return_when=futures.ALL_COMPLETED)
    # go through all the iterations
    it=0
    for (i,p) in enumerate(protocol):
        downsample=p.get('downsample',downsample_)
        for j in range(1,p['iter']+1):
            it+=1
            if it>stop_early:
                break
            # this will be a model for next iteration actually

            # 1 register all subjects to current template
            next_model=MriDataset(prefix=prefix,iter=it,name='avg')
            next_model_sd=MriDataset(prefix=prefix,iter=it,name='sd')
            transforms=[]

            it_prefix=prefix+os.sep+str(it)
            if not os.path.exists(it_prefix):
                os.makedirs(it_prefix)

            inv_transforms=[]
            fwd_transforms=[]
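            # register every sample to the current template estimate; the
            # forward/inverse transforms are collected for averaging below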
            
            start=None
            if it==1:
                start=start_level

            for (i, s) in enumerate(samples):
                sample_xfm=MriTransform(name=s.name,prefix=it_prefix,iter=it)
                sample_inv_xfm=MriTransform(name=s.name+'_inv',prefix=it_prefix,iter=it)

                prev_transform = None

                if it > 1:
                    if refine:
                        prev_transform = corr_transforms[i]
                    else:
                        start=start_level # TWEAK?
                    
                if it>skip and it<stop_early:
                    if use_dd:
                        transforms.append(
                            futures.submit(
                                dd_register_step,
                                s,
                                current_model,
                                sample_xfm,
                                output_invert=sample_inv_xfm,
                                init_xfm=prev_transform,
                                symmetric=symmetric,
                                parameters=parameters,
                                level=p['level'],
                                start=start,
                                work_dir=prefix,
                                downsample=downsample)
                            )
                    elif use_ants:
                        transforms.append(
                            futures.submit(
                                ants_register_step,
                                s,
                                current_model,
                                sample_xfm,
                                output_invert=sample_inv_xfm,
                                init_xfm=prev_transform,
                                symmetric=symmetric,
                                parameters=parameters,
                                level=p['level'],
                                start=start,
                                work_dir=prefix,
                                downsample=downsample)
                            )
                    elif use_elastix:
                        transforms.append(
                            futures.submit(
                                elastix_register_step,
                                s,
                                current_model,
                                sample_xfm,
                                output_invert=sample_inv_xfm,
                                init_xfm=prev_transform,
                                symmetric=symmetric,
                                parameters=parameters,
                                level=p['level'],
                                start=start,
                                work_dir=prefix,
                                downsample=downsample)
                            )
                    else:
                        transforms.append(
                            futures.submit(
                                non_linear_register_step,
                                s,
                                current_model,
                                sample_xfm,
                                output_invert=sample_inv_xfm,
                                init_xfm=prev_transform,
                                symmetric=symmetric,
                                parameters=parameters,
                                level=p['level'],
                                start=start,
                                work_dir=prefix,
                                downsample=downsample)
                            )
                inv_transforms.append(sample_inv_xfm)
                fwd_transforms.append(sample_xfm)

            # wait for jobs to finish
            if it>skip and it<stop_early:
                futures.wait(transforms, return_when=futures.ALL_COMPLETED)

            if cleanup and it > 1:
                # remove information from previous iteration
                for s in corr_samples:
                    s.cleanup(verbose=True)
                for x in corr_transforms:
                    x.cleanup(verbose=True)

            # here all the transforms should exist
            avg_inv_transform=MriTransform(name='avg_inv', prefix=it_prefix, iter=it)

            # 2 average all transformations
            if it>skip and it<stop_early:
                result=futures.submit(average_transforms, inv_transforms, avg_inv_transform, nl=True, symmetric=symmetric)
                futures.wait([result], return_when=futures.ALL_COMPLETED)

            corr=[]
            corr_transforms=[]
            corr_samples=[]
            # 3 concatenate correction and resample
            for (i, s) in enumerate(samples):
                c=MriDataset(prefix=it_prefix,iter=it,name=s.name)
                x=MriTransform(name=s.name+'_corr',prefix=it_prefix,iter=it)

                if it>skip and it<stop_early:
                    corr.append(futures.submit( 
                        concat_resample_nl, 
                        s, 
                        fwd_transforms[i], 
                        avg_inv_transform, 
                        c, 
                        x, 
                        current_model, 
                        level=p['level'], symmetric=symmetric, qc=qc ))
                corr_transforms.append(x)
                corr_samples.append(c)

            if it>skip and it<stop_early:
                futures.wait(corr, return_when=futures.ALL_COMPLETED)

            # cleanup transforms
            if cleanup :
                for x in inv_transforms:
                    x.cleanup()
                for x in fwd_transforms:
                    x.cleanup()
                avg_inv_transform.cleanup()
                
            # 4 average resampled samples to create new estimate
            if it>skip and it<stop_early:
                result=futures.submit(average_samples, corr_samples, next_model, next_model_sd, symmetric=symmetric, symmetrize=symmetric,median=use_median)
                futures.wait([result], return_when=futures.ALL_COMPLETED)

            if cleanup and it > 1:
                # record the superseded template estimate so that it can be
                # removed once model building is finished (see cleanup below)
                models.append(next_model)
                models_sd.append(next_model_sd)

            current_model=next_model
            current_model_sd=next_model_sd

            if it>skip and it<stop_early:
                result=futures.submit(average_stats, next_model, next_model_sd)
                sd.append(result)
    
    # write per-iteration statistics to the output directory
    futures.wait(sd, return_when=futures.ALL_COMPLETED)
    with open(prefix+os.sep+'stats.txt','w') as f:
        for s in sd:
            f.write("{}\n".format(s.result()))
            
    results={
            'model':      current_model,
            'model_sd':   current_model_sd,
            'xfm':        corr_transforms,
            'biascorr':   None,
            'scan':       corr_samples,
            'symmetric':  symmetric,
            'samples':    samples
            }
            
    with open(prefix+os.sep+'results.json','w') as f:
        json.dump(results, f, indent=1, cls=MRIEncoder)

    if cleanup and stop_early==100000:
        # keep the final model
        models.pop()
        models_sd.pop()
        
        # delete unneeded models
        for m in models:
            m.cleanup()
        for m in models_sd:
            m.cleanup()

    return results
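A hedged usage sketch (the MriDataset constructor arguments follow the patterns used above, but the exact API is project-specific and assumed here):

samples = [MriDataset(scan='subject_%02d.mnc' % i, prefix='work')
           for i in range(4)]
res = generate_nonlinear_average(
    samples,
    prefix='model_build',
    options={'protocol': [{'iter': 4, 'level': 32},
                          {'iter': 2, 'level': 16}],
             'symmetric': False,
             'cleanup': True})
print(res['model'])  # final template estimate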
Example #55
def main():
    options = parse_options()
    pipeline_parameters = default_pipeline_options
    pipeline_info = {}
    modalities = options.modalities.split(',')
    try:
        if options.options is not None:
            try:
                with open(options.options, 'r') as f:
                    pipeline_parameters = json.load(f)
            except Exception:
                print("Error reading: {}".format(options.options))
                raise

        if (options.csv is not None) or (options.load is not None):
            inputs = []

            if options.load is not None:
                inputs = load_pipeline_output(options.load)
            else:
                with open(options.csv, 'r') as csvfile:
                    reader = csv.reader(csvfile,
                                        delimiter=',',
                                        quoting=csv.QUOTE_NONE)
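                    # expected row layout (per the indices used below):
                    # subject, visit, t1w scan, optional extra-modality scans,
                    # then age, sex and t1w/t2w distortion-correction transforms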
                    for row in reader:
                        if len(row) >= 3:
                            data_name = '{}_{}'.format(row[0], row[1])

                            t1w = MriScan(name=data_name,
                                          scan=row[2],
                                          modality='t1w',
                                          mask=None)

                            t2w = None
                            pdw = None
                            corr_t1w = None
                            corr_t2w = None
                            age = None
                            sex = None

                            add = []

                            for l, ll in enumerate(modalities):
                                if len(row) > (3 + l) and row[3 + l] != '':
                                    add.append(
                                        MriScan(name=data_name,
                                                scan=row[3 + l],
                                                modality=ll,
                                                mask=None))

                            # optional demographic / distortion-correction columns
                            col = 4 + len(modalities)
                            if len(row) > col and row[col] != '':
                                age = float(row[col])
                            if len(row) > col + 1 and row[col + 1] != '':
                                sex = float(row[col + 1])
                            if len(row) > col + 2 and row[col + 2] != '':
                                corr_t1w = MriTransform(None, 'corr_t1w',
                                                        xfm=row[col + 2])
                            if len(row) > col + 3 and row[col + 3] != '':
                                corr_t2w = MriTransform(None, 'corr_t2w',
                                                        xfm=row[col + 3])

                            line = {
                                'subject': row[0],
                                'visit': row[1],
                                # MRI
                                't1w': t1w,
                                # demographic info
                                'age': age,
                                'sex': sex,
                                # distortion correction
                                'corr_t1w': corr_t1w,
                                'corr_t2w': corr_t2w,
                                # timepoint specific model
                                'model_name': None,
                                'model_dir': None,
                            }
                            #
                            if len(add) > 0:
                                line['add'] = add

                            inputs.append(line)
                        else:
                            print("Error, unexpected line format:{}".format(
                                repr(row)))
                            raise Exception()

            pipeline_parameters['debug'] = options.debug
            if options.debug:
                print(repr(inputs))

            run_pipeline = []

            # only needed for parallel execution
            from scoop import futures, shared

            for (i, s) in enumerate(inputs):
                output_dir = options.output + os.sep + s['subject'] + os.sep + s['visit']
                manual_dir = None

                if options.manual is not None:
                    manual_dir = options.manual + os.sep + s['subject'] + os.sep + s['visit']

                run_pipeline.append(
                    futures.submit(standard_pipeline,
                                   s,
                                   output_dir,
                                   options=pipeline_parameters,
                                   work_dir=output_dir,
                                   manual_dir=manual_dir))
            #
            # wait for all to finish
            #
            futures.wait(run_pipeline, return_when=futures.ALL_COMPLETED)

            for j, i in enumerate(run_pipeline):
                inputs[j]['output'] = i.result()

            save_pipeline_output(inputs,
                                 options.output + os.sep + 'summary.json')

        elif options.scans   is not None and \
             options.subject is not None and \
             options.visit   is not None:
            # run on a single subject
            data_name = '{}_{}'.format(options.subject, options.visit)
            pipeline_parameters['debug'] = options.debug
            output_dir = options.output + os.sep + options.subject + os.sep + options.visit
            manual_dir = None

            if options.manual is not None:
                manual_dir = options.manual + os.sep + options.subject + os.sep + options.visit

            add = []

            for l, ll in enumerate(modalities):
                if len(options.scans) > (l + 1):
                    add.append(
                        MriScan(name=data_name,
                                scan=options.scans[(l + 1)],
                                modality=ll,
                                mask=None))

            if len(add) == 0: add = None

            info = {
                'subject': options.subject,
                'visit': options.visit,
                't1w': MriScan(name=data_name,
                               scan=options.scans[0],
                               modality='t1w',
                               mask=None),
                'add': add
            }

            if options.corr is not None:

                info['corr_t1w'] = MriTransform(None,
                                                'corr_t1w',
                                                xfm=options.corr[0])

                if len(options.corr) > 1:
                    info['corr_t2w'] = MriTransform(None,
                                                    'corr_t2w',
                                                    xfm=options.corr[1])

            ret = standard_pipeline(info,
                                    output_dir,
                                    options=pipeline_parameters,
                                    work_dir=output_dir,
                                    manual_dir=manual_dir)
            # TODO: make a check if there is a summary file there already?
            #save_pipeline_output([info],options.output+os.sep+'summary.json')

        else:
            print("Refusing to run without input data, run --help")
            exit(1)
    except Exception:
        print("Exception: {}".format(sys.exc_info()[0]))
        traceback.print_exc(file=sys.stdout)
        raise
Example #56
from scoop import futures

def funcCancel():
    f = futures.submit(func4, 100)
    f.cancel()
    return f.cancelled()
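Note: SCOOP follows the concurrent.futures cancellation semantics, so cancel() only succeeds while the Future is still pending; once func4 has started executing, cancel() returns False and this function reports the task as not cancelled.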
Example #57
File: scooptest.py Project: Mariovr/RG
from scoop import futures

def func0(n):
    # Task submission is asynchronous; it returns immediately.
    task = futures.submit(func1, n)
    # This call blocks until the result is available
    result = task.result()
    return result
Example #58
from scoop import futures

def funcCancel():
    f = futures.submit(func4, 100)
    f.cancel()
    return f.cancelled()
Example #59
File: common.py Project: visheratin/heft
from scoop import futures

def multi_repeat(n, funcs):
    fs = [futures.submit(func) for func in funcs for _ in range(n)]
    futures.wait(fs)
    return [f.result() for f in fs]
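A quick illustration of the resulting order (hypothetical helper functions; run under the SCOOP launcher so workers exist):

from scoop import futures

def one():
    return 1

def two():
    return 2

# multi_repeat(2, [one, two]) == [1, 1, 2, 2]
# each callable is submitted n times; results come back in submission order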
Example #60
#    This file is part of Scalable COncurrent Operations in Python (SCOOP).
#
#    SCOOP is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Lesser General Public License as
#    published by the Free Software Foundation, either version 3 of
#    the License, or (at your option) any later version.
#
#    SCOOP is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Lesser General Public License for more details.
#
#    You should have received a copy of the GNU Lesser General Public
#    License along with SCOOP. If not, see <http://www.gnu.org/licenses/>.
#
"""
Shows the conditional execution of a parallel Future.
"""
from scoop import futures
import random

first_type = lambda x: x + " World"
second_type = lambda x: x + " Parallel World"

if __name__ == '__main__':
    if random.random() < 0.5:
        my_future = futures.submit(first_type, "Hello")
    else:
        my_future = futures.submit(second_type, "Hello")
    print(my_future.result())
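To try this example, launch it through the SCOOP runtime (e.g. python -m scoop conditional.py, where the script name is only a placeholder) so that worker processes are available for submit() to dispatch to.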