def ProduceData(fitParameters, filename):
  """Run every single-capillary fit case with the conductivity coefficients
  taken from fitParameters and store the results in the h5 file `filename`.

  Returns the open h5 file handle so the caller can inspect the results.
  """
  from krebs.analyzeGeneral import DataBasicVessel, DataVesselSamples, DataVesselGlobal
  from krebs.detailedo2Analysis import DataDetailedPO2
  import krebs.detailedo2Analysis.singleVesselCases as singleVesselCases
  # the three coefficients being fitted, injected into every case's po2 params
  overrides = dict(
    massTransferCoefficientModelNumber = 1,
    conductivity_coeff1 = fitParameters[0],
    conductivity_coeff2 = fitParameters[1],
    conductivity_coeff3 = fitParameters[2],
  )
  f = h5files.open(filename, 'a')
  krebsutils.set_num_threads(2)
  dataman = myutils.DataManager(20, [
      DataDetailedPO2(), DataBasicVessel(), DataVesselSamples(), DataVesselGlobal()])
  for caseName, caseParams in fitCases:
    # deepcopy so the shared case definitions are not mutated between calls
    caseParams = deepcopy(caseParams)
    caseParams.paramspo2.update(overrides)
    singleVesselCases.GenerateSingleCapillaryWPo2(dataman, f, caseName, 16, caseParams)
  return f
def doit_optimize_deap(individual):
  """DEAP objective function for the adaption optimization.

  Writes the individual's three genes (k_c, k_m, k_s) into its adaption
  parameter set, runs the c++ adaption, and returns the variance of the
  mean capillary flow as a one-element fitness tuple (DEAP convention).

  Fixes vs. previous version: removed the dead `if 0:` debug block and the
  commented-out alternative return values; corrected the 'succesful' typo.
  """
  _ku.set_num_threads(1)
  if sys.flags.debug:
    print("individual in doit_optimize_deap")
    print(individual)
    print(individual.adaptionParameters['adaption'])
  print('starting doit in python')
  # copy the genes of this individual into the adaption parameter set
  individual.adaptionParameters['adaption'].update(
    k_c = individual[0],
    k_m = individual[1],
    k_s = individual[2],
  )
  returnState, mean, varOfMean, total_surface = adaption_cpp.computeAdaption(
      individual.adaptionParameters['adaption'],
      individual.adaptionParameters['calcflow'],
      False)
  if sys.flags.debug:
    if returnState == 0:
      print("adaption successful with mean: %f" % mean)
    print('mean: %f' % mean)
  # DEAP expects the fitness as a tuple, hence the trailing comma
  return varOfMean,
def doit(parameters):
  """Run one adaption computation for the given parameter set.

  parameters must contain the keys 'name', 'adaption' and 'calcflow';
  the vessel file/group are expected to be stored inside 'adaption'.

  Returns (returnState, mean, varOfMean, total_surface) from the c++ core;
  returnState == 0 signals success, anything else triggers a RuntimeWarning.

  Fixes vs. previous version: corrected the 'succesful'/'adation' typos in
  the user-facing messages, replaced `if not returnState == 0` with `else`,
  and removed stale commented-out code.
  """
  _ku.set_num_threads(1)
  if sys.flags.debug:
    print(parameters)
  print('starting doit in python ... paramset :%s' % parameters['name'])
  returnState, mean, varOfMean, total_surface = adaption_cpp.computeAdaption(
      parameters['adaption'], parameters['calcflow'], True)
  if returnState == 0:
    print("adaption successful! mean: %f, var: %f" % (mean, varOfMean))
  else:
    warnings.warn("adaption broken", RuntimeWarning)
  return returnState, mean, varOfMean, total_surface
def run_iffsim(params):
  """Launch the interstitial fluid flow simulation described by `params`.

  Pops the bookkeeping keys ('fn_out', 'ift_measure', 'num_threads') from a
  copy of params and hands the remaining dict to the c++ simulation together
  with a Measure callback writing into the output file.
  """
  cfg = deepcopy(params)
  outfilename = cfg.pop('fn_out')
  measure_params = cfg.pop('ift_measure', dict())
  krebsutils.set_num_threads(cfg.pop('num_threads', 1))
  krebsutils.run_iffsim(dicttoinfo.dicttoinfo(cfg),
                        str(outfilename.encode('utf-8')),
                        Measure(outfilename, measure_params))
def worker_on_client(fn, grp_pattern, adaptionParams, num_threads=1):
  """Client-side entry point for a single adaption run.

  Points the adaption parameter set at the given vessel file and group
  pattern, then delegates to doit().
  """
  print('Adaption on %s / %s / param: %s' % (fn, grp_pattern, adaptionParams['name']))
  _ku.set_num_threads(num_threads)
  # the c++ side reads the vessel file location from the parameter dict
  adaptionParams['adaption'].update(
    vesselFileName = fn,
    vesselGroupName = grp_pattern,
  )
  doit(adaptionParams)
def runLevelsetRedistancingScheme():
  """Redistance a Zalesak-disk levelset on a cell-centered 100x100 2d lattice.

  Builds the initial concentration profile from the zalesak disk, distorted
  by a sign-preserving cube root so the profile is no longer a distance
  function, then calls the c++ redistancing routine on it.

  Fixes vs. previous version: removed the unused local `name` (it was only
  referenced by commented-out code).
  """
  params = dict(fn_out = 'zalesak_redistanceing')
  ld = krebsutils.LatticeDataQuad3d((0, 99, 0, 99, 0, 0), 1. / 100.)
  ld.SetCellCentering((True, True, False))
  # sign-preserving cube root: keeps the zero level set but flattens the
  # gradient so the redistancing actually has work to do
  f_distort = lambda w: (-1 if w < 0 else 1) * (abs(w)**(1. / 3.))
  f_conc = lambda x, y, z: f_distort(zalesakDisk((x, y, z), 0.33, (0.5, 0.5, 0.)))
  krebsutils.set_num_threads(2)
  levelsetRedistancing(dicttoinfo(params), ld, dict(conc_profile = f_conc))
def runs_on_client(name, config):
  """Run the iff simulation on a worker node.

  config is assumed to be a plain python dict imported from a parameter
  module; the bookkeeping keys are popped from a copy before the remainder
  is serialized and handed to the c++ core.
  """
  from krebs.iff import iff_cpp
  import krebsutils
  cfg = deepcopy(config)
  outfilename = cfg.pop('fn_out')
  measure_params = cfg.pop('ift_measure', dict())
  krebsutils.set_num_threads(cfg.pop('num_threads', 1))
  iff_cpp.run_iffsim(dicttoinfo.dicttoinfo(cfg),
                     str(outfilename.encode('utf-8')),
                     Measure(outfilename, measure_params))
  if __debug__:
    print('iff_cpp returned')
def worker_on_client_optimize(fn, grp_pattern, adaptionParams, num_threads, timing):
  """Client-side entry point for the adaption optimization.

  Runs krebs.adaption.doit_optimize on the given vessel file; when `timing`
  is truthy the call is wrapped in cProfile and the stats are printed.
  """
  print('Adaption on %s / %s / param: %s' % (fn, grp_pattern, adaptionParams['name']))
  # so plotting/measurement scripts can find the original tumor files
  # using the stored basename alone
  h5files.search_paths = [dirname(fn)]
  krebsutils.set_num_threads(num_threads)
  if timing:
    prof = cProfile.Profile()
    refs = prof.runcall(krebs.adaption.doit_optimize, fn,
                        adaptionParams['adaption'], adaptionParams['calcflow'])
    prof.print_stats()
  else:
    refs = krebs.adaption.doit_optimize(fn, adaptionParams['adaption'],
                                        adaptionParams['calcflow'])
  print("got back: ")
  print(refs)
  h5files.closeall()  # just to be sure
def calcBoxCounts(data, vesselgroup, dataId, opts):
  """Boxcounting for various vessel-network configurations.

  Reads the vessel graph from `vesselgroup`, filters its edges according to
  `dataId` ('arteries', 'veins', 'arteriovenous', 'capillaries', 'complete',
  'withintumor'), runs the boxcounting on the filtered subgraph and stores
  the result under data[dataId] as dict(bs=..., bc=...).

  Raises RuntimeError for 'tumor*' (not implemented) and ValueError for an
  unknown dataId (previously an unknown id crashed with a NameError on the
  never-assigned `mask`).
  """
  if dataId.startswith('tumor'):
    raise RuntimeError('not implemented')
  graph = krebsutils.read_vesselgraph(vesselgroup, ['position', 'flags', 'radius'])
  flags = graph.edges['flags']
  rad = graph.edges['radius']
  # build an edge filter for the requested vessel category; None = no filter
  if dataId == 'arteries':
    mask = np.logical_and((flags & krebsutils.ARTERY) > 0, rad > 10.0)
  elif dataId == 'veins':
    mask = np.logical_and((flags & krebsutils.VEIN) > 0, rad > 10.0)
  elif dataId == 'arteriovenous':
    mask = np.logical_and(
        (flags & krebsutils.VEIN) | (flags & krebsutils.ARTERY), rad > 10.0)
  elif dataId == 'capillaries':
    mask = rad <= 6.
  elif dataId == 'complete':
    mask = None
  elif dataId == 'withintumor':
    mask = flags & krebsutils.WITHIN_TUMOR
  else:
    raise ValueError('unknown dataId: %s' % dataId)
  if mask is not None:
    subgraph = graph.get_filtered(edge_indices=np.nonzero(mask))
  else:
    subgraph = graph
  if len(subgraph.edgelist):  # if graph is not empty
    print('computing %s' % dataId)
    krebsutils.set_num_threads(opts.num_threads)
    # do boxcounting up to the world size of the embedding lattice
    spacing = opts.spacing
    max_spacing = max(
        krebsutils.LatticeDataGetWorldSize(makeLd(graph, spacing, 0.)))
    bs, bc = calcVesselBoxCounts(subgraph, spacing, max_spacing)
    data[dataId] = dict(bs=bs, bc=bc)
def worker_on_client(vessel_fn, tumor_parameters, o2_parameter_set_name, num_threads):
  """Run a fake-tumor simulation followed by detailed o2 analysis.

  Runs the tumor executable, then the detailed po2 computation on the first
  snapshot and the snapshot at t = tend, writes samples to disk for each
  result group and renders the second one.
  """
  krebsutils.set_num_threads(num_threads)
  tumor_fn = tumor_parameters['fn_out']
  tend = tumor_parameters['tend']
  # select the initial output group and the one closest to tend
  pattern1 = 'out0000'
  pattern2 = 'out%04i' % int(round(tend / tumor_parameters['out_intervall']))
  pattern = '|'.join([pattern1, pattern2])
  print('%s %s' % (tumor_fn, pattern))
  os.system("%s -s '%s'" % (krebs.tumors.run_faketum, dicttoinfo(tumor_parameters)))
  o2_refs = detailedo2.doit(
      tumor_fn, pattern,
      (getattr(krebs.detailedo2Analysis.parameterSetsO2, o2_parameter_set_name),
       o2_parameter_set_name))
  for ref in o2_refs:
    po2group = h5files.open(ref.fn)[ref.path]
    krebs.analyzeTissueOxygenDetailed.WriteSamplesToDisk(po2group)
  krebs.povrayRenderOxygenDetailed.doit(o2_refs[1].fn, o2_refs[1].path)
  h5files.closeall()
def runTestEllipticSolver(): try: make_image = sys.argv[1] == '--img' except: make_image = False if make_image: size = (128, 128, 1) krebsutils.set_num_threads(4) time, mem = EST_testOperatorApplication(size, True) time, mem = EST_testOperatorSolve(size, True) else: if sys.flags.debug: size = (128, 128, 32) krebsutils.set_num_threads(4) time, mem = EST_testOperatorSolve(size, False) else: size = (128, 128, 128) times = collections.defaultdict(list) num_repeat = 5 for num_threads in [1, 2, 3, 4]: krebsutils.set_num_threads(num_threads) print 'np %i' % num_threads, for i in range(num_repeat): print '.', time, mem = EST_testOperatorSolve(size, False) times[num_threads].append(time) print '' for k, v in times.iteritems(): times[k] = np.average(v) base = times[1] for k in sorted(times.keys()): v = times[k] print 'np = %i, time: %f, speedup: %f' % (k, v, base / v)
pdfwriter.savefig(fig, postfix='_curves') plotAnalyzeConvergence(dataman, pdfwriter, plotties) plotties[0].AddStatsPage(pdfwriter) plotAnalyzeIterativeConvergence(dataman, pdfwriter, plotties) pyplot.close('all') if __name__ == '__main__': krebsutils.set_num_threads(2) dataman = myutils.DataManager(20, [DataDetailedPO2(),DataBasicVessel(), DataVesselSamples(), DataVesselGlobal()]) fn = 'vessel-single-all.h5' #os.unlink(fn) f = h5files.open(fn,'a') GenerateSingleCapillaryWPo2(dataman, f, 'nair_uptake', 14, singleVesselParameterSets.nair_uptake) plot_single_capillary(dataman, f['nair_uptake'], useInsets = True) GenerateSingleCapillaryWPo2(dataman, f, 'nair_release', 14, singleVesselParameterSets.nair_release) plot_single_capillary(dataman, f['nair_release'], useInsets = True) grouplist = [] for name in [ 'moschandreou_case%02i' % i for i in xrange(6) ]: params = getattr(singleVesselParameterSets, name)