@classmethod
def setUpClass(cls):
    path_images = path_image_samples / "Oseen/Images"
    series = SeriesOfArrays(str(path_images / "Oseen*"), "i+1:i+3")
    cls.path_tmp = path_images.parent / "tmp_test_work_piv"
    if not cls.path_tmp.exists():
        cls.path_tmp.mkdir()
    cls.serie = series.get_serie_from_index(0)
def _make_partition(self, serie_arrays, n):
    """Partition a SerieOfArraysFromFiles into n SeriesOfArrays.

    :param serie_arrays: a SerieOfArraysFromFiles
    :type serie_arrays: SerieOfArraysFromFiles
    :param n: the number of slices
    :type n: int
    :return: None; the partitions are appended to ``self.series``
    """
    print("nb process = " + str(n))
    ind_start = self.params.series.ind_start
    ind_stop = self.params.series.ind_stop
    ind_step = self.params.series.ind_step

    nb_image = ind_stop - ind_start + 1
    cut = int(nb_image / n)
    rest = nb_image % n

    for i in range(n):
        if rest > 0:
            plus = 1
        else:
            plus = 0
        self.series.append(
            SeriesOfArrays(
                serie_arrays,
                self.params.series.strcouple,
                ind_start=ind_start,
                ind_stop=ind_start + cut + plus,
                ind_step=ind_step,
            )
        )
        ind_start = ind_start + cut + plus
        rest -= 1
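The slicing above distributes nb_image indices over n chunks whose sizes differ by at most one. A standalone sketch of the same arithmetic (the helper name split_index_range and the plain-tuple output are illustrative, not part of fluidimage):

def split_index_range(ind_start, ind_stop, n):
    """Split [ind_start, ind_stop] (inclusive) into n contiguous (start, stop)
    chunks (stop exclusive) whose sizes differ by at most one."""
    nb_image = ind_stop - ind_start + 1
    cut, rest = divmod(nb_image, n)
    bounds = []
    start = ind_start
    for i in range(n):
        # the first `rest` chunks get one extra index
        plus = 1 if i < rest else 0
        stop = start + cut + plus
        bounds.append((start, stop))
        start = stop
    return bounds


# 10 images split over 3 workers -> chunks of 4, 3 and 3 indices
print(split_index_range(0, 9, 3))  # [(0, 4), (4, 7), (7, 10)]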
from fluidimage import SeriesOfArrays
from fluidimage.works.piv import WorkPIV

params = WorkPIV.create_default_params()

params.multipass.number = 2
params.multipass.use_tps = True

params.piv0.shape_crop_im0 = 32
params.piv0.displacement_max = 5
params.fix.correl_min = 0.2
params.fix.threshold_diff_neighbour = 8

params.mask.strcrop = '30:250, 100:'

work = WorkPIV(params=params)

path = '../../image_samples/Oseen/Images'
# path = '../../image_samples/Karman/Images'

series = SeriesOfArrays(path, 'i+1:i+3')
serie = series.get_serie_from_index(0)

piv = work.calcul(serie)

# piv.display(show_interp=True, scale=0.3, show_error=True)
piv.display(show_interp=False, scale=1, show_error=True)

# result.save()
import os
from time import time

from fluidimage import SeriesOfArrays
from fluidimage.works.piv import WorkPIV

from path_images import get_path

path = os.path.join(get_path('2005C'), 'c*.bmp')

params = WorkPIV.create_default_params()

params.piv0.shape_crop_im0 = 64
params.piv0.grid.overlap = 0.5

params.multipass.number = 3
params.multipass.use_tps = False

params.fix.displacement_max = 3
params.fix.correl_min = 0.1
params.fix.threshold_diff_neighbour = 3

work = WorkPIV(params=params)

series = SeriesOfArrays(path, 'i, 0:2')
serie = series.get_serie_from_index(50)

t0 = time()
piv = work.calcul(serie)
t1 = time()
print('Work done in {:.3f} s.'.format(t1 - t0))

piv.display(show_interp=False, scale=0.1, show_error=True)
import cProfile
import pstats

from fluidimage import SeriesOfArrays
from fluidimage.works.piv import WorkPIV

params = WorkPIV.create_default_params()

# for a very short computation
params.piv0.shape_crop_im0 = 32
params.piv0.grid.overlap = 0.5

# params.piv0.method_subpix = 'centroid'
# params.piv0.method_correl = 'theano'

params.multipass.number = 2
params.multipass.use_tps = 'last'
params.multipass.coeff_zoom = [2]

piv = WorkPIV(params=params)

series = SeriesOfArrays('../../image_samples/Oseen/Images', 'i+1:i+3')
serie = series.get_serie_from_index(0)

cProfile.runctx('result = piv.calcul(serie)',
                globals(), locals(), 'profile.pstats')

s = pstats.Stats('profile.pstats')
s.strip_dirs().sort_stats('time').print_stats(10)

print('with gprof2dot and graphviz (command dot):\n'
      'gprof2dot -f pstats profile.pstats | dot -Tpng -o profile.png')
class TopologyPIV(TopologyBase):
    """Topology for PIV computation.

    The most useful methods for the user (in particular :func:`compute`) are
    defined in the base class :class:`fluidimage.topologies.base.TopologyBase`.

    Parameters
    ----------

    params : None

      A ParamContainer (created with the class method
      :func:`create_default_params`) containing the parameters for the
      computation.

    logging_level : str, {'warning', 'info', 'debug', ...}

      Logging level.

    nb_max_workers : None, int

      Maximum number of "workers". If None, a number is estimated from the
      number of cores detected. If there are memory errors, you can try to
      decrease the number of workers.

    """

    @classmethod
    def create_default_params(cls):
        """Class method returning the default parameters.

        Typical usage::

          params = TopologyPIV.create_default_params()
          # modify parameters here
          ...

          topo = TopologyPIV(params)

        """
        params = ParamContainer(tag="params")

        params._set_child(
            "series",
            attribs={
                "path": "",
                "strcouple": "i:i+2",
                "ind_start": 0,
                "ind_stop": None,
                "ind_step": 1,
            },
        )

        params.series._set_doc(
            """
Parameters indicating the input series of images.

path : str, {''}

    String indicating the input images (can be a full path towards an image
    file or a string given to `glob`).

strcouple : 'i:i+2'

    String indicating as a Python slicing how couples of images are formed.
    There is one couple per value of `i`. The values of `i` are set with the
    other parameters `ind_start`, `ind_step` and `ind_stop` approximately with
    the function range (`range(ind_start, ind_stop, ind_step)`).

    Python slicing is a very powerful notation to define subsets of a
    (possibly multidimensional) set of images. One option for the user is to
    understand how Python slicing works. See for example this page:
    http://stackoverflow.com/questions/509211/explain-pythons-slice-notation.

    Another possibility is to follow simple examples:

    For single-frame images (im0, im1, im2, im3, ...), we keep the default
    value 'i:i+2' to form the couples (im0, im1), (im1, im2), ...

    To see what it gives, one can use IPython and range:

    >>> i = 0
    >>> list(range(10))[i:i+2]
    [0, 1]

    >>> list(range(10))[i:i+4:2]
    [0, 2]

    We see that we can also use the value 'i:i+4:2' to form the couples
    (im0, im2), (im1, im3), ...

    For double-frame images (im1a, im1b, im2a, im2b, ...) you can write

    >>> params.series.strcouple = 'i, 0:2'

    In this case, the first couple will be (im1a, im1b).

    To get the first couple (im1a, im1a), we would have to write

    >>> params.series.strcouple = 'i:i+2, 0'

ind_start : int, {0}

ind_step : int, {1}

ind_stop : None

"""
        )

        params._set_child(
            "saving", attribs={"path": None, "how": "ask", "postfix": "piv"}
        )

        params.saving._set_doc(
            """Saving of the results.

path : None or str

    Path of the directory where the data will be saved. If None, the path is
    obtained from the input path and the parameter `postfix`.

how : str {'ask'}

    'ask', 'new_dir', 'complete' or 'recompute'.

postfix : str

    Postfix from which the output file is computed.
"""
        )

        WorkPIV._complete_params_with_default(params)

        params._set_internal_attr(
            "_value_text",
            json.dumps(
                {
                    "program": "fluidimage",
                    "module": "fluidimage.topologies.piv",
                    "class": "TopologyPIV",
                }
            ),
        )

        params._set_child("preproc")
        image2image.complete_im2im_params_with_default(params.preproc)

        return params

    def __init__(self, params, logging_level="info", nb_max_workers=None):

        self.params = params

        self.series = SeriesOfArrays(
            params.series.path,
            params.series.strcouple,
            ind_start=params.series.ind_start,
            ind_stop=params.series.ind_stop,
            ind_step=params.series.ind_step,
        )

        path_dir = self.series.serie.path_dir
        path_dir_result, self.how_saving = prepare_path_dir_result(
            path_dir, params.saving.path, params.saving.postfix, params.saving.how
        )

        super().__init__(
            path_dir_result=path_dir_result,
            logging_level=logging_level,
            nb_max_workers=nb_max_workers,
        )

        queue_couples_of_names = self.add_queue("couples of names")
        queue_paths = self.add_queue("paths")
        queue_arrays = queue_arrays1 = self.add_queue("arrays")
        queue_couples_of_arrays = self.add_queue("couples of arrays")
        queue_piv = self.add_queue("piv")

        if params.preproc.im2im is not None:
            queue_arrays1 = self.add_queue("arrays1")

        self.add_work(
            "fill (couples of names, paths)",
            func_or_cls=self.fill_couples_of_names_and_paths,
            output_queue=(queue_couples_of_names, queue_paths),
            kind=("global", "one shot"),
        )
        self.add_work(
            "read array",
            func_or_cls=imread,
            input_queue=queue_paths,
            output_queue=queue_arrays,
            kind="io",
        )

        if params.preproc.im2im is not None:
            im2im_func = image2image.TopologyImage2Image.init_im2im(
                self, params.preproc
            )
            self.add_work(
                "image2image",
                func_or_cls=im2im_func,
                input_queue=queue_arrays,
                output_queue=queue_arrays1,
            )

        self.add_work(
            "make couples of arrays",
            func_or_cls=self.make_couples,
            params_cls=None,
            input_queue=(queue_couples_of_names, queue_arrays),
            output_queue=queue_couples_of_arrays,
            kind="global",
        )

        self.work_piv = WorkPIV(self.params)

        self.add_work(
            "compute piv",
            func_or_cls=self.work_piv.calcul,
            params_cls=params,
            input_queue=queue_couples_of_arrays,
            output_queue=queue_piv,
        )

        self.add_work(
            "save piv",
            func_or_cls=self.save_piv_object,
            input_queue=queue_piv,
            kind="io",
        )

        self.results = []

    def save_piv_object(self, obj):
        """Save a PIV object"""
        ret = obj.save(self.path_dir_result)
        self.results.append(ret)

    def fill_couples_of_names_and_paths(self, input_queue, output_queues):
        """Fill the two first queues"""
        assert input_queue is None
        queue_couples_of_names = output_queues[0]
        queue_paths = output_queues[1]

        series = self.series
        if not series:
            logger.warning("add 0 couple. No PIV to compute.")
            return
        if self.how_saving == "complete":
            index_series = []
            for ind_serie, serie in self.series.items():
                name_piv = get_name_piv(serie, prefix="piv")
                if not (self.path_dir_result / name_piv).exists():
                    index_series.append(ind_serie)

            if not index_series:
                logger.warning(
                    'topology in mode "complete" and work already done.'
                )
                return

            series.set_index_series(index_series)

            if logger.isEnabledFor(DEBUG):
                logger.debug(
                    repr([serie.get_name_arrays() for serie in series])
                )

        nb_series = len(series)
        logger.info(f"Add {nb_series} PIV fields to compute.")

        for iserie, serie in enumerate(series):
            if iserie > 1:
                break
            logger.info(
                "Files of serie {}: {}".format(iserie, serie.get_name_arrays())
            )

        for ind_serie, serie in series.items():
            queue_couples_of_names[ind_serie] = serie.get_name_arrays()
            for name, path in serie.get_name_path_arrays():
                queue_paths[name] = path

    def make_couples(self, input_queues, output_queue):
        """Make the couples of arrays"""
        queue_couples_of_names = input_queues[0]
        queue_arrays = input_queues[1]

        try:
            params_mask = self.params.mask
        except AttributeError:
            params_mask = None

        # for each couple of names
        for key, couple in tuple(queue_couples_of_names.items()):
            # if the corresponding arrays are available, make an array couple
            if (
                couple[0] in queue_arrays.keys()
                and couple[1] in queue_arrays.keys()
            ):
                array1 = queue_arrays[couple[0]]
                array2 = queue_arrays[couple[1]]
                serie = copy.copy(self.series.get_serie_from_index(key))

                # logger.debug(
                #     f"create couple {key}: {couple}, ({array1}, {array2})"
                # )

                array_couple = ArrayCouple(
                    names=(couple[0], couple[1]),
                    arrays=(array1, array2),
                    params_mask=params_mask,
                    serie=serie,
                )
                output_queue[key] = array_couple
                del queue_couples_of_names[key]
                # remove the image arrays if they will not be used anymore
                if not is_name_in_queue(couple[0], queue_couples_of_names):
                    del queue_arrays[couple[0]]
                if not is_name_in_queue(couple[1], queue_couples_of_names):
                    del queue_arrays[couple[1]]

    def make_text_at_exit(self, time_since_start):
        """Make a text printed at exit"""
        txt = f"Stop compute after t = {time_since_start:.2f} s"
        try:
            nb_results = len(self.results)
        except AttributeError:
            nb_results = None
        if nb_results is not None and nb_results > 0:
            txt += (
                f" ({nb_results} piv fields, "
                f"{time_since_start / nb_results:.2f} s/field)."
            )
        else:
            txt += "."

        txt += "\npath results:\n" + str(self.path_dir_result)

        return txt
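A minimal usage sketch of this topology, following the "Typical usage" snippet in the docstring above; the input path is a placeholder and 'complete' is just one of the documented saving modes:

from fluidimage.topologies.piv import TopologyPIV

params = TopologyPIV.create_default_params()

# placeholder input directory; 'i:i+2' forms one couple per value of i
params.series.path = "path/to/images"
params.series.strcouple = "i:i+2"

# skip couples whose result file already exists in the ".piv" directory
params.saving.how = "complete"

topo = TopologyPIV(params, logging_level="info")
# compute() is provided by the base class TopologyBase
topo.compute()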
def __init__(self, params, logging_level="info", nb_max_workers=None):

    self.params = params

    if params.surface_tracking is None:
        raise ValueError("params.surface_tracking has to be set.")

    self.serie = SerieOfArraysFromFiles(
        params.images.path, params.images.str_slice
    )

    self.series = SeriesOfArrays(
        params.images.path,
        "i:i+"
        + str(self.serie.get_index_slices()[0][2] + 1)
        + ":"
        + str(self.serie.get_index_slices()[0][2]),
        ind_start=self.serie.get_index_slices()[0][0],
        ind_stop=self.serie.get_index_slices()[0][1] - 1,
        ind_step=self.serie.get_index_slices()[0][2],
    )

    path_dir = self.serie.path_dir
    path_dir_result, self.how_saving = prepare_path_dir_result(
        path_dir, params.saving.path, params.saving.postfix, params.saving.how
    )
    self.path_dir_result = path_dir_result
    self.path_dir_src = Path(path_dir)

    self.surface_tracking_work = WorkSurfaceTracking(params)

    super().__init__(
        path_dir_result=path_dir_result,
        logging_level=logging_level,
        nb_max_workers=nb_max_workers,
    )

    queue_paths = self.add_queue("paths")
    queue_couples_of_names = self.add_queue("couples of names")
    queue_arrays = self.add_queue("arrays")
    queue_angles = self.add_queue("angles")
    queue_couples_of_arrays = self.add_queue(
        "couples of corrected angles and angles"
    )
    queuemod0_angles = self.add_queue("corrected angles copy")
    queuemod_angles = self.add_queue("corrected angles")
    queue_heights = self.add_queue("heights")

    self.add_work(
        "fill_path",
        self.fill_queue_paths,
        output_queue=(queue_paths, queue_couples_of_names),
        kind="one shot",
    )
    self.add_work(
        "read_array",
        self.imread,
        input_queue=queue_paths,
        output_queue=queue_arrays,
        kind="io",
    )
    self.add_work(
        "process_frame",
        self.surface_tracking_work.process_frame_func,
        input_queue=queue_arrays,
        output_queue=queue_angles,
    )
    self.add_work(
        "create_couple",
        self.make_couples,
        input_queue=(queuemod0_angles, queue_angles, queue_couples_of_names),
        output_queue=(queuemod_angles, queue_couples_of_arrays),
        kind="global",
    )
    self.add_work(
        "correct_couple_of_phases",
        self.surface_tracking_work.correctcouple,
        input_queue=queue_couples_of_arrays,
        output_queue=queuemod0_angles,
    )
    self.add_work(
        "calcul_height",
        self.surface_tracking_work.calculheight_func,
        input_queue=queuemod_angles,
        output_queue=queue_heights,
    )
    self.add_work(
        "save", self.save_image, input_queue=queue_heights, kind="io"
    )
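The slice string built above pairs each frame with the frame one step later in the series. A small worked example of that construction, with illustrative index-slice values:

# suppose self.serie.get_index_slices()[0] == (0, 100, 2),
# i.e. the input serie uses images 0, 2, 4, ... (illustrative values)
ind_start, ind_stop, ind_step = 0, 100, 2
strcouple = "i:i+" + str(ind_step + 1) + ":" + str(ind_step)
print(strcouple)  # 'i:i+3:2' -> couples (im_i, im_{i+2}) for each i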
class TopologyBOS(TopologyBase):
    """Topology for BOS computation.

    See https://en.wikipedia.org/wiki/Background-oriented_schlieren_technique

    Parameters
    ----------

    params : None

      A ParamContainer containing the parameters for the computation.

    logging_level : str, {'warning', 'info', 'debug', ...}

      Logging level.

    nb_max_workers : None, int

      Maximum number of "workers". If None, a number is computed from the
      number of cores detected. If there are memory errors, you can try to
      decrease the number of workers.

    """

    @classmethod
    def create_default_params(cls):
        """Class method returning the default parameters.

        For developers: cf. fluidsim.base.params

        """
        params = ParamContainer(tag="params")

        params._set_attrib("reference", 0)

        params._set_doc(
            """
reference : str or int, {0}

    Reference file (from which the displacements will be computed). Can be an
    absolute file path, a file name or the index in the list of files found
    from the parameters in ``params.series``.
"""
        )

        params._set_child(
            "series",
            attribs={
                "path": "",
                "strslice": None,
                "ind_start": 0,
                "ind_stop": None,
                "ind_step": 1,
            },
        )

        params.series._set_doc(
            """
Parameters indicating the input series of images.

path : str, {''}

    String indicating the input images (can be a full path towards an image
    file or a string given to `glob`).

strslice : None

    String indicating as a Python slicing how series of images are formed.
    See the parameters of the PIV topology.

ind_start : int, {0}

ind_step : int, {1}

ind_stop : None

"""
        )

        params._set_child(
            "saving", attribs={"path": None, "how": "ask", "postfix": "piv"}
        )

        params.saving._set_doc(
            """Saving of the results.

path : None or str

    Path of the directory where the data will be saved. If None, the path is
    obtained from the input path and the parameter `postfix`.

how : str {'ask'}

    'ask', 'new_dir', 'complete' or 'recompute'.

postfix : str

    Postfix from which the output file is computed.
"""
        )

        WorkPIV._complete_params_with_default(params)

        params._set_internal_attr(
            "_value_text",
            json.dumps(
                {
                    "program": "fluidimage",
                    "module": "fluidimage.topologies.bos",
                    "class": "TopologyBOS",
                }
            ),
        )

        return params

    def __init__(self, params=None, logging_level="info", nb_max_workers=None):

        if params is None:
            params = self.__class__.create_default_params()

        self.params = params

        self.piv_work = WorkPIV(params)

        self.series = SeriesOfArrays(
            params.series.path,
            params.series.strslice,
            ind_start=params.series.ind_start,
            ind_stop=params.series.ind_stop,
            ind_step=params.series.ind_step,
        )

        path_dir = self.series.serie.path_dir
        path_dir_result, self.how_saving = prepare_path_dir_result(
            path_dir, params.saving.path, params.saving.postfix, params.saving.how
        )
        self.path_dir_result = path_dir_result

        if not isinstance(params.reference, int):
            reference = os.path.expanduser(params.reference)
        else:
            reference = params.reference

        if isinstance(reference, int):
            names = self.series.get_name_all_arrays()
            names.sort()
            path_reference = os.path.join(path_dir, names[reference])
        elif os.path.isfile(reference):
            path_reference = reference
        else:
            path_reference = os.path.join(path_dir_result, reference)
            if not os.path.isfile(path_reference):
                raise ValueError(
                    "Bad value of params.reference: " + path_reference
                )

        self.path_reference = path_reference
        self.image_reference = imread(path_reference)

        self.results = {}

        def save_piv_object(o):
            ret = o.save(path_dir_result, kind="bos")
            return ret

        self.wq_piv = WaitingQueueThreading(
            "delta", save_piv_object, self.results, topology=self
        )
        self.wq_couples = WaitingQueueMultiprocessing(
            "couple", self.piv_work.calcul, self.wq_piv, topology=self
        )
        self.wq_images = WaitingQueueMakeCoupleBOS(
            "array image",
            self.wq_couples,
            topology=self,
            image_reference=self.image_reference,
            path_reference=self.path_reference,
            serie=self.series.serie,
        )
        self.wq0 = WaitingQueueLoadImage(
            destination=self.wq_images, path_dir=path_dir, topology=self
        )

        super().__init__(
            [self.wq0, self.wq_images, self.wq_couples, self.wq_piv],
            path_output=path_dir_result,
            logging_level=logging_level,
            nb_max_workers=nb_max_workers,
        )

        self.add_series(self.series)

    def add_series(self, series):

        if len(series) == 0:
            logger.warning("add 0 image. No BOS to compute.")
            return

        names = series.get_name_all_arrays()

        if self.how_saving == "complete":
            names_to_compute = []
            for name in names:
                name_bos = get_name_bos(name, series.serie)
                if not os.path.exists(
                    os.path.join(self.path_dir_result, name_bos)
                ):
                    names_to_compute.append(name)

            names = names_to_compute
            if len(names) == 0:
                logger.warning(
                    'topology in mode "complete" and work already done.'
                )
                return

        nb_names = len(names)
        print("Add {} BOS fields to compute.".format(nb_names))

        logger.debug(repr(names))

        print("First files to process:", names[:4])

        self.wq0.add_name_files(names)

        # a little bit strange, to apply the mask...
        try:
            params_mask = self.params.mask
        except AttributeError:
            params_mask = None

        im = self.image_reference
        couple = ArrayCouple(
            names=("", ""), arrays=(im, im), params_mask=params_mask
        )
        im, _ = couple.get_arrays()

        self.piv_work._prepare_with_image(im)

    def print_at_exit(self, time_since_start):

        txt = "Stop compute after t = {:.2f} s".format(time_since_start)
        try:
            nb_results = len(self.results)
        except AttributeError:
            nb_results = None
        if nb_results is not None and nb_results > 0:
            txt += " ({} bos fields, {:.2f} s/field).".format(
                nb_results, time_since_start / nb_results
            )
        else:
            txt += "."

        txt += "\npath results:\n" + str(self.path_dir_result)

        print(txt)
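A minimal usage sketch for TopologyBOS, in the spirit of the PIV example above; the path and slice string are placeholders, and the final compute() call assumes the method provided by TopologyBase, as for the other topologies:

from fluidimage.topologies.bos import TopologyBOS

params = TopologyBOS.create_default_params()

params.series.path = "path/to/images"  # placeholder input directory
params.series.strslice = "i:i+1"       # one image per serie, compared to the reference
params.reference = 0                   # index of the reference image

params.saving.how = "complete"

topo = TopologyBOS(params, logging_level="info")
topo.compute()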
from fluidimage import SeriesOfArrays
from fluidimage.works.piv import WorkPIV

import params_piv

try:
    reload
except NameError:
    from importlib import reload

reload(params_piv)

iexp = 0

params = params_piv.make_params_piv(iexp)

work = WorkPIV(params=params)

pathin = params.series.path

series = SeriesOfArrays(
    pathin, params.series.strcouple, ind_start=params.series.ind_start
)

# c060a.png and c060b.png
serie = series.get_serie_from_index(params.series.ind_start)

piv = work.calcul(serie)

# piv.piv0.display(show_interp=True, scale=0.05, show_error=True)
piv.display(show_interp=False, scale=0.05, show_error=True)
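The params_piv module used above is experiment-specific and not reproduced here; the sketch below is only a plausible guess at what make_params_piv could look like, assuming it builds on TopologyPIV.create_default_params() (all paths and values are placeholders):

# hypothetical params_piv.py (illustrative only, not part of fluidimage)
from fluidimage.topologies.piv import TopologyPIV

paths_exp = ["path/to/exp0/images", "path/to/exp1/images"]  # placeholders


def make_params_piv(iexp):
    """Return the PIV parameters for experiment number `iexp`."""
    params = TopologyPIV.create_default_params()
    params.series.path = paths_exp[iexp]
    params.series.strcouple = "i, 0:2"  # double-frame images (e.g. c060a, c060b)
    params.series.ind_start = 60
    return params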
def __init__(self, params=None, logging_level="info", nb_max_workers=None):

    if params is None:
        params = self.__class__.create_default_params()

    self.params = params
    self.piv_work = WorkPIV(params)

    serie_arrays = SerieOfArraysFromFiles(params.series.path)

    self.series = SeriesOfArrays(
        serie_arrays,
        params.series.strcouple,
        ind_start=params.series.ind_start,
        ind_stop=params.series.ind_stop,
        ind_step=params.series.ind_step,
    )

    path_dir = self.series.serie.path_dir
    path_dir_result, self.how_saving = prepare_path_dir_result(
        path_dir, params.saving.path, params.saving.postfix, params.saving.how
    )
    self.path_dir_result = path_dir_result

    self.results = {}

    def save_piv_object(o):
        ret = o.save(path_dir_result)
        return ret

    self.wq_piv = WaitingQueueThreading(
        "delta", save_piv_object, self.results, topology=self
    )
    self.wq_couples = WaitingQueueMultiprocessing(
        "couple", self.piv_work.calcul, self.wq_piv, topology=self
    )
    self.wq_images = WaitingQueueMakeCouple(
        "array image", self.wq_couples, topology=self
    )

    if params.preproc.im2im is not None:
        self.im2im_func = image2image.TopologyImage2Image.init_im2im(
            self, params.preproc
        )
        self.wq_images0 = WaitingQueueMultiprocessing(
            "image ", self.im2im_func, self.wq_images, topology=self
        )
        wq_after_load = self.wq_images0
    else:
        wq_after_load = self.wq_images

    self.wq0 = WaitingQueueLoadImage(
        destination=wq_after_load, path_dir=path_dir, topology=self
    )

    if params.preproc.im2im is not None:
        waiting_queues = [
            self.wq0,
            self.wq_images0,
            self.wq_images,
            self.wq_couples,
            self.wq_piv,
        ]
    else:
        waiting_queues = [
            self.wq0,
            self.wq_images,
            self.wq_couples,
            self.wq_piv,
        ]

    super().__init__(
        waiting_queues,
        path_output=path_dir_result,
        logging_level=logging_level,
        nb_max_workers=nb_max_workers,
    )

    self.add_series(self.series)
from fluidimage import SeriesOfArrays
from fluidimage.works.piv import WorkPIV

params = WorkPIV.create_default_params()

# params.piv0.method_correl = 'pythran'

params.multipass.number = 1
params.multipass.use_tps = False
# params.multipass.coeff_zoom = [2, 2]  # bug

params.piv0.shape_crop_im0 = 128  # !!
params.piv0.shape_crop_im0 = 64  # (80, 90)
# params.piv0.shape_crop_im1 = (38, 36)

params.fix.correl_min = 0.2
params.fix.threshold_diff_neighbour = 4

# params.piv0.grid.overlap = 10

piv = WorkPIV(params=params)

series = SeriesOfArrays("../../../image_samples/Oseen/Images", "i+1:i+3")
serie = series.get_serie_from_index(0)

result = piv.calcul(serie)

result.display()
result.save()

# lightresult = result.make_light_result()
# lightresult.save()

# lightresultload = LightPIVResults(str_path='piv_Oseen_center01-02_light.h5')

# f = h5netcdf.File('piv_Oseen_center01-02.h5')
# f = h5py.File('piv_Oseen_center01-02.h5')
class TopologyPreproc(TopologyBase):
    """Preprocess series of images.

    The most useful methods for the user (in particular :func:`compute`) are
    defined in the base class :class:`fluidimage.topologies.base.TopologyBase`.

    Parameters
    ----------

    params : None

      A ParamContainer (created with the class method
      :func:`create_default_params`) containing the parameters for the
      computation.

    logging_level : str, {'warning', 'info', 'debug', ...}

      Logging level.

    nb_max_workers : None, int

      Maximum number of "workers". If None, a number is computed from the
      number of cores detected. If there are memory errors, you can try to
      decrease the number of workers.

    """

    @classmethod
    def create_default_params(cls, backend="python"):
        """Class method returning the default parameters.

        Typical usage::

          params = TopologyPreproc.create_default_params()
          # modify parameters here
          ...

          topo = TopologyPreproc(params)

        Parameters
        ----------

        backend : {'python', 'opencv'}

          Specifies which backend to use.

        """
        params = WorkPreproc.create_default_params(backend)

        params.preproc.series._set_attribs(
            {
                "strcouple": "i:i+1",
                "ind_start": 0,
                "ind_stop": None,
                "ind_step": 1,
            }
        )

        params.preproc.series._set_doc(
            """
Parameters describing image loading prior to preprocessing.

strcouple : str

    Determines the subset of the whole series of images that should be loaded
    and preprocessed together. Particularly useful when temporal filtering
    requires multiple images.

    For example, for a series of images with just one index,

    >>> strcouple = 'i:i+1'   # load one image at a time
    >>> strcouple = 'i-2:i+3'  # load 5 images at a time

    Similarly, for two indices,

    >>> strcouple = 'i:i+1,0'   # load one image at a time, second index fixed
    >>> strcouple = 'i-2:i+3,0'  # load 5 images at a time, second index fixed

    .. todo::

        Rename this parameter to strsubset / strslice

ind_start : int

    Start index for the whole series of images being loaded.
    For more details: see `class SeriesOfArrays`.

ind_stop : int

    Stop index for the whole series of images being loaded.
    For more details: see `class SeriesOfArrays`.

ind_step : int

    Step index for the whole series of images being loaded.
    For more details: see `class SeriesOfArrays`.

"""
        )

        params.preproc._set_child(
            "saving",
            attribs={
                "path": None,
                "strcouple": None,
                "how": "ask",
                "format": "img",
                "postfix": "pre",
            },
        )

        params.preproc.saving._set_doc(
            """
Parameters describing image saving after preprocessing.

path : str or None

    Path to which preprocessed images are saved.

str_subset : str or None

    NotImplemented! Determines the sub-subset of images that must be saved
    from the subset of images that were loaded and preprocessed. When set to
    None, saves the middle image from every subset.

    .. todo::

        Implement the option params.saving.str_subset...

how : str {'ask', 'new_dir', 'complete', 'recompute'}

    How preprocessed images must be saved when the output directory already
    exists or not.

format : str {'img', 'hdf5'}

    Format in which preprocessed image data must be saved.

postfix : str

    A suffix added to the new directory where preprocessed images are saved.
"""
        )

        params._set_internal_attr(
            "_value_text",
            json.dumps(
                {
                    "program": "fluidimage",
                    "module": "fluidimage.topologies.preproc",
                    "class": "TopologyPreproc",
                }
            ),
        )

        params._set_child("im2im")
        image2image.complete_im2im_params_with_default(params.im2im)

        return params

    def __init__(
        self, params: ParamContainer, logging_level="info", nb_max_workers=None
    ):
        self.params = params.preproc
        self.preproc_work = WorkPreproc(params)
        self.results = []
        self.display = self.preproc_work.display

        serie_arrays = self.preproc_work.serie_arrays

        self.series = SeriesOfArrays(
            serie_arrays,
            params.preproc.series.strcouple,
            ind_start=params.preproc.series.ind_start,
            ind_stop=params.preproc.series.ind_stop,
            ind_step=params.preproc.series.ind_step,
        )

        subset = self.series.get_serie_from_index(0)
        self.nb_items_per_serie = subset.get_nb_arrays()

        if os.path.isdir(params.preproc.series.path):
            path_dir = params.preproc.series.path
        else:
            path_dir = os.path.dirname(params.preproc.series.path)
        self.path_dir_input = path_dir

        path_dir_result, self.how_saving = prepare_path_dir_result(
            path_dir,
            params.preproc.saving.path,
            params.preproc.saving.postfix,
            params.preproc.saving.how,
        )

        super().__init__(
            path_dir_result=path_dir_result,
            logging_level=logging_level,
            nb_max_workers=nb_max_workers,
        )

        self.params.saving.path = self.path_dir_result

        # Define waiting queues
        queue_subsets_of_names = self.add_queue("subsets of filenames")
        queue_paths = self.add_queue("image paths")
        queue_arrays = queue_arrays1 = self.add_queue("arrays")
        queue_subsets_of_arrays = self.add_queue("subsets of arrays")
        queue_preproc_objects = self.add_queue("preproc results")

        if params.im2im.im2im is not None:
            queue_arrays1 = self.add_queue("arrays1")

        # Define works
        self.add_work(
            "fill (subsets_of_names, paths)",
            func_or_cls=self.fill_subsets_of_names_and_paths,
            output_queue=(queue_subsets_of_names, queue_paths),
            kind=("global", "one shot"),
        )
        self.add_work(
            "imread",
            func_or_cls=imread,
            input_queue=queue_paths,
            output_queue=queue_arrays,
            kind="io",
        )

        if params.im2im.im2im is not None:
            im2im_func = image2image.TopologyImage2Image.init_im2im(
                self, params.im2im
            )
            self.add_work(
                "image2image",
                func_or_cls=im2im_func,
                input_queue=queue_arrays,
                output_queue=queue_arrays1,
            )

        self.add_work(
            "make subsets of arrays",
            func_or_cls=self.make_subsets,
            input_queue=(queue_subsets_of_names, queue_arrays1),
            output_queue=queue_subsets_of_arrays,
            kind="global",
        )

        self.add_work(
            "preproc a subset of arrays",
            func_or_cls=self.preproc_work.calcul,
            params_cls=params,
            input_queue=queue_subsets_of_arrays,
            output_queue=queue_preproc_objects,
        )

        self.add_work(
            "save images",
            func_or_cls=self.save_preproc_object,
            input_queue=queue_preproc_objects,
            kind="io",
        )

    def save_preproc_object(self, obj: ArraySubset):
        """Save a preprocessing object"""
        ret = obj.save(path=self.path_dir_result)
        self.results.append(ret)

    def init_series(self) -> List[str]:
        """Initialize the SeriesOfArrays object `self.series` based on the
        input parameters."""
        series = self.series
        if not series:
            logger.warning("encountered empty series. No images to preprocess.")
            return
        if self.how_saving == "complete":
            index_subsets = []
            for ind_subset, subset in self.series.items():
                names_serie = subset.get_name_arrays()
                name_preproc = get_name_preproc(
                    subset,
                    names_serie,
                    ind_subset,
                    series.nb_series,
                    self.params.saving.format,
                )
                if not (self.path_dir_result / name_preproc).exists():
                    index_subsets.append(ind_subset)

            series.set_index_series(index_subsets)

            if logger.isEnabledFor(DEBUG):
                logger.debug(
                    repr([subset.get_name_arrays() for subset in series])
                )

        nb_subsets = len(series)
        if nb_subsets == 0:
            logger.warning('topology in mode "complete" and work already done.')
            return
        elif nb_subsets == 1:
            plural = ""
        else:
            plural = "s"

        logger.info(f"Add {nb_subsets} image serie{plural} to compute.")

    def fill_subsets_of_names_and_paths(
        self, input_queue: None, output_queues: Tuple[Dict]
    ) -> None:
        """Fill the two first queues"""
        assert input_queue is None
        queue_subsets_of_names, queue_paths = output_queues

        self.init_series()

        for ind_subset, subset in self.series.items():
            queue_subsets_of_names[ind_subset] = subset.get_name_arrays()
            for name, path in subset.get_name_path_arrays():
                queue_paths[name] = path

    def make_subsets(self, input_queues: Tuple[Dict], output_queue: Dict) -> bool:
        """Create the subsets of images"""
        queue_subsets_of_names, queue_arrays = input_queues

        # for each subset of names
        for key, names in list(queue_subsets_of_names.items()):
            # if the corresponding arrays have been loaded from images,
            # make an array subset
            if all(name in queue_arrays for name in names):
                arrays = (queue_arrays[name] for name in names)
                serie = copy.copy(self.series.get_serie_from_index(key))
                array_subset = ArraySubset(
                    names=names, arrays=arrays, serie=serie
                )
                output_queue[key] = array_subset
                del queue_subsets_of_names[key]

                # remove the image arrays that will not be used anymore
                key_arrays = list(queue_arrays.keys())
                for key_array in key_arrays:
                    if not is_name_in_queue(key_array, queue_subsets_of_names):
                        del queue_arrays[key_array]
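A minimal usage sketch following the "Typical usage" snippet in the docstring above; the input path is a placeholder and the saving options shown are among the documented values:

from fluidimage.topologies.preproc import TopologyPreproc

params = TopologyPreproc.create_default_params(backend="python")

# placeholder input directory; 'i:i+1' loads one image per subset
params.preproc.series.path = "path/to/images"
params.preproc.series.strcouple = "i:i+1"

# save preprocessed images as image files in a ".pre" directory
params.preproc.saving.how = "complete"
params.preproc.saving.format = "img"

topo = TopologyPreproc(params, logging_level="info")
topo.compute()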
import os

from fluidimage import SeriesOfArrays
from fluidimage.works.piv import WorkPIV

from path_images import get_path

params = WorkPIV.create_default_params()

params.piv0.shape_crop_im0 = 128
params.piv0.grid.overlap = 0.5

params.multipass.number = 2
params.multipass.use_tps = False

params.fix.displacement_max = 15
params.fix.correl_min = 0.1

piv = WorkPIV(params=params)

path = os.path.join(get_path('2001A'), 'A*')
series = SeriesOfArrays(path, 'i, 1:3', ind_start=1)
serie = series.get_serie_from_index(1)

result = piv.calcul(serie)

result.display()