Example #1
    def __call__(self,
                 problem,
                 options,
                 post_process_hook,
                 req_info,
                 coef_info,
                 micro_coors,
                 store_micro_idxs,
                 chunk_size,
                 time_tag=''):
        """Calculate homogenized correctors and coefficients.

        Parameters
        ----------
        The same parameters as :class:`HomogenizationWorker`, extended by:
        chunk_size : int
            The desired number of microproblems in one chunk.

        Returns
        -------
        The same values as returned by :class:`HomogenizationWorker`.
        """
        import sfepy.base.multiproc as multiproc

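        # Shared (inter-process) data used by the worker processes: the
        # computed dependencies, the save/dump file names, the per-task
        # dependency counters, the number of remaining tasks, the task queue
        # and a lock.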
        dependencies = multiproc.get_dict('dependencies', clear=True)
        sd_names = multiproc.get_dict('sd_names', clear=True)
        numdeps = multiproc.get_dict('numdeps', clear=True)
        remaining = multiproc.get_int_value('remaining', 0)
        tasks = multiproc.get_queue('tasks')
        lock = multiproc.get_lock('lock')

        if micro_coors is not None:
            micro_chunk_tab, req_info, coef_info = \
                self.chunk_micro_coors(self.num_workers, micro_coors.shape[0],
                                       req_info, coef_info,
                                       chunk_size, store_micro_idxs)
        else:
            micro_chunk_tab = None

        sorted_names = self.get_sorted_dependencies(req_info, coef_info,
                                                    options.compute_only)

        remaining.value = len(sorted_names)

        # calculate number of dependencies and inverse map
        inverse_deps = {}
        for name in sorted_names:
            if name.startswith('c.'):
                reqs = coef_info[name[2:]].get('requires', [])
            else:
                reqs = req_info[name].get('requires', [])
            numdeps[name] = len(reqs)
            if len(reqs) > 0:
                for req in reqs:
                    if req in inverse_deps:
                        inverse_deps[req].append(name)
                    else:
                        inverse_deps[req] = [name]

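        # queue the tasks that can be computed right away (no dependencies)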
        for name in sorted_names:
            if numdeps[name] == 0:
                tasks.put(name)

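        # spawn the worker processes; each one computes tasks taken from the
        # shared queue via calculate_req_multi()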
        workers = []
        for ii in range(self.num_workers):
            args = (tasks, lock, remaining, numdeps, inverse_deps, problem,
                    options, post_process_hook, req_info, coef_info, sd_names,
                    dependencies, micro_coors, time_tag, micro_chunk_tab,
                    str(ii + 1))
            w = multiproc.Process(target=self.calculate_req_multi, args=args)
            w.start()
            workers.append(w)

        # block until all workers have finished
        for w in workers:
            w.join()

        if micro_coors is not None:
            dependencies = self.dechunk_reqs_coefs(dependencies,
                                                   len(micro_chunk_tab))

        return dependencies, sd_names
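
The parallel dispatch above relies on two bookkeeping structures: numdeps, which counts the unresolved requirements of each task, and inverse_deps, which maps each task to the tasks waiting for it. Only tasks whose counter is zero enter the queue; as tasks finish, the counters of their dependents drop and the newly unblocked tasks are queued. A minimal single-process sketch of this scheduling idea, using hypothetical task names rather than the sfepy API:

from collections import deque

# Hypothetical dependency graph (names chosen only for illustration):
# coefficient 'c.E' needs the corrector 'corrs_rs', which needs 'pis'.
requires = {
    'c.E': ['corrs_rs'],
    'corrs_rs': ['pis'],
    'pis': [],
}

# Number of unresolved dependencies per task and the inverse map
# (who is waiting for a given task).
numdeps = {name: len(reqs) for name, reqs in requires.items()}
inverse_deps = {}
for name, reqs in requires.items():
    for req in reqs:
        inverse_deps.setdefault(req, []).append(name)

# Seed the queue with tasks that have nothing to wait for.
queue = deque(name for name, nd in numdeps.items() if nd == 0)

order = []
while queue:
    name = queue.popleft()
    order.append(name)  # a worker process would compute the task here
    for dependent in inverse_deps.get(name, []):
        numdeps[dependent] -= 1
        if numdeps[dependent] == 0:
            queue.append(dependent)

print(order)  # ['pis', 'corrs_rs', 'c.E']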
Example #2
    def call(self, ret_all=False, time_tag=''):
        problem = self.problem
        opts = self.app_options

        # Some coefficients can require other coefficients - resolve their
        # order here.
        req_info = getattr(self.conf, opts.requirements, {})
        coef_info = getattr(self.conf, opts.coefs, {})
        coef_info = self.define_volume_coef(coef_info, self.volumes)

        is_store_filenames = coef_info.pop('filenames', None) is not None
        is_micro_coors = self.micro_coors is not None

        use_multiprocessing = \
            multiproc.use_multiprocessing and self.app_options.multiprocessing

        if use_multiprocessing:
            num_workers = multiproc.cpu_count()

            if is_micro_coors:
                coef_info_orig = coef_info.copy()
                micro_chunk_tab, req_info, coef_info = \
                    self.chunk_micro_coors(num_workers,
                                           self.micro_coors.shape[0],
                                           req_info, coef_info,
                                           self.app_options.chunk_size,
                                           self.app_options.store_micro_idxs)
            else:
                micro_chunk_tab = None

            sorted_names = self.get_sorted_dependencies(req_info, coef_info,
                                                        opts.compute_only)

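            # shared (inter-process) data for the workers: computed
            # dependencies, save/dump file names, dependency counters, the
            # number of remaining tasks, the task queue and a lock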
            dependencies = multiproc.get_dict('dependencies', clear=True)
            sd_names = multiproc.get_dict('sd_names', clear=True)
            numdeps = multiproc.get_list('numdeps', clear=True)
            remaining = multiproc.get_int_value('remaining', len(sorted_names))
            tasks = multiproc.get_queue('tasks')
            lock = multiproc.get_lock('lock')

            # calculate number of dependencies and inverse map
            inverse_deps = {}
            for ii, name in enumerate(sorted_names):
                if name.startswith('c.'):
                    reqs = coef_info[name[2:]].get('requires', [])
                else:
                    reqs = req_info[name].get('requires', [])
                numdeps.append(len(reqs))
                if len(reqs) > 0:
                    for req in reqs:
                        if req in inverse_deps:
                            inverse_deps[req].append((ii, name))
                        else:
                            inverse_deps[req] = [(ii, name)]

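            # queue the tasks that can be computed right away (no dependencies)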
            for ii, name in enumerate(sorted_names):
                if numdeps[ii] == 0:
                    tasks.put(name)

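            # spawn one worker process per CPU; each worker computes tasks
            # taken from the shared queue via calculate_req_multi()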
            workers = []
            for ii in range(num_workers):
                args = (tasks, lock, remaining, numdeps, inverse_deps,
                        problem, opts, self.post_process_hook, req_info,
                        coef_info, sd_names, dependencies, self.micro_coors,
                        time_tag, micro_chunk_tab, str(ii + 1))
                w = multiproc.Process(target=self.calculate_req_multi,
                                      args=args)
                w.start()
                workers.append(w)

            # block until all workers have finished
            for w in workers:
                w.join()

            if is_micro_coors:
                dependencies = self.dechunk_reqs_coefs(dependencies,
                                                       len(micro_chunk_tab))
                coef_info = coef_info_orig

        else:  # no multiprocessing
            dependencies = {}
            sd_names = {}

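            # serial fall-back: compute the requirements and coefficients one
            # by one in an order respecting their mutual dependencies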
            sorted_names = self.get_sorted_dependencies(req_info, coef_info,
                                                        opts.compute_only)
            for name in sorted_names:
                if not name.startswith('c.'):
                    if is_micro_coors:
                        req_info[name]['store_idxs'] = \
                            (list(self.app_options.store_micro_idxs), 0)

                val = self.calculate_req(problem, opts, self.post_process_hook,
                                         name, req_info, coef_info, sd_names,
                                         dependencies, self.micro_coors,
                                         time_tag)

                dependencies[name] = val

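        # split the results: entries prefixed by 'c.' become homogenized
        # coefficients (unless marked as auxiliary), the remaining entries are
        # returned as dependencies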
        coefs = Struct()
        deps = {}
        for name in dependencies.keys():
            data = dependencies[name]
            if name.startswith('c.'):
                coef_name = name[2:]
                cstat = coef_info[coef_name].get('status', 'main')
                # remove "auxiliary" coefs
                if not cstat == 'auxiliary':
                    setattr(coefs, coef_name, data)
            else:
                deps[name] = data

        # Store filenames of all requirements as a "coefficient".
        if is_store_filenames:
            for name in sd_names.keys():
                if '|multiprocessing_' in name:
                    mname = rm_multi(name)
                    if mname in sd_names:
                        sd_names[mname] += sd_names[name]
                    else:
                        sd_names[mname] = sd_names[name]
                    del sd_names[name]

            save_names = {}
            dump_names = {}
            for name in sd_names.keys():
                val = sd_names[name]
                if name.startswith('s.'):
                    save_names[name[2:]] = val
                elif name.startswith('d.'):
                    dump_names[name[2:]] = val
            coefs.save_names = save_names
            coefs.dump_names = dump_names

        if opts.coefs_info is not None:
            coefs.info = opts.coefs_info

        if ret_all:
            return coefs, deps
        else:
            return coefs
Example #3
    def call(self, verbose=False, ret_all=None, itime=None, iiter=None):
        """
        Call the homogenization engine and compute the homogenized
        coefficients.

        Parameters
        ----------
        verbose : bool
            If True, print the computed coefficients.
        ret_all : bool or None
            If not None, it can be used to override the 'return_all' option.
            If True, the dependencies are returned as well.
        itime : int, optional
            The time step index, used in the time tag of output file names.
        iiter : int, optional
            The iteration index, used in the time tag of output file names.

        Returns
        -------
        coefs : Coefficients instance
            The homogenized coefficients.
        dependencies : dict
            The dependencies, if `ret_all` is True.
        """
        opts = self.app_options

        ret_all = get_default(ret_all, opts.return_all)

        if not hasattr(self, 'he'):
            volumes = {}
            if hasattr(opts, 'volumes') and (opts.volumes is not None):
                volumes.update(opts.volumes)
            elif hasattr(opts, 'volume') and (opts.volume is not None):
                volumes['total'] = opts.volume
            else:
                volumes['total'] = 1.0

            self.he = HomogenizationEngine(self.problem, self.options,
                                           volumes=volumes)

        if self.micro_coors is not None:
            self.he.set_micro_coors(self.update_micro_coors(ret_val=True))

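        # in the multiprocessing mode, share the mappings of the mesh update
        # variable and the periodicity cache among the processes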
        if multiproc.use_multiprocessing and self.app_options.multiprocessing:
            upd_var = self.app_options.mesh_update_variable
            if upd_var is not None:
                uvar = self.problem.create_variables([upd_var])[upd_var]
                uvar.field.mappings0 = multiproc.get_dict('mappings0')
            per.periodic_cache = multiproc.get_dict('periodic_cache')

        time_tag = ('' if itime is None else '_t%03d' % itime)\
            + ('' if iiter is None else '_i%03d' % iiter)

        aux = self.he(ret_all=ret_all, time_tag=time_tag)
        if ret_all:
            coefs, dependencies = aux
            # store correctors for coors update
            if opts.mesh_update_corrector is not None:
                self.updating_corrs =\
                    dependencies[opts.mesh_update_corrector]
        else:
            coefs = aux

        coefs = Coefficients(**coefs.to_dict())

        if verbose:
            prec = nm.get_printoptions()['precision']
            if hasattr(opts, 'print_digits'):
                nm.set_printoptions(precision=opts.print_digits)
            print(coefs)
            nm.set_printoptions(precision=prec)

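        # store the current coordinates of the selected microstructures in the
        # micro-state cache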
        ms_cache = self.micro_state_cache
        for ii in self.app_options.store_micro_idxs:
            key = self.get_micro_cache_key('coors', ii, itime)
            ms_cache[key] = self.micro_coors[ii,...]

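        # save the homogenized coefficients in HDF5 and plain text formats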
        coef_save_name = op.join(opts.output_dir, opts.coefs_filename)
        coefs.to_file_hdf5(coef_save_name + '%s.h5' % time_tag)
        coefs.to_file_txt(coef_save_name + '%s.txt' % time_tag,
                          opts.tex_names,
                          opts.float_format)

        if ret_all:
            return coefs, dependencies
        else:
            return coefs
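
For reference, the time tag built in the call above is just the zero-padded time step and iteration indices glued together; a quick check with hypothetical values itime=2 and iiter=5:

itime, iiter = 2, 5  # hypothetical values, for illustration only
time_tag = ('' if itime is None else '_t%03d' % itime) \
    + ('' if iiter is None else '_i%03d' % iiter)
print(time_tag)  # prints: _t002_i005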