Example No. 1
from argparse import ArgumentParser, RawDescriptionHelpFormatter

import sfepy.base.multiproc_mpi as multi_mpi
from sfepy.base.base import output, Struct
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.applications import PDESolverApp
from sfepy.homogenization.homogen_app import HomogenizationApp

# 'helps' maps option names to their help strings; its contents are
# defined in the full script and omitted from this excerpt.

def main():
    # if multi_mpi.cpu_count() < 2:
    #     raise ValueError('MPI mode - the number of nodes is less than 2!')

    if multi_mpi.mpi_rank == multi_mpi.mpi_master:
        # MPI master node - solve problem at macro scale
        parser = ArgumentParser(description=__doc__,
                                formatter_class=RawDescriptionHelpFormatter)
        parser.add_argument('--debug',
                            action='store_true', dest='debug',
                            default=False, help=helps['debug'])
        parser.add_argument('--debug_mpi',
                            action='store_true', dest='debug_mpi',
                            default=False, help=helps['debug_mpi'])
        parser.add_argument('-c', '--conf', metavar='"key : value, ..."',
                            action='store', dest='conf', type=str,
                            default=None, help=helps['conf'])
        parser.add_argument('-O', '--options', metavar='"key : value, ..."',
                            action='store', dest='app_options', type=str,
                            default=None, help=helps['options'])
        parser.add_argument('-d', '--define', metavar='"key : value, ..."',
                            action='store', dest='define_args', type=str,
                            default=None, help=helps['define'])
        parser.add_argument('-o', metavar='filename',
                            action='store', dest='output_filename_trunk',
                            default=None, help=helps['filename'])
        parser.add_argument('--format', metavar='format',
                            action='store', dest='output_format',
                            default=None, help=helps['output_format'])
        parser.add_argument('--log', metavar='file',
                            action='store', dest='log',
                            default=None, help=helps['log'])
        parser.add_argument('-q', '--quiet',
                            action='store_true', dest='quiet',
                            default=False, help=helps['quiet'])
        group = parser.add_mutually_exclusive_group(required=True)
        group.add_argument('filename_in', nargs='?')
        options = parser.parse_args()

        for k in ['save_ebc', 'save_ebc_nodes', 'save_regions',
                  'save_regions_as_groups', 'save_field_meshes', 'solve_not']:
            setattr(options, k, False)

        if options.debug:
            from sfepy.base.base import debug_on_error
            debug_on_error()

        if options.debug_mpi:
            multi_mpi.set_logging_level('debug')

        filename_in = options.filename_in
        output.set_output(filename=options.log,
                          quiet=options.quiet,
                          combined=options.log is not None)

        required, other = get_standard_keywords()
        conf = ProblemConf.from_file_and_options(filename_in, options,
            required, other, define_args=options.define_args)

        opts = conf.options
        nslaves = multi_mpi.cpu_count() - 1
        opts.n_mpi_homog_slaves = nslaves
        output_prefix = opts.get('output_prefix', 'sfepy:')

        app = PDESolverApp(conf, options, output_prefix)
        if hasattr(opts, 'parametric_hook'):  # Parametric study.
            parametric_hook = conf.get_function(opts.parametric_hook)
            app.parametrize(parametric_hook)
        app()

        multi_mpi.master_send_task('finalize', None)
    else:
        # MPI slave mode - calculate homogenized coefficients
        homogen_app = None
        done = False
        rank = multi_mpi.mpi_rank
        while not done:
            task, data = multi_mpi.slave_get_task('main slave loop')

            if task == 'init':  # data: micro_file, n_micro
                output.set_output(filename='homog_app_mpi_%d.log' % rank,
                                  quiet=True)
                micro_file, n_micro = data[:2]
                required, other = get_standard_keywords()
                required.remove('equations')
                conf = ProblemConf.from_file(micro_file, required, other,
                                             verbose=False)
                options = Struct(output_filename_trunk=None)
                homogen_app = HomogenizationApp(conf, options, 'micro:',
                                                n_micro=n_micro,
                                                update_micro_coors=True)
            elif task == 'calculate':  # data: rel_def_grad, ts, iteration
                rel_def_grad, ts, iteration = data[:3]
                homogen_app.setup_macro_deformation(rel_def_grad)
                homogen_app(ret_all=True, itime=ts.step, iiter=iteration)
            elif task == 'finalize':
                done = True
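
For orientation, here is a minimal sketch of the master-side counterpart to the slave loop above. Only the 'finalize' send appears in the excerpt; the 'init' and 'calculate' payloads are inferred from the `data:` comments in the slave branch, and `drive_slaves` with its arguments is a hypothetical helper, not part of sfepy.

def drive_slaves(micro_file, n_micro, rel_def_grad, ts, iteration):
    # Tell the slaves which micro-problem file to load and how many
    # micro-problems there are (consumed by the 'init' branch above).
    multi_mpi.master_send_task('init', (micro_file, n_micro))
    # Ask for homogenized coefficients at the current macro state
    # (consumed by the 'calculate' branch above).
    multi_mpi.master_send_task('calculate', (rel_def_grad, ts, iteration))
    # Terminate the slave loops, as main() does after app() returns.
    multi_mpi.master_send_task('finalize', None)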
Example No. 2
# `multiprocessing_mode`, `multiproc_mpi` and `multiproc_proc` are
# module-level names, assumed to come from sfepy.base.multiproc and not
# shown in this excerpt.
def get_num_workers():
    """Get the number of slave nodes."""
    mpi = multiprocessing_mode == 'mpi'
    return (multiproc_mpi.cpu_count() - 1 if mpi
            else multiproc_proc.cpu_count())
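
The asymmetry is deliberate: under MPI one rank is reserved for the coordinating master, so the worker count is one less than the rank count. The split below is a hypothetical usage sketch showing how the returned count might size per-worker chunks of micro-problems.

# Hypothetical sizing of per-worker chunks (n_micro is made up).
n_micro = 20
n_workers = get_num_workers()
chunk_size = -(-n_micro // n_workers)  # ceiling division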
Example No. 3
    # __call__ of an MPI homogenization worker class (the class itself
    # is not shown in this excerpt); `six` is assumed to be imported at
    # module level.
    def __call__(self,
                 problem,
                 options,
                 post_process_hook,
                 req_info,
                 coef_info,
                 micro_coors,
                 store_micro_idxs,
                 chunk_size,
                 time_tag=''):
        """Calculate homogenized correctors and coefficients.

        Parameters and Returns
        ----------------------
        The same parameters and returns as :class:`HomogenizationWorkerMulti`.
        """
        import sfepy.base.multiproc_mpi as multiproc_mpi

        dependencies = multiproc_mpi.get_dict('dependencies')
        sd_names = multiproc_mpi.get_dict('sd_names')
        numdeps = multiproc_mpi.get_dict('numdeps', mutable=True)
        remaining = multiproc_mpi.get_int_value('remaining', 0)
        tasks = multiproc_mpi.get_queue('tasks')

        if micro_coors is not None:
            micro_chunk_tab, req_info, coef_info = \
                self.chunk_micro_coors(self.num_workers,
                                       micro_coors.shape[0],
                                       req_info, coef_info,
                                       chunk_size, store_micro_idxs)
        else:
            micro_chunk_tab = None

        sorted_names = self.get_sorted_dependencies(req_info, coef_info,
                                                    options.compute_only)

        # calculate number of dependencies and inverse map
        inverse_deps = {}
        loc_numdeps = {}
        for name in sorted_names:
            if name.startswith('c.'):
                reqs = coef_info[name[2:]].get('requires', [])
            else:
                reqs = req_info[name].get('requires', [])
            loc_numdeps[name] = len(reqs)
            if len(reqs) > 0:
                for req in reqs:
                    if req in inverse_deps:
                        inverse_deps[req].append(name)
                    else:
                        inverse_deps[req] = [name]

        if multiproc_mpi.mpi_rank == multiproc_mpi.mpi_master:  # master node
            for k, v in six.iteritems(loc_numdeps):
                numdeps[k] = v

            remaining.value = len(sorted_names)

            for name in sorted_names:
                if numdeps[name] == 0:
                    tasks.put(name)

            multiproc_mpi.master_loop(multiproc_mpi.cpu_count() - 1)

            if micro_coors is not None:
                dependencies = self.dechunk_reqs_coefs(dependencies,
                                                       len(micro_chunk_tab))

            return dependencies, sd_names

        else:  # slave node
            lock = multiproc_mpi.RemoteLock()

            multiproc_mpi.start_slave()

            self.calculate_req_multi(tasks, lock, remaining, numdeps,
                                     inverse_deps, problem, options,
                                     post_process_hook, req_info, coef_info,
                                     sd_names, dependencies, micro_coors,
                                     time_tag, micro_chunk_tab,
                                     str(multiproc_mpi.mpi_rank + 1))

            multiproc_mpi.stop_slave()

            return None, None
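
The dependency bookkeeping in the middle of the method drives the scheduling: a task is queued once its dependency count reaches zero, and `inverse_deps` tells the master which tasks to revisit when a requirement finishes. Below is a self-contained toy restatement of that counting, with hypothetical `req_info`/`coef_info` tables.

# Standalone restatement of the dependency counting above,
# using made-up requirement tables.
req_info = {'corr_u': {'requires': []},
            'corr_p': {'requires': ['corr_u']}}
coef_info = {'E': {'requires': ['corr_u', 'corr_p']}}
sorted_names = ['corr_u', 'corr_p', 'c.E']

inverse_deps = {}
numdeps = {}
for name in sorted_names:
    # Coefficient names carry a 'c.' prefix; corrector names do not.
    if name.startswith('c.'):
        reqs = coef_info[name[2:]].get('requires', [])
    else:
        reqs = req_info[name].get('requires', [])
    numdeps[name] = len(reqs)
    for req in reqs:
        inverse_deps.setdefault(req, []).append(name)

print(numdeps)       # {'corr_u': 0, 'corr_p': 1, 'c.E': 2}
print(inverse_deps)  # {'corr_u': ['corr_p', 'c.E'], 'corr_p': ['c.E']}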
Example No. 4
# This variant relies on the same imports as Example No. 1.
def main():
    # if multi_mpi.cpu_count() < 2:
    #     raise ValueError('MPI mode - the number of nodes is less than 2!')

    if multi_mpi.mpi_rank == multi_mpi.mpi_master:
        # MPI master node - solve problem at macro scale
        parser = ArgumentParser(description=__doc__,
                                formatter_class=RawDescriptionHelpFormatter)
        parser.add_argument('--debug',
                            action='store_true',
                            dest='debug',
                            default=False,
                            help=helps['debug'])
        parser.add_argument('--debug_mpi',
                            action='store_true',
                            dest='debug_mpi',
                            default=False,
                            help=helps['debug_mpi'])
        parser.add_argument('-c',
                            '--conf',
                            metavar='"key : value, ..."',
                            action='store',
                            dest='conf',
                            type=str,
                            default=None,
                            help=helps['conf'])
        parser.add_argument('-O',
                            '--options',
                            metavar='"key : value, ..."',
                            action='store',
                            dest='app_options',
                            type=str,
                            default=None,
                            help=helps['options'])
        parser.add_argument('-d',
                            '--define',
                            metavar='"key : value, ..."',
                            action='store',
                            dest='define_args',
                            type=str,
                            default=None,
                            help=helps['define'])
        parser.add_argument('-o',
                            metavar='filename',
                            action='store',
                            dest='output_filename_trunk',
                            default=None,
                            help=helps['filename'])
        parser.add_argument('--format',
                            metavar='format',
                            action='store',
                            dest='output_format',
                            default=None,
                            help=helps['output_format'])
        parser.add_argument('--log',
                            metavar='file',
                            action='store',
                            dest='log',
                            default=None,
                            help=helps['log'])
        parser.add_argument('-q',
                            '--quiet',
                            action='store_true',
                            dest='quiet',
                            default=False,
                            help=helps['quiet'])
        group = parser.add_mutually_exclusive_group(required=True)
        group.add_argument('filename_in', nargs='?')
        options = parser.parse_args()

        for k in [
                'save_ebc', 'save_ebc_nodes', 'save_regions',
                'save_regions_as_groups', 'save_field_meshes', 'solve_not'
        ]:
            setattr(options, k, False)

        if options.debug:
            from sfepy.base.base import debug_on_error
            debug_on_error()

        if options.debug_mpi:
            multi_mpi.set_logging_level('debug')

        filename_in = options.filename_in
        output.set_output(filename=options.log,
                          quiet=options.quiet,
                          combined=options.log is not None)

        required, other = get_standard_keywords()
        conf = ProblemConf.from_file_and_options(
            filename_in,
            options,
            required,
            other,
            define_args=options.define_args)

        opts = conf.options
        nslaves = multi_mpi.cpu_count() - 1
        opts.n_mpi_homog_slaves = nslaves
        output_prefix = opts.get('output_prefix', 'sfepy:')

        app = PDESolverApp(conf, options, output_prefix)
        if hasattr(opts, 'parametric_hook'):  # Parametric study.
            parametric_hook = conf.get_function(opts.parametric_hook)
            app.parametrize(parametric_hook)
        app()

        multi_mpi.master_send_task('finalize', None)
    else:
        # MPI slave mode - calculate homogenized coefficients
        homogen_app = None
        done = False
        rank = multi_mpi.mpi_rank
        while not done:
            task, data = multi_mpi.slave_get_task('main slave loop')

            if task == 'init':  # data: micro_file, n_micro
                output.set_output(filename='homog_app_mpi_%d.log' % rank,
                                  quiet=True)
                micro_file, n_micro = data[:2]
                required, other = get_standard_keywords()
                required.remove('equations')
                conf = ProblemConf.from_file(micro_file,
                                             required,
                                             other,
                                             verbose=False)
                options = Struct(output_filename_trunk=None)
                homogen_app = HomogenizationApp(conf,
                                                options,
                                                'micro:',
                                                n_micro=n_micro)
            elif task == 'calculate':  # data: rel_def_grad, ts, iteration
                macro_data, ts, iteration = data[:3]
                homogen_app.setup_macro_data(macro_data)
                homogen_app(ret_all=True, itime=ts.step, iiter=iteration)
            elif task == 'finalize':
                done = True