Example #1
# Imports used by this snippet; ``run_case`` is assumed to be available from
# the surrounding module.
import logging
import os
from functools import partial

from andes.shared import Pool


def _run_multiprocess_pool(cases,
                           ncpu=os.cpu_count(),
                           verbose=logging.INFO,
                           **kwargs):
    """
    Run multiprocessing jobs using Pool.

    This function returns all System instances in a list, but requires longer computation time.

    Parameters
    ----------
    ncpu : int, optional = os.cpu_cout()
        Number of cpu cores to use in parallel
    mp_verbose : 10 - 50
        Verbosity level during multiprocessing
    verbose : 10, 20, 30, 40, 50
        Verbosity level outside multiprocessing
    """
    pool = Pool(ncpu)
    print("Cases are processed in the following order:")
    print('\n'.join([f'"{name}"' for name in cases]))

    # Map ``run_case`` over the cases; results are returned in input order.
    ret = pool.map(
        partial(run_case, verbose=verbose, remove_pycapsule=True, **kwargs),
        cases)

    return ret
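
The helper above maps run_case over the case list with a process pool and
returns one System instance per case, in the same order as the input. A
minimal usage sketch follows, assuming ANDES is installed; the case file
paths are placeholders:

import logging

cases = ["case1.xlsx", "case2.xlsx"]   # placeholder paths to ANDES case files
systems = _run_multiprocess_pool(cases, ncpu=2, verbose=logging.WARNING)

# One System instance per case, in the same order as ``cases``.
for name, system in zip(cases, systems):
    print(name, "->", type(system).__name__)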
Example #2
    def _prepare_mp(self, quick=False):
        """
        Code generation with multiprocessing. NOT WORKING NOW.

        Warnings
        --------
        This function is not working: serialization fails for `conj`.
        """
        from andes.shared import Pool
        import dill
        # Let dill serialize objects referenced from function globals recursively.
        dill.settings['recurse'] = True

        # consistency check for group parameters and variables
        self._check_group_common()

        # ``Model`` (used in the type annotation) is imported at the module
        # level in the original source.
        def _prep_model(model: Model):
            # Run code generation for a single model and return it.
            model.prepare(quick=quick)
            return model

        model_list = list(self.models.values())

        # TODO: fails when serializing; see the Warnings section in the docstring.
        ret = Pool().map(_prep_model, model_list)

        # Replace each stored model with the prepared copy returned by the pool.
        for idx, name in enumerate(self.models.keys()):
            self.models[name] = ret[idx]

        self._store_calls()
        self.dill()
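
The Warnings section points at a serialization failure: the pool has to ship
the worker callable and each model to child processes, and anything that
cannot be serialized (here, `conj`) breaks the map call. As an illustrative,
ANDES-independent sketch of the baseline requirement, the standard library
Pool only accepts picklable, module-level workers; the names below are made
up for the example:

from multiprocessing import Pool


def prepare_one(value):
    # Stand-in for per-model work; module-level functions pickle cleanly.
    return value ** 2


if __name__ == "__main__":
    with Pool(processes=2) as pool:
        results = pool.map(prepare_one, [1, 2, 3, 4])
    print(results)  # [1, 4, 9, 16]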