Example 1
    def on_start_analysis(self, locations, species, areas_definition):
        """Run the analysis for each of the selected species.

        Creates a pool of worker processes. A job is created for each selected
        species and the jobs are added to the pool for execution. If multiple
        workers were created, the analyses run in parallel. When the results
        are ready, :meth:`~setlyze.analysis.common.PrepareAnalysis.on_pool_finished`
        is applied to them.
        """
        self.start_time = time.time()

        # Create a progress dialog and a handler.
        self.pdialog, self.pdialog_handler = self.get_progress_dialog()

        # Set the total number of update steps for the progress dialog.
        self.pdialog_handler.set_total_steps((PROGRESS_STEPS + self.n_repeats) *
            len(species))

        # Create a gateway to the main process for child processes.
        gw = ProcessGateway()
        gw.set_pdialog_handler(self.pdialog_handler)
        gw.start()

        # Create a process pool with workers.
        cp = setlyze.config.cfg.get('concurrent-processes')
        self.pool = multiprocessing.Pool(cp)

        # Create a list of jobs.
        logging.info("Adding %d jobs to the queue", len(species))
        jobs = ((Analysis, (locations, sp, areas_definition, gw.queue)) for sp in species)

        # Add the jobs to the pool.
        self.pool.map_async(calculatestar, jobs, callback=self.on_pool_finished)
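
The `calculatestar` helper used by `map_async` is not shown in these examples. It follows the common multiprocessing recipe of bundling a callable and its arguments into one tuple, since `map_async` passes only a single item per job. Below is a minimal sketch of such a helper; it assumes that calling `Analysis(*args)` performs the work, which may differ from the project's actual implementation.

def calculatestar(job):
    # Unpack a (callable, args) pair handed over by map_async and apply it.
    # In the examples above the callable is the Analysis class and args are
    # its arguments; the return value ends up in the results list that is
    # passed to the on_pool_finished callback.
    func, args = job
    return func(*args)
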
Example 2
    def on_start_analysis(self, locations, species, areas_definition):
        """Start the analysis.

        Starts the analysis with the locations selection `locations`, the
        species selection `species`, and the plate areas definition
        `areas_definition`.
        """
        # Create a progress dialog and a handler.
        self.pdialog, self.pdialog_handler = self.get_progress_dialog()

        # Set the total number of update steps for the progress dialog.
        self.pdialog_handler.set_total_steps(PROGRESS_STEPS + self.n_repeats)

        # Create a gateway to the main process for the child process.
        gw = ProcessGateway()
        gw.set_pdialog_handler(self.pdialog_handler)
        gw.start()

        # Create a process pool with a single worker.
        self.pool = multiprocessing.Pool(1)

        # Create a list with the job.
        jobs = [(Analysis, (locations, species, areas_definition, gw.queue))]

        # Add the job to the pool.
        self.pool.map_async(calculatestar, jobs, callback=self.on_pool_finished)
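
Stripped of the SETLyze-specific classes, the pattern in this example is `map_async` on a single-worker pool with a callback that receives the list of results. A self-contained sketch of that pattern, with purely illustrative names (`run_analysis`, `on_finished`):

import multiprocessing

def run_analysis(args):
    # Each job is a single tuple; unpack it inside the worker.
    locations, species = args
    return "analysed %s at %s" % (species, locations)

def on_finished(results):
    # map_async hands the callback a list of results, even for one job.
    print(results)

if __name__ == '__main__':
    pool = multiprocessing.Pool(1)
    jobs = [(["location A"], "species X")]
    pool.map_async(run_analysis, jobs, callback=on_finished)
    pool.close()
    pool.join()  # wait until the job is done and the callback has fired
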
Example 3
    def on_start_analysis(self, locations, species):
        """Start the analysis.

        Starts the analysis with the locations selections `locations` and the
        species selections `species`. Both must be tuples containing two lists,
        each list being a selection.
        """
        assert len(locations) == 2, \
            "The locations tuple does not contain two items."
        assert len(species) == 2, \
            "The species tuple does not contain two items."

        # Create a progress dialog and a handler.
        self.pdialog, self.pdialog_handler = self.get_progress_dialog()

        # Set the total number of update steps for the progress dialog.
        self.pdialog_handler.set_total_steps(
            PROGRESS_STEPS + self.n_repeats
        )

        # Create a gateway to the main process for the child process.
        gw = ProcessGateway()
        gw.set_pdialog_handler(self.pdialog_handler)
        gw.start()

        # Create a process pool with a single worker.
        self.pool = multiprocessing.Pool(1)

        # Create a list with the job.
        jobs = [(Analysis, (locations, species, gw.queue))]

        # Add the job to the pool.
        self.pool.map_async(calculatestar, jobs, callback=self.on_pool_finished)
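
The two assertions document the expected argument shape: both `locations` and `species` must be two-item tuples, one selection per item. A hypothetical call showing that shape, where `handler` stands for an instance of the class that defines this method and the selection values are placeholders:

handler.on_start_analysis(
    locations=([1, 2, 3], [4, 5]),           # two location selections
    species=(['species A'], ['species B']),  # two species selections
)
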
Example 4
    def on_start_analysis(self, locations, species):
        """Run the analysis for all possible inter species combinations.

        Creates a pool of worker processes. A job is created for every possible
        inter-species combination from the species selection and the jobs are
        added to the pool for execution. If multiple workers were created, the
        analyses run in parallel. When the results are ready,
        :meth:`~setlyze.analysis.common.PrepareAnalysis.on_pool_finished` is
        applied to them.
        """
        assert len(locations) == 2, \
            "The locations tuple does not contain two items."
        assert len(species) > 1, \
            "The species tuple has less than two items."

        self.start_time = time.time()

        # Get all inter species combinations for the species selection.
        species_combos = tuple(itertools.combinations(species, 2))

        # Create a progress dialog and a handler.
        self.pdialog, self.pdialog_handler = self.get_progress_dialog()

        # Set the total number of update steps for the progress dialog.
        self.pdialog_handler.set_total_steps(
            (PROGRESS_STEPS + self.n_repeats) * len(species_combos)
        )

        # Create a gateway to the main process for the child processes.
        gw = ProcessGateway()
        gw.set_pdialog_handler(self.pdialog_handler)
        gw.start()

        # Create a process pool with workers. Each worker is replaced after
        # completing 50 tasks (maxtasksperchild) so its resources are freed.
        cp = setlyze.config.cfg.get('concurrent-processes')
        self.pool = multiprocessing.Pool(cp, maxtasksperchild=50)

        # Create a list of jobs.
        logging.info("Adding %d jobs to the queue", len(species_combos))
        jobs = ((Analysis, (locations, sp_comb, gw.queue)) for sp_comb in species_combos)

        # Add the jobs to the pool.
        self.pool.map_async(calculatestar, jobs, callback=self.on_pool_finished)
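
For reference, `itertools.combinations(species, 2)` yields every unordered pair from the species selection, so the number of jobs grows as n*(n-1)/2 for n selected species. A quick illustration with placeholder names:

import itertools

species = ['species A', 'species B', 'species C']
species_combos = tuple(itertools.combinations(species, 2))
# -> (('species A', 'species B'),
#     ('species A', 'species C'),
#     ('species B', 'species C'))
print(len(species_combos))  # 3 pairs for 3 species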