Example No. 1
    def init_app(self, app):
        """Initializes the extension for a flask application. This will create
        a *GC3Pie* engine and start it in the background using the "gevent"
        scheduler.

        Parameters
        ----------
        app: flask.Flask
            flask application

        See also
        --------
        :class:`tmserver.extensions.gc3pie.engine.BgEngine`
        """
        logger.info('initialize GC3Pie extension')
        logger.debug('create GC3Pie engine')
        store = create_gc3pie_sql_store()
        engine = create_gc3pie_engine(store)
        bgengine = BgEngine('gevent', engine)
        logger.debug('start GC3Pie engine in the background')
        bgengine.start(10)
        app.extensions['gc3pie'] = {
            'engine': bgengine,
            'store': store,
        }
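
A minimal wiring sketch for this extension pattern, assuming a GC3Pie extension class that exposes the init_app() method above; the class name and import path are assumptions, only init_app() appears in the example:

# Hypothetical wiring of the extension into a Flask app; the GC3Pie class
# name and its import path are assumed -- only init_app() is shown above.
from flask import Flask
from tmserver.extensions import GC3Pie   # assumed import path

app = Flask(__name__)
gc3pie = GC3Pie()       # assumed extension class
gc3pie.init_app(app)    # creates the SQL store and starts the background engine

# after initialization, engine and store are reachable through the app:
engine = app.extensions['gc3pie']['engine']
store = app.extensions['gc3pie']['store']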
Example No. 2
    def submit(self, monitoring_depth, monitoring_interval, force=False):
        '''Creates a workflow, submits it to the cluster and monitors its
        progress.

        Parameters
        ----------
        monitoring_depth: int
            number of child tasks that should be monitored
        monitoring_interval: int
            query status of jobs every `monitoring_interval` seconds
        force: bool, optional
            whether inactive stages and steps should be submitted anyway
        '''
        self._print_logo()
        logger.info('submit workflow')
        submission_id, user_name = self.register_submission()
        with tm.utils.ExperimentSession(self.experiment_id) as session:
            experiment = session.query(tm.Experiment).get(self.experiment_id)
            workflow_description = experiment.workflow_description
        if force:
            for stage in workflow_description.stages:
                stage.active = True
                for step in stage.steps:
                    step.active = True
        workflow = Workflow(
            experiment_id=self.experiment_id,
            verbosity=self.verbosity,
            submission_id=submission_id,
            user_name=user_name,
            description=workflow_description
        )
        store = create_gc3pie_sql_store()
        store.save(workflow)
        self.update_submission(workflow)
        engine = create_gc3pie_engine(store)
        logger.info('submit and monitor jobs')
        try:
            self.submit_jobs(
                workflow, engine,
                monitoring_depth=monitoring_depth,
                monitoring_interval=monitoring_interval
            )
        except KeyboardInterrupt:
            logger.info('processing interrupted')
            logger.info('killing jobs')
            while True:
                engine.kill(workflow)
                engine.progress()
                if workflow.is_terminated:
                    break
        except Exception:
            raise
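
A hedged usage sketch of how submit() might be driven; the WorkflowManager name, its constructor arguments and the concrete values are assumptions, only submit() and its parameters appear in the example:

# Hypothetical caller; WorkflowManager and its constructor are assumed.
manager = WorkflowManager(experiment_id=1, verbosity=2)
manager.submit(
    monitoring_depth=2,       # monitor tasks two levels below the workflow
    monitoring_interval=10,   # poll job status every 10 seconds
    force=True                # also submit stages/steps marked inactive
)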
Example No. 3
    def resubmit(self, monitoring_depth, stage):
        '''Resubmits a previously created workflow to the cluster and monitors
        its status.

        Parameters
        ----------
        monitoring_depth: int
            number of child tasks that should be monitored
        stage: str
            name of the stage at which the workflow should be resubmitted
        '''
        self._print_logo()
        store = create_gc3pie_sql_store()
        task_id = self.get_task_id_of_last_submission()
        with tm.utils.ExperimentSession(self.experiment_id) as session:
            experiment = session.query(tm.Experiment).get(self.experiment_id)
            workflow_description = experiment.workflow_description
        workflow = store.load(task_id)
        workflow.update_description(workflow_description)
        stage_names = [s.name for s in workflow.description.stages]
        try:
            start_index = stage_names.index(stage)
            workflow.update_stage(start_index)
        except ValueError:
            raise WorkflowDescriptionError('Unknown stage "%s".' % stage)
        logger.info('resubmit workflow at stage #%d "%s"', start_index, stage)
        engine = create_gc3pie_engine(store)
        logger.info('resubmit and monitor jobs')
        try:
            self.submit_jobs(
                workflow, engine, start_index=start_index,
                monitoring_depth=monitoring_depth
            )
        except KeyboardInterrupt:
            logger.info('processing interrupted')
            logger.info('killing jobs')
            while True:
                engine.kill(workflow)
                engine.progress()
                if workflow.is_terminated:
                    break
        except Exception:
            raise
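
The corresponding resubmission, reusing the assumed manager object from the previous sketch; the stage name is made up for illustration:

# Hypothetical caller; resumes the last submitted workflow at a named stage.
manager.resubmit(monitoring_depth=2, stage='image_analysis')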
Example No. 4
    def submit(self, monitoring_depth, monitoring_interval):
        '''Creates run-phase jobs for a single step (plus a collect-phase job,
        if the step has one), saves them to the store and submits them to the
        cluster, monitoring their progress.
        '''
        self._print_logo()
        submission_id, user_name = self.register_submission()
        api = self.api_instance

        jobs = IndependentJobCollection(api.step_name, submission_id)
        run_job_collection = api.create_run_phase(submission_id,
                                                  jobs.persistent_id)
        run_jobs = api.create_run_jobs(user_name,
                                       run_job_collection,
                                       self.verbosity,
                                       duration=self._submission_args.duration,
                                       memory=self._submission_args.memory,
                                       cores=self._submission_args.cores)
        jobs.add(run_jobs)
        if api.has_collect_phase:
            collect_job_collection = api.create_collect_phase(
                submission_id, jobs.persistent_id)
            collect_job = api.create_collect_job(user_name,
                                                 collect_job_collection,
                                                 self.verbosity)
            jobs.add(collect_job)

        store = create_gc3pie_sql_store()
        store.save(jobs)
        self.update_submission(jobs)
        engine = create_gc3pie_engine(store)
        logger.info('submit and monitor jobs')
        try:
            self.submit_jobs(jobs,
                             engine,
                             monitoring_depth=monitoring_depth,
                             monitoring_interval=monitoring_interval)
        except KeyboardInterrupt:
            logger.info('processing interrupted')
            logger.info('killing jobs')
            while True:
                engine.kill(jobs)
                engine.progress()
                if jobs.is_terminated:
                    break
        except Exception:
            raise
Example No. 5
    def load_jobs(self):
        '''Loads previously submitted jobs from the database.

        Returns
        -------
        tmlib.workflow.job or tmlib.workflow.job.JobCollection
            loaded jobs
        '''
        with tm.utils.MainSession() as session:
            last_submission_id = session.query(func.max(tm.Submission.id)).\
                filter(
                    tm.Submission.experiment_id == self.api_instance.experiment_id,
                    tm.Submission.program == self.name
                ).\
                group_by(tm.Submission.experiment_id).\
                one()[0]
            last_submission = session.query(tm.Submission).\
                get(last_submission_id)
            job_id = last_submission.top_task_id
        store = create_gc3pie_sql_store()
        return store.load(job_id)
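
A short sketch of loading and inspecting the last submission, again assuming a manager-like object as in the earlier sketches and that the returned value behaves like a GC3Pie task:

# Hypothetical caller; persistent_id and execution.state are standard
# GC3Pie task attributes, assumed to be available on the returned object.
jobs = manager.load_jobs()
print(jobs.persistent_id, jobs.execution.state)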
Example No. 6
    def init_app(self, app, jobdaemon_url=None):
        """
        Start the GC3Pie job daemon process and connect to the DB.

        Parameters
        ----------
        app: flask.Flask
            flask application
        jobdaemon_url: str, optional
            URL of the GC3Pie job daemon; a bare ``host:port`` value is
            prefixed with ``http://``. If omitted, the URL is taken from
            the configuration (``cfg.jobdaemon_url``).
        """
        logger.info('initializing GC3Pie extension ...')
        if jobdaemon_url:
            if jobdaemon_url.startswith('http'):
                self._jobdaemon_url = jobdaemon_url
            else:
                self._jobdaemon_url = 'http://' + jobdaemon_url
        else:
            # build it from host and port
            self._jobdaemon_url = cfg.jobdaemon_url
        app.extensions['gc3pie'] = {
            'store': create_gc3pie_sql_store(),
            'client': self._connect_to_job_daemon(),
        }
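
The same wiring as in the first sketch, but with an explicit daemon address; a bare host:port string gets the http:// prefix added by init_app() itself, and the address and port used here are made up:

# Hypothetical call, reusing the assumed GC3Pie extension object and app
# from the first sketch; the daemon address is made up for illustration.
gc3pie = GC3Pie()
gc3pie.init_app(app, jobdaemon_url='localhost:9999')
client = app.extensions['gc3pie']['client']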
Example No. 7
    def resubmit(self, monitoring_depth, monitoring_interval):
        '''Loads the jobs of the last submission from the database and
        resubmits them to the cluster, monitoring their progress.
        '''
        self._print_logo()
        api = self.api_instance
        store = create_gc3pie_sql_store()
        job_id = self.get_task_id_of_last_submission()
        jobs = store.load(job_id)
        engine = create_gc3pie_engine(store)
        logger.info('resubmit and monitor jobs')
        try:
            self.submit_jobs(jobs,
                             engine,
                             monitoring_depth=monitoring_depth,
                             monitoring_interval=monitoring_interval)
        except KeyboardInterrupt:
            logger.info('processing interrupted')
            logger.info('killing jobs')
            while True:
                engine.kill(jobs)
                engine.progress()
                if jobs.is_terminated:
                    break
        except Exception:
            raise