def _create_pipe(self):
    '''Creates the pipeline from the project's ``.pipe`` file.

    Returns
    -------
    Pipe
        pipeline built from the parsed description

    Raises
    ------
    PipelineDescriptionError
        when the file content doesn't match the expected description format
    '''
    with YamlReader(self.pipe_file) as f:
        content = f.read()
    try:
        description = PipelineDescription(**content)
    except TypeError as err:
        # BUG FIX: the original did ``raise PipelineDescription(...)`` —
        # PipelineDescription is a description class, not an exception, so the
        # raise itself would fail with an opaque TypeError. Raise a proper
        # error class instead.
        # NOTE(review): assumes ``PipelineDescriptionError`` is available in
        # this module's scope (same errors module as the other classes) —
        # confirm the import at the top of the file.
        raise PipelineDescriptionError(
            'Incorrect pipeline description: %s' % str(err)
        )
    return Pipe(description)
def update_project(experiment_id):
    '''Saves modifications of the pipeline and module descriptions to the
    corresponding `.pipe` and `.handles` files.

    Parameters
    ----------
    experiment_id: int
        ID of the experiment whose jterator project should be saved

    Raises
    ------
    MalformedRequestError
        when the project cannot be saved
    '''
    logger.info('save jterator project of experiment %d', experiment_id)
    data = json.loads(request.data)
    # SECURITY NOTE(review): ``yaml.load`` on a request payload can execute
    # arbitrary constructors on untrusted input; ``yaml.safe_load`` would be
    # preferable — confirm all callers before changing.
    project = yaml.load(data['project'])
    pipeline_description = PipelineDescription(
        **project['pipe']['description'])
    handles_descriptions = dict()
    for h in project['handles']:
        logger.debug('check handles of module "%s"', h['name'])
        handles_descriptions[h['name']] = HandleDescriptions(
            **h['description'])
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )
    try:
        # Keep the try body minimal: previously the success ``return`` was
        # inside the try, so a failure in ``jsonify`` would have been
        # misreported as "Project could not be saved".
        jt.project.save()
    except Exception as err:
        raise MalformedRequestError(
            'Project could not be saved: {err}'.format(err=err))
    return jsonify({'success': True})
def _create_handles(self):
    '''Creates a :class:`Handles` object for each module of the pipeline
    from the corresponding ``.handles`` file.

    Returns
    -------
    List[Handles]
        one handles object per module, in pipeline order

    Raises
    ------
    PipelineDescriptionError
        when a handles file doesn't match the expected description format
    '''
    handles = list()
    for name in self._module_names:
        h_file = self._get_handles_file(name)
        with YamlReader(h_file) as f:
            content = f.read()
        try:
            description = HandleDescriptions(**content)
        except TypeError as err:
            # BUG FIX: the original raised ``PipelineDescription`` (a
            # description class, not an exception), which would itself fail
            # with a TypeError at raise time. Raise a proper error class.
            # NOTE(review): assumes ``PipelineDescriptionError`` is in scope —
            # confirm the module's imports.
            raise PipelineDescriptionError(
                'Incorrect handles description of module "%s": %s'
                % (name, str(err))
            )
        h = Handles(name, description)
        handles.append(h)
    return handles
def get_job_output(experiment_id):
    '''Gets output generated by a previous submission.

    Returns ``None`` as output when no prior submission exists for the
    experiment.
    '''
    payload = json.loads(request.data)
    project = yaml.load(payload['project'])
    pipe_description = PipelineDescription(**project['pipe']['description'])
    # Map each module name to its parsed handles description.
    module_handles = dict()
    for module in project['handles']:
        module_handles[module['name']] = HandleDescriptions(
            **module['description'])
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipe_description,
        handles_descriptions=module_handles,
    )
    try:
        tasks = gc3pie.retrieve_most_recent_task(experiment_id, 'jtui')
        result = _get_output(experiment_id, tasks)
        return jsonify(output=result)
    except IndexError:
        # No submission has been made yet for this experiment.
        return jsonify(output=None)
def check_project(experiment_id):
    '''Checks pipeline and module descriptions.

    Validation happens as a side effect of instantiating
    ``ImageAnalysisPipelineEngine`` with the submitted descriptions.

    Raises
    ------
    MalformedRequestError
        when the pipeline check fails
    '''
    logger.info(
        'check description of jterator project of experiment %d',
        experiment_id
    )
    data = json.loads(request.data)
    # SECURITY NOTE(review): ``yaml.load`` on a request payload is unsafe for
    # untrusted input; consider ``yaml.safe_load`` — confirm callers first.
    project = yaml.load(data['project'])
    pipeline_description = PipelineDescription(
        **project['pipe']['description'])
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    try:
        # The constructor performs the validation; the engine object itself
        # is not needed afterwards (dead local binding removed). The success
        # ``return`` is also moved out of the try so a ``jsonify`` failure
        # cannot be misreported as a failed pipeline check.
        ImageAnalysisPipelineEngine(
            experiment_id,
            pipeline_description=pipeline_description,
            handles_descriptions=handles_descriptions,
        )
    except Exception as err:
        raise MalformedRequestError('Pipeline check failed:\n%s' % str(err))
    return jsonify(success=True)
def run_jobs(experiment_id):
    '''Runs one or more jobs of the current project with pipeline and module
    descriptions provided by the UI.

    This requires the pipeline and module descriptions to be saved to *pipe*
    and *handles* files, respectively.
    '''
    logger.info(
        'submit jobs for jterator project of experiment %d', experiment_id
    )
    data = json.loads(request.data)
    # NOTE(review): on Python 2 ``map`` returns a list, which is what the
    # indexing below relies on.
    job_ids = map(int, data['job_ids'])
    # NOTE(review): ``yaml.load`` on a request payload is unsafe for untrusted
    # input (``yaml.safe_load`` would be preferable) — confirm callers.
    project = yaml.load(data['project'])
    pipeline_description = PipelineDescription(
        **project['pipe']['description'])
    # One parsed handles description per module, keyed by module name.
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )
    # 1. Delete figures and logs from previous submission
    # since they are not tracked per submission.
    jt.remove_previous_pipeline_output()
    # TODO: remove figure files of previous runs!!
    # 2. Build job descriptions
    channel_names = [
        ch.name for ch in jt.project.pipe.description.input.channels
    ]
    job_descriptions = list()
    with tm.utils.ExperimentSession(experiment_id) as session:
        # Sites ordered by ID so that a job ID deterministically selects a
        # site; an out-of-range job ID raises IndexError below.
        sites = session.query(tm.Site.id).\
            order_by(tm.Site.id).\
            all()
        for j in job_ids:
            site_id = sites[j].id
            # Count images of the pipeline's input channels at this site;
            # a job without any images cannot run.
            image_file_count = session.query(tm.ChannelImageFile.id).\
                join(tm.Channel).\
                filter(tm.Channel.name.in_(channel_names)).\
                filter(tm.ChannelImageFile.site_id == site_id).\
                count()
            if image_file_count == 0:
                raise MalformedRequestError(
                    'No images found for job ID {j}.'.format(j=j))
            job_descriptions.append({'site_id': site_id, 'plot': True})
    with tm.utils.MainSession() as session:
        # Record the submission so a fresh submission ID can be handed to
        # the debug-run phase.
        submission = tm.Submission(
            experiment_id=experiment_id, program='jtui',
            user_id=current_identity.id)
        session.add(submission)
        session.flush()
        SubmitArgs = get_step_args('jterator')[1]
        submit_args = SubmitArgs()
        job_collection = jt.create_debug_run_phase(submission.id)
        jobs = jt.create_debug_run_jobs(
            user_name=current_identity.name,
            batches=job_descriptions,
            job_collection=job_collection,
            verbosity=2,
            duration=submit_args.duration,
            memory=submit_args.memory,
            cores=submit_args.cores)
    # 3. Store jobs in session
    gc3pie.store_task(jobs)
    # session.remove(data['previousSubmissionId'])
    gc3pie.submit_task(jobs)
    return jsonify(submission_id=jobs.submission_id)
def update_project(experiment_id):
    '''
    .. http:put:: /api/experiments/(string:experiment_id)/workflow/jtproject

        Update a jterator project consisting of a
        :class:`PipelineDescription <tmlib.workflow.jterator.description.PipelineDescription>`
        and an optional
        :class:`HandleDescriptions <tmlib.workflow.jterator.description.HandleDescriptions>`
        for each module in the pipeline.

        **Example request**:

        .. sourcecode:: http

            Content-Type: application/json

            {
                "pipeline": {
                    "input": {
                        "channels": [
                            {
                                "name": "wavelength-1"
                            }
                        ]
                    },
                    "output": {},
                    "pipeline": [
                        {
                            "handles": ../handles/module1.handles.yaml,
                            "source": module1.py
                            "active": true
                        }
                    ]
                },
                "handles": {
                    "module1": {
                        "version": 0.1.0,
                        "input": [],
                        "output": []
                    },
                    ...
                }
            }

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "message": "ok"
            }

        :reqheader Authorization: JWT token issued by the server
        :statuscode 400: malformed request
        :statuscode 200: no error
    '''
    # NOTE(review): a function named ``update_project`` appears twice in this
    # file; the later definition shadows the earlier one unless they live in
    # different modules/blueprints — confirm the routing setup.
    logger.info('update jterator project of experiment %d', experiment_id)
    data = json.loads(request.data)
    pipeline = data.get('pipeline')
    if pipeline is None:
        # Robustness fix: previously a missing "pipeline" key crashed with an
        # opaque ``TypeError`` from ``PipelineDescription(**None)``; report a
        # proper 400 instead.
        raise MalformedRequestError(
            'Request body must contain a "pipeline" description.'
        )
    # The handles are documented as optional, so default to an empty mapping
    # instead of failing on ``None.iteritems()``.
    handles = data.get('handles') or dict()
    logger.debug('read pipeline description')
    pipeline_description = PipelineDescription(**pipeline)
    handles_descriptions = dict()
    for name, description in handles.iteritems():
        logger.debug('read handles description for module "%s"', name)
        handles_descriptions[name] = HandleDescriptions(**description)
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )
    jt.project.save()
    return jsonify(message='ok')