def update_project(experiment_id):
    '''Saves modifications of the pipeline and module descriptions to the
    corresponding `.pipe` and `.handles` files.

    Parameters
    ----------
    experiment_id: int
        ID of the processed experiment

    Raises
    ------
    MalformedRequestError
        when the project could not be saved
    '''
    logger.info('save jterator project of experiment %d', experiment_id)
    data = json.loads(request.data)
    # The project arrives as a YAML string from the client. Parse with
    # safe_load: the payload is untrusted, and the project is serialized
    # via yaml.safe_dump (see get_project), so no custom tags are expected.
    project = yaml.safe_load(data['project'])
    pipeline_description = PipelineDescription(
        **project['pipe']['description'])
    handles_descriptions = dict()
    for h in project['handles']:
        logger.debug('check handles of module "%s"', h['name'])
        handles_descriptions[h['name']] = HandleDescriptions(
            **h['description'])
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )
    try:
        jt.project.save()
        return jsonify({'success': True})
    except Exception as err:
        raise MalformedRequestError(
            'Project could not be saved: {err}'.format(err=err))
def get_module_figure(experiment_id):
    '''Gets the figure for a given module.

    Query parameters:
        module_name: name of the pipeline module whose figure is requested
        job_id: index of the job whose site the figure belongs to

    Returns the figure file via ``send_file`` on success, otherwise a JSON
    error payload.
    '''
    module_name = request.args.get('module_name')
    job_id = request.args.get('job_id', type=int)
    with tm.utils.ExperimentSession(experiment_id) as session:
        # Sites ordered by ID so that job indices map deterministically.
        sites = session.query(tm.Site.id).order_by(tm.Site.id).all()
    logger.info(
        'get figure for module "%s" and job %d of experiment %d',
        module_name, job_id, experiment_id
    )
    jt = ImageAnalysisPipelineEngine(experiment_id)
    # NOTE(review): job_id indexes `sites` directly (0-based) — confirm the
    # UI sends 0-based job IDs; the run_jobs endpoint elsewhere uses j-1.
    fig_files = [
        m.build_figure_filename(jt.figures_location, sites[job_id].id)
        for m in jt.pipeline if m.name == module_name
    ]
    # Single error path for both "unknown module" and "figure file missing"
    # (the original duplicated this response in two branches).
    if fig_files and os.path.exists(fig_files[0]):
        return send_file(fig_files[0])
    return jsonify({
        'success': False,
        'error': 'No figure file found for module "%s"' % module_name
    })
def delete_project(experiment_id):
    '''Removes `.pipe` and `.handles` files from a given Jterator project.
    '''
    logger.info('delete jterator project of experiment %d', experiment_id)
    engine = ImageAnalysisPipelineEngine(experiment_id)
    engine.project.remove()
    return jsonify({'success': True})
def get_project(experiment_id):
    '''Gets the Jterator
    :class:`Project <tmlib.workflow.jterator.project.Project>`
    for a given experiment: a pipeline description ("pipe") together with
    the module descriptions ("handles").
    '''
    logger.info('get jterator project for experiment %d', experiment_id)
    engine = ImageAnalysisPipelineEngine(experiment_id)
    project_as_dict = engine.project.to_dict()
    return jsonify(jtproject=yaml.safe_dump(project_as_dict))
def get_job_output(experiment_id):
    '''Gets output generated by a previous submission.

    Returns ``output=None`` when no prior submission exists for the
    experiment.
    '''
    data = json.loads(request.data)
    # Untrusted client payload — parse with safe_load instead of yaml.load
    # to avoid constructing arbitrary Python objects.
    project = yaml.safe_load(data['project'])
    pipeline_description = PipelineDescription(**project['pipe']['description'])
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )
    try:
        jobs = gc3pie.retrieve_most_recent_task(experiment_id, 'jtui')
        output = _get_output(experiment_id, jobs)
        return jsonify(output=output)
    except IndexError:
        # No previous submission for this experiment.
        return jsonify(output=None)
def check_project(experiment_id):
    '''Checks pipeline and module descriptions.

    Instantiating :class:`ImageAnalysisPipelineEngine` performs the
    validation; any failure is reported as a 400 response.

    Raises
    ------
    MalformedRequestError
        when the pipeline check fails
    '''
    logger.info(
        'check description of jterator project of experiment %d',
        experiment_id
    )
    data = json.loads(request.data)
    # Untrusted client payload — parse with safe_load instead of yaml.load
    # to avoid constructing arbitrary Python objects.
    project = yaml.safe_load(data['project'])
    pipeline_description = PipelineDescription(
        **project['pipe']['description'])
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    try:
        jt = ImageAnalysisPipelineEngine(
            experiment_id,
            pipeline_description=pipeline_description,
            handles_descriptions=handles_descriptions,
        )
        return jsonify(success=True)
    except Exception as err:
        raise MalformedRequestError('Pipeline check failed:\n%s' % str(err))
def run_jobs(experiment_id):
    '''Runs one or more jobs of the current project with pipeline and module
    descriptions provided by the UI.

    This requires the pipeline and module descriptions to be saved to *pipe*
    and *handles* files, respectively.

    Raises
    ------
    MalformedRequestError
        when no images exist for a requested job
    '''
    logger.info(
        'submit jobs for jterator project of experiment %d', experiment_id
    )
    data = json.loads(request.data)
    job_ids = map(int, data['job_ids'])
    # Untrusted client payload — parse with safe_load instead of yaml.load
    # to avoid constructing arbitrary Python objects.
    project = yaml.safe_load(data['project'])
    pipeline_description = PipelineDescription(
        **project['pipe']['description'])
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )

    # 1. Delete figures and logs from previous submission
    # since they are not tracked per submission.
    jt.remove_previous_pipeline_output()
    # TODO: remove figure files of previous runs!!

    # 2. Build job descriptions
    channel_names = [
        ch.name for ch in jt.project.pipe.description.input.channels
    ]
    job_descriptions = list()
    with tm.utils.ExperimentSession(experiment_id) as session:
        sites = session.query(tm.Site.id).\
            order_by(tm.Site.id).\
            all()
        for j in job_ids:
            # NOTE(review): job IDs index `sites` directly (0-based) here,
            # while the other run_jobs variant uses j-1 — confirm which
            # convention the UI actually sends.
            site_id = sites[j].id
            image_file_count = session.query(tm.ChannelImageFile.id).\
                join(tm.Channel).\
                filter(tm.Channel.name.in_(channel_names)).\
                filter(tm.ChannelImageFile.site_id == site_id).\
                count()
            if image_file_count == 0:
                raise MalformedRequestError(
                    'No images found for job ID {j}.'.format(j=j))
            job_descriptions.append({'site_id': site_id, 'plot': True})

    with tm.utils.MainSession() as session:
        submission = tm.Submission(
            experiment_id=experiment_id, program='jtui',
            user_id=current_identity.id
        )
        session.add(submission)
        session.flush()

        SubmitArgs = get_step_args('jterator')[1]
        submit_args = SubmitArgs()
        job_collection = jt.create_debug_run_phase(submission.id)
        jobs = jt.create_debug_run_jobs(
            user_name=current_identity.name,
            batches=job_descriptions,
            job_collection=job_collection,
            verbosity=2,
            duration=submit_args.duration,
            memory=submit_args.memory,
            cores=submit_args.cores
        )

    # 3. Store jobs in session
    gc3pie.store_task(jobs)
    # session.remove(data['previousSubmissionId'])
    gc3pie.submit_task(jobs)
    return jsonify(submission_id=jobs.submission_id)
def update_project(experiment_id):
    '''
    .. http:put:: /api/experiments/(string:experiment_id)/workflow/jtproject

        Update a jterator project consisting of a
        :class:`PipelineDescription <tmlib.workflow.jterator.description.PipelineDescription>`
        and an optional
        :class:`HandleDescriptions <tmlib.workflow.jterator.description.HandleDescriptions>`
        for each module in the pipeline.

        **Example request**:

        .. sourcecode:: http

            Content-Type: application/json

            {
                "pipeline": {
                    "input": {
                        "channels": [
                            {
                                "name": "wavelength-1"
                            }
                        ]
                    },
                    "output": {},
                    "pipeline": [
                        {
                            "handles": ../handles/module1.handles.yaml,
                            "source": module1.py
                            "active": true
                        }
                    ]
                },
                "handles": {
                    "module1": {
                        "version": 0.1.0,
                        "input": [],
                        "output": []
                    },
                    ...
                }
            }

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "message": "ok"
            }

        :reqheader Authorization: JWT token issued by the server
        :statuscode 400: malformed request
        :statuscode 200: no error

    '''
    logger.info('update jterator project of experiment %d', experiment_id)
    data = json.loads(request.data)
    pipeline = data.get('pipeline')
    # "handles" is optional (see class description above); fall back to an
    # empty mapping instead of crashing with AttributeError on None.
    handles = data.get('handles') or {}
    if pipeline is None:
        # Report the documented 400 rather than a TypeError-induced 500.
        raise MalformedRequestError('Missing "pipeline" description.')
    logger.debug('read pipeline description')
    pipeline_description = PipelineDescription(**pipeline)
    handles_descriptions = dict()
    # items() instead of the Python-2-only iteritems() — works on both.
    for name, description in handles.items():
        logger.debug('read handles description for module "%s"', name)
        handles_descriptions[name] = HandleDescriptions(**description)
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )
    jt.project.save()
    return jsonify(message='ok')
def get_jterator_project(experiment_id):
    """
    .. http:get:: /api/experiments/(string:experiment_id)/workflow/jtproject

        Get a jterator project consisting of a
        :class:`PipelineDescription <tmlib.workflow.jterator.description.PipelineDescription>`
        and an optional
        :class:`HandleDescriptions <tmlib.workflow.jterator.description.HandleDescriptions>`.
        for each module of the pipeline.

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "data": {
                    "pipeline": {
                        "input": {
                            "channels": [
                                {
                                    "name": "wavelength-1"
                                }
                            ],
                            "objects": []
                        },
                        "output": {
                            "objects": []
                        },
                        "pipeline": [
                            {
                                "handles": ../handles/module1.handles.yaml,
                                "source": module1.py
                                "active": true
                            }
                        ]
                    },
                    "handles": {
                        "module1": {
                            "version": 0.1.0,
                            "input": [],
                            "output": []
                        },
                        ...
                    }
                }
            }

        :reqheader Authorization: JWT token issued by the server
        :statuscode 200: no error

    """
    logger.info('get jterator project of experiment %d', experiment_id)
    engine = ImageAnalysisPipelineEngine(experiment_id)
    pipeline_description = engine.project.pipe.description.to_dict()
    handles_descriptions = {}
    for handles_file in engine.project.handles:
        serialized = handles_file.to_dict()
        handles_descriptions[serialized['name']] = serialized['description']
    return jsonify(data={
        'pipeline': pipeline_description,
        'handles': handles_descriptions
    })
def run_jobs(experiment_id):
    '''Runs one or more jobs of the current project with pipeline and module
    descriptions provided by the UI.

    This requires the pipeline and module descriptions to be saved to *pipe*
    and *handles* files, respectively.

    Raises
    ------
    MalformedRequestError
        when no images exist for a requested job
    '''
    logger.info(
        'submit jobs for jterator project of experiment %d', experiment_id
    )
    data = json.loads(request.data)
    job_ids = map(int, data['job_ids'])
    # Untrusted client payload — parse with safe_load instead of yaml.load
    # to avoid constructing arbitrary Python objects.
    project = yaml.safe_load(data['project'])
    pipeline_description = PipelineDescription(**project['pipe']['description'])
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )

    # 1. Delete figures and logs from previous submission
    # since they are not tracked per submission.
    jt.remove_previous_pipeline_output()
    # TODO: remove figure files of previous runs!!

    # 2. Build job descriptions
    channel_names = [
        ch.name for ch in jt.project.pipe.description.input.channels
    ]
    object_names = [
        ob.name for ob in jt.project.pipe.description.input.objects
    ]
    job_descriptions = list()
    with tm.utils.ExperimentSession(experiment_id) as session:
        sites = session.query(tm.Site.id).\
            order_by(tm.Site.id).\
            all()
        for j in job_ids:
            site_id = sites[j-1].id  # user-input is expected between [1..]
            # A job is runnable if the site provides image files either for
            # one of the input channels or for one of the input objects.
            channel_file_count = session.query(tm.ChannelImageFile.id).\
                join(tm.Channel).\
                filter(tm.Channel.name.in_(channel_names)).\
                filter(tm.ChannelImageFile.site_id == site_id).\
                count()
            object_file_count = session.query(tm.ChannelImageFile.id).\
                join(tm.Site).\
                join(tm.Well).\
                join(tm.Plate).\
                join(tm.Experiment).\
                join(tm.MapobjectType).\
                filter(tm.MapobjectType.name.in_(object_names)).\
                filter(tm.ChannelImageFile.site_id == site_id).\
                count()
            if channel_file_count + object_file_count == 0:
                raise MalformedRequestError(
                    'No images found for job ID {j}.'
                    .format(j=j)
                )
            job_descriptions.append({'site_id': site_id, 'plot': True})

    with tm.utils.MainSession() as session:
        submission = tm.Submission(
            experiment_id=experiment_id, program='jtui',
            user_id=current_identity.id
        )
        session.add(submission)
        session.flush()

        SubmitArgs = get_step_args('jterator')[1]
        submit_args = SubmitArgs()
        job_collection = jt.create_debug_run_phase(submission.id)
        jobs = jt.create_debug_run_jobs(
            user_name=current_identity.name,
            batches=job_descriptions,
            job_collection=job_collection,
            verbosity=2,
            duration=submit_args.duration,
            memory=submit_args.memory,
            cores=submit_args.cores
        )

    # 3. Store jobs in session
    gc3pie.store_task(jobs)
    # session.remove(data['previousSubmissionId'])
    gc3pie.submit_task(jobs)
    return jsonify(submission_id=jobs.submission_id)