def process_tool_request(experiment_id):
    """
    .. http:post:: /api/experiments/(string:experiment_id)/tools/request

        Processes a generic tool request sent by the client.

        **Example request**:

        .. sourcecode:: http

            Content-Type: application/json

            {
                "tool_name": "Cluster Tool",
                "payload": any object,
                "session_uuid": string
            }

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "data": {
                    "submission_id": "MQ=="
                }
            }

        :reqheader Authorization: JWT token issued by the server
        :statuscode 200: no error
    """
    data = request.get_json()
    payload = data.get('payload', {})
    # NOTE(review): "session_uuid" is part of the documented request contract
    # but is not used anywhere below -- tool-session tracking is apparently
    # not implemented yet (dead prototype code for it was removed).
    session_uuid = data.get('session_uuid')
    tool_name = data.get('tool_name')
    logger.info('process request of tool "%s"', tool_name)
    manager = ToolRequestManager(
        experiment_id, tool_name, server_cfg.logging_verbosity
    )
    # Register the submission for the authenticated user, persist the tool
    # payload, and build the computational job before handing it to GC3Pie.
    submission_id, user_name = manager.register_submission(current_identity.id)
    manager.store_payload(payload, submission_id)
    job = manager.create_job(submission_id, user_name)
    gc3pie.store_task(job)
    gc3pie.submit_task(job)
    return jsonify(data={
        'submission_id': submission_id
    })
def submit_workflow(experiment_id):
    """
    .. http:post:: /api/experiments/(string:experiment_id)/workflow/submit

        Submit a workflow based on a ``WorkflowDescription``.
        Please refer to the respective class documention for more details on
        how to structure such a description object.

        **Example request**:

        .. sourcecode:: http

            Content-Type: application/json

            {
                "description": {...}
            }

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "message": "ok",
                "submission_id": 1
            }

        :reqheader Authorization: JWT token issued by the server
        :statuscode 200: no error
    """
    logger.info('submit workflow for experiment %d', experiment_id)
    data = request.get_json()
    with tm.utils.ExperimentSession(experiment_id) as session:
        experiment = _retrieve_experiment_or_abort(experiment_id, session)
        if 'description' in data:
            # Client provided a description: persist it so it becomes the
            # experiment's current workflow description.
            logger.info('use provided workflow description')
            workflow_description = WorkflowDescription(**data['description'])
            experiment.persist_workflow_description(workflow_description)
        else:
            # Fall back to the previously persisted description.
            # FIX: logger.warn() is deprecated in favor of logger.warning().
            logger.warning('no workflow description provided')
            logger.info('load workflow description')
            workflow_description = experiment.workflow_description
    # FIX: removed the unused local `workflow_type = experiment.workflow_type`.
    submission_manager = SubmissionManager(experiment_id, 'workflow')
    submission_id, user_name = submission_manager.register_submission()
    workflow = Workflow(
        experiment_id=experiment_id,
        verbosity=server_cfg.logging_verbosity,
        submission_id=submission_id,
        user_name=user_name,
        description=workflow_description
    )
    gc3pie.store_task(workflow)
    gc3pie.submit_task(workflow)
    return jsonify({'message': 'ok', 'submission_id': workflow.submission_id})
def process_tool_request(experiment_id):
    """
    .. http:post:: /api/experiments/(string:experiment_id)/tools/request

        Processes a generic tool request sent by the client.

        **Example request**:

        .. sourcecode:: http

            Content-Type: application/json

            {
                "tool_name": "Cluster Tool",
                "payload": any object,
                "session_uuid": string
            }

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "data": {
                    "submission_id": "MQ=="
                }
            }

        :reqheader Authorization: JWT token issued by the server
        :statuscode 200: no error
    """
    request_body = request.get_json()
    tool_payload = request_body.get('payload', {})
    # Currently unused; kept because it is part of the request contract.
    session_uuid = request_body.get('session_uuid')
    tool_name = request_body.get('tool_name')
    logger.info('process request of tool "%s"', tool_name)
    request_manager = ToolRequestManager(
        experiment_id, tool_name, server_cfg.logging_verbosity
    )
    # Record who submitted, persist the payload, and build the job.
    submission_id, user_name = request_manager.register_submission(
        current_identity.id
    )
    request_manager.store_payload(tool_payload, submission_id)
    tool_job = request_manager.create_job(submission_id, user_name)
    # Hand the job over to GC3Pie for persistence and execution.
    gc3pie.store_task(tool_job)
    gc3pie.submit_task(tool_job)
    return jsonify(data={'submission_id': submission_id})
def run_jobs(experiment_id):
    '''Runs one or more jobs of the current project with pipeline
    and module descriptions provided by the UI.

    This requires the pipeline and module descriptions to be saved to *pipe*
    and *handles* files, respectively.
    '''
    logger.info('submit jobs for jterator project of experiment %d',
        experiment_id)
    data = json.loads(request.data)
    job_ids = map(int, data['job_ids'])
    # NOTE(review): yaml.load() without an explicit Loader can execute
    # arbitrary constructors on untrusted input -- consider yaml.safe_load();
    # confirm the "project" payload only ever comes from a trusted UI.
    project = yaml.load(data['project'])
    pipeline_description = PipelineDescription(
        **project['pipe']['description'])
    # Map each handles entry by name to its parsed description object.
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )

    # 1. Delete figures and logs from previous submission
    # since they are not tracked per submission.
    jt.remove_previous_pipeline_output()
    # TODO: remove figure files of previous runs!!

    # 2. Build job descriptions
    channel_names = [
        ch.name for ch in jt.project.pipe.description.input.channels
    ]
    job_descriptions = list()
    with tm.utils.ExperimentSession(experiment_id) as session:
        # Sites ordered by ID so job indices map deterministically to sites.
        sites = session.query(tm.Site.id).\
            order_by(tm.Site.id).\
            all()
        for j in job_ids:
            # NOTE(review): indexes `sites` directly with j, i.e. assumes job
            # IDs are 0-based -- confirm the UI does not send 1-based IDs.
            site_id = sites[j].id
            # Count images at this site belonging to the pipeline's input
            # channels; a job without any input images is a client error.
            image_file_count = session.query(tm.ChannelImageFile.id).\
                join(tm.Channel).\
                filter(tm.Channel.name.in_(channel_names)).\
                filter(tm.ChannelImageFile.site_id == site_id).\
                count()
            if image_file_count == 0:
                raise MalformedRequestError(
                    'No images found for job ID {j}.'.format(j=j))
            job_descriptions.append({'site_id': site_id, 'plot': True})

    with tm.utils.MainSession() as session:
        submission = tm.Submission(experiment_id=experiment_id,
            program='jtui', user_id=current_identity.id)
        session.add(submission)
        # Flush so submission.id is assigned before it is used below.
        session.flush()
        SubmitArgs = get_step_args('jterator')[1]
        # Default submit arguments (duration, memory, cores) for jterator.
        submit_args = SubmitArgs()
        job_collection = jt.create_debug_run_phase(submission.id)
        jobs = jt.create_debug_run_jobs(user_name=current_identity.name,
            batches=job_descriptions,
            job_collection=job_collection,
            verbosity=2,
            duration=submit_args.duration,
            memory=submit_args.memory,
            cores=submit_args.cores)

    # 3. Store jobs in session
    gc3pie.store_task(jobs)
    # session.remove(data['previousSubmissionId'])
    gc3pie.submit_task(jobs)
    return jsonify(submission_id=jobs.submission_id)
def submit_workflow(experiment_id):
    """
    .. http:post:: /api/experiments/(string:experiment_id)/workflow/submit

        Submit a workflow based on a ``WorkflowDescription``.
        Please refer to the respective class documention for more details on
        how to structure such a description object.

        **Example request**:

        .. sourcecode:: http

            Content-Type: application/json

            {
                "description": {...}
            }

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "message": "ok",
                "submission_id": 1
            }

        :reqheader Authorization: JWT token issued by the server
        :statuscode 200: no error
    """
    logger.info('submit workflow for experiment %d', experiment_id)
    request_data = request.get_json()
    with tm.utils.ExperimentSession(experiment_id) as session:
        experiment = _retrieve_experiment_or_abort(experiment_id, session)
        if 'description' not in request_data:
            # No description supplied: fall back to the persisted one.
            logger.warn('no workflow description provided')
            logger.info('load workflow description')
            description = experiment.workflow_description
        else:
            # Persist the client-supplied description for this experiment.
            logger.info('use provided workflow description')
            description = WorkflowDescription(**request_data['description'])
            experiment.persist_workflow_description(description)
        workflow_type = experiment.workflow_type
    manager = SubmissionManager(experiment_id, 'workflow')
    submission_id, user_name = manager.register_submission()
    workflow = Workflow(
        experiment_id=experiment_id,
        verbosity=server_cfg.logging_verbosity,
        submission_id=submission_id,
        user_name=user_name,
        description=description
    )
    # Persist the workflow task, then hand it to GC3Pie for execution.
    gc3pie.store_task(workflow)
    gc3pie.submit_task(workflow)
    return jsonify({'message': 'ok', 'submission_id': workflow.submission_id})
def run_jobs(experiment_id):
    '''Runs one or more jobs of the current project with pipeline
    and module descriptions provided by the UI.

    This requires the pipeline and module descriptions to be saved to *pipe*
    and *handles* files, respectively.
    '''
    logger.info(
        'submit jobs for jterator project of experiment %d', experiment_id
    )
    data = json.loads(request.data)
    job_ids = map(int, data['job_ids'])
    # NOTE(review): yaml.load() without an explicit Loader can execute
    # arbitrary constructors on untrusted input -- consider yaml.safe_load();
    # confirm the "project" payload only ever comes from a trusted UI.
    project = yaml.load(data['project'])
    pipeline_description = PipelineDescription(**project['pipe']['description'])
    # Map each handles entry by name to its parsed description object.
    handles_descriptions = {
        h['name']: HandleDescriptions(**h['description'])
        for h in project['handles']
    }
    jt = ImageAnalysisPipelineEngine(
        experiment_id,
        pipeline_description=pipeline_description,
        handles_descriptions=handles_descriptions,
    )

    # 1. Delete figures and logs from previous submission
    # since they are not tracked per submission.
    jt.remove_previous_pipeline_output()
    # TODO: remove figure files of previous runs!!

    # 2. Build job descriptions
    channel_names = [
        ch.name for ch in jt.project.pipe.description.input.channels
    ]
    object_names = [
        ob.name for ob in jt.project.pipe.description.input.objects
    ]
    job_descriptions = list()
    with tm.utils.ExperimentSession(experiment_id) as session:
        # Sites ordered by ID so job numbers map deterministically to sites.
        sites = session.query(tm.Site.id).\
            order_by(tm.Site.id).\
            all()
        for j in job_ids:
            site_id = sites[j-1].id  # user-input is expected between [1..]
            # A job is valid if the site has input images for either the
            # pipeline's input channels or its input objects.
            image_file_count = 0
            image_file_count += session.query(tm.ChannelImageFile.id).\
                join(tm.Channel).\
                filter(tm.Channel.name.in_(channel_names)).\
                filter(tm.ChannelImageFile.site_id == site_id).\
                count()
            # NOTE(review): this join chain (Site -> Well -> Plate ->
            # Experiment -> MapobjectType) presumably counts files whose
            # experiment has mapobject types matching the input objects --
            # verify against the schema that this is the intended relation.
            image_file_count += session.query(tm.ChannelImageFile.id).\
                join(tm.Site).\
                join(tm.Well).\
                join(tm.Plate).\
                join(tm.Experiment).\
                join(tm.MapobjectType).\
                filter(tm.MapobjectType.name.in_(object_names)).\
                filter(tm.ChannelImageFile.site_id == site_id).\
                count()
            if image_file_count == 0:
                raise MalformedRequestError(
                    'No images found for job ID {j}.'
                    .format(j=j))
            job_descriptions.append({'site_id': site_id, 'plot': True})

    with tm.utils.MainSession() as session:
        submission = tm.Submission(
            experiment_id=experiment_id, program='jtui',
            user_id=current_identity.id
        )
        session.add(submission)
        # Flush so submission.id is assigned before it is used below.
        session.flush()
        SubmitArgs = get_step_args('jterator')[1]
        # Default submit arguments (duration, memory, cores) for jterator.
        submit_args = SubmitArgs()
        job_collection = jt.create_debug_run_phase(submission.id)
        jobs = jt.create_debug_run_jobs(
            user_name=current_identity.name,
            batches=job_descriptions,
            job_collection=job_collection,
            verbosity=2,
            duration=submit_args.duration,
            memory=submit_args.memory,
            cores=submit_args.cores
        )

    # 3. Store jobs in session
    gc3pie.store_task(jobs)
    # session.remove(data['previousSubmissionId'])
    gc3pie.submit_task(jobs)
    return jsonify(submission_id=jobs.submission_id)