def get(self, request, workflow_id):
    """ Export workflow to zip file """
    jms = JMS(user=request.user)

    workflow = jms.GetWorkflow(workflow_id)
    serializer = WorkflowDetailSerializer(workflow)

    json_path = "/tmp/workflow_%s_%s.json" % (str(workflow_id), request.user.username)
    with open(json_path, 'w') as f:
        f.write(JSONRenderer().render(serializer.data))

    zip_path = '/tmp/workflow_%s_%s.zip' % (str(workflow_id), request.user.username)
    workflow_dir = '%s/workflows/%s/' % (jms.base_dir, str(workflow_id))

    with ZipFile(zip_path, 'w') as myzip:
        myzip.write(json_path, "workflow.json")
        for root, dirs, files in os.walk(workflow_dir):
            for f in files:
                myzip.write(os.path.join(root, f), "scripts/%s" % f)

    wrapper = FileWrapper(file(zip_path))
    response = HttpResponse(wrapper, content_type='application/force-download')
    response['Content-Length'] = os.path.getsize(zip_path)
    response['Content-Disposition'] = 'attachment; filename=workflow_%s_%s.zip' % (
        str(workflow_id), request.user.username)
    return response
def delete(self, request, job_id, user_id):
    """ Remove user's access to job """
    jms = JMS(user=request.user)
    jms.DeleteUserJobAccessRight(job_id, user_id)
    return Response()
def get(self, request, job_id, download_type, type_id):
    """ Fetch a job file """
    jms = JMS(user=request.user)

    filename = ''
    if download_type.lower() == "parameters":
        param = JobStageParameter.objects.get(
            Parameter__ParameterID=type_id, JobStage__Job__JobID=job_id)
        filename = param.Value
    elif download_type.lower() == "outputs":
        output = ExpectedOutput.objects.get(pk=type_id)
        filename = output.ExpectedOutputFileName

    path = "%s/%s" % (job_id, filename)

    job = jms.GetJob(job_id)
    filepath = os.path.join(jms.users_dir, job.User.username + "/jobs/" + path)

    wrapper = FileWrapper(file(filepath))
    response = HttpResponse(wrapper, content_type='application/force-download')
    response['Content-Length'] = os.path.getsize(filepath)
    response['Content-Disposition'] = 'attachment; filename=%s' % filename
    return response
def delete(self, request, job_id, group_id):
    """ Remove group's access to job """
    jms = JMS(user=request.user)
    jms.DeleteGroupJobAccessRight(job_id, group_id)
    return Response()
def post(self, request, upload_type, type_id):
    """ Uploads files to the server and returns the list of files for a job or workflow """
    jms = JMS(user=request.user)

    if upload_type == 'jobs':
        rootpath = os.path.join(
            jms.users_dir, request.user.username + '/jobs/' + type_id + '/')
    elif upload_type == 'workflows':
        rootpath = os.path.join(jms.base_dir, 'workflows/' + type_id + '/')
    else:
        return Response(status=404)

    jms.createJobDir(rootpath)

    for k, v in request.FILES.iteritems():
        for f in request.FILES.getlist(k):
            with open(os.path.join(rootpath, f.name), 'wb+') as destination:
                for chunk in f.chunks():
                    destination.write(chunk)
            os.chmod(os.path.join(rootpath, f.name), 0777)

    return Response(os.listdir(rootpath), status=200)
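# A minimal client-side sketch for the upload endpoint above, assuming the
# `requests` library and a hypothetical URL pattern of the form
# /api/jms/files/<upload_type>/<type_id>/ (the real route lives in urls.py,
# which is not shown here); token authentication is also an assumption.
import requests

def upload_job_file(server, token, job_id, filepath):
    # POST the file as multipart/form-data; the view writes each uploaded
    # file into the job's directory and returns the directory listing.
    url = "%s/api/jms/files/jobs/%s/" % (server, job_id)
    with open(filepath, 'rb') as fh:
        r = requests.post(url, files={'file': fh},
                          headers={'Authorization': 'Token %s' % token})
    return r.json()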
def delete(self, request, workflow_id, group_id):
    """ Remove access to workflow for group """
    jms = JMS(user=request.user)
    jms.DeleteGroupWorkflowAccessRight(workflow_id, group_id)
    return Response()
def put(self, request, batch_job_id):
    """ Start batch job """
    jms = JMS(user=request.user)
    jms.StartBatchJob(batch_job_id)
    return Response()
def run(self):
    count = 1
    while True:
        with open("/tmp/queue-daemon.txt", "w") as f:
            print >> f, str(count)
            count += 1

            try:
                process = subprocess.Popen(
                    "qstat -x", shell=True, stdout=subprocess.PIPE)
                out, err = process.communicate()

                data = objectify.fromstring(out)

                jms = JMS()
                for job in data.Job:
                    print >> f, job.Job_Id
                    try:
                        jms.AddUpdateClusterJob(job)
                    except Exception, err:
                        print >> f, ''.join(
                            traceback.format_exception(*sys.exc_info()))

                # Reset database connection to avoid "MySQL has gone away" error
                # after daemon has been running for a long time
                db.close_connection()
            except Exception, err:
                print >> f, "Error: " + str(err)
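# For reference, `qstat -x` on a Torque/PBS cluster emits job details as XML
# roughly of the shape sketched below (element names other than Job and Job_Id
# are illustrative, not taken from this codebase); objectify.fromstring()
# then exposes it as data.Job[i].Job_Id and so on.
#
# <Data>
#   <Job>
#     <Job_Id>1234.headnode</Job_Id>
#     <Job_Name>my_job</Job_Name>
#     <job_state>R</job_state>
#   </Job>
# </Data>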
def delete(self, request, job_id):
    """ Delete a job from history """
    jms = JMS(user=request.user)
    jms.DeleteJob(job_id)
    return Response(status=200)
def delete(self, request, batch_job_id):
    """ Delete batch job - deletes all individual jobs as well """
    jms = JMS(user=request.user)
    jms.DeleteBatchJob(batch_job_id)
    return Response()
def post(self, request):
    """ Submit a new job """
    job = lambda: None
    job.__dict__ = json.loads(request.body)

    name = job.JobName
    workflow_id = job.WorkflowID
    user = request.user
    description = job.Description

    stages = []
    for s in job.Stages:
        stage = objects.JobStageInput(
            s["StageID"], s["StageName"], s["Parameters"], s["RequiresEdit"],
            s["Queue"], s["Nodes"], s["MaxCores"], s["Memory"], s["Walltime"])
        stages.append(stage)

    jms = JMS(user=request.user)
    job_id = jms.CreateWorkflowJob(name, workflow_id, description, stages)

    return Response(job_id, status=200)
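# The request body parsed above is expected to look roughly like the sketch
# below (field names are taken from the attribute and key accesses in this
# method; the values themselves are purely illustrative):
#
# {
#     "JobName": "my_analysis",
#     "WorkflowID": 12,
#     "Description": "Example run",
#     "Stages": [
#         {"StageID": 3, "StageName": "align", "Parameters": [],
#          "RequiresEdit": false, "Queue": "batch", "Nodes": "1",
#          "MaxCores": 4, "Memory": 8, "Walltime": "02:00:00"}
#     ]
# }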
def put(self, request, profile_id):
    """ Update an input profile """
    input_profile = lambda: None
    input_profile.__dict__ = json.loads(request.body)

    with transaction.atomic():
        jms = JMS(user=request.user)
        profile = jms.UpdateInputProfile(
            InputProfileID=profile_id,
            InputProfileName=input_profile.InputProfileName,
            Description=input_profile.Description)

        # delete current profile parameters
        for ip in profile.InputProfileParameters.all():
            ip.delete()

        # replace deleted params with new ones
        for ip in input_profile.InputProfileParameters:
            profile_param = jms.CreateInputProfileParameter(
                InputProfileID=profile.InputProfileID,
                ParameterID=ip["ParameterID"],
                Value=ip["Value"])

    return Response()
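# The PUT body deserialized above is expected to carry roughly the following
# fields (names come from the accesses in this method; values are illustrative):
#
# {
#     "InputProfileName": "default",
#     "Description": "Example profile",
#     "InputProfileParameters": [
#         {"ParameterID": 7, "Value": "input.fasta"}
#     ]
# }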
def delete(self, request, profile_id):
    """ Delete an input profile """
    jms = JMS(user=request.user)
    jms.DeleteInputProfile(profile_id)
    return Response()
def delete(self, request, workflow_id):
    """ Delete workflow by WorkflowID """
    jms = JMS(user=request.user)
    jms.DeleteWorkflow(workflow_id)
    return Response()
def post(self, request, batch_job_id, file_type):
    """ Uploads batch files """
    jms = JMS(user=request.user)

    if file_type == 'batch':
        rootpath = os.path.join(
            jms.users_dir, jms.user.username + '/jobs/batch_jobs/' + batch_job_id + '/')
    elif file_type == 'input':
        rootpath = os.path.join(
            jms.users_dir, jms.user.username + '/jobs/batch_jobs/' + batch_job_id + '/files')
    else:
        return Response(status=404)

    jms.createJobDir(rootpath)

    for k, v in request.FILES.iteritems():
        for f in request.FILES.getlist(k):
            with open(os.path.join(rootpath, f.name), 'wb+') as destination:
                for chunk in f.chunks():
                    destination.write(chunk)
            os.chmod(os.path.join(rootpath, f.name), 0777)

    return Response(os.listdir(rootpath), status=200)
def delete(self, request, workflow_id, user_id):
    """ Remove access to workflow for user """
    jms = JMS(user=request.user)
    jms.DeleteUserWorkflowAccessRight(workflow_id, user_id)
    return Response()
def get(self, request):
    """ Get nodes """
    jms = JMS(user=request.user)
    nodes = jms.GetNodes()
    return Response(json.dumps(nodes, default=lambda o: o.__dict__))
def get(self, request):
    """ Get server settings """
    jms = JMS(user=request.user)
    settings = jms.GetServerSettings()
    return Response(json.dumps(settings, default=lambda o: o.__dict__))
def get(self, request, workflow_id):
    """ Fetch workflow by WorkflowID """
    jms = JMS(user=request.user)
    data = jms.GetWorkflow(workflow_id)
    serializer = WorkflowDetailSerializer(data)
    return Response(serializer.data)
def get(self, request, batch_job_id):
    """ Fetch batch job details """
    jms = JMS(user=request.user)
    job = jms.GetBatchJob(batch_job_id)
    serializer = BatchJobSerializer(job)
    return Response(serializer.data)
def get(self, request, profile_id):
    """ Fetch an input profile """
    jms = JMS(user=request.user)
    profile = jms.GetInputProfile(profile_id)
    serializer = InputProfileDetailSerializer(profile)
    return Response(serializer.data)
def delete(self, request, job_id, comment_id):
    """ Delete a comment - must be your comment unless you have admin privileges for the job """
    jms = JMS(user=request.user)
    response_code = jms.DeleteComment(comment_id, request.user)
    return Response(status=response_code)
def get(self, request):
    """ Get all batch jobs """
    jms = JMS(user=request.user)
    jobs = jms.GetBatchJobs()
    serializer = BatchJobSerializer(jobs, many=True)
    return Response(serializer.data)
def get(self, request, job_id):
    """ Fetch a job based on its ID """
    jms = JMS(user=request.user)
    response = jms.GetJob(job_id)
    serializer = JobDetailSerializer(response)
    return Response(serializer.data)
def delete(self, request, cluster_id):
    """ Stop a job running on the cluster """
    jms = JMS(user=request.user)
    code = jms.StopClusterJob(cluster_id)
    return Response(status=200)
def put(self, request, job_id):
    """ Start a created job """
    jms = JMS(user=request.user)
    jms.StartJob(job_id)
    return Response()
def get(self, request):
    """ Fetch all jobs for user """
    jms = JMS(user=request.user)
    jobs = jms.GetJobs()
    serializer = JobSerializer(jobs, many=True)
    return Response(serializer.data)
def get(self, request):
    """ Get queue and node usage statistics for all nodes in the cluster """
    jms = JMS(user=request.user)
    dashboard = jms.GetDashboard()
    return Response(
        json.dumps(dashboard, default=lambda o: o.__dict__, sort_keys=True))
def put(self, request, job_stage_id):
    """ Continue a job that has been put in a held state """
    jms = JMS(user=request.user)
    jobstage = jms.GetJobStage(job_stage_id)
    jms.ContinueStage(jobstage)
    return Response(status=status.HTTP_200_OK)
def get(self, request, cluster_id):
    """ Get details of a job running on the cluster """
    jms = JMS(user=request.user)
    job = jms.GetClusterJob(job_id=cluster_id)
    serializer = ClusterJobSerializer(job)
    return Response(serializer.data)
def put(self, request, upload_type, type_id, file_name):
    """ Update a job or workflow file """
    jms = JMS(user=request.user)

    if upload_type == 'jobs':
        rootpath = os.path.join('/jobs/', type_id)
    elif upload_type == 'workflows':
        rootpath = os.path.join(jms.base_dir, 'workflows/' + type_id)
    else:
        return Response(status=404)

    jms.createJobDir(rootpath)

    path = os.path.join(rootpath, file_name)
    jms.CreateFile(path, request.body)

    return Response(os.listdir(rootpath), status=200)
def post(self, request):
    """ Create a new workflow or update an existing one """
    # parse POST json
    workflow = lambda: None
    workflow.__dict__ = json.loads(request.body)

    jms = JMS(user=request.user)

    workflowID = workflow.WorkflowID

    with transaction.atomic():
        # if the workflow ID is <= 0, this is a new workflow that we must create
        if workflow.WorkflowID <= 0:
            # Create workflow
            workflowID = jms.CreateWorkflow(
                WorkflowName=workflow.WorkflowName,
                Description=workflow.Description)

            # create workflow stages
            map_ids = {}
            map_param_ids = {}
            for stage in workflow.Stages:
                CreateStage(stage, workflowID, map_ids, map_param_ids, jms)

        # if the workflow ID is > 0, this workflow already exists in the database
        # and we must update it
        elif workflow.WorkflowID > 0:
            old_workflow = jms.GetWorkflow(workflow.WorkflowID)

            # Update workflow
            jms.UpdateWorkflow(
                WorkflowID=workflow.WorkflowID,
                WorkflowName=workflow.WorkflowName,
                Description=workflow.Description)

            stages = jms.GetStages(workflow.WorkflowID)

            # delete removed stages
            for old_stage in stages:
                include = False
                for current_stage in workflow.Stages:
                    if current_stage["StageID"] == old_stage.StageID:
                        include = True
                        break

                if not include:
                    jms.DeleteStage(StageID=old_stage.StageID)

            map_ids = {}
            map_param_ids = {}

            # update existing stages and create new stages
            for current_stage in workflow.Stages:
                stage_exists = False
                for s in old_workflow.Stages.all():
                    if s.StageID == current_stage["StageID"]:
                        stage_exists = True
                        break

                if stage_exists:
                    # update stage
                    command = current_stage["Command"]

                    jms.UpdateStage(
                        StageID=current_stage["StageID"],
                        StageName=current_stage["StageName"],
                        StageTypeID=current_stage["StageType"],
                        WorkflowID=workflow.WorkflowID,
                        Command=command,
                        StageIndex=current_stage["StageIndex"],
                        Queue=current_stage["Queue"],
                        MaxCores=current_stage["MaxCores"],
                        Nodes=current_stage["Nodes"],
                        Memory=current_stage["Memory"],
                        Walltime=current_stage["Walltime"])

                    map_ids[current_stage["StageID"]] = current_stage["StageID"]

                    deps = jms.GetStageDependencies(current_stage["StageID"])

                    # delete dependencies
                    for old_dep in deps:
                        jms.DeleteStageDependency(
                            StageDependencyID=old_dep.StageDependencyID)

                    # add dependencies
                    for current_dep in current_stage["StageDependencies"]:
                        jms.CreateStageDependency(
                            StageID=current_stage["StageID"],
                            DependantOnID=map_ids[current_dep["StageID"]],
                            ConditionID=current_dep["ConditionID"],
                            ExitCodeValue=current_dep["Value"])

                    params = jms.GetParameters(current_stage["StageID"])

                    # delete removed parameters
                    for old_param in params:
                        # if old_param doesn't exist in the new set of parameters,
                        # delete it from the DB
                        Include = RecursiveFindParameter(
                            current_stage["Parameters"], old_param)
                        if not Include:
                            jms.DeleteParameter(ParameterID=old_param.ParameterID)

                    # loop through parameters, updating existing parameters and
                    # creating new ones
                    for current_param in current_stage["Parameters"]:
                        param_exists = True
                        try:
                            jms.GetParameter(ParameterID=current_param["ParameterID"])
                        except Exception, e:
                            param_exists = False

                        val = current_param["Value"]
                        if not current_param["Type"]:
                            current_param["Type"] = 1
                        elif current_param["Type"] == 6:
                            val = map_param_ids[int(val)]

                        if param_exists:
                            # recursively update parameters
                            RecursiveUpdateParameters(
                                current_param, map_param_ids,
                                current_stage["StageID"], jms)
                        else:
                            # create parameter
                            RecursiveCreateParameters(
                                current_param, map_param_ids,
                                current_stage["StageID"], jms)

                    outputs = jms.GetExpectedOutputs(current_stage["StageID"])

                    # delete removed outputs
                    for old_out in outputs:
                        Include = False
                        for current_out in current_stage["ExpectedOutputs"]:
                            if current_out["ExpectedOutputID"] == old_out.ExpectedOutputID:
                                Include = True
                                break

                        if not Include:
                            jms.DeleteExpectedOutput(
                                ExpectedOutputID=old_out.ExpectedOutputID)

                    # update outputs
                    for current_output in current_stage["ExpectedOutputs"]:
                        if current_output["ExpectedOutputID"] > 0:
                            # update output
                            jms.UpdateExpectedOutput(
                                ExpectedOutputID=current_output["ExpectedOutputID"],
                                ExpectedOutputFileName=current_output["ExpectedOutputFileName"])
                        else:
                            # create output
                            output_id = jms.CreateExpectedOutput(
                                ExpectedOutputFileName=current_output["ExpectedOutputFileName"],
                                StageID=current_stage["StageID"])
                else:
                    # stage doesn't exist so create it
                    CreateStage(current_stage, workflowID, map_ids, map_param_ids, jms)

    # create the directory where the workflow scripts will be stored
    jms.createJobDir(jms.base_dir + "/workflows/" + str(workflowID))