def GetAugmentedWorkflow(workflowId):
    """Load a workflow by id and augment each of its blocks with the
    input/output port ids of the block definition from the BlockRepository.

    Blocks whose type is unknown to the repository are marked with
    ``"Status": "not found"`` instead of being augmented.

    :param workflowId: id understood by ``WorkflowRepsitory.getWorkflow``
    :return: the decoded workflow dict with augmented ``blocks``
    """
    workflowJson = WorkflowRepsitory.getWorkflow(workflowId)
    workflow = json.loads(workflowJson, object_hook=object_decoder)
    # iterate the block dicts directly; they are mutated in place
    for blockEntry in workflow["blocks"]:
        blockType = blockEntry["blockType"]
        block = BlockRepository.getBlockFromType(blockType)
        if block is None:
            # bug fix: message previously read "Could get block ..."
            LogWriter.logError("Could not get block from Repository: " + str(blockType))
            blockEntry["Status"] = "not found"
            continue
        blockEntry["Inputs"] = [inp.Id for inp in block.Inputs]
        blockEntry["Outputs"] = [out.Id for out in block.Outputs]
    return workflow
def getCommitsInternal(concatPath):
    """Return the git history of a file as ``[[revision, summary], ...]``.

    :param concatPath: git url and file path joined by ``GitHelper.seperatro``
    :return: list of [sha, "author, age : subject"] pairs; [] on failure.
             On Windows a hard-coded debug stub is returned (git/ssh does
             not work in the author's Windows debug environment).
    """
    tmp = concatPath.split(GitHelper.seperatro, 2)
    git_url = tmp[0]
    file_path = tmp[1]
    LogWriter.logInfo("System: " + str(platform.system()))
    if platform.system() == 'Windows':
        # under my windows debug environment git does not work, since it does not use ssh
        # return debug fill stuff
        return [[1, "just a test"], [2, "bar"], [3, "foo"]]
    try:
        repo_dir = tempfile.mkdtemp()
        git_url = GitHelper.checkAndModifyGitUrl(git_url)
        git.Repo.clone_from(git_url, repo_dir)
        g = git.Git(repo_dir)
        # bug fix: the format string had a stray unbalanced '"' which put a
        # leading quote on every line and forced stripping ALL quotes from
        # the subject, corrupting messages that legitimately contain '"'
        commits = g.log("--follow", '--pretty=format:%H - %an, %ar : %s', file_path).split("\n")
        result = []
        for c in commits:
            # bug fix: maxsplit=1 — commit subjects may themselves contain
            # dashes; splitting on every '-' dropped part of the message
            rev, other = c.split("-", 1)
            result.append([rev.strip(), other.strip()])
        return result
    except Exception as e:
        LogWriter.logError("getCommits failed: " + str(e))
        LogWriter.logError(traceback.format_exc())
        return []
def getBlocks():
    """Read and decode the block repository JSON file.

    :return: the decoded block collection, or [] when the file cannot
             be parsed (the error is logged)
    """
    with open(BlockRepository.getRepoPath()) as data_file:
        try:
            return json.load(data_file, object_hook=object_decoder)
        except Exception as e:
            # str(e) instead of e.message (Python-3 safe); dead trailing
            # `pass` removed
            LogWriter.logError(str(e))
            return []
def AnnotateDataSetsWithFiles(datasetIds, fileList):
    """Attach every file in *fileList* to the given datasets.

    Files that fail to annotate are logged and skipped, so the returned
    list of annotation ids may be shorter than *fileList*.
    """
    annotationIds = []
    for currentFile in fileList:
        try:
            annotationId = WorkflowExecutor.AnnotateDataSetsWithFile(
                datasetIds, currentFile)
            annotationIds.append(annotationId)
        except Exception as err:
            LogWriter.logError("+ Error annotating " + err.message)
            LogWriter.logError(traceback.format_exc())
    return annotationIds
def GetGitVersions(request, gitUrl, gitPath):
    """Django view: return the git history of *gitPath* in *gitUrl* as JSON.

    :param request: the Django request (unused, required by the URL router)
    :return: HttpResponse with ``{"commits": [...]}``; the list is empty
             when the lookup failed (the error is logged)
    """
    LogWriter.logInfo("GetGitVersions " + str(gitUrl) + " " + str(gitPath))
    res = {}
    # bug fix: `commits` was unbound when getCommits raised, producing a
    # NameError at res["commits"] instead of an empty result
    commits = []
    try:
        commits = GitHelper.getCommits(gitUrl, gitPath)
    except Exception as e:
        import traceback
        LogWriter.logError(str(e))
        LogWriter.logError(traceback.format_exc())
    res["commits"] = commits
    return HttpResponse(json.dumps(res), content_type="application/json")
def ToFileList(path):
    """Expand *path* into a flat list of file paths.

    - directory: every entry in it, joined with the directory path
    - regular file: a single-element list containing the path itself
    - anything else: logged as an error and [] returned
    """
    files = []
    if os.path.isdir(path):
        for name in os.listdir(path):
            # bug fix: os.path.join instead of plain concatenation — the
            # original produced broken paths when *path* had no trailing
            # separator (and is unchanged when it has one)
            files.append(os.path.join(path, name))
    elif os.path.isfile(path):
        files = [path]
    else:
        LogWriter.logError("Path is neither directory nor file: " + path)
    return files
def PrefixFiles(fileList, prefix):
    """Rename every file in *fileList* by prepending *prefix* to its basename.

    Failures are logged and skipped.

    :return: list of the new full paths of the successfully renamed files
    """
    renamedFiles = []
    for fullPath in fileList:
        # compute the target outside the try so the error log can never hit
        # an unbound `renamedFileFull`; os.path.join replaces manual "/"
        directoryName, fileName = os.path.split(fullPath)
        renamedFileFull = os.path.join(directoryName, prefix + fileName)
        try:
            os.rename(fullPath, renamedFileFull)
            renamedFiles.append(renamedFileFull)
        except Exception as e:
            # str(e) instead of e.message (OSError has no .message on Py3)
            LogWriter.logError("+ Error renaming " + str(e) + " " + fullPath
                               + " => " + renamedFileFull)
            LogWriter.logError(traceback.format_exc())
    return renamedFiles
def JoinSession(conn):
    """Open a second BlitzGateway connection that joins the session of *conn*.

    :param conn: existing connection exposing host, port and a session id
    :return: the joined BlitzGateway connection, or None on failure
    """
    try:
        LogWriter.logDebug("Connect To:" + str(conn.host) + " "
                           + str(conn.port) + " " + str(conn._getSessionId()))
        joined = BlitzGateway('OMERO.script', host=conn.host, port=conn.port)
        joined.connect(sUuid=conn._getSessionId())
        return joined
    except Exception as inst:
        LogWriter.logError("Connecting own session failed " + str(inst.message))
        LogWriter.logError(traceback.format_exc())
        return None
def GetAnnotationFileFromServer(self, fileId):
    """Download the file content of a FileAnnotation from the server.

    :param fileId: id of the FileAnnotation to fetch
    :return: tuple ``(name, data)``; ``data`` is None when nothing was
             downloaded or the download failed (the error is logged)
    """
    data = None
    name = ""
    try:
        ann = self.connection.getObject("FileAnnotation", fileId)
        name = ann.getFileName()
        for chunk in ann.getFileInChunks():
            if data is None:
                data = chunk
            else:
                data += chunk
        # bug fix: with zero chunks `data` stays None and len(data) raised
        # a TypeError that the old bare except silently turned into a
        # spurious "could not load" error
        size = len(data) if data is not None else 0
        LogWriter.logDebug("Downloaded " + str(name) + " " + str(size) + " size")
    except Exception:
        LogWriter.logError("Could not load file annotation" + str(traceback.format_exc()))
    return name, data
def AnnotateImagesWithFile(self, ids, filePath):
    """Create one file annotation from *filePath* and link it to every image.

    :param ids: a single image id or a sequence of image ids
    :param filePath: local path of the file to upload as annotation
    :return: the id of the created file annotation
    """
    # Py2/Py3 compatible Sequence lookup (collections.Sequence was removed
    # from the collections top level in Python 3.10)
    if not isinstance(ids, getattr(collections, "abc", collections).Sequence):
        ids = [ids]
    fileAnn = self.connection.createFileAnnfromLocalFile(
        filePath, mimetype="text/plain", ns=self.namespace, desc=None)
    for imageId in ids:
        try:
            image = self.connection.getObject("Image", imageId)
            if image is None:
                # bug fix: message said "DataSet" for an Image lookup and
                # raised TypeError on int ids (swallowed by the old bare
                # except), hiding the real problem
                LogWriter.logError("Image " + str(imageId) + " not found")
                continue
            image.linkAnnotation(fileAnn)
        except Exception:
            LogWriter.logError("Could not annotate id " + str(imageId)
                               + str(traceback.format_exc()))
    return fileAnn.getId()
def getCompileShellScript():
    """Locate the platform-specific jar-compile script.

    :return: absolute path of the script, or None when it is missing —
             in that case an empty placeholder file is created so an
             operator can fill it in
    """
    defaultScript = "compileJar.sh"
    if platform.system() == 'Windows':
        defaultScript = "compileJar.bat"
    LogWriter.logDebug("Absolut Path to Compile Script: " + str(os.path.abspath(defaultScript)))
    # isfile already implies exists; the duplicate check was redundant
    if os.path.isfile(defaultScript):
        return os.path.abspath(defaultScript)
    LogWriter.logError("could not find compile script. Create it: " + defaultScript)
    # create the empty placeholder with a context manager instead of
    # shadowing the `file` builtin with a manually closed handle
    with open(defaultScript, 'w+'):
        pass
    return None
def AnnotateImagesWithResultFiles(annotationDatasetIds, fullWorkingFolderOnCluster):
    """For each (fileName, imageId) entry, download the result file from the
    cluster working folder, attach it to the image, and delete the local
    copy. Missing result files are logged and skipped."""
    for entry in annotationDatasetIds:
        fileName = entry[0]
        WorkflowExecutor.DownloadFilesFromClusterToHere(
            fullWorkingFolderOnCluster + fileName, fileName)
        if not os.path.exists(fileName):
            LogWriter.logError(
                "Result file was not found => could not be annotated ("
                + fileName + ")")
            continue
        WorkflowExecutor.AnnotateImagesWithFile([entry[1]], fileName)
        os.remove(fileName)
def AnnotateDataSetsWithFile(self, ids, filePath):
    """Create one file annotation from *filePath* and link it to every dataset.

    :param ids: a single dataset id or a sequence of dataset ids
    :param filePath: local path of the file to upload as annotation
    :return: the id of the created file annotation
    """
    # Py2/Py3 compatible Sequence lookup (collections.Sequence was removed
    # from the collections top level in Python 3.10)
    if not isinstance(ids, getattr(collections, "abc", collections).Sequence):
        ids = [ids]
    fileAnn = self.connection.createFileAnnfromLocalFile(
        filePath, mimetype="text/plain", ns=self.namespace, desc=None)
    for dsId in ids:
        LogWriter.logDebug("Annotate Dataset " + str(dsId) + "with " + str(filePath))
        try:
            # NOTE(review): OMERO gateway object types are usually spelled
            # "Dataset"; "DataSet" may never match — confirm against callers.
            ds = self.connection.getObject("DataSet", dsId)
            if ds is None:
                # bug fix: raw id concatenation raised TypeError on int ids
                # (swallowed by the old bare except)
                LogWriter.logError("DataSet " + str(dsId) + " not found")
                continue
            ds.linkAnnotation(fileAnn)
            LogWriter.logDebug("Annotate Dataset " + str(dsId) + "with "
                               + str(filePath) + " Succsessfull")
        except Exception:
            LogWriter.logError("Could not annotate id " + str(dsId)
                               + str(traceback.format_exc()))
    return fileAnn.getId()
def compileOrGetFromCache(git_url, file_path, rev, name):
    """Fetch *name* at revision *rev* from git, compile it to a jar and
    return the jar's absolute path.

    :param git_url: repository url
    :param file_path: path of the java source inside the repository
    :param rev: git revision to fetch
    :param name: file name of the java source (with .java extension)
    :return: absolute path of the compiled jar, or None on any failure
    """
    scriptFile = JavaHelper.getCompileShellScript()
    if scriptFile is None:
        return None
    code = GitHelper.getFileStringFromRevision(git_url, file_path, rev)
    compileFolder = tempfile.mkdtemp()
    sourceFileName = compileFolder + "/" + name
    with open(sourceFileName, "w") as text_file:
        text_file.write(code)
    # strip the .java extension for the jar name
    name = name.replace(".java", "")
    params = [scriptFile, sourceFileName, compileFolder, name]
    LogWriter.logDebug("Call Compiler: " + str(params))
    try:
        compileOut = check_output(params)
    except Exception as e:
        import traceback
        LogWriter.logError(str(e))
        LogWriter.logError(traceback.format_exc())
        # bug fix: the old handler logged `compileOut`, which is unbound
        # when check_output raised — the resulting NameError masked the
        # real compiler error; bail out instead
        return None
    LogWriter.logDebug("Compiler out: " + str(compileOut))
    resultpath = compileFolder + "/" + name + ".jar"
    LogWriter.logDebug("Should have compiled to: " + str(resultpath))
    if os.path.isfile(resultpath):
        return os.path.abspath(resultpath)
    LogWriter.logDebug("Not Found: " + str(resultpath) + " \n Compiler output:\n" + str(compileOut))
    return None
def GetResultFromServer(self, fileId):
    """Download a workflow-result FileAnnotation and return its content
    as one line (newlines stripped).

    :param fileId: id of the FileAnnotation
    :return: the file content string, None when the annotation does not
             exist, or "" when the download failed (errors are logged)
    """
    data = ""
    try:
        ann = self.connection.getObject("FileAnnotation", fileId)
        if ann is None:
            # str(fileId): raw concatenation raised TypeError for int ids
            LogWriter.logError("Could not get file annotation with id " + str(fileId))
            return None
        tf = tempfile.NamedTemporaryFile(prefix="workflow", suffix=".json",
                                         delete=False)
        tf.close()  # close the handle before re-opening (Windows safety)
        try:
            with open(tf.name, 'w') as f:
                for chunk in ann.getFileInChunks():
                    f.write(chunk)
            with open(tf.name, 'r') as myfile:
                data = myfile.read().replace('\n', '')
        finally:
            # delete=False would otherwise leak one temp file per call
            os.remove(tf.name)
        LogWriter.logDebug(data)
    except Exception:
        LogWriter.logError("Could not load file annotation" + str(traceback.format_exc()))
    return data
def StartExcutionOnCluster(workflowAndParameterDic):
    """Run one workflow end-to-end on the cluster.

    Orchestrates the whole run: registers it in the RunRepository, prepares
    the working and deployment folders on the cluster, compiles and deploys
    the per-block plugin jars (plus their first tool dependency), downloads
    the input images/files, writes the workflow description file, starts the
    execution, then uploads and annotates the results and collects
    intermediates and statistics. Progress is reported throughout via
    ``RunRepository.updateStatusRun``.

    :param workflowAndParameterDic: decoded workflow dict; mutated in place
        ("runId" and "intermediateDataSet" entries are added)
    """
    try:
        hadErrors = False
        # a fresh uuid identifies this run everywhere (folders, repository)
        runId = str(uuid.uuid4())
        # NOTE(review): hard-coded dataset id for intermediates/statistics —
        # presumably server specific; should come from configuration. Confirm.
        intermediateDataSet = 751
        workflowAndParameterDic["runId"] = runId
        workflowAndParameterDic["intermediateDataSet"] = intermediateDataSet
        # keep an untouched copy of the parameters for reproducibility storage
        unmodiviedParams = copy.deepcopy(workflowAndParameterDic["parameters"])
        name = ""
        if ("name" in workflowAndParameterDic):
            name = workflowAndParameterDic["name"]
        RunRepository.registerRun(runId, name)
        workingFolderName = runId
        # create a temp folder on the cluster with this id
        workingFolderOnCluster = "omeroEnv/" + workingFolderName + "/"
        fullWorkingFolderOnCluster = WorkflowExecutor.serverInterface.GetHomePath() + workingFolderOnCluster
        fullDeploymentFolderName = WorkflowExecutor.serverInterface.GetHomePath() + "omeroEnv/deployment/"
        RunRepository.updateStatusRun(runId, "Deploy environment")
        WorkflowExecutor.CreateTempFolderOnServer(fullWorkingFolderOnCluster)
        WorkflowExecutor.CreateTempFolderOnServer(fullWorkingFolderOnCluster + "PluginDeploy/")
        WorkflowExecutor.CreateTempFolderOnServer(fullWorkingFolderOnCluster + "Tool/")
        LogWriter.logInfo("+ Check deployment and create deployment ")
        WorkflowExecutor.CheckOrDeployDeployment(fullDeploymentFolderName, fullWorkingFolderOnCluster)
        # compile and deploy a jar for every block that has a pinned version
        versions = workflowAndParameterDic["versions"]
        blocks = workflowAndParameterDic["blocks"]
        for i in range(0, len(versions)):
            # match the version entry to its block via the element id
            correspondingBlock = None
            for j in range(0, len(blocks)):
                if (blocks[j]["elementId"] == versions[i][0]):
                    correspondingBlock = blocks[j]
                    break
            if not correspondingBlock is None:
                git = correspondingBlock["GitRepo"]
                path = correspondingBlock["GitFilePath"]
                try:
                    pathToJar = JavaHelper.compileOrGetFromCache(git, path, versions[i][1], path)
                    if not pathToJar is None:
                        WorkflowExecutor.serverInterface.CopyToServer(fullWorkingFolderOnCluster + "PluginDeploy/", pathToJar)
                        # deploy the first declared tool dependency as well
                        toolStuff = JavaHelper.checkForToolDependencies(pathToJar)
                        if not toolStuff is None and len(toolStuff) > 0:
                            LogWriter.logInfo("ToolStuff " + str(toolStuff))
                            tool_file = JavaHelper.getToolFromVersionControl(toolStuff[0][0], toolStuff[0][1], toolStuff[0][2], toolStuff[0][3], toolStuff[0][4])
                            if not tool_file is None:
                                WorkflowExecutor.serverInterface.CopyToServer(fullWorkingFolderOnCluster + "Tool/", tool_file)
                            else:
                                LogWriter.logDebug("No Tool file downloaded")
                    else:
                        LogWriter.logError("No compiled file created")
                except Exception as e:
                    # a failing block compilation is logged but does not
                    # abort the run. NOTE(review): e.message is Python-2 only.
                    LogWriter.logError(e.message)
                    LogWriter.logError(traceback.format_exc())
        LogWriter.logInfo("+ Download Input files ")
        RunRepository.updateStatusRun(runId, "Download Inputs")
        imageIds = WorkFlowAnalyser.GetRequiredImageIdsFromWorkflow(workflowAndParameterDic)
        WorkflowExecutor.DownloadImageFilesToTempFolderOnCluster(fullWorkingFolderOnCluster, imageIds)
        fileIds = WorkFlowAnalyser.GetRequiredFileIdsFromWorkflow(workflowAndParameterDic)
        WorkflowExecutor.DownloadFilesToTempFolderOnCluster(fullWorkingFolderOnCluster, fileIds)
        # find Result images => import into Data Set
        imagesToUpLoadToDataSet = WorkFlowAnalyser.GetImageUploadsFromWorkflow(workflowAndParameterDic)
        imageNamesWithDatasetIds = WorkflowExecutor.CreateTempFileNamesForUploadsAndModifyParameters(imagesToUpLoadToDataSet, ".tiff")
        # find result file => image annotation
        dataFilesToAnnotate = WorkFlowAnalyser.GetImagesToAnnotateFromWorkflow(workflowAndParameterDic)
        dataFileNamesWithImageIds = WorkflowExecutor.CreateTempFileNamesForUploadsAndModifyParameters(dataFilesToAnnotate, ".txt")
        LogWriter.logInfo("+ Write Workflow file ")
        WorkflowExecutor.WriteWorkflowFileToCluster(workflowAndParameterDic, fullWorkingFolderOnCluster)
        # (a commented-out early `return` used to sit here as a debug stop
        # before the actual execution)
        LogWriter.logInfo("+ Start execution ")
        RunRepository.updateStatusRun(runId, "Execute Workflow")
        errorcode = WorkflowExecutor.StartWorkflowExecution(fullWorkingFolderOnCluster)
        if (errorcode > 0):
            LogWriter.logInfo("+ Error starting execution: ErrorCode " + str(errorcode))
            RunRepository.updateStatusRun(runId, "Failed to execute Workflow Manager with error code : " + str(errorcode))
            return
        LogWriter.logInfo("+ Upload Results")
        RunRepository.updateStatusRun(runId, "Upload results")
        try:
            ids = WorkflowExecutor.UploadFiles(imageNamesWithDatasetIds, fullWorkingFolderOnCluster)
            LogWriter.logDebug("+ Uploaded Created files as:" + str(ids))
        except:
            # best effort: a failed upload marks the run but does not abort it
            LogWriter.logDebug("+ Upload results faild")
            ids = []
            hadErrors = True
        WorkflowExecutor.AnnotateImagesWithResultFiles(dataFileNamesWithImageIds, fullWorkingFolderOnCluster)
        # annotate uploaded files with workflow description
        WorkFlowAnalyser.MergeReproducibilityParameters(unmodiviedParams, imageIds)
        resultAnnotationId = WorkflowExecutor.StoreWorkflowFile(ids, unmodiviedParams, workflowAndParameterDic)
        # get Intermediates from cluster
        RunRepository.updateStatusRun(runId, "Collect Intermediates")
        WorkflowExecutor.GetAndStoreIntermediates(fullWorkingFolderOnCluster, intermediateDataSet, runId)
        # get Statistics from cluster
        RunRepository.updateStatusRun(runId, "Collect Statistics")
        WorkflowExecutor.GetAndStoreStatistics(fullWorkingFolderOnCluster, intermediateDataSet, runId)
        # Cleanup — TODO: not implemented (the cluster working folder remains)
        LogWriter.logInfo("+ Finished Execution ")
        RunRepository.FinishRun(runId, resultAnnotationId, hadErrors)
    except Exception as e:
        # NOTE(review): if the failure happened before runId was assigned,
        # this handler itself raises NameError; e.message is Python-2 only.
        RunRepository.updateStatusRun(runId, "Failed with " + e.message)
        LogWriter.logError(e.message)
        LogWriter.logError(traceback.format_exc())