def getCommitsInternal(concatPath):
    """Clone the repo encoded in *concatPath* and return the commit history
    of one file as [[hash, "author, age : subject"], ...].

    *concatPath* is "<git url><separator><file path>" joined with
    GitHelper.seperatro. Returns [] on any failure.
    """
    tmp = concatPath.split(GitHelper.seperatro, 2)
    git_url = tmp[0]
    file_path = tmp[1]
    LogWriter.logInfo("System: " + str(platform.system()))
    if platform.system() == 'Windows':
        # under my windows debug environment git does not work, since it does not use ssh
        # return debug fill stuff
        return [[1, "just a test"], [2, "bar"], [3, "foo"]]
    try:
        # NOTE(review): the mkdtemp clone directory is never removed -- TODO cleanup
        repo_dir = tempfile.mkdtemp()
        git_url = GitHelper.checkAndModifyGitUrl(git_url)
        repo = git.Repo.clone_from(git_url, repo_dir)
        g = git.Git(repo_dir)
        # --follow tracks the file across renames
        commits = g.log("--follow", '--pretty=format:"%H - %an, %ar : %s',
                        file_path).split("\n")
        result = []
        for c in commits:
            # bug fix: split only on the FIRST dash -- author names, relative
            # dates and commit subjects may themselves contain '-', and the
            # old c.split("-")[1] silently truncated them
            parts = c.split("-", 1)
            rev = parts[0].strip().replace("\"", "")
            other = parts[1].strip().replace("\"", "")
            result.append([rev, other])
        return result
    except Exception as e:
        # str(e) instead of e.message: e.message is deprecated in Python 2
        # and gone in Python 3
        LogWriter.logError("getCommits failed: " + str(e))
        LogWriter.logError(traceback.format_exc())
        return []
def ExecuteCommandOnServerOutput2(self, command):
    """Run *command* on the cluster via ssh and return its console output.

    On a non-zero exit status the captured output (stringified) is returned
    instead of raising, so callers always get the console text.
    """
    LogWriter.logInfo("ssh " + self.clusterUserAndAddress + " " + command)
    try:
        consoleOut = check_output(["ssh", self.clusterUserAndAddress, command])
        LogWriter.logDebug("Console out :\n" + str(consoleOut))
        return consoleOut
    # bug fix: 'except CalledProcessError, e:' is Python-2-only syntax;
    # 'as' is valid on Python 2.6+ and required on Python 3
    except CalledProcessError as e:
        LogWriter.logDebug("Console out (" + str(e.returncode) + "):\n" +
                           str(e.output))
        return str(e.output)
def GetGitVersions(request, gitUrl, gitPath):
    """Django view: return the commit list for *gitPath* in *gitUrl* as JSON.

    Response body: {"commits": [...]}; the list is empty when the lookup fails.
    """
    LogWriter.logInfo("GetGitVersions " + str(gitUrl) + " " + str(gitPath))
    res = {}
    # bug fix: 'commits' was undefined (NameError) below whenever getCommits
    # raised; default to an empty list so the view always answers
    commits = []
    try:
        commits = GitHelper.getCommits(gitUrl, gitPath)
    except Exception as e:
        import traceback
        # str(e) instead of e.message (Python-2-only attribute)
        LogWriter.logError(str(e))
        LogWriter.logError(traceback.format_exc())
    res["commits"] = commits
    return HttpResponse(json.dumps(res), content_type="application/json")
def WriteWorkflowFileToCluster(cls, workflowAndParameterDic, fullWorkingFolderOnCluster):
    """Serialize the workflow/parameter dict to a local JSON file and copy
    that file into the run's working folder on the cluster."""
    LogWriter.logInfo("ParameterDir: " + str(workflowAndParameterDic))
    stagingFolder = WorkflowExecutor.CreateTempLocalFolder()
    workflowFilePath = stagingFolder + "/" + WorkflowExecutor.jsonParameterFileName
    LogWriter.logDebug("Temp Workflow file : " + workflowFilePath)
    with open(workflowFilePath, 'w') as handle:
        json.dump(workflowAndParameterDic, handle)
    WorkflowExecutor.serverInterface.CopyToServer(
        fullWorkingFolderOnCluster, workflowFilePath)
def CreateTempFileNamesForUploadsAndModifyParameters(
        self, filesToUpLoad, extension):
    """Assign a random temp file name to every upload entry.

    Each element of *filesToUpLoad* is a [blockId, portName, value, direction]
    list; it is rewritten IN PLACE into a 'Value'/'out' parameter carrying the
    generated temp file name (uuid4 + *extension*).

    Returns [[tempFileName, originalValue], ...] so the caller can map each
    temp file back to its original target.
    """
    result = []
    # idiom fix: truthiness test instead of len(...) == 0
    if not filesToUpLoad:
        LogWriter.logInfo("no files to upload")
        return result
    # idiom fix: iterate the entries directly instead of range(len(...));
    # the entries are lists, so in-place mutation still updates the caller
    for entry in filesToUpLoad:
        tempFileName = str(uuid.uuid4()) + extension
        result.append([tempFileName, entry[2]])
        entry[1] = "Value"
        entry[2] = tempFileName
        entry[3] = "out"
    return result
def GetAndStoreStatistics(fullWorkingFolderOnCluster, intermediateDataSet, runId):
    # Fetch the run's statistics.txt from the cluster into a local temp folder,
    # rename it with a run-specific prefix, attach it to the intermediate data
    # set, and remove the downloaded file again.
    tempStatisticsPath = tempfile.mkdtemp()
    LogWriter.logInfo("+ Copy Statistics to " + str(tempStatisticsPath))
    WorkflowExecutor.DownloadFilesFromClusterToHere(
        fullWorkingFolderOnCluster + "statistics/statistics.txt",
        tempStatisticsPath, True)
    tempIntermediatePath = tempStatisticsPath + "/statistics.txt"
    RunRepository.updateStatusRun(runId, "Upload Statistics")
    files = WorkflowExecutor.ToFileList(tempIntermediatePath)
    # the runId prefix makes the statistics file name unique per run
    renamedFiles = WorkflowExecutor.PrefixFiles(
        files, "statistics_" + str(runId) + "_")
    WorkflowExecutor.AnnotateDataSetsWithFiles([intermediateDataSet],
                                               renamedFiles)
    # NOTE(review): only statistics.txt is removed here; the mkdtemp folder
    # itself appears to be left behind -- confirm whether that is intended
    WorkflowExecutor.RemoveFilesOrFolder(tempIntermediatePath)
def getServerInterface(omeroHost, clusterUserAndAddress, sessionId, homepath,
                       mpipath, additionalRunparameter=None):
    """Lazily create and return the singleton deployment interface.

    The first call constructs either an SshDeploymentInterface or a
    DummyDeploymentInterface depending on InterfaceFactory.useOmero and caches
    it in InterfaceFactory.interfaceToUse; later calls return the cached
    instance (their arguments are ignored).
    """
    # idiom fix: 'is None' instead of '== None' for the None check
    if InterfaceFactory.interfaceToUse is None:
        if InterfaceFactory.useOmero:
            LogWriter.logInfo("Create OmeroInterface ")
            InterfaceFactory.interfaceToUse = SshDeploymentInterface(
                omeroHost, clusterUserAndAddress, sessionId, homepath,
                mpipath, additionalRunparameter)
        else:
            LogWriter.logInfo("Create Dummy OmeroInterface ")
            InterfaceFactory.interfaceToUse = DummyDeploymentInterface(
                omeroHost, clusterUserAndAddress, sessionId, homepath,
                mpipath, additionalRunparameter)
    return InterfaceFactory.interfaceToUse
def ExecuteRun(request, workflowId, conn=None, **kwargs):
    """Django view: parse run parameters from the query string, augment the
    workflow and start its execution on the cluster in a background thread.

    Query keys of the form "<blockId><sep><portName>" become parameters,
    "...<sep>Version" entries become version pins, and "intermediates=record"
    enables intermediate recording. Returns "started" or "start failed ...".
    """
    LogWriter.logInfo(
        "####################################################################")
    LogWriter.logInfo("Run Workflow " + workflowId)
    blockPortSeperator = WorkFlowAnalyser.getBlockPortSeperator()
    parameters = []
    versions = []
    recordIntermediates = False
    for key, value in request.GET.items():
        if key == "intermediates":
            recordIntermediates = value == "record"
            continue
        blockPort = key.split(blockPortSeperator, 2)
        # robustness fix: query keys without the separator (e.g. unrelated GET
        # params) previously raised IndexError; skip them instead
        if len(blockPort) < 2:
            continue
        if blockPort[1] == "Version":
            versions.append([blockPort[0], value])
            continue
        parameters.append([blockPort[0], blockPort[1], value, "in"])
    workflow = GetAugmentedWorkflow(workflowId)
    workflow["parameters"] = parameters
    workflow["intermediates"] = recordIntermediates
    workflow["versions"] = versions
    # create own session because the given one will be disconnected
    myConn = JoinSession(conn)
    WorkflowExecutor.SetConnectionObject(myConn)
    try:
        execution_thread = threading.Thread(
            target=WorkflowExecutor.StartExcutionOnCluster, args=[workflow])
        execution_thread.start()
        # keep a reference to the thread in the module-level list
        work.append(execution_thread)
        return HttpResponse("started")
    except Exception as inst:
        # bug fix: inst.message is Python-2-only; str(inst) works everywhere
        return HttpResponse("start failed " + str(inst))
def DownloadFilesToTempFolderOnCluster(workingFolderOnCluster, fileIds):
    """Download each OMERO file into the cluster working folder and rewrite
    its [blockId, portName, value, direction] entry in place into an 'out'
    'Value' parameter carrying the file name, with file info appended.
    """
    if not fileIds:
        LogWriter.logInfo("no other files to download")
    # iterate entries directly (they are lists, so in-place edits propagate);
    # 'id' renamed to avoid shadowing the builtin
    for entry in fileIds:
        # blockId | portName | value
        fileId = entry[2]
        WorkflowExecutor.DownloadFileToFolderOnCluster(
            workingFolderOnCluster, fileId)
        LogWriter.logDebug("Download finished " + str(fileId))
        # hoisted: GetFileName was called twice per entry; assumed
        # deterministic for a fixed id -- TODO confirm
        fileName = WorkflowExecutor.GetFileName(fileId)
        LogWriter.logDebug("FileName finished " + fileName)
        # change parameter for graph transformation
        entry[1] = "Value"
        entry[2] = fileName
        entry[3] = "out"
        entry.append(WorkflowExecutor.GetFileInformation(
            workingFolderOnCluster + entry[2]))
        LogWriter.logDebug("Created Parameter " + str(entry))
def DownloadImageFilesToTempFolderOnCluster(workingFolderOnCluster, imageIds):
    """Download each OMERO image into the cluster working folder and rewrite
    its [blockId, portName, value, direction] entry in place into an 'out'
    'Value' parameter carrying the image name, file info and OMERO id.
    """
    if not imageIds:
        LogWriter.logInfo("no images to download")
    # iterate entries directly (they are lists, so in-place edits propagate);
    # 'id' renamed to avoid shadowing the builtin
    for entry in imageIds:
        # blockId | portName | value
        imageId = entry[2]
        # NOTE: method name typo ('Imge') kept -- it is the interface's name
        WorkflowExecutor.DownloadImgeToFolderOnCluster(
            workingFolderOnCluster, imageId)
        LogWriter.logDebug("Download finished " + str(imageId))
        # hoisted: GetImageName was called twice per entry; assumed
        # deterministic for a fixed id -- TODO confirm
        imageName = WorkflowExecutor.GetImageName(imageId)
        LogWriter.logDebug("ImageName finished " + imageName)
        # change parameter for graph transformation
        entry[1] = "Value"
        entry[2] = imageName
        entry[3] = "out"
        entry.append(WorkflowExecutor.GetFileInformation(
            workingFolderOnCluster + entry[2]))
        entry.append("OMERO ID:" + str(imageId))
        LogWriter.logDebug("Created Parameter " + str(entry))
def CheckOrDeployDeployment(fullDeploymentFolderName, fullWorkingFolderOnCluster):
    # Ensure the shared deployment exists on the cluster (copying it up if
    # needed) and link its contents into the run's working folder.
    LogWriter.logInfo("Check if deployment exists")
    deploymentFile = "aDeployment.tgz"
    # NOTE(review): the double negation ('not ...isDeploymentExists' followed
    # by 'if not deploymentFolderExists') only matches the log messages if
    # isDeploymentExists returns a FALSY value when the folder exists (e.g. a
    # 0 shell exit code). If it returns a plain boolean, this branch is
    # inverted -- confirm against the server interface implementation.
    deploymentFolderExists = not WorkflowExecutor.serverInterface.isDeploymentExists(
        fullDeploymentFolderName)
    if not deploymentFolderExists:
        LogWriter.logInfo("No Deployment found: copy new")
        WorkflowExecutor.serverInterface.DeployDeployment(
            fullDeploymentFolderName, deploymentFile)
    else:
        LogWriter.logInfo("Deployment found")
    # link everything from the deployment folder into the working folder
    WorkflowExecutor.serverInterface.LinkFolderOnServer(
        fullDeploymentFolderName + "*", fullWorkingFolderOnCluster)
    # remap icy plugin folder for plugin detection to work
    WorkflowExecutor.serverInterface.LinkFolderOnServer(
        fullDeploymentFolderName + "Libs/Icy/plugins", fullWorkingFolderOnCluster)
def StartExcutionOnCluster(workflowAndParameterDic):
    # Run one workflow end-to-end on the cluster: deploy the environment,
    # compile/stage versioned plugins, download input images and files, write
    # the workflow description, execute it, then upload and annotate results.
    # NOTE(review): function name typo ('Excution') kept -- callers use it.
    try:
        hadErrors = False
        runId = str(uuid.uuid4())  # run id, also used as the working-folder name
        # hard-coded OMERO dataset id for intermediates -- TODO make configurable
        intermediateDataSet = 751
        workflowAndParameterDic["runId"] = runId
        workflowAndParameterDic[
            "intermediateDataSet"] = intermediateDataSet
        # keep an unmodified copy of the parameters for the reproducibility
        # record; later steps mutate the originals in place
        unmodiviedParams = copy.deepcopy(
            workflowAndParameterDic["parameters"])
        name = ""
        if ("name" in workflowAndParameterDic):
            name = workflowAndParameterDic["name"]
        RunRepository.registerRun(runId, name)
        workingFolderName = runId
        # create a temp folder on the cluster with this id
        workingFolderOnCluster = "omeroEnv/" + workingFolderName + "/"
        fullWorkingFolderOnCluster = WorkflowExecutor.serverInterface.GetHomePath(
        ) + workingFolderOnCluster
        fullDeploymentFolderName = WorkflowExecutor.serverInterface.GetHomePath(
        ) + "omeroEnv/deployment/"
        RunRepository.updateStatusRun(runId, "Deploy environment")
        WorkflowExecutor.CreateTempFolderOnServer(
            fullWorkingFolderOnCluster)
        WorkflowExecutor.CreateTempFolderOnServer(
            fullWorkingFolderOnCluster + "PluginDeploy/")
        WorkflowExecutor.CreateTempFolderOnServer(
            fullWorkingFolderOnCluster + "Tool/")
        LogWriter.logInfo("+ Check deployment and create deployment ")
        WorkflowExecutor.CheckOrDeployDeployment(
            fullDeploymentFolderName, fullWorkingFolderOnCluster)
        # For every requested block version, compile (or fetch from cache)
        # the plugin jar and stage it plus any tool dependency on the cluster.
        versions = workflowAndParameterDic["versions"]
        blocks = workflowAndParameterDic["blocks"]
        for i in range(0, len(versions)):
            # find the workflow block this version entry refers to
            correspondingBlock = None
            for j in range(0, len(blocks)):
                if (blocks[j]["elementId"] == versions[i][0]):
                    correspondingBlock = blocks[j]
                    break
            if not correspondingBlock is None:
                # NOTE(review): local 'git' shadows the module-level git import
                git = correspondingBlock["GitRepo"]
                path = correspondingBlock["GitFilePath"]
                try:
                    pathToJar = JavaHelper.compileOrGetFromCache(
                        git, path, versions[i][1], path)
                    if not pathToJar is None:
                        WorkflowExecutor.serverInterface.CopyToServer(
                            fullWorkingFolderOnCluster + "PluginDeploy/",
                            pathToJar)
                        toolStuff = JavaHelper.checkForToolDependencies(
                            pathToJar)
                        if not toolStuff is None and len(toolStuff) > 0:
                            LogWriter.logInfo("ToolStuff " + str(toolStuff))
                            # only the first tool dependency is fetched
                            tool_file = JavaHelper.getToolFromVersionControl(
                                toolStuff[0][0], toolStuff[0][1],
                                toolStuff[0][2], toolStuff[0][3],
                                toolStuff[0][4])
                            if not tool_file is None:
                                WorkflowExecutor.serverInterface.CopyToServer(
                                    fullWorkingFolderOnCluster + "Tool/",
                                    tool_file)
                            else:
                                LogWriter.logDebug(
                                    "No Tool file downloaded")
                    else:
                        LogWriter.logError("No compiled file created")
                except Exception as e:
                    # a failed plugin build is logged but does not abort the run
                    # NOTE(review): e.message is Python-2-only
                    LogWriter.logError(e.message)
                    LogWriter.logError(traceback.format_exc())
        LogWriter.logInfo("+ Download Input files ")
        RunRepository.updateStatusRun(runId, "Download Inputs")
        imageIds = WorkFlowAnalyser.GetRequiredImageIdsFromWorkflow(
            workflowAndParameterDic)
        WorkflowExecutor.DownloadImageFilesToTempFolderOnCluster(
            fullWorkingFolderOnCluster, imageIds)
        fileIds = WorkFlowAnalyser.GetRequiredFileIdsFromWorkflow(
            workflowAndParameterDic)
        WorkflowExecutor.DownloadFilesToTempFolderOnCluster(
            fullWorkingFolderOnCluster, fileIds)
        # find Result images => import into Data Set
        imagesToUpLoadToDataSet = WorkFlowAnalyser.GetImageUploadsFromWorkflow(
            workflowAndParameterDic)
        imageNamesWithDatasetIds = WorkflowExecutor.CreateTempFileNamesForUploadsAndModifyParameters(
            imagesToUpLoadToDataSet, ".tiff")
        # find result file => image annotation
        dataFilesToAnnotate = WorkFlowAnalyser.GetImagesToAnnotateFromWorkflow(
            workflowAndParameterDic)
        dataFileNamesWithImageIds = WorkflowExecutor.CreateTempFileNamesForUploadsAndModifyParameters(
            dataFilesToAnnotate, ".txt")
        LogWriter.logInfo("+ Write Workflow file ")
        WorkflowExecutor.WriteWorkflowFileToCluster(
            workflowAndParameterDic, fullWorkingFolderOnCluster)
        # debugging aid kept from the original: uncomment to stop the run
        # right before execution on the cluster
        #return
        LogWriter.logInfo("+ Start execution ")
        RunRepository.updateStatusRun(runId, "Execute Workflow")
        errorcode = WorkflowExecutor.StartWorkflowExecution(
            fullWorkingFolderOnCluster)
        if (errorcode > 0):
            # a positive error code means the workflow manager failed to start
            LogWriter.logInfo("+ Error starting execution: ErrorCode " +
                              str(errorcode))
            RunRepository.updateStatusRun(
                runId,
                "Failed to execute Workflow Manager with error code : " +
                str(errorcode))
            return
        LogWriter.logInfo("+ Upload Results")
        RunRepository.updateStatusRun(runId, "Upload results")
        try:
            ids = WorkflowExecutor.UploadFiles(imageNamesWithDatasetIds,
                                               fullWorkingFolderOnCluster)
            LogWriter.logDebug("+ Uploaded Created files as:" + str(ids))
        except:
            # NOTE(review): bare except deliberately keeps the run going;
            # the failure is reflected via hadErrors in FinishRun below
            LogWriter.logDebug("+ Upload results faild")
            ids = []
            hadErrors = True
        WorkflowExecutor.AnnotateImagesWithResultFiles(
            dataFileNamesWithImageIds, fullWorkingFolderOnCluster)
        # annotate uploaded files with workflow description
        WorkFlowAnalyser.MergeReproducibilityParameters(
            unmodiviedParams, imageIds)
        resultAnnotationId = WorkflowExecutor.StoreWorkflowFile(
            ids, unmodiviedParams, workflowAndParameterDic)
        # get Intermediates from cluster
        RunRepository.updateStatusRun(runId, "Collect Intermediates")
        WorkflowExecutor.GetAndStoreIntermediates(
            fullWorkingFolderOnCluster, intermediateDataSet, runId)
        # get Statistics from cluster
        RunRepository.updateStatusRun(runId, "Collect Statistics")
        WorkflowExecutor.GetAndStoreStatistics(fullWorkingFolderOnCluster,
                                               intermediateDataSet, runId)
        # TODO: cleanup of the working folder on the cluster
        LogWriter.logInfo("+ Finished Execution ")
        RunRepository.FinishRun(runId, resultAnnotationId, hadErrors)
    except Exception as e:
        # NOTE(review): if the failure happened before runId was assigned this
        # handler itself raises NameError; e.message is also Python-2-only
        RunRepository.updateStatusRun(runId, "Failed with " + e.message)
        LogWriter.logError(e.message)
        LogWriter.logError(traceback.format_exc())
def CopyToServer(self, destinationFolder, fileToCopy):
    """Log the scp command that would copy *fileToCopy* to the server.

    Dry-run stub: the actual copy is disabled, only the command is logged.
    """
    # real copy is disabled:
    # call(["scp", deploymentFile, self.clusterUserAndAddress + ":" + fullDeploymentFolderName])
    remoteTarget = self.clusterUserAndAddress + ":" + destinationFolder
    LogWriter.logInfo("scp " + fileToCopy + " " + remoteTarget)
def ExecuteCommandOnServer(self, command):
    """Log the ssh command without executing it and return False.

    Dry-run stub: the actual remote call is disabled.
    """
    # execution is disabled:
    # return call(["ssh", self.clusterUserAndAddress, command])
    sshCommandLine = "ssh " + self.clusterUserAndAddress + " " + command
    LogWriter.logInfo(sshCommandLine)
    return False