def StoreWorkflowFile(ids, unmodiviedParams, workflowAndParameterDic):
     """Persist the workflow description (with its original, unmodified
     parameters) as a file annotation on the given images.

     Returns the id of the created annotation.  The temp file produced by
     WriteToFile is always removed, even when the annotation upload raises.
     """
     workflowAndParameterDic["parameters"] = unmodiviedParams
     tmpfile = WorkflowExecutor.WriteToFile(workflowAndParameterDic)
     LogWriter.logDebug("File :" + tmpfile.name)
     try:
         # upload may raise; the finally block guarantees cleanup either way
         resultAnnotationId = WorkflowExecutor.AnnotateImagesWithFile(
             ids, tmpfile.name)
     finally:
         os.remove(tmpfile.name)
     return resultAnnotationId
 def ExecuteCommandOnServerOutput2(self, command):
     """Run *command* on the cluster via ssh and return its console output.

     On a non-zero exit code the (partial) output captured in the raised
     CalledProcessError is returned instead, so callers always receive text.
     """
     LogWriter.logInfo("ssh " + self.clusterUserAndAddress + " " + command)
     try:
         consoleOut = check_output(["ssh", self.clusterUserAndAddress, command])
         LogWriter.logDebug("Console out :\n" + str(consoleOut))
         return consoleOut
     except CalledProcessError as e:  # 'as' form: valid on Python 2.6+ and 3
         LogWriter.logDebug("Console out (" + str(e.returncode) + "):\n" + str(e.output))
         return str(e.output)
    def WriteWorkflowFileToCluster(cls, workflowAndParameterDic,
                                   fullWorkingFolderOnCluster):
        """Serialize the workflow/parameter dict to a local JSON file and
        copy that file into the given working folder on the cluster."""
        LogWriter.logInfo("ParameterDir: " + str(workflowAndParameterDic))

        tempDir = WorkflowExecutor.CreateTempLocalFolder()
        jsonPath = tempDir + "/" + WorkflowExecutor.jsonParameterFileName
        LogWriter.logDebug("Temp Workflow file : " + jsonPath)

        with open(jsonPath, 'w') as handle:
            json.dump(workflowAndParameterDic, handle)

        WorkflowExecutor.serverInterface.CopyToServer(
            fullWorkingFolderOnCluster, jsonPath)
# --- Example #4 (scraper artifact separator) ---
    def getFileFromRevision(git_url, file_path, rev):
        """Clone *git_url* into a fresh temp dir, check out revision *rev*
        and return the path of *file_path* inside that clone.

        On Windows the clone is skipped and a local fallback copy is used.
        NOTE(review): the temp clone directory is deliberately not removed —
        the caller reads the returned path, so cleanup here would break it.
        """
        if (platform.system() == 'Windows'):
            return GitHelper.getWindowsFallback(file_path)

        repo_dir = tempfile.mkdtemp()
        git_url = GitHelper.checkAndModifyGitUrl(git_url)
        repo = git.Repo.clone_from(git_url, repo_dir)
        repo.git.checkout(rev)

        pathToFile = repo.git.working_dir + "/" + file_path
        LogWriter.logDebug("Path to local file from git: " + pathToFile)
        return pathToFile
# --- Example #5 (scraper artifact separator) ---
    def compileOrGetFromCache(git_url, file_path, rev, name):
        """Fetch *file_path* at revision *rev*, compile it with the platform
        compile script and return the absolute path of the resulting jar.

        Returns None when the compile script is missing or the jar was not
        produced.
        """
        scriptFile = JavaHelper.getCompileShellScript()
        if scriptFile is None:
            return None

        code = GitHelper.getFileStringFromRevision(git_url, file_path, rev)

        compileFolder = tempfile.mkdtemp()
        sourceFileName = compileFolder + "/" + name
        with open(sourceFileName, "w") as text_file:
            text_file.write(code)

        # strip the .java extension to get the target jar name
        name = name.replace(".java", "")

        params = [scriptFile, sourceFileName, compileFolder, name]
        LogWriter.logDebug("Call Compiler: " + str(params))
        # bug fix: keep compileOut defined even when check_output raises,
        # otherwise the logging below hits a NameError
        compileOut = ""
        try:
            compileOut = check_output(params)
        except Exception as e:
            import traceback
            # str(e) instead of e.message: not every exception has .message
            LogWriter.logError(str(e))
            LogWriter.logError(traceback.format_exc())
            LogWriter.logError(compileOut)

        LogWriter.logDebug("Compiler out: " + compileOut)

        resultpath = compileFolder + "/" + name + ".jar"
        LogWriter.logDebug("Should have compiled to: " + str(resultpath))
        if os.path.isfile(resultpath) and os.path.exists(resultpath):
            return os.path.abspath(resultpath)
        LogWriter.logDebug("Not Found: " + str(resultpath) +
                           " \n Compiler output:\n" + str(compileOut))
        return None
# --- Example #6 (scraper artifact separator) ---
def JoinSession(conn):
    """Open a second BlitzGateway connection that joins the session of the
    existing *conn*.

    Returns the new connection, or None when joining failed.
    """
    try:
        LogWriter.logDebug("Connect To:" + str(conn.host) + " " +
                           str(conn.port) + " " + str(conn._getSessionId()))
        connection = BlitzGateway('OMERO.script',
                                  host=conn.host,
                                  port=conn.port)
        connection.connect(sUuid=conn._getSessionId())
        return connection
    except Exception as inst:
        # str(inst) instead of inst.message: .message is not defined for
        # every exception type (and is gone in Python 3)
        LogWriter.logError("Connecting own session failed " +
                           str(inst))
        LogWriter.logError(traceback.format_exc())
        return None
# --- Example #7 (scraper artifact separator) ---
    def getCompileShellScript():
        """Return the absolute path of the platform compile script.

        When the script does not exist, an empty placeholder file is created
        (so an admin can fill it in) and None is returned.
        """
        defaultScript = "compileJar.sh"
        if (platform.system() == 'Windows'):
            defaultScript = "compileJar.bat"

        LogWriter.logDebug("Absolut Path to Compile Script: " +
                           str(os.path.abspath(defaultScript)))
        # isfile() already implies existence, so no separate exists() check
        if os.path.isfile(defaultScript):
            return os.path.abspath(defaultScript)

        LogWriter.logError("could not find compile script. Create it: " +
                           defaultScript)
        # 'with' guarantees the handle is closed and avoids shadowing the
        # builtin name 'file'
        with open(defaultScript, 'w+'):
            pass
        return None
 def GetAnnotationFileFromServer(self, fileId):
     """Download the content of FileAnnotation *fileId* chunk by chunk.

     Returns a (file name, raw data) tuple; on any error it logs the
     traceback and returns whatever was collected so far ("" / None when
     nothing was read).
     """
     data = None
     name = ""
     try:
         annotation = self.connection.getObject("FileAnnotation", fileId)
         name = annotation.getFileName()
         for piece in annotation.getFileInChunks():
             # start from the first chunk so the payload type matches it
             data = piece if data is None else data + piece
         LogWriter.logDebug("Downloaded " + str(name) + " " + str(len(data)) + " size")
     except:
         LogWriter.logError("Could not load file annotation" + str(traceback.format_exc()))
     return name, data
    def UploadFiles(self, fileNamesWithDatasetIds, fullWorkingFolderOnCluster):
        """Import each (fileName, datasetId) pair from the cluster working
        folder into OMERO and return the list of created image ids.

        The id is parsed from the CLI's last output line ("... code: <id>").
        """
        ids = []
        for entry in fileNamesWithDatasetIds:
            command = CommandFactory.GetUploadCommand(entry[0], entry[1],
                                                      fullWorkingFolderOnCluster, self.hostName, self.sessionId)

            # can't use return code, since linux return code is mod 256
            # => cli will output  "Return with code: 720 " in its last line
            output = self.ExecuteCommandOnServerOutput(command)
            lastline = output.splitlines()[-1]
            LogWriter.logDebug("Last Line of Upload "+ str(lastline))
            idString = lastline.rsplit(":", 1)[-1]
            ids.append(int(idString))
        return ids
    def AnnotateDataSetsWithFile(self, ids, filePath):
        """Attach *filePath* as a text file annotation to every dataset in
        *ids* (a single id is also accepted) and return the annotation id.

        Failures on individual datasets are logged and skipped.
        """
        if not isinstance(ids, collections.Sequence):
            ids = [ids]

        fileAnn = self.connection.createFileAnnfromLocalFile(filePath, mimetype="text/plain", ns=self.namespace, desc=None)
        for dataSetId in ids:
            LogWriter.logDebug("Annotate Dataset " + str(dataSetId) + "with " + str(filePath))

            try:
                ds = self.connection.getObject("DataSet", dataSetId)
                if ds is None:
                    # bug fix: dataSetId is typically an int — plain '+'
                    # concatenation raised TypeError here
                    LogWriter.logError("DataSet " + str(dataSetId) + " not found")
                    continue

                ds.linkAnnotation(fileAnn)

                LogWriter.logDebug("Annotate Dataset " + str(dataSetId) + "with " + str(filePath) + " Succsessfull")
            except Exception:
                LogWriter.logError("Could not annotate id " + str(dataSetId) + str(traceback.format_exc()))
        return fileAnn.getId()
# --- Example #11 (scraper artifact separator) ---
    def checkForToolDependencies(jarPath):
        """Ask the GetSourceControlInfo CLI which external tools *jarPath*
        depends on.

        Returns None when the CLI jar is not present in the working
        directory, otherwise a list of '::'-separated record fields
        (records with fewer than 4 fields are skipped).
        """
        cliTool = "GetSourceControlInfo.jar"
        if not os.path.isfile(cliTool):
            return None

        LogWriter.logDebug("Absolut Path to tool tool: " +
                           str(os.path.abspath(cliTool)))

        # (removed an unused local 'import subprocess' — check_output is
        # already in scope at module level)
        consoleOut = check_output(['java', '-jar', cliTool, jarPath])
        lines = consoleOut.strip().split("\n")

        LogWriter.logDebug("Tool Console out: " + str(consoleOut))

        result = []
        for line in lines:
            parts = line.strip().split("::", 5)
            if (len(parts) < 4):
                continue

            result.append(parts)
        return result
    def GetResultFromServer(self, fileId):
        """Download FileAnnotation *fileId* into a temp file and return its
        content with all newlines stripped.

        Returns None when the annotation does not exist, "" when the
        download failed (the error is logged).
        """
        data = ""
        try:
            ann = self.connection.getObject("FileAnnotation", fileId)

            if ann is None:
                # bug fix: fileId may be an int — plain '+' raised TypeError
                LogWriter.logError("Could not get file annotation with id " + str(fileId))
                return None

            # NOTE(review): delete=False and no os.remove -> the temp file
            # stays behind on disk; presumably intentional for debugging
            tf = tempfile.NamedTemporaryFile(prefix="workflow", suffix=".json", delete=False)
            with open(str(tf.name), 'w') as f:
                for chunk in ann.getFileInChunks():
                    f.write(chunk)

            with open(tf.name, 'r') as myfile:
                data = myfile.read().replace('\n', '')
                LogWriter.logDebug(data)
        except Exception:
            LogWriter.logError("Could not load file annotation" + str(traceback.format_exc()))
        return data
 def CopyToServer(self, fullDeploymentFolderName, localFile):
     """scp *localFile* into *fullDeploymentFolderName* on the cluster.

     Returns 0 on success, otherwise the scp return code.
     """
     LogWriter.logDebug("scp " + localFile + " " + self.clusterUserAndAddress + ":" + fullDeploymentFolderName)
     try:
         consoleOut = check_output(["scp", localFile, self.clusterUserAndAddress + ":" + fullDeploymentFolderName])
         LogWriter.logDebug("Console out: \n " + str(consoleOut))
         return 0
     except CalledProcessError as e:  # 'as' form: valid on Python 2.6+ and 3
         LogWriter.logDebug("Console out (" + str(e.returncode) + ") : \n" + str(e.output))
         return e.returncode
    def CopyFromServer(self, pathOnCluster, localPath, recursive):
        """scp a file (or a directory when *recursive*) from the cluster to
        *localPath*.

        Returns 0 on success, the scp return code on failure.
        """
        if (recursive):
            LogWriter.logDebug(
            "scp -r " + self.clusterUserAndAddress + ":" + pathOnCluster + " " + localPath + " ")
        else:
            LogWriter.logDebug(
                "scp " + self.clusterUserAndAddress + ":" + pathOnCluster + " " + localPath + " ")

        try:
            if (recursive):
                consoleOut = check_output(["scp", "-r", self.clusterUserAndAddress + ":" + pathOnCluster, localPath])
            else:
                consoleOut = check_output(["scp", self.clusterUserAndAddress + ":" + pathOnCluster, localPath])
            LogWriter.logDebug("Console out: \n " + str(consoleOut))
            return 0
        except CalledProcessError as e:
            LogWriter.logDebug("Console out (" + str(e.returncode) + ") : \n" + str(e.output))
            # bug fix: previously fell through and returned None on failure;
            # return the code for consistency with CopyToServer
            return e.returncode
    def DownloadFilesToTempFolderOnCluster(workingFolderOnCluster, fileIds):
        """Download every referenced file into the cluster working folder
        and rewrite each [blockId, portName, value, ...] entry of *fileIds*
        in place into an output parameter for the graph transformation."""
        if (len(fileIds) == 0):
            LogWriter.logInfo("no other files to download")

        for entry in fileIds:
            # blockId | portName | value
            fileId = entry[2]
            WorkflowExecutor.DownloadFileToFolderOnCluster(
                workingFolderOnCluster, fileId)
            LogWriter.logDebug("Download finished " + str(fileId))
            LogWriter.logDebug("FileName finished " +
                               WorkflowExecutor.GetFileName(fileId))

            # rewrite this entry so the graph transformation treats it as
            # an already-produced output value
            entry[1] = "Value"
            entry[2] = (WorkflowExecutor.GetFileName(fileId))
            entry[3] = "out"
            entry.append(
                WorkflowExecutor.GetFileInformation(workingFolderOnCluster +
                                                    entry[2]))

            LogWriter.logDebug("Created Parameter " + str(entry))
    def DownloadImageFilesToTempFolderOnCluster(workingFolderOnCluster,
                                                imageIds):
        """Download every referenced image into the cluster working folder
        and rewrite each [blockId, portName, value, ...] entry of *imageIds*
        in place into an output parameter for the graph transformation."""
        if (len(imageIds) == 0):
            LogWriter.logInfo("no images to download")

        for entry in imageIds:
            # blockId | portName | value
            imageId = entry[2]
            WorkflowExecutor.DownloadImgeToFolderOnCluster(
                workingFolderOnCluster, imageId)
            LogWriter.logDebug("Download finished " + str(imageId))
            LogWriter.logDebug("ImageName finished " +
                               WorkflowExecutor.GetImageName(imageId))

            # rewrite this entry so the graph transformation treats it as
            # an already-produced output value
            entry[1] = "Value"
            entry[2] = (WorkflowExecutor.GetImageName(imageId))
            entry[3] = "out"
            entry.append(
                WorkflowExecutor.GetFileInformation(workingFolderOnCluster +
                                                    entry[2]))
            entry.append("OMERO ID:" + str(imageId))
            LogWriter.logDebug("Created Parameter " + str(entry))
 def CopyFromServer(self, pathOnCluster, localPath, recusive):
     """Log the scp command that would copy *pathOnCluster* to *localPath*.

     NOTE(review): this variant only logs — no copy is performed and the
     *recusive* (sic) flag is ignored; presumably a stub/mock
     implementation. Confirm before relying on it.
     """
     LogWriter.logDebug("scp " + self.clusterUserAndAddress + ":" +
                        pathOnCluster + " " + localPath + " ")
     pass
    def StartExcutionOnCluster(workflowAndParameterDic):
        """Run one complete workflow execution on the cluster.

        Steps, in order: register the run, create/deploy the working
        environment, compile and copy plugin jars (plus their tool
        dependencies), download input images/files, write the workflow
        file, start execution, upload results and annotations, then
        collect intermediates and statistics.  Progress is reported
        through RunRepository; any exception marks the run as failed.
        """
        try:
            hadErrors = False

            runId = str(uuid.uuid4())
            # NOTE(review): hard-coded dataset id for intermediates —
            # confirm 751 is still valid on the target server
            intermediateDataSet = 751
            workflowAndParameterDic["runId"] = runId
            workflowAndParameterDic[
                "intermediateDataSet"] = intermediateDataSet
            # deep copy: the parameters are rewritten in place below, but
            # the original set is stored with the results for reproducibility
            unmodiviedParams = copy.deepcopy(
                workflowAndParameterDic["parameters"])

            name = ""
            if ("name" in workflowAndParameterDic):
                name = workflowAndParameterDic["name"]

            RunRepository.registerRun(runId, name)

            workingFolderName = runId  # create a temp folder on the cluster with this id
            workingFolderOnCluster = "omeroEnv/" + workingFolderName + "/"

            fullWorkingFolderOnCluster = WorkflowExecutor.serverInterface.GetHomePath(
            ) + workingFolderOnCluster
            fullDeploymentFolderName = WorkflowExecutor.serverInterface.GetHomePath(
            ) + "omeroEnv/deployment/"

            RunRepository.updateStatusRun(runId, "Deploy environment")
            WorkflowExecutor.CreateTempFolderOnServer(
                fullWorkingFolderOnCluster)
            WorkflowExecutor.CreateTempFolderOnServer(
                fullWorkingFolderOnCluster + "PluginDeploy/")
            WorkflowExecutor.CreateTempFolderOnServer(
                fullWorkingFolderOnCluster + "Tool/")
            LogWriter.logInfo("+ Check deployment and create deployment ")
            WorkflowExecutor.CheckOrDeployDeployment(
                fullDeploymentFolderName, fullWorkingFolderOnCluster)

            # compile each pinned block version and deploy the jar (plus any
            # tool dependency it declares) to the cluster
            versions = workflowAndParameterDic["versions"]
            blocks = workflowAndParameterDic["blocks"]
            for i in range(0, len(versions)):
                correspondingBlock = None
                for j in range(0, len(blocks)):
                    if (blocks[j]["elementId"] == versions[i][0]):
                        correspondingBlock = blocks[j]
                        break

                if not correspondingBlock is None:
                    # NOTE(review): local name 'git' shadows the gitpython
                    # module inside this loop body
                    git = correspondingBlock["GitRepo"]
                    path = correspondingBlock["GitFilePath"]
                    try:
                        pathToJar = JavaHelper.compileOrGetFromCache(
                            git, path, versions[i][1], path)
                        if not pathToJar is None:
                            WorkflowExecutor.serverInterface.CopyToServer(
                                fullWorkingFolderOnCluster + "PluginDeploy/",
                                pathToJar)

                            toolStuff = JavaHelper.checkForToolDependencies(
                                pathToJar)
                            if not toolStuff is None and len(toolStuff) > 0:
                                LogWriter.logInfo("ToolStuff " +
                                                  str(toolStuff))
                                tool_file = JavaHelper.getToolFromVersionControl(
                                    toolStuff[0][0], toolStuff[0][1],
                                    toolStuff[0][2], toolStuff[0][3],
                                    toolStuff[0][4])
                                if not tool_file is None:
                                    WorkflowExecutor.serverInterface.CopyToServer(
                                        fullWorkingFolderOnCluster + "Tool/",
                                        tool_file)
                                else:
                                    LogWriter.logDebug(
                                        "No Tool file downloaded")

                        else:
                            LogWriter.logError("No compiled file created")
                    except Exception as e:
                        # NOTE(review): e.message is Python-2-only and not
                        # defined for every exception type
                        LogWriter.logError(e.message)
                        LogWriter.logError(traceback.format_exc())

            LogWriter.logInfo("+ Download Input files ")
            RunRepository.updateStatusRun(runId, "Download Inputs")
            imageIds = WorkFlowAnalyser.GetRequiredImageIdsFromWorkflow(
                workflowAndParameterDic)
            WorkflowExecutor.DownloadImageFilesToTempFolderOnCluster(
                fullWorkingFolderOnCluster, imageIds)

            fileIds = WorkFlowAnalyser.GetRequiredFileIdsFromWorkflow(
                workflowAndParameterDic)
            WorkflowExecutor.DownloadFilesToTempFolderOnCluster(
                fullWorkingFolderOnCluster, fileIds)

            # find Result images => import into Data Set
            imagesToUpLoadToDataSet = WorkFlowAnalyser.GetImageUploadsFromWorkflow(
                workflowAndParameterDic)
            imageNamesWithDatasetIds = WorkflowExecutor.CreateTempFileNamesForUploadsAndModifyParameters(
                imagesToUpLoadToDataSet, ".tiff")

            # find result file => image annotation
            dataFilesToAnnotate = WorkFlowAnalyser.GetImagesToAnnotateFromWorkflow(
                workflowAndParameterDic)
            dataFileNamesWithImageIds = WorkflowExecutor.CreateTempFileNamesForUploadsAndModifyParameters(
                dataFilesToAnnotate, ".txt")

            LogWriter.logInfo("+ Write Workflow file ")
            WorkflowExecutor.WriteWorkflowFileToCluster(
                workflowAndParameterDic, fullWorkingFolderOnCluster)

            # debugging aid: uncommenting the 'return' below stops the run
            # after deployment/download, before anything executes
            #####################################################################################################
            #####################################################################################################
            #####################################################################################################
            #####################################################################################################
            #####################################################################################################
            #####################################################################################################
            #return

            #####################################################################################################
            #####################################################################################################

            LogWriter.logInfo("+ Start execution ")
            RunRepository.updateStatusRun(runId, "Execute Workflow")
            errorcode = WorkflowExecutor.StartWorkflowExecution(
                fullWorkingFolderOnCluster)
            if (errorcode > 0):
                LogWriter.logInfo("+ Error starting execution: ErrorCode " +
                                  str(errorcode))
                RunRepository.updateStatusRun(
                    runId,
                    "Failed to execute Workflow Manager with error code : " +
                    str(errorcode))
                return

            LogWriter.logInfo("+ Upload Results")
            RunRepository.updateStatusRun(runId, "Upload results")
            try:
                ids = WorkflowExecutor.UploadFiles(imageNamesWithDatasetIds,
                                                   fullWorkingFolderOnCluster)
                LogWriter.logDebug("+ Uploaded Created files as:" + str(ids))
            except:
                # best-effort: a failed upload marks the run with errors but
                # does not abort the remaining collection steps
                LogWriter.logDebug("+ Upload results faild")
                ids = []
                hadErrors = True

            WorkflowExecutor.AnnotateImagesWithResultFiles(
                dataFileNamesWithImageIds, fullWorkingFolderOnCluster)

            # annotate uploaded files with workflow description
            WorkFlowAnalyser.MergeReproducibilityParameters(
                unmodiviedParams, imageIds)
            resultAnnotationId = WorkflowExecutor.StoreWorkflowFile(
                ids, unmodiviedParams, workflowAndParameterDic)

            # get Intermediates from cluster
            RunRepository.updateStatusRun(runId, "Collect Intermediates")
            WorkflowExecutor.GetAndStoreIntermediates(
                fullWorkingFolderOnCluster, intermediateDataSet, runId)

            # get Statistics from cluster
            RunRepository.updateStatusRun(runId, "Collect Statistics")
            WorkflowExecutor.GetAndStoreStatistics(fullWorkingFolderOnCluster,
                                                   intermediateDataSet, runId)

            # Cleanup
            #todo

            LogWriter.logInfo("+ Finished Execution ")
            RunRepository.FinishRun(runId, resultAnnotationId, hadErrors)

        except Exception as e:
            # NOTE(review): if the very first statements raise, runId is
            # unbound here and this handler itself would fail — confirm
            RunRepository.updateStatusRun(runId, "Failed with " + e.message)
            LogWriter.logError(e.message)
            LogWriter.logError(traceback.format_exc())