def test_Jobs_submitRDBJob(self):
    """Submit an RDB query job, wait for it to finish, then verify the job
    description fields and the downloaded CSV query result.

    Relies on module-level fixtures: Jobs_* constants and
    Authentication_loginName.
    """
    rdbComputeDomain = Jobs.getRDBComputeDomainFromName(
        Jobs_RDBComputeDomainName)
    jobId = Jobs.submitRDBQueryJob(Jobs_SqlQuery, rdbComputeDomain,
                                   Jobs_DatabaseContextName,
                                   Jobs_QueryResultsFile, Jobs_Alias)
    jobStatus = Jobs.waitForJob(jobId)
    # assertEqual gives a useful diff on failure (assertTrue(a == b) does not)
    self.assertEqual(jobStatus, Jobs.getJobStatus(jobId))
    # status 32 marks a successfully finished job in the SciServer jobs API
    self.assertEqual(jobStatus.get('status'), 32)

    job = Jobs.getJobDescription(jobId)
    self.assertEqual(job.get('username'), Authentication_loginName)
    self.assertEqual(job.get('rdbDomainName'), Jobs_RDBComputeDomainName)
    self.assertEqual(job.get('databaseContextName'), Jobs_DatabaseContextName)
    self.assertEqual(job.get('inputSql'), Jobs_SqlQuery)
    self.assertEqual(job.get('submitterDID'), Jobs_Alias)

    fileService = Files.getFileServiceFromName(Jobs_FileServiceName)
    jobDirectory = job.get('resultsFolderURI')
    relativePath = jobDirectory.split(
        'scratch/')[1] + Jobs_QueryResultsFile + '.csv'
    string = Files.download(fileService, 'scratch', '', relativePath,
                            format="txt",
                            userVolumeOwner=Authentication_loginName)
    # BUG FIX: str.rstrip returns a new string; the original call discarded
    # its result, leaving the trailing newline in place.
    string = string.rstrip("\n")
    # BUG FIX: assertTrue(string, Jobs_SqlQueryResult) only checked
    # truthiness, using the expected result as the failure *message*.
    # The intent is clearly an equality comparison.
    self.assertEqual(string, Jobs_SqlQueryResult)
def run_preprocessing_pipeline(**kwargs):  # TODO: change from kwargs
    """Run the preprocessing pipeline in science-server"""
    results_path = kwargs.get('results_path')
    code_tar_location = kwargs.get('code_tar_location')

    # Unpack the uploaded code tarball into a clean 'code' directory and
    # execute the preprocessing entry script from inside it.
    shell_command = ('cd ../.. && mkdir -p code && rm -rf code/* '
                     '&& tar -xvf /home/idies/workspace/{} -C code '
                     '&& cd code/preprocessing && chmod +x preprocess.sh '
                     '&& ./preprocess.sh'.format(code_tar_location))

    compute_domain = sj.getDockerComputeDomainFromName(
        SciServerJobRunner.JOBS_DOMAIN)
    mounted_volumes = [
        {'name': 'AstroResearch', 'needsWriteAccess': True},
        {'name': 'scratch', 'rootVolumeName': 'Temporary',
         'needsWriteAccess': True},
    ]
    jobid = sj.submitShellCommandJob(shell_command,
                                     compute_domain,
                                     SciServerJobRunner.DOCKER_IMAGE,
                                     userVolumes=mounted_volumes,
                                     resultsFolderPath=results_path)
    # Optionally block until the remote job reaches a terminal state.
    if kwargs.get('wait_for_completion'):
        sj.waitForJob(jobid, verbose=True)
# get job description
job = Jobs.getJobDescription(jobId)
print(job)


# In[ ]:


# wait until job is finished and get job status
notebook_path = ('/home/idies/workspace/' + Jobs_UserVolumeName + '/'
                 + Jobs_DirectoryName + '/' + Jobs_NotebookName)
jobId = Jobs.submitNotebookJob(notebook_path, dockerComputeDomain,
                               Jobs_DockerImageName, Jobs_UserVolumes,
                               Jobs_DataVolumes, Jobs_Alias)
jobStatus = Jobs.waitForJob(jobId)
print(jobStatus)


# In[ ]:


# cancel job
jobId = Jobs.submitNotebookJob(notebook_path, dockerComputeDomain,
                               Jobs_DockerImageName, Jobs_UserVolumes,
                               Jobs_DataVolumes, Jobs_Alias)
Jobs.cancelJob(jobId)
def test_Jobs_submitNotebookJob_cancel_waitForJob_getJobStatus_getJobDescription_submitShellCommandJob(
        self):
    """End-to-end exercise of the notebook / shell-command job APIs:
    submit+cancel a notebook job, submit+wait on a second one, verify both
    job descriptions, check the jobs list, run a shell-command job, and
    download each job's result file.
    """
    fileService = Files.getFileServiceFromName(Jobs_FileServiceName)
    # Best-effort cleanup of a user volume left over from a previous run.
    try:
        Files.deleteUserVolume(fileService, Jobs_RootVolumeName,
                               Jobs_UserVolumeName)
    # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # the cleanup stays best-effort but only catches real errors now.
    except Exception:
        pass
    Files.createUserVolume(fileService, Jobs_RootVolumeName,
                           Jobs_UserVolumeName)
    Files.upload(fileService, Jobs_RootVolumeName, Jobs_UserVolumeName,
                 Jobs_DirectoryName + "/" + Jobs_NotebookName,
                 localFilePath=Jobs_NotebookName)

    dockerComputeDomain = Jobs.getDockerComputeDomainFromName(
        Jobs_DockerComputeDomainName)

    # Submit a notebook job and cancel it immediately.
    jobId_1 = Jobs.submitNotebookJob(
        '/home/idies/workspace/' + Jobs_UserVolumeName + '/'
        + Jobs_DirectoryName + '/' + Jobs_NotebookName,
        dockerComputeDomain, Jobs_DockerImageName, Jobs_UserVolumes,
        Jobs_DataVolumes, Jobs_Parameters, Jobs_Alias)
    Jobs.cancelJob(jobId_1)
    jobStatus = Jobs.getJobStatus(jobId_1)
    # status 128 marks a canceled job in the SciServer jobs API
    self.assertEqual(jobStatus.get('status'), 128)

    # Submit a second notebook job and wait for it to finish successfully.
    jobId_2 = Jobs.submitNotebookJob(Jobs_RemoteNotebookPath,
                                     dockerComputeDomain,
                                     Jobs_DockerImageName, Jobs_UserVolumes,
                                     Jobs_DataVolumes, Jobs_Parameters,
                                     Jobs_Alias)
    jobStatus = Jobs.waitForJob(jobId_2)
    self.assertEqual(jobStatus, Jobs.getJobStatus(jobId_2))
    # status 32 marks a successfully finished job
    self.assertEqual(jobStatus.get('status'), 32)

    job = Jobs.getJobDescription(jobId_2)
    self.assertEqual(job.get('username'), Authentication_loginName)
    self.assertEqual(job.get('dockerImageName'), Jobs_DockerImageName)
    self.assertEqual(job.get('scriptURI'), Jobs_RemoteNotebookPath)
    self.assertEqual(job.get('submitterDID'), Jobs_Alias)

    jobDirectory = job.get('resultsFolderURI')
    relativePath = jobDirectory.split(
        'scratch/')[1] + Jobs_NoteBookOutPutFile
    string = Files.download(fileService, 'scratch', '', relativePath,
                            format="txt",
                            userVolumeOwner=Authentication_loginName)
    # BUG FIX: str.rstrip returns a new string; the original discarded it.
    string = string.rstrip("\n")
    self.assertTrue(string, job.get('resultsFolderURI'))

    # Both submitted notebook jobs must appear in the most recent job list.
    # (Replaces two manual found-flag loops with a membership check.)
    jobs = Jobs.getJobsList(top=2)
    listed_ids = [j.get("id") for j in jobs]
    self.assertTrue(jobId_1 in listed_ids)
    self.assertTrue(jobId_2 in listed_ids)

    # Submit a shell-command job, wait, and verify its description.
    jobId = Jobs.submitShellCommandJob(Jobs_ShellCommand,
                                       dockerComputeDomain,
                                       Jobs_DockerImageName,
                                       Jobs_UserVolumes, Jobs_DataVolumes,
                                       Jobs_Alias)
    jobStatus = Jobs.waitForJob(jobId)
    self.assertEqual(jobStatus, Jobs.getJobStatus(jobId))
    self.assertEqual(jobStatus.get('status'), 32)

    job = Jobs.getJobDescription(jobId)
    self.assertEqual(job.get('username'), Authentication_loginName)
    self.assertEqual(job.get('dockerImageName'), Jobs_DockerImageName)
    self.assertEqual(job.get('command'), Jobs_ShellCommand)
    self.assertEqual(job.get('submitterDID'), Jobs_Alias)

    jobDirectory = job.get('resultsFolderURI')
    relativePath = jobDirectory.split('scratch/')[1] + "command.txt"
    string = Files.download(fileService, 'scratch', '', relativePath,
                            format="txt",
                            userVolumeOwner=Authentication_loginName)
    # BUG FIX: same discarded-rstrip defect as above.
    string = string.rstrip("\n")
    self.assertTrue(string, job.get('resultsFolderURI'))

    # Final cleanup of the volume created at the top of the test.
    Files.deleteUserVolume(fileService, Jobs_RootVolumeName,
                           Jobs_UserVolumeName)