def setUpClass(cls):
    """One-time test setup.

    Reads the auth token and callback URL from the environment, loads the
    deploy config, builds a MethodContext, creates a uniquely named test
    workspace, and stages shared test data via cls.setupTestData().
    """
    cls.token = environ.get('KB_AUTH_TOKEN')
    cls.callbackURL = environ.get('SDK_CALLBACK_URL')
    # str() avoids a TypeError when SDK_CALLBACK_URL is unset (None).
    print('CB URL: ' + str(cls.callbackURL))
    # WARNING: don't call any logging methods on the context object,
    # it'll result in a NoneType error
    cls.ctx = MethodContext(None)
    cls.ctx.update({'token': cls.token,
                    'provenance': [
                        {'service': 'gaprice_convert_assy_file_to_contigs',
                         'method': 'please_never_use_it_in_production',
                         'method_params': []
                         }],
                    'authenticated': 1})
    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    cls.cfg = {}
    config = ConfigParser()
    config.read(config_file)
    for nameval in config.items('gaprice_convert_assy_file_to_contigs'):
        cls.cfg[nameval[0]] = nameval[1]
    cls.wsURL = cls.cfg['workspace-url']
    cls.shockURL = cls.cfg['shock-url']
    cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                           token=cls.token)
    cls.wsClient = Workspace(cls.wsURL, token=cls.token)
    # millisecond timestamp keeps concurrent test runs from colliding
    wssuffix = int(time.time() * 1000)
    wsName = 'test_gaprice_convert_assy_file_to_contigs_' + str(wssuffix)
    cls.wsinfo = cls.wsClient.create_workspace({'workspace': wsName})
    print('created workspace ' + cls.getWsName())
    cls.serviceImpl = gaprice_convert_assy_file_to_contigs(cls.cfg)
    cls.staged = {}
    cls.nodes_to_delete = []
    cls.handles_to_delete = []
    cls.setupTestData()
    print('\n\n=============== Starting tests ==================')
def _own_handle(self, genome_data, handle_property):
    """
    _own_handle: ensure the shock node referenced by
    genome_data[handle_property] is owned by the calling user, copying
    the node (and minting a new handle) when it is not.
    """
    log('start checking handle {} ownership'.format(handle_property))
    if handle_property not in genome_data:
        return
    handle_id = genome_data[handle_property]
    handle_service = HandleService(self.handle_url, token=self.token)
    shock_id = handle_service.hids_to_handles([handle_id])[0]['id']

    # Copy from DataFileUtil.own_shock_node implementation:
    auth_header = {'Authorization': 'Oauth {}'.format(self.token)}
    acl_url = self.shock_url + '/node/' + shock_id + '/acl/?verbosity=full'
    res = requests.get(acl_url, headers=auth_header, allow_redirects=True)
    self._check_shock_response(
        res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
    owner = res.json()['data']['owner']['username']
    user_id = self.auth_client.get_user(self.token)

    if owner != user_id:
        log('start copying node to owner: {}'.format(user_id))
        copied = self.dfu.copy_shock_node({
            'shock_id': shock_id,
            'make_handle': True
        })
        # point the genome at the caller-owned copy's handle
        genome_data[handle_property] = copied['handle']['hid']
def own_handle(self, genome, handle_property, ctx):
    """Ensure the handle stored in genome[handle_property] references a
    shock node owned by the calling user.

    If the node belongs to someone else, it is copied, a new handle is
    persisted for the copy, and genome[handle_property] is replaced with
    the new handle id.

    :param genome: dict that may contain handle_property
    :param handle_property: key in genome holding a handle id
    :param ctx: call context supplying 'token' and 'user_id'
    """
    if handle_property not in genome:  # nothing to own
        return
    token = ctx['token']
    handle_id = genome[handle_property]
    hs = HandleService(self.handle_url, token=token)
    handles = hs.hids_to_handles([handle_id])
    shock_id = handles[0]['id']

    # Copy from DataFileUtil.own_shock_node implementation:
    header = {'Authorization': 'Oauth {}'.format(token)}
    res = requests.get(self.shock_url + '/node/' + shock_id +
                       '/acl/?verbosity=full',
                       headers=header, allow_redirects=True)
    self.check_shock_response(
        res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
    owner = res.json()['data']['owner']['username']
    if owner != ctx['user_id']:
        # copy the node so the caller owns it, then mint a new handle
        shock_id = self.copy_shock_node(ctx, shock_id)
        r = requests.get(self.shock_url + '/node/' + shock_id,
                         headers=header, allow_redirects=True)
        errtxt = ('Error downloading attributes from shock ' +
                  'node {}: ').format(shock_id)
        self.check_shock_response(r, errtxt)
        shock_data = r.json()['data']
        handle = {'id': shock_data['id'],
                  'type': 'shock',
                  'url': self.shock_url,
                  'file_name': shock_data['file']['name'],
                  'remote_md5': shock_data['file']['checksum']['md5']
                  }
        handle_id = hs.persist_handle(handle)
        genome[handle_property] = handle_id
def setUpClass(cls):
    """One-time setup: read the deploy config, build an authenticated
    context, create a fresh test workspace, and instantiate every
    service client the tests use."""
    cls.token = environ.get('KB_AUTH_TOKEN', None)
    cls.callbackURL = environ.get('SDK_CALLBACK_URL')

    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    config = ConfigParser()
    config.read(config_file)
    cls.cfg = {key: value for key, value in config.items('ExpressionUtils')}

    # Getting username from Auth profile for token
    auth_client = _KBaseAuth(cls.cfg['auth-service-url'])
    user_id = auth_client.get_user(cls.token)

    # WARNING: don't call any logging methods on the context object,
    # it'll result in a NoneType error
    cls.ctx = MethodContext(None)
    cls.ctx.update({
        'token': cls.token,
        'user_id': user_id,
        'provenance': [{
            'service': 'ExpressionUtils',
            'method': 'please_never_use_it_in_production',
            'method_params': []
        }],
        'authenticated': 1
    })

    cls.shockURL = cls.cfg['shock-url']
    cls.wsURL = cls.cfg['workspace-url']
    cls.service_wizard_url = cls.cfg['srv-wiz-url']
    cls.wsClient = workspaceService(cls.wsURL)
    cls.ws = Workspace(cls.wsURL, token=cls.token)
    cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                           token=cls.token)

    # create workspace; millisecond-timestamp suffix keeps names unique
    wsname = 'test_expression_{}'.format(int(time.time() * 1000))
    cls.wsinfo = cls.wsClient.create_workspace({'workspace': wsname})
    print('created workspace ' + cls.getWsName())

    cls.serviceImpl = ExpressionUtils(cls.cfg)
    cls.readUtils = ReadsUtils(cls.callbackURL)
    cls.dfu = DataFileUtil(cls.callbackURL, service_ver='dev')
    cls.dfu.ws_name_to_id(wsname)
    cls.assemblyUtil = AssemblyUtil(cls.callbackURL)
    cls.gfu = GenomeFileUtil(cls.callbackURL)
    cls.gaAPI = GenomeAnnotationAPI(cls.service_wizard_url)
    cls.rau = ReadsAlignmentUtils(cls.callbackURL)
    cls.scratch = cls.cfg['scratch']

    cls.staged = {}
    cls.nodes_to_delete = []
    cls.handles_to_delete = []
    cls.setupTestData()
def setUpClass(cls):
    """One-time setup for kb_ballgown tests: load the deploy config,
    build an authenticated MethodContext, construct all service
    clients, create a fresh workspace, and stage test data."""
    cls.token = environ.get('KB_AUTH_TOKEN', None)
    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    parser = ConfigParser()
    parser.read(config_file)
    cls.cfg = dict(parser.items('kb_ballgown'))

    # Getting username from Auth profile for token
    auth_client = _KBaseAuth(cls.cfg['auth-service-url'])
    user_id = auth_client.get_user(cls.token)

    # WARNING: don't call any logging methods on the context object,
    # it'll result in a NoneType error
    cls.ctx = MethodContext(None)
    cls.ctx.update({
        'token': cls.token,
        'user_id': user_id,
        'provenance': [{
            'service': 'kb_ballgown',
            'method': 'please_never_use_it_in_production',
            'method_params': []
        }],
        'authenticated': 1
    })

    cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                           token=cls.token)
    cls.shockURL = cls.cfg['shock-url']
    cls.wsURL = cls.cfg['workspace-url']
    cls.wsClient = workspaceService(cls.wsURL, token=cls.token)
    cls.serviceImpl = kb_ballgown(cls.cfg)
    cls.scratch = cls.cfg['scratch']
    cls.callback_url = os.environ['SDK_CALLBACK_URL']

    cls.gfu = GenomeFileUtil(cls.callback_url)
    cls.dfu = DataFileUtil(cls.callback_url)
    cls.ru = ReadsUtils(cls.callback_url)
    cls.rau = ReadsAlignmentUtils(cls.callback_url, service_ver='dev')
    cls.eu = ExpressionUtils(cls.callback_url, service_ver='dev')
    cls.set_api = SetAPI(cls.callback_url)

    # unique workspace per run (millisecond-timestamp suffix)
    cls.wsName = 'test_kb_ballgown_{}'.format(int(time.time() * 1000))
    cls.wsClient.create_workspace({'workspace': cls.wsName})

    cls.nodes_to_delete = []
    cls.handles_to_delete = []
    cls.prepare_data()
def getPairedEndLibInfo(self):
    """Return (and memoize on the class) object info for a test
    KBaseFile.PairedEndLibrary built from the small fastq files,
    preferring a persisted JSON copy when one exists."""
    if hasattr(self.__class__, 'pairedEndLibInfo'):
        return self.__class__.pairedEndLibInfo

    # reuse the persisted test json file when available
    testFile = 'data/testPairedEndLibInfo.json'
    if os.path.exists(testFile):
        logger.info("Reading pairedEndLibInfo from {}".format(testFile))
        with open(testFile) as testInfoFile:
            return json.load(testInfoFile)

    token = self.ctx['token']

    def upload(path):
        # push one fastq file to shock, returning the node record
        return self.curl_upload_file_to_shock(
            shock_service_url=self.shockURL, filePath=path, token=token)

    def mint_handle(node):
        # persist a handle pointing at an uploaded shock node
        return hs.persist_handle({
            'id': node['id'],
            'type': 'shock',
            'url': self.shockURL,
            'file_name': node['file']['name'],
            'remote_md5': node['file']['checksum']['md5']
        })

    def as_lib(node, handle):
        # build one KBaseFile library entry for a node + handle pair
        return {
            'file': {
                'hid': handle,
                'file_name': node['file']['name'],
                'id': node['id'],
                'url': self.shockURL,
                'type': 'shock',
                'remote_md5': node['file']['checksum']['md5']
            },
            'encoding': 'UTF8',
            'type': 'fastq',
            'size': node['file']['size']
        }

    # 1) upload files to shock
    fwd_node = upload('/kb/module/data/small.forward.fq')
    rev_node = upload('/kb/module/data/small.reverse.fq')

    # 2) create handles
    hs = HandleService(url=self.handleURL, token=token)
    fwd_handle = mint_handle(fwd_node)
    rev_handle = mint_handle(rev_node)

    # 3) save to WS
    paired_end_library = {
        'lib1': as_lib(fwd_node, fwd_handle),
        'lib2': as_lib(rev_node, rev_handle),
        'interleaved': 0,
        'sequencing_tech': 'artificial reads'
    }
    new_obj_info = self.ws.save_objects({
        'workspace': self.getWsName(),
        'objects': [{
            'type': 'KBaseFile.PairedEndLibrary',
            'data': paired_end_library,
            'name': 'test.pe.reads',
            'meta': {},
            'provenance': [{
                'service': 'AssemblyRAST',
                'method': 'test_kiki'
            }]
        }]
    })
    self.__class__.pairedEndLibInfo = new_obj_info[0]
    logger.info("pairedEndLibInfo='{}'".format(json.dumps(
        new_obj_info[0])))
    return new_obj_info[0]
def getPairedEndInterleavedLibInfo(self, read_lib_basename, lib_i=0):
    """Return (and memoize per lib_i) object info for an interleaved
    KBaseFile.PairedEndLibrary built from data/<basename>.inter.fq.

    A cached entry is reused only when its basename matches the
    request; a stale entry is invalidated and the library re-uploaded.

    :param read_lib_basename: basename of the interleaved fastq file
    :param lib_i: slot index in the class-level cache lists
    """
    if hasattr(self.__class__, 'pairedEndLibInfo_list'):
        try:
            info = self.__class__.pairedEndLibInfo_list[lib_i]
            name = self.__class__.pairedEndLibName_list[lib_i]
            if info is not None:
                if name != read_lib_basename:
                    # BUGFIX: invalidate the *pairedEnd* cache entries;
                    # the original clobbered singleEndLib* lists, and the
                    # resulting AttributeError was hidden by a bare except.
                    self.__class__.pairedEndLibInfo_list[lib_i] = None
                    self.__class__.pairedEndLibName_list[lib_i] = None
                else:
                    return info
        except (AttributeError, IndexError):
            pass  # cache miss — fall through and build the library

    # 1) upload files to shock
    token = self.ctx['token']
    forward_shock_file = self.upload_file_to_shock(
        'data/' + read_lib_basename + '.inter.fq')
    #pprint(forward_shock_file)
    #pprint(reverse_shock_file)

    # 2) create handle
    hs = HandleService(url=self.handleURL, token=token)
    forward_handle = hs.persist_handle({
        'id': forward_shock_file['shock_id'],
        'type': 'shock',
        'url': self.shockURL,
        'file_name': forward_shock_file['node_file_name'],
        'remote_md5': forward_shock_file['handle']['remote_md5']})

    # 3) save to WS
    paired_end_library = {
        'lib1': {
            'file': {
                'hid': forward_handle,
                'file_name': forward_shock_file['node_file_name'],
                'id': forward_shock_file['shock_id'],
                'url': self.shockURL,
                'type': 'shock',
                'remote_md5': forward_shock_file['handle']['remote_md5']
            },
            'encoding': 'UTF8',
            'type': 'fastq',
            'size': forward_shock_file['size']
        },
        'interleaved': 1,
        'sequencing_tech': 'artificial reads'
    }
    new_obj_info = self.wsClient.save_objects({
        'workspace': self.getWsName(),
        'objects': [{
            'type': 'KBaseFile.PairedEndLibrary',
            'data': paired_end_library,
            'name': 'test-' + str(lib_i) + '.pe.reads',
            'meta': {},
            'provenance': [{
                'service': 'kb_ea_utils',
                'method': 'test_run_ea-utils'
            }]
        }]
    })[0]

    # store it, growing the cache lists to cover lib_i if needed
    if not hasattr(self.__class__, 'pairedEndLibInfo_list'):
        self.__class__.pairedEndLibInfo_list = []
        self.__class__.pairedEndLibName_list = []
    while len(self.__class__.pairedEndLibInfo_list) <= lib_i:
        self.__class__.pairedEndLibInfo_list.append(None)
        self.__class__.pairedEndLibName_list.append(None)
    self.__class__.pairedEndLibInfo_list[lib_i] = new_obj_info
    self.__class__.pairedEndLibName_list[lib_i] = read_lib_basename
    return new_obj_info
# Upload the paired-end test fastq files to the CI shock service.
# NOTE(review): this fragment relies on `token`, `upload_file_to_shock`,
# `pprint`, HANDLE_URL and SHOCK_URL being defined earlier in the script —
# they are not visible here; confirm before reuse.
forward_shock_file = upload_file_to_shock(
    shock_service_url = 'https://ci.kbase.us/services/shock-api',
    filePath = 'small.forward.fq',
    token = token
    )
reverse_shock_file = upload_file_to_shock(
    shock_service_url = 'https://ci.kbase.us/services/shock-api',
    filePath = 'small.reverse.fq',
    token = token
    )
pprint(forward_shock_file)
pprint(reverse_shock_file)

# 2) create handle
# Persist one handle per uploaded node so workspace objects can
# reference the files by handle id.
hs = HandleService(url=HANDLE_URL, token=token)
forward_handle = hs.persist_handle({
    'id' : forward_shock_file['id'],
    'type' : 'shock',
    'url' : SHOCK_URL,
    'file_name': forward_shock_file['file']['name'],
    'remote_md5': forward_shock_file['file']['checksum']['md5']})
reverse_handle = hs.persist_handle({
    'id' : reverse_shock_file['id'],
    'type' : 'shock',
    'url' : SHOCK_URL,
    'file_name': reverse_shock_file['file']['name'],
    'remote_md5': reverse_shock_file['file']['checksum']['md5']})

pprint(forward_handle)
pprint(reverse_handle)
def upload_SingleEndLibrary_to_shock_and_ws(self,
                                            ctx,
                                            console,  # DEBUG
                                            workspace_name,
                                            obj_name,
                                            file_path,
                                            provenance,
                                            sequencing_tech):
    """Upload file_path to shock, persist a handle for it, and save a
    KBaseFile.SingleEndLibrary named obj_name in workspace_name.
    Returns the first field of the saved object's info."""
    self.log(
        console,
        'UPLOADING FILE ' + file_path + ' TO ' + workspace_name + '/' +
        obj_name)

    # 1) upload files to shock
    token = ctx['token']
    shock_node = self.upload_file_to_shock(
        console,  # DEBUG
        shock_service_url=self.shockURL,
        filePath=file_path,
        token=token)
    self.log(console, 'SHOCK UPLOAD DONE')

    # 2) create handle
    self.log(console, 'GETTING HANDLE')
    node_name = shock_node['file']['name']
    node_md5 = shock_node['file']['checksum']['md5']
    hs = HandleService(url=self.handleURL, token=token)
    lib_handle = hs.persist_handle({
        'id': shock_node['id'],
        'type': 'shock',
        'url': self.shockURL,
        'file_name': node_name,
        'remote_md5': node_md5
    })

    # 3) save to WS
    self.log(console, 'SAVING TO WORKSPACE')
    single_end_library = {
        'lib': {
            'file': {
                'hid': lib_handle,
                'file_name': node_name,
                'id': shock_node['id'],
                'url': self.shockURL,
                'type': 'shock',
                'remote_md5': node_md5
            },
            'encoding': 'UTF8',
            'type': 'fasta',
            'size': shock_node['file']['size']
        },
        'sequencing_tech': sequencing_tech
    }
    self.log(console, 'GETTING WORKSPACE SERVICE OBJECT')
    ws = workspaceService(self.workspaceURL, token=ctx['token'])
    self.log(console, 'SAVE OPERATION...')
    saved = ws.save_objects({
        'workspace': workspace_name,
        'objects': [{
            'type': 'KBaseFile.SingleEndLibrary',
            'data': single_end_library,
            'name': obj_name,
            'meta': {},
            'provenance': provenance
        }]
    })[0]
    self.log(console, 'SAVED TO WORKSPACE')
    return saved[0]
def getPairedEndLibInfo(self):
    """Return (and memoize on the class) object info for a test
    KBaseFile.PairedEndLibrary built from the small fastq files."""
    if hasattr(self.__class__, 'pairedEndLibInfo'):
        # already created by an earlier test — reuse it
        return self.__class__.pairedEndLibInfo
    # 1) upload files to shock
    token = self.ctx['token']
    forward_shock_file = self.upload_file_to_shock(
        shock_service_url=self.shockURL,
        filePath='data/small.forward.fq',
        token=token)
    reverse_shock_file = self.upload_file_to_shock(
        shock_service_url=self.shockURL,
        filePath='data/small.reverse.fq',
        token=token)
    #pprint(forward_shock_file)
    #pprint(reverse_shock_file)

    # 2) create handle
    hs = HandleService(url=self.handleURL, token=token)
    handf = {
        'id': forward_shock_file['id'],
        'type': 'shock',
        'url': self.shockURL,
        'file_name': forward_shock_file['file']['name'],
        'remote_md5': forward_shock_file['file']['checksum']['md5']
    }
    forward_handle = hs.persist_handle(handf)
    handr = {
        'id': reverse_shock_file['id'],
        'type': 'shock',
        'url': self.shockURL,
        'file_name': reverse_shock_file['file']['name'],
        'remote_md5': reverse_shock_file['file']['checksum']['md5']
    }
    reverse_handle = hs.persist_handle(handr)

    # 3) save to WS
    paired_end_library = {
        'lib1': {
            'file': {
                'hid': forward_handle,
                'file_name': forward_shock_file['file']['name'],
                'id': forward_shock_file['id'],
                'url': self.shockURL,
                'type': 'shock',
                'remote_md5': forward_shock_file['file']['checksum']['md5']
            },
            'encoding': 'UTF8',
            'type': 'fastq',
            'size': forward_shock_file['file']['size']
        },
        'lib2': {
            'file': {
                'hid': reverse_handle,
                'file_name': reverse_shock_file['file']['name'],
                'id': reverse_shock_file['id'],
                'url': self.shockURL,
                'type': 'shock',
                'remote_md5': reverse_shock_file['file']['checksum']['md5']
            },
            'encoding': 'UTF8',
            'type': 'fastq',
            'size': reverse_shock_file['file']['size']
        },
        'interleaved': 0,
        'sequencing_tech': 'artificial reads',
        # read/insert statistics stored as library metadata
        'read_length_mean': 100,
        'insert_size_mean': 250,
        'insert_size_std_dev': 10,
        'total_bases': 125000,
        'read_orientation_outward': 1
    }
    ws_obj = {
        'workspace': self.getWsName(),
        'objects': [{
            'type': 'KBaseFile.PairedEndLibrary',
            'data': paired_end_library,
            'name': 'test.pe.reads',
            'meta': {},
            'provenance': [{
                'service': 'hipmer',
                'method': 'test_hipmer'
            }]
        }]
    }
    new_obj_info = self.ws.save_objects(ws_obj)
    self.__class__.pairedEndLibInfo = new_obj_info[0]
    return new_obj_info[0]