def _own_handle(self, genome_data, handle_property):
    """Ensure the handle stored under ``handle_property`` in ``genome_data``
    refers to a Shock node owned by the calling user.

    If the node belongs to another user, copy it via DataFileUtil (which
    also mints a new handle) and replace the handle id in ``genome_data``
    in place. No-op when ``handle_property`` is absent.
    """
    logging.info(
        'start checking handle {} ownership'.format(handle_property))
    if handle_property not in genome_data:
        return

    hid = genome_data[handle_property]
    handle_service = HandleService(self.handle_url, token=self.token)
    node_id = handle_service.hids_to_handles([hid])[0]['id']

    # Copy from DataFileUtil.own_shock_node implementation:
    auth_header = {'Authorization': 'Oauth {}'.format(self.token)}
    acl_resp = requests.get(
        self.shock_url + '/node/' + node_id + '/acl/?verbosity=full',
        headers=auth_header,
        allow_redirects=True)
    self._check_shock_response(
        acl_resp, 'Error getting ACLs for Shock node {}: '.format(node_id))

    node_owner = acl_resp.json()['data']['owner']['username']
    current_user = self.auth_client.get_user(self.token)
    if node_owner != current_user:
        logging.info('start copying node to owner: {}'.format(current_user))
        copied = self.dfu.copy_shock_node({
            'shock_id': node_id,
            'make_handle': True
        })
        genome_data[handle_property] = copied['handle']['hid']
def own_handle(self, genome, handle_property, ctx):
    """Make sure the handle under ``handle_property`` in ``genome`` points
    to a Shock node owned by the user in ``ctx``.

    If the node is owned by someone else, copy it, build a fresh handle
    for the copy, persist it via the Handle service, and store the new
    handle id back into ``genome``. No-op when the property is absent.

    :param genome: genome object data (mutated in place on ownership change)
    :param handle_property: key in ``genome`` holding the handle id
    :param ctx: KBase call context; ``token`` and ``user_id`` are read
    """
    # Idiom fix: `x not in d` instead of `not x in d` (PEP 8).
    if handle_property not in genome:
        return
    token = ctx['token']
    handle_id = genome[handle_property]
    hs = HandleService(self.handle_url, token=token)
    handles = hs.hids_to_handles([handle_id])
    shock_id = handles[0]['id']

    # Copy from DataFileUtil.own_shock_node implementation:
    header = {'Authorization': 'Oauth {}'.format(token)}
    res = requests.get(
        self.shock_url + '/node/' + shock_id + '/acl/?verbosity=full',
        headers=header,
        allow_redirects=True)
    self.check_shock_response(
        res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
    owner = res.json()['data']['owner']['username']

    if owner != ctx['user_id']:
        # Node belongs to another user: copy it, then persist a handle
        # describing the copy so the genome references data we own.
        shock_id = self.copy_shock_node(ctx, shock_id)
        r = requests.get(self.shock_url + '/node/' + shock_id,
                         headers=header,
                         allow_redirects=True)
        errtxt = ('Error downloading attributes from shock ' +
                  'node {}: ').format(shock_id)
        self.check_shock_response(r, errtxt)
        shock_data = r.json()['data']
        handle = {'id': shock_data['id'],
                  'type': 'shock',
                  'url': self.shock_url,
                  'file_name': shock_data['file']['name'],
                  'remote_md5': shock_data['file']['checksum']['md5']
                  }
        handle_id = hs.persist_handle(handle)
        genome[handle_property] = handle_id
def __init__(self, config):
    """Initialize service clients and settings from the deployment config.

    :param config: mapping that must supply SDK_CALLBACK_URL, scratch,
        KB_AUTH_TOKEN, USER_ID, handle-service-url, workspace-url and
        shock-url entries.
    """
    callback = config['SDK_CALLBACK_URL']
    self.callback_url = callback
    self.scratch = config['scratch']
    self.token = config['KB_AUTH_TOKEN']
    self.user_id = config['USER_ID']
    self.shock_url = config['shock-url']
    # Service clients used by the other methods of this class.
    self.dfu = DataFileUtil(callback)
    self.hs = AbstractHandle(config['handle-service-url'])
    self.ws_client = Workspace(config['workspace-url'])
def own_handle(self, genome, handle_property, ctx):
    """Make sure the handle under ``handle_property`` in ``genome`` points
    to a Shock node owned by the user in ``ctx``.

    If the node is owned by someone else, copy it, build a fresh handle
    for the copy, persist it via the Handle service, and store the new
    handle id back into ``genome``. No-op when the property is absent.

    :param genome: genome object data (mutated in place on ownership change)
    :param handle_property: key in ``genome`` holding the handle id
    :param ctx: KBase call context; ``token`` and ``user_id`` are read
    """
    # Idiom fix: `x not in d` instead of `not x in d` (PEP 8).
    if handle_property not in genome:
        return
    token = ctx['token']
    handle_id = genome[handle_property]
    hs = HandleService(self.handle_url, token=token)
    handles = hs.hids_to_handles([handle_id])
    shock_id = handles[0]['id']

    # Copy from DataFileUtil.own_shock_node implementation:
    header = {'Authorization': 'Oauth {}'.format(token)}
    res = requests.get(
        self.shock_url + '/node/' + shock_id + '/acl/?verbosity=full',
        headers=header,
        allow_redirects=True)
    self.check_shock_response(
        res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
    owner = res.json()['data']['owner']['username']

    if owner != ctx['user_id']:
        # Node belongs to another user: copy it, then persist a handle
        # describing the copy so the genome references data we own.
        shock_id = self.copy_shock_node(ctx, shock_id)
        r = requests.get(self.shock_url + '/node/' + shock_id,
                         headers=header,
                         allow_redirects=True)
        errtxt = ('Error downloading attributes from shock ' +
                  'node {}: ').format(shock_id)
        self.check_shock_response(r, errtxt)
        shock_data = r.json()['data']
        handle = {'id': shock_data['id'],
                  'type': 'shock',
                  'url': self.shock_url,
                  'file_name': shock_data['file']['name'],
                  'remote_md5': shock_data['file']['checksum']['md5']
                  }
        handle_id = hs.persist_handle(handle)
        genome[handle_property] = handle_id
def setUpClass(cls):
    """One-time test fixture setup: read the deployment config, build a
    call context for the test user, construct service clients, and create
    a throwaway workspace for the test run.
    """
    # Auth token and deployment config are injected via the environment.
    cls.token = os.environ.get('KB_AUTH_TOKEN', None)
    config_file = os.environ.get('KB_DEPLOYMENT_CONFIG', None)
    cls.cfg = {}
    config = ConfigParser()
    config.read(config_file)
    # Flatten the [kb_gtdbtk] section into a plain dict.
    for nameval in config.items('kb_gtdbtk'):
        cls.cfg[nameval[0]] = nameval[1]
    # Getting username from Auth profile for token
    authServiceUrl = cls.cfg['auth-service-url']
    auth_client = _KBaseAuth(authServiceUrl)
    user_id = auth_client.get_user(cls.token)
    # WARNING: don't call any logging methods on the context object,
    # it'll result in a NoneType error
    cls.ctx = MethodContext(None)
    cls.ctx.update({
        'token': cls.token,
        'user_id': user_id,
        'provenance': [{
            'service': 'kb_gtdbtk',
            'method': 'please_never_use_it_in_production',
            'method_params': []
        }],
        'authenticated': 1
    })
    cls.ws_url = cls.cfg['workspace-url']
    cls.shock_url = cls.cfg['shock-url']
    cls.ws = Workspace(cls.ws_url, token=cls.token)
    cls.serviceImpl = kb_gtdbtk(cls.cfg)
    cls.scratch = Path(cls.cfg['scratch']).absolute()
    cls.callback_url = os.environ['SDK_CALLBACK_URL']
    # Unique workspace name per run so parallel test runs don't collide.
    suffix = int(time.time() * 1000)
    cls.wsName = "test_gktb_tk_" + str(suffix)
    ret = cls.ws.create_workspace({'workspace': cls.wsName})
    cls.wsid = ret[0]
    cls.hs = AbstractHandle(cls.cfg['handle-service-url'],
                            token=cls.token)
    cls.au = AssemblyUtil(cls.callback_url, token=cls.token)
    cls.dfu = DataFileUtil(cls.callback_url, token=cls.token)
    # Populated by tests; cleaned up in tearDownClass (not shown here).
    cls.handles_to_delete = []
    cls.nodes_to_delete = []
def __init__(self, config):
    """Set up utility clients and paths from the supplied configuration.

    :param config: mapping with SDK_CALLBACK_URL, scratch, KB_AUTH_TOKEN
        and handle-service-url entries.
    """
    self.scratch = config['scratch']
    self.token = config['KB_AUTH_TOKEN']
    self.callback_url = config['SDK_CALLBACK_URL']
    # Clients for file transfer and handle management.
    self.dfu = DataFileUtil(self.callback_url)
    self.hs = AbstractHandle(config['handle-service-url'])
def getPairedEndLibInfo(self):
    """Upload the small test read pair to Shock, mint handles for both
    files, and save a KBaseFile.PairedEndLibrary object to the test
    workspace. The resulting object info is cached on the class so the
    upload happens at most once per test run.
    """
    if hasattr(self.__class__, 'pairedEndLibInfo'):
        return self.__class__.pairedEndLibInfo

    token = self.ctx['token']

    # 1) upload files to shock
    uploads = [
        self.upload_file_to_shock(
            shock_service_url=self.shockURL, filePath=path, token=token)
        for path in ('data/small.forward.fq', 'data/small.reverse.fq')
    ]
    forward_shock_file, reverse_shock_file = uploads
    pprint(forward_shock_file)
    pprint(reverse_shock_file)

    # 2) create handle
    hs = HandleService(url=self.handleURL, token=token)

    def _persist(node):
        # Register a handle describing one uploaded Shock node.
        return hs.persist_handle({
            'id': node['id'],
            'type': 'shock',
            'url': self.shockURL,
            'file_name': node['file']['name'],
            'remote_md5': node['file']['checksum']['md5']
        })

    forward_handle = _persist(forward_shock_file)
    reverse_handle = _persist(reverse_shock_file)

    # 3) save to WS
    def _lib(hid, node):
        # One lib entry of the PairedEndLibrary type for one read file.
        return {
            'file': {
                'hid': hid,
                'file_name': node['file']['name'],
                'id': node['id'],
                'url': self.shockURL,
                'type': 'shock',
                'remote_md5': node['file']['checksum']['md5']
            },
            'encoding': 'UTF8',
            'type': 'fastq',
            'size': node['file']['size']
        }

    paired_end_library = {
        'lib1': _lib(forward_handle, forward_shock_file),
        'lib2': _lib(reverse_handle, reverse_shock_file),
        'interleaved': 0,
        'sequencing_tech': 'artificial reads',
        'read_length_mean': 100,
        'insert_size_mean': 250,
        'insert_size_std_dev': 10,
        'total_bases': 125000,
        'read_orientation_outward': 1
    }

    ws_obj = {
        'workspace': self.getWsName(),
        'objects': [{
            'type': 'KBaseFile.PairedEndLibrary',
            'data': paired_end_library,
            'name': 'test.pe.reads',
            'meta': {},
            'provenance': [{
                'service': 'hipmer',
                'method': 'test_hipmer'
            }]
        }]
    }

    new_obj_info = self.ws.save_objects(ws_obj)
    self.__class__.pairedEndLibInfo = new_obj_info[0]
    return new_obj_info[0]
def test_update_taxon_assignments_valid(self):
    """
    Test a valid call to the update_taxon_assignments method.

    Exercises the full add / update / remove cycle for a single taxon
    key against a private copy of the source object saved in the test
    workspace.
    """
    key = str(uuid4())
    first_val = str(uuid4())
    second_val = str(uuid4())

    # Copy the object to test workspace
    dfu = DataFileUtil(self.callbackURL)
    src_ref = f"{_WORKSPACE_NAME}/{_OBJECT_NAME}"
    fetched = dfu.get_objects({'object_refs': [src_ref]})['data'][0]
    obj_data = fetched['data']

    # Create a user-owned handle in the object and update it, so the
    # saved copy references data this test user controls.
    hs = HandleService(self.handleURL)
    prev_handle_id = obj_data['genbank_handle_ref']
    prev_shock_id = hs.hids_to_handles([prev_handle_id])[0]['id']
    owned_hid = dfu.own_shock_node({
        'shock_id': prev_shock_id,
        'make_handle': 1
    })['handle']['hid']
    obj_data['genbank_handle_ref'] = owned_hid

    # Save new object in test workspace
    obj_info = fetched['info']
    save_spec = {
        'type': obj_info[2],
        'data': obj_data,
        'name': 'GCF_002287175.1'
    }
    test_ws_id = dfu.ws_name_to_id(self.wsName)
    infos = dfu.save_objects({'id': test_ws_id, 'objects': [save_spec]})
    new_ws_id = infos[0][6]
    new_obj_id = infos[0][0]
    get_obj_params = {
        'wsid': new_ws_id,
        'objid': new_obj_id,
        'included': ['/taxon_assignments']
    }

    def _update(extra):
        # Invoke the method under test against the saved copy.
        params = {'workspace_id': new_ws_id, 'object_id': new_obj_id}
        params.update(extra)
        self.serviceImpl.update_taxon_assignments(self.ctx, params)

    def _assignments():
        # Re-fetch just the taxon_assignments mapping from the workspace.
        data = self.wsClient.get_objects2(
            {'objects': [get_obj_params]})['data'][0]['data']
        return data['taxon_assignments']

    # Add a new assignment, then verify it is present.
    _update({'taxon_assignments': {key: first_val}})
    mapping = _assignments()
    self.assertTrue(key in mapping)
    self.assertEqual(mapping[key], first_val)

    # Update the assignment we just added, then verify the new value.
    _update({'taxon_assignments': {key: second_val}})
    mapping = _assignments()
    self.assertTrue(key in mapping)
    self.assertEqual(mapping[key], second_val)

    # Remove the assignment we just added, then verify it is gone.
    _update({'remove_assignments': [key]})
    mapping = _assignments()
    self.assertTrue(key not in mapping)
    self.assertEqual(mapping.get(key), None)