def test_list_object_with_sets(self):
    """List a workspace containing a ReadsSet and verify set expansion,
    data-palette refs, lookup by numeric ws_id, and type filtering."""
    workspace = self.createWs()
    reads_ref = self.__class__.example_reads_ref
    set_name = "MyReadsSet.1"
    set_api = SetAPI(self.__class__.serviceWizardURL,
                     token=self.getContext()['token'],
                     service_ver=self.__class__.SetAPI_version)
    set_api.save_reads_set_v1({'workspace': workspace,
                               'output_object_name': set_name,
                               'data': {'description': '',
                                        'items': [{'ref': reads_ref}]}})
    listing = self.getImpl().list_objects_with_sets(
        self.getContext(), {"ws_name": workspace})[0]
    items = listing['data']
    self.assertTrue(len(items) > 0)
    # Exactly one listed object should be a set, expanded with one item.
    sets_seen = 0
    for entry in items:
        self.assertTrue("object_info" in entry)
        if "set_items" not in entry:
            continue
        sets_seen += 1
        self.assertEqual(1, len(entry["set_items"]["set_items_info"]))
    self.assertEqual(1, sets_seen)
    self.assertIn('data_palette_refs', listing)
    # The same listing must be reachable by numeric workspace id.
    ws_id = self.getWsClient().get_workspace_info(
        {"workspace": workspace})[0]
    by_id = self.getImpl().list_objects_with_sets(
        self.getContext(), {"ws_id": ws_id})[0]["data"]
    self.assertEqual(len(items), len(by_id))
    # A type filter matching the saved set returns only that type.
    matching_type = "KBaseSets.ReadsSet"
    filtered = self.getImpl().list_objects_with_sets(self.getContext(), {
        "types": [matching_type],
        "workspaces": [str(ws_id)]})[0]["data"]
    self.assertTrue(len(filtered) > 0)
    for entry in filtered:
        self.assertEqual(matching_type,
                         entry['object_info'][2].split('-')[0])
    # A type filter with no matching objects returns nothing.
    unmatched = self.getImpl().list_objects_with_sets(self.getContext(), {
        "types": ["KBaseGenomes.Genome"],
        "workspaces": [str(ws_id)]})[0]["data"]
    self.assertTrue(len(unmatched) == 0)
def test_two_users_set_inside_dp(self):
    """A second user must be able to list and read a ReadsSet (and its
    items) through a DataPalette copy even after the source workspace
    has been un-shared."""
    ws_name1_1 = self.createWs()
    # Inject a real copy of the example reads object into workspace 1.
    orig_reads_obj_ref = self.__class__.example_reads_ref
    reads_obj_name = "TestReads"
    self.getWsClient().copy_object(
        {'from': {'ref': orig_reads_obj_ref},
         'to': {'workspace': ws_name1_1, 'name': reads_obj_name}})
    copy_reads_obj_ref = ws_name1_1 + '/' + reads_obj_name
    ws_name1_2 = self.createWs()
    set_obj_name = "MyReadsSet.1"
    sapi = SetAPI(self.__class__.serviceWizardURL,
                  token=self.getContext()['token'],
                  service_ver=self.__class__.SetAPI_version)
    sapi.save_reads_set_v1(
        {'workspace': ws_name1_2,
         'output_object_name': set_obj_name,
         'data': {'description': '',
                  'items': [{'ref': copy_reads_obj_ref}]}})
    orig_set_ref = ws_name1_2 + '/' + set_obj_name
    # Making DP-copy of reads set object by user2
    ws_name2 = self.createWs2()
    # Temporarily share the workspace containing the set with user2.
    self.getWsClient().set_permissions(
        {'workspace': ws_name1_2, 'new_permission': 'r',
         'users': [self.getContext2()['user_id']]})
    # Import reads set ref into DataPalette of user2's workspace.
    dps = DataPaletteService(self.__class__.serviceWizardURL,
                             token=self.getContext2()['token'],
                             service_ver=self.__class__.DataPalette_version)
    dps.add_to_palette({'workspace': ws_name2,
                        'new_refs': [{'ref': orig_set_ref}]})
    dp_ref_map = dps.list_data(
        {'workspaces': [ws_name2]})['data_palette_refs']
    # FIX: next(iter(...)) replaces itervalues().next(), which is
    # Python-2-only; this form behaves the same on both 2 and 3.
    set_ref_path = next(iter(dp_ref_map.values())) + ';' + orig_set_ref
    reads_ref_path = set_ref_path + ';' + copy_reads_obj_ref
    # Un-share the original workspace again.
    self.getWsClient().set_permissions(
        {'workspace': ws_name1_2, 'new_permission': 'n',
         'users': [self.getContext2()['user_id']]})
    # User2 should still see exactly one object: the set, expanded
    # with its single reads item.
    ret = self.getImpl().list_objects_with_sets(
        self.getContext2(), {"ws_name": ws_name2})[0]["data"]
    self.assertEqual(1, len(ret))
    item = ret[0]
    self.assertTrue('set_items' in item)
    self.assertTrue('set_items_info' in item['set_items'])
    self.assertEqual(1, len(item['set_items']['set_items_info']))
    # Check access to reads and to set objects via the palette ref paths.
    info = self.getWsClient2().get_object_info_new(
        {'objects': [{'ref': set_ref_path}]})[0]
    self.assertEqual(set_obj_name, info[1])
    info = self.getWsClient2().get_object_info_new(
        {'objects': [{'ref': reads_ref_path}]})[0]
    self.assertEqual(reads_obj_name, info[1])
def test_unique_items(self):
    """Listing two workspaces, where one holds DataPalette copies of the
    other's objects, must not produce duplicate entries."""
    # Original workspace with a fake reads object plus a ReadsSet.
    src_ws = self.createWs()
    fake_objects = FakeObjectsForTests(os.environ['SDK_CALLBACK_URL'])
    reads_name = "test.reads.1"
    fake_objects.create_fake_reads({'ws_name': src_ws,
                                    'obj_names': [reads_name]})
    reads_ref = src_ws + '/' + reads_name
    set_name = "test.reads_set.1"
    set_api = SetAPI(self.__class__.serviceWizardURL,
                     token=self.getContext()['token'],
                     service_ver=self.__class__.SetAPI_version)
    set_api.save_reads_set_v1({'workspace': src_ws,
                               'output_object_name': set_name,
                               'data': {'description': '',
                                        'items': [{'ref': reads_ref}]}})
    set_ref = src_ws + '/' + set_name
    # Second workspace holding DataPalette copies of both objects.
    palette_ws = self.createWs()
    dps = DataPaletteService(self.__class__.serviceWizardURL,
                             token=self.getContext()['token'],
                             service_ver=self.__class__.DataPalette_version)
    dps.add_to_palette({'workspace': palette_ws,
                        'new_refs': [{'ref': reads_ref},
                                     {'ref': set_ref}]})
    # Listing both workspaces at once must yield each object exactly once.
    listing = self.getImpl().list_objects_with_sets(
        self.getContext(), {"workspaces": [src_ws, palette_ws]})[0]["data"]
    self.assertEqual(2, len(listing))
def load_reads_set(callback_url, ws_name, reads_set, target_name):
    """Combine a list of reads references into a saved ReadsSet object.

    The previous docstring referred to a ``file_rev`` parameter that does
    not exist in this function; it has been removed as stale copy-paste.

    :param callback_url: SetAPI service endpoint to talk to.
    :param ws_name: workspace to save the set into.
    :param reads_set: list of set items, each a dict with at least a
        'ref' key pointing at a reads object (optionally a 'label').
    :param target_name: object name for the new ReadsSet.
    :returns: the workspace reference string of the saved set.
    """
    set_client = SetAPI(callback_url)
    set_output = set_client.save_reads_set_v1({
        "workspace": ws_name,
        "output_object_name": target_name,
        "data": {
            "description": "reads set for testing",
            "items": reads_set
        }
    })
    return set_output["set_ref"]
def save_read_set(self, ctx, params):
    """
    :param params: instance of type "save_read_set_params" (** ** Method
       for adding Reads objects to a Reads Set) -> structure: parameter
       "workspace_name" of String, parameter "output_readset_name" of
       String, parameter "input_reads_list" of list of String, parameter
       "desc" of String
    :returns: instance of type "save_read_set_output" -> structure:
       parameter "report_name" of String, parameter "report_ref" of
       String
    """
    # ctx is the context object
    # return variables are: returnVal
    #BEGIN save_read_set
    invalid_msgs = []
    report = ''

    # Basic parameter checks: every one of these keys must be present
    # (error messages are unchanged from the original per-key checks).
    for required in ('workspace_name', 'desc', 'input_reads_list',
                     'output_readset_name'):
        if required not in params:
            raise ValueError(required + ' parameter is required')

    # Build the ReadsSet save request for the SetAPI service.
    # NOTE(review): key is 'workspace_name' here but other call sites in
    # this file pass 'workspace' to save_reads_set_v1 — confirm SetAPI
    # accepts both before changing.
    save_reads_set_params = {
        'workspace_name': params['workspace_name'],
        'output_object_name': params['output_readset_name'],
    }
    readset_data = {}
    if params['desc'] is not None:
        readset_data['description'] = params['desc']
    readset_data['items'] = []

    print("WS " + params['workspace_name'])
    print("READS " + str(params['input_reads_list']))

    # Add every requested reads object (by workspace-local name) as a
    # set item; labels are intentionally left empty.
    for reads_name in params['input_reads_list']:
        readset_data['items'].append({
            'ref': params['workspace_name'] + '/' + reads_name,
            'label': '',
        })
    save_reads_set_params['data'] = readset_data

    # Load method provenance from the context object and record the
    # input reads objects (replaces the former bare try/except probe).
    provenance = [{}]
    if 'provenance' in ctx:
        provenance = ctx['provenance']
    provenance[0].setdefault('input_ws_objects', [])
    for reads_name in params['input_reads_list']:
        provenance[0]['input_ws_objects'].append(
            params['workspace_name'] + '/' + reads_name)
    provenance[0]['service'] = 'ReadssetEditor'
    provenance[0]['method'] = 'save_read_set'

    # Save the set via the SetAPI dynamic service.
    set_api = SetAPI(url=self.servicewizardURL, token=ctx['token'])
    set_api.save_reads_set_v1(save_reads_set_params)

    # Build the report object.
    if len(invalid_msgs) == 0:
        # BUG FIX: the original counted an always-empty 'elements' dict,
        # so the report claimed 0 reads; count the saved items instead.
        report += ('reads in output set ' + params['output_readset_name'] +
                   ': ' + str(len(readset_data['items'])) + "\n")
        report_obj = {
            'objects_created': [{
                'ref': params['workspace_name'] + '/' +
                       params['output_readset_name'],
                'description': 'save_read_set'
            }],
            'text_message': report
        }
    else:
        report += "FAILURE:\n\n" + "\n".join(invalid_msgs) + "\n"
        report_obj = {'objects_created': [], 'text_message': report}

    # NOTE(review): uuid.getnode() is constant per host, so this report
    # name can collide across runs; kept for backward compatibility.
    report_name = 'save_read_set_report_' + str(hex(uuid.getnode()))
    ws = workspaceService(self.workspaceURL, token=ctx['token'])
    report_obj_info = ws.save_objects({
        'workspace': params['workspace_name'],
        'objects': [{
            'type': 'KBaseReport.Report',
            'data': report_obj,
            'name': report_name,
            'meta': {},
            'hidden': 1,
            'provenance': provenance
        }]
    })[0]

    # Build the return structure: report name plus its full ws reference.
    returnVal = {
        'report_name': report_name,
        'report_ref': str(report_obj_info[6]) + '/' +
                      str(report_obj_info[0]) + '/' +
                      str(report_obj_info[4]),
    }
    #END save_read_set

    # At some point might do deeper type checking...
    if not isinstance(returnVal, dict):
        raise ValueError('Method save_read_set return value ' +
                         'returnVal is not type dict as required.')
    # return the results
    return [returnVal]