def test_loadGenome(self):
    """Load a test Genome object (and its ContigSet) into the test workspace.

    Ensures the test workspace and the "dependency workspace" exist, creating
    them when lookup fails, then saves the configured ContigSet and Genome
    JSON payloads as typed objects in the test workspace.
    """
    # Create the test workspace.
    wsClient = Workspace(self._config["workspace_url"], token=self._token)
    try:
        # See if the workspace exists.
        wsInfo = wsClient.get_workspace_info({"workspace": self._config["test_ws"]})
    except WorkspaceServerError:
        # Hopefully this means the workspace does not exist.
        # (It could also mean someone messed up setting up the URLs.)
        traceback.print_exc(file=sys.stderr)
        wsInfo = wsClient.create_workspace({"workspace": self._config["test_ws"]})
    # We also need to put in a mapping and a biochemistry object somewhere.
    # To do this, I just create a "dependency workspace" and pull them from there.
    try:
        # See if the workspace exists.
        wsInfo = wsClient.get_workspace_info({"workspace": self._config["dependency_ws"]})
    except WorkspaceServerError:
        # Hopefully this means the workspace does not exist.
        # (It could also mean someone messed up setting up the URLs.)
        depWsInfo = wsClient.create_workspace({"workspace": self._config["dependency_ws"]})
    # Load the ContigSet and Genome payloads. 'with' guarantees the file
    # handles are closed (the original json.load(open(...)) leaked them).
    with open(self._config['contigset_file'], 'r') as infile:
        testContigSet = json.load(infile)
    contigSetSaveData = {
        'type': 'KBaseGenomes.ContigSet',
        'name': self._config['contigsetid'],
        'data': testContigSet,
    }
    with open(self._config["genome_file"], "r") as infile:
        testGenome = json.load(infile)
    genomeSaveData = {
        'type': 'KBaseGenomes.Genome',
        'name': self._config['genomeid'],
        'data': testGenome,
    }
    wsClient.save_objects({
        'workspace': self._config['test_ws'],
        'objects': [genomeSaveData, contigSetSaveData],
    })
def upload_narrative(nar_file, auth_token, user_id, url=ci_ws, set_public=False):
    """
    Uploads a Narrative from a downloaded object file.
    This file needs to be in JSON format, and it expects all data and info
    that is usually returned by the Workspace.get_objects method.

    Returns a dict of four elements:
        ws: the id of the workspace that was created
        obj: the id of the narrative object
        refstr: the above two joined together into an object ref string
        ref: a NarrativeRef built from the workspace and object ids
    """
    # Read the narrative dump; 'with' guarantees the handle is closed even
    # if JSON parsing raises (the original opened/closed it manually).
    with open(nar_file, "r") as f:
        nar = json.load(f)

    # Copy the module-level template so repeated uploads do not mutate the
    # shared ws_metadata dict (the original aliased it and wrote into it).
    current_nar_metadata = dict(ws_metadata)
    current_nar_metadata["narrative_nice_name"] = nar["data"]["metadata"]["name"]
    ws_client = Workspace(url=url, token=auth_token)

    # Create the new workspace for the narrative; the timestamp suffix keeps
    # the workspace name unique across test runs.
    ws_info = ws_client.create_workspace(
        {
            "workspace": "{}:{}".format(user_id, str(time.time()).replace(".", "")),
            "meta": current_nar_metadata,
            "globalread": "r" if set_public else "n",
        }
    )
    ws_id = ws_info[0]

    # Set up and save the narrative object. (A bare no-op expression
    # statement `nar["info"][10]` sat here in the original — removed.)
    ws_save_obj = {
        "type": "KBaseNarrative.Narrative",
        "data": nar["data"],
        "name": nar["info"][1],
        "meta": nar["info"][10],
        "provenance": [
            {
                "script": "upload_narrative_test.py",
                "description": "Temporary Narrative uploaded for automated testing",
            }
        ],
    }
    obj_info = ws_client.save_objects({"id": ws_id, "objects": [ws_save_obj]})

    # Tweak the workspace's metadata to properly present its narrative.
    ws_client.alter_workspace_metadata(
        {"wsi": {"id": ws_id}, "new": {"narrative": obj_info[0][0]}}
    )

    return {
        "ws": ws_info[0],
        "obj": obj_info[0][0],
        "refstr": "{}/{}".format(ws_info[0], obj_info[0][0]),
        "ref": NarrativeRef({"wsid": ws_info[0], "objid": obj_info[0][0]}),
    }
def test_loadGenome(self):
    """Load a test Genome object (and its ContigSet) into the test workspace.

    Ensures the test workspace and the "dependency workspace" exist, creating
    them when lookup fails, then saves the configured ContigSet and Genome
    JSON payloads as typed objects in the test workspace.
    """
    # Create the test workspace.
    wsClient = Workspace(self._config["workspace_url"], token=self._token)
    try:
        # See if the workspace exists.
        wsInfo = wsClient.get_workspace_info(
            {"workspace": self._config["test_ws"]})
    except WorkspaceServerError:
        # Hopefully this means the workspace does not exist.
        # (It could also mean someone messed up setting up the URLs.)
        traceback.print_exc(file=sys.stderr)
        wsInfo = wsClient.create_workspace(
            {"workspace": self._config["test_ws"]})
    # We also need to put in a mapping and a biochemistry object somewhere.
    # To do this, I just create a "dependency workspace" and pull them from there.
    try:
        # See if the workspace exists.
        wsInfo = wsClient.get_workspace_info(
            {"workspace": self._config["dependency_ws"]})
    except WorkspaceServerError:
        # Hopefully this means the workspace does not exist.
        # (It could also mean someone messed up setting up the URLs.)
        depWsInfo = wsClient.create_workspace(
            {"workspace": self._config["dependency_ws"]})
    # Load the ContigSet and Genome payloads. 'with' guarantees the file
    # handles are closed (the original json.load(open(...)) leaked them).
    with open(self._config['contigset_file'], 'r') as infile:
        testContigSet = json.load(infile)
    contigSetSaveData = {
        'type': 'KBaseGenomes.ContigSet',
        'name': self._config['contigsetid'],
        'data': testContigSet,
    }
    with open(self._config["genome_file"], "r") as infile:
        testGenome = json.load(infile)
    genomeSaveData = {
        'type': 'KBaseGenomes.Genome',
        'name': self._config['genomeid'],
        'data': testGenome,
    }
    wsClient.save_objects({
        'workspace': self._config['test_ws'],
        'objects': [genomeSaveData, contigSetSaveData]
    })
def upload_narrative(nar_file, auth_token, user_id, url=ci_ws, set_public=False):
    """
    Uploads a Narrative from a downloaded object file.
    This file needs to be in JSON format, and it expects all data and info
    that is usually returned by the Workspace.get_objects method.

    Returns a dict of four elements:
        ws: the id of the workspace that was created
        obj: the id of the narrative object
        refstr: the above two joined together into an object ref string
        ref: a NarrativeRef built from the workspace and object ids
    """
    # Read the narrative dump; 'with' guarantees the handle is closed even
    # if JSON parsing raises (the original opened/closed it manually).
    with open(nar_file, 'r') as f:
        nar = json.load(f)

    # Copy the module-level template so repeated uploads do not mutate the
    # shared ws_metadata dict (the original aliased it and wrote into it).
    current_nar_metadata = dict(ws_metadata)
    current_nar_metadata['narrative_nice_name'] = nar['data']['metadata']['name']
    ws_client = Workspace(url=url, token=auth_token)

    # Create the new workspace for the narrative; the timestamp suffix keeps
    # the workspace name unique across test runs.
    ws_info = ws_client.create_workspace({
        'workspace': '{}:{}'.format(user_id, str(time.time()).replace('.', '')),
        'meta': current_nar_metadata,
        'globalread': 'r' if set_public else 'n'
    })
    ws_id = ws_info[0]

    # Set up and save the narrative object. (The original bound an unused
    # local `metadata = nar['info'][10]` here — removed.)
    ws_save_obj = {
        'type': 'KBaseNarrative.Narrative',
        'data': nar['data'],
        'name': nar['info'][1],
        'meta': nar['info'][10],
        'provenance': [{
            'script': 'upload_narrative_test.py',
            'description': 'Temporary Narrative uploaded for automated testing'
        }]
    }
    obj_info = ws_client.save_objects({'id': ws_id, 'objects': [ws_save_obj]})

    # Tweak the workspace's metadata to properly present its narrative.
    ws_client.alter_workspace_metadata(
        {'wsi': {'id': ws_id}, 'new': {'narrative': obj_info[0][0]}})

    return {
        'ws': ws_info[0],
        'obj': obj_info[0][0],
        'refstr': '{}/{}'.format(ws_info[0], obj_info[0][0]),
        'ref': NarrativeRef({'wsid': ws_info[0], 'objid': obj_info[0][0]})
    }
def setUp(cls):
    """Prepare workspaces and upload test input objects for CoExpression tests.

    Reads the auth token and deployment config from the environment, writes
    the token to a file for script tests, then for each input meta-data file
    creates (or reuses) a per-user workspace and saves the corresponding
    input data object into it. Exits the process if no auth token is set.
    """
    token = environ.get('KB_AUTH_TOKEN', None)
    if token is None:
        sys.stderr.write(
            "Error: Unable to run tests without authentication token!\n")
        sys.exit(1)
    # 'with' guarantees the token file is flushed and closed
    # (the original opened it and never closed it).
    with open('ltest/script_test/token.txt', 'w') as token_file:
        token_file.write(token)
    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    cls.cfg = {}
    config = ConfigParser()
    config.read(config_file)
    for nameval in config.items('CoExpression'):
        cls.cfg[nameval[0]] = nameval[1]
    auth_service_url = cls.cfg.get(
        'auth-service-url',
        "https://kbase.us/services/authorization/Sessions/Login")
    ws_url = cls.cfg['ws_url']
    auth_service_url_allow_insecure = cls.cfg[
        'auth-service-url-allow-insecure']
    auth_client = _KBaseAuth(auth_service_url)
    user_id = auth_client.get_user(token)
    ws = Workspace(
        url=ws_url, token=token, auth_svc=auth_service_url,
        trust_all_ssl_certificates=auth_service_url_allow_insecure)
    # Update input data in reverse order of references.
    ordered_file_list = [
        INPUT_META_DATA_DIR + '/test_diff_p_distribution_input_ref2.json',
        INPUT_META_DATA_DIR + '/test_diff_p_distribution_input_ref1.json',
        INPUT_META_DATA_DIR + '/test_diff_p_distribution_input.json',
        INPUT_META_DATA_DIR + '/test_view_heatmap_input_ref1.json',
        INPUT_META_DATA_DIR + '/test_view_heatmap_input.json',
        INPUT_META_DATA_DIR + '/test_coex_clust_input.json',
        INPUT_META_DATA_DIR + '/test_filter_genes_input.json'
    ]
    for filename in ordered_file_list:
        with open(filename, 'r') as infile:
            input_meta_data = json.load(infile)
        # Create workspace that is local to the user if it does not exist.
        workspace_name_t = Template(
            str(input_meta_data['params'][0]['workspace_name']))
        workspace_name = workspace_name_t.substitute(user_id=user_id)
        print('workspace_name: ' + workspace_name)
        try:
            ws_info = ws.get_workspace_info({'workspace': workspace_name})
            print("workspace already exists: " + str(ws_info))
        except Exception:
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # still propagate; the server errors when the workspace is absent.
            ws_info = ws.create_workspace({
                'workspace': workspace_name,
                'description':
                    'Workspace for ' + str(input_meta_data['method'])
            })
            print("Created new workspace: " + str(ws_info))
        print('reading input file: ' + filename)
        object_name = str(input_meta_data['params'][0]['object_name'])
        print('object_name: ' + object_name)
        input_data_filename = INPUT_DATA_DIR + '/' + object_name + '.json'
        print('input data filename: ' + input_data_filename)
        with open(input_data_filename, 'r') as infile:
            input_data = json.load(infile)
        # Update workspace name in input data via string templating.
        input_data_str = json.dumps(input_data)
        input_data_t = Template(input_data_str)
        input_data_str = input_data_t.substitute(
            workspace_name=workspace_name)
        input_data = json.loads(input_data_str)
        print('type: ' + input_data[0]['info'][2])
        # Upload data (no effect if data already exists in workspace).
        print('uploading input data to workspace')
        ws.save_objects({
            'workspace': workspace_name,
            'objects': [{
                'type': input_data[0]['info'][2],
                'data': input_data[0]['data'],
                'name': object_name
            }]
        })
        print('ws objects: ' +
              str(ws.list_objects({'workspaces': [workspace_name]})))
def setUp(cls):
    """Prepare workspaces and upload test input objects for CoExpression tests.

    Reads the auth token and deployment config from the environment, writes
    the token to a file for script tests, then for each input meta-data file
    creates (or reuses) a per-user workspace and saves the corresponding
    input data object into it. Exits the process if no auth token is set.
    """
    token = environ.get('KB_AUTH_TOKEN', None)
    if token is None:
        sys.stderr.write("Error: Unable to run tests without authentication token!\n")
        sys.exit(1)
    # 'with' guarantees the token file is flushed and closed
    # (the original opened it and never closed it).
    with open('ltest/script_test/token.txt', 'w') as token_file:
        token_file.write(token)
    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    cls.cfg = {}
    config = ConfigParser()
    config.read(config_file)
    for nameval in config.items('CoExpression'):
        cls.cfg[nameval[0]] = nameval[1]
    auth_service_url = cls.cfg.get('auth-service-url',
                                   "https://kbase.us/services/authorization/Sessions/Login")
    ws_url = cls.cfg['ws_url']
    auth_service_url_allow_insecure = cls.cfg['auth-service-url-allow-insecure']
    auth_client = _KBaseAuth(auth_service_url)
    user_id = auth_client.get_user(token)
    ws = Workspace(url=ws_url, token=token, auth_svc=auth_service_url,
                   trust_all_ssl_certificates=auth_service_url_allow_insecure)
    # update input data in reverse order of references
    ordered_file_list = [INPUT_META_DATA_DIR+'/test_diff_p_distribution_input_ref2.json',
                         INPUT_META_DATA_DIR+'/test_diff_p_distribution_input_ref1.json',
                         INPUT_META_DATA_DIR+'/test_diff_p_distribution_input.json',
                         INPUT_META_DATA_DIR+'/test_view_heatmap_input_ref1.json',
                         INPUT_META_DATA_DIR+'/test_view_heatmap_input.json',
                         INPUT_META_DATA_DIR+'/test_coex_clust_input.json',
                         INPUT_META_DATA_DIR+'/test_filter_genes_input.json']
    for filename in ordered_file_list:
        with open(filename, 'r') as infile:
            input_meta_data = json.load(infile)
        # create workspace that is local to the user if it does not exist
        workspace_name_t = Template(str(input_meta_data['params'][0]['workspace_name']))
        workspace_name = workspace_name_t.substitute(user_id=user_id)
        print('workspace_name: ' + workspace_name)
        try:
            ws_info = ws.get_workspace_info({'workspace': workspace_name})
            print("workspace already exists: " + str(ws_info))
        except Exception:
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # still propagate; the server errors when the workspace is absent.
            ws_info = ws.create_workspace(
                {'workspace': workspace_name,
                 'description': 'Workspace for ' + str(input_meta_data['method'])})
            print("Created new workspace: " + str(ws_info))
        print('reading input file: '+filename)
        object_name = str(input_meta_data['params'][0]['object_name'])
        print('object_name: '+object_name)
        input_data_filename = INPUT_DATA_DIR + '/' + object_name + '.json'
        print('input data filename: ' + input_data_filename)
        with open(input_data_filename, 'r') as infile:
            input_data = json.load(infile)
        # update workspace name in input data via string templating
        input_data_str = json.dumps(input_data)
        input_data_t = Template(input_data_str)
        input_data_str = input_data_t.substitute(workspace_name=workspace_name)
        input_data = json.loads(input_data_str)
        print('type: '+input_data[0]['info'][2])
        # upload data (no effect if data already exists in workspace)
        print('uploading input data to workspace')
        ws.save_objects(
            {'workspace': workspace_name,
             'objects': [{'type': input_data[0]['info'][2],
                          'data': input_data[0]['data'],
                          'name': object_name}]})
        print('ws objects: ' + str(ws.list_objects({'workspaces': [workspace_name]})))