Example #1
0
    def _own_handle(self, genome_data, handle_property):
        """
        _own_handle: check that handle_property points to a Shock node owned by the calling user
        """

        logging.info(
            'start checking handle {} ownership'.format(handle_property))

        if handle_property in genome_data:
            handle_id = genome_data[handle_property]
            hs = HandleService(self.handle_url, token=self.token)
            handles = hs.hids_to_handles([handle_id])
            shock_id = handles[0]['id']

            # Copied from the DataFileUtil.own_shock_node implementation:
            header = {'Authorization': 'Oauth {}'.format(self.token)}
            res = requests.get(self.shock_url + '/node/' + shock_id +
                               '/acl/?verbosity=full',
                               headers=header,
                               allow_redirects=True)
            self._check_shock_response(
                res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
            owner = res.json()['data']['owner']['username']
            user_id = self.auth_client.get_user(self.token)

            if owner != user_id:
                logging.info('start copying node to owner: {}'.format(user_id))
                dfu_shock = self.dfu.copy_shock_node({
                    'shock_id': shock_id,
                    'make_handle': True
                })
                handle_id = dfu_shock['handle']['hid']
                genome_data[handle_property] = handle_id
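An alternative sketch (not part of the original code): the same ownership check and copy can be delegated entirely to DataFileUtil.own_shock_node, assuming a DataFileUtil client self.dfu and the same handle-service settings used in the method above:

    def _own_handle_via_dfu(self, genome_data, handle_property):
        """Sketch: let DataFileUtil check the ACLs and copy the node if needed."""
        if handle_property not in genome_data:
            return
        hs = HandleService(self.handle_url, token=self.token)
        shock_id = hs.hids_to_handles([genome_data[handle_property]])[0]['id']
        # own_shock_node copies the node only when the caller is not the owner
        # and returns a handle for the (possibly new) node.
        owned = self.dfu.own_shock_node({'shock_id': shock_id, 'make_handle': 1})
        genome_data[handle_property] = owned['handle']['hid']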
Example #2
0
    def setUpClass(cls):
        cls.token = environ.get('KB_AUTH_TOKEN', None)
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('GenericsAPI'):
            cls.cfg[nameval[0]] = nameval[1]
        # Getting username from Auth profile for token
        authServiceUrl = cls.cfg['auth-service-url']
        auth_client = _KBaseAuth(authServiceUrl)
        user_id = auth_client.get_user(cls.token)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({'token': cls.token,
                        'user_id': user_id,
                        'provenance': [
                            {'service': 'GenericsAPI',
                             'method': 'please_never_use_it_in_production',
                             'method_params': []}],
                        'authenticated': 1})
        cls.wsURL = cls.cfg['workspace-url']
        cls.wsClient = workspaceService(cls.wsURL)
        cls.serviceImpl = GenericsAPI(cls.cfg)
        cls.scratch = cls.cfg['scratch']
        cls.callback_url = os.environ['SDK_CALLBACK_URL']
        cls.shockURL = cls.cfg['shock-url']
        cls.dfu = DataFileUtil(cls.callback_url)
        cls.sample_uploader = sample_uploader(cls.callback_url,
                                              service_ver="dev")
        cls.sample_url = cls.cfg.get('kbase-endpoint') + '/sampleservice'
        cls.sample_ser = SampleService(cls.sample_url)
        cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                               token=cls.token)

        suffix = int(time.time() * 1000)
        cls.wsName = "test_GenericsAPI_" + str(suffix)
        ret = cls.wsClient.create_workspace({'workspace': cls.wsName})
        cls.wsId = ret[0]

        small_file = os.path.join(cls.scratch, 'test.txt')
        with open(small_file, "w") as f:
            f.write("empty content")
        cls.test_shock = cls.dfu.file_to_shock({
            'file_path': small_file,
            'make_handle': True
        })
        cls.handles_to_delete = []
        cls.nodes_to_delete = []
        cls.handles_to_delete.append(cls.test_shock['handle']['hid'])
        cls.nodes_to_delete.append(cls.test_shock['shock_id'])

        cls.prepare_data()
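The handles_to_delete and nodes_to_delete lists built above imply a matching cleanup step; a minimal tearDownClass sketch, assuming requests is imported and that the handle-service client exposes delete_handles (all other names follow the attributes set in setUpClass):

    @classmethod
    def tearDownClass(cls):
        if hasattr(cls, 'wsName'):
            cls.wsClient.delete_workspace({'workspace': cls.wsName})
        # remove the Shock nodes staged by the tests
        header = {'Authorization': 'Oauth {}'.format(cls.token)}
        for node_id in cls.nodes_to_delete:
            requests.delete(cls.shockURL + '/node/' + node_id,
                            headers=header, allow_redirects=True)
        # delete_handles takes full handle records, not bare handle ids
        if cls.handles_to_delete:
            cls.hs.delete_handles(cls.hs.hids_to_handles(cls.handles_to_delete))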
Example #3
0
    def setUpClass(cls):
        cls.maxDiff = 70000
        cls.token = os.environ.get('KB_AUTH_TOKEN', None)
        config_file = os.environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('GenericsAPI'):
            cls.cfg[nameval[0]] = nameval[1]
        # Getting username from Auth profile for token
        authServiceUrl = cls.cfg['auth-service-url']
        auth_client = _KBaseAuth(authServiceUrl)
        user_id = auth_client.get_user(cls.token)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({'token': cls.token,
                        'user_id': user_id,
                        'provenance': [
                            {'service': 'GenericsAPI',
                             'method': 'please_never_use_it_in_production',
                             'method_params': []
                             }],
                        'authenticated': 1})
        cls.wsURL = cls.cfg['workspace-url']
        cls.wsClient = workspaceService(cls.wsURL)
        cls.serviceImpl = GenericsAPI(cls.cfg)
        cls.serviceUtils = AttributesUtil(cls.cfg)
        cls.shockURL = cls.cfg['shock-url']
        cls.scratch = cls.cfg['scratch']
        cls.callback_url = os.environ['SDK_CALLBACK_URL']
        cls.dfu = DataFileUtil(cls.callback_url)
        cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                               token=cls.token)

        suffix = int(time.time() * 1000)
        cls.wsName = "test_CompoundSetUtils_" + str(suffix)
        ret = cls.wsClient.create_workspace({'workspace': cls.wsName})
        cls.wsId = ret[0]
        cls.attribute_mapping = json.load(open('data/AM1.json'))
        info = cls.dfu.save_objects({
            "id": cls.wsId,
            "objects": [{
                "type": "KBaseExperiments.AttributeMapping",
                "data": cls.attribute_mapping,
                "name": "test_cond_set"
            }]
        })[0]
        cls.attribute_mapping_ref = "%s/%s/%s" % (info[6], info[0], info[4])
        cls.attribute_mapping_2 = json.load(open('data/AM2.json'))

        small_file = os.path.join(cls.scratch, 'test.txt')
        with open(small_file, "w") as f:
            f.write("empty content")
        cls.test_shock = cls.dfu.file_to_shock({'file_path': small_file, 'make_handle': True})
        cls.handles_to_delete = []
        cls.nodes_to_delete = []
        cls.handles_to_delete.append(cls.test_shock['handle']['hid'])
        cls.nodes_to_delete.append(cls.test_shock['shock_id'])
Example #4
0
    def setUpClass(cls):
        cls.token = environ.get('KB_AUTH_TOKEN', None)
        cls.callbackURL = environ.get('SDK_CALLBACK_URL')
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('ReadsAlignmentUtils'):
            cls.cfg[nameval[0]] = nameval[1]
        # Getting username from Auth profile for token
        authServiceUrl = cls.cfg['auth-service-url']
        auth_client = _KBaseAuth(authServiceUrl)
        user_id = auth_client.get_user(cls.token)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({'token': cls.token,
                        'user_id': user_id,
                        'provenance': [
                            {'service': 'ReadsAlignmentUtils',
                             'method': 'please_never_use_it_in_production',
                             'method_params': []}],
                        'authenticated': 1})
        cls.shockURL = cls.cfg['shock-url']
        cls.wsURL = cls.cfg['workspace-url']
        cls.wsClient = Workspace(cls.wsURL)
        cls.ws = Workspace(cls.wsURL, token=cls.token)
        cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                               token=cls.token)
        # create workspace
        wssuffix = int(time.time() * 1000)
        wsname = "test_alignment_" + str(wssuffix)
        cls.wsinfo = cls.wsClient.create_workspace({'workspace': wsname})
        print('created workspace ' + cls.getWsName())

        cls.serviceImpl = ReadsAlignmentUtils(cls.cfg)
        cls.readUtilsImpl = ReadsUtils(cls.callbackURL)
        cls.dfu = DataFileUtil(cls.callbackURL)
        cls.assemblyUtil = AssemblyUtil(cls.callbackURL)
        cls.gfu = GenomeFileUtil(cls.callbackURL)

        cls.scratch = cls.cfg['scratch']
        cls.callback_url = os.environ['SDK_CALLBACK_URL']

        cls.staged = {}
        cls.nodes_to_delete = []
        cls.handles_to_delete = []
        cls.setupTestData()
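getWsName is called above but not defined in this excerpt; a minimal helper sketch, assuming the standard workspace info tuple in which element 1 is the workspace name:

    @classmethod
    def getWsName(cls):
        # cls.wsinfo is the tuple returned by create_workspace above;
        # element 1 of a workspace info tuple is the workspace name.
        return cls.wsinfo[1]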
Example #5
0
    def setUpClass(cls):
        cls.token = environ.get('KB_AUTH_TOKEN')
        cls.callbackURL = environ.get('SDK_CALLBACK_URL')
        print('CB URL: ' + cls.callbackURL)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({'token': cls.token,
                        'provenance': [
                            {'service': 'kb_unicycler',
                             'method': 'please_never_use_it_in_production',
                             'method_params': []}],
                        'authenticated': 1})
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('kb_unicycler'):
            cls.cfg[nameval[0]] = nameval[1]
        cls.cfg["SDK_CALLBACK_URL"] = cls.callbackURL
        cls.cfg["KB_AUTH_TOKEN"] = cls.token
        cls.wsURL = cls.cfg['workspace-url']
        cls.shockURL = cls.cfg['shock-url']
        cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                               token=cls.token)
        # cls.wsClient = workspaceService(cls.wsURL, token=cls.token)
        cls.wsClient = Workspace(cls.wsURL, token=cls.token)
        wssuffix = int(time.time() * 1000)
        wsName = "test_kb_unicycler_" + str(wssuffix)
        cls.wsinfo = cls.wsClient.create_workspace({'workspace': wsName})
        print('created workspace ' + cls.getWsName())

        cls.PROJECT_DIR = 'unicycler_outputs'
        cls.scratch = cls.cfg['scratch']
        if not os.path.exists(cls.scratch):
            os.makedirs(cls.scratch)
        cls.prjdir = os.path.join(cls.scratch, cls.PROJECT_DIR)
        if not os.path.exists(cls.prjdir):
            os.makedirs(cls.prjdir)
        cls.serviceImpl = kb_unicycler(cls.cfg)

        cls.readUtilsImpl = ReadsUtils(cls.callbackURL, token=cls.token)
        cls.dfuClient = DataFileUtil(url=cls.callbackURL, token=cls.token)
        cls.staged = {}
        cls.nodes_to_delete = []
        cls.handles_to_delete = []
        cls.setupTestData()
        print(
            '\n\n=============== Starting Unicycler tests ==================')
Example #6
0
 def setUpClass(cls):
     cls.token = environ.get('KB_AUTH_TOKEN', None)
     config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
     cls.cfg = {}
     config = ConfigParser()
     config.read(config_file)
     for nameval in config.items('kb_quast'):
         cls.cfg[nameval[0]] = nameval[1]
     authServiceUrl = cls.cfg.get(
         'auth-service-url',
         "https://kbase.us/services/authorization/Sessions/Login")
     auth_client = _KBaseAuth(authServiceUrl)
     user_id = auth_client.get_user(cls.token)
     # WARNING: don't call any logging methods on the context object,
     # it'll result in a NoneType error
     cls.ctx = MethodContext(None)
     cls.ctx.update({'token': cls.token,
                     'user_id': user_id,
                     'provenance': [
                         {'service': 'kb_quast',
                          'method': 'please_never_use_it_in_production',
                          'method_params': []}],
                     'authenticated': 1})
     cls.shockURL = cls.cfg['shock-url']
     cls.ws = Workspace(cls.cfg['workspace-url'], token=cls.token)
     cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                            token=cls.token)
     cls.au = AssemblyUtil(os.environ['SDK_CALLBACK_URL'])
     cls.impl = kb_quast(cls.cfg)
     cls.scratch = cls.cfg['scratch']
     shutil.rmtree(cls.scratch)
     os.mkdir(cls.scratch)
     suffix = int(time.time() * 1000)
     wsName = "test_ReadsUtils_" + str(suffix)
     cls.ws_info = cls.ws.create_workspace({'workspace': wsName})
     cls.dfu = DataFileUtil(os.environ['SDK_CALLBACK_URL'])
     cls.staged = {}
     cls.nodes_to_delete = []
     cls.handles_to_delete = []
     #         cls.setupTestData()
     print('\n\n=============== Starting tests ==================')
Example #7
0
 def setUpClass(cls):
     token = environ.get('KB_AUTH_TOKEN', None)
     config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
     cls.cfg = {}
     config = ConfigParser()
     config.read(config_file)
     for nameval in config.items('MetagenomeUtils'):
         cls.cfg[nameval[0]] = nameval[1]
     # Getting username from Auth profile for token
     authServiceUrl = cls.cfg['auth-service-url']
     auth_client = _KBaseAuth(authServiceUrl)
     user_id = auth_client.get_user(token)
     # WARNING: don't call any logging methods on the context object,
     # it'll result in a NoneType error
     cls.ctx = MethodContext(None)
     cls.ctx.update({'token': token,
                     'user_id': user_id,
                     'provenance': [
                         {'service': 'MetagenomeUtils',
                          'method': 'please_never_use_it_in_production',
                          'method_params': []}],
                     'authenticated': 1})
     cls.wsURL = cls.cfg['workspace-url']
     cls.wsClient = workspaceService(cls.wsURL)
     cls.serviceImpl = MetagenomeUtils(cls.cfg)
     cls.scratch = cls.cfg['scratch']
     cls.callback_url = os.environ['SDK_CALLBACK_URL']
     suffix = int(time.time() * 1000)
     wsName = "test_kb_maxbin_" + str(suffix)
     cls.ws_info = cls.wsClient.create_workspace({'workspace': wsName})
     cls.dfu = DataFileUtil(os.environ['SDK_CALLBACK_URL'], token=token)
     cls.au = AssemblyUtil(os.environ['SDK_CALLBACK_URL'], token=token)
     cls.metagenome_ref = None
     cls.handleURL = cls.cfg['handle-service-url']
     cls.hs = HandleService(cls.handleURL)
     cls.object_ref = 'KBaseTestData/test_metagenome/1'
Example #8
0
    def own_handle(self, genome, handle_property, ctx):
        if handle_property not in genome:
            return
        token = ctx['token']
        handle_id = genome[handle_property]
        hs = HandleService(self.handle_url, token=token)
        handles = hs.hids_to_handles([handle_id])
        shock_id = handles[0]['id']

        # Copied from the DataFileUtil.own_shock_node implementation:
        header = {'Authorization': 'Oauth {}'.format(token)}
        res = requests.get(self.shock_url + '/node/' + shock_id +
                           '/acl/?verbosity=full',
                           headers=header,
                           allow_redirects=True)
        self.check_shock_response(
            res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
        owner = res.json()['data']['owner']['username']
        if owner != ctx['user_id']:
            shock_id = self.copy_shock_node(ctx, shock_id)
            r = requests.get(self.shock_url + '/node/' + shock_id,
                             headers=header,
                             allow_redirects=True)
            errtxt = ('Error downloading attributes from shock ' +
                      'node {}: ').format(shock_id)
            self.check_shock_response(r, errtxt)
            shock_data = r.json()['data']
            handle = {
                'id': shock_data['id'],
                'type': 'shock',
                'url': self.shock_url,
                'file_name': shock_data['file']['name'],
                'remote_md5': shock_data['file']['checksum']['md5']
            }
            handle_id = hs.persist_handle(handle)
            genome[handle_property] = handle_id
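copy_shock_node is referenced above but not shown; a minimal sketch, assuming the copy_data form field supported by the KBase Shock service (the same mechanism DataFileUtil.own_shock_node relies on):

    def copy_shock_node(self, ctx, shock_id):
        """Sketch: copy a Shock node so the new node is owned by the caller."""
        header = {'Authorization': 'Oauth {}'.format(ctx['token'])}
        res = requests.post(self.shock_url + '/node',
                            headers=header,
                            data={'copy_data': shock_id},
                            allow_redirects=True)
        self.check_shock_response(
            res, 'Error copying Shock node {}: '.format(shock_id))
        return res.json()['data']['id']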
Example #9
0
    def getPairedEndLibInfo(self):
        if hasattr(self.__class__, 'pairedEndLibInfo'):
            return self.__class__.pairedEndLibInfo
        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.upload_file_to_shock(
            shock_service_url=self.shockURL,
            filePath='data/small.forward.fq',
            token=token)
        reverse_shock_file = self.upload_file_to_shock(
            shock_service_url=self.shockURL,
            filePath='data/small.reverse.fq',
            token=token)
        pprint(forward_shock_file)
        pprint(reverse_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        handf = {
            'id': forward_shock_file['id'],
            'type': 'shock',
            'url': self.shockURL,
            'file_name': forward_shock_file['file']['name'],
            'remote_md5': forward_shock_file['file']['checksum']['md5']
        }
        forward_handle = hs.persist_handle(handf)
        handr = {
            'id': reverse_shock_file['id'],
            'type': 'shock',
            'url': self.shockURL,
            'file_name': reverse_shock_file['file']['name'],
            'remote_md5': reverse_shock_file['file']['checksum']['md5']
        }

        reverse_handle = hs.persist_handle(handr)

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid': forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type': 'shock',
                    'remote_md5': forward_shock_file['file']['checksum']['md5']
                },
                'encoding': 'UTF8',
                'type': 'fastq',
                'size': forward_shock_file['file']['size']
            },
            'lib2': {
                'file': {
                    'hid': reverse_handle,
                    'file_name': reverse_shock_file['file']['name'],
                    'id': reverse_shock_file['id'],
                    'url': self.shockURL,
                    'type': 'shock',
                    'remote_md5': reverse_shock_file['file']['checksum']['md5']
                },
                'encoding': 'UTF8',
                'type': 'fastq',
                'size': reverse_shock_file['file']['size']
            },
            'interleaved': 0,
            'sequencing_tech': 'artificial reads',
            'read_length_mean': 100,
            'insert_size_mean': 250,
            'insert_size_std_dev': 10,
            'total_bases': 125000,
            'read_orientation_outward': 1
        }
        ws_obj = {
            'workspace': self.getWsName(),
            'objects': [
                {
                    'type': 'KBaseFile.PairedEndLibrary',
                    'data': paired_end_library,
                    'name': 'test.pe.reads',
                    'meta': {},
                    'provenance': [
                        {
                            'service': 'hipmer',
                            'method': 'test_hipmer'
                        }
                    ]
                }]
        }

        new_obj_info = self.ws.save_objects(ws_obj)
        self.__class__.pairedEndLibInfo = new_obj_info[0]
        return new_obj_info[0]
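A hypothetical follow-up showing how callers typically turn the returned object_info tuple into a reference (workspace id at index 6, object id at index 0, version at index 4, as in the other examples above):

        lib_info = self.getPairedEndLibInfo()
        pe_lib_ref = '%s/%s/%s' % (lib_info[6], lib_info[0], lib_info[4])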
Example #10
0
 def test_update_taxon_assignments_valid(self):
     """
     Test a valid call to the update_taxon_assignments method.
     """
     taxon_key = str(uuid4())
     taxon_val = str(uuid4())
     taxon_val_new = str(uuid4())
     # Copy the object to test workspace
     dfu = DataFileUtil(self.callbackURL)
     obj_ref = f"{_WORKSPACE_NAME}/{_OBJECT_NAME}"
     result = dfu.get_objects({'object_refs': [obj_ref]})['data'][0]
     obj_data = result['data']
     # create a user-owned handle in the object and update it
     hs = HandleService(self.handleURL)
     prev_handle_id = obj_data['genbank_handle_ref']
     prev_shock_id = hs.hids_to_handles([prev_handle_id])[0]['id']
     new_handle_id = dfu.own_shock_node({
         'shock_id': prev_shock_id,
         'make_handle': 1
     })['handle']['hid']
     obj_data['genbank_handle_ref'] = new_handle_id
     # Save new object in test workspace
     obj_info = result['info']
     new_obj = {
         'type': obj_info[2],
         'data': obj_data,
         'name': 'GCF_002287175.1'
     }
     test_ws_id = dfu.ws_name_to_id(self.wsName)
     infos = dfu.save_objects({'id': test_ws_id, 'objects': [new_obj]})
     obj_ref = f"{infos[0][6]}/{infos[0][0]}/{infos[0][4]}"
     new_ws_id = infos[0][6]
     new_obj_id = infos[0][0]
     get_obj_params = {
         'wsid': new_ws_id,
         'objid': new_obj_id,
         'included': ['/taxon_assignments']
     }
     # Add a new assignment
     self.serviceImpl.update_taxon_assignments(
         self.ctx, {
             'workspace_id': new_ws_id,
             'object_id': new_obj_id,
             'taxon_assignments': {
                 taxon_key: taxon_val
             }
         })
     # Fetch the object and check the mapping
     obj = self.wsClient.get_objects2({'objects':
                                       [get_obj_params]})['data'][0]['data']
     self.assertTrue(taxon_key in obj['taxon_assignments'])
     self.assertEqual(obj['taxon_assignments'][taxon_key], taxon_val)
     # Update the assignment we just added
     self.serviceImpl.update_taxon_assignments(
         self.ctx, {
             'workspace_id': new_ws_id,
             'object_id': new_obj_id,
             'taxon_assignments': {
                 taxon_key: taxon_val_new
             }
         })
     # Fetch the object and check the mapping
     obj = self.wsClient.get_objects2({'objects':
                                       [get_obj_params]})['data'][0]['data']
     self.assertTrue(taxon_key in obj['taxon_assignments'])
     self.assertEqual(obj['taxon_assignments'][taxon_key], taxon_val_new)
     # Remove the assignment we just added
     self.serviceImpl.update_taxon_assignments(
         self.ctx, {
             'workspace_id': new_ws_id,
             'object_id': new_obj_id,
             'remove_assignments': [taxon_key]
         })
     # Fetch the object and check the mapping
     obj = self.wsClient.get_objects2({'objects':
                                       [get_obj_params]})['data'][0]['data']
     self.assertTrue(taxon_key not in obj['taxon_assignments'])
     self.assertEqual(obj['taxon_assignments'].get(taxon_key), None)