예제 #1
0
 def tearDown(self):
     """
     Tear down temporary folder and files and stop S3 mock
     """
     # Classmethod cleanup: removes every live TempDirectory at once.
     TempDirectory.cleanup_all()
     # Sanity check: the temporary path must be gone after cleanup.
     nose.tools.assert_equal(os.path.isdir(self.temp_path), False)
     # self.mock is presumably the moto/S3 mock started in setUp -- TODO confirm.
     self.mock.stop()
예제 #2
0
 def tearDown(self):
     """
     Rollback database session.
     Tear down temporary folder and file structure, stop moto mock
     """
     # Parent tearDown handles the DB rollback / mock shutdown mentioned in the
     # docstring -- its definition is outside this view.
     super().tearDown()
     # Remove every TempDirectory created during the test.
     TempDirectory.cleanup_all()
     # The temporary path must no longer exist after cleanup.
     self.assertFalse(os.path.isdir(self.temp_path))
예제 #3
0
class TestS3Storage(unittest.TestCase):
    """S3Storage.

    Testing storage on S3 solution.
    """
    # NOTE(review): the ``self.env.set(...)`` line below was mangled by
    # credential redaction in the scrape -- the redacted URL
    # ('http://*****:*****@') swallowed the remainder of setUp and the
    # following '@mock.patch.object(boto3, 'client')' decorator line.
    # Recover the original from version control before running.
    def setUp(self):
        """Initialise fixtures."""
        self._dir = TempDirectory()
        self.env = EnvironmentVarGuard()
        self.env.set('ARCHIVE_S3_URL', 'http://*****:*****@mock.patch.object(boto3, 'client')
    def test_init_s3storage(self, mock_boto):
        """Initialise S3 storage."""
        S3Storage('archive', 'lega')
        mock_boto.assert_called()

    @mock.patch.object(boto3, 'client')
    def test_init_location(self, mock_boto):
        """Initialise S3 storage."""
        storage = S3Storage('archive', 'lega')
        result = storage.location('file_id')
        self.assertEqual('file_id', result)
        mock_boto.assert_called()

    @mock.patch.object(boto3, 'client')
    def test_upload(self, mock_boto):
        """Test copy to S3, should call boto3 client."""
        path = self._dir.write('test.file', 'data1'.encode('utf-8'))
        storage = S3Storage('archive', 'lega')
        storage.copy(path, 'lega')
        # NOTE(review): the ``endpoint_url=`` line below suffered the same
        # redaction damage -- it absorbed the '@mock.patch.object' decorator
        # of test_open. Recover from version control.
        mock_boto.assert_called_with('s3',
                                     aws_access_key_id='test',
                                     aws_secret_access_key='test',
                                     endpoint_url='http://*****:*****@mock.patch.object(boto3, 'client')
    def test_open(self, mock_boto):
        """Test open , should call S3FileReader."""
        path = self._dir.write('test.file', 'data1'.encode('utf-8'))
        storage = S3Storage('archive', 'lega')
        with storage.open(path) as resource:
            self.assertEqual(S3FileReader, type(resource))
 def tearDown(self):
     """Remove the artefacts this test session may have produced.

     Deletes the five mutated-replicate FASTA files and the SNP list if
     they exist, then clears every TempDirectory instance.
     """
     for idx in range(1, 6):
         mutated = "original_mutated_%d.fasta" % idx
         if os.path.exists(mutated):
             os.remove(mutated)
     snp_list = "original_snpListMutated.txt"
     if os.path.exists(snp_list):
         os.remove(snp_list)
     TempDirectory.cleanup_all()
예제 #5
0
 def tearDown(self):
     """
     Delete all the temporary directories and files created during this
     testing session.
     """
     # The test produces original_mutated_1.fasta .. original_mutated_5.fasta.
     for replicate in range(1, 6):
         file_name = "original_mutated_%d.fasta" % replicate
         if os.path.exists(file_name):
             os.remove(file_name)
     # The SNP list is written once per session, so a single check suffices.
     if os.path.exists("original_snpListMutated.txt"):
         os.remove("original_snpListMutated.txt")
     # Finally drop every TempDirectory created by the tests.
     TempDirectory.cleanup_all()
예제 #6
0
    def test_cleanup_all(self):
        """cleanup_all() must delete every TempDirectory created so far."""
        first = TempDirectory()
        second = TempDirectory()

        # Capture the paths before cleanup destroys the instances.
        path_a = first.path
        path_b = second.path
        assert os.path.exists(path_a) is True
        assert os.path.exists(path_b) is True

        TempDirectory.cleanup_all()

        # Both directories must be gone afterwards.
        assert os.path.exists(path_a) is False
        assert os.path.exists(path_b) is False
예제 #7
0
class IniFileConfigurationLoaderTestCase(BaseTestCase):
    # Exercises prettyconf's INI-file loader against a malformed config file.
    def setUp(self):
        super(IniFileConfigurationLoaderTestCase, self).setUp()
        self.tmp_dir = TempDirectory()

    def tearDown(self):
        super(IniFileConfigurationLoaderTestCase, self).tearDown()
        # NOTE(review): cleanup_all() is a TempDirectory *classmethod* -- called
        # via the instance it removes every live TempDirectory, not just
        # self.tmp_dir. self.tmp_dir.cleanup() would be the per-instance call.
        self.tmp_dir.cleanup_all()

    def test_skip_invalid_ini_file(self):
        from prettyconf.loaders import IniFileConfigurationLoader

        # Write deliberately non-INI bytes; the loader must reject the file.
        test_file = self.tmp_dir.write('some/strange/config.cfg',
                                       '*&ˆ%$#$%ˆ&*('.encode('utf8'))
        with self.assertRaises(InvalidConfigurationFile):
            IniFileConfigurationLoader(test_file)
예제 #8
0
class IniFileConfigurationLoaderTestCase(BaseTestCase):
    """Tests for prettyconf's INI-file configuration loader."""

    def setUp(self):
        super().setUp()
        self.tmp_dir = TempDirectory()

    def tearDown(self):
        super().tearDown()
        self.tmp_dir.cleanup_all()

    def test_skip_invalid_ini_file(self):
        from prettyconf.loaders import IniFileConfigurationLoader

        # A file full of non-INI garbage must be rejected by the loader.
        garbage = '*&ˆ%$#$%ˆ&*('.encode('utf8')
        cfg_path = self.tmp_dir.write('some/strange/config.cfg', garbage)
        with self.assertRaises(InvalidConfigurationFile):
            IniFileConfigurationLoader(cfg_path)
예제 #9
0
class TestS3Storage(unittest.TestCase):
    """S3Storage.

    Testing storage on S3 solution over TLS.
    """
    # NOTE(review): the ``self.env.set(...)`` line below was mangled by
    # credential redaction in the scrape -- the redacted https URL swallowed
    # the remainder of setUp and the "@mock.patch.object(botocore.client,
    # 'Config')" decorator line of test_upload. Recover the original from
    # version control before running.
    def setUp(self):
        """Initialise fixtures."""
        self._dir = TempDirectory()
        self.env = EnvironmentVarGuard()
        self.env.set('ARCHIVE_S3_URL', 'https://*****:*****@mock.patch.object(botocore.client, 'Config')
    @mock.patch.object(boto3, 'client')
    def test_upload(self, mock_boto, mock_botocore):
        """Test copy to S3, should call boto3 client."""
        path = self._dir.write('test.file', 'data1'.encode('utf-8'))
        storage = S3Storage('archive', 'lega')
        storage.copy(path, 'lega')
        # TLS client certificate pair passed through botocore's Config.
        mock_botocore.assert_called_with(connect_timeout=60,
                                         client_cert=('/etc/ega/ssl.cert',
                                                      '/etc/ega/ssl.key'))
        mock_boto.assert_called_with('s3',
                                     aws_access_key_id='test',
                                     aws_secret_access_key='test',
                                     endpoint_url='https://localhost:5000',
                                     region_name='lega',
                                     use_ssl=True,
                                     verify='/etc/ega/CA.cert',
                                     config=mock_botocore())
예제 #10
0
class TestFileStorage(unittest.TestCase):
    """FileStorage.

    Testing storage on disk.
    """
    def setUp(self):
        """Initialise fixtures."""
        self._dir = TempDirectory()
        self.outputdir = self._dir.makedir('output')
        self.env = EnvironmentVarGuard()
        # '%s' is expanded by FileStorage -- presumably with the group name
        # ('lega' below), matching the 'output/lega/...' paths used in the
        # tests. TODO confirm against FileStorage.
        self.env.set('ARCHIVE_LOCATION', self.outputdir + '/%s/')
        self._store = FileStorage('archive', 'lega')

    def tearDown(self):
        """Remove setup variables."""
        self.env.unset('ARCHIVE_LOCATION')
        self._dir.cleanup_all()

    def test_location(self):
        """Test file location."""
        result = self._store.location('12')
        self.assertEqual(
            os.path.join('/', '000', '000', '000', '000', '000', '000', '12'),
            result)

    def test_copy(self):
        """Test copy file."""
        path = self._dir.write('output/lega/test.file',
                               'data1'.encode('utf-8'))
        path1 = self._dir.write('output/lega/test1.file', ''.encode('utf-8'))
        # Use a context manager so the source handle is closed deterministically
        # (the original leaked the file object returned by open()).
        with open(path, 'rb') as source:
            result = self._store.copy(source, path1)
        # copy() returns the number of bytes written, i.e. the source size.
        self.assertEqual(os.stat(path).st_size, result)

    def test_open(self):
        """Test open file."""
        # The file must exist under the archive location for open() to find it.
        self._dir.write('output/lega/test.file', 'data1'.encode('utf-8'))
        with self._store.open('test.file') as resource:
            self.assertEqual(BufferedReader, type(resource))
예제 #11
0
def tearDown(test):
    """Module-level teardown: remove every TempDirectory created by the tests."""
    TempDirectory.cleanup_all()
	def tearDownClass(cls):
		"""Class-level teardown: remove all TempDirectory instances.

		NOTE(review): this fragment takes ``cls`` but no ``@classmethod``
		decorator is visible here -- confirm it exists at the definition site.
		"""
		TempDirectory.cleanup_all()
예제 #13
0
def tearDown(test):
    """Module-level teardown: drop temp directories and uninstall log captures."""
    TempDirectory.cleanup_all()
    # Detach every LogCapture from the logging framework.
    LogCapture.uninstall_all()
 def tearDown(self):
     """Remove all TempDirectory instances and the activity's own tmp dir."""
     TempDirectory.cleanup_all()
     # self.activity appears to manage its own scratch space -- TODO confirm.
     self.activity.clean_tmp_dir()
예제 #15
0
class testVerify(unittest.TestCase):
    """Verify.

    Testing verify functionalities.
    """
    def setUp(self):
        """Initialise fixtures."""
        self.env = EnvironmentVarGuard()
        self._dir = TempDirectory()
        self.outputdir = self._dir.makedir('output')
        # NOTE(review): self.env was already assigned above -- this second
        # EnvironmentVarGuard discards the first; one of the two lines is
        # presumably redundant.
        self.env = EnvironmentVarGuard()
        self.env.set('ARCHIVE_LOCATION', self.outputdir + '/%s/')
        self.env.set('LEGA_PASSWORD', 'value')
        self.env.set('QUALITY_CONTROL_VERIFY_PEER', 'True')
        self.env.set('QUALITY_CONTROL_VERIFY_HOSTNAME', 'False')

    def tearDown(self):
        """Remove setup variables."""
        self.env.unset('ARCHIVE_LOCATION')
        self._dir.cleanup_all()
        self.env.unset('LEGA_PASSWORD')
        self.env.unset('QUALITY_CONTROL_VERIFY_PEER')
        self.env.unset('QUALITY_CONTROL_VERIFY_HOSTNAME')

    @tempdir()
    @mock.patch('lega.verify.header_to_records')
    @mock.patch('lega.verify.get_key_id')
    def test_get_records(self, mock_key, mock_records, filedir):
        """Should call the url in order to provide the records."""
        infile = filedir.write('infile.in',
                               bytearray.fromhex(pgp_data.ENC_FILE))
        # Fake keyserver answer: HTTP 200 with a private key payload.
        returned_data = KeyServerResponse(
            200, io.BytesIO(pgp_data.PGP_PRIVKEY.encode()))
        with PatchContextManager('lega.verify.urlopen',
                                 returned_data) as mocked:
            print(returned_data.status)
            with open(infile, 'rb') as f:
                get_records(f)
            mocked.assert_called()
        filedir.cleanup()

    @tempdir()
    @mock.patch('lega.verify.header_to_records')
    @mock.patch('lega.verify.get_key_id')
    def test_get_records_no_verify(self, mock_key, mock_records, filedir):
        """Should call the url in order to provide the records even without a verify turned off."""
        self.env.set('QUALITY_CONTROL_VERIFY_PEER', 'False')
        self.env.set('QUALITY_CONTROL_VERIFY_HOSTNAME', 'False')
        infile = filedir.write('infile.in',
                               bytearray.fromhex(pgp_data.ENC_FILE))
        returned_data = KeyServerResponse(
            200, io.BytesIO(pgp_data.PGP_PRIVKEY.encode()))
        with PatchContextManager('lega.verify.urlopen',
                                 returned_data) as mocked:
            with open(infile, 'rb') as f:
                get_records(f)
            mocked.assert_called()
        filedir.cleanup()

    @tempdir()
    @mock.patch('lega.verify.header_to_records')
    @mock.patch('lega.verify.get_key_id')
    def test_get_records_key_error(self, mock_key, mock_records, filedir):
        """The PGP key was not found, should raise PGPKeyError error."""
        infile = filedir.write('infile.in',
                               bytearray.fromhex(pgp_data.ENC_FILE))
        # 404 from the keyserver maps to PGPKeyError.
        with mock.patch('lega.verify.urlopen') as urlopen_mock:
            urlopen_mock.side_effect = HTTPError('url', 404, 'msg', None, None)
            with self.assertRaises(PGPKeyError):
                with open(infile, 'rb') as f:
                    get_records(f)
        filedir.cleanup()

    @tempdir()
    @mock.patch('lega.verify.header_to_records')
    @mock.patch('lega.verify.get_key_id')
    def test_get_records_server_error(self, mock_key, mock_records, filedir):
        """Some keyserver error occurred, should raise KeyserverError error."""
        infile = filedir.write('infile.in',
                               bytearray.fromhex(pgp_data.ENC_FILE))
        # Any non-404 HTTP error maps to KeyserverError.
        with mock.patch('lega.verify.urlopen') as urlopen_mock:
            urlopen_mock.side_effect = HTTPError('url', 400, 'msg', None, None)
            with self.assertRaises(KeyserverError):
                with open(infile, 'rb') as f:
                    get_records(f)
        filedir.cleanup()

    @tempdir()
    @mock.patch('lega.verify.header_to_records')
    @mock.patch('lega.verify.get_key_id')
    def test_get_records_error(self, mock_key, mock_records, filedir):
        """Some general error occurred, should raise Exception error."""
        self.env.set('QUALITY_CONTROL_VERIFY_PEER', 'False')
        self.env.set('QUALITY_CONTROL_VERIFY_HOSTNAME', 'False')
        infile = filedir.write('infile.in',
                               bytearray.fromhex(pgp_data.ENC_FILE))
        with mock.patch('lega.verify.urlopen') as urlopen_mock:
            urlopen_mock.side_effect = Exception
            with self.assertRaises(Exception):
                with open(infile, 'rb') as f:
                    get_records(f)
        filedir.cleanup()

    @mock.patch('lega.ingest.getattr')
    @mock.patch('lega.verify.get_connection')
    @mock.patch('lega.verify.consume')
    def test_main(self, mock_consume, mock_connection, mock_getattr):
        """Test main verify, by mocking consume call."""
        mock_consume.return_value = mock.MagicMock()
        main()
        mock_consume.assert_called()

    @tempdir()
    @mock.patch('lega.verify.db')
    @mock.patch('lega.verify.body_decrypt')
    @mock.patch('lega.verify.get_records')
    def test_work(self, mock_records, mock_decrypt, mock_db, filedir):
        """Test verify worker, should send a message."""
        # Mocking a lot of stuff, as it is previously tested
        mock_db.status.return_value = mock.Mock()
        mock_records.return_value = ['data'], 'key_id'
        mock_decrypt.return_value = mock.Mock()
        store = mock.MagicMock()
        store.open.return_value = mock.MagicMock()
        mock_broker = mock.MagicMock(name='channel')
        mock_broker.channel.return_value = mock.Mock()
        infile = filedir.write('infile.in', 'text'.encode("utf-8"))
        data = {
            'header': pgp_data.ENC_FILE,
            'stable_id': '1',
            'archive_path': infile,
            'file_id': '123',
            'org_msg': {}
        }
        result = work('10', store, mock_broker, data)
        # NOTE(review): assertTrue with two arguments treats the second as the
        # failure message -- this was presumably meant to be assertEqual.
        self.assertTrue({'status': {
            'state': 'COMPLETED',
            'details': '1'
        }}, result)
        filedir.cleanup()
def tearDown(test):
    """Module-level teardown: remove every TempDirectory created by the tests."""
    TempDirectory.cleanup_all()
예제 #17
0
 def tearDown(self):
     """
     Tear down temporary folder and file structure
     """
     TempDirectory.cleanup_all()
     # After cleanup the temporary directory must be gone.
     nose.tools.assert_equal(os.path.isdir(self.temp_dir), False)
예제 #18
0
def tearDown(test):
    """Module-level teardown: drop temp directories and uninstall log captures."""
    TempDirectory.cleanup_all()
    # Detach every LogCapture from the logging framework.
    LogCapture.uninstall_all()
예제 #19
0
 def tearDown(self):
     """Remove all temp directories, then destroy the environment fixture."""
     TempDirectory.cleanup_all()
     # self.env looks like a test-environment object with a destroy() hook
     # set up outside this view -- TODO confirm.
     self.env.destroy()
예제 #20
0
File: test_sns.py  Project: aodag/sns
def tearDownModule():
    """Module-level teardown: remove every TempDirectory created by the tests."""
    TempDirectory.cleanup_all()
예제 #21
0
 def tearDown(self):
     """Best-effort removal of all TempDirectory instances.

     Cleanup failures are deliberately ignored so that an error here
     cannot mask the actual test outcome.
     """
     try:
         TempDirectory.cleanup_all()
     except Exception:
         # The original used a bare ``except:``, which also swallows
         # SystemExit and KeyboardInterrupt; narrowing to Exception keeps
         # the best-effort intent without hiding interpreter-level exits.
         pass
예제 #22
0
class TestCommandLineARGS(unittest.TestCase):
    """CLI args

    Testing command line argument calls."""
    def setUp(self):
        """Setting things up."""
        # Key material fixtures written into a throwaway TempDirectory.
        self._dir = TempDirectory()
        self._path = self._dir.write('pubring.bin',
                                     pgp_data.PGP_PUBKEY.encode('utf-8'))
        self._pk = self._dir.write('pub_key.asc',
                                   pgp_data.PGP_PUBKEY.encode('utf-8'))
        self._sk = self._dir.write('sec_key.asc',
                                   pgp_data.PGP_PRIVKEY.encode('utf-8'))
        self._crypted = self._dir.write('input.file',
                                        bytearray.fromhex(pgp_data.ENC_FILE))

    def tearDown(self):
        """Remove files."""
        # NOTE(review): cleanup_all() is a classmethod -- this removes every
        # live TempDirectory, not only self._dir.
        self._dir.cleanup_all()

    def test_cmdline_no_args(self):
        """User passes no args, should fail with SystemExit."""
        with self.assertRaises(SystemExit):
            parse_args()

    def test_cmdline_help(self):
        """User passes help, should return help."""
        with self.assertRaises(SystemExit):
            parse_args(['--help'])
            # NOTE(review): unreachable -- parse_args raises SystemExit on the
            # line above, so this assertion never executes.
            self.assertEqual(__doc__, sys.stdout)

    def test_cmdline_load_logger(self):
        """Should be able to load a custom logger."""
        with TempDirectory() as filedir:
            filedir.write('logger.yml',
                          logger_data.TEST_LOGGER.encode('utf-8'))
            parse_args(
                ['--log',
                 os.path.join(filedir.path, 'logger.yml'), 'list'])

    def test_cmdline_parse_list(self):
        """User should get an args list when asking for keys list."""
        expected = {
            '--input': None,
            '--keyid': None,
            '--output': None,
            '--pk': None,
            '--pubring': self._path,
            '--server': None,
            '--sk': None,
            '-r': '*****@*****.**',  # NOTE(review): recipient email redacted by the scrape
            'decrypt': False,
            'encrypt': False,
            'list': True,
            'reencrypt': False
        }
        result = parse_args(['list', '--pubring', self._path])
        self.assertEqual(expected, dict(result))

    def test_cmdline_main_fail(self):
        """Run without commandline args, should exit."""
        with self.assertRaises(SystemExit):
            main()

    @mock.patch('legacryptor.__main__.Pubring')
    def test_cmdline_run_list_pubring(self, mock_ring):
        """Listing with specific pubring should call the Pubring."""
        run(['list', '--pubring', self._path])
        mock_ring.assert_called()

    @mock.patch('legacryptor.__main__.encrypt')
    def test_cmdline_run_encrypt_pubring(self, mock_encrypt):
        """Encrypt from the command line should call the encrypt function."""
        run(['encrypt', '--pk', self._pk])
        mock_encrypt.assert_called()

    @mock.patch('getpass.getpass')
    @mock.patch('legacryptor.__main__.decrypt')
    def test_cmdline_run_decrypt_pubring(self, mock_decrypt, mock_pass):
        """Decrypt from the command line should call the decrypt function."""
        mock_pass.return_value = pgp_data.PGP_PASSPHRASE
        run(['decrypt', '--sk', self._sk])
        mock_decrypt.assert_called()

    @mock.patch('getpass.getpass')
    @mock.patch('legacryptor.__main__.reencrypt')
    def test_cmdline_run_reencrypt_pubring(self, mock_reencrypt, mock_pass):
        """Reencrypt from the command line should call the reencrypt function."""
        mock_pass.return_value = pgp_data.PGP_PASSPHRASE
        run(['reencrypt', '--sk', self._sk, '--pk', self._pk])
        mock_reencrypt.assert_called()

    def test_cmdline_server_notimplemented(self):
        """Trying to access server keys should raise NotImplementedError."""
        with self.assertRaises(NotImplementedError):
            run(['list', '--server', 'someserver.com'])

    def test_cmdline_encrypt_key_notfound(self):
        """Raise error if key not found in default pubring."""
        with self.assertRaises(ValueError):
            run(['encrypt', '-r', 'Denmark', '--pubring', self._path])

    @mock.patch('getpass.getpass')
    @tempdir()
    def test_cmdline_decrypt_file(self, filedir, mock_pass):
        """Check if the decrypted file is actually there and the content is as expected."""
        filedir.makedir('directory')
        path = os.path.join(filedir.path, 'directory', 'output.file')
        mock_pass.return_value = pgp_data.PGP_PASSPHRASE
        run(['decrypt', '--sk', self._sk, '-i', self._crypted, '-o', path])
        result = filedir.read(('directory', 'output.file'))
        self.assertEqual(pgp_data.ORG_FILE, result)
        filedir.cleanup()

    @mock.patch('getpass.getpass')
    @tempdir()
    def test_cmdline_reencrypt_file(self, filedir, mock_pass):
        """Check if the reencrypted file is actually there and the header is crypt4gh."""
        filedir.makedir('directory')
        path = os.path.join(filedir.path, 'directory', 'output.file')
        mock_pass.return_value = pgp_data.PGP_PASSPHRASE
        run([
            'reencrypt', '--sk', self._sk, '--pk', self._pk, '-i',
            self._crypted, '-o', path
        ])
        result = filedir.read(('directory', 'output.file'))
        # A re-encrypted file must start with the crypt4gh magic bytes.
        self.assertEqual(b'crypt4gh', result[:8])
        filedir.cleanup()
예제 #23
0
 def tearDown(self):
     """
     Tear down temporary folder and file structure
     """
     TempDirectory.cleanup_all()
     # The temporary path must not survive cleanup.
     self.assertEqual(os.path.isdir(self.temp_path), False)
예제 #24
0
class TestPubring(unittest.TestCase):
    """Pubring

    Testing Pubring."""
    def setUp(self):
        """Setting things up."""
        self._dir = TempDirectory()
        self._path = self._dir.write('pubring.bin',
                                     pgp_data.PGP_PUBKEY.encode('utf-8'))
        self._pubring = Pubring(self._path)

    def tearDown(self):
        """Remove files."""
        # cleanup_all() is a classmethod: it removes every live TempDirectory,
        # which also covers the @tempdir() fixtures used below.
        self._dir.cleanup_all()

    @tempdir()
    def test_unproper_keyring(self, filedir):
        """Setting up should give an error due to unproper armored PGP."""
        path = filedir.write('pubring.bin', ''.encode('utf-8'))
        # the ValueError is raised by PGPy because unproper armored PGP
        with self.assertRaises(ValueError):
            Pubring(path)
        filedir.cleanup()

    @tempdir()
    @mock.patch('legacryptor.pubring.LegaKeyring')
    def test_empty_keyring(self, mock_keyring, filedir):
        """The keyring is empty, thus it should point that out."""
        mock_keyring.return_value = ''
        path = filedir.write('pubring.bin', ''.encode('utf-8'))
        with self.assertRaises(ValueError):
            Pubring(path)
        filedir.cleanup()

    def test_load_key(self):
        """Pubring lookup should return the same key by any identifier."""
        # This identified a bug for example if there is no version in the PGP_PUBKEY
        # PGPy adds its own version e.g. Version: PGPy v0.4.3

        # for some reason PGPy adds a new line at the end, hence rstrip().
        # Use indexing instead of calling __getitem__ directly.
        data_name = str(self._pubring[pgp_data.PGP_NAME]).rstrip()  # get by name
        self.assertEqual(pgp_data.PGP_PUBKEY, data_name)
        data_key_id = str(self._pubring[pgp_data.KEY_ID]).rstrip()  # get by key_id
        self.assertEqual(pgp_data.PGP_PUBKEY, data_key_id)
        data_comment = str(self._pubring[pgp_data.PGP_COMMENT]).rstrip()  # get by comment
        self.assertEqual(pgp_data.PGP_PUBKEY, data_comment)
        data_email = str(self._pubring[pgp_data.PGP_EMAIL]).rstrip()  # get by email
        self.assertEqual(pgp_data.PGP_PUBKEY, data_email)

    def test_pubring_notempty(self):
        """Pubring should not be empty, and this should be True."""
        self.assertTrue(bool(self._pubring))

    def test_pubring_str(self):
        """Should return the pubring path."""
        self.assertEqual(f'<Pubring from {self._path}>', str(self._pubring))

    def test_pubring_iter(self):
        """Get pubring items, should return the expected list."""
        # Renamed from 'list' -- the original shadowed the builtin.
        items = [x for x in iter(self._pubring)]
        expected = [(pgp_data.KEY_ID, pgp_data.PGP_NAME, pgp_data.PGP_EMAIL,
                     pgp_data.PGP_COMMENT)]
        self.assertEqual(expected, items)

    def test_pubring_repr(self):
        """Get the table info from Pubring."""
        list_data = [('Key ID', 'User Name', 'User Email', 'User Comment')]
        list_data.append((pgp_data.KEY_ID, pgp_data.PGP_NAME,
                          pgp_data.PGP_EMAIL, pgp_data.PGP_COMMENT))
        table = DoubleTable(list_data)
        data = f'''\
Available keys from {self._path}
{table.table}
The first substring that matches the requested recipient will be used as the encryption key
Alternatively, you can use the KeyID itself'''
        self.assertEqual(data, repr(self._pubring))
예제 #25
0
 def tearDownAll():
     """
     Delete all the temporary directories and files created during this
     testing session.
     """
     # Classmethod cleanup: removes every live TempDirectory at once.
     TempDirectory.cleanup_all()
예제 #26
0
class DatabaseTestCase(unittest.IsolatedAsyncioTestCase):
    """Test database operations."""

    # Connection / ConnectionException / INFO / Variant appear to be test
    # doubles defined elsewhere in this test module -- TODO confirm.
    def setUp(self):
        """Initialise BeaconDB object."""
        self._db = BeaconDB()
        self._dir = TempDirectory()
        # Minimal VCF fixture (header + a few variant rows) used by the
        # load_metadata/load_datafile tests below.
        self.data = """##fileformat=VCFv4.0
        ##fileDate=20090805
        ##source=myImputationProgramV3.1
        ##reference=1000GenomesPilot-NCBI36
        ##phasing=partial
        ##INFO=<ID=NS,Number=1,Type=Integer,Description="Number of Samples With Data">
        ##INFO=<ID=AN,Number=1,Type=Integer,Description="Total number of alleles in called genotypes">
        ##INFO=<ID=AC,Number=.,Type=Integer,Description="Allele count in genotypes, for each ALT allele, in the same order as listed">
        ##INFO=<ID=DP,Number=1,Type=Integer,Description="Total Depth">
        ##INFO=<ID=AF,Number=.,Type=Float,Description="Allele Frequency">
        ##INFO=<ID=AA,Number=1,Type=String,Description="Ancestral Allele">
        ##INFO=<ID=DB,Number=0,Type=Flag,Description="dbSNP membership, build 129">
        ##INFO=<ID=H2,Number=0,Type=Flag,Description="HapMap2 membership">
        ##FILTER=<ID=q10,Description="Quality below 10">
        ##FILTER=<ID=s50,Description="Less than 50% of samples have data">
        ##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
        ##FORMAT=<ID=GQ,Number=1,Type=Integer,Description="Genotype Quality">
        ##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Read Depth">
        ##FORMAT=<ID=HQ,Number=2,Type=Integer,Description="Haplotype Quality">
        ##ALT=<ID=DEL:ME:ALU,Description="Deletion of ALU element">
        ##ALT=<ID=CNV,Description="Copy number variable region">
        #CHROM	POS	ID	REF	ALT	QUAL	FILTER	INFO	FORMAT	NA00001	NA00002	NA00003
        19	111	.	A	C	9.6	.	.	GT:HQ	0|0:10,10	0|0:10,10	0/1:3,3
        19	112	.	A	G	10	.	.	GT:HQ	0|0:10,10	0|0:10,10	0/1:3,3
        20	14370	rs6054257	G	A	29	PASS	NS=3;DP=14;AF=0.5;DB;H2	GT:GQ:DP:HQ	0|0:48:1:51,51	1|0:48:8:51,51	1/1:43:5:.,.
        chrM 15011 . T C . PASS . GT:GQ:DP:RO:QR:AO:QA:GL 1:160:970:0:0:968:31792:-2860.58,0 1:160:970:0:0:968:31792:-2860.58,0"""
        self.datafile = self._dir.write("data.csv", self.data.encode("utf-8"))

    def tearDown(self):
        """Clean up the temporary directory created for each test."""
        self._dir.cleanup_all()

    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_rchop(self, db_mock):
        """Test rchop for SVTYPE."""
        db_mock.return_value = Connection()
        await self._db.connection()
        result = self._db._rchop("INS:ME:LINE1", ":LINE1")
        self.assertEqual("INS:ME", result)
        result_no_ending = self._db._rchop("INS", ":LINE1")
        self.assertEqual("INS", result_no_ending)

    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_handle_type(self, db_mock):
        """Test handle type."""
        db_mock.return_value = Connection()
        await self._db.connection()
        result = self._db._handle_type(1, int)
        self.assertEqual([1], result)
        result_tuple = self._db._handle_type((0.1, 0.2), float)
        self.assertEqual([0.1, 0.2], result_tuple)

    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_bnd_parts(self, db_mock):
        """Test breakend parsing parts."""
        db_mock.return_value = Connection()
        await self._db.connection()
        result = self._db._bnd_parts("[CHR17:31356925[N", "126_2")
        self.assertEqual(("chr17", 31356925, True, True, "N", True, "126_2"), result)

    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg")
    async def test_connection(self, db_mock):
        """Test database URL fetching."""
        await self._db.connection()
        db_mock.connect.assert_called()

    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_check_tables(self, db_mock):
        """Test checking tables."""
        db_mock.return_value = Connection()
        await self._db.connection()
        db_mock.assert_called()
        result = await self._db.check_tables(["DATATSET1", "DATATSET2"])
        # No Missing tables
        self.assertEqual(result, [])

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_create_tables(self, db_mock, mock_log):
        """Test creating tables."""
        sql = """CREATE TABLE IF NOT EXISTS beacon_data_table (
            id SERIAL,
            dataset_id VARCHAR(200),
            PRIMARY KEY (id));"""
        db_mock.return_value = Connection()
        await self._db.connection()
        db_mock.assert_called()
        sql_file = self._dir.write("sql.init", sql.encode("utf-8"))
        await self._db.create_tables(sql_file)
        # Should assert logs
        mock_log.info.assert_called_with("Tables have been created")

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_create_tables_exception(self, db_mock, mock_log):
        """Test creating tables exception."""
        db_mock.return_value = ConnectionException()
        await self._db.connection()
        await self._db.create_tables("sql.init")
        log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CREATE TABLES -> [Errno 2] No such file or directory: 'sql.init'"
        mock_log.error.assert_called_with(log)

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    @unittest.mock.patch("beacon_api.utils.db_load.VCF")
    async def test_load_metadata(self, mock_vcf, db_mock, mock_log):
        """Test load metadata."""
        metadata = """{"name": "ALL.chrMT.phase3_callmom-v0_4.20130502.genotypes.vcf",
            "datasetId": "urn:hg:exampleid",
            "description": "Mitochondrial genome from the 1000 Genomes project",
            "assemblyId": "GRCh38",
            "createDateTime": "2013-05-02 12:00:00",
            "updateDateTime": "2013-05-02 12:00:00",
            "version": "v0.4",
            "externalUrl": "smth",
            "accessType": "PUBLIC"}"""
        db_mock.return_value = Connection()
        await self._db.connection()
        db_mock.assert_called()
        metafile = self._dir.write("data.json", metadata.encode("utf-8"))
        vcf = unittest.mock.MagicMock(name="samples")
        vcf.samples.return_value = [1, 2, 3]
        await self._db.load_metadata(vcf, metafile, self.datafile)
        # NOTE(review): assigning to mock_calls does not assert anything --
        # use mock_log.info.assert_has_calls(...) to actually verify the logs.
        mock_log.info.mock_calls = [f"Parsing metadata from {metafile}", "Metadata has been parsed"]

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_load_metadata_exception(self, db_mock, mock_log):
        """Test load metadata error."""
        db_mock.return_value = ConnectionException()
        await self._db.connection()
        vcf = unittest.mock.MagicMock(name="samples")
        vcf.samples.return_value = [1, 2, 3]
        await self._db.load_metadata(vcf, "meta.are", "datafile")
        log = "AN ERROR OCCURRED WHILE ATTEMPTING TO PARSE METADATA -> [Errno 2] No such file or directory: 'meta.are'"
        mock_log.error.assert_called_with(log)

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_load_datafile(self, db_mock, mock_log):
        """Test load_datafile."""
        db_mock.return_value = Connection()
        vcf = unittest.mock.MagicMock(name="samples")
        vcf.return_value = [{"record": 1}, {"record": 2}, {"records": 3}]
        vcf.samples.return_value = [{"record": 1}, {"record": 2}, {"records": 3}]
        await self._db.connection()
        db_mock.assert_called()
        await self._db.load_datafile(vcf, self.datafile, "DATASET1")
        # NOTE(review): assigning to mock_calls does not assert anything.
        mock_log.info.mock_calls = [f"Read data from {self.datafile}", f"{self.datafile} has been processed"]

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_close(self, db_mock, mock_log):
        """Test database URL close."""
        db_mock.return_value = Connection()
        await self._db.connection()
        await self._db.close()
        # NOTE(review): assigning to mock_calls does not assert anything.
        mock_log.info.mock_calls = [
            "Mark the database connection to be closed",
            "The database connection has been closed",
        ]

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_close_error(self, db_mock, mock_log):
        """Test database URL close error."""
        db_mock.return_value = ConnectionException()
        await self._db.connection()
        await self._db.close()
        log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CLOSE DATABASE CONNECTION -> 'ConnectionException' object has no attribute 'close'"
        mock_log.error.assert_called_with(log)

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_unpack(self, db_mock, mock_log):
        """Test database URL fetching."""
        db_mock.return_value = Connection()
        await self._db.connection()
        inf1 = INFO((1), "i", 3, None)
        variant_1 = Variant(["C"], "T", inf1, 0.7, "indel", 3)
        result = self._db._unpack(variant_1)
        self.assertEqual(([0.3333333333333333], [1], ["SNP"], ["C"], 3, []), result)
        inf2 = INFO(1, "M", 3, None)
        variant_2 = Variant(["AT", "A"], "ATA", inf2, 0.7, "mnp", 3)
        result = self._db._unpack(variant_2)
        self.assertEqual(([0.3333333333333333], [1], ["DEL", "DEL"], ["AT", "A"], 3, []), result)
        inf3 = INFO((1), "S", 3, 0.5)
        variant_3 = Variant(["TC"], "T", inf3, 0.7, "snp", 3)
        result = self._db._unpack(variant_3)
        self.assertEqual(([0.5], [1], ["INS"], ["TC"], 3, []), result)
        inf4 = INFO((1), "<INS:ME>", 3, None, "BND")
        variant_4 = Variant(["TC"], "T", inf4, 0.7, "snp", 3)
        result = self._db._unpack(variant_4)
        self.assertEqual(([0.3333333333333333], [1], ["SNP"], ["TC"], 3, []), result)
        inf5 = INFO((1), "S", 3, None, "<INS:ME>")
        variant_5 = Variant(["TC"], "T", inf5, 0.7, "ins", 3)
        result5 = self._db._unpack(variant_5)
        self.assertEqual(([0.3333333333333333], [1], ["INS"], ["TC"], 3, []), result5)

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
    async def test_chunks(self, db_mock, mock_log):
        """Test database URL fetching."""
        db_mock.return_value = Connection()
        await self._db.connection()
        variant = [(1, 2), (2, 3)]
        result = self._db._chunks(variant, 1)
        lines = []
        for i in result:
            lines.append(list(i))
        self.assertEqual([[(1, 2)], [(2, 3)]], lines)
 def tearDown(self):
     """Remove every TempDirectory created during the test."""
     TempDirectory.cleanup_all()
 def tearDownAll():
     """
     Delete all the temporary directories and files created during this
     testing session.
     """
     # Classmethod cleanup: removes every live TempDirectory at once.
     TempDirectory.cleanup_all()
예제 #29
0
class TestBasicFunctions(unittest.IsolatedAsyncioTestCase):
    """Test supporting functions."""
    def setUp(self):
        """Create a temporary directory for fixture files."""
        self._dir = TempDirectory()

    def tearDown(self):
        """Clean up temporary directories created during the test."""
        # NOTE(review): cleanup_all() is a classmethod that removes *every*
        # live TempDirectory, not just self._dir; self._dir.cleanup() may be
        # the narrower intent — confirm.
        self._dir.cleanup_all()

    def test_parser(self):
        """Test argument parsing."""
        parsed = parse_arguments(
            ["/path/to/datafile.csv", "/path/to/metadata.json"])
        self.assertEqual(parsed.datafile, "/path/to/datafile.csv")
        self.assertEqual(parsed.metadata, "/path/to/metadata.json")

    @unittest.mock.patch("beacon_api.conf.config.asyncpg")
    async def test_init_pool(self, db_mock):
        """Test database connection pool creation."""
        db_mock.return_value = unittest.mock.AsyncMock(name="create_pool")
        db_mock.create_pool = unittest.mock.AsyncMock()
        await init_db_pool()
        db_mock.create_pool.assert_called()

    @unittest.mock.patch("beacon_api.utils.db_load.LOG")
    @unittest.mock.patch("beacon_api.utils.db_load.BeaconDB")
    @unittest.mock.patch("beacon_api.utils.db_load.VCF")
    async def test_init_beacon_db(self, mock_vcf, db_mock, mock_log):
        """Test beacon_init db call."""
        db_mock.return_value = MockBeaconDB()
        metadata = """{"name": "DATASET1",
                    "description": "example dataset number 1",
                    "assemblyId": "GRCh38",
                    "version": "v1",
                    "sampleCount": 2504,
                    "externalUrl": "https://datasethost.org/dataset1",
                    "accessType": "PUBLIC"}"""
        metafile = self._dir.write("data.json", metadata.encode("utf-8"))
        data = """MOCK VCF file"""
        datafile = self._dir.write("data.vcf", data.encode("utf-8"))
        await init_beacon_db([datafile, metafile])
        # NOTE(review): this *assigns* to mock_calls instead of asserting on
        # it, so it verifies nothing; consider
        # mock_log.info.assert_any_call(...) with the real log messages.
        mock_log.info.mock_calls = [
            "Mark the database connection to be closed",
            "The database connection has been closed",
        ]

    @unittest.mock.patch("beacon_api.utils.db_load.init_beacon_db")
    def test_main_db(self, mock_init):
        """Test run asyncio main beacon init."""
        main()
        mock_init.assert_called()

    def test_aud_claim(self):
        """Test aud claim function."""
        env = EnvironmentVarGuard()
        env.set("JWT_AUD", "aud1,aud2")
        result = verify_aud_claim()
        # Because it is false we expect it not to be parsed
        expected = (False, [])
        self.assertEqual(result, expected)
        env.unset("JWT_AUD")

    def test_token_scheme_check_bad(self):
        """Test token scheme no token."""
        # This might never happen, yet lets prepare for it
        with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized):
            token_scheme_check("", "https", {}, "localhost")

    def test_access_resolution_base(self):
        """Test assumptions for access resolution.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        # No bona fide, no permissions, no valid token -> only PUBLIC datasets.
        token = mock_token(False, [], False)
        host = "localhost"
        result = access_resolution(request, token, host, ["1", "2"],
                                   ["3", "4"], ["5", "6"])
        self.assertListEqual(result[0], ["PUBLIC"])
        # Sort before comparing: dataset ordering is not guaranteed.
        intermediate_list = result[1]
        intermediate_list.sort()
        self.assertListEqual(["1", "2"], intermediate_list)

    def test_access_resolution_no_controlled(self):
        """Test assumptions for access resolution for token but no controlled datasets.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, [], True)
        host = "localhost"
        result = access_resolution(request, token, host, ["1", "2"],
                                   ["3", "4"], ["5", "6"])
        self.assertListEqual(result[0], ["PUBLIC"])
        intermediate_list = result[1]
        intermediate_list.sort()
        self.assertListEqual(["1", "2"], intermediate_list)

    def test_access_resolution_registered(self):
        """Test assumptions for access resolution for token with just bona_fide.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(True, [], True)
        host = "localhost"
        result = access_resolution(request, token, host, ["1", "2"],
                                   ["3", "4"], ["5", "6"])
        self.assertListEqual(result[0], ["PUBLIC", "REGISTERED"])
        intermediate_list = result[1]
        intermediate_list.sort()
        self.assertListEqual(["1", "2", "3", "4"], intermediate_list)

    def test_access_resolution_controlled_no_registered(self):
        """Test assumptions for access resolution for token and no bona_fide.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, ["5", "6"], True)
        host = "localhost"
        result = access_resolution(request, token, host, ["1", "2"],
                                   ["3", "4"], ["5", "6"])
        self.assertListEqual(result[0], ["PUBLIC", "CONTROLLED"])
        intermediate_list = result[1]
        intermediate_list.sort()
        self.assertListEqual(["1", "2", "5", "6"], intermediate_list)

    def test_access_resolution_controlled_registered(self):
        """Test assumptions for access resolution for token and bona_fide.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        # Bona fide + permissions on 5 and 6 -> all three access levels.
        token = mock_token(True, ["5", "6"], True)
        host = "localhost"
        result = access_resolution(request, token, host, ["1", "2"],
                                   ["3", "4"], ["5", "6"])
        self.assertListEqual(result[0], ["PUBLIC", "REGISTERED", "CONTROLLED"])
        intermediate_list = result[1]
        intermediate_list.sort()
        self.assertListEqual(["1", "2", "3", "4", "5", "6"], intermediate_list)

    def test_access_resolution_bad_registered(self):
        """Test assumptions for access resolution for requested registered Unauthorized.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, [], False)
        host = "localhost"
        with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized):
            access_resolution(request, token, host, [], ["3"], [])

    def test_access_resolution_no_registered2(self):
        """Test assumptions for access resolution for requested registered Forbidden.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, [], True)
        host = "localhost"
        with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden):
            access_resolution(request, token, host, [], ["4"], [])

    def test_access_resolution_controlled_forbidden(self):
        """Test assumptions for access resolution for requested controlled Forbidden.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        # NOTE(review): permission is the int 7 here while every other test
        # uses string ids ("7") — confirm whether that is intentional.
        token = mock_token(False, [7], True)
        host = "localhost"
        with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden):
            access_resolution(request, token, host, [], ["6"], [])

    def test_access_resolution_controlled_unauthorized(self):
        """Test assumptions for access resolution for requested controlled Unauthorized.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, [], False)
        host = "localhost"
        with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized):
            access_resolution(request, token, host, [], ["5"], [])

    def test_access_resolution_controlled_no_perms(self):
        """Test assumptions for access resolution for requested controlled Forbidden.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, ["7"], True)
        host = "localhost"
        result = access_resolution(request, token, host, ["2"], ["6"], [])
        self.assertEqual(result, (["PUBLIC"], ["2"]))

    def test_access_resolution_controlled_some(self):
        """Test assumptions for access resolution for requested controlled some datasets.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, ["5"], True)
        host = "localhost"
        result = access_resolution(request, token, host, [], [], ["5", "6"])
        self.assertEqual(result, (["CONTROLLED"], ["5"]))

    def test_access_resolution_controlled_no_perms_public(self):
        """Test assumptions for access resolution for requested controlled and public, returning public only.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, [], False)
        host = "localhost"
        result = access_resolution(request, token, host, ["1"], [], ["5"])
        self.assertEqual(result, (["PUBLIC"], ["1"]))

    def test_access_resolution_controlled_no_perms_bonafide(self):
        """Test assumptions for access resolution for requested controlled and registered, returning registered only.

        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(True, [], True)
        host = "localhost"
        result = access_resolution(request, token, host, [], ["4"], ["7"])
        self.assertEqual(result, (["REGISTERED"], ["4"]))

    def test_access_resolution_controlled_never_reached(self):
        """Test assumptions for access resolution for requested controlled unauthorized.

        By default permissions cannot be None, at worst empty set, thus this might never be reached.
        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, None, False)
        host = "localhost"
        with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized):
            access_resolution(request, token, host, [], [], ["8"])

    def test_access_resolution_controlled_never_reached2(self):
        """Test assumptions for access resolution for requested controlled forbidden.

        By default permissions cannot be None, at worst empty set, thus this might never be reached.
        It is based on the result of fetch_datasets_access function.
        """
        request = PARAMS
        token = mock_token(False, None, True)
        host = "localhost"
        with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden):
            access_resolution(request, token, host, [], [], ["8"])

    @unittest.mock.patch("beacon_api.permissions.ga4gh.validate_passport")
    async def test_ga4gh_controlled(self, m_validation):
        """Test ga4gh permissions claim parsing."""
        # Test: no passports, no permissions
        datasets = await get_ga4gh_controlled([])
        self.assertEqual(datasets, set())
        # Test: 1 passport, 1 unique dataset, 1 permission
        passport = {
            "ga4gh_visa_v1": {
                "type": "ControlledAccessGrants",
                "value": "https://institution.org/EGAD01",
                "source": "https://ga4gh.org/duri/no_org",
                "by": "self",
                "asserted": 1539069213,
                "expires": 4694742813,
            }
        }
        m_validation.return_value = passport
        dataset = await get_ga4gh_controlled([{}])  # one passport
        self.assertEqual(dataset, {"EGAD01"})
        # Test: 2 passports, 1 unique dataset, 1 permission (permissions must not be duplicated)
        passport = {
            "ga4gh_visa_v1": {
                "type": "ControlledAccessGrants",
                "value": "https://institution.org/EGAD01",
                "source": "https://ga4gh.org/duri/no_org",
                "by": "self",
                "asserted": 1539069213,
                "expires": 4694742813,
            }
        }
        m_validation.return_value = passport
        dataset = await get_ga4gh_controlled([{}, {}])  # two passports
        self.assertEqual(dataset, {"EGAD01"})
        # Test: 2 passports, 2 unique datasets, 2 permissions
        # Can't test this case with the current design!
        # Would need a way for validate_passport() to mock two different results

    async def test_ga4gh_bona_fide(self):
        """Test ga4gh statuses claim parsing."""
        # Bona fide requires both an AcceptedTermsAndPolicies visa and a
        # ResearcherStatus visa pointing at the expected DOI.
        passports = [
            (
                "enc",
                "header",
                {
                    "ga4gh_visa_v1": {
                        "type": "AcceptedTermsAndPolicies",
                        "value": "https://doi.org/10.1038/s41431-018-0219-y",
                        "source": "https://ga4gh.org/duri/no_org",
                        "by": "self",
                        "asserted": 1539069213,
                        "expires": 4694742813,
                    }
                },
            ),
            (
                "enc",
                "header",
                {
                    "ga4gh_visa_v1": {
                        "type": "ResearcherStatus",
                        "value": "https://doi.org/10.1038/s41431-018-0219-y",
                        "source": "https://ga4gh.org/duri/no_org",
                        "by": "peer",
                        "asserted": 1539017776,
                        "expires": 1593165413,
                    }
                },
            ),
        ]
        # Good test: both required passport types contained the correct value
        bona_fide_status = await get_ga4gh_bona_fide(passports)
        self.assertEqual(bona_fide_status, True)  # has bona fide
        # Bad test: missing passports of required type
        passports_empty = []
        bona_fide_status = await get_ga4gh_bona_fide(passports_empty)
        self.assertEqual(bona_fide_status, False)  # doesn't have bona fide

    @unittest.mock.patch("beacon_api.permissions.ga4gh.get_jwk")
    @unittest.mock.patch("beacon_api.permissions.ga4gh.jwt")
    @unittest.mock.patch("beacon_api.permissions.ga4gh.LOG")
    async def test_validate_passport(self, mock_log, m_jwt, m_jwk):
        """Test passport validation."""
        m_jwk.return_value = "jwk"
        # Test: validation passed
        m_jwt.return_value = MockDecodedPassport()
        await validate_passport({})

        # # Test: validation failed
        m_jwt.return_value = MockDecodedPassport(validated=False)
        # with self.assertRaises(Exception):
        await validate_passport({})
        # we are not raising the exception we are just doing a log
        # need to assert the log called
        mock_log.error.assert_called_with(
            "Something went wrong when processing JWT tokens: 1")

    @unittest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_permissions")
    async def test_check_ga4gh_token(self, m_get_perms):
        """Test token scopes."""
        # Test: no scope found
        decoded_data = {}
        dataset_permissions, bona_fide_status = await check_ga4gh_token(
            decoded_data, {}, False, set())
        self.assertEqual(dataset_permissions, set())
        self.assertEqual(bona_fide_status, False)
        # Test: scope is ok, but no claims
        decoded_data = {"scope": ""}
        dataset_permissions, bona_fide_status = await check_ga4gh_token(
            decoded_data, {}, False, set())
        self.assertEqual(dataset_permissions, set())
        self.assertEqual(bona_fide_status, False)
        # Test: scope is ok, claims are ok
        m_get_perms.return_value = {"EGAD01"}, True
        decoded_data = {"scope": "openid ga4gh_passport_v1"}
        dataset_permissions, bona_fide_status = await check_ga4gh_token(
            decoded_data, {}, False, set())
        self.assertEqual(dataset_permissions, {"EGAD01"})
        self.assertEqual(bona_fide_status, True)

    async def test_decode_passport(self):
        """Test key-less JWT decoding."""
        token, _ = generate_token("http://test.csc.fi")
        header, payload = await decode_passport(token)
        self.assertEqual(header.get("alg"), "HS256")
        self.assertEqual(payload.get("iss"), "http://test.csc.fi")

    @unittest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_bona_fide")
    @unittest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_controlled")
    @unittest.mock.patch("beacon_api.permissions.ga4gh.decode_passport")
    @unittest.mock.patch("beacon_api.permissions.ga4gh.retrieve_user_data")
    async def test_get_ga4gh_permissions(self, m_userinfo, m_decode,
                                         m_controlled, m_bonafide):
        """Test GA4GH permissions main function."""
        # Test: no data (nothing)
        m_userinfo.return_value = [{}]
        header = {}
        payload = {}
        m_decode.return_value = header, payload
        m_controlled.return_value = set()
        m_bonafide.return_value = False
        dataset_permissions, bona_fide_status = await get_ga4gh_permissions({})
        self.assertEqual(dataset_permissions, set())
        self.assertEqual(bona_fide_status, False)
        # Test: permissions
        m_userinfo.return_value = [{}]
        header = {}
        payload = {"ga4gh_visa_v1": {"type": "ControlledAccessGrants"}}
        m_decode.return_value = header, payload
        m_controlled.return_value = {"EGAD01"}
        m_bonafide.return_value = False
        dataset_permissions, bona_fide_status = await get_ga4gh_permissions({})
        self.assertEqual(dataset_permissions, {"EGAD01"})
        self.assertEqual(bona_fide_status, False)
        # Test: bona fide
        m_userinfo.return_value = [{}]
        header = {}
        payload = {"ga4gh_visa_v1": {"type": "ResearcherStatus"}}
        m_decode.return_value = header, payload
        m_controlled.return_value = set()
        m_bonafide.return_value = True
        dataset_permissions, bona_fide_status = await get_ga4gh_permissions({})
        self.assertEqual(dataset_permissions, set())
        self.assertEqual(bona_fide_status, True)
예제 #30
0
 def tearDown(self):
     # It appears to be safe to always call the following method when a
     # test case finishes.
     TempDirectory.cleanup_all()
 def tearDown(self):
     """Remove every TempDirectory created during this test run."""
     TempDirectory.cleanup_all()
 def tearDown(self):
     """Remove temporary directories and the activity's own tmp dir."""
     TempDirectory.cleanup_all()
     # presumably removes files the activity wrote outside TempDirectory —
     # TODO confirm against the activity implementation.
     self.activity.clean_tmp_dir()
예제 #33
0
    def tearDown(self):
        """Remove temporary directories and release Keras/TensorFlow state."""
        TempDirectory.cleanup_all()

        # Explicitly collect garbage, then clear the Keras backend session so
        # model state does not accumulate across tests.
        gc.collect()
        K.clear_session()
예제 #34
0
def tearDownModule():
    """Module-level cleanup: remove every TempDirectory created by the tests."""
    TempDirectory.cleanup_all()