Code Example #1
def main():
    catalogs = {
        'dcp2':
        config.Catalog(name='dcp2',
                       atlas='hca',
                       internal=False,
                       plugins=dict(
                           metadata=config.Catalog.Plugin(name='hca'),
                           repository=config.Catalog.Plugin(name='tdr')),
                       sources=set())
    }

    # To create a normalized OpenAPI document, we patch any
    # deployment-specific variables that affect the document.
    with patch.object(target=type(config),
                      attribute='catalogs',
                      new_callable=PropertyMock,
                      return_value=catalogs):
        assert config.catalogs == catalogs
        with patch.object(target=config,
                          attribute='service_function_name',
                          return_value='azul_service'):
            assert config.service_name == 'azul_service'
            with patch.object(target=config,
                              attribute='service_endpoint',
                              return_value='localhost'):
                assert config.service_endpoint() == 'localhost'
                app_module = load_app_module('service')
                app_spec = app_module.app.spec()
                doc_path = os.path.join(config.project_root,
                                        'lambdas/service/openapi.json')
                with write_file_atomically(doc_path) as file:
                    json.dump(app_spec, file, indent=4)
Code Example #2
    def test_cached_health(self):
        self.storage_service.create_bucket()
        # No health object is available in the S3 bucket, yielding an error
        with self.helper() as helper:
            response = requests.get(
                str(self.base_url.set(path='/health/cached')))
            self.assertEqual(500, response.status_code)
            self.assertEqual(
                'ChaliceViewError: Cached health object does not exist',
                response.json()['Message'])

        # A successful response is obtained when all the systems are functional
        self._create_mock_queues()
        endpoint_states = self._endpoint_states()
        app = load_app_module(self.lambda_name(), unit_test=True)
        with self.helper() as helper:
            self._mock_service_endpoints(helper, endpoint_states)
            app.update_health_cache(MagicMock(), MagicMock())
            response = requests.get(
                str(self.base_url.set(path='/health/cached')))
            self.assertEqual(200, response.status_code)

        # Another failure is observed when the cached health object is older than 2 minutes
        future_time = time.time() + 3 * 60
        with self.helper() as helper:
            with patch('time.time', new=lambda: future_time):
                response = requests.get(
                    str(self.base_url.set(path='/health/cached')))
                self.assertEqual(500, response.status_code)
                self.assertEqual(
                    'ChaliceViewError: Cached health object is stale',
                    response.json()['Message'])
Code Example #3
def load_tests(_loader, tests, _ignore):
    root = azul.config.project_root
    for module in [
            azul, azul.azulclient, azul.collections, azul.doctests, azul.dss,
            azul.exceptions, azul.files, azul.indexer.aggregate, azul.json,
            azul.json_freeze, azul.objects, azul.openapi, azul.openapi.params,
            azul.openapi.responses, azul.openapi.schema,
            azul.plugins.metadata.hca.contributor_matrices,
            azul.plugins.metadata.hca.full_metadata,
            azul.plugins.repository.tdr, azul.plugins.metadata.hca.transform,
            azul.service.drs_controller, azul.service.manifest_service,
            azul.strings, azul.terra, azul.threads, azul.time, azul.types,
            azul.uuids, azul.vendored.frozendict, retorts,
            load_app_module('service'),
            load_module(root + '/scripts/envhook.py', 'envhook'),
            load_module(root + '/scripts/export_environment.py',
                        'export_environment'),
            load_module(root + '/scripts/check_branch.py', 'check_branch'),
            load_module(root + '/scripts/velocity.py', 'velocity'),
            load_module(root + '/.flake8/azul_flake8.py', 'azul_flake8')
    ]:
        suite = doctest.DocTestSuite(module)
        assert suite.countTestCases() > 0, module
        tests.addTests(suite)
    return tests
Code Example #4
File: queues.py Project: DataBiosphere/azul
    def manage_lambdas(self, queues: Mapping[str, Queue], enable: bool):
        """
        Enable or disable the readers and writers of the given queues
        """
        indexer = load_app_module('indexer', unit_test=True)
        functions_by_queue = {
            handler.queue: config.indexer_function_name(handler.name)
            for handler in indexer.app.handler_map.values()
            if hasattr(handler, 'queue')
        }

        with ThreadPoolExecutor(max_workers=len(queues)) as tpe:
            futures = []

            def submit(f, *args, **kwargs):
                futures.append(tpe.submit(f, *args, **kwargs))

            for queue_name, queue in queues.items():
                try:
                    function = functions_by_queue[queue_name]
                except KeyError:
                    assert queue_name in config.fail_queue_names
                else:
                    if queue_name == config.notifications_queue_name():
                        # Prevent new notifications from being added
                        submit(self._manage_lambda, config.indexer_name, enable)
                    submit(self._manage_sqs_push, function, queue, enable)
            self._handle_futures(futures)
            futures = [tpe.submit(self._wait_for_queue_idle, queue) for queue in queues.values()]
            self._handle_futures(futures)
Code Example #5
File: app_test_case.py Project: NoopDog/azul
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Load the application module without modifying `sys.path` and without
        # adding it to `sys.modules`. This simplifies tear down and isolates the
        # app modules from different lambdas loaded by different concrete
        # subclasses. It does, however, violate this one invariant:
        # `sys.modules[module.__name__] == module`
        cls.app_module = load_app_module(cls.lambda_name(), unit_test=True)
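
The comment in the example above spells out what load_app_module is expected to do: load a lambda's app.py as a module object without modifying sys.path and without registering the result in sys.modules. The sketch below shows one way such a loader could be written with importlib. It is an assumption for illustration only, not the actual DataBiosphere/azul implementation; the lambdas/<name>/app.py layout and keyword arguments like unit_test=True are inferred from the examples on this page, while the module name 'app' and the rest of the body are hypothetical.

import importlib.util
import os


def load_app_module(lambda_name, **module_attributes):
    # Hypothetical sketch: load lambdas/<lambda_name>/app.py as a standalone
    # module object. Neither spec_from_file_location nor exec_module touches
    # sys.path or sys.modules, which matches the behaviour described above.
    path = os.path.join('lambdas', lambda_name, 'app.py')
    spec = importlib.util.spec_from_file_location('app', path)
    module = importlib.util.module_from_spec(spec)
    # Keyword arguments such as unit_test=True are made visible as module
    # attributes before the module body runs.
    module.__dict__.update(module_attributes)
    spec.loader.exec_module(module)
    return module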
Code Example #6
    @classmethod
    def setUpClass(cls) -> None:
        super().setUpClass()
        app_module = load_app_module('service')
        app_dir = os.path.dirname(app_module.__file__)
        factory = chalice.cli.factory.CLIFactory(app_dir)
        config = factory.create_config_obj()
        cls.server = factory.create_local_server(app_obj=app_module.app,
                                                 config=config,
                                                 host=cls.url.host,
                                                 port=cls.url.port)
        cls.server_thread = threading.Thread(target=cls.server.serve_forever)
        cls.server_thread.start()
Code Example #7
def load_tests(_loader, tests, _ignore):
    root = azul.config.project_root
    for module in [
        azul,
        azul.auth,
        azul.azulclient,
        azul.bigquery,
        azul.caching,
        azul.collections,
        azul.doctests,
        azul.dss,
        azul.exceptions,
        azul.files,
        azul.indexer,
        azul.indexer.aggregate,
        azul.iterators,
        azul.json,
        azul.json_freeze,
        azul.objects,
        azul.openapi,
        azul.openapi.params,
        azul.openapi.responses,
        azul.openapi.schema,
        azul.plugins.metadata.hca.contributor_matrices,
        azul.plugins.repository.tdr,
        azul.plugins.metadata.hca.transform,
        azul.service.drs_controller,
        azul.service.manifest_service,
        azul.service.repository_controller,
        azul.strings,
        azul.terra,
        azul.terraform,
        azul.threads,
        azul.time,
        azul.types,
        azul.uuids,
        azul.vendored.frozendict,
        load_app_module('service', unit_test=True),
        load_script('check_branch'),
        load_script('envhook'),
        load_script('export_environment'),
        load_module(root + '/.flake8/azul_flake8.py', 'azul_flake8'),
        load_module(root + '/test/test_tagging.py', 'test_tagging'),
        load_module(root + '/test/indexer/test_tdr.py', 'test_tdr')
    ]:
        suite = doctest.DocTestSuite(module)
        assert suite.countTestCases() > 0, module
        tests.addTests(suite)
    return tests
Code Example #8
File: lambdas.py Project: DataBiosphere/azul
    def _contribution_lambda_names(cls) -> FrozenSet[str]:
        indexer = load_app_module('indexer')
        notification_queue_names = {
            config.unqual_notifications_queue_name(retry=retry)
            for retry in (False, True)
        }

        def has_notification_queue(handler) -> bool:
            try:
                queue = handler.queue
            except AttributeError:
                return False
            else:
                resource_name, _ = config.unqualified_resource_name(queue)
                return resource_name in notification_queue_names

        return frozenset((handler.name for handler in vars(indexer).values()
                          if has_notification_queue(handler)))
Code Example #9
File: apidev.py Project: DataBiosphere/azul
def main():
    static_dir = os.path.join(parent_dir, 'apidev_static')
    web_page = os.path.join(static_dir, 'swagger-editor.html')

    host, port = 'localhost', 8787
    server_url = f"http://{host}:{port}"
    httpd = HTTPServer((host, port), SimpleHTTPRequestHandler)
    address = f"{server_url}/{os.path.relpath(web_page)}?url={server_url}/{os.path.relpath(parent_dir)}/{spec_file}"
    print(f'Open {address} in browser to validate changes.')

    service = load_app_module('service')
    event_handler = UpdateHandler(service)
    write_specs(service.app)

    observer = Observer()
    observer.schedule(event_handler,
                      path=os.path.dirname(service.__file__),
                      recursive=False)
    observer.start()

    httpd.serve_forever()
Code Example #10
from azul import (
    config,
)
from azul.modules import (
    load_app_module,
)
from azul.template import (
    emit,
)

suffix = '-' + config.deployment_stage
assert config.indexer_name.endswith(suffix)

indexer = load_app_module('indexer')

emit({
    "version": "2.0",
    # Chalice appends stage name implicitly
    "app_name": config.indexer_name[:-len(suffix)],
    "api_gateway_stage": config.deployment_stage,
    "manage_iam_role": False,
    "iam_role_arn": "${var.role_arn}",
    "environment_variables": config.lambda_env,
    "minimum_compression_size": config.minimum_compression_size,
    "lambda_timeout": config.api_gateway_lambda_timeout,
    "lambda_memory_size": 128,
    "stages": {
        config.deployment_stage: {
            "lambda_functions": {
                indexer.contribute.name: {
                    "reserved_concurrency": config.contribution_concurrency(retry=False),
                    "lambda_memory_size": 256,
Code Example #11
    def test(self, mock_uuid, mock_helper):
        service = load_app_module('service', unit_test=True)
        # In a LocalAppTestCase we need the actual state machine name
        state_machine_name = config.state_machine_name(service.generate_manifest.name)
        with responses.RequestsMock() as helper:
            helper.add_passthru(str(self.base_url))
            for fetch in (True, False):
                with self.subTest(fetch=fetch):
                    execution_id = '6c9dfa3f-e92e-11e8-9764-ada973595c11'
                    mock_uuid.return_value = execution_id
                    format_ = ManifestFormat.compact
                    filters = Filters(explicit={'organ': {'is': ['lymph node']}},
                                      source_ids={'6aaf72a6-0a45-5886-80cf-48f8d670dc26'})
                    params = {
                        'catalog': self.catalog,
                        'format': format_.value,
                        'filters': json.dumps(filters.reify(service_config=MagicMock(),
                                                            explicit_only=True))
                    }
                    path = '/manifest/files'
                    object_url = 'https://url.to.manifest?foo=bar'
                    file_name = 'some_file_name'
                    object_key = f'manifests/{file_name}'
                    manifest_url = self.base_url.set(path=path,
                                                     args=dict(params, objectKey=object_key))
                    manifest = Manifest(location=object_url,
                                        was_cached=False,
                                        format_=format_,
                                        catalog=self.catalog,
                                        filters=filters,
                                        object_key=object_key,
                                        file_name=file_name)
                    url = self.base_url.set(path=path, args=params)
                    if fetch:
                        url.path.segments.insert(0, 'fetch')

                    partitions = (
                        ManifestPartition(index=0,
                                          is_last=False,
                                          file_name=None,
                                          config=None,
                                          multipart_upload_id=None,
                                          part_etags=None,
                                          page_index=None,
                                          is_last_page=None,
                                          search_after=None),
                        ManifestPartition(index=1,
                                          is_last=False,
                                          file_name=file_name,
                                          config={},
                                          multipart_upload_id='some_upload_id',
                                          part_etags=('some_etag',),
                                          page_index=512,
                                          is_last_page=False,
                                          search_after=('foo', 'doc#bar'))
                    )

                    with mock.patch.object(ManifestService, 'get_manifest') as mock_get_manifest:
                        for i, expected_status in enumerate(3 * [301] + [302]):
                            response = requests.get(str(url), allow_redirects=False)
                            if fetch:
                                self.assertEqual(200, response.status_code)
                                response = response.json()
                                self.assertEqual(expected_status, response.pop('Status'))
                                headers = response
                            else:
                                self.assertEqual(expected_status, response.status_code)
                                headers = response.headers
                            if expected_status == 301:
                                self.assertGreaterEqual(int(headers['Retry-After']), 0)
                            url = furl(headers['Location'])
                            if i == 0:
                                state = dict(format_=format_.value,
                                             catalog=self.catalog,
                                             filters=filters.to_json(),
                                             object_key=self.object_key,
                                             authentication=None,
                                             partition=partitions[0].to_json())
                                mock_helper.start_execution.assert_called_once_with(
                                    state_machine_name,
                                    execution_id,
                                    execution_input=state
                                )
                                mock_helper.describe_execution.assert_not_called()
                                mock_helper.reset_mock()
                                mock_helper.describe_execution.return_value = {'status': 'RUNNING'}
                            elif i == 1:
                                mock_get_manifest.return_value = partitions[1]
                                state = self.app_module.generate_manifest(state, None)
                                self.assertEqual(partitions[1],
                                                 ManifestPartition.from_json(state['partition']))
                                mock_get_manifest.assert_called_once_with(
                                    format_=ManifestFormat(state['format_']),
                                    catalog=state['catalog'],
                                    filters=Filters.from_json(state['filters']),
                                    partition=partitions[0],
                                    authentication=None,
                                    object_key=state['object_key']
                                )
                                mock_get_manifest.reset_mock()
                                mock_helper.start_execution.assert_not_called()
                                mock_helper.describe_execution.assert_called_once()
                                mock_helper.reset_mock()
                                # simulate absence of output due to eventual consistency
                                mock_helper.describe_execution.return_value = {'status': 'SUCCEEDED'}
                            elif i == 2:
                                mock_get_manifest.return_value = manifest
                                mock_helper.start_execution.assert_not_called()
                                mock_helper.describe_execution.assert_called_once()
                                mock_helper.reset_mock()
                                mock_helper.describe_execution.return_value = {
                                    'status': 'SUCCEEDED',
                                    'output': json.dumps(
                                        self.app_module.generate_manifest(state, None)
                                    )
                                }
                            elif i == 3:
                                mock_get_manifest.assert_called_once_with(
                                    format_=ManifestFormat(state['format_']),
                                    catalog=state['catalog'],
                                    filters=Filters.from_json(state['filters']),
                                    partition=partitions[1],
                                    authentication=None,
                                    object_key=state['object_key']
                                )
                                mock_get_manifest.reset_mock()
                    mock_helper.start_execution.assert_not_called()
                    mock_helper.describe_execution.assert_called_once()
                    expected_url = str(manifest_url) if fetch else object_url
                    self.assertEqual(expected_url, str(url))
                    mock_helper.reset_mock()

            manifest_states = [
                manifest,
                CachedManifestNotFound,
                CachedManifestSourcesChanged
            ]
            with mock.patch.object(ManifestService,
                                   'get_cached_manifest_with_object_key',
                                   side_effect=manifest_states):
                for manifest in manifest_states:
                    with self.subTest(manifest=manifest):
                        self.assertEqual(object_key, manifest_url.args['objectKey'])
                        response = requests.get(str(manifest_url), allow_redirects=False)
                        if isinstance(manifest, Manifest):
                            self.assertEqual(302, response.status_code)
                            self.assertEqual(object_url, response.headers['Location'])
                        else:
                            if manifest is CachedManifestNotFound:
                                cause = 'expired'
                            elif manifest is CachedManifestSourcesChanged:
                                cause = 'become invalid due to an authorization change'
                            else:
                                assert False
                            msg = f'GoneError: The requested manifest has {cause}, please request a new one'
                            self.assertEqual(410, response.status_code)
                            self.assertEqual(msg, response.json()['Message'])
Code Example #12
from azul import (
    config,
)
from azul.deployment import (
    aws,
)
from azul.modules import (
    load_app_module,
)

direct_access_role = config.dss_direct_access_role('service')
service = load_app_module('service')

policy = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "logs:CreateLogGroup",
                "logs:CreateLogStream",
                "logs:PutLogEvents"
            ],
            "Resource": "arn:aws:logs:*:*:*"
        },
        {
            "Effect": "Allow",
            "Action": [
                "es:ESHttpDelete",
                "es:ESHttpGet",
                "es:ESHttpHead",
                "es:ESHttpPut",
                "es:ESHttpPost"
            ],
Code Example #13
File: apidev.py Project: DataBiosphere/azul
    def on_modified(self, event):
        if event.src_path == self.tracked_file:
            self.service = load_app_module('service')
            write_specs(self.service.app)
            print('Spec updated')