Esempio n. 1
0
    def init_app(self, test_mode: bool = False):
        """Initialize the JWT/OIDC configuration for the app.

        Args:
            test_mode: when True, skip all OIDC setup (used by unit tests).
        """
        self.jwt_oidc_test_mode = test_mode
        # Nothing to configure when running in test mode.
        if self.jwt_oidc_test_mode:
            return

        # Hoisted: read the settings object once instead of per attribute.
        settings = get_api_settings()
        self.algorithm = settings.JWT_OIDC_ALGORITHMS

        # If the WELL_KNOWN_CONFIG is set, then go fetch the JWKS & ISSUER
        self.well_known_config = settings.JWT_OIDC_WELL_KNOWN_CONFIG
        if self.well_known_config:
            # try to get the jwks & issuer from the well known config
            with urlopen(url=self.well_known_config) as jurl:
                self.well_known_obj_cache = json.loads(
                    jurl.read().decode('utf-8'))

            self.jwks_uri = self.well_known_obj_cache['jwks_uri']
            self.issuer = self.well_known_obj_cache['issuer']
        else:
            # Fall back to explicitly configured endpoint values.
            self.jwks_uri = settings.JWT_OIDC_JWKS_URI
            self.issuer = settings.JWT_OIDC_ISSUER

        # Setup JWKS caching
        self.caching_enabled = settings.JWT_OIDC_CACHING_ENABLED
        if self.caching_enabled:
            self.cache = SimpleCache(
                default_timeout=settings.JWT_OIDC_JWKS_CACHE_TIMEOUT)

        self.audience = settings.JWT_OIDC_AUDIENCE
        self.client_secret = settings.JWT_OIDC_CLIENT_SECRET
Esempio n. 2
0
async def subscribe_to_queue(stan_client: stan.aio.client.Client,
                             call_back: Callable[[stan.aio.client.Msg], None]) \
        -> str:
    """Subscribe to the Queue using the environment setup.

    Args:
        stan_client: the stan connection
        call_back: a callback function that accepts 1 parameter, a Msg
    Returns:
        str: the NATS subject that was subscribed to
    """
    # Hoisted: read the settings object once.
    settings = get_api_settings()
    entity_subject = settings.NATS_SUBJECT
    entity_queue = settings.NATS_QUEUE
    # A durable name lets the subscription resume where it left off
    # after the service restarts.
    entity_durable_name = entity_queue + '_durable'

    await stan_client.subscribe(subject=entity_subject,
                                queue=entity_queue,
                                durable_name=entity_durable_name,
                                cb=call_back)
    return entity_subject
Esempio n. 3
0
    def setup(self) -> None:
        """Override setup to not add openapi prefix to openapi url.

        Registers the OpenAPI schema route, the Swagger UI and ReDoc doc
        pages, the application routers, and the exception handlers.
        """
        if self.openapi_url:

            # Serve the raw OpenAPI schema as JSON.
            async def openapi(_req: Request) -> JSONResponse:
                return JSONResponse(self.openapi())

            self.add_route(self.openapi_url, openapi, include_in_schema=False)
            # REMOVED: openapi_url = self.openapi_prefix + self.openapi_url
        if self.openapi_url and self.docs_url:

            # Interactive Swagger UI page pointed at the schema above.
            async def swagger_ui_html(_req: Request) -> HTMLResponse:
                return get_swagger_ui_html(
                    openapi_url=self.openapi_url,
                    title=self.title + ' - Swagger UI',
                    oauth2_redirect_url=self.swagger_ui_oauth2_redirect_url,
                )

            self.add_route(self.docs_url,
                           swagger_ui_html,
                           include_in_schema=False)

            # NOTE(review): the application routers are only registered inside
            # this `docs_url` branch — if docs are disabled the API routes are
            # never mounted. Confirm this is intentional.
            self.include_router(api_router,
                                prefix=get_api_settings().API_V1_STR)
            self.include_router(ops.ROUTER, prefix='/ops', tags=['ops'])

            if self.swagger_ui_oauth2_redirect_url:

                # OAuth2 redirect helper page used by Swagger UI.
                async def swagger_ui_redirect(_req: Request) -> HTMLResponse:
                    return get_swagger_ui_oauth2_redirect_html()

                self.add_route(
                    self.swagger_ui_oauth2_redirect_url,
                    swagger_ui_redirect,
                    include_in_schema=False,
                )
        if self.openapi_url and self.redoc_url:

            # Alternative ReDoc documentation page.
            async def redoc_html(_req: Request) -> HTMLResponse:
                return get_redoc_html(openapi_url=self.openapi_url,
                                      title=self.title + ' - ReDoc')

            self.add_route(self.redoc_url, redoc_html, include_in_schema=False)
        # Map framework/library errors to the project's handlers.
        self.add_exception_handler(HTTPException, http_error_handler)
        self.add_exception_handler(HTTP_422_UNPROCESSABLE_ENTITY,
                                   http_422_error_handler)
        self.add_exception_handler(RequestValidationError,
                                   validation_exception_handler)
        self.add_exception_handler(SQLAlchemyError, db_exception_handler)
        self.add_exception_handler(AuthenticationError, auth_exception_handler)
        # ADDED
        self.setup_jwt_manager(JWT)
        self.add_default_middleware()
Esempio n. 4
0
    async def find_notifications_by_status(db_session: Session, status: str):
        """Get notifications by status."""
        # Failed notifications are only looked up within the configured
        # retry time frame; every other status is fetched unbounded.
        if status == NotificationStatusEnum.FAILURE:
            retry_seconds = get_api_settings().DELIVERY_FAILURE_RETRY_TIME_FRAME
            return await NotificaitonCRUD.find_notifications_by_status_time(
                db_session, status, retry_seconds)

        return await NotificaitonCRUD.find_notifications_by_status(
            db_session, status)
def test_find_notification_by_status_time(session, loop):
    """Assert the test can retrieve notifications by status and time frame."""
    expected = NotificationFactory.Models.LESS_1_HOUR
    notification = NotificationFactory.create_model(
        session, notification_info=expected)

    found = loop.run_until_complete(
        NotificaitonCRUD.find_notifications_by_status_time(
            session,
            notification.status_code,
            get_api_settings().DELIVERY_FAILURE_RETRY_TIME_FRAME))

    assert found[0] == notification
    assert found[0].id == expected['id']
    assert found[0].recipients == expected['recipients']
Esempio n. 6
0
async def publish(payload):  # pylint: disable=too-few-public-methods
    """Service to manage Queue publish operations.

    Opens a NATS connection and a STAN (streaming) connection on top of
    it, publishes *payload* as JSON to the configured subject, and always
    closes both connections before returning.
    """
    # current_app.logger.debug('<publish')
    # NATS client connections
    nats_con = NATS()
    stan_con = STAN()

    async def close():
        """Close the stream and nats connections."""
        # Streaming connection first, then the underlying NATS connection.
        await stan_con.close()
        await nats_con.close()

    # Connection and Queue configuration.
    def nats_connection_options():
        return {
            'servers': get_api_settings().NATS_SERVERS,
            # 'io_loop': loop,
            'error_cb': error_cb,
            'closed_cb': closed_cb,
            'name': get_api_settings().NATS_CLIENT_NAME,
        }

    def stan_connection_options():
        return {
            'cluster_id': get_api_settings().NATS_CLUSTER_ID,
            # 0x58 == 88 random bits — a unique client id per publish call.
            'client_id': str(random.SystemRandom().getrandbits(0x58)),
            'nats': nats_con
        }

    try:
        # Connect to the NATS server, and then use that for the streaming connection.
        await nats_con.connect(**nats_connection_options(), verbose=True, connect_timeout=3, reconnect_time_wait=1)
        await stan_con.connect(**stan_connection_options())

        logger.debug(payload)

        await stan_con.publish(subject=get_api_settings().NATS_SUBJECT,
                               payload=json.dumps(payload).encode('utf-8'))

    except Exception as e:  # pylint: disable=broad-except
        # Log and re-raise so the caller still sees the failure.
        logger.error(e)
        raise
    finally:
        # await nc.flush()
        # NOTE(review): close() runs even when connect() itself failed —
        # confirm both clients tolerate close() on an unconnected instance.
        await close()
Esempio n. 7
0
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = BASE.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

# Point Alembic at the application's database URL; str() in case the
# setting is a URL object rather than a plain string.
config.set_main_option("sqlalchemy.url", str(get_api_settings().DATABASE_URL))


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # NOTE(review): the body appears truncated here — the standard alembic
    # template continues with context.configure(url=url, ...) and
    # context.run_migrations(); confirm against the full file.
    url = config.get_main_option("sqlalchemy.url")
Esempio n. 8
0
 def stan_connection_options():
     """Return the STAN connection options for the shared NATS connection."""
     # 0x58 == 88 random bits — gives each client a unique id.
     client_id = str(random.SystemRandom().getrandbits(0x58))
     return {
         'cluster_id': get_api_settings().NATS_CLUSTER_ID,
         'client_id': client_id,
         'nats': nats_con,
     }
Esempio n. 9
0
def engine_fixture():
    """Connect to the database."""
    # Bind the shared session registry to a fresh test-database engine.
    test_engine = sqlalchemy.create_engine(
        get_api_settings().NOTIFY_DATABASE_TEST_URL)
    SESSION.configure(bind=test_engine)
    return test_engine
Esempio n. 10
0
import logging
import logging.config

import uvicorn
from starlette.responses import RedirectResponse

from notify_api import NotifyAPI
from notify_api.core.settings import get_api_settings


# setup loggers
# NOTE(review): `logging.config` is a submodule that needs an explicit
# `import logging.config` — confirm it is imported at the top of this file.
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)

# get root logger
logger = logging.getLogger(__name__)  # the __name__ resolve to 'main' since we are at the root of the project.
# This will get the root logger since no logger in the configuration has this name.


# Application instance, bound to the configured database.
app = NotifyAPI(bind=get_api_settings().NOTIFY_DATABASE_URL)


@app.get('/')
async def root():
    """Redirect the bare root path to the interactive API docs."""
    return RedirectResponse(url='/docs')

# Development entry point; production deployments run via an ASGI server.
if __name__ == '__main__':
    uvicorn.run(app, port=5002)
Esempio n. 11
0
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = BASE.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

# Point Alembic at the application's database URL; str() in case the
# setting is a URL object rather than a plain string.
config.set_main_option("sqlalchemy.url",
                       str(get_api_settings().NOTIFY_DATABASE_URL))


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # NOTE(review): the body is cut off at the end of this view — the
    # standard alembic template continues with context.configure(url=url,
    # ...) and context.run_migrations(); confirm against the full file.
    url = config.get_main_option("sqlalchemy.url")