Example #1
0
def handle_errors(e):
    """Translate exceptions raised during request handling into responses.

    Generic error handler for the Flask application. OAuth 2.0 authorization
    failures and known API errors are mapped to a message payload plus the
    matching HTTP status code; anything unanticipated is logged with a full
    traceback before a response is produced.

    Note:
        For exceptions to reach this handler, the application configuration
        must enable exception propagation (Flask's ``PROPAGATE_EXCEPTIONS``
        setting; the config key is misspelled "PROPOGATE_EXPECTIONS" in some
        project docs — verify against the configuration module).

    Return:
        dict, int: The error message and associated error code.
    """
    if isinstance(e, OAuth2ProviderError):
        return json.dumps({"message": "Access Denied"}), 401
    if isinstance(e, NotFound):
        return json.dumps({"error": "Location not found"}), 404
    if isinstance(e, ApiError):
        return e.get_message(), e.get_status_code()

    # Anything past this point was not anticipated by the API layer, so
    # record a traceback before answering.
    LogFactory.get_console_logger("exception-handler").exception(
        "Encountered an error while processing a request.")

    if isinstance(e, ApiUnhandledError):
        return e.get_message(), e.get_status_code()

    # Fall back to the exception's own HTTP response (werkzeug-style
    # exceptions expose get_response(); assumed here — confirm callers).
    return e.get_response()
    def __init__(self, logger_name: str = "data-manager", **kwargs):
        """Wire up configuration, database, ORM and logging collaborators.

        Args:
            logger_name (str): Name for the console logger.
            **kwargs: Optional overrides:
                base: SQLAlchemy declarative base (defaults to module Base).
                use_local_dirs (bool): Search the local schema directory for
                    descriptors (default True).
                descriptors (list): Descriptors supplied directly as dicts.
        """
        orm_base = kwargs.get("base", Base)
        search_local_dirs = kwargs.get("use_local_dirs", True)
        extra_descriptors = kwargs.get("descriptors", [])

        self.app_config = ConfigurationFactory.from_env()
        self.config = ConfigFunctions(self.app_config)
        self.db = DBHandler(self.config)
        self.orm_factory = ORMFactory(orm_base)
        self.logger = LogFactory.get_console_logger(logger_name)

        # Only consult the on-disk schema path when local lookups are wanted.
        self.descriptor_directories = (
            [self.config.get_data_resource_schema_path()]
            if search_local_dirs else [])

        self.custom_descriptors = extra_descriptors
        self.data_store = []
Example #3
0
 def __init__(self):
     """Create a console logger dedicated to the resource handler."""
     self.logger = LogFactory.get_console_logger("resource-handler")
Example #4
0
    programs_descriptor,
    skills_descriptor,
)

import docker
import pytest
from data_resource_api.app.data_managers.data_model_manager import DataModelManagerSync
from data_resource_api.app.data_managers.data_resource_manager import (
    DataResourceManagerSync, )
from data_resource_api.app.utils.junc_holder import JuncHolder
from data_resource_api.config import ConfigurationFactory
from data_resource_api.logging import LogFactory
from data_resource_api.utils import exponential_backoff
from sqlalchemy.ext.declarative import declarative_base

logger = LogFactory.get_console_logger("conftest")


class PostgreSQLContainer:
    """A PostgreSQL Container Object.

    This class provides a mechanism for managing PostgreSQL Docker containers
    so that it can be injected into unit tests.

    Class Attributes:
        config (object): A Configuration Factory object.
        container (object): The Docker container object.
        docker_client (object): Docker client.
        db_environment (list): Database environment configuration variables.
        db_ports (dict): Dictionary of database port mappings.
Example #5
0
import os

from alembic.config import Config
from data_resource_api.logging import LogFactory

logger = LogFactory.get_console_logger("config")


class ConfigFunctions:
    def __init__(self, app_config):
        """Store the application configuration the helper methods read from.

        Args:
            app_config (object): Application configuration object (e.g. one
                produced by ConfigurationFactory).
        """
        self.app_config = app_config

    def get_sleep_interval(self):
        """Retrieve the thread's sleep interval.

        Returns:
            int: The sleep interval (in seconds) for the thread.

        Note:
            The value is read directly from the application configuration's
            DATA_MODEL_SLEEP_INTERVAL attribute; this method itself performs
            no environment lookup, parsing, or fallback. Presumably the
            configuration object derives it from the SLEEP_INTERVAL
            environment variable with a 30-second default — confirm in
            ConfigurationFactory.
        """

        return self.app_config.DATA_MODEL_SLEEP_INTERVAL

    def get_data_resource_schema_path(self):
        """Retrieve the path to look for data resource specifications.

        Returns:
            str: The search path for data resource schemas.
import json
import os
from hashlib import md5

from data_resource_api.logging import LogFactory

logger = LogFactory.get_console_logger("descriptor-utils")


class DescriptorsLoader:
    """Yields Descriptor objects when given a list and/or a directory of
    descriptors.

    Use iter_descriptors() to yield.
    """
    def __init__(self, directories=None, dict_descriptors=None):
        """Record the descriptor sources to iterate over later.

        Args:
            directories (list | None): Directories to scan for descriptor
                files. Defaults to no directories.
            dict_descriptors (list | None): Descriptors already loaded as
                dicts. Defaults to none.
        """
        # None sentinels instead of mutable `= []` defaults: a literal []
        # default is shared across every instance constructed without
        # arguments, so one instance's appends would leak into the others.
        self.directories = [] if directories is None else directories
        self.dict_descriptors = (
            [] if dict_descriptors is None else dict_descriptors)

    def iter_descriptors(self):
        """Yield Descriptor objects: directory files first, then dicts."""
        yield from DescriptorsFromDirectory(self.directories).iter_files()

        for descriptor in self.dict_descriptors:
            yield Descriptor(descriptor)


class DescriptorsFromDirectory:
    """Helper class that handles yielding descriptors from a directory.

    Use iter_files() to yield Descriptor objects.
Example #7
0
import os

from alembic import command
from data_resource_api.db import Checksum, Migrations, Session
from data_resource_api.logging import LogFactory


logger = LogFactory.get_console_logger("db-handler")


class DBHandler:
    def __init__(self, config):
        """Keep a reference to the configuration object used by the
        handler's database/migration methods.

        Args:
            config (object): Configuration helper (ConfigFunctions-style).
        """
        self.config = config

    def add_model_checksum(
        self, table_name: str, model_checksum: str = "0", descriptor_json: dict = {}
    ):
        """Adds a new checksum for a data model.

        Args:
            table_name (str): Name of the table to add the checksum.
            checksum (str): Checksum value.
        """
        session = Session()
        try:
            checksum = Checksum()
            checksum.data_resource = table_name
            checksum.model_checksum = model_checksum
            checksum.descriptor_json = descriptor_json
            session.add(checksum)
            session.commit()
Example #8
0
"""ORM Factory.

A factory for building SQLAlchemy ORM models from a Frictionless
TableSchema specification.
"""

import warnings

from data_resource_api.app.utils.junc_holder import JuncHolder
from data_resource_api.factories.table_schema_types import (
    TABLESCHEMA_TO_SQLALCHEMY_TYPES, )
from data_resource_api.logging import LogFactory
from sqlalchemy import Column, ForeignKey, Integer, String, Table, exc
from tableschema import Schema

logger = LogFactory.get_console_logger("orm-factory")


class ORMFactory:
    """ORM Factory.

    Note:
        This sole purpose of this factory is to build ORM models on demand for other
        factories and modules.
    """
    def __init__(self, base):
        """Store the SQLAlchemy declarative base used when building models.

        Args:
            base: Declarative base class that generated ORM models attach to.
        """
        self.base = base

    def evaluate_foreign_key(self, foreign_keys, field_name, field_type):
        """Determine if a field is a foreign key.
Example #9
0
# import time module, Observer, FileSystemEventHandler
import os
import time

from data_resource_api.app.utils.config import ConfigFunctions
from data_resource_api.app.utils.db_handler import DBHandler
from data_resource_api.config import ConfigurationFactory
from data_resource_api.logging import LogFactory
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer

logger = LogFactory.get_console_logger("file-watcher")


class MigrationFileWatcher:
    def __init__(self):
        """Set up a watchdog Observer pointed at the migration directory."""
        self.observer = Observer()
        self.app_config = ConfigurationFactory.from_env()
        self.config = ConfigFunctions(self.app_config)
        # get_alembic_config() returns a pair; only the second element (the
        # directory to watch) is used here. First element presumably the
        # alembic Config itself — confirm in ConfigFunctions.
        _, self.watchDirectory = self.config.get_alembic_config()

    def run(self):
        event_handler = Handler()
        self.observer.schedule(event_handler,
                               self.watchDirectory,
                               recursive=False)
        self.observer.start()
        try:
            while True:
                time.sleep(5)
        except:
"""Versioned Resource.

This class extends the Flask Restful Resource class with the ability to
look for the API version number in the request header.
"""

from data_resource_api.api.v1_0_0 import ResourceHandler as V1_0_0_ResourceHandler
from data_resource_api.app.utils.exception_handler import MethodNotAllowed
from flask import request
from flask_restful import Resource
from data_resource_api.logging import LogFactory

logger = LogFactory.get_console_logger("versioned-resource")


class VersionedResourceParent(Resource):
    __slots__ = [
        "data_resource_name",
        "data_model",
        "table_schema",
        "api_schema",
        "restricted_fields",
    ]

    def __init__(self):
        """Delegate straight to flask_restful's Resource initializer."""
        Resource.__init__(self)

    def get_api_version(self, headers):
        try:
            api_version = headers["X-Api-Version"]
        except KeyError:
import os
import sys
import time

from data_resource_api.app.utils.db_handler import DBHandler
from data_resource_api.app.utils.descriptor import DescriptorsLoader
from data_resource_api.db import Checksum, Migrations, Session
from data_resource_api.logging import LogFactory

logger = LogFactory.get_console_logger("backwards-compat")

# This will upgrade the DB of a 1.0.4 version (or earlier?) database
# to be compatible with the 1.1.0 version.
#
# The following manual steps need to occur before running the upgrade script.
#
#     Update alembic version number to match the new first migration — 000000000000
#     Change the down_revision for the second migration to the new first migration — 000000000000
#     Change the alembic_version to the most recent migration you have
#
# DELETE FROM alembic_version;
# INSERT INTO alembic_version(version_num) VALUES ('123456789ABCDEF');
#
# You can now run the upgrade script. Do this by changing the start-up
# parameter from --data-model-manager to --upgrade when running the DMM.

SCHEMA_DIR = "/data-resource/schema"
MIGRATION_DIR = "/data-resource/migrations/versions"


# Do we need to perform these upgrade checks