Example #1
0
    def __init__(self, source: Union['Manifest', Dict, Text, Callable]):
        """
        Load manifest data from *source* and initialize empty bootstrap
        state.

        Args:
        - `source`: another Manifest, a raw dict, a file path, or a
          callable that yields manifest data.
        """
        # resolve the source into a file path plus the raw manifest data
        filepath, data = self._load_manifest_data(source)

        self.filepath = filepath
        self.data = data
        self.env = Environment()

        # populated later, during the bootstrap process
        self.bindings = []
        self.bootstraps = []
        self.default_bootstrap = None
        self.values = None
        self.scanner = None
        self.app = None
Example #2
0
    def __init__(
        self,
        middleware: List['Middleware'] = None,
        manifest: Manifest = None,
        mode: Mode = Mode.normal,
    ):
        """
        Set up per-thread (local) and cross-thread (shared) application
        state, core helpers, and the middleware chain.

        Args:
        - `middleware`: Middleware instances; only those whose `app_types`
          include this application's class are kept.
        - `manifest`: manifest source, wrapped in a Manifest when given.
        - `mode`: application run mode (defaults to Mode.normal).
        """
        # thread-local storage
        self.local = local()
        self.local.is_bootstrapped = False
        self.local.is_started = False
        self.local.manifest = Manifest(manifest) if manifest else None
        self.local.bootstrapper_thread_id = None
        self.local.thread_executor = None
        self.local.process_executor = None
        self.local.tx_manager = None

        self.shared = DictObject()  # storage shared by threads
        self.shared.manifest_data = None
        self.shared.app_counter = 1

        # core helpers and bookkeeping
        self.env = Environment()
        self._mode = mode
        self._json = JsonEncoder()
        self._binder = ResourceBinder()
        self._namespace = {}
        self._arg_loader = None
        self._logger = None
        # pid of the process that created this app (for fork detection)
        self._root_pid = os.getpid()
        self._actions = DictObject()

        # keep only middleware compatible with this application class
        self._middleware = deque([
            m for m in (middleware or [])
            if isinstance(self, m.app_types)
        ])

        # set default main thread name
        current_thread().name = (
            f'{get_class_name(self)}MainThread'
        )
Example #3
0
 def __init__(self, store_class_name: Text = None, *args, **kwargs):
     """
     Remember which Store class to use, defaulting to 'SqlalchemyStore'
     when none is specified.
     """
     super().__init__(*args, **kwargs)
     self.env = Environment()
     if store_class_name:
         self.store_class_name = store_class_name
     else:
         self.store_class_name = 'SqlalchemyStore'
Example #4
0
class Store(object, metaclass=StoreMeta):
    """
    Abstract base class for persistence backends ("stores"). Subclasses
    implement the CRUD interface declared below via @abstractmethod.
    """

    # application environment accessor (project-level Environment class)
    env = Environment()
    # framework-populated slot; presumably holds app/thread-local state
    # installed by StoreMeta or bootstrap -- TODO confirm
    ravel = None

    def __init__(self, *args, **kwargs):
        # chronological record of store calls (used by dispatch()/replay())
        self._history = StoreHistory(store=self)
        # set True once bind() associates a Resource type with this store
        self._is_bound = False
        # the Resource type bound to this store (see bind())
        self._resource_type = None

    def __repr__(self):
        """Show the store class and, when bound, its resource type."""
        name = get_class_name(self)
        if not self.is_bound:
            return f'<{name}>'
        resource_name = get_class_name(self.resource_type)
        return f'<{name}(resource_type={resource_name})>'

    def dispatch(self,
                 method_name: Text,
                 args: Tuple = None,
                 kwargs: Dict = None):
        """
        Delegate a Store call to the named method, performing any side-effects,
        like creating a StoreHistory event if need be. This is used internally
        by Resource to call into store methods.

        Raises a RavelError (wrapping the original exception when needed)
        if the delegated method fails.
        """
        # call the requested Store method, capturing any exception instead
        # of letting it propagate immediately, so the call can still be
        # recorded in history below. FIX: the original re-raised inside the
        # except block, which skipped history recording on failure; moreover,
        # Python 3 deletes the `except ... as exc` binding when the handler
        # exits, so the later `if exc is not None` check could never work.
        func = getattr(self, method_name)
        exc = None
        result = None
        try:
            result = func(*(args or tuple()), **(kwargs or {}))
        except Exception as caught_exc:
            exc = caught_exc

        # create and store the Store call in a history event
        if self.history.is_recording_method(method_name):
            event = StoreEvent(method_name, args, kwargs, result, exc)
            self._history.append(event)

        # finally raise the exception if one was generated
        if exc is not None:
            data = {'method': method_name, 'args': args, 'kwargs': kwargs}
            if not isinstance(exc, RavelError):
                raise RavelError(data=data, wrapped_exception=exc)
            else:
                exc.data.update(data)
                raise exc

        return result

    @property
    def is_bound(self) -> bool:
        """True once bind() has associated a Resource type."""
        return self._is_bound

    @property
    def resource_type(self) -> Type['Resource']:
        """The Resource type bound to this store, or None when unbound."""
        return self._resource_type

    @property
    def schema(self) -> 'Schema':
        """Schema of the bound resource type, or None when unbound."""
        if self._resource_type is not None:
            return self._resource_type.ravel.schema
        return None

    @property
    def app(self) -> 'Application':
        """The host application (set on cls.ravel during bootstrap)."""
        return self.ravel.app

    @property
    def history(self) -> 'StoreHistory':
        """This store's call-history recorder."""
        return self._history

    def replay(self,
               history: StoreHistory = None,
               reads=True,
               writes=True) -> Dict[StoreEvent, object]:
        """
        Replay events (interface calls) from this or another store's history,
        returning an ordered mapping from the event object to the return value
        of the corresponding store method.

        Args:
        - `history`: the history to replay in this store.
        - `reads`: replay "read" events, like query, get, get_many.
        - `writes`: replay "write" events, like create, update, etc.
        """
        # use this store's own history if none specified
        if history is None:
            history = self.history

        # hoist the method-name sets out of the loop
        read_names = self.history.read_method_names
        write_names = self.history.write_method_names

        results = OrderedDict()
        for event in history:
            should_replay = (
                (reads and event.method in read_names)
                or (writes and event.method in write_names)
            )
            if should_replay:
                method = getattr(self, event.method)
                results[event] = method(*event.args, **event.kwargs)

        return results

    def bind(self, resource_type: Type['Resource'], **kwargs):
        """
        Associate this store with a Resource type, invoking the on_bind
        hook and logging how long binding took.
        """
        started_at = datetime.now()

        self._resource_type = resource_type
        self.on_bind(resource_type, **kwargs)

        elapsed = (datetime.now() - started_at).total_seconds()
        resource_name = TypeUtils.get_class_name(resource_type)
        store_name = TypeUtils.get_class_name(self)
        console.debug(
            f'bound {resource_name} to {store_name} in {elapsed:.2f}s'
        )

        self._is_bound = True

    @classmethod
    def bootstrap(cls, app: 'Application' = None, **kwargs):
        """
        Perform class-level initialization, like getting
        a connection pool, for example.

        Records the host app on cls.ravel, invokes the on_bootstrap hook,
        flags the class as bootstrapped, and logs the elapsed time.
        """
        t1 = datetime.now()

        cls.ravel.app = app
        cls.on_bootstrap(**kwargs)

        # flag lives in thread-local storage; see is_bootstrapped()
        cls.ravel.local.is_bootstrapped = True

        t2 = datetime.now()
        secs = (t2 - t1).total_seconds()
        console.debug(f'bootstrapped {TypeUtils.get_class_name(cls)} '
                      f'in {secs:.2f}s')

        return cls

    @classmethod
    def on_bootstrap(cls, **kwargs):
        """Subclass hook invoked by bootstrap(); default is a no-op."""
        pass

    # NOTE: bind() invokes this as self.on_bind(...), so this is an instance
    # method; the first parameter was mistakenly named `cls` in the original.
    def on_bind(self, resource_type: Type['Resource'], **kwargs):
        """Subclass hook invoked by bind(); default is a no-op."""
        pass

    @classmethod
    def is_bootstrapped(cls):
        """
        True if bootstrap() has completed in the current thread (the flag
        is stored in thread-local storage on cls.ravel.local).
        """
        return getattr(cls.ravel.local, 'is_bootstrapped', False)

    @classmethod
    def has_transaction(cls):
        """Return whether a transaction is active. Backend-specific."""
        raise NotImplementedError()

    @classmethod
    def begin(cls, **kwargs):
        """Begin a transaction. Backend-specific."""
        raise NotImplementedError()

    @classmethod
    def commit(cls, **kwargs):
        """Commit the active transaction. Backend-specific."""
        raise NotImplementedError()

    @classmethod
    def rollback(cls, **kwargs):
        """Roll back the active transaction. Backend-specific."""
        raise NotImplementedError()

    def create_id(self, record: Dict) -> object:
        """
        Generate and return a new ID for the given not-yet-created record. If
        this method returns None, the backend database/storage technology
        must create and return it instead.
        """
        existing_id = record.get(ID)
        if existing_id is not None:
            return existing_id
        # fall back to the resource type's registered ID default factory
        return self.resource_type.ravel.defaults[ID]()

    def increment_rev(self, rev: Text = None) -> Text:
        """
        Produce the next revision tag, formatted as
        '<base36 microsecond timestamp>-<base36 counter>'.
        """
        timestamp_us = int(1000000 * time.time())  # microseconds since epoch

        if rev:
            # the counter is the base36 component after the '-'
            counter = int(rev.split('-')[1], 36) + 1
        else:
            counter = 1

        return f'{base36.dumps(timestamp_us)}-{base36.dumps(counter)}'

    @abstractmethod
    def exists(self, _id) -> bool:
        """
        Return True if the record with the given _id exists.
        """

    @abstractmethod
    def exists_many(self, _ids: Set) -> Dict[object, bool]:
        """
        Return a mapping from _id to a boolean, indicating if the specified
        resource exists.
        """

    @abstractmethod
    def count(self) -> int:
        """
        Return the total number of records stored by this Store.
        """

    @abstractmethod
    def query(self,
              predicate: 'Predicate',
              fields: Set[Text] = None,
              **kwargs) -> List[Dict]:
        """
        TODO: rename to "select"
        Return all records whose fields match a logical predicate, selecting
        only the designated fields (or all by default).
        """

    @abstractmethod
    def fetch(self, _id, fields: Dict = None) -> Dict:
        """
        Read a single record by _id, selecting only the designated fields (or
        all by default).
        """

    @abstractmethod
    def fetch_many(self, _ids: List, fields: Dict = None) -> Dict:
        """
        Read multiple records by _id, selecting only the designated fields (or
        all by default). Returns a mapping from _id to record.
        """

    @abstractmethod
    def fetch_all(self, fields: Set[Text] = None) -> Dict:
        """
        Return all records managed by this Store, keyed by _id.
        """

    @abstractmethod
    def create(self, data: Dict) -> Dict:
        """
        Create a new record. If the _id is not contained in the data dict,
        it is the responsibility of the Store class to generate one.
        """

    @abstractmethod
    def create_many(self, records: List[Dict]) -> List[Dict]:
        """
        Create multiple new records. It is the responsibility of the Store
        class to generate each _id.
        """

    @abstractmethod
    def update(self, _id, data: Dict) -> Dict:
        """
        Update the record identified by _id with the data passed in.
        """

    @abstractmethod
    def update_many(self, _ids: List, data: List[Dict] = None) -> List[Dict]:
        """
        Update multiple records. If a single data dict is passed in, then try to
        apply the same update to all records; otherwise, if a list of data dicts
        is passed in, try to zip the _ids with the data dicts and apply each
        unique update or each group of identical updates individually.
        """

    @abstractmethod
    def delete(self, _id) -> None:
        """
        Delete a single record by _id.
        """

    @abstractmethod
    def delete_many(self, _ids: List) -> None:
        """
        Delete multiple records by _id.
        """

    @abstractmethod
    def delete_all(self) -> None:
        """
Example #5
0
class FilesystemStore(Store):
    """
    Store that persists each record as one file (YAML by default) under a
    per-resource-type directory, mirroring all records into an in-memory
    SimulationStore that serves cached reads and queries.
    """

    env = Environment()
    # base directory for record files; set via on_bootstrap(root=...)
    root = None
    # class-level placeholder; instances expose self._paths via the property
    paths = None

    def __init__(
        self,
        ftype: Text = None,
        extension: Text = None,
    ):
        """
        Args:
        - `ftype`: dotted path to (or class of) the BaseFile subclass used
          to read/write record files; defaults to Yaml.
        - `extension`: file extension for record files; defaults to the
          ftype's default extension.
        """
        super().__init__()

        self._paths = DictObject()
        self._cache_store = SimulationStore()

        # convert the ftype string arg into a File class ref
        if not ftype:
            self._ftype = Yaml
        elif not isinstance(ftype, BaseFile):
            self._ftype = import_object(ftype)
        else:
            # FIX: a BaseFile passed in directly was previously ignored,
            # leaving self._ftype unset and crashing the assert below
            self._ftype = ftype

        assert issubclass(self.ftype, BaseFile)

        # resolve the file extension used for record files
        known_extension = self._ftype.has_extension(extension)
        if known_extension:
            self._extension = known_extension
        elif extension is not None:
            # FIX: an extension unknown to the ftype previously left
            # self._extension unset; use the caller's value verbatim
            self._extension = extension
        else:
            self._extension = self._ftype.default_extension()

    @property
    def paths(self):
        # DictObject with `root` and `records` directory paths
        return self._paths

    @property
    def extension(self):
        # file extension used when building record file names
        return self._extension

    @property
    def ftype(self):
        # BaseFile subclass used to read/write record files
        return self._ftype

    @classmethod
    def on_bootstrap(
        cls,
        ftype: Text = None,
        root: Text = None,
        use_recursive_merge=True,
        store_primitives=False,
        prefetch: bool = True,
        yaml_loader_class: Text = 'FullLoader'
    ):
        """
        Class-level configuration: file type, root directory, merge/encode
        behavior, prefetching, and the YAML loader class to use.

        Raises MissingBootstrapParameterError when no root is configured.
        """
        cls.ftype = import_object(ftype) if ftype else Yaml
        cls.root = root or cls.root
        cls.use_recursive_merge = use_recursive_merge
        cls.store_primitives = store_primitives
        cls.do_prefetch = prefetch

        # only YAML file types need a loader class
        if 'yaml' in cls.ftype.extensions():
            cls.yaml_loader_class = getattr(yaml, yaml_loader_class, None)
        else:
            cls.yaml_loader_class = None

        if not cls.root:
            raise MissingBootstrapParameterError('missing parameter: root')

    def on_bind(
        self,
        resource_type,
        root: Text = None,
        ftype: BaseFile = None,
        store_primitives=None,
        prefetch: bool = None,
        yaml_loader_class: Text = None,
    ):
        """
        Ensure the data dir exists for this Resource type, applying any
        per-binding overrides of the bootstrap settings.
        """
        if isinstance(ftype, str):
            # FIX: assigning to self.ftype would hit the read-only property;
            # write the backing attribute instead
            self._ftype = import_object(ftype)

        if store_primitives is not None:
            self.store_primitives = store_primitives

        if prefetch is not None:
            self.do_prefetch = prefetch

        if yaml_loader_class is not None:
            # FIX: self.ftype is a File class, not a string, so the original
            # `self.ftype.lower()` raised; test extensions as on_bootstrap does
            if 'yaml' in self.ftype.extensions():
                self.yaml_loader_class = getattr(
                    yaml, yaml_loader_class, None
                )
            else:
                self.yaml_loader_class = None

        self.paths.root = root or self.root
        self.paths.records = os.path.join(
            self.paths.root, StringUtils.snake(resource_type.__name__)
        )

        os.makedirs(self.paths.records, exist_ok=True)

        # bootstrap, bind, and backfill the in-memory cache
        if self.do_prefetch:
            self.bust_cache(self.do_prefetch)

    def bust_cache(self, prefetch=False):
        """
        Replace the in-memory cache store, optionally re-populating it with
        every record currently on disk.
        """
        self._cache_store = SimulationStore()
        if not self._cache_store.is_bootstrapped():
            self._cache_store.bootstrap(self.resource_type.ravel.app)

        self._cache_store.bind(self.resource_type)

        if prefetch:
            self._cache_store.create_many(
                record for record
                in self.fetch_all(ignore_cache=True).values()
                if record
            )

    @classmethod
    def has_transaction(cls):
        # the filesystem backend has no transaction support
        return False

    @classmethod
    def begin(cls, **kwargs):
        # no-op: no transaction support
        pass

    @classmethod
    def commit(cls, **kwargs):
        # no-op: no transaction support
        pass

    @classmethod
    def rollback(cls, **kwargs):
        # no-op: no transaction support
        pass

    def create_id(self, record):
        # use the record's _id when present; otherwise mint a new UUID string
        return record.get(ID, UuidString.next_id())

    def exists(self, _id: Text) -> bool:
        """Return True if a record file exists for the given _id."""
        return BaseFile.exists(self.mkpath(_id))

    def exists_many(self, _ids: Set) -> Dict[object, bool]:
        """Map each _id to whether its record file exists."""
        return {_id: self.exists(_id) for _id in _ids}

    def create(self, record: Dict) -> Dict:
        """Write a new record file and mirror it into the cache."""
        _id = self.create_id(record)
        # update() performs the actual upsert/write to disk
        record = self.update(_id, record)
        record[ID] = _id
        self._cache_store.create(record)
        return record

    def create_many(self, records):
        """Create each record in turn, then mirror them into the cache."""
        created_records = []
        for record in records:
            created_records.append(self.create(record))
        self._cache_store.create_many(created_records)
        return created_records

    def count(self) -> int:
        """Count record files on disk for the bound resource type."""
        fnames = glob.glob(f'{self.paths.records}/*.{self.extension}')
        return len(fnames)

    def fetch(self, _id, fields=None) -> Dict:
        """Fetch a single record by _id."""
        records = self.fetch_many([_id], fields=fields)
        # NOTE(review): returns None when fetch_many has no entry for _id but
        # {} when it returned an empty mapping -- confirm callers accept both
        record = records.get(_id) if records else {}
        return record

    def fetch_many(
        self,
        _ids: List = None,
        fields: Set[Text] = None,
        ignore_cache=False
    ) -> Dict:
        """
        Fetch records by _id, trying the in-memory cache first and falling
        back to the filesystem for cache misses. When ignore_cache is True,
        all record files on disk are read instead.
        """
        if not _ids:
            _ids = []

        # reduce _ids to its unique members by making it a set
        if not isinstance(_ids, set):
            all_ids = set(_ids)
        else:
            all_ids = _ids

        ids_to_fetch_from_fs = set()

        # we do not want to ignore the cache here
        if not ignore_cache:
            cached_records = self._cache_store.fetch_many(all_ids, fields=fields)
            for record_id, record in cached_records.items():
                if record is None:
                    ids_to_fetch_from_fs.add(record_id)
        # otherwise we will go straight to the filesystem
        else:
            cached_records = {}
            ids_to_fetch_from_fs = self._fetch_all_ids()

        # if there are any remaining ID's not returned from cache,
        # fetch them from file system
        if ids_to_fetch_from_fs:

            # prepare the set of field names to fetch
            fields = fields if isinstance(fields, set) else set(fields or [])
            if not fields:
                fields = set(self.resource_type.Schema.fields.keys())
            fields |= {ID, REV}

            records = {}
            non_null_records = []

            for _id in ids_to_fetch_from_fs:
                fpath = self.mkpath(_id)
                try:
                    record = self.ftype.read(
                        fpath, loader_class=self.yaml_loader_class
                    )
                except FileNotFoundError:
                    records[_id] = None
                    console.debug(
                        message='file not found by filesystem store',
                        data={'filepath': fpath}
                    )
                    continue

                if record:
                    record, errors = self.schema.process(record)
                    if errors:
                        raise Exception(
                            f'validation error while loading '
                            f'{_id}.{self.extension}'
                        )
                    record.setdefault(ID, _id)
                    records[_id] = {k: record.get(k) for k in fields}

                    non_null_records.append(record)

                    # if for some reason a file was created manually
                    # without a _rev, we create one here and save it
                    if REV not in record:
                        record[REV] = self.increment_rev()
                        self.update(_id, record)
                else:
                    # FIX: the original wrote records[ID] = None, clobbering
                    # the literal ID key instead of marking this _id missing
                    records[_id] = None

            self._cache_store.create_many(non_null_records)
            cached_records.update(records)

        return cached_records

    def fetch_all(self, fields: Set[Text] = None, ignore_cache=False) -> Dict:
        """Fetch every record (fetch_many with no explicit _ids)."""
        return self.fetch_many(None, fields=fields, ignore_cache=ignore_cache)

    def update(self, _id, data: Dict) -> Dict:
        """
        Upsert: merge `data` into any existing record file for _id, bump
        the revision, write the file, and refresh the cache.
        """
        fpath = self.mkpath(_id)
        base_record = self.ftype.read(
            fpath,
            loader_class=self.yaml_loader_class
        )

        schema = self.resource_type.ravel.schema
        base_record, errors = schema.process(base_record)

        if base_record:
            # this is an upsert
            if self.use_recursive_merge:
                record = DictUtils.merge(base_record, data)
            else:
                record = dict(base_record, **data)
        else:
            record = data

        record[REV] = self.increment_rev(record.get(REV))
        if ID not in record:
            record[ID] = _id

        if self.store_primitives:
            # round-trip through JSON to reduce values to primitive types
            json = self.app.json
            self.ftype.write(
                path=fpath,
                data=json.decode(json.encode(record))
            )
        else:
            self.ftype.write(path=fpath, data=record)

        self._cache_store.update(_id, record)
        return record

    def update_many(self, _ids: List, updates: List = None) -> Dict:
        """Zip _ids with update dicts and apply update() pairwise."""
        return {
            _id: self.update(_id, data)
            for _id, data in zip(_ids, updates)
        }

    def delete(self, _id) -> None:
        """Remove the record from the cache and delete its file."""
        self._cache_store.delete(_id)
        fpath = self.mkpath(_id)
        os.remove(fpath)

    def delete_many(self, _ids: List) -> None:
        """Delete each of the given records in turn."""
        for _id in _ids:
            self.delete(_id)

    def delete_all(self):
        """Delete every record file for the bound resource type."""
        _ids = self._fetch_all_ids()
        self.delete_many(_ids)

    def query(self, *args, **kwargs):
        """
        Delegate querying to the in-memory cache store, warming it from
        disk first when it is empty.
        """
        if self._cache_store is not None and not self._cache_store.count():
            self.fetch_all(ignore_cache=True)

        return self._cache_store.query(*args, **kwargs)

    def mkpath(self, fname: Text) -> Text:
        """Build the absolute path of the record file for `fname` (an _id)."""
        fname = self.ftype.format_file_name(fname)
        return os.path.join(self.paths.records, fname)

    def _fetch_all_ids(self):
        """Derive the set of record _ids from the file names on disk."""
        _ids = set()
        for fpath in glob.glob(f'{self.paths.records}/*.{self.extension}'):
            fname = fpath.split('/')[-1]
            basename = os.path.splitext(fname)[0]
            _ids.add(basename)
        return _ids
Example #6
0
class FalconService(AbstractWsgiService):
    """
    WSGI service that adapts framework actions to a falcon.App. Each URL
    route template gets one FalconResource, which endpoints register with.
    """

    env = Environment()

    def __init__(self, router=None, *args, **kwargs):
        """
        Args:
        - `router`: optional custom router passed through to falcon.App.
        """
        super().__init__(*args, **kwargs)
        self._json_encoder = JsonEncoder()
        self._route_2_resource = {}
        self._router = router

    @property
    def falcon_middleware(self):
        """Falcon middleware objects; subclasses may override."""
        return []

    @property
    def falcon_resources(self):
        """Mapping from route template to its FalconResource."""
        return self._route_2_resource

    @property
    def request_type(self):
        """Request class handed to falcon.App."""
        return Request

    @property
    def response_type(self):
        """Response class handed to falcon.App."""
        return Response

    @staticmethod
    def handle_error(exc, request, response, params):
        """Top-level falcon error handler: 500 + server-side traceback."""
        response.status = falcon.HTTP_500
        traceback.print_exc()

    def on_start(self, *args, **kwargs):
        """Log the registered endpoints and build the falcon WSGI app."""
        console.info(
            message='starting Falcon web service',
            data={'endpoints': sorted(self.actions.keys())}
        )
        return self.entrypoint(*args, **kwargs)

    def entrypoint(
        self, environ=None, start_response=None, *args, **kwargs
    ):
        """
        Construct and return the falcon.App with middleware, error handler,
        and one route per registered FalconResource.
        """
        middleware = self.falcon_middleware
        for m in middleware:
            # bind project-level Middleware instances to this service
            if isinstance(m, Middleware):
                m.bind(self)

        falcon_app = falcon.App(
            middleware=middleware,
            request_type=self.request_type,
            response_type=self.response_type,
            router=self._router
        )
        falcon_app.req_options = Request.Options()
        falcon_app.resp_options = Response.Options()
        falcon_app.add_error_handler(Exception, self.handle_error)

        for route, resource in self._route_2_resource.items():
            falcon_app.add_route(route, resource)

        return falcon_app

    def on_decorate(self, endpoint):
        """
        Register the endpoint under each of its routes, creating the
        route's FalconResource on first use.
        """
        super().on_decorate(endpoint)
        for route in endpoint.routes:
            # FIX: the original duplicated the add_endpoint call in both
            # branches; create the resource on a miss, then register once.
            resource = self._route_2_resource.get(route)
            if resource is None:
                resource = FalconResource(route)
                self._route_2_resource[route] = resource
            resource.add_endpoint(endpoint)

    def on_request(self, endpoint, request, response, *args, **kwargs):
        """
        Merge the request body media, query params, and URL template
        variables into the kwargs passed to the application action.
        """
        if request.content_length:
            app_kwargs = dict(request.media or {}, **kwargs)
        else:
            app_kwargs = dict(kwargs)

        app_kwargs.update(request.params)

        # append URL path variables, e.g. /users/{user_id}
        route = request.path.strip('/').split('/')
        route_template = request.uri_template.strip('/').split('/')
        for k, v in zip(route_template, route):
            if k and (k[0] == '{' and k[-1] == '}'):
                app_kwargs[k[1:-1]] = v

        return (tuple(), app_kwargs)

    def on_response(
        self,
        action,
        result,
        raw_args=None,
        raw_kwargs=None,
        *args,
        **kwargs
    ):
        """Write the action result into the falcon response media."""
        request, response = raw_args
        response.media = result
        return result
Example #7
0
class SqlalchemyStore(Store):
    """
    A SQLAlchemy-based store, which keeps a single connection pool (AKA
    Engine) shared by all threads; however, each thread keeps singleton
    thread-local database connection and transaction objects, managed through
    connect()/close() and begin()/end().
    """

    # declared environment variables (with defaults) read at bootstrap
    env = Environment(
        SQLALCHEMY_STORE_ECHO=fields.Bool(default=False),
        SQLALCHEMY_STORE_SHOW_QUERIES=fields.Bool(default=False),
        SQLALCHEMY_STORE_DIALECT=fields.Enum(fields.String(),
                                             Dialect.values(),
                                             default=Dialect.sqlite),
        SQLALCHEMY_STORE_PROTOCOL=fields.String(default='sqlite'),
        SQLALCHEMY_STORE_USER=fields.String(),
        SQLALCHEMY_STORE_HOST=fields.String(),
        SQLALCHEMY_STORE_PORT=fields.String(),
        SQLALCHEMY_STORE_NAME=fields.String(),
    )

    # id_column_names is a mapping from table name to its _id column name
    _id_column_names = {}

    # only one thread needs to bootstrap the SqlalchemyStore. This lock is
    # used to ensure that this is what happens when the host app bootstraps.
    _bootstrap_lock = RLock()

    @classmethod
    def get_default_adapters(cls, dialect: Dialect,
                             table_name) -> List[Field.Adapter]:
        """
        Build the baseline list of Field -> SQLAlchemy column adapters,
        then extend it with dialect-specific adapters for
        postgresql/mysql/sqlite.
        """
        # TODO: Move this into the adapters file

        adapters = [
            # generic fallback: any Field serializes to JSON-encoded TEXT
            fields.Field.adapt(
                on_adapt=lambda field: sa.Text,
                on_encode=lambda x: cls.ravel.app.json.encode(x),
                on_decode=lambda x: cls.ravel.app.json.decode(x),
            ),
            fields.Email.adapt(on_adapt=lambda field: sa.Text),
            fields.Bytes.adapt(on_adapt=lambda field: sa.LargeBinary),
            fields.BcryptString.adapt(on_adapt=lambda field: sa.Text),
            fields.Float.adapt(on_adapt=lambda field: sa.Float),
            fields.DateTime.adapt(on_adapt=lambda field: UtcDateTime),
            fields.Timestamp.adapt(on_adapt=lambda field: UtcDateTime),
            fields.Bool.adapt(on_adapt=lambda field: sa.Boolean),
            fields.TimeDelta.adapt(on_adapt=lambda field: sa.Interval),
            # enum column type depends on the nested field's type
            fields.Enum.adapt(
                on_adapt=lambda field: {
                    fields.String: sa.Text,
                    fields.Int: sa.Integer,
                    fields.Float: sa.Float,
                }[type(field.nested)]),
        ]
        # all string-like fields map to TEXT
        adapters.extend(
            field_class.adapt(on_adapt=lambda field: sa.Text)
            for field_class in {
                fields.String, fields.FormatString, fields.UuidString,
                fields.DateTimeString
            })
        # all integer-like fields map to BIGINT
        adapters.extend(
            field_class.adapt(on_adapt=lambda field: sa.BigInteger)
            for field_class in {
                fields.Int,
                fields.Uint32,
                fields.Uint64,
                fields.Uint,
                fields.Int32,
            })
        # dialect-specific additions/overrides
        if dialect == Dialect.postgresql:
            adapters.extend(cls.get_postgresql_default_adapters(table_name))
        elif dialect == Dialect.mysql:
            adapters.extend(cls.get_mysql_default_adapters(table_name))
        elif dialect == Dialect.sqlite:
            adapters.extend(cls.get_sqlite_default_adapters(table_name))

        return adapters

    @classmethod
    def get_postgresql_default_adapters(cls,
                                        table_name) -> List[Field.Adapter]:
        """
        Field adapters specific to the PostgreSQL dialect: native UUID,
        JSONB, arrays (including enum arrays), and PostGIS geometries.
        """
        pg_types = sa.dialects.postgresql

        def on_adapt_list(field):
            # arrays of enums need a named ENUM type, unique per table+field
            if isinstance(field.nested, fields.Enum):
                name = f'{table_name}__{field.name}'
                return ArrayOfEnum(
                    pg_types.ENUM(*field.nested.values, name=name))
            return pg_types.ARRAY({
                fields.String: sa.Text,
                fields.Email: sa.Text,
                fields.Uuid: pg_types.UUID,
                fields.Int: sa.Integer,
                fields.Bool: sa.Boolean,
                fields.Float: sa.Float,
                fields.DateTime: UtcDateTime,
                fields.Timestamp: UtcDateTime,
                fields.Dict: pg_types.JSONB,
                fields.Field: pg_types.JSONB,
                fields.Nested: pg_types.JSONB,
            }.get(type(field.nested), sa.Text))

        return [
            Point.adapt(
                on_adapt=lambda field: GeoalchemyGeometry(field.geo_type),
                on_encode=lambda x: x.to_EWKT_string(),
                on_decode=lambda x:
                (PointGeometry(x['geometry']['coordinates']) if x else None)),
            Polygon.adapt(
                on_adapt=lambda field: GeoalchemyGeometry(field.geo_type),
                on_encode=lambda x: x.to_EWKT_string(),
                # FIX: the None-guard was inside the constructor call, so a
                # falsy value produced PolygonGeometry(None) instead of None;
                # mirror the Point adapter above
                on_decode=lambda x:
                (PolygonGeometry(x['geometry']['coordinates']) if x else None)),
            fields.Field.adapt(on_adapt=lambda field: pg_types.JSONB),
            fields.Uuid.adapt(on_adapt=lambda field: pg_types.UUID),
            fields.Dict.adapt(on_adapt=lambda field: pg_types.JSONB),
            fields.Nested.adapt(on_adapt=lambda field: pg_types.JSONB),
            fields.Set.adapt(on_adapt=lambda field: pg_types.JSONB,
                             on_encode=lambda x: list(x),
                             on_decode=lambda x: set(x)),
            fields.UuidString.adapt(
                on_adapt=lambda field: pg_types.UUID,
                # DB returns canonical dashed UUIDs; strip dashes to match
                # the UuidString format -- TODO confirm
                on_decode=lambda x: x.replace('-', '') if x else x,
            ),
            fields.List.adapt(on_adapt=on_adapt_list)
        ]

    @classmethod
    def get_mysql_default_adapters(cls, table_name) -> List[Field.Adapter]:
        """
        MySQL-specific adapters: collection fields map to the native JSON
        column type; sets round-trip through JSON-encoded lists.
        """
        def as_json_column(field):
            return sa.JSON

        return [
            fields.Dict.adapt(on_adapt=as_json_column),
            fields.Nested.adapt(on_adapt=as_json_column),
            fields.List.adapt(on_adapt=as_json_column),
            fields.Set.adapt(
                on_adapt=as_json_column,
                on_encode=lambda x: cls.ravel.app.json.encode(x),
                on_decode=lambda x: set(cls.ravel.app.json.decode(x))),
        ]

    @classmethod
    def get_sqlite_default_adapters(cls, table_name) -> List[Field.Adapter]:
        """
        SQLite-specific adapters: dicts, lists, nested objects, and sets are
        stored as JSON-encoded TEXT columns.
        """
        def encode(value):
            return cls.ravel.app.json.encode(value)

        def decode(value):
            return cls.ravel.app.json.decode(value)

        adapters = []
        for field_class in {fields.Dict, fields.List, fields.Nested}:
            adapters.append(
                field_class.adapt(
                    on_adapt=lambda field: sa.Text,
                    on_encode=encode,
                    on_decode=decode,
                )
            )
        # sets additionally round-trip through a JSON list
        adapters.append(
            fields.Set.adapt(
                on_adapt=lambda field: sa.Text,
                on_encode=encode,
                on_decode=lambda x: set(decode(x)),
            )
        )
        return adapters

    def __init__(self, adapters: List[Field.Adapter] = None):
        """
        Args:
        - `adapters`: extra Field.Adapter instances merged with the dialect
          defaults when the store is bound.
        """
        super().__init__()
        self._custom_adapters = adapters or []
        # the following are populated later, during bind()
        self._table = None
        self._builder = None
        self._adapters = None
        self._id_column = None
        self._options = {}

    @property
    def adapters(self):
        # mapping of field name -> Field.Adapter (built during bind)
        return self._adapters

    @property
    def id_column_name(self):
        # the source (column) name of the bound schema's ID field
        return self.resource_type.Schema.fields[ID].source

    def prepare(self, record: Dict, serialize=True) -> Dict:
        """
        When inserting or updating data, some raw values in the record
        dict must be transformed before their corresponding sqlalchemy column
        type will accept the data.
        """
        cb_name = 'on_encode' if serialize else 'on_decode'
        prepared_record = {}
        for k, v in record.items():
            # pass the revision column through untouched. FIX: the original
            # used `k in (REV)` -- a substring test against the REV string,
            # not tuple membership -- and then fell through to the adapter
            # lookup anyway, so the value could still be re-adapted.
            if k == REV:
                prepared_record[k] = v
                continue
            adapter = self._adapters.get(k)
            if adapter:
                callback = getattr(adapter, cb_name, None)
                if callback:
                    try:
                        prepared_record[k] = callback(v)
                        continue
                    except Exception:
                        console.error(
                            message=f'failed to adapt column value: {k}',
                            data={
                                'value': v,
                                'field': adapter.field_class
                            })
                        raise
            # no adapter / no callback: keep the raw value
            prepared_record[k] = v
        return prepared_record

    def adapt_id(self, _id, serialize=True):
        """
        Run an ID value through the _id column's adapter callback, encoding
        when `serialize` is True and decoding otherwise; values with no
        adapter (or no callback) pass through unchanged.
        """
        adapter = self._adapters.get(self.id_column_name)
        if adapter is None:
            return _id
        callback = getattr(adapter, 'on_encode' if serialize else 'on_decode')
        if not callback:
            return _id
        return callback(_id)

    @classmethod
    def on_bootstrap(cls,
                     url=None,
                     dialect=None,
                     echo=False,
                     db=None,
                     **kwargs):
        """
        Initialize the SQLAlchemy connection pool (AKA Engine).

        Args:
            url: DB server URL as a string, or a dict with ``protocol``,
                ``user``, ``host``, ``port`` and ``db`` keys. When absent,
                the URL is assembled from SQLALCHEMY_STORE_* env vars.
            dialect: SQL dialect; defaults to env SQLALCHEMY_STORE_DIALECT.
            echo: when truthy, SQLAlchemy logs all emitted SQL.
            db: database name, used only when building the URL from env vars.
            **kwargs: stored on ``cls.ravel.kwargs``; later merged into each
                binding's options in ``on_bind``.
        """
        with cls._bootstrap_lock:
            cls.ravel.kwargs = kwargs

            # construct the URL to the DB server
            # url can be a string or a dict
            if isinstance(url, dict):
                url_parts = url
                cls.ravel.app.shared.sqla_url = (
                    '{protocol}://{user}@{host}:{port}/{db}'.format(
                        **url_parts))
            elif isinstance(url, str):
                cls.ravel.app.shared.sqla_url = url
            else:
                # no explicit URL: assemble one from environment variables;
                # each optional part carries its own separator when present
                url_parts = dict(
                    protocol=cls.env.SQLALCHEMY_STORE_PROTOCOL,
                    user=cls.env.SQLALCHEMY_STORE_USER or '',
                    host=('@' + cls.env.SQLALCHEMY_STORE_HOST
                          if cls.env.SQLALCHEMY_STORE_HOST else ''),
                    port=(':' + cls.env.SQLALCHEMY_STORE_PORT
                          if cls.env.SQLALCHEMY_STORE_PORT else ''),
                    db=('/' + (db or cls.env.SQLALCHEMY_STORE_NAME or '')))
                cls.ravel.app.shared.sqla_url = url or (
                    '{protocol}://{user}{host}{port}{db}'.format(**url_parts))

            cls.dialect = dialect or cls.env.SQLALCHEMY_STORE_DIALECT

            from sqlalchemy.dialects import postgresql, sqlite, mysql

            # resolve the sqlalchemy dialect module (used by column adapters);
            # stays None for unrecognized dialect values
            cls.sa_dialect = None
            if cls.dialect == Dialect.postgresql:
                cls.sa_dialect = postgresql
            elif cls.dialect == Dialect.sqlite:
                cls.sa_dialect = sqlite
            elif cls.dialect == Dialect.mysql:
                cls.sa_dialect = mysql

            console.debug(message='creating sqlalchemy engine',
                          data={
                              'echo': cls.env.SQLALCHEMY_STORE_ECHO,
                              'dialect': cls.dialect,
                              'url': cls.ravel.app.shared.sqla_url,
                          })

            # thread-local slots: transaction, connection, and the MetaData
            # object, whose bound engine is the shared connection pool
            cls.ravel.local.sqla_tx = None
            cls.ravel.local.sqla_conn = None
            cls.ravel.local.sqla_metadata = sa.MetaData()
            cls.ravel.local.sqla_metadata.bind = sa.create_engine(
                name_or_url=cls.ravel.app.shared.sqla_url,
                echo=bool(echo or cls.env.SQLALCHEMY_STORE_ECHO),
                strategy='threadlocal')

            # set global thread-local sqlalchemy store method aliases
            cls.ravel.app.local.create_tables = cls.create_tables

    def on_bind(self,
                resource_type: Type['Resource'],
                table: Text = None,
                schema: 'Schema' = None,
                **kwargs):
        """
        Initialize SQLAlchemy data structures used for constructing SQL
        expressions used to manage the bound resource type.

        Args:
            resource_type: the Resource class being bound to this store.
            table: explicit table name; derived from the resource type
                when omitted.
            schema: optional schema object passed to the table builder.
            **kwargs: per-binding store options, merged over the
                bootstrap-level kwargs to form ``self._options``.
        """
        # map each of the resource's schema fields to a corresponding adapter,
        # which is used to prepare values upon insert and update.
        table = (table
                 or SqlalchemyTableBuilder.derive_table_name(resource_type))
        field_class_2_adapter = {
            adapter.field_class: adapter
            for adapter in self.get_default_adapters(self.dialect, table) +
            self._custom_adapters
        }
        # fields with a 'ravel_on_resolve' meta entry are resolved lazily
        # elsewhere and get no column adapter here
        self._adapters = {
            field_name: field_class_2_adapter[type(field)]
            for field_name, field in self.resource_type.Schema.fields.items()
            if (type(field) in field_class_2_adapter
                and field.meta.get('ravel_on_resolve') is None)
        }

        # build the Sqlalchemy Table object for the bound resource type.
        self._builder = SqlalchemyTableBuilder(self)

        try:
            self._table = self._builder.build_table(name=table, schema=schema)
        except Exception:
            console.error(f'failed to build sa.Table: {table}')
            raise

        self._id_column = getattr(self._table.c, self.id_column_name)

        # remember which column is the _id column
        self._id_column_names[self._table.name] = self.id_column_name

        # set SqlalchemyStore options here, using bootstrap-level
        # options as base/default options.
        self._options = dict(self.ravel.kwargs, **kwargs)

    def query(
        self,
        predicate: 'Predicate',
        fields: Set[Text] = None,
        limit: int = None,
        offset: int = None,
        order_by: Tuple = None,
        **kwargs,
    ):
        """
        Select records matching a serialized predicate.

        Args:
            predicate: serialized Predicate filtering the result set.
            fields: columns to select; defaults to all adapted fields.
                The ID and revision columns are always included.
            limit: max number of rows returned (clamped at 0).
            offset: number of rows to skip (clamped at 0).
            order_by: sequence of ordering specs with .key/.desc attributes.

        Returns:
            List of decoded record dicts.
        """
        fields = fields or {k: None for k in self._adapters}
        # the ID and revision columns are always selected
        fields.update({
            self.id_column_name: None,
            self.resource_type.Schema.fields[REV].source: None,
        })

        columns = []
        # alias the table by the initials of its name ("user_event" -> "ue")
        table_alias = self.table.alias(''.join(
            s.strip('_')[0] for s in self.table.name.split('_')))
        for k in fields:
            col = getattr(table_alias.c, k)
            if isinstance(col.type, GeoalchemyGeometry):
                # geometry columns come back as GeoJSON text
                columns.append(sa.func.ST_AsGeoJSON(col).label(k))
            else:
                columns.append(col)

        predicate = Predicate.deserialize(predicate)
        filters = self._prepare_predicate(table_alias, predicate)

        # build the query object
        query = sa.select(columns).where(filters)

        if order_by:
            sa_order_by = [
                sa.desc(getattr(table_alias.c, x.key)) if x.desc else sa.asc(
                    getattr(table_alias.c, x.key)) for x in order_by
            ]
            query = query.order_by(*sa_order_by)

        if limit is not None:
            query = query.limit(max(0, limit))
        if offset is not None:
            # BUG FIX: was max(0, limit), which applied the limit as offset
            query = query.offset(max(0, offset))

        console.debug(
            message=(f'SQL: SELECT FROM {self.table}' +
                     (f' OFFSET {offset}' if offset is not None else '') +
                     (f' LIMIT {limit}' if limit else '') +
                     (f' ORDER BY {", ".join(x.to_sql() for x in order_by)}'
                      if order_by else '')),
            data={
                'stack':
                traceback.format_stack(),
                'statement':
                str(query.compile(
                    compile_kwargs={'literal_binds': True})).split('\n')
            } if self.env.SQLALCHEMY_STORE_SHOW_QUERIES else None)
        # execute query, aggregating resulting records
        cursor = self.conn.execute(query)
        records = []

        # page through the cursor 512 rows at a time, decoding each row
        while True:
            page = [
                self.prepare(dict(row.items()), serialize=False)
                for row in cursor.fetchmany(512)
            ]
            if page:
                records.extend(page)
            else:
                break

        return records

    def _prepare_predicate(self, table, pred, empty=frozenset()):
        """
        Recursively translate a Ravel predicate into a SQLAlchemy filter
        expression against the given table (or alias).

        Args:
            table: sa.Table (or alias) whose columns are referenced.
            pred: ConditionalPredicate or BooleanPredicate to translate.
            empty: unused; kept for signature compatibility, made a
                frozenset to avoid the mutable-default-argument pitfall.

        Raises:
            Exception: for unrecognized predicate or operator types.
        """
        if isinstance(pred, ConditionalPredicate):
            if not pred.ignore_field_adapter:
                # encode the comparison value the same way stored column
                # values were encoded on insert/update
                adapter = self._adapters.get(pred.field.source)
                if adapter and adapter.on_encode:
                    pred.value = adapter.on_encode(pred.value)
            col = getattr(table.c, pred.field.source)
            if pred.op == OP_CODE.EQ:
                return col == pred.value
            elif pred.op == OP_CODE.NEQ:
                return col != pred.value
            elif pred.op == OP_CODE.GEQ:
                return col >= pred.value
            elif pred.op == OP_CODE.GT:
                return col > pred.value
            elif pred.op == OP_CODE.LT:
                return col < pred.value
            elif pred.op == OP_CODE.LEQ:
                return col <= pred.value
            elif pred.op == OP_CODE.INCLUDING:
                return col.in_(pred.value)
            elif pred.op == OP_CODE.EXCLUDING:
                return ~col.in_(pred.value)
            elif pred.op == POSTGIS_OP_CODE.CONTAINS:
                # accept either a GeometryObject or a raw EWKT string
                if isinstance(pred.value, GeometryObject):
                    EWKT_str = pred.value.to_EWKT_string()
                else:
                    EWKT_str = pred.value
                return sa.func.ST_Contains(
                    col,
                    sa.func.ST_GeomFromEWKT(EWKT_str),
                )
            elif pred.op == POSTGIS_OP_CODE.CONTAINED_BY:
                if isinstance(pred.value, GeometryObject):
                    EWKT_str = pred.value.to_EWKT_string()
                else:
                    EWKT_str = pred.value
                return sa.func.ST_Contains(sa.func.ST_GeomFromEWKT(EWKT_str),
                                           col)
            elif pred.op == POSTGIS_OP_CODE.WITHIN_RADIUS:
                center = pred.value['center']
                radius = pred.value['radius']
                return sa.func.ST_PointInsideCircle(col, center[0], center[1],
                                                    radius)
            else:
                raise Exception('unrecognized conditional predicate')
        elif isinstance(pred, BooleanPredicate):
            # recurse on both sides and combine with AND/OR
            if pred.op == OP_CODE.AND:
                lhs_result = self._prepare_predicate(table, pred.lhs)
                rhs_result = self._prepare_predicate(table, pred.rhs)
                return sa.and_(lhs_result, rhs_result)
            elif pred.op == OP_CODE.OR:
                lhs_result = self._prepare_predicate(table, pred.lhs)
                rhs_result = self._prepare_predicate(table, pred.rhs)
                return sa.or_(lhs_result, rhs_result)
            else:
                raise Exception('unrecognized boolean predicate')
        else:
            raise Exception('unrecognized predicate type')

    def exists(self, _id) -> bool:
        """Return True if a row with the given ID exists."""
        adapted_id = self.adapt_id(_id)
        count_query = sa.select([sa.func.count(self._id_column)]).where(
            self._id_column == adapted_id
        )
        return bool(self.conn.execute(count_query).scalar())

    def exists_many(self, _ids: Set) -> Dict[object, bool]:
        """
        Check existence of multiple IDs in a single query.

        Returns:
            Mapping of (adapted) ID -> True for each ID found; IDs with no
            matching row are simply absent from the result.
        """
        columns = [self._id_column, sa.func.count(self._id_column)]
        query = (
            sa.select(columns)
            .where(self._id_column.in_([self.adapt_id(_id) for _id in _ids]))
            # selecting a plain column alongside an aggregate requires a
            # GROUP BY on strict engines (e.g. postgres)
            .group_by(self._id_column)
        )
        # coerce counts to bool to honor the declared return type
        return {row[0]: bool(row[1]) for row in self.conn.execute(query)}

    def count(self) -> int:
        """Return the total number of rows in the table."""
        count_column = sa.func.count(self._id_column)
        return self.conn.execute(sa.select([count_column])).scalar()

    def fetch(self, _id, fields=None) -> Dict:
        """
        Fetch a single record by ID.

        Args:
            _id: the record's ID.
            fields: optional subset of column names to select.

        Returns:
            The record dict, or None when no such row exists.
        """
        records = self.fetch_many(_ids=[_id], fields=fields)
        # use .get() rather than [...]: if the decoded key ever differs
        # from _id, return None instead of raising KeyError
        return records.get(_id) if records else None

    def fetch_many(self, _ids: List, fields=None, as_list=False) -> Dict:
        """
        Fetch multiple records by ID.

        Args:
            _ids: IDs to select; an *empty* list selects every row.
            fields: column names to fetch; defaults to all adapted fields.
                The ID and revision columns are always included.
            as_list: return a list of records rather than an ID-keyed dict.

        Returns:
            Dict of decoded ID -> record dict (or a list when ``as_list``).
        """
        prepared_ids = [self.adapt_id(_id, serialize=True) for _id in _ids]

        if fields:
            if not isinstance(fields, set):
                fields = set(fields)
        else:
            # default to the source column of every field with an adapter
            fields = {
                f.source
                for f in self.resource_type.Schema.fields.values()
                if f.name in self._adapters
            }
        # the ID and revision columns are always selected
        fields.update({
            self.id_column_name,
            self.resource_type.Schema.fields[REV].source,
        })

        columns = []
        for k in fields:
            col = getattr(self.table.c, k)
            if isinstance(col.type, GeoalchemyGeometry):
                # geometry columns come back as GeoJSON text
                columns.append(sa.func.ST_AsGeoJSON(col).label(k))
            else:
                columns.append(col)

        select_stmt = sa.select(columns)

        id_col = getattr(self.table.c, self.id_column_name)

        # an empty ID list means "no WHERE clause" -> full table scan
        if prepared_ids:
            select_stmt = select_stmt.where(id_col.in_(prepared_ids))
        cursor = self.conn.execute(select_stmt)
        records = {} if not as_list else []

        # page through the cursor 512 rows at a time, decoding each row
        # and its ID back into python-land values
        while True:
            page = cursor.fetchmany(512)
            if page:
                for row in page:
                    raw_record = dict(row.items())
                    record = self.prepare(raw_record, serialize=False)
                    _id = self.adapt_id(row[self.id_column_name],
                                        serialize=False)
                    if as_list:
                        records.append(record)
                    else:
                        records[_id] = record
            else:
                break

        return records

    def fetch_all(self, fields: Set[Text] = None) -> Dict:
        """Fetch every record in the table (an empty ID list selects all)."""
        all_ids = []
        return self.fetch_many(all_ids, fields=fields)

    def create(self, record: dict) -> dict:
        """
        Insert a single record, generating its ID first.

        Returns:
            The created record, merged with server-generated defaults when
            the dialect supports RETURNING; otherwise a refetched copy.

        Raises:
            Exception: re-raised after logging when the insert fails.
        """
        record[self.id_column_name] = self.create_id(record)
        prepared_record = self.prepare(record, serialize=True)
        insert_stmt = self.table.insert().values(**prepared_record)
        # look the ID up by its actual column name, not a hard-coded '_id'
        # (the key was set via self.id_column_name above)
        _id = prepared_record.get(self.id_column_name, '')
        console.debug(f'SQL: INSERT {str(_id)[:7] + " " if _id else ""}'
                      f'INTO {self.table}')
        try:
            if self.supports_returning:
                insert_stmt = insert_stmt.return_defaults()
                result = self.conn.execute(insert_stmt)
                return dict(record, **(result.returned_defaults or {}))
            else:
                result = self.conn.execute(insert_stmt)
                return self.fetch(_id=record[self.id_column_name])
        except Exception:
            console.error(message='failed to insert record',
                          data={
                              'record': record,
                              'resource': get_class_name(self.resource_type),
                          })
            raise

    def create_many(self, records: List[Dict]) -> Dict:
        """
        Insert multiple records in one executemany call, generating each
        record's ID first.

        Returns:
            The refetched records, as a list.

        Raises:
            Exception: re-raised after logging when the insert fails.
        """
        prepared_records = []
        nullable_fields = self.resource_type.Schema.nullable_fields
        for record in records:
            record[self.id_column_name] = self.create_id(record)
            prepared_record = self.prepare(record, serialize=True)
            prepared_records.append(prepared_record)
            # executemany requires every row to supply the same keys, so
            # fill unset nullable columns with NULL.
            # NOTE(review): keys here use nullable_field.name, while other
            # code keys records by the field's *source* column — verify
            # these agree for fields where name != source.
            for nullable_field in nullable_fields.values():
                if nullable_field.name not in prepared_record:
                    prepared_record[nullable_field.name] = None

        try:
            self.conn.execute(self.table.insert(), prepared_records)
        except Exception:
            console.error('failed to insert records')
            raise

        n = len(prepared_records)
        # log by the actual ID column name rather than a hard-coded '_id'
        id_list_str = (', '.join(
            str(x[self.id_column_name])[:7]
            for x in prepared_records if x.get(self.id_column_name)))
        console.debug(f'SQL: INSERT {id_list_str} INTO {self.table} ' +
                      (f'(count: {n})' if n > 1 else ''))

        if self.supports_returning:
            # TODO: use implicit returning if possible
            pass

        return self.fetch_many((rec[self.id_column_name] for rec in records),
                               as_list=True)

    def update(self, _id, data: Dict) -> Dict:
        """
        Update one record by ID.

        Returns the input data merged with server defaults (RETURNING
        dialects), a refetched record, or the input data unchanged,
        depending on dialect support and the 'fetch_on_update' option.
        """
        prepared_data = self.prepare(data, serialize=True)
        if not prepared_data:
            # nothing to update; hand back the empty payload
            return prepared_data

        update_stmt = (
            self.table.update()
            .values(**prepared_data)
            .where(self._id_column == self.adapt_id(_id))
        )

        if self.supports_returning:
            update_stmt = update_stmt.return_defaults()
            console.debug(f'SQL: UPDATE {self.table}')
            result = self.conn.execute(update_stmt)
            return dict(data, **(result.returned_defaults or {}))

        self.conn.execute(update_stmt)
        if self._options.get('fetch_on_update', True):
            return self.fetch(_id)
        return data

    def update_many(self, _ids: List, data: List[Dict] = None) -> None:
        """
        Update multiple records by ID in one executemany statement.

        Args:
            _ids: IDs of the records to update (zipped with ``data``).
            data: per-record dicts of column values to set; required.

        Returns:
            The refetched records when the 'fetch_on_update' option is
            enabled (the default); otherwise None.
        """
        assert data

        prepared_ids = []
        prepared_records = []

        for _id, record in zip(_ids, data):
            prepared_id = self.adapt_id(_id)
            prepared_record = self.prepare(record, serialize=True)
            if prepared_record:
                prepared_ids.append(prepared_id)
                prepared_records.append(prepared_record)
                # NOTE(review): keyed by ID here but bound below via
                # bindparam(self.id_column_name) — these only line up when
                # the ID field's source column is literally '_id'; verify.
                prepared_record[ID] = prepared_id

        if prepared_records:
            n = len(prepared_records)
            console.debug(f'SQL: UPDATE {self.table} ' +
                          (f'({n}x)' if n > 1 else ''))
            values = {k: bindparam(k) for k in prepared_records[0].keys()}
            update_stmt = (self.table.update().where(
                self._id_column == bindparam(self.id_column_name)).values(
                    **values))
            self.conn.execute(update_stmt, prepared_records)

        if self._options.get('fetch_on_update', True):
            # TODO: use implicit RETURNING when supported instead of a
            # second round-trip. (The previous RETURNING / non-RETURNING
            # branches were byte-identical, so they are collapsed here.)
            return self.fetch_many(_ids)
        return

    def delete(self, _id) -> None:
        """Delete the row with the given ID."""
        adapted_id = self.adapt_id(_id)
        stmt = self.table.delete().where(self._id_column == adapted_id)
        self.conn.execute(stmt)

    def delete_many(self, _ids: list) -> None:
        """Delete every row whose ID appears in ``_ids``."""
        adapted_ids = [self.adapt_id(raw_id) for raw_id in _ids]
        stmt = self.table.delete().where(self._id_column.in_(adapted_ids))
        self.conn.execute(stmt)

    def delete_all(self):
        """Delete every row in the table."""
        self.conn.execute(self.table.delete())

    @property
    def table(self):
        """Return the sa.Table built for the bound resource (None until bind)."""
        built_table = self._table
        return built_table

    @property
    def conn(self):
        """
        This thread's SQLAlchemy connection, lazily opened on first access.
        """
        sqla_conn = getattr(self.ravel.local, 'sqla_conn', None)
        if sqla_conn is None:
            # lazily initialize a connection for this thread
            self.connect()
        return self.ravel.local.sqla_conn

    @property
    def supports_returning(self):
        """
        True when the bound engine's dialect supports implicit RETURNING
        clauses; always False before bootstrap.
        """
        if not self.is_bootstrapped():
            return False
        metadata = self.get_metadata()
        return metadata.bind.dialect.implicit_returning

    @classmethod
    def create_tables(cls, overwrite=False):
        """
        Create all tables for all SqlalchemyStores used in the host app.

        Args:
            overwrite: when True, drop all existing resource tables first.
        """
        if not cls.is_bootstrapped():
            console.error(f'{get_class_name(cls)} cannot create '
                          f'tables unless bootstrapped')
            return

        metadata = cls.get_metadata()
        bound_engine = cls.get_engine()

        if overwrite:
            console.info('dropping Resource SQL tables...')
            metadata.drop_all(bound_engine)

        # create all tables
        console.info('creating Resource SQL tables...')
        metadata.create_all(bound_engine)

    @classmethod
    def get_active_connection(cls):
        """Return this thread's SQLAlchemy connection, or None if absent."""
        thread_local = cls.ravel.local
        return getattr(thread_local, 'sqla_conn', None)

    @classmethod
    def connect(cls, refresh=True):
        """
        Create a singleton thread-local SQLAlchemy connection, shared across
        all Resources backed by a SQLAlchemy store. When working with multiple
        threads or processes, make sure each one calls connect() so it gets
        its own connection rather than sharing one across threads.

        Args:
            refresh: if this thread already has a connection, close it and
                open a fresh one instead of reusing it.

        Returns:
            The thread-local connection.
        """
        sqla_conn = getattr(cls.ravel.local, 'sqla_conn', None)
        metadata = cls.ravel.local.sqla_metadata
        if sqla_conn is not None:
            console.warning(
                message='sqlalchemy store already has connection', )
            if refresh:
                # close the stale connection before replacing it
                cls.close()
                cls.ravel.local.sqla_conn = metadata.bind.connect()
        else:
            cls.ravel.local.sqla_conn = metadata.bind.connect()

        return cls.ravel.local.sqla_conn

    @classmethod
    def close(cls):
        """
        Return the thread-local database connection to the sqlalchemy
        connection pool (AKA the "engine").
        """
        active_conn = getattr(cls.ravel.local, 'sqla_conn', None)
        if active_conn is None:
            console.warning('sqlalchemy has no connection to close')
            return
        console.debug('closing sqlalchemy connection')
        active_conn.close()
        cls.ravel.local.sqla_conn = None

    @classmethod
    def begin(cls, auto_connect=True, **kwargs):
        """
        Initialize a thread-local transaction, connecting first if needed.
        If a transaction is already pending, it is left in place (only a
        debug message is logged); no new transaction is started.

        Args:
            auto_connect: open a connection if this thread has none.

        Raises:
            Exception: when there is no active connection and auto_connect
                is False.
        """
        conn = cls.get_active_connection()
        if conn is None:
            if auto_connect:
                conn = cls.connect()
            else:
                raise Exception('no active sqlalchemy connection')

        existing_tx = getattr(cls.ravel.local, 'sqla_tx', None)
        if existing_tx is not None:
            console.debug('there is already an open transaction')
        else:
            new_tx = cls.ravel.local.sqla_conn.begin()
            cls.ravel.local.sqla_tx = new_tx

    @classmethod
    def commit(cls, rollback=True, **kwargs):
        """
        Call commit on the thread-local database transaction. "Begin" must be
        called to start a new transaction at this point, if a new transaction
        is desired.

        Args:
            rollback: when True (default), a failed commit triggers a
                rollback; otherwise the failure is only logged.

        Note:
            The connection is closed in all cases (see ``finally``), so a
            new connect()/begin() is needed afterwards.
        """
        def perform_sqlalchemy_commit():
            # commit and clear the thread-local transaction, if one exists
            tx = getattr(cls.ravel.local, 'sqla_tx', None)
            if tx is not None:
                cls.ravel.local.sqla_tx.commit()
                cls.ravel.local.sqla_tx = None

        # try to commit the transaction.
        console.debug(f'committing sqlalchemy transaction')
        try:
            perform_sqlalchemy_commit()
        except Exception:
            if rollback:
                # if the commit fails, rollback the transaction
                console.critical(f'rolling back sqlalchemy transaction')
                cls.rollback()
            else:
                console.exception(f'sqlalchemy transaction failed commit')
        finally:
            # ensure we close the connection either way
            cls.close()

    @classmethod
    def rollback(cls, **kwargs):
        """
        Roll back the thread-local transaction, if one is pending.
        Rollback failures are logged rather than propagated.
        """
        tx = getattr(cls.ravel.local, 'sqla_tx', None)
        if tx is not None:
            # clear the slot first so a failing rollback leaves no stale tx
            cls.ravel.local.sqla_tx = None
            try:
                tx.rollback()
            except Exception:
                # was a bare `except:`, which would also trap SystemExit
                # and KeyboardInterrupt
                console.exception('sqlalchemy transaction failed to rollback')

    @classmethod
    def has_transaction(cls) -> bool:
        """
        Return True if this thread has a pending transaction.
        """
        # use getattr-with-default for parity with begin()/rollback();
        # the thread-local may not have the attribute yet on a fresh thread
        return getattr(cls.ravel.local, 'sqla_tx', None) is not None

    @classmethod
    def get_metadata(cls):
        """Return this thread's SQLAlchemy MetaData object."""
        metadata = cls.ravel.local.sqla_metadata
        return metadata

    @classmethod
    def get_engine(cls):
        """Return the engine bound to this thread's MetaData."""
        metadata = cls.get_metadata()
        return metadata.bind

    @classmethod
    def dispose(cls):
        """Dispose the engine's connection pool (or clear stale metadata)."""
        metadata = cls.get_metadata()
        if not metadata:
            # no metadata bound for this thread; clear the slot and bail
            cls.ravel.local.sqla_metadata = None
            return
        metadata.bind.dispose()