def test_update(client, json_load, json_dump):
    """PUT updates an existing record and rejects invalid payloads with 422."""
    headers = [('content-type', 'application/json')]
    payload = sdict(string='bar', number=2, precise=1.1, dt=datetime(2000, 1, 1))
    rsp = client.post('/sample', data=json_dump(payload), headers=headers)
    created = json_load(rsp.data)
    rid = created['id']

    # valid partial update
    rsp = client.put(
        f'/sample/{rid}',
        data=json_dump(sdict(string='baz')),
        headers=headers
    )
    assert rsp.status == 200
    updated = json_load(rsp.data)
    assert updated['string'] == 'baz'

    #: validation tests
    rsp = client.put(
        f'/sample/{rid}',
        data=json_dump(sdict(number='baz')),
        headers=headers
    )
    assert rsp.status == 422
    body = json_load(rsp.data)
    assert body['errors']['number']
async def test_pipes(db, json_dump):
    """JSONQueryPipe should translate the JSON `where` query param into a
    dbset filter equivalent to the hand-written lambda query."""
    module_stub = sdict(
        _queryable_fields=['str', 'int'],
        model=Sample,
        ext=sdict(config=sdict(query_param='where'))
    )
    pipe = JSONQueryPipe(module_stub)
    condition = {'$or': [{'str': 'bar'}, {'int': {'$gt': 0}}]}
    current.request = sdict(
        query_params=sdict(where=json_dump(condition))
    )
    result = await pipe.pipe_request(_fake_pipe, dbset=Sample.all())
    expected = Sample.all().where(
        lambda m: (m.str == 'bar') | (m.int > 0)
    ).query
    assert queries_equal(result['dbset'].query, expected)
def db_config():
    """Build the MongoDB connection config from the environment, with
    localhost defaults suitable for local test runs."""
    config = sdict(adapter='mongodb')
    config.host = os.environ.get('MONGO_HOST', 'localhost')
    config.port = int(os.environ.get('MONGO_PORT', '27017'))
    config.database = os.environ.get('MONGO_DB', 'test')
    return config
def db():
    """In-memory SQLite database with the Register model, auto-connected."""
    application = App(__name__)
    database = Database(
        application,
        config=sdict(uri='sqlite:memory', auto_migrate=True, auto_connect=True)
    )
    database.define_models(Register)
    return database
def db():
    """In-memory SQLite database left unconnected (auto_connect=False)."""
    application = App(__name__)
    return Database(
        application,
        config=sdict(uri='sqlite:memory', auto_migrate=True, auto_connect=False)
    )
def pack_with_list_envelope_and_meta(self, envelope, data, **extras):
    """Wrap `data` under `envelope`, attach meta built from the list length,
    and merge any extra key/value pairs into the response dict."""
    total = len(data)
    # bind the count as a lambda default so build_meta gets a callable
    meta = self.build_meta(sdict(count=lambda c=total: c), (1, total))
    packed = {envelope: data, self.meta_envelope: meta}
    packed.update(extras)
    return packed
def db_config():
    """Build the PostgreSQL (psycopg2) connection config from the environment,
    with defaults suitable for local test runs."""
    env = os.environ.get
    config = sdict(adapter='postgres:psycopg2')
    config.host = env('POSTGRES_HOST', 'localhost')
    config.port = int(env('POSTGRES_PORT', 5432))
    config.user = env('POSTGRES_USER', 'postgres')
    config.password = env('POSTGRES_PASSWORD', 'postgres')
    config.database = env('POSTGRES_DB', 'test')
    return config
def _conditions_parser(
    op_set: Set[str],
    op_validators: Dict[str, Callable[[Any], Any]],
    op_parsers: Dict[str, Callable[[str, Any, sdict], Any]],
    op_remap: Dict[str, str],
    op_outer: Set[str],
    query_dict: Dict[str, Any],
    accepted_set: Set[str],
    outer: OuterCollector,
    parent: Optional[str] = None
) -> Dict[str, Any]:
    """Recursively translate a JSON-style condition dict into a parsed query dict.

    Keys in ``op_outer`` are parsed and accumulated on ``outer.data`` (under
    their ``op_remap`` name) instead of the returned query; keys in ``op_set``
    are parsed straight into the query; the remaining keys are kept only when
    their root segment (before the first ``"."``) is in ``accepted_set`` —
    dict values recurse, scalar values pass through unchanged.

    Returns the parsed query dict (possibly empty).
    """
    # Shared context handed to every operator parser.
    query, ctx = {}, sdict(
        op_set=op_set, op_validators=op_validators, op_parsers=op_parsers,
        op_remap=op_remap, op_outer=op_outer, accepted_set=accepted_set,
        outer=outer, parent=parent
    )
    query_key_set = set(query_dict.keys())
    # Group keys by their root field name ("a.b" -> "a"), keeping the full key.
    fields_keys = defaultdict(list)
    for key in query_key_set:
        fields_keys[key.split(".")[0]].append((key, query_dict[key]))
    # Outer operators accumulate on `outer` and are removed from further
    # parsing (iterating the intersection copy makes the remove safe).
    for key in query_key_set & op_outer:
        outer.data[op_remap[key]].append(
            op_parsers[key](key, query_dict[key], query, ctx)
        )
        query_key_set.remove(key)
    # Inline operators parse into the query under their remapped name.
    for key in query_key_set & op_set:
        query[op_remap[key]] = op_parsers[key](
            key, query_dict[key], query, ctx
        )
    # Accepted field conditions: dict values recurse inside a scoped outer
    # collector; empty recursive results are dropped.
    for key in accepted_set & set(fields_keys.keys()):
        for original_key, value in fields_keys[key]:
            if isinstance(value, dict):
                with outer.ctx(original_key) as step_outer:
                    parsed = _conditions_parser(
                        op_set, op_validators, op_parsers, op_remap, op_outer,
                        value, accepted_set, outer=step_outer, parent=parent
                    )
                    if not parsed:
                        continue
                    query[original_key] = parsed
            else:
                query[original_key] = value
    return query
def db():
    """File-backed SQLite database with all validator-test models defined."""
    application = App(__name__)
    database = Database(
        application,
        config=sdict(
            uri='sqlite://validators.db', auto_connect=True, auto_migrate=True
        )
    )
    models = [
        A, AA, AAA, B, Consist, Len, Inside, Num, Eq, Match, Anyone, Proc,
        Person, Thing, Allowed, Mixed
    ]
    database.define_models(models)
    return database
def db():
    """File-backed SQLite database with all DAL-test models defined."""
    application = App(__name__)
    database = Database(
        application,
        config=sdict(uri='sqlite://dal.db', auto_connect=True, auto_migrate=True)
    )
    models = [
        Stuff, Person, Thing, Feature, Price, Doctor, Patient, Appointment,
        User, Organization, Membership, House, Mouse, NeedSplit, Zoo, Animal,
        Elephant, Dog, Subscription
    ]
    database.define_models(models)
    return database
def test_create(client, json_load, json_dump):
    """POST creates a record (201) and rejects a bad field type with 422."""
    headers = [('content-type', 'application/json')]
    payload = sdict(string='bar', number=2, precise=1.1, dt=datetime(2000, 1, 1))
    rsp = client.post('/sample', data=json_dump(payload), headers=headers)
    assert rsp.status == 201
    created = json_load(rsp.data)
    assert created['id']
    assert created['string'] == 'bar'

    #: validation tests
    payload = sdict(
        string='bar', number='foo', precise=1.1, dt=datetime(2000, 1, 1)
    )
    rsp = client.post('/sample', data=json_dump(payload), headers=headers)
    assert rsp.status == 422
    body = json_load(rsp.data)
    assert body['errors']['number']
def _conditions_parser(op_set: Set[str],
                       op_validators: Dict[str, Callable[[Any], Any]],
                       op_parsers: Dict[str, Callable[[str, Any, sdict], Any]],
                       model: ModelType,
                       query_dict: Dict[str, Any],
                       accepted_set: Set[str],
                       parent: Optional[str] = None) -> Union[Query, None]:
    """Recursively build a database Query from a JSON-style condition dict.

    Operator keys (those in ``op_set``) at this level are parsed and AND-ed
    together; accepted field keys recurse one level deeper with the field name
    as ``parent`` (scalar values are first normalised to ``{'$eq': value}``).
    Returns the combined query, or ``None`` when nothing parsed — note that a
    parser returning a falsy condition collapses its whole AND-group to
    ``None`` via the reduce lambda.
    """
    # Shared context handed to every operator parser.
    query, ctx = None, sdict(op_set=op_set, op_validators=op_validators,
                             op_parsers=op_parsers, model=model,
                             accepted_set=accepted_set, parent=parent)
    query_key_set = set(query_dict.keys())
    step_conditions, inner_conditions = [], []
    # Operators present at this level parse into individual conditions.
    for key in query_key_set & op_set:
        step_conditions.append(op_parsers[key](key, query_dict[key], ctx))
    if step_conditions:
        # AND the conditions; any falsy member poisons the chain to None.
        step_query = reduce(
            lambda a, b: operator.and_(a, b) if a and b else None,
            step_conditions)
        query = query & step_query if query else step_query
    # Accepted field keys recurse with the field name as parent.
    for key in accepted_set & query_key_set:
        value = query_dict[key]
        if not isinstance(value, dict):
            value = {'$eq': value}
        inner_conditions.append(
            _conditions_parser(op_set, op_validators, op_parsers, model,
                               value, accepted_set, parent=key))
    if inner_conditions:
        inner_query = reduce(
            lambda a, b: operator.and_(a, b) if a and b else None,
            inner_conditions)
        query = query & inner_query if query else inner_query
    return query
def __init__(
    self, ext, name, import_name, model,
    serializer=None, parser=None,
    enabled_methods=None, disabled_methods=None,
    list_envelope=None, single_envelope=None,
    meta_envelope=None, groups_envelope=None,
    use_envelope_on_parse=None, serialize_meta=None,
    url_prefix=None, hostname=None,
    pipeline=None
):
    """Initialize the REST module around `model`.

    Wires serializer/parser classes, injects a JSONServicePipe into the
    pipeline when missing, loads pagination/sort/envelope settings from the
    extension config (every ``None`` keyword falls back to config), builds
    the query/field pipes, then runs custom and module initialization.
    """
    # Fix: the original default was a shared mutable list (pipeline=[]);
    # use a None sentinel so each instance gets its own fresh list.
    if pipeline is None:
        pipeline = []
    #: overridable methods
    self._fetcher_method = self._get_dbset
    self._select_method = self._get_row
    self.error_400 = self.build_error_400
    self.error_404 = self.build_error_404
    self.error_422 = self.build_error_422
    self.build_meta = self._build_meta
    #: callbacks
    self._before_create_callbacks = []
    self._before_update_callbacks = []
    self._after_params_callbacks = []
    self._after_create_callbacks = []
    self._after_update_callbacks = []
    self._after_delete_callbacks = []
    #: service pipe injection — only add a JSONServicePipe when neither the
    #  app pipeline nor the module pipeline already contains one
    add_service_pipe = True
    super_pipeline = list(pipeline)
    if any(
        isinstance(pipe, JSONServicePipe) for pipe in ext.app.pipeline
    ) or any(
        isinstance(pipe, JSONServicePipe) for pipe in super_pipeline
    ):
        add_service_pipe = False
    if add_service_pipe:
        super_pipeline.insert(0, JSONServicePipe())
    #: initialize
    super().__init__(
        ext.app, name, import_name,
        url_prefix=url_prefix,
        hostname=hostname,
        pipeline=super_pipeline
    )
    self.ext = ext
    #: pagination settings come straight from the extension config
    self._pagination = sdict()
    for key in (
        'page_param', 'pagesize_param',
        'min_pagesize', 'max_pagesize', 'default_pagesize'
    ):
        self._pagination[key] = self.ext.config[key]
    self._sort_param = self.ext.config.sort_param
    self.default_sort = self.ext.config.default_sort
    self._path_base = self.ext.config.base_path
    self._path_rid = self.ext.config.id_path
    self._serializer_class = serializer or \
        self.ext.config.default_serializer
    self._parser_class = parser or self.ext.config.default_parser
    self._parsing_params_kwargs = {}
    self.model = model
    self.serializer = self._serializer_class(self.model)
    self.parser = self._parser_class(self.model)
    #: enabled/disabled methods are constrained to the known method set
    self.enabled_methods = list(self._all_methods & set(
        list(
            enabled_methods if enabled_methods is not None else
            self.ext.config.default_enabled_methods
        )
    ))
    self.disabled_methods = list(self._all_methods & set(
        list(
            disabled_methods if disabled_methods is not None else
            self.ext.config.default_disabled_methods
        )
    ))
    self.list_envelope = list_envelope or self.ext.config.list_envelope
    self.single_envelope = (
        single_envelope if single_envelope is not None else
        self.ext.config.single_envelope
    )
    self.meta_envelope = (
        meta_envelope if meta_envelope is not None else
        self.ext.config.meta_envelope
    )
    self.groups_envelope = (
        groups_envelope if groups_envelope is not None else
        self.ext.config.groups_envelope
    )
    self.use_envelope_on_parse = (
        use_envelope_on_parse if use_envelope_on_parse is not None else
        self.ext.config.use_envelope_on_parse
    )
    self.serialize_meta = (
        serialize_meta if serialize_meta is not None else
        self.ext.config.serialize_meta
    )
    #: field registries filled later by _init_pipelines / decorators
    self._queryable_fields = []
    self._sortable_fields = []
    self._sortable_dict = {}
    self._groupable_fields = []
    self._statsable_fields = []
    self._json_query_pipe = JSONQueryPipe(self)
    self._group_field_pipe = FieldPipe(self, '_groupable_fields')
    self._stats_field_pipe = FieldsPipe(self, '_statsable_fields')
    self.allowed_sorts = [self.default_sort]
    self._init_pipelines()
    #: custom init
    self.init()
    #: configure module
    self._after_initialize()
def _build_query_ctx(self):
    """Fresh parsing context carrying an OuterCollector for outer operators."""
    ctx = sdict()
    ctx.outer = OuterCollector()
    return ctx
def _build_query_ctx(self):
    """Empty parsing context; this parser needs no extra state."""
    ctx = sdict()
    return ctx
def test_computations(db):
    """The computed `total` field should equal price * quantity."""
    record = sdict(price=12.95, quantity=3)
    computed = db.Stuff.total.compute(record)
    assert computed == 12.95 * 3
def test_user_assign_valid_level():
    """A configured level name ('info') maps onto the handler's level."""
    app = App(__name__)
    app.config.logging.pytest = sdict(level='info')
    logger = _call_create_logger(app)
    assert logger.handlers[-1].level == logging.INFO
def _parse(accepted_set, params):
    """Copy only the accepted keys out of `params` into a new sdict."""
    result = sdict()
    for field in set(params) & accepted_set:
        result[field] = params[field]
    return result
def test_user_no_assign_level():
    """Without an explicit level, the handler defaults to WARNING."""
    app = App(__name__)
    app.config.logging.pytest = sdict()
    logger = _call_create_logger(app)
    assert logger.handlers[-1].level == logging.WARNING
async def test_pipes(db, json_dump):
    """Mongo query pipes: the plain pipe translates $geo.near inline into
    a $near clause; the aggregate pipe moves it into a $geoNear step."""
    fake_mod = sdict(
        _queryable_fields=['string', 'number', 'geo'],
        model=Sample,
        ext=sdict(config=sdict(query_param='where'))
    )
    pipe = JSONQueryPipe(fake_mod)
    pipe_aggr = AggregateJSONQueryPipe(fake_mod)

    # shared geo condition used by both requests below
    geo_cond = {
        '$geo.near': {
            'coordinates': {'lat': 44.10, 'lon': 16.10},
            'distance': {'min': 2000, 'max': 5000}
        }
    }

    current.request = sdict(
        query_params=sdict(
            where=json_dump({
                '$or': [
                    {'string': 'bar'},
                    {'number': {'$gt': 0}},
                    {'geo': geo_cond}
                ]
            })
        )
    )
    res = await pipe.pipe_request(_fake_pipe, query=MongoQuery())
    assert res['query'].result == {'$and': [{
        '$or': [
            {'string': 'bar'},
            {'number': {'$gt': 0}},
            {'geo': {
                '$near': {
                    '$geometry': {
                        'type': 'Point',
                        'coordinates': [16.10, 44.10]},
                    '$minDistance': 2000,
                    '$maxDistance': 5000}}}
        ]
    }]}
    assert 'aggregation_steps' not in res

    current.request = sdict(
        query_params=sdict(
            where=json_dump({
                'string': 'foo',
                'number': {'$gt': 0},
                'geo': geo_cond
            })
        )
    )
    res = await pipe_aggr.pipe_request(_fake_pipe, query=MongoQuery())
    assert res['query'].result == {'$and': [{
        'string': 'foo',
        'number': {'$gt': 0}
    }]}
    assert res['aggregation_steps'] == [{
        '$geoNear': {
            'key': 'geo',
            'near': {
                'type': 'Point',
                'coordinates': [16.1, 44.1]
            },
            'spherical': False,
            'distanceField': 'distance',
            'maxDistance': 5000,
            'minDistance': 2000
        }
    }]