def process_args_and_fetch_rows(
        q, default_limit=None, default_sort=None, default_orderby=None,
        default_offset=None, default_page=None, default_per_page=None):
    """Apply request-driven filters to a query and fetch the matching rows.

    If ``q`` is already a ``Response`` (an upstream step short-circuited),
    it is returned untouched. Otherwise the ``_f`` request arg — a JSON
    filter list, possibly JSON-encoded twice by the client — and the
    remaining query-string args are applied to the query.

    Returns the JSON-wrapped row count when the ``count_only`` request arg
    is truthy, else the result of ``fetch_results_in_requested_format``
    with the given pagination/sorting defaults.
    """
    if isinstance(q, Response):
        return q
    if '_f' in request.args:
        filters = _json.loads(request.args['_f'])
        # Some clients double-encode the filter list; decode a second time
        # when the first pass yields a string instead of a list.
        if isinstance(filters, (str, six.text_type)):
            filters = _json.loads(filters)
        q = filter_query_using_filters_list(q, filters)
    filtered_query = filter_query_using_args(q)
    count_only = boolify(request.args.get('count_only', 'false'))
    if count_only:
        return as_json(filtered_query.count())
    return fetch_results_in_requested_format(
        filtered_query,
        default_limit=default_limit,
        default_sort=default_sort,
        default_orderby=default_orderby,
        default_offset=default_offset,
        default_page=default_page,
        default_per_page=default_per_page)
def process_args_and_fetch_rows(
        q, default_limit=None, default_sort=None, default_orderby=None,
        default_offset=None, default_page=None, default_per_page=None):
    """Apply request-driven filters to a query and fetch the matching rows.

    Returns ``q`` untouched when it is already a ``Response``; otherwise
    applies the ``_f`` JSON filter list (possibly double-encoded) and the
    remaining query-string args, then either returns the JSON-wrapped row
    count (``count_only``) or the formatted results.

    NOTE(review): this duplicates an identical earlier definition in this
    file; the later definition wins at import time — consider removing one.
    """
    if isinstance(q, Response):
        return q
    if '_f' in request.args:
        filters = _json.loads(request.args['_f'])
        # BUG FIX: `unicode` does not exist on Python 3 and raised a
        # NameError here. Check the native `str` first (short-circuits on
        # Py3); `six.text_type` keeps Py2 unicode support, matching the
        # sibling definition above.
        if isinstance(filters, str) or isinstance(filters, six.text_type):
            filters = _json.loads(filters)
        q = filter_query_using_filters_list(q, filters)
    filtered_query = filter_query_using_args(q)
    count_only = boolify(request.args.get('count_only', 'false'))
    if count_only:
        return as_json(filtered_query.count())
    result = fetch_results_in_requested_format(
        filtered_query,
        default_limit=default_limit,
        default_sort=default_sort,
        default_orderby=default_orderby,
        default_offset=default_offset,
        default_page=default_page,
        default_per_page=default_per_page)
    return result
def _serializable_params(args, check_groupby=False):
    """Translate request-style args into serialization keyword params.

    Reads ``_ds``, ``attrs``, ``rels`` and ``expand`` from *args*, and
    ``grouprelby``, ``groupby`` (only when *check_groupby*) and
    ``preserve_order`` from the global ``request.args``. A literal value
    of ``none`` (case-insensitive) selects an empty list.
    """
    params = {}

    if '_ds' in args:
        dict_struct = _json.loads(args['_ds'])
        # A string result means the client JSON-encoded twice; decode again.
        if isinstance(dict_struct, (str, six.text_type)):
            dict_struct = _json.loads(dict_struct)
        params['dict_struct'] = dict_struct

    if 'attrs' in args:
        attr_csv = args.get('attrs')
        params['attrs_to_serialize'] = (
            [] if attr_csv.lower() == 'none' else attr_csv.split(','))

    if 'rels' in args:
        rel_csv = args.get('rels')
        if rel_csv.lower() == 'none':
            params['rels_to_serialize'] = []
        else:
            # Each entry is "relname:struct"; entries without a colon
            # are skipped.
            rel_pairs = []
            for rel in rel_csv.split(','):
                rel_name, sep, rel_struct = rel.partition(':')
                if sep:
                    rel_pairs.append((rel_name, rel_struct))
            params['rels_to_serialize'] = rel_pairs

    if 'expand' in args:
        expand_csv = args.get('expand')
        params['rels_to_expand'] = (
            [] if expand_csv.lower() == 'none' else expand_csv.split(','))

    if 'grouprelby' in request.args:
        # Each entry is "relname:key1,key2,..." mapping a relationship to
        # the attribute names used for grouping its items.
        grouped = {}
        for arg in request.args.getlist('grouprelby'):
            rel_name, _, keys_csv = arg.partition(':')
            grouped[rel_name] = keys_csv.split(',')
        params['group_listrels_by'] = grouped

    if check_groupby and 'groupby' in request.args:
        params['groupby'] = request.args.get('groupby').split(',')

    if 'preserve_order' in request.args:
        params['preserve_order'] = boolify(request.args.get('preserve_order'))

    return params
def _serializable_params(args, check_groupby=False):
    """Translate request-style args into serialization keyword params.

    Reads ``_ds``, ``attrs``, ``rels`` and ``expand`` from *args*, and
    ``grouprelby``, ``groupby`` (only when *check_groupby*) and
    ``preserve_order`` from the global ``request.args``.

    NOTE(review): this duplicates an identical earlier definition in this
    file; the later definition wins at import time — consider removing one.
    """
    params = {}
    if '_ds' in args:
        params['dict_struct'] = _json.loads(args['_ds'])
        # BUG FIX: `unicode` does not exist on Python 3 and raised a
        # NameError here. Check the native `str` first (short-circuits on
        # Py3); `six.text_type` keeps Py2 unicode support, matching the
        # sibling definition above.
        if isinstance(params['dict_struct'], str) or isinstance(
                params['dict_struct'], six.text_type):
            params['dict_struct'] = _json.loads(params['dict_struct'])
    if 'attrs' in args:
        attrs = args.get('attrs')
        if attrs.lower() == 'none':
            params['attrs_to_serialize'] = []
        else:
            params['attrs_to_serialize'] = attrs.split(',')
    if 'rels' in args:
        rels = args.get('rels')
        if rels.lower() == 'none':
            params['rels_to_serialize'] = []
        else:
            # "relname:struct" pairs; entries without a colon are skipped.
            params['rels_to_serialize'] = [
                (rel.partition(':')[0], rel.partition(':')[2])
                for rel in rels.split(',') if ':' in rel]
    if 'expand' in args:
        expand = args.get('expand')
        if expand.lower() == 'none':
            params['rels_to_expand'] = []
        else:
            params['rels_to_expand'] = expand.split(',')
    if 'grouprelby' in request.args:
        params['group_listrels_by'] = {
            arg.partition(':')[0]: arg.partition(':')[2].split(',')
            for arg in request.args.getlist('grouprelby')}
    if check_groupby and 'groupby' in request.args:
        params['groupby'] = request.args.get('groupby').split(',')
    if 'preserve_order' in request.args:
        params['preserve_order'] = boolify(request.args.get('preserve_order'))
    return params
def type_coerce_value(column_type, value):
    """Coerce a raw (typically query-string) value to the Python type
    implied by the given SQLAlchemy column type.

    Returns ``None`` for ``None`` and for the string sentinels ``none`` /
    ``null`` (case-insensitive) and all-whitespace strings. Non-string
    values, and strings whose column type has no coercion rule, are
    returned unchanged.
    """
    if value is None:
        return None
    # BUG FIX: `unicode` does not exist on Python 3 and raised a NameError
    # here for every string input. Check the native `str` first so the
    # expression short-circuits on Py3; `six.text_type` keeps Py2 unicode
    # support, matching the sibling definition of this function.
    if isinstance(value, str) or isinstance(value, six.text_type):
        if value.lower() == 'none' or value.lower() == 'null' \
                or value.strip() == '':
            return None
        if column_type is sqltypes.Integer:
            value = int(value)
        elif column_type is sqltypes.Numeric:
            value = Decimal(value)
        elif column_type is sqltypes.Boolean:
            value = boolify(value)
        elif column_type is sqltypes.DateTime:
            value = dateutil.parser.parse(value)
        elif column_type is sqltypes.Date:
            value = dateutil.parser.parse(value).date()
    return value
def type_coerce_value(column_type, value):
    """Convert a raw string to the Python value implied by *column_type*.

    ``None`` and the string sentinels ``none``/``null`` (case-insensitive)
    or blank strings map to ``None``; non-string inputs and unrecognized
    column types pass through unchanged.
    """
    if value is None:
        return None
    if not isinstance(value, (six.text_type, str)):
        # Already a concrete (non-string) value; nothing to coerce.
        return value
    if value.lower() in ('none', 'null') or value.strip() == '':
        return None
    if column_type is sqltypes.Integer:
        return int(value)
    if column_type is sqltypes.Numeric:
        return Decimal(value)
    if column_type is sqltypes.Boolean:
        return boolify(value)
    if column_type is sqltypes.DateTime:
        return dateutil.parser.parse(value)
    if column_type is sqltypes.Date:
        return dateutil.parser.parse(value).date()
    return value
def process_args_and_render_json_list(q, **kwargs):
    """Filter a query per the request args and render the rows as JSON.

    If ``q`` is already a ``Response``, it is returned untouched. The
    ``_f`` request arg (a JSON filter list, possibly double-encoded) and
    remaining query-string args are applied; ``count_only`` short-circuits
    to a JSON row count. Pagination/sorting defaults are popped from
    *kwargs*; the rest of *kwargs* is forwarded to
    ``convert_result_to_response``.

    On any fetch failure (e.g. a page past the end) a 404 JSON payload
    reporting the total page count is returned.
    """
    if isinstance(q, Response):
        return q
    if '_f' in request.args:
        filters = _json.loads(request.args['_f'])
        # BUG FIX: `unicode` does not exist on Python 3 and raised a
        # NameError here. Check the native `str` first (short-circuits on
        # Py3); `six.text_type` keeps Py2 unicode support.
        if isinstance(filters, str) or isinstance(filters, six.text_type):
            filters = _json.loads(filters)
        q = filter_query_using_filters_list(q, filters)
    filtered_query = filter_query_using_args(q)
    count_only = boolify(request.args.get('count_only', 'false'))
    if count_only:
        return as_json(filtered_query.count())
    try:
        result = fetch_results_in_requested_format(
            filtered_query,
            default_limit=kwargs.pop('default_limit', None),
            default_sort=kwargs.pop('default_sort', None),
            default_orderby=kwargs.pop('default_orderby', None),
            default_offset=kwargs.pop('default_offset', None),
            default_page=kwargs.pop('default_page', None),
            default_per_page=kwargs.pop('default_per_page', None))
    # BUG FIX: the bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; catch Exception and keep the best-effort 404.
    except Exception:
        traceback.print_exc()
        per_page = request.args.get('per_page', PER_PAGE_ITEMS_COUNT)
        return as_json(
            {
                "status": "failure",
                "error": "PAGE_NOT_FOUND",
                "total_pages": int(
                    math.ceil(float(filtered_query.count()) / int(per_page)))
            },
            status=404, wrap=False)
    return convert_result_to_response(result, **kwargs)
def process_args_and_render_json_list(q, **kwargs):
    """Filter a query per the request args and render the rows as JSON.

    Same contract as its sibling: applies the ``_f`` JSON filter list and
    query-string args, honors ``count_only``, and falls back to a 404 JSON
    payload (with ``total_pages``) when fetching the requested page fails.

    NOTE(review): this duplicates an identical earlier definition in this
    file; the later definition wins at import time — consider removing one.
    """
    if isinstance(q, Response):
        return q
    if '_f' in request.args:
        filters = _json.loads(request.args['_f'])
        # BUG FIX: `unicode` does not exist on Python 3 and raised a
        # NameError here. Check the native `str` first (short-circuits on
        # Py3); `six.text_type` keeps Py2 unicode support.
        if isinstance(filters, str) or isinstance(filters, six.text_type):
            filters = _json.loads(filters)
        q = filter_query_using_filters_list(q, filters)
    filtered_query = filter_query_using_args(q)
    count_only = boolify(request.args.get('count_only', 'false'))
    if count_only:
        return as_json(filtered_query.count())
    try:
        result = fetch_results_in_requested_format(
            filtered_query,
            default_limit=kwargs.pop('default_limit', None),
            default_sort=kwargs.pop('default_sort', None),
            default_orderby=kwargs.pop('default_orderby', None),
            default_offset=kwargs.pop('default_offset', None),
            default_page=kwargs.pop('default_page', None),
            default_per_page=kwargs.pop('default_per_page', None))
    # BUG FIX: the bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; catch Exception and keep the best-effort 404.
    except Exception:
        traceback.print_exc()
        per_page = request.args.get('per_page', PER_PAGE_ITEMS_COUNT)
        return as_json({
            "status": "failure",
            "error": "PAGE_NOT_FOUND",
            "total_pages": int(
                math.ceil(float(filtered_query.count()) / int(per_page)))
        }, status=404, wrap=False)
    return convert_result_to_response(result, **kwargs)
def filter_query_with_key(query, keyword, value, op):
    """Apply a single ``<keyword> <op> <value>`` filter to *query*.

    ``keyword`` may be dotted (e.g. ``user.city.name``): the leading
    segments are resolved either as model-class names in the declarative
    registry or as relationship / association-proxy names on the query's
    model class, joining each resolved class onto the query; the final
    segment is the attribute to filter on. Returns the filtered query, or
    the original query untouched when the attribute cannot be resolved.

    NOTE(review): assumes *query* is a project query subclass exposing
    ``model_class``, ``all_keys()``, ``relationship_keys()`` and
    ``association_proxy_keys()`` — confirm against the caller.
    NOTE(review): ``_decl_class_registry`` was removed in SQLAlchemy 1.4+;
    this code presumably targets SQLAlchemy < 1.4 — verify.
    """
    if '.' in keyword:
        # Dotted keyword: everything before the last dot names the join
        # path; the last segment is the attribute to compare.
        kw_split_arr = keyword.split('.')
        prefix_names = kw_split_arr[:-1]
        attr_name = kw_split_arr[-1]
        _query = query
        model_class = query.model_class
        if prefix_names[0] in query.model_class._decl_class_registry:
            # Path of declarative class names: join each class in order.
            for class_name in prefix_names:
                if class_name not in query.model_class._decl_class_registry:
                    # Unknown class name mid-path: bail out unfiltered.
                    return query
                model_class = query.model_class._decl_class_registry[
                    class_name]
                _query = _query.join(model_class)
        elif prefix_names[0] in query.model_class.all_keys():
            # Path of relationship / association-proxy attribute names,
            # walked starting from the query's own model class.
            model_class = query.model_class
            for rel_or_proxy_name in prefix_names:
                if rel_or_proxy_name in model_class.relationship_keys():
                    # Plain relationship: hop to the related class and join.
                    mapped_rel = next(
                        r for r in model_class.__mapper__.relationships
                        if r.key == rel_or_proxy_name)
                    model_class = mapped_rel.mapper.class_
                    _query = _query.join(model_class)
                elif rel_or_proxy_name in model_class.association_proxy_keys():
                    # Association proxy: join the intermediate association
                    # class first, then the proxied target class.
                    assoc_proxy = getattr(model_class, rel_or_proxy_name)
                    assoc_rel = next(
                        r for r in model_class.__mapper__.relationships
                        if r.key == assoc_proxy.target_collection)
                    assoc_rel_class = assoc_rel.mapper.class_
                    _query = _query.join(assoc_rel_class)
                    actual_rel_in_assoc_class = next(
                        r for r in assoc_rel_class.__mapper__.relationships
                        if r.key == assoc_proxy.value_attr)
                    model_class = actual_rel_in_assoc_class.mapper.class_
                    _query = _query.join(model_class)
    else:
        # Simple keyword: filter directly on the query's own model class.
        model_class = query.model_class
        attr_name = keyword
        _query = query
    if hasattr(model_class, attr_name):
        key = getattr(model_class, attr_name)
        if op == '~':
            # LIKE operator: wrap the raw value in SQL wildcards.
            value = "%{0}%".format(value)
        if op in ['=', '>', '<', '>=', '<=', '!']:
            # For comparison operators on real mapped columns, coerce the
            # raw (string) value to the column's Python type; the literal
            # string 'none' means a NULL comparison.
            columns = getattr(
                getattr(model_class, '__mapper__'), 'columns')
            if attr_name in columns:
                if value == 'none':
                    value = None
                if value is not None:
                    column_type = type(
                        columns[attr_name].type)
                    if column_type is sqltypes.Integer:
                        value = int(value)
                    elif column_type is sqltypes.Numeric:
                        value = Decimal(value)
                    elif column_type is sqltypes.Boolean:
                        value = boolify(value)
                    elif column_type is sqltypes.DateTime:
                        # NOTE(review): unlike type_coerce_value elsewhere
                        # in this file, Date columns are not coerced here —
                        # possibly an omission; confirm.
                        value = dateutil.parser.parse(value)
        # OPERATOR_FUNC maps the op symbol to the column-attribute method
        # name (e.g. '=' -> '__eq__'); build and apply the criterion.
        return _query.filter(getattr(
            key, OPERATOR_FUNC[op])(value))
    else:
        return query
def filter_query_with_key(query, keyword, value, op):
    """Apply a single ``<keyword> <op> <value>`` filter to *query*.

    A dotted ``keyword`` (e.g. ``user.city.name``) is resolved segment by
    segment — via the declarative class registry, or via relationship /
    association-proxy names on the query's model class — joining each
    resolved class; the last segment is the attribute compared with
    *value* using *op*. Unresolvable attributes leave the query unchanged.

    NOTE(review): this duplicates an identical earlier definition in this
    file; the later definition wins at import time — consider removing one.
    NOTE(review): assumes *query* is a project query subclass exposing
    ``model_class`` and the ``*_keys()`` helpers — confirm against caller.
    """
    if '.' in keyword:
        # Split the dotted path: prefixes to join, final attribute name.
        kw_split_arr = keyword.split('.')
        prefix_names = kw_split_arr[:-1]
        attr_name = kw_split_arr[-1]
        _query = query
        model_class = query.model_class
        if prefix_names[0] in query.model_class._decl_class_registry:
            # Prefixes are declarative class names: join them in order.
            for class_name in prefix_names:
                if class_name not in query.model_class._decl_class_registry:
                    # Unknown class name mid-path: bail out unfiltered.
                    return query
                model_class = query.model_class._decl_class_registry[
                    class_name]
                _query = _query.join(model_class)
        elif prefix_names[0] in query.model_class.all_keys():
            # Prefixes are relationship / association-proxy names.
            model_class = query.model_class
            for rel_or_proxy_name in prefix_names:
                if rel_or_proxy_name in model_class.relationship_keys():
                    # Plain relationship: advance to the related class.
                    mapped_rel = next(
                        r for r in model_class.__mapper__.relationships
                        if r.key == rel_or_proxy_name)
                    model_class = mapped_rel.mapper.class_
                    _query = _query.join(model_class)
                elif rel_or_proxy_name in model_class.association_proxy_keys():
                    # Association proxy: join the association class, then
                    # the class the proxy ultimately points at.
                    assoc_proxy = getattr(model_class, rel_or_proxy_name)
                    assoc_rel = next(
                        r for r in model_class.__mapper__.relationships
                        if r.key == assoc_proxy.target_collection)
                    assoc_rel_class = assoc_rel.mapper.class_
                    _query = _query.join(assoc_rel_class)
                    actual_rel_in_assoc_class = next(
                        r for r in assoc_rel_class.__mapper__.relationships
                        if r.key == assoc_proxy.value_attr)
                    model_class = actual_rel_in_assoc_class.mapper.class_
                    _query = _query.join(model_class)
    else:
        # Undotted keyword: attribute lives on the query's own model class.
        model_class = query.model_class
        attr_name = keyword
        _query = query
    if hasattr(model_class, attr_name):
        key = getattr(model_class, attr_name)
        if op == '~':
            # LIKE operator: wrap the raw value in SQL wildcards.
            value = "%{0}%".format(value)
        if op in ['=', '>', '<', '>=', '<=', '!']:
            # Coerce the raw (string) value to the mapped column's Python
            # type; the literal 'none' requests a NULL comparison.
            columns = getattr(getattr(model_class, '__mapper__'), 'columns')
            if attr_name in columns:
                if value == 'none':
                    value = None
                if value is not None:
                    column_type = type(columns[attr_name].type)
                    if column_type is sqltypes.Integer:
                        value = int(value)
                    elif column_type is sqltypes.Numeric:
                        value = Decimal(value)
                    elif column_type is sqltypes.Boolean:
                        value = boolify(value)
                    elif column_type is sqltypes.DateTime:
                        # NOTE(review): Date columns get no coercion here,
                        # unlike type_coerce_value above — confirm intent.
                        value = dateutil.parser.parse(value)
        # OPERATOR_FUNC maps op symbols to comparator method names.
        return _query.filter(getattr(key, OPERATOR_FUNC[op])(value))
    else:
        return query