def ads_list():
    oblast_district = request.args.get('oblast_district')
    # Each raw query value is wrapped in a TypeConversionDict so that
    # get(..., type=...) can coerce it; the key name 'under_construction'
    # is reused as a scratch slot for every value.
    type_conversion_dict = TypeConversionDict(
        under_construction=request.args.get('min_price') or MIN_PRICE)
    min_price = type_conversion_dict.get('under_construction', type=int)
    type_conversion_dict = TypeConversionDict(
        under_construction=request.args.get('max_price') or MAX_PRICE)
    max_price = type_conversion_dict.get('under_construction', type=int)
    type_conversion_dict = TypeConversionDict(
        under_construction=request.args.get('new_building'))
    under_construction = type_conversion_dict.get('under_construction', type=bool)
    flats = Flat.query.join(Location).filter(
        Location.oblast_district == oblast_district,
        Flat.price >= min_price,
        Flat.price <= max_price,
        Flat.under_construction.is_(under_construction),
        Flat.is_active.is_(True)).order_by(Flat.price.asc())
    # Without any filters, fall back to the full, price-ordered listing.
    if not any(
            [oblast_district, min_price != MIN_PRICE, max_price != MAX_PRICE]):
        flats = Flat.query.order_by(Flat.price.asc())
    page, per_page, offset = get_page_args(page_parameter='page',
                                           per_page_parameter='per_page')
    pagination_flats = get_flats(flats, offset=offset, per_page=per_page)
    pagination = Pagination(page=page, per_page=per_page, total=flats.count())
    return render_template('ads_list.html',
                           flats=pagination_flats,
                           pagination=pagination,
                           page=page,
                           per_page=per_page,
                           major_locations=MAJOR_LOCATIONS,
                           locations=LOCATIONS)
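Since Flask's request.args is itself a TypeConversionDict subclass (ImmutableMultiDict inherits its get()), the per-value wrapping above can usually be collapsed into single calls. A minimal sketch, assuming the same MIN_PRICE/MAX_PRICE constants as the original:

    # Sketch only: request.args.get already supports default= and type=.
    min_price = request.args.get('min_price', default=MIN_PRICE, type=int)
    max_price = request.args.get('max_price', default=MAX_PRICE, type=int)
    # Caution: type=bool returns True for any non-empty string,
    # including '0' and 'false' -- a known TypeConversionDict pitfall.
    under_construction = request.args.get('new_building', type=bool)

If conversion fails (e.g. a non-numeric 'min_price'), get() swallows the ValueError and returns the default, which is close to what the original 'or MIN_PRICE' dance achieves.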
def before_request():
    decorate_endpoints()
    # Rebuild the (immutable) cookie jar with a request timestamp attached.
    d = TypeConversionDict(request.cookies)
    d['request_time'] = time.time()
    request.cookies = ImmutableTypeConversionDict(d)
    logging.info("before_request %s" % request)
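For reference, a minimal standalone sketch of the two werkzeug classes used above: get(..., type=...) falls back to the default when conversion raises ValueError, and the immutable variant rejects writes after construction.

    from werkzeug.datastructures import (
        TypeConversionDict,
        ImmutableTypeConversionDict,
    )

    d = TypeConversionDict(request_time='1700000000.5', retries='oops')
    d.get('request_time', type=float)      # 1700000000.5
    d.get('retries', default=0, type=int)  # 0 -- ValueError falls back to default
    frozen = ImmutableTypeConversionDict(d)
    frozen.get('request_time', type=float) # reads (and conversions) still work
    # frozen['x'] = 1                      # would raise TypeError: immutable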
def stac_search():
    """
    Search api for stac items.
    """
    if request.method == "GET":
        args = request.args
    else:
        args = TypeConversionDict(request.get_json())

    products = args.get("collections", default=[], type=_array_arg)
    if "collection" in args:
        products.append(args.get("collection"))
    # Fallback for legacy 'product' argument
    elif "product" in args:
        products.append(args.get("product"))

    return _geojson_stac_response(_handle_search_request(args, products))
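The body of the _array_arg converter isn't shown in these examples. A plausible sketch (an assumption, not the project's actual code) that matches the call sites here and in _handle_search_request would split comma-separated query values and coerce each element:

    # Hypothetical sketch of an _array_arg-style converter.
    def _array_arg(value, expect_size=None, expect_type=str):
        if isinstance(value, str):
            value = value.split(',')
        values = [expect_type(v.strip() if isinstance(v, str) else v)
                  for v in value]
        if expect_size is not None and len(values) != expect_size:
            # Raising ValueError makes TypeConversionDict.get return its
            # default; the caller's own len(bbox) check guards the rest.
            raise ValueError(f'expected {expect_size} values, got {len(values)}')
        return values

Because it raises ValueError on bad input, a malformed bbox or ids parameter quietly degrades to the default rather than crashing the request.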
def _handle_search_request(
    request_args: TypeConversionDict,
    product_names: List[str],
) -> Dict:
    bbox = request_args.get(
        "bbox", type=partial(_array_arg, expect_size=4, expect_type=float)
    )

    # Stac-api <=0.7.0 used 'time', later versions use 'datetime'
    time = request_args.get("datetime") or request_args.get("time")

    limit = request_args.get("limit", default=DEFAULT_PAGE_SIZE, type=int)
    ids = request_args.get(
        "ids", default=None, type=partial(_array_arg, expect_type=uuid.UUID)
    )
    offset = request_args.get("_o", default=0, type=int)

    # Request the full Item information. This forces us to go to the
    # ODC dataset table for every record, which can be extremely slow.
    full_information = request_args.get(
        "_full", default=DEFAULT_RETURN_FULL_ITEMS, type=_bool_argument
    )

    if limit > PAGE_SIZE_LIMIT:
        abort(
            400,
            f"Max page size is {PAGE_SIZE_LIMIT}. "
            f"Use the next links instead of a large limit.",
        )

    if bbox is not None and len(bbox) != 4:
        abort(400, "Expected bbox of size 4. [min lon, min lat, max long, max lat]")

    if time is not None:
        time = _parse_time_range(time)

    def next_page_url(next_offset):
        return url_for(
            ".stac_search",
            collections=product_names,
            bbox="{},{},{},{}".format(*bbox) if bbox else None,
            time=_unparse_time_range(time) if time else None,
            ids=",".join(map(str, ids)) if ids else None,
            limit=limit,
            _o=next_offset,
        )

    return search_stac_items(
        product_names=product_names,
        bbox=bbox,
        time=time,
        dataset_ids=ids,
        limit=limit,
        offset=offset,
        get_next_url=next_page_url,
        full_information=full_information,
    )
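_bool_argument is likewise passed as a type= converter; its body isn't shown. The sketch below (an assumption) illustrates why a string-aware parser is needed at all: a plain type=bool would turn every non-empty query value, including "false", into True.

    # Hypothetical sketch of a _bool_argument-style converter.
    def _bool_argument(value):
        if isinstance(value, bool):
            return value
        return str(value).strip().lower() in ('1', 'true', 'yes', 'on')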
def _handle_search_request(
    request_args: TypeConversionDict,
    product_names: List[str],
    require_geometry: bool = True,
    include_total_count: bool = True,
) -> Dict:
    bbox = request_args.get(
        "bbox", type=partial(_array_arg, expect_size=4, expect_type=float)
    )

    # Stac-api <=0.7.0 used 'time', later versions use 'datetime'
    time = request_args.get("datetime") or request_args.get("time")

    limit = request_args.get("limit", default=DEFAULT_PAGE_SIZE, type=int)
    ids = request_args.get(
        "ids", default=None, type=partial(_array_arg, expect_type=uuid.UUID)
    )
    offset = request_args.get("_o", default=0, type=int)

    # Request the full Item information. This forces us to go to the
    # ODC dataset table for every record, which can be extremely slow.
    full_information = request_args.get(
        "_full", default=DEFAULT_RETURN_FULL_ITEMS, type=_bool_argument
    )

    if "intersects" in request_args:
        raise werkzeug.exceptions.NotImplemented(
            "'intersects' queries are not yet supported, sorry."
        )

    if limit > PAGE_SIZE_LIMIT:
        abort(
            400,
            f"Max page size is {PAGE_SIZE_LIMIT}. "
            f"Use the next links instead of a large limit.",
        )

    if bbox is not None and len(bbox) != 4:
        abort(400, "Expected bbox of size 4. [min lon, min lat, max long, max lat]")

    if time is not None:
        time = _parse_time_range(time)

    def next_page_url(next_offset):
        return url_for(
            ".stac_search",
            collections=product_names,
            bbox="{},{},{},{}".format(*bbox) if bbox else None,
            time=_unparse_time_range(time) if time else None,
            ids=",".join(map(str, ids)) if ids else None,
            limit=limit,
            _o=next_offset,
            _full=full_information,
        )

    feature_collection = search_stac_items(
        product_names=product_names,
        bbox=bbox,
        time=time,
        dataset_ids=ids,
        limit=limit,
        offset=offset,
        get_next_url=next_page_url,
        full_information=full_information,
        require_geometry=require_geometry,
        include_total_count=include_total_count,
    )

    feature_collection["links"].extend(
        (
            dict(
                href=url_for(".stac_search"),
                rel="search",
                title="Search",
                type="application/geo+json",
                method="GET",
            ),
            dict(
                href=url_for(".stac_search"),
                rel="search",
                title="Search",
                type="application/geo+json",
                method="POST",
            ),
        )
    )
    return feature_collection
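A hedged usage sketch tying the pieces together: because stac_search wraps POST JSON bodies in a TypeConversionDict, _handle_search_request can read GET query strings and JSON payloads through the same get(..., type=...) interface. The values below are illustrative; 'example_product' is a placeholder name, and a real call would need a configured ODC index behind search_stac_items.

    from werkzeug.datastructures import TypeConversionDict

    args = TypeConversionDict({'bbox': '112.0,-44.0,154.0,-9.0', 'limit': '5'})
    feature_collection = _handle_search_request(args, ['example_product'])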