def post(self, request):
    """Kick off a transaction download, selecting the Elasticsearch-backed
    transaction level when the experimental API flag is set on the request.

    Mutates ``request.data`` in place before delegating to the shared
    BaseDownloadViewSet.post implementation.
    """
    use_elasticsearch = is_experimental_elasticsearch_api(request)
    if use_elasticsearch:
        logger.info("Using experimental Elasticsearch functionality for '/download/transactions'")
    transaction_level = "elasticsearch_transactions" if use_elasticsearch else "transactions"
    request.data["award_levels"] = [transaction_level, "sub_awards"]
    request.data["constraint_type"] = "row_count"
    return BaseDownloadViewSet.post(self, request, "award")
def process_cache_response(self, view_instance, view_method, request, args, kwargs):
    """Serve the view's response from cache when possible, populating the cache on a miss.

    Adds a "Cache-Trace" header describing what happened ("no-cache",
    "set-cache", or "hit-cache") and a "key" header carrying the cache key.
    Requests flagged for the experimental Elasticsearch API skip the cache
    entirely. Cache read/write failures are logged and swallowed so caching
    problems never break the response itself.
    """
    if is_experimental_elasticsearch_api(request):
        # bypass cache altogether
        response = view_method(view_instance, request, *args, **kwargs)
        response = view_instance.finalize_response(request, response, *args, **kwargs)
        response["Cache-Trace"] = "no-cache"
        return response
    key = self.calculate_key(
        view_instance=view_instance, view_method=view_method, request=request, args=args, kwargs=kwargs
    )
    response = None
    try:
        response = self.cache.get(key)
    except Exception:
        # Best-effort: a cache backend failure degrades to a cache miss.
        msg = "Problem while retrieving key [{k}] from cache for path:'{p}'"
        logger.exception(msg.format(k=key, p=str(request.path)))
    if not response:
        # Cache miss (or unreadable entry): run the view and finalize the response.
        response = view_method(view_instance, request, *args, **kwargs)
        response = view_instance.finalize_response(request, response, *args, **kwargs)
        # While returning a Queryset is functional most of the time, it isn't
        # fully supported by Django Rest Framework. This check was inserted
        # in local mode to catch if a Queryset is being returned by the view
        # which could cause an exception when setting the cache
        if settings.IS_LOCAL and response and not response.is_rendered:
            if contains_queryset(response.data):
                raise RuntimeError(
                    "Your view is returning a QuerySet. QuerySets are not"
                    " really designed to be pickled and can cause caching"
                    " issues. Please materialize the QuerySet using a List"
                    " or some other more primitive data structure."
                )
        response["Cache-Trace"] = "no-cache"
        response.render()  # should be rendered, before pickling while storing to cache
        # Cache successful responses; error responses (>= 400) are cached only
        # when self.cache_errors is set. NOTE(review): logging cache_errors at
        # error level on every cached write looks intentional but is unusual —
        # confirm against the caching mixin this class derives from.
        if not response.status_code >= 400 or self.cache_errors:
            if self.cache_errors:
                logger.error(self.cache_errors)
            try:
                self.cache.set(key, response, self.timeout)
                response["Cache-Trace"] = "set-cache"
            except Exception:
                # Best-effort: failure to store never breaks the response.
                msg = "Problem while writing to cache: path:'{p}' data:'{d}'"
                logger.exception(msg.format(p=str(request.path), d=str(request.data)))
    else:
        response["Cache-Trace"] = "hit-cache"
    # A response unpickled from cache lacks this Django internal attribute;
    # restore it so downstream middleware can append to it safely.
    if not hasattr(response, "_closable_objects"):
        response._closable_objects = []
    response["key"] = key
    return response
def post(self, request: Request) -> Response:
    """Return spending aggregated over time (by the requested grouping) for the given filters.

    Routes to Elasticsearch when the experimental flag is set (and the request
    is not for subawards); otherwise queries the database and back-fills any
    time periods missing from the result set.
    """
    self.original_filters = request.data.get("filters")
    json_request = self.validate_request_data(request.data)
    self.group = GROUPING_LOOKUP[json_request["group"]]
    self.subawards = json_request["subawards"]
    self.filters = json_request["filters"]
    self.elasticsearch = is_experimental_elasticsearch_api(request)
    if not self.elasticsearch:
        mirror_request_to_elasticsearch(request)
    # time_period is optional so we're setting a default window from API_SEARCH_MIN_DATE to end of the current FY.
    # Otherwise, users will see blank results for years
    current_fy = generate_fiscal_year(datetime.now(timezone.utc))
    if self.group == "fiscal_year":
        # Federal fiscal years end September 30.
        end_date = "{}-09-30".format(current_fy)
    else:
        current_fiscal_month = generate_fiscal_month(datetime.now(timezone.utc))
        # NOTE(review): monthrange() expects a calendar (year, month) pair, but
        # this passes the fiscal year and fiscal month number — e.g. fiscal
        # month 5 of FY2021 is February 2021, yet monthrange(2021, 5) returns
        # May's day count. Confirm whether this fiscal-date string is the
        # intended convention downstream. Also note the month/day are not
        # zero-padded, unlike the "-09-30" branch above.
        days_in_month = monthrange(current_fy, current_fiscal_month)[1]
        end_date = f"{current_fy}-{current_fiscal_month}-{days_in_month}"
    default_time_period = {"start_date": settings.API_SEARCH_MIN_DATE, "end_date": end_date}
    time_periods = self.filters.get("time_period", [default_time_period])
    if self.elasticsearch and not self.subawards:
        logger.info("Using experimental Elasticsearch functionality for 'spending_over_time'")
        results = self.query_elasticsearch(time_periods)
    else:
        db_results, values = self.database_data_layer()
        # Pad the queryset with zero rows for any requested periods that had no spending.
        results = bolster_missing_time_periods(
            filter_time_periods=time_periods,
            queryset=db_results,
            date_range_type=values[-1],
            columns={"aggregated_amount": "aggregated_amount"},
        )
    # NOTE(review): self.original_filters.keys() raises AttributeError when the
    # request body omitted "filters" — presumably validate_request_data rejects
    # that case first; verify.
    return Response(
        OrderedDict(
            [
                ("group", self.group),
                ("results", results),
                (
                    "messages",
                    get_generic_filters_message(
                        self.original_filters.keys(), [elem["name"] for elem in AWARD_FILTER]
                    ),
                ),
            ]
        )
    )
def post(self, request: Request) -> Response:
    """Validate the request payload, mark whether the experimental
    Elasticsearch path applies, and return the search results."""
    subawards_rule = {"name": "subawards", "key": "subawards", "type": "boolean", "default": False, "optional": True}
    # Deep-copy the shared rule lists so TinyShield cannot mutate the module-level definitions.
    rules = [subawards_rule] + copy.deepcopy(AWARD_FILTER) + copy.deepcopy(PAGINATION)
    original_filters = request.data.get("filters")
    validated_payload = TinyShield(rules).block(request.data)
    use_elasticsearch = is_experimental_elasticsearch_api(request)
    validated_payload["elasticsearch"] = use_elasticsearch
    if not use_elasticsearch:
        mirror_request_to_elasticsearch(request)
    return Response(self.perform_search(validated_payload, original_filters))
def post(self, request):
    """Return counts of matching awards (or subawards) grouped by award type."""
    validation_rules = [
        {"name": "subawards", "key": "subawards", "type": "boolean", "default": False},
        {
            "name": "object_class",
            "key": "filter|object_class",
            "type": "array",
            "array_type": "text",
            "text_type": "search",
        },
        {
            "name": "program_activity",
            "key": "filter|program_activity",
            "type": "array",
            "array_type": "integer",
            "array_max": maxsize,
        },
    ]
    validation_rules.extend(copy.deepcopy(AWARD_FILTER))
    validation_rules.extend(copy.deepcopy(PAGINATION))
    payload = TinyShield(validation_rules).block(request.data)
    subawards = payload["subawards"]
    filters = add_date_range_comparison_types(
        payload.get("filters", None), subawards, gte_date_type="action_date", lte_date_type="date_signed"
    )
    # Experimental Elasticsearch path (prime awards only) short-circuits the DB logic.
    if is_experimental_elasticsearch_api(request) and not subawards:
        logger.info("Using experimental Elasticsearch functionality for 'spending_by_award_count'")
        return Response({"results": self.query_elasticsearch(filters), "messages": [get_time_period_message()]})
    if filters is None:
        raise InvalidParameterException("Missing required request parameters: 'filters'")
    if subawards:
        zero_counts = {"subcontracts": 0, "subgrants": 0}
    else:
        zero_counts = {"contracts": 0, "idvs": 0, "grants": 0, "direct_payments": 0, "loans": 0, "other": 0}
    if "award_type_codes" in filters and "no intersection" in filters["award_type_codes"]:
        # "Special case": there will never be results when the website provides this value
        return Response({"results": zero_counts})
    counts = self.handle_subawards(filters) if subawards else self.handle_awards(filters, zero_counts)
    return Response({"results": counts, "messages": [get_time_period_message()]})
def process_cache_response(self, view_instance, view_method, request, args, kwargs):
    """Serve the view's response from cache when possible, populating the cache on a miss.

    Adds a "Cache-Trace" header describing what happened ("no-cache",
    "set-cache", or "hit-cache") and a "key" header carrying the cache key.
    Requests flagged for the experimental Elasticsearch API skip the cache
    entirely. Cache read/write failures are logged and swallowed so caching
    problems never break the response itself.
    """
    if is_experimental_elasticsearch_api(request):
        # bypass cache altogether
        response = view_method(view_instance, request, *args, **kwargs)
        response = view_instance.finalize_response(request, response, *args, **kwargs)
        response["Cache-Trace"] = "no-cache"
        return response
    key = self.calculate_key(
        view_instance=view_instance, view_method=view_method, request=request, args=args, kwargs=kwargs
    )
    response = None
    try:
        response = self.cache.get(key)
    except Exception:
        # Best-effort: a cache backend failure degrades to a cache miss.
        msg = "Problem while retrieving key [{k}] from cache for path:'{p}'"
        logger.exception(msg.format(k=key, p=str(request.path)))
    if not response:
        # Cache miss (or unreadable entry): run the view and finalize the response.
        response = view_method(view_instance, request, *args, **kwargs)
        response = view_instance.finalize_response(request, response, *args, **kwargs)
        response["Cache-Trace"] = "no-cache"
        response.render()  # should be rendered, before pickling while storing to cache
        # Cache successful responses; error responses (>= 400) are cached only
        # when self.cache_errors is set. NOTE(review): logging cache_errors at
        # error level on every cached write looks intentional but is unusual —
        # confirm against the caching mixin this class derives from.
        if not response.status_code >= 400 or self.cache_errors:
            if self.cache_errors:
                logger.error(self.cache_errors)
            try:
                self.cache.set(key, response, self.timeout)
                response["Cache-Trace"] = "set-cache"
            except Exception:
                # Best-effort: failure to store never breaks the response.
                msg = "Problem while writing to cache: path:'{p}' data:'{d}'"
                logger.exception(msg.format(p=str(request.path), d=str(request.data)))
    else:
        response["Cache-Trace"] = "hit-cache"
    # A response unpickled from cache lacks this Django internal attribute;
    # restore it so downstream middleware can append to it safely.
    if not hasattr(response, "_closable_objects"):
        response._closable_objects = []
    response["key"] = key
    return response
def post(self, request):
    """Return all awards matching the provided filters and limits"""
    self.original_filters = request.data.get("filters")
    json_request = self.validate_request_data(request.data)
    self.is_subaward = json_request["subawards"]
    # Field/column constants differ between prime awards and subawards.
    self.constants = GLOBAL_MAP["subaward"] if self.is_subaward else GLOBAL_MAP["award"]
    self.filters = add_date_range_comparison_types(
        json_request.get("filters"), self.is_subaward, gte_date_type="action_date", lte_date_type="date_signed"
    )
    self.fields = json_request["fields"]
    self.pagination = {
        "limit": json_request["limit"],
        "lower_bound": (json_request["page"] - 1) * json_request["limit"],
        "page": json_request["page"],
        # Default the sort key to the first requested field when none is given.
        "sort_key": json_request.get("sort") or self.fields[0],
        "sort_order": json_request["order"],
        # Fetch one extra row past the page so "has_next" can be determined.
        "upper_bound": json_request["page"] * json_request["limit"] + 1,
    }
    self.elasticsearch = is_experimental_elasticsearch_api(request)
    if not self.elasticsearch:
        mirror_request_to_elasticsearch(request)
    if self.if_no_intersection():  # Like an exception, but API response is a HTTP 200 with a JSON payload
        return Response(self.populate_response(results=[], has_next=False))
    # Validate after the no-intersection short-circuit, so that special case
    # still returns an empty 200 rather than a validation error.
    raise_if_award_types_not_valid_subset(self.filters["award_type_codes"], self.is_subaward)
    raise_if_sort_key_not_valid(self.pagination["sort_key"], self.fields, self.is_subaward)
    if self.elasticsearch and not self.is_subaward:
        # Cursor-style pagination hints used only by the Elasticsearch path.
        self.last_record_unique_id = json_request.get("last_record_unique_id")
        self.last_record_sort_value = json_request.get("last_record_sort_value")
        logger.info("Using experimental Elasticsearch functionality for 'spending_by_award'")
        return Response(self.construct_es_response(self.query_elasticsearch()))
    return Response(self.create_response(self.construct_queryset()))
def post(self, request: Request) -> Response:
    """Return all budget function/subfunction titles matching the provided search text"""
    category_choices = [
        "awarding_agency",
        "awarding_subagency",
        "funding_agency",
        "funding_subagency",
        "recipient_duns",
        "recipient_parent_duns",
        "cfda",
        "psc",
        "naics",
        "county",
        "district",
        "country",
        "state_territory",
        "federal_account",
    ]
    rules = [
        {"name": "category", "key": "category", "type": "enum", "enum_values": category_choices, "optional": False},
        {"name": "subawards", "key": "subawards", "type": "boolean", "default": False, "optional": True},
    ]
    # Deep-copy the shared rule lists so TinyShield cannot mutate the module-level definitions.
    rules.extend(copy.deepcopy(AWARD_FILTER))
    rules.extend(copy.deepcopy(PAGINATION))
    # Apply/enforce POST body schema and data validation in request
    original_filters = request.data.get("filters")
    validated_payload = TinyShield(rules).block(request.data)
    validated_payload["elasticsearch"] = is_experimental_elasticsearch_api(request)
    if not validated_payload["elasticsearch"]:
        mirror_request_to_elasticsearch(request)
    # Execute the business logic for the endpoint and return a python dict to be converted to a Django response
    handler_classes = {
        "awarding_agency": AwardingAgencyViewSet,
        "awarding_subagency": AwardingSubagencyViewSet,
        "cfda": CfdaViewSet,
        "country": CountryViewSet,
        "county": CountyViewSet,
        "district": DistrictViewSet,
        "federal_account": FederalAccountViewSet,
        "funding_agency": FundingAgencyViewSet,
        "funding_subagency": FundingSubagencyViewSet,
        "naics": NAICSViewSet,
        "psc": PSCViewSet,
        "recipient_duns": RecipientDunsViewSet,
        "state_territory": StateTerritoryViewSet,
    }
    business_logic_lookup = {name: cls().perform_search for name, cls in handler_classes.items()}
    business_logic_func = business_logic_lookup.get(validated_payload["category"])
    if business_logic_func:
        response = business_logic_func(validated_payload, original_filters)
    else:
        # Categories without a dedicated handler fall back to the generic implementation.
        response = BusinessLogic(validated_payload, original_filters).results()
    return Response(response)
def post(self, request: Request) -> Response:
    """Return spending aggregated by geographic area (state, county, or
    congressional district) for either place of performance or recipient
    location, using subaward tables, Elasticsearch, or database matviews
    depending on the request.
    """
    models = [
        {"name": "subawards", "key": "subawards", "type": "boolean", "default": False},
        {
            "name": "scope",
            "key": "scope",
            "type": "enum",
            "optional": False,
            "enum_values": ["place_of_performance", "recipient_location"],
        },
        {
            "name": "geo_layer",
            "key": "geo_layer",
            "type": "enum",
            "optional": False,
            "enum_values": ["state", "county", "district"],
        },
        {
            "name": "geo_layer_filters",
            "key": "geo_layer_filters",
            "type": "array",
            "array_type": "text",
            "text_type": "search",
        },
    ]
    models.extend(copy.deepcopy(AWARD_FILTER))
    models.extend(copy.deepcopy(PAGINATION))
    original_filters = request.data.get("filters")
    json_request = TinyShield(models).block(request.data)
    # Elasticsearch aggregation-key suffix per geographic layer.
    agg_key_dict = {
        "county": "county_agg_key",
        "district": "congressional_agg_key",
        "state": "state_agg_key",
    }
    # Database column suffix per geographic layer.
    location_dict = {"county": "county_code", "district": "congressional_code", "state": "state_code"}
    # Field-name prefix per scope ("pop" = place of performance).
    model_dict = {
        "place_of_performance": "pop",
        "recipient_location": "recipient_location",
        # 'subawards_place_of_performance': 'pop',
        # 'subawards_recipient_location': 'recipient_location'
    }
    self.scope_field_name = model_dict[json_request["scope"]]
    self.agg_key = f"{self.scope_field_name}_{agg_key_dict[json_request['geo_layer']]}"
    # NOTE(review): self.filters is None when the body omits "filters"; the
    # Elasticsearch branch below does `in self.filters`, which would raise —
    # confirm upstream validation guarantees filters are present.
    self.filters = json_request.get("filters")
    self.geo_layer = GeoLayer(json_request["geo_layer"])
    self.geo_layer_filters = json_request.get("geo_layer_filters")
    self.loc_field_name = location_dict[self.geo_layer.value]
    self.loc_lookup = f"{self.scope_field_name}_{self.loc_field_name}"
    self.subawards = json_request["subawards"]
    if self.subawards:
        # We do not use matviews for Subaward filtering, just the Subaward download filters
        self.model_name = SubawardView
        self.queryset = subaward_filter(self.filters)
        self.obligation_column = "amount"
        result = self.query_django()
    elif is_experimental_elasticsearch_api(request):
        if self.scope_field_name == "pop":
            scope_filter_name = "place_of_performance_scope"
        else:
            scope_filter_name = "recipient_scope"
        # Only search for values within USA, but don't overwrite a user's search
        if scope_filter_name not in self.filters:
            self.filters[scope_filter_name] = "domestic"
        self.obligation_column = "generated_pragmatic_obligation"
        filter_query = QueryWithFilters.generate_transactions_elasticsearch_query(self.filters)
        result = self.query_elasticsearch(filter_query)
    else:
        self.queryset, self.model_name = spending_by_geography(self.filters)
        self.obligation_column = "generated_pragmatic_obligation"
        result = self.query_django()
    # NOTE(review): original_filters.keys() raises AttributeError when the
    # request body omitted "filters" — verify TinyShield/AWARD_FILTER rules
    # make "filters" effectively required here.
    return Response(
        {
            "scope": json_request["scope"],
            "geo_layer": self.geo_layer.value,
            "results": result,
            "messages": get_generic_filters_message(
                original_filters.keys(), [elem["name"] for elem in AWARD_FILTER]
            ),
        }
    )