Example #1
def validate_idv_request(request_data):
    _validate_required_parameters(request_data, ["award_id"])
    award_id, piid = _validate_award_id(request_data)

    return {
        "account_level": "treasury_account",
        "download_types": [
            "idv_orders",
            "idv_transaction_history",
            "idv_federal_account_funding",
        ],
        "file_format": request_data.get("file_format", "csv"),
        "include_file_description": {
            "source": settings.IDV_DOWNLOAD_README_FILE_PATH,
            "destination": "readme.txt",
        },
        "piid": piid,
        "is_for_idv": True,
        "filters": {
            "idv_award_id": award_id,
            "award_type_codes": tuple(set(contract_type_mapping) | set(idv_type_mapping)),
        },
        "limit": parse_limit(request_data),
        "include_data_dictionary": True,
    }
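For reference, here is a minimal, self-contained sketch of what a helper like _validate_required_parameters might do. The real helper is not shown in this listing, so the exception class and message below are assumptions modeled on the inline check that appears in Examples #5 and #6.

class InvalidParameterException(Exception):
    """Stand-in for the project's exception type (assumption)."""


def _validate_required_parameters(request_data, required_parameters):
    # Raise if any required key is missing from the request payload.
    for required_param in required_parameters:
        if required_param not in request_data:
            raise InvalidParameterException(
                "Missing one or more required query parameters: {}".format(required_param)
            )


_validate_required_parameters({"award_id": 12345}, ["award_id"])  # passes silently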
Example #2
def validate_award_request(request_data):
    """Analyze request and raise any formatting errors as Exceptions"""

    _validate_required_parameters(request_data, ["award_levels", "filters"])
    filters = _validate_filters(request_data)
    award_levels = _validate_award_levels(request_data)

    json_request = {"download_types": award_levels, "filters": {}}

    # Override all other filters if the keyword filter is provided in a year-constrained download
    # Make sure this happens after the award_levels check
    constraint_type = request_data.get("constraint_type")
    if constraint_type == "year" and "elasticsearch_keyword" in filters:
        json_request["filters"] = {
            "elasticsearch_keyword": filters["elasticsearch_keyword"],
            "award_type_codes": list(award_type_mapping.keys()),
        }
        json_request["limit"] = settings.MAX_DOWNLOAD_LIMIT
        return json_request

    # Set defaults of non-required parameters
    json_request["columns"] = request_data.get("columns", [])
    json_request["file_format"] = request_data.get("file_format", "csv")

    check_types_and_assign_defaults(filters, json_request["filters"],
                                    SHARED_AWARD_FILTER_DEFAULTS)

    json_request["filters"]["award_type_codes"] = _validate_award_type_codes(
        filters)

    _validate_and_update_locations(filters, json_request)
    _validate_tas_codes(filters, json_request)

    # Validate time periods
    total_range_count = validate_time_periods(filters, json_request)

    if constraint_type == "row_count":
        # Validate limit exists and is below MAX_DOWNLOAD_LIMIT
        json_request["limit"] = parse_limit(request_data)

        # Validate row_count-constrained filter types and assign defaults
        check_types_and_assign_defaults(filters, json_request["filters"],
                                        ROW_CONSTRAINT_FILTER_DEFAULTS)
    elif constraint_type == "year":
        # Validate combined total dates within one year (allow for leap years)
        if total_range_count > 366:
            raise InvalidParameterException(
                "Invalid Parameter: time_period total days must be within a year"
            )

        # Validate year-constrained filter types and assign defaults
        check_types_and_assign_defaults(filters, json_request["filters"],
                                        YEAR_CONSTRAINT_FILTER_DEFAULTS)
    else:
        raise InvalidParameterException(
            'Invalid parameter: constraint_type must be "row_count" or "year"')

    return json_request
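Two hypothetical request payloads for validate_award_request, shown only to illustrate the constraint_type branches above; the field values are made up and may not match the project's actual VALUE_MAPPINGS or award_type_mapping keys.

# Hypothetical inputs for illustration only (values are assumptions).
row_count_request = {
    "award_levels": ["prime_awards"],        # must be keys of VALUE_MAPPINGS
    "filters": {"award_type_codes": ["A"]},  # must be keys of award_type_mapping
    "constraint_type": "row_count",
    "limit": 10000,                          # checked by parse_limit
}

year_constrained_request = {
    "award_levels": ["prime_awards"],
    "filters": {"elasticsearch_keyword": "transport"},  # short-circuits all other filters
    "constraint_type": "year",
}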
Example #3
def validate_assistance_request(request_data):
    _validate_required_parameters(request_data, ["award_id"])
    award_id, _, fain, uri, generated_unique_award_id = _validate_award_id(request_data)

    request_data["file_format"] = str(request_data.get("file_format", "csv")).lower()
    _validate_file_format(request_data)

    # Aggregate assistance awards (generated IDs containing "AGG") are keyed by URI, all others by FAIN
    award = fain
    if "AGG" in generated_unique_award_id:
        award = uri

    return {
        "account_level": "treasury_account",
        "download_types": [
            "assistance_transactions",
            "sub_grants",
            "assistance_federal_account_funding",
        ],
        "file_format": request_data["file_format"],
        "include_file_description": {
            "source": settings.ASSISTANCE_DOWNLOAD_README_FILE_PATH,
            "destination": "AssistanceAwardSummary_download_readme.txt",
        },
        "award_id": award_id,
        "assistance_id": award,
        "is_for_idv": False,
        "is_for_contract": False,
        "is_for_assistance": True,
        "filters": {
            "award_id": award_id,
            "award_type_codes": tuple(set(assistance_type_mapping)),
        },
        "limit": parse_limit(request_data),
        "include_data_dictionary": True,
        "columns": request_data.get("columns", []),
    }
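The FAIN/URI selection above can be isolated as a tiny standalone sketch; the generated award IDs below are invented purely for illustration.

def pick_assistance_id(generated_unique_award_id, fain, uri):
    # Aggregate assistance awards (generated IDs containing "AGG") are keyed by
    # URI; all other assistance awards are keyed by FAIN.
    return uri if "AGG" in generated_unique_award_id else fain


print(pick_assistance_id("ASST_AGG_EXAMPLE", fain=None, uri="URI-1"))   # URI-1
print(pick_assistance_id("ASST_NON_EXAMPLE", fain="FAIN-9", uri=None))  # FAIN-9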
Example #4
def validate_contract_request(request_data):
    _validate_required_parameters(request_data, ["award_id"])
    award_id, piid, _, _, _ = _validate_award_id(request_data)

    request_data["file_format"] = str(request_data.get("file_format", "csv")).lower()
    _validate_file_format(request_data)

    return {
        "account_level": "treasury_account",
        "download_types": [
            "sub_contracts",
            "contract_transactions",
            "contract_federal_account_funding",
        ],
        "file_format": request_data["file_format"],
        "include_file_description": {
            "source": settings.CONTRACT_DOWNLOAD_README_FILE_PATH,
            "destination": "ContractAwardSummary_download_readme.txt",
        },
        "award_id": award_id,
        "piid": piid,
        "is_for_idv": False,
        "is_for_contract": True,
        "is_for_assistance": False,
        "filters": {
            "award_id": award_id,
            "award_type_codes": tuple(set(contract_type_mapping)),
        },
        "limit": parse_limit(request_data),
        "include_data_dictionary": True,
        "columns": request_data.get("columns", []),
    }
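_validate_file_format is called in Examples #3 and #4 but not included in this listing; a minimal sketch of such a check follows. The accepted formats and the use of ValueError are assumptions for illustration, not the project's actual implementation.

VALID_FILE_FORMATS = {"csv", "tsv", "pstxt"}  # assumed set of accepted formats


def _validate_file_format(request_data):
    # The real code presumably raises the project's InvalidParameterException;
    # ValueError keeps this sketch self-contained.
    file_format = request_data.get("file_format", "csv")
    if file_format not in VALID_FILE_FORMATS:
        raise ValueError("Invalid file_format: {}".format(file_format))


_validate_file_format({"file_format": "csv"})  # passes silently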
Example #5
    def validate_award_request(self, request_data):
        """Analyze request and raise any formatting errors as Exceptions"""
        json_request = {}
        constraint_type = request_data.get('constraint_type', None)

        # Validate required parameters
        for required_param in ['award_levels', 'filters']:
            if required_param not in request_data:
                raise InvalidParameterException(
                    'Missing one or more required query parameters: {}'.format(required_param)
                )

        if not isinstance(request_data['award_levels'], list):
            raise InvalidParameterException('Award levels parameter not provided as a list')
        elif len(request_data['award_levels']) == 0:
            raise InvalidParameterException('At least one award level is required.')
        for award_level in request_data['award_levels']:
            if award_level not in VALUE_MAPPINGS:
                raise InvalidParameterException('Invalid award_level: {}'.format(award_level))
        json_request['download_types'] = request_data['award_levels']

        # Override all other filters if the keyword filter is provided in a year-constrained download
        # Make sure this happens after the award_levels check
        if constraint_type == 'year' and 'elasticsearch_keyword' in request_data['filters']:
            json_request['filters'] = {
                'elasticsearch_keyword': request_data['filters']['elasticsearch_keyword'],
                'award_type_codes': list(award_type_mapping.keys()),
            }
            json_request['limit'] = settings.MAX_DOWNLOAD_LIMIT
            return json_request

        if not isinstance(request_data['filters'], dict):
            raise InvalidParameterException('Filters parameter not provided as a dict')
        elif len(request_data['filters']) == 0:
            raise InvalidParameterException('At least one filter is required.')
        json_request['filters'] = {}

        # Set defaults of non-required parameters
        json_request['columns'] = request_data.get('columns', [])
        json_request['file_format'] = request_data.get('file_format', 'csv')

        # Validate shared filter types and assign defaults
        filters = request_data['filters']
        check_types_and_assign_defaults(filters, json_request['filters'], SHARED_AWARD_FILTER_DEFAULTS)

        # Validate award type types
        if not filters.get('award_type_codes', None) or len(filters['award_type_codes']) < 1:
            filters['award_type_codes'] = list(award_type_mapping.keys())
        for award_type_code in filters['award_type_codes']:
            if award_type_code not in award_type_mapping:
                raise InvalidParameterException('Invalid award_type: {}'.format(award_type_code))
        json_request['filters']['award_type_codes'] = filters['award_type_codes']

        # Validate locations
        for location_filter in ['place_of_performance_locations', 'recipient_locations']:
            if filters.get(location_filter):
                for location_dict in filters[location_filter]:
                    if not isinstance(location_dict, dict):
                        raise InvalidParameterException('Location is not a dictionary: {}'.format(location_dict))
                    location_error_handling(location_dict.keys())
                json_request['filters'][location_filter] = filters[location_filter]

        # Validate time periods
        total_range_count = validate_time_periods(filters, json_request)

        if constraint_type == 'row_count':
            # Validate limit exists and is below MAX_DOWNLOAD_LIMIT
            json_request['limit'] = parse_limit(request_data)

            # Validate row_count-constrained filter types and assign defaults
            check_types_and_assign_defaults(filters, json_request['filters'], ROW_CONSTRAINT_FILTER_DEFAULTS)
        elif constraint_type == 'year':
            # Validate combined total dates within one year (allow for leap years)
            if total_range_count > 366:
                raise InvalidParameterException('Invalid Parameter: time_period total days must be within a year')

            # Validate year-constrained filter types and assign defaults
            check_types_and_assign_defaults(filters, json_request['filters'], YEAR_CONSTRAINT_FILTER_DEFAULTS)
        else:
            raise InvalidParameterException('Invalid parameter: constraint_type must be "row_count" or "year"')

        return json_request
Example #6
    def validate_award_request(self, request_data):
        """Analyze request and raise any formatting errors as Exceptions"""
        json_request = {}
        constraint_type = request_data.get('constraint_type', None)

        # Validate required parameters
        for required_param in ['award_levels', 'filters']:
            if required_param not in request_data:
                raise InvalidParameterException(
                    'Missing one or more required query parameters: {}'.format(
                        required_param))

        if not isinstance(request_data['award_levels'], list):
            raise InvalidParameterException(
                'Award levels parameter not provided as a list')
        elif len(request_data['award_levels']) == 0:
            raise InvalidParameterException(
                'At least one award level is required.')
        for award_level in request_data['award_levels']:
            if award_level not in VALUE_MAPPINGS:
                raise InvalidParameterException(
                    'Invalid award_level: {}'.format(award_level))
        json_request['download_types'] = request_data['award_levels']

        # Override all other filters if the keyword filter is provided in a year-constrained download
        # Make sure this happens after the award_levels check
        if constraint_type == 'year' and 'elasticsearch_keyword' in request_data['filters']:
            json_request['filters'] = {
                'elasticsearch_keyword': request_data['filters']['elasticsearch_keyword'],
                'award_type_codes': list(award_type_mapping.keys()),
            }
            json_request['limit'] = settings.MAX_DOWNLOAD_LIMIT
            return json_request

        if not isinstance(request_data['filters'], dict):
            raise InvalidParameterException(
                'Filters parameter not provided as a dict')
        elif len(request_data['filters']) == 0:
            raise InvalidParameterException('At least one filter is required.')
        json_request['filters'] = {}

        # Set defaults of non-required parameters
        json_request['columns'] = request_data.get('columns', [])
        json_request['file_format'] = request_data.get('file_format', 'csv')

        # Validate shared filter types and assign defaults
        filters = request_data['filters']
        check_types_and_assign_defaults(filters, json_request['filters'],
                                        SHARED_AWARD_FILTER_DEFAULTS)

        # Validate award type types
        if not filters.get('award_type_codes', None) or len(filters['award_type_codes']) < 1:
            filters['award_type_codes'] = list(award_type_mapping.keys())
        for award_type_code in filters['award_type_codes']:
            if award_type_code not in award_type_mapping:
                raise InvalidParameterException(
                    'Invalid award_type: {}'.format(award_type_code))
        json_request['filters']['award_type_codes'] = filters['award_type_codes']

        # Validate locations
        for location_filter in ['place_of_performance_locations', 'recipient_locations']:
            if filters.get(location_filter):
                for location_dict in filters[location_filter]:
                    if not isinstance(location_dict, dict):
                        raise InvalidParameterException(
                            'Location is not a dictionary: {}'.format(location_dict))
                    location_error_handling(location_dict.keys())
                json_request['filters'][location_filter] = filters[location_filter]

        # Validate time periods
        total_range_count = validate_time_periods(filters, json_request)

        if constraint_type == 'row_count':
            # Validate limit exists and is below MAX_DOWNLOAD_LIMIT
            json_request['limit'] = parse_limit(request_data)

            # Validate row_count-constrained filter types and assign defaults
            check_types_and_assign_defaults(filters, json_request['filters'],
                                            ROW_CONSTRAINT_FILTER_DEFAULTS)
        elif constraint_type == 'year':
            # Validate combined total dates within one year (allow for leap years)
            if total_range_count > 366:
                raise InvalidParameterException(
                    'Invalid Parameter: time_period total days must be within a year'
                )

            # Validate year-constrained filter types and assign defaults
            check_types_and_assign_defaults(filters, json_request['filters'],
                                            YEAR_CONSTRAINT_FILTER_DEFAULTS)
        else:
            raise InvalidParameterException(
                'Invalid parameter: constraint_type must be "row_count" or "year"'
            )

        return json_request