Example No. 1
0
    def post(
        self,
        request: Request,
        origination: Optional[str] = None,
        special_request_type: Optional[str] = None,
        validator_type: Optional[Type[DownloadValidatorBase]] = None,
    ):
        """Kick off (or reuse) a download job for this request.

        A "disaster" special request bypasses validation entirely and returns
        the latest successfully generated COVID-19 download file. Otherwise
        the request body is validated, a cached job from today is reused when
        available, and a new job is created and processed when not.
        """
        if special_request_type == "disaster":
            # Latest successfully generated COVID-19 file, if any exists.
            successful_covid_jobs = DownloadJob.objects.filter(
                file_name__startswith=settings.COVID19_DOWNLOAD_FILENAME_PREFIX,
                error_message__isnull=True,
            )
            latest_file = (
                successful_covid_jobs.order_by("-update_date").values_list("file_name", flat=True).first()
            )
            return self.get_download_response(file_name=latest_file)

        if validator_type is None:
            # This should only ever occur for developers, but helps to track down possible issues
            raise InvalidParameterException(
                "Invalid parameters: require valid 'special_request_type' or 'validator_type'"
            )

        json_request = order_nested_object(validator_type(request.data).json_request)
        ordered_json_request = json.dumps(json_request)

        # Check if the same request has been called today
        # TODO!!! Use external_data_load_date to determine data freshness
        today = datetime.strftime(datetime.now(timezone.utc), "%Y-%m-%d")
        cached_download = (
            DownloadJob.objects.filter(json_request=ordered_json_request, update_date__gte=today)
            .exclude(job_status_id=JOB_STATUS_DICT["failed"])
            .values("download_job_id", "file_name")
            .first()
        )

        if cached_download and not settings.IS_LOCAL:
            # By returning the cached files, there should be no duplicates on a daily basis
            write_to_log(
                message=f"Generating file from cached download job ID: {cached_download['download_job_id']}"
            )
            return self.get_download_response(file_name=cached_download["file_name"])

        final_output_zip_name = create_unique_filename(json_request, origination=origination)
        download_job = DownloadJob.objects.create(
            job_status_id=JOB_STATUS_DICT["ready"],
            file_name=final_output_zip_name,
            json_request=ordered_json_request,
        )

        log_new_download_job(request, download_job)
        self.process_request(download_job)

        return self.get_download_response(file_name=final_output_zip_name)
    def post(
        self,
        request: Request,
        request_type: Optional[DownloadRequestType] = None,
        origination: Optional[str] = None,
        validator_type: Optional[Type[DownloadValidatorBase]] = None,
    ):
        """Kick off (or reuse) a download job for this request.

        A DISASTER request bypasses validation and returns the latest
        successfully generated COVID-19 download file. Otherwise the request
        body is validated (via ``validator_type`` when provided, else via the
        ``request_type``'s validate function), a cached job from today is
        reused when available, and a new job is created and processed when not.

        Raises:
            InvalidParameterException: when neither ``request_type`` nor
                ``validator_type`` is supplied.
        """
        if request_type == DownloadRequestType.DISASTER:
            filename = (DownloadJob.objects.filter(
                file_name__startswith=settings.
                COVID19_DOWNLOAD_FILENAME_PREFIX,
                error_message__isnull=True).order_by(
                    "-update_date").values_list("file_name",
                                                flat=True).first())
            return self.get_download_response(file_name=filename)

        if validator_type is not None:
            validator = validator_type(request.data)
            json_request = validator.json_request
        elif request_type is not None:
            json_request = request_type.value["validate_func"](request.data)
            json_request["request_type"] = request_type.value["name"]
        else:
            # Previously this fell through to an opaque AttributeError on
            # ``None.value``; raise the explicit error used by the sibling
            # endpoint instead. Should only ever occur for developers.
            raise InvalidParameterException(
                "Invalid parameters: require valid 'request_type' or 'validator_type'"
            )
        json_request = order_nested_object(json_request)
        ordered_json_request = json.dumps(json_request)

        # Check if the same request has been called today
        # TODO!!! Use external_data_load_date to determine data freshness
        updated_date_timestamp = datetime.strftime(datetime.now(timezone.utc),
                                                   "%Y-%m-%d")
        cached_download = (DownloadJob.objects.filter(
            json_request=ordered_json_request,
            update_date__gte=updated_date_timestamp).exclude(
                job_status_id=JOB_STATUS_DICT["failed"]).values(
                    "download_job_id", "file_name").first())

        if cached_download and not settings.IS_LOCAL:
            # By returning the cached files, there should be no duplicates on a daily basis
            write_to_log(
                message=
                f"Generating file from cached download job ID: {cached_download['download_job_id']}"
            )
            cached_filename = cached_download["file_name"]
            return self.get_download_response(file_name=cached_filename)

        final_output_zip_name = create_unique_filename(json_request,
                                                       origination=origination)
        download_job = DownloadJob.objects.create(
            job_status_id=JOB_STATUS_DICT["ready"],
            file_name=final_output_zip_name,
            json_request=ordered_json_request)

        log_new_download_job(request, download_job)
        self.process_request(download_job)

        return self.get_download_response(file_name=final_output_zip_name)
Example No. 3
0
    def post(self,
             request: Request,
             request_type: str = "award",
             origination: Optional[str] = None):
        """Validate the request body for the given download type, reuse a
        cached job from today when available, or create and process a new one.
        """
        # Map each known request type to its validator; anything else falls
        # back to account validation (mirrors the original if/elif chain).
        validators = {
            "award": validate_award_request,
            "idv": validate_idv_request,
            "contract": validate_contract_request,
            "assistance": validate_assistance_request,
        }
        validate = validators.get(request_type, validate_account_request)
        json_request = validate(request.data)

        json_request["request_type"] = request_type
        ordered_json_request = json.dumps(order_nested_object(json_request))

        # Check if the same request has been called today
        # TODO!!! Use external_data_load_date to determine data freshness
        today = datetime.strftime(datetime.now(timezone.utc), "%Y-%m-%d")
        cached_download = (
            DownloadJob.objects.filter(json_request=ordered_json_request, update_date__gte=today)
            .exclude(job_status_id=JOB_STATUS_DICT["failed"])
            .values("download_job_id", "file_name")
            .first()
        )

        if cached_download and not settings.IS_LOCAL:
            # By returning the cached files, there should be no duplicates on a daily basis
            write_to_log(
                message=f"Generating file from cached download job ID: {cached_download['download_job_id']}"
            )
            return self.get_download_response(file_name=cached_download["file_name"])

        final_output_zip_name = create_unique_filename(json_request, origination=origination)
        download_job = DownloadJob.objects.create(
            job_status_id=JOB_STATUS_DICT["ready"],
            file_name=final_output_zip_name,
            json_request=ordered_json_request,
        )

        log_new_download_job(request, download_job)
        self.process_request(download_job)

        return self.get_download_response(file_name=final_output_zip_name)
    def post(self, request, request_type='award'):
        """Validate the request body for the given download type, reuse a
        cached job from today when available, or create and process a new one.
        """
        # Anything other than 'award'/'idv' is validated as an account request
        # (mirrors the original if/elif chain's else branch).
        validators = {
            'award': validate_award_request,
            'idv': validate_idv_request,
        }
        validate = validators.get(request_type, validate_account_request)
        json_request = validate(request.data)

        json_request['request_type'] = request_type
        ordered_json_request = json.dumps(order_nested_object(json_request))

        # Check if the same request has been called today
        # TODO!!! Use external_data_load_date to determine data freshness
        today = datetime.strftime(datetime.now(timezone.utc), "%Y-%m-%d")
        cached_download = (
            DownloadJob.objects.filter(json_request=ordered_json_request, update_date__gte=today)
            .exclude(job_status_id=JOB_STATUS_DICT["failed"])
            .values("download_job_id", "file_name")
            .first()
        )

        if cached_download and not settings.IS_LOCAL:
            # By returning the cached files, there should be no duplicates on a daily basis
            write_to_log(
                message='Generating file from cached download job ID: {}'.format(
                    cached_download['download_job_id']))
            return self.get_download_response(file_name=cached_download['file_name'])

        request_agency = json_request.get('filters', {}).get('agency', None)
        final_output_zip_name = create_unique_filename(json_request, request_agency)
        download_job = DownloadJob.objects.create(
            job_status_id=JOB_STATUS_DICT['ready'],
            file_name=final_output_zip_name,
            json_request=ordered_json_request)

        log_new_download_job(request, download_job)
        self.process_request(download_job)

        return self.get_download_response(file_name=final_output_zip_name)
    def post(self, request, request_type='award'):
        """Validate the request body for the given download type, reuse a
        cached job from today when available, or create and process a new one.
        """
        # Award requests get the award validator; everything else is treated
        # as an account request.
        if request_type == 'award':
            validate = self.validate_award_request
        else:
            validate = self.validate_account_request
        json_request = validate(request.data)

        json_request['request_type'] = request_type
        ordered_json_request = json.dumps(order_nested_object(json_request))

        # Check if the same request has been called today
        # TODO!!! Use external_data_load_date to determine data freshness
        today = datetime.strftime(datetime.now(timezone.utc), "%Y-%m-%d")
        cached_download = (DownloadJob.objects
                           .filter(json_request=ordered_json_request, update_date__gte=today)
                           .exclude(job_status_id=JOB_STATUS_DICT["failed"])
                           .values("download_job_id", "file_name")
                           .first())

        if cached_download and not settings.IS_LOCAL:
            # By returning the cached files, there should be no duplicates on a daily basis
            write_to_log(
                message='Generating file from cached download job ID: {}'.format(
                    cached_download['download_job_id']))
            return self.get_download_response(file_name=cached_download['file_name'])

        request_agency = json_request.get('filters', {}).get('agency', None)
        final_output_zip_name = create_unique_filename(json_request["download_types"], request_agency)
        download_job = DownloadJob.objects.create(
            job_status_id=JOB_STATUS_DICT['ready'],
            file_name=final_output_zip_name,
            json_request=ordered_json_request)

        log_new_download_job(request, download_job)
        self.process_request(download_job)

        return self.get_download_response(file_name=final_output_zip_name)