def _debug_generate_unified_query(
            ingest_view: DirectIngestPreProcessedIngestView,
            ingest_view_export_args: GcsfsIngestViewExportArgs
    ) -> Tuple[str, List[bigquery.ScalarQueryParameter]]:
        """Builds a single date-bounded query (plus its BigQuery query parameters) capturing the data that changed for
        this view between the date bounds in the export args.

        With only an upper bound, the result is a historical query up to that date. With both bounds, two historical
        queries are diffed via SQL's 'EXCEPT DISTINCT' to produce a delta query.

        Important Note: This query is meant for debug use only. In the actual DirectIngest flow, query results for
        individual dates are persisted into temporary tables, and those temporary tables are then diff'd using SQL's
        `EXCEPT DISTINCT` function.
        """

        # Upper bound is always present and always parametrized.
        query_params = [
            bigquery.ScalarQueryParameter(
                UPPER_BOUND_TIMESTAMP_PARAM_NAME,
                bigquery.enums.SqlTypeNames.DATETIME.value,
                ingest_view_export_args.upper_bound_datetime_to_export)
        ]
        query = ingest_view.date_parametrized_view_query(UPPER_BOUND_TIMESTAMP_PARAM_NAME)

        lower_bound_datetime = ingest_view_export_args.upper_bound_datetime_prev
        if lower_bound_datetime:
            # A previous bound exists - parametrize it and turn the query into a delta between the two dates.
            query_params.append(
                bigquery.ScalarQueryParameter(
                    LOWER_BOUND_TIMESTAMP_PARAM_NAME,
                    bigquery.enums.SqlTypeNames.DATETIME.value,
                    lower_bound_datetime)
            )
            lower_bound_query = ingest_view.date_parametrized_view_query(LOWER_BOUND_TIMESTAMP_PARAM_NAME)
            query = DirectIngestIngestViewExportManager.create_date_diff_query(
                upper_bound_query=query,
                upper_bound_prev_query=lower_bound_query,
                do_reverse_date_diff=ingest_view.do_reverse_date_diff)
            # EXCEPT DISTINCT does not preserve ordering, so reapply the view's ordering to the diffed query.
            query = DirectIngestPreProcessedIngestView.add_order_by_suffix(
                query=query, order_by_cols=ingest_view.order_by_cols)

        return query, query_params
    def export_view_for_args(
            self, ingest_view_export_args: GcsfsIngestViewExportArgs) -> bool:
        """Performs a Cloud Storage export of a single ingest view with date bounds specified in the provided args. If
        the provided args contain an upper and lower bound date, the exported view contains only the delta between the
        two dates. If only the upper bound is provided, then the exported view contains historical results up until the
        bound date.

        Returns False (without exporting) if this job was already exported according to its metadata; returns True
        once the export completes.

        Raises:
            ValueError: if ingest view exports are not enabled for this region, or if no metadata row exists for the
                given job args.

        Note: In order to prevent resource exhaustion in BigQuery, the ultimate query in this method is broken down
        into distinct parts. This method first persists the results of historical queries for each given bound date
        (upper and lower) into temporary tables. The delta between those tables is then queried separately using
        SQL's `EXCEPT DISTINCT` and those final results are exported to Cloud Storage.
        """
        # Guard: exports must be explicitly enabled for this region/environment.
        if not self.region.are_ingest_view_exports_enabled_in_env():
            raise ValueError(
                f'Ingest view exports not enabled for region [{self.region.region_code}]'
            )

        metadata = self.file_metadata_manager.get_ingest_view_metadata_for_export_job(
            ingest_view_export_args)

        if not metadata:
            raise ValueError(
                f'Found no metadata for the given job args: [{ingest_view_export_args}].'
            )

        # Idempotency guard: a recorded export_time means this job already ran to completion.
        if metadata.export_time:
            logging.warning('Already exported view for args [%s] - returning.',
                            ingest_view_export_args)
            return False

        output_path = self._generate_output_path(ingest_view_export_args,
                                                 metadata)
        logging.info('Generated output path [%s]', output_path.uri())

        # Persist the chosen output file name before doing any work, so a retry after a crash reuses the same name.
        if not metadata.normalized_file_name:
            self.file_metadata_manager.register_ingest_view_export_file_name(
                metadata, output_path)

        ingest_view = self.ingest_views_by_tag[
            ingest_view_export_args.ingest_view_name]
        # Track intermediate tables (for cleanup) and their load jobs (to await) separately.
        single_date_table_ids = []
        single_date_table_export_jobs = []

        # Kick off a query job materializing the historical results as of the upper bound date into an
        # intermediate table. The job is awaited further below, after the final query text is assembled.
        upper_bound_table_name = \
            f'{ingest_view_export_args.ingest_view_name}_' \
            f'{ingest_view_export_args.upper_bound_datetime_to_export.strftime(TABLE_NAME_DATE_FORMAT)}_' \
            f'upper_bound'
        export_job = self._generate_export_job_for_date(
            table_name=upper_bound_table_name,
            ingest_view=ingest_view,
            date_bound=ingest_view_export_args.upper_bound_datetime_to_export)
        single_date_table_ids.append(upper_bound_table_name)
        single_date_table_export_jobs.append(export_job)

        # Base query selects everything from the upper bound intermediate table.
        query = SELECT_SUBQUERY.format(
            project_id=self.big_query_client.project_id,
            dataset_id=ingest_view.dataset_id,
            table_name=upper_bound_table_name)

        if ingest_view_export_args.upper_bound_datetime_prev:
            # A lower bound exists - materialize its historical results as well, then diff the two tables.
            lower_bound_table_name = \
                f'{ingest_view_export_args.ingest_view_name}_' \
                f'{ingest_view_export_args.upper_bound_datetime_prev.strftime(TABLE_NAME_DATE_FORMAT)}_' \
                f'lower_bound'
            export_job = self._generate_export_job_for_date(
                table_name=lower_bound_table_name,
                ingest_view=ingest_view,
                date_bound=ingest_view_export_args.upper_bound_datetime_prev)
            single_date_table_export_jobs.append(export_job)
            single_date_table_ids.append(lower_bound_table_name)

            # Strip trailing whitespace/semicolons so the subqueries can be embedded in the EXCEPT DISTINCT wrapper.
            filter_query = SELECT_SUBQUERY.format(
                project_id=self.big_query_client.project_id,
                dataset_id=ingest_view.dataset_id,
                table_name=lower_bound_table_name).rstrip().rstrip(';')
            query = query.rstrip().rstrip(';')
            query = f'(\n{query}\n) EXCEPT DISTINCT (\n{filter_query}\n);'

        query = DirectIngestPreProcessedIngestView.add_order_by_suffix(
            query=query, order_by_cols=ingest_view.order_by_cols)

        # Wait for completion of all async date queries
        for query_job in single_date_table_export_jobs:
            query_job.result()
        logging.info(
            'Completed loading results of individual date queries into intermediate tables.'
        )

        logging.info('Generated final export query [%s]', str(query))

        export_configs = [
            ExportQueryConfig(
                query=query,
                query_parameters=[],
                intermediate_dataset_id=ingest_view.dataset_id,
                intermediate_table_name=
                f'{ingest_view_export_args.ingest_view_name}_latest_export',
                output_uri=output_path.uri(),
                output_format=bigquery.DestinationFormat.CSV)
        ]

        logging.info('Starting export to cloud storage.')
        self.big_query_client.export_query_results_to_cloud_storage(
            export_configs=export_configs)
        logging.info('Export to cloud storage complete.')

        # Clean up the per-date intermediate tables only after the export has succeeded.
        for table_id in single_date_table_ids:
            self.big_query_client.delete_table(
                dataset_id=ingest_view.dataset_id, table_id=table_id)
            logging.info('Deleted intermediate table [%s]', table_id)

        self.file_metadata_manager.mark_ingest_view_exported(metadata)

        return True