class TestDirectIngestGcsFileSystem(TestCase):
    """Tests for the FakeGCSFileSystem."""

    STORAGE_DIR_PATH = GcsfsDirectoryPath(bucket_name='storage_bucket',
                                          relative_path='region_subdir')

    INGEST_DIR_PATH = GcsfsDirectoryPath(bucket_name='my_bucket')

    def setUp(self) -> None:
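        # Wrap an in-memory FakeGCSFileSystem so these tests never touch real GCS.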
        self.fs = DirectIngestGCSFileSystem(FakeGCSFileSystem())

    def fully_process_file(self,
                           dt: datetime.datetime,
                           path: GcsfsFilePath,
                           file_type_differentiation_on: bool = False) -> None:
        """Mimics all the file system calls for a single file in the direct
        ingest system, from getting added to the ingest bucket, turning to a
        processed file, then getting moved to storage."""

        fixture_util.add_direct_ingest_path(self.fs.gcs_file_system,
                                            path,
                                            has_fixture=False)

        start_num_total_files = len(self.fs.gcs_file_system.all_paths)
        # pylint: disable=protected-access
        start_ingest_paths = self.fs._ls_with_file_prefix(
            self.INGEST_DIR_PATH, '', None)
        start_storage_paths = self.fs._ls_with_file_prefix(
            self.STORAGE_DIR_PATH, '', None)
        if file_type_differentiation_on:
            start_raw_storage_paths = self.fs._ls_with_file_prefix(
                self.STORAGE_DIR_PATH,
                '',
                file_type_filter=GcsfsDirectIngestFileType.RAW_DATA)
            start_ingest_view_storage_paths = self.fs._ls_with_file_prefix(
                self.STORAGE_DIR_PATH,
                '',
                file_type_filter=GcsfsDirectIngestFileType.INGEST_VIEW)
        else:
            start_raw_storage_paths = []
            start_ingest_view_storage_paths = []

        # File is renamed to normalized path
        file_type = GcsfsDirectIngestFileType.RAW_DATA \
            if file_type_differentiation_on else GcsfsDirectIngestFileType.UNSPECIFIED

        self.fs.mv_path_to_normalized_path(path, file_type, dt)

        if file_type_differentiation_on:
            raw_unprocessed = self.fs.get_unprocessed_file_paths(
                self.INGEST_DIR_PATH,
                file_type_filter=GcsfsDirectIngestFileType.RAW_DATA)
            self.assertEqual(len(raw_unprocessed), 1)
            self.assertTrue(
                self.fs.is_seen_unprocessed_file(raw_unprocessed[0]))

            # ... raw file imported to BQ

            processed_path = self.fs.mv_path_to_processed_path(
                raw_unprocessed[0])

            processed = self.fs.get_processed_file_paths(
                self.INGEST_DIR_PATH, None)
            self.assertEqual(len(processed), 1)

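            # Mimic ingest view export: copy the processed raw file to a new
            # unprocessed path with the INGEST_VIEW file type, then move the
            # raw file to storage.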
            self.fs.copy(
                processed_path,
                GcsfsFilePath.from_absolute_path(
                    to_normalized_unprocessed_file_path_from_normalized_path(
                        processed_path.abs_path(),
                        file_type_override=GcsfsDirectIngestFileType.
                        INGEST_VIEW)))
            self.fs.mv_path_to_storage(processed_path, self.STORAGE_DIR_PATH)

        ingest_unprocessed_filter = GcsfsDirectIngestFileType.INGEST_VIEW \
            if file_type_differentiation_on else None

        ingest_unprocessed = self.fs.get_unprocessed_file_paths(
            self.INGEST_DIR_PATH, file_type_filter=ingest_unprocessed_filter)
        self.assertEqual(len(ingest_unprocessed), 1)
        self.assertTrue(self.fs.is_seen_unprocessed_file(
            ingest_unprocessed[0]))

        # ... file is ingested

        # File is moved to processed path
        self.fs.mv_path_to_processed_path(ingest_unprocessed[0])
        processed = self.fs.get_processed_file_paths(self.INGEST_DIR_PATH,
                                                     None)
        self.assertEqual(len(processed), 1)
        self.assertTrue(self.fs.is_processed_file(processed[0]))

        unprocessed = self.fs.get_unprocessed_file_paths(
            self.INGEST_DIR_PATH, None)
        self.assertEqual(len(unprocessed), 0)

        # File is moved to storage
        ingest_move_type_filter = GcsfsDirectIngestFileType.INGEST_VIEW \
            if file_type_differentiation_on else None

        self.fs.mv_processed_paths_before_date_to_storage(
            self.INGEST_DIR_PATH,
            self.STORAGE_DIR_PATH,
            date_str_bound=dt.date().isoformat(),
            include_bound=True,
            file_type_filter=ingest_move_type_filter)

        end_ingest_paths = self.fs._ls_with_file_prefix(self.INGEST_DIR_PATH,
                                                        '',
                                                        file_type_filter=None)
        end_storage_paths = self.fs._ls_with_file_prefix(self.STORAGE_DIR_PATH,
                                                         '',
                                                         file_type_filter=None)
        if file_type_differentiation_on:
            end_raw_storage_paths = self.fs._ls_with_file_prefix(
                self.STORAGE_DIR_PATH,
                '',
                file_type_filter=GcsfsDirectIngestFileType.RAW_DATA)
            end_ingest_view_storage_paths = self.fs._ls_with_file_prefix(
                self.STORAGE_DIR_PATH,
                '',
                file_type_filter=GcsfsDirectIngestFileType.INGEST_VIEW)
        else:
            end_raw_storage_paths = []
            end_ingest_view_storage_paths = []

        # Each file gets re-exported as ingest view
        splitting_factor = 2 if file_type_differentiation_on else 1

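        # One file leaves the ingest bucket and `splitting_factor` files land in
        # storage, so the net change in total file count is splitting_factor - 1.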
        expected_final_total_files = start_num_total_files + splitting_factor - 1
        self.assertEqual(len(self.fs.gcs_file_system.all_paths),
                         expected_final_total_files)
        self.assertEqual(len(end_ingest_paths), len(start_ingest_paths) - 1)
        self.assertEqual(len(end_storage_paths),
                         len(start_storage_paths) + 1 * splitting_factor)
        if file_type_differentiation_on:
            self.assertEqual(
                len(end_raw_storage_paths) +
                len(end_ingest_view_storage_paths), len(end_storage_paths))
            self.assertEqual(len(end_raw_storage_paths),
                             len(start_raw_storage_paths) + 1)
            self.assertEqual(len(end_ingest_view_storage_paths),
                             len(start_ingest_view_storage_paths) + 1)

        for sp in end_storage_paths:
            parts = filename_parts_from_path(sp)
            if sp.abs_path() not in {
                    p.abs_path()
                    for p in start_storage_paths
            }:
                self.assertTrue(sp.abs_path().startswith(
                    self.STORAGE_DIR_PATH.abs_path()))
                dir_path, storage_file_name = os.path.split(sp.abs_path())
                if parts.file_type != GcsfsDirectIngestFileType.UNSPECIFIED:
                    self.assertIn(parts.file_type.value, dir_path)
                name, _ = path.file_name.split('.')
                self.assertIn(name, storage_file_name)

    def test_direct_ingest_file_moves(self) -> None:
        self.fully_process_file(
            datetime.datetime.now(),
            GcsfsFilePath(bucket_name='my_bucket', blob_name='test_file.csv'))

    def test_direct_ingest_multiple_file_moves(self) -> None:
        self.fully_process_file(
            datetime.datetime.now(),
            GcsfsFilePath(bucket_name='my_bucket', blob_name='test_file.csv'))

        self.fully_process_file(
            datetime.datetime.now(),
            GcsfsFilePath(bucket_name='my_bucket',
                          blob_name='test_file_2.csv'))

    def test_move_to_storage_with_conflict(self) -> None:
        dt = datetime.datetime.now()
        self.fully_process_file(
            dt,
            GcsfsFilePath(bucket_name='my_bucket', blob_name='test_file.csv'))

        # Try uploading a file with a duplicate name that has already been
        # moved to storage
        self.fully_process_file(
            dt,
            GcsfsFilePath(bucket_name='my_bucket', blob_name='test_file.csv'))

        # pylint: disable=protected-access
        storage_paths = self.fs._ls_with_file_prefix(self.STORAGE_DIR_PATH,
                                                     '',
                                                     file_type_filter=None)
        self.assertEqual(len(storage_paths), 2)

        found_first_file = False
        found_second_file = False
        for path in storage_paths:
            self.assertTrue(filename_parts_from_path(path))
            if path.abs_path().endswith('test_file.csv'):
                found_first_file = True
            if path.abs_path().endswith('test_file-(1).csv'):
                found_second_file = True

        self.assertTrue(found_first_file)
        self.assertTrue(found_second_file)

    def test_direct_ingest_file_moves_with_file_types(self) -> None:
        self.fully_process_file(datetime.datetime.now(),
                                GcsfsFilePath(bucket_name='my_bucket',
                                              blob_name='test_file.csv'),
                                file_type_differentiation_on=True)

    def test_direct_ingest_multiple_file_moves_with_file_types(self) -> None:
        self.fully_process_file(datetime.datetime.now(),
                                GcsfsFilePath(bucket_name='my_bucket',
                                              blob_name='test_file.csv'),
                                file_type_differentiation_on=True)

        self.fully_process_file(datetime.datetime.now(),
                                GcsfsFilePath(bucket_name='my_bucket',
                                              blob_name='test_file_2.csv'),
                                file_type_differentiation_on=True)

    def test_move_to_storage_with_conflict_with_file_types(self) -> None:
        dt = datetime.datetime.now()
        self.fully_process_file(dt,
                                GcsfsFilePath(bucket_name='my_bucket',
                                              blob_name='test_file.csv'),
                                file_type_differentiation_on=True)

        # Try uploading a file with a duplicate name that has already been
        # moved to storage
        self.fully_process_file(dt,
                                GcsfsFilePath(bucket_name='my_bucket',
                                              blob_name='test_file.csv'),
                                file_type_differentiation_on=True)

        # pylint: disable=protected-access
        storage_paths = self.fs._ls_with_file_prefix(self.STORAGE_DIR_PATH,
                                                     '',
                                                     file_type_filter=None)
        self.assertEqual(len(storage_paths), 4)

        found_first_file = False
        found_second_file = False
        for path in storage_paths:
            if path.abs_path().endswith('test_file.csv'):
                found_first_file = True
            if path.abs_path().endswith('test_file-(1).csv'):
                found_second_file = True

        self.assertTrue(found_first_file)
        self.assertTrue(found_second_file)


class IngestOperationsStore:
    """
    A store for tracking the current state of direct ingest.
    """
    def __init__(self, override_project_id: Optional[str] = None) -> None:
        self.project_id = (metadata.project_id() if override_project_id is None
                           else override_project_id)
        self.fs = DirectIngestGCSFileSystem(GcsfsFactory.build())
        self.cloud_task_manager = DirectIngestCloudTaskManagerImpl()
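        # Cloud Tasks client used directly to pause/resume ingest queues and
        # read their current state.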
        self.cloud_tasks_client = tasks_v2.CloudTasksClient()

    @property
    def state_codes_launched_in_env(self) -> List[StateCode]:
        return get_direct_ingest_states_launched_in_env()

    @staticmethod
    def get_queues_for_region(state_code: StateCode) -> List[str]:
        """Returns the list of formatted direct ingest queues for given state"""
        queues = set()
        for ingest_instance in DirectIngestInstance:
            queues.update(
                get_direct_ingest_queues_for_state(state_code,
                                                   ingest_instance))

        return list(queues)

    def start_ingest_run(self, state_code: StateCode,
                         instance_str: str) -> None:
        """This function is called through the Ingest Operations UI in the admin panel.
        It calls to start a direct ingest run for the given region_code in the given instance
        Requires:
        - state_code: (required) State code to start ingest for (i.e. "US_ID")
        - instance: (required) Which instance to start ingest for (either PRIMARY or SECONDARY)
        """
        try:
            instance = DirectIngestInstance[instance_str]
        except KeyError as e:
            logging.error("Received an invalid instance: %s.", instance_str)
            raise ValueError(
                f"Invalid instance [{instance_str}] received", ) from e

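        # Ingest should only actually start for states that are launched in the
        # current environment.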
        can_start_ingest = state_code in self.state_codes_launched_in_env

        formatted_state_code = state_code.value.lower()
        region = get_region(formatted_state_code, is_direct_ingest=True)

        # Get the ingest bucket for this region and instance
        ingest_bucket_path = gcsfs_direct_ingest_bucket_for_region(
            region_code=formatted_state_code,
            system_level=SystemLevel.for_region(region),
            ingest_instance=instance,
            project_id=self.project_id,
        )

        logging.info(
            "Creating cloud task to schedule next job and kick ingest for %s instance in %s.",
            instance,
            formatted_state_code,
        )
        self.cloud_task_manager.create_direct_ingest_handle_new_files_task(
            region=region,
            ingest_instance=instance,
            ingest_bucket=ingest_bucket_path,
            can_start_ingest=can_start_ingest,
        )

    def update_ingest_queues_state(self, state_code: StateCode,
                                   new_queue_state: str) -> None:
        """This function is called through the Ingest Operations UI in the admin panel.
        It updates the state of the following queues by either pausing or resuming the queues:
         - direct-ingest-state-<region_code>-bq-import-export
         - direct-ingest-state-<region_code>-process-job-queue
         - direct-ingest-state-<region_code>-scheduler
         - direct-ingest-state-<region_code>-sftp-queue    (for select regions)

        Requires:
        - state_code: (required) State code to pause queues for
        - new_state: (required) Either 'PAUSED' or 'RUNNING'
        """
        queues_to_update = self.get_queues_for_region(state_code)

        if new_queue_state not in [
                QUEUE_STATE_ENUM.RUNNING.name,
                QUEUE_STATE_ENUM.PAUSED.name,
        ]:
            logging.error(
                "Received an invalid queue state: %s. This method should only be used "
                "to update queue states to PAUSED or RUNNING",
                new_queue_state,
            )
            raise ValueError(
                f"Invalid queue state [{new_queue_state}] received", )

        for queue in queues_to_update:
            queue_path = self.cloud_tasks_client.queue_path(
                self.project_id, _TASK_LOCATION, queue)

            if new_queue_state == QUEUE_STATE_ENUM.PAUSED.name:
                logging.info("Pausing queue: %s", queue)
                self.cloud_tasks_client.pause_queue(name=queue_path)
            else:
                logging.info("Resuming queue: %s", queue)
                self.cloud_tasks_client.resume_queue(name=queue_path)

    def get_ingest_queue_states(self,
                                state_code: StateCode) -> List[Dict[str, str]]:
        """Returns a list of dictionaries that contain the name and states of direct ingest queues for a given region"""
        ingest_queue_states: List[Dict[str, str]] = []
        queues_to_update = self.get_queues_for_region(state_code)

        for queue_name in queues_to_update:
            queue_path = self.cloud_tasks_client.queue_path(
                self.project_id, _TASK_LOCATION, queue_name)
            queue = self.cloud_tasks_client.get_queue(name=queue_path)
            queue_state = {
                "name": queue_name,
                "state": QUEUE_STATE_ENUM(queue.state).name,
            }
            ingest_queue_states.append(queue_state)

        return ingest_queue_states

    def _get_bucket_metadata(self, path: GcsfsBucketPath) -> BucketSummaryType:
        """Returns a dictionary containing the following info for a given bucket:
        i.e. {
            name: bucket_name,
            unprocessedFilesRaw: how many unprocessed raw data files in the bucket,
            processedFilesRaw: how many processed raw data files are in the bucket (should be zero),
            unprocessedFilesIngestView: how many unprocessed ingest view files in the bucket,
            processedFilesIngestView: how many processed ingest view files are in the bucket
        }
        """
        bucket_metadata: BucketSummaryType = {
            "name": path.abs_path(),
        }

        for file_type in (GcsfsDirectIngestFileType.RAW_DATA,
                          GcsfsDirectIngestFileType.INGEST_VIEW):
            file_type_str = self.get_file_type_api_string(file_type)
            unprocessed_files = self.fs.get_unprocessed_file_paths(
                path, file_type)
            bucket_metadata[f"unprocessedFiles{file_type_str}"] = len(
                unprocessed_files)

            processed_files = self.fs.get_processed_file_paths(path, file_type)
            bucket_metadata[f"processedFiles{file_type_str}"] = len(
                processed_files)

        return bucket_metadata

    @staticmethod
    def get_file_type_api_string(file_type: GcsfsDirectIngestFileType) -> str:
        """Get the string representation of the file type to use in the response."""
        if file_type == GcsfsDirectIngestFileType.INGEST_VIEW:
            return "IngestView"
        if file_type == GcsfsDirectIngestFileType.RAW_DATA:
            return "Raw"
        raise ValueError(f"Unexpected file_type [{file_type}]")

    def get_ingest_instance_summaries(
            self, state_code: StateCode) -> List[Dict[str, Any]]:
        """Returns a list of dictionaries containing the following info for a given instance:
        i.e. {
            instance: the direct ingest instance,
            dbName: database name for this instance,
            storage: storage bucket absolute path,
            ingest: {
                name: bucket_name,
                unprocessedFilesRaw: how many unprocessed raw data files in the bucket,
                processedFilesRaw: how many processed raw data files are in the bucket (should be zero),
                unprocessedFilesIngestView: how many unprocessed ingest view files in the bucket,
                processedFilesIngestView: how many processed ingest view files are in the bucket (should be zero),
            },
            operations: {
                unprocessedFilesRaw: number of unprocessed raw files in the operations database
                unprocessedFilesIngestView: number of unprocessed ingest view files in the operations database
                dateOfEarliestUnprocessedIngestView: date of earliest unprocessed ingest file, if it exists
            }
        }
        """
        formatted_state_code = state_code.value.lower()

        ingest_instance_summaries: List[Dict[str, Any]] = []
        for instance in DirectIngestInstance:
            # Get the ingest bucket path
            ingest_bucket_path = gcsfs_direct_ingest_bucket_for_region(
                region_code=formatted_state_code,
                system_level=SystemLevel.STATE,
                ingest_instance=instance,
                project_id=self.project_id,
            )
            # Get an object containing information about the ingest bucket
            ingest_bucket_metadata = self._get_bucket_metadata(
                ingest_bucket_path)

            # Get the storage bucket for this instance
            storage_bucket_path = gcsfs_direct_ingest_storage_directory_path_for_region(
                region_code=formatted_state_code,
                system_level=SystemLevel.STATE,
                ingest_instance=instance,
                project_id=self.project_id,
            )

            # Get the database name corresponding to this instance
            ingest_db_name = self._get_database_name_for_state(
                state_code, instance)

            # Get the operations metadata for this ingest instance
            operations_db_metadata = self._get_operations_db_metadata(
                state_code, ingest_db_name)

            ingest_instance_summary: Dict[str, Any] = {
                "instance": instance.value,
                "storage": storage_bucket_path.abs_path(),
                "ingest": ingest_bucket_metadata,
                "dbName": ingest_db_name,
                "operations": operations_db_metadata,
            }

            ingest_instance_summaries.append(ingest_instance_summary)

        return ingest_instance_summaries

    @staticmethod
    def _get_database_name_for_state(state_code: StateCode,
                                     instance: DirectIngestInstance) -> str:
        """Returns the database name for the given state and instance"""
        return SQLAlchemyDatabaseKey.for_state_code(
            state_code,
            instance.database_version(SystemLevel.STATE,
                                      state_code=state_code),
        ).db_name

    @staticmethod
    def _get_operations_db_metadata(
            state_code: StateCode,
            ingest_db_name: str) -> Dict[str, Union[int, Optional[datetime]]]:
        """Returns the following dictionary with information about the operations database for the state:
        {
            unprocessedFilesRaw: <int>
            unprocessedFilesIngestView: <int>
            dateOfEarliestUnprocessedIngestView: <datetime>
        }

        If running locally, this does not hit the live DB instance and only returns fake data.
        """
        if in_development():
            return {
                "unprocessedFilesRaw": -1,
                "unprocessedFilesIngestView": -2,
                "dateOfEarliestUnprocessedIngestView": datetime(2021, 4, 28),
            }

        file_metadata_manager = PostgresDirectIngestFileMetadataManager(
            region_code=state_code.value,
            ingest_database_name=ingest_db_name,
        )

        try:
            # Raw files are processed in the primary instance, not secondary
            num_unprocessed_raw_files = (
                file_metadata_manager.get_num_unprocessed_raw_files())
        except DirectIngestInstanceError:
            num_unprocessed_raw_files = 0

        return {
            "unprocessedFilesRaw": num_unprocessed_raw_files,
            "unprocessedFilesIngestView":
                file_metadata_manager.get_num_unprocessed_ingest_files(),
            "dateOfEarliestUnprocessedIngestView":
                file_metadata_manager.get_date_of_earliest_unprocessed_ingest_file(),
        }
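

# A minimal usage sketch (hypothetical project id; not part of the production
# code path, shown only to illustrate how the admin panel backend might use
# this store):
#
#   store = IngestOperationsStore(override_project_id="my-gcp-project")
#   store.update_ingest_queues_state(StateCode.US_ID, "PAUSED")
#   queue_states = store.get_ingest_queue_states(StateCode.US_ID)
#   summaries = store.get_ingest_instance_summaries(StateCode.US_ID)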