    def test_dicom_search_instances(self):
        # Search and compare the metadata of a persistent DICOM store.
        # Both refined and comprehensive searches are tested.
        input_dict_all = {}
        input_dict_all['project_id'] = self.project
        input_dict_all['region'] = REGION
        input_dict_all['dataset_id'] = DATA_SET_ID
        input_dict_all['dicom_store_id'] = PERSISTENT_DICOM_STORE_NAME
        input_dict_all['search_type'] = "instances"

        input_dict_refine = {}
        input_dict_refine['project_id'] = self.project
        input_dict_refine['region'] = REGION
        input_dict_refine['dataset_id'] = DATA_SET_ID
        input_dict_refine['dicom_store_id'] = PERSISTENT_DICOM_STORE_NAME
        input_dict_refine['search_type'] = "instances"
        input_dict_refine['params'] = {
            'StudyInstanceUID': 'study_000000001',
            'limit': 500,
            'offset': 0
        }

        expected_dict_all = {}
        expected_dict_all['result'] = self.expected_output_all_metadata
        expected_dict_all['status'] = 200
        expected_dict_all['input'] = input_dict_all
        expected_dict_all['success'] = True

        expected_dict_refine = {}
        expected_dict_refine['result'] = self.expected_output_refined_metadata
        expected_dict_refine['status'] = 200
        expected_dict_refine['input'] = input_dict_refine
        expected_dict_refine['success'] = True

        with self.test_pipeline as p:
            results_all = (p
                           | 'create all dict' >> beam.Create([input_dict_all])
                           | 'search all' >> DicomSearch())
            results_refine = (
                p
                | 'create refine dict' >> beam.Create([input_dict_refine])
                | 'search refine' >> DicomSearch())

            assert_that(results_all,
                        equal_to([expected_dict_all]),
                        label='all search assert')
            assert_that(results_refine,
                        equal_to([expected_dict_refine]),
                        label='refine search assert')
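These snippets are methods lifted out of larger Beam test modules, so their imports, module-level constants, and class setup are not shown. A minimal sketch of what they assume is below, assuming the transforms live in apache_beam.io.gcp.dicomio as Beam ships them; every constant value is a placeholder rather than the value used by the original suite.

import apache_beam as beam
from apache_beam.io import fileio
from apache_beam.io.gcp.dicomio import DicomSearch
from apache_beam.io.gcp.dicomio import UploadToDicomStore
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to

# Placeholder constants; the integration tests read the real values from
# their own test configuration.
REGION = 'us-central1'
DATA_SET_ID = 'test-dataset'
PERSISTENT_DICOM_STORE_NAME = 'test-dicom-store'
DICOM_FILES_PATH = 'gs://my-bucket/dicom-files'
NUM_INSTANCE = 10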
Example #2
    def test_param_dict_passing(self, FakeClient):
        input_dict = {}
        input_dict['project_id'] = "test_project"
        input_dict['region'] = "test_region"
        input_dict['dataset_id'] = "test_dataset_id"
        input_dict['dicom_store_id'] = "test_dicom_store_id"
        input_dict['search_type'] = "instances"
        input_dict['params'] = {'PatientName': 'Brian'}

        expected_dict = {}
        expected_dict['result'] = [{
            'PatientName': 'Brian',
            'Age': 20,
            'TestResult': 'Positive'
        }]
        expected_dict['status'] = 200
        expected_dict['input'] = input_dict
        expected_dict['success'] = True

        fc = FakeHttpClient()
        FakeClient.return_value = fc
        with TestPipeline() as p:
            results = (p | beam.Create([input_dict]) | DicomSearch())
            assert_that(results, equal_to([expected_dict]))
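The FakeClient parameter here and in the later mocked tests implies a unittest.mock.patch decorator that this listing has stripped, plus a FakeHttpClient stub defined elsewhere in the test module. A rough sketch of the wiring is below; the patch target string is an assumption for illustration, not a documented Beam API, and the real FakeHttpClient implements whatever client interface DicomSearch calls internally.

from unittest import mock

# Hypothetical wiring: patch the HTTP client class that DicomSearch
# instantiates so the test talks to the fake instead of the Healthcare API.
@mock.patch('apache_beam.io.gcp.dicomio.DicomApiHttpClient')  # assumed target
def test_param_dict_passing(self, FakeClient):
    fc = FakeHttpClient()         # canned responses, no real HTTP calls
    FakeClient.return_value = fc  # DicomSearch now receives the fake client
    ...                           # body as shown in Example #2 above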
Example #3
    def test_dicom_store_instance(self):
        # Store DICOM files to an empty DICOM store from a GCS bucket,
        # then check that the stored metadata matches.
        input_dict = {}
        input_dict['project_id'] = self.project
        input_dict['region'] = REGION
        input_dict['dataset_id'] = DATA_SET_ID
        input_dict['dicom_store_id'] = self.temp_dicom_store
        input_dict['search_type'] = "instances"

        expected_dict = {}
        expected_dict['result'] = self.expected_output_metadata
        expected_dict['status'] = 200
        expected_dict['input'] = input_dict
        expected_dict['success'] = True

        with TestPipeline() as p:
            gcs_path = DICOM_FILES_PATH + "/*"
            results = (p
                       | fileio.MatchFiles(gcs_path)
                       | fileio.ReadMatches()
                       | UploadToDicomStore(input_dict, 'fileio')
                       | beam.Map(lambda x: x['success']))
            assert_that(results, equal_to([True] * NUM_INSTANCE))

        with TestPipeline() as p:
            results = (p | beam.Create([input_dict]) | DicomSearch())
            assert_that(results, equal_to([expected_dict]))
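Example #3 feeds UploadToDicomStore with fileio readable-file elements. The transform also accepts raw DICOM bytes via the 'bytes' input type; a minimal sketch of that mode is below, with the destination dict and file path as placeholders.

import apache_beam as beam
from apache_beam.io.gcp.dicomio import UploadToDicomStore
from apache_beam.testing.test_pipeline import TestPipeline

# Placeholder destination; same keys as the input_dict built in Example #3.
destination = {
    'project_id': 'test_project',
    'region': 'test_region',
    'dataset_id': 'test_dataset_id',
    'dicom_store_id': 'test_dicom_store_id',
}

# Read one local DICOM file into memory; the path is a placeholder.
with open('/tmp/example.dcm', 'rb') as f:
    dicom_bytes = f.read()

with TestPipeline() as p:
    _ = (p
         | beam.Create([dicom_bytes])
         | UploadToDicomStore(destination, 'bytes')  # raw-bytes input mode
         | beam.Map(lambda x: x['success']))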
Example #4
  def test_missing_parameters(self, FakeClient):
    input_dict = {}
    input_dict['project_id'] = "test_project"
    input_dict['region'] = "test_region"

    expected_invalid_dict = {}
    expected_invalid_dict['result'] = []
    expected_invalid_dict['status'] = 'Must have dataset_id in the dict.'
    expected_invalid_dict['input'] = input_dict
    expected_invalid_dict['success'] = False

    fc = FakeHttpClient()
    FakeClient.return_value = fc
    with TestPipeline() as p:
      results = (p | beam.Create([input_dict]) | DicomSearch())
      assert_that(results, equal_to([expected_invalid_dict]))
Example #5
    def test_dicom_search(self):
        # Search and compare the metadata of a persistent DICOM store.
        input_dict = {}
        input_dict['project_id'] = self.project
        input_dict['region'] = REGION
        input_dict['dataset_id'] = DATA_SET_ID
        input_dict['dicom_store_id'] = PERSISTENT_DICOM_STORE_NAME
        input_dict['search_type'] = "instances"

        expected_dict = {}
        expected_dict['result'] = self.expected_output_metadata
        expected_dict['status'] = 200
        expected_dict['input'] = input_dict
        expected_dict['success'] = True

        with TestPipeline() as p:
            results = (p | beam.Create([input_dict]) | DicomSearch())
            assert_that(results, equal_to([expected_dict]))
Example #6
  def test_client_search_notfound(self, FakeClient):
    input_dict = {}
    # Search for instances in a store that does not exist.
    input_dict['project_id'] = "wrong_project"
    input_dict['region'] = "wrong_region"
    input_dict['dataset_id'] = "wrong_dataset_id"
    input_dict['dicom_store_id'] = "wrong_dicom_store_id"
    input_dict['search_type'] = "instances"

    expected_invalid_dict = {}
    expected_invalid_dict['result'] = []
    expected_invalid_dict['status'] = 204
    expected_invalid_dict['input'] = input_dict
    expected_invalid_dict['success'] = False

    fc = FakeHttpClient()
    FakeClient.return_value = fc
    with TestPipeline() as p:
      results = (p | beam.Create([input_dict]) | DicomSearch())
      assert_that(results, equal_to([expected_invalid_dict]))
Example #7
  def test_wrong_input_type(self, FakeClient):
    input_dict = {}
    input_dict['project_id'] = "test_project"
    input_dict['region'] = "test_region"
    input_dict['dataset_id'] = "test_dataset_id"
    input_dict['dicom_store_id'] = "test_dicom_store_id"
    input_dict['search_type'] = "not exist type"

    expected_invalid_dict = {}
    expected_invalid_dict['result'] = []
    expected_invalid_dict['status'] = (
        'Search type can only be "studies", "instances" or "series"')
    expected_invalid_dict['input'] = input_dict
    expected_invalid_dict['success'] = False

    fc = FakeHttpClient()
    FakeClient.return_value = fc
    with TestPipeline() as p:
      results = (p | beam.Create([input_dict]) | DicomSearch())
      assert_that(results, equal_to([expected_invalid_dict]))
Example #8
  def test_Qido_search_small_buffer_flush(self, FakeClient):
    input_dict = {}
    input_dict['project_id'] = "test_project"
    input_dict['region'] = "test_region"
    input_dict['dataset_id'] = "test_dataset_id"
    input_dict['dicom_store_id'] = "test_dicom_store_id"
    input_dict['search_type'] = "instances"

    fc = FakeHttpClient()
    FakeClient.return_value = fc

    expected_dict = {}
    expected_dict['result'] = fc.dicom_metadata
    expected_dict['status'] = 200
    expected_dict['input'] = input_dict
    expected_dict['success'] = True

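    # buffer_size=1 should force DicomSearch to flush its request buffer after
    # every element, so all five identical inputs still yield a result.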
    with TestPipeline() as p:
      results = (p | beam.Create([input_dict] * 5) | DicomSearch(buffer_size=1))
      assert_that(results, equal_to([expected_dict] * 5))