# Example 1
    def test_dicom_store_instance(self):
        # Store DICOM files from a GCS bucket into an empty DICOM store,
        # then verify the store's instance metadata matches expectations.
        input_dict = {
            'project_id': self.project,
            'region': REGION,
            'dataset_id': DATA_SET_ID,
            'dicom_store_id': self.temp_dicom_store,
            'search_type': "instances",
        }

        expected_dict = {
            'result': self.expected_output_metadata,
            'status': 200,
            'input': input_dict,
            'success': True,
        }

        # First pipeline: upload every matched file and expect each upload
        # to report success.
        with TestPipeline() as p:
            gcs_path = DICOM_FILES_PATH + "/*"
            results = (
                p
                | fileio.MatchFiles(gcs_path)
                | fileio.ReadMatches()
                | UploadToDicomStore(input_dict, 'fileio')
                | beam.Map(lambda x: x['success']))
            assert_that(results, equal_to([True] * NUM_INSTANCE))

        # Second pipeline: search the store and compare the full response.
        with TestPipeline() as p:
            results = (p | beam.Create([input_dict]) | DicomSearch())
            assert_that(results, equal_to([expected_dict]))
    def test_dicom_store_instance_from_gcs(self):
        # Store DICOM files from a GCS bucket into an empty DICOM store,
        # then verify the store's metadata via the HTTP client directly.
        store_request = {
            'project_id': self.project,
            'region': REGION,
            'dataset_id': DATA_SET_ID,
            'dicom_store_id': self.temp_dicom_store,
        }

        with self.test_pipeline as p:
            gcs_path = DICOM_FILES_PATH + "/io_test_files/*"
            results = (
                p
                | fileio.MatchFiles(gcs_path)
                | fileio.ReadMatches()
                | UploadToDicomStore(store_request, 'fileio')
                | beam.Map(lambda x: x['success']))
            assert_that(
                results,
                equal_to([True] * NUM_INSTANCE),
                label='store first assert')

        # Query instance metadata with the client and check the status code.
        result, status_code = DicomApiHttpClient().qido_search(
            self.project, REGION, DATA_SET_ID, self.temp_dicom_store,
            'instances')

        self.assertEqual(status_code, 200)

        # Order-insensitive list comparison; behaves the same across
        # supported Python versions.
        self.assertCountEqual(result, self.expected_output_all_metadata)
# Example 3
  def test_store_fileio_file_small_buffer_flush(self, FakeClient):
    """With buffer_size=1 each matched file is flushed individually and
    every upload succeeds; all records end up in the fake store."""
    input_dict = {
        'project_id': "test_project",
        'region': "test_region",
        'dataset_id': "test_dataset_id",
        'dicom_store_id': "test_dicom_store_id",
    }

    fc = FakeHttpClient()
    FakeClient.return_value = fc

    # Write one temp file per patient record, each holding the JSON body.
    temp_dir = '%s%s' % (self._new_tempdir(), os.sep)
    patient_records = [
        {'PatientName': 'George', 'Age': 23, 'TestResult': 'Negative'},
        {'PatientName': 'Peter', 'Age': 54, 'TestResult': 'Positive'},
        {'PatientName': 'Zen', 'Age': 27, 'TestResult': 'Negative'},
    ]
    for record in patient_records:
      self._create_temp_file(dir=temp_dir, content=json.dumps(record))

    with TestPipeline() as p:
      results = (
          p
          | beam.Create([FileSystems.join(temp_dir, '*')])
          | fileio.MatchAll()
          | fileio.ReadMatches()
          | UploadToDicomStore(input_dict, 'fileio', buffer_size=1)
          | beam.Map(lambda x: x['success']))
      assert_that(results, equal_to([True] * 3))
    # Every record must have reached the fake client's store.
    for record in patient_records:
      self.assertTrue(record in fc.dicom_metadata)
# Example 4
  def test_store_byte_file_small_buffer_flush(self, FakeClient):
    """With buffer_size=1 each byte payload is flushed on its own and
    every upload succeeds; all records end up in the fake store."""
    input_dict = {
        'project_id': "test_project",
        'region': "test_region",
        'dataset_id': "test_dataset_id",
        'dicom_store_id': "test_dicom_store_id",
    }

    fc = FakeHttpClient()
    FakeClient.return_value = fc

    patient_records = [
        {'PatientName': 'George', 'Age': 23, 'TestResult': 'Negative'},
        {'PatientName': 'Peter', 'Age': 54, 'TestResult': 'Positive'},
        {'PatientName': 'Zen', 'Age': 27, 'TestResult': 'Negative'},
    ]
    # Feed the pipeline the UTF-8 encoded JSON form of each record.
    byte_payloads = [
        json.dumps(record).encode("utf-8") for record in patient_records
    ]
    with TestPipeline() as p:
      results = (
          p
          | beam.Create(byte_payloads)
          | UploadToDicomStore(input_dict, 'bytes', buffer_size=1)
          | beam.Map(lambda x: x['success']))
      assert_that(results, equal_to([True] * 3))
    # Every record must have reached the fake client's store.
    for record in patient_records:
      self.assertTrue(record in fc.dicom_metadata)
# Example 5
  def test_missing_parameters(self, FakeClient):
    """A store request lacking dataset_id must raise ValueError.

    The assertRaisesRegex block wraps pipeline construction only (the
    pipeline is never run), so the test expects the transform to validate
    its destination dict eagerly, while the graph is being built.
    """
    # Deliberately omit 'dataset_id' (and 'dicom_store_id').
    input_dict = {}
    input_dict['project_id'] = "test_project"
    input_dict['region'] = "test_region"

    fc = FakeHttpClient()
    FakeClient.return_value = fc
    with self.assertRaisesRegex(ValueError,
                                "Must have dataset_id in the dict."):
      p = TestPipeline()
      _ = (p | beam.Create(['']) | UploadToDicomStore(input_dict, 'bytes'))
# Example 6
  def test_destination_notfound(self, FakeClient):
    """Uploading to a nonexistent DICOM store yields a failure result."""
    # Point every destination field at a store that does not exist.
    input_dict = {
        'project_id': "wrong_project",
        'region': "wrong_region",
        'dataset_id': "wrong_dataset_id",
        'dicom_store_id': "wrong_dicom_store_id",
    }

    # The transform should emit one result marking the upload as failed.
    expected_invalid_dict = {
        'status': 204,
        'input': '',
        'success': False,
    }

    fc = FakeHttpClient()
    FakeClient.return_value = fc
    with TestPipeline() as p:
      results = (
          p | beam.Create(['']) | UploadToDicomStore(input_dict, 'bytes'))
      assert_that(results, equal_to([expected_invalid_dict]))
# Example 7
  def test_store_byte_file(self, FakeClient):
    """A single JSON payload supplied as bytes uploads successfully and
    lands in the fake client's store."""
    input_dict = {
        'project_id': "test_project",
        'region': "test_region",
        'dataset_id': "test_dataset_id",
        'dicom_store_id': "test_dicom_store_id",
    }

    fc = FakeHttpClient()
    FakeClient.return_value = fc

    dict_input = {'PatientName': 'George', 'Age': 23, 'TestResult': 'Negative'}
    bytes_input = json.dumps(dict_input).encode("utf-8")
    with TestPipeline() as p:
      results = (
          p
          | beam.Create([bytes_input])
          | UploadToDicomStore(input_dict, 'bytes')
          | beam.Map(lambda x: x['success']))
      assert_that(results, equal_to([True]))
    self.assertTrue(dict_input in fc.dicom_metadata)