Example #1
 def test_init(self):
     data_storage = s3_storage.S3Storage(
         storage_dir=self.storage_dir,
         nbr_workers=self.nbr_workers,
         access_point='test_bucket_name',
     )
     self.assertEqual(data_storage.bucket_name, 'test_bucket_name')
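All of these tests rely on a setUp fixture that defines self.storage_dir, self.nbr_workers, a mocked S3 connection (self.conn, self.bucket_name), and test images (self.im, self.im_name, self.im_stack, self.stack_names). That fixture is not shown in these excerpts; below is a minimal sketch of what it could look like, assuming the moto library is used to mock S3. The bucket name, worker count, image values, and file name are illustrative assumptions, not taken from the original test suite.

import boto3
import numpy as np
from moto import mock_s3  # moto < 5.0; newer releases expose mock_aws instead
from unittest import TestCase


class S3StorageTest(TestCase):

    def setUp(self):
        # Start the S3 mock so no real AWS requests are made
        self.mock = mock_s3()
        self.mock.start()
        self.bucket_name = 'test-imaging-bucket'  # hypothetical bucket name
        self.conn = boto3.resource('s3', region_name='us-east-1')
        self.conn.create_bucket(Bucket=self.bucket_name)
        self.storage_dir = 'raw_frames/ML-2005-05-23-10-00-00-0001'
        self.nbr_workers = 4
        # Small uint16 test image matching the (10, 15) shape asserted in the tests
        self.im = np.ones((10, 15), np.uint16) * 3000
        self.im_name = 'im_c000_z000_t000_p000.png'  # hypothetical file name

    def tearDown(self):
        # Stop the S3 mock after each test
        self.mock.stop()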
Example #2
 def test_upload_serialized(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     key = "/".join([self.storage_dir, self.im_name])
     key_byte_tuple = (key, self.im_encoded)
     data_storage.upload_serialized(key_byte_tuple)
     byte_string = self.conn.Object(self.bucket_name,
                                    key).get()['Body'].read()
     nose.tools.assert_equal(byte_string, self.im_encoded)
Example #3
 def test_upload_file_get_im(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.upload_file(file_path=self.file_path)
     # Load the temporary image
     im_out = data_storage.get_im(file_name=self.im_name)
     # Assert that contents are the same
     nose.tools.assert_equal(im_out.dtype, np.uint16)
     numpy.testing.assert_array_equal(im_out, self.im)
Example #4
 def test_upload_im(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     key = "/".join([self.storage_dir, self.im_name])
     data_storage.upload_im(im_name=self.im_name, im=self.im)
     byte_string = self.conn.Object(self.bucket_name,
                                    key).get()['Body'].read()
     im = im_utils.deserialize_im(byte_string)
     numpy.testing.assert_array_equal(im, self.im)
Example #5
 def test_upload_existing_im(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     key = "/".join([self.storage_dir, self.im_name])
     data_storage.upload_im(im_name=self.im_name, im=self.im)
     with captured_output() as (out, err):
         data_storage.upload_im(im_name=self.im_name, im=self.im)
     std_output = out.getvalue().strip()
     self.assertEqual(
         std_output,
         "Key {} already exists.".format(key),
     )
Example #6
 def test_get_stack(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.upload_frames(self.stack_names, self.im_stack)
     # Load image stack in memory
     im_out = data_storage.get_stack(self.stack_names)
     nose.tools.assert_equal(self.im_stack.shape, im_out.shape)
     for im_nbr in range(self.im_stack.shape[-1]):
         # Assert that contents are the same
         numpy.testing.assert_array_equal(
             im_out[..., im_nbr],
             self.im_stack[..., im_nbr],
         )
Example #7
def migrate_db(credentials_filename):
    """
    Updates sha256 checksums for all files and frames

    :param credentials_filename: Full path to DB credentials file
    """
    # Edit this depending on where your database credential file is stored
    # This assumes it's stored in dir above imagingDB
    dir_name = os.path.abspath('..')
    dest_dir = os.path.join(dir_name, 'temp_downloads')
    os.makedirs(dest_dir, exist_ok=True)
    credentials_str = db_utils.get_connection_str(
        credentials_filename=credentials_filename, )
    # Get files and compute checksums
    with db_ops.session_scope(credentials_str) as session:
        files = session.query(db_ops.FileGlobal)
        for file in files:
            if file.sha256 is None:
                data_loader = s3_storage.S3Storage(
                    storage_dir=file.storage_dir, )
                file_name = file.metadata_json["file_origin"]
                file_name = file_name.split("/")[-1]
                dest_path = os.path.join(dest_dir, file_name)
                data_loader.download_file(
                    file_name=file_name,
                    dest_path=dest_path,
                )
                checksum = meta_utils.gen_sha256(dest_path)
                file.sha256 = checksum

    # Get frames and compute checksums
    with db_ops.session_scope(credentials_str) as session:
        frames = session.query(db_ops.Frames)
        for frame in frames:
            if frame.sha256 is None:
                data_loader = s3_storage.S3Storage(
                    storage_dir=frame.frames_global.storage_dir, )
                im = data_loader.get_im(frame.file_name)
                checksum = meta_utils.gen_sha256(im)
                frame.sha256 = checksum
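As a usage note, migrate_db could be invoked from a small command-line wrapper such as the sketch below; the --login argument name and the entry-point layout are assumptions for illustration, not part of the original script.

if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--login',
        type=str,
        required=True,
        help='Full path to JSON file containing DB credentials',
    )
    args = parser.parse_args()
    # Recompute checksums for all files and frames missing sha256 entries
    migrate_db(credentials_filename=args.login)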
Example #8
 def test_download_file(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.upload_file(file_path=self.file_path)
     # Download the temporary image then read it and validate
     data_storage.download_file(
         file_name=self.im_name,
         dest_dir=self.temp_path,
     )
     # Read downloaded file and assert that contents are the same
     dest_path = os.path.join(self.temp_path, self.im_name)
     im_out = cv2.imread(dest_path,
                         cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH)
     nose.tools.assert_equal(im_out.dtype, np.uint16)
     numpy.testing.assert_array_equal(im_out, self.im)
Example #9
 def test_upload_file(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.upload_file(file_path=self.file_path)
     # Make sure the image uploaded in setUp is unchanged
     key = "/".join([self.storage_dir, self.im_name])
     byte_string = self.conn.Object(
         self.bucket_name,
         key,
     ).get()['Body'].read()
     # Construct an array from the bytes and decode image
     im_out = im_utils.deserialize_im(byte_string)
     # Assert that contents are the same
     nose.tools.assert_equal(im_out.dtype, np.uint16)
     numpy.testing.assert_array_equal(im_out, self.im)
Example #10
 def test_upload_frames(self):
     # Upload image stack
     storage_dir = "raw_frames/ML-2005-05-23-10-00-00-0001"
     data_storage = s3_storage.S3Storage(storage_dir, self.nbr_workers)
     data_storage.upload_frames(self.stack_names, self.im_stack)
     # Get images from uploaded stack and validate that the contents are unchanged
     for im_nbr in range(len(self.stack_names)):
         key = "/".join([storage_dir, self.stack_names[im_nbr]])
         byte_string = self.conn.Object(self.bucket_name,
                                        key).get()['Body'].read()
         # Construct an array from the bytes and decode image
         im = im_utils.deserialize_im(byte_string)
         # Assert that contents are the same
         nose.tools.assert_equal(im.dtype, np.uint16)
         nose.tools.assert_equal(im.shape, (10, 15))
         numpy.testing.assert_array_equal(im, self.im_stack[..., im_nbr])
Example #11
 def test_get_stack_with_shape(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.upload_frames(self.stack_names, self.im_stack)
     # Load image stack in memory
     stack_shape = (10, 15, 1, 2)
     im_out = data_storage.get_stack_with_shape(self.stack_names,
                                                stack_shape=stack_shape,
                                                bit_depth=np.uint16)
     im_out = np.squeeze(im_out)
     nose.tools.assert_equal(self.im_stack.shape, im_out.shape)
     for im_nbr in range(self.im_stack.shape[-1]):
         # Assert that contents are the same
         numpy.testing.assert_array_equal(
             im_out[..., im_nbr],
             self.im_stack[..., im_nbr],
         )
Example #12
 def test_download_files(self):
     storage_dir = "raw_frames/ML-2005-05-23-10-00-00-0001"
     data_storage = s3_storage.S3Storage(storage_dir, self.nbr_workers)
     data_storage.upload_frames(self.stack_names, self.im_stack)
     data_storage.download_files(
         file_names=self.stack_names,
         dest_dir=self.temp_path,
     )
     # Read downloaded file and assert that contents are the same
     for i, im_name in enumerate(self.stack_names):
         dest_path = os.path.join(self.temp_path, im_name)
         im_out = cv2.imread(
             dest_path,
             cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH,
         )
         nose.tools.assert_equal(im_out.dtype, np.uint16)
         numpy.testing.assert_array_equal(im_out, self.im_stack[..., i])
Example #13
 def test_upload_frames_color(self):
     # Create color image stack
     im_stack = np.ones((10, 15, 3, 2), np.uint16) * 3000
     im_stack[0:5, 2:4, :, 0] = 42
     im_stack[3:7, 12:14, :, 1] = 10000
     # Expected color image shape
     expected_shape = im_stack[..., 0].shape
     # Mock frame upload
     storage_dir = "raw_frames/ML-2005-05-23-10-00-00-0001"
     data_storage = s3_storage.S3Storage(storage_dir, self.nbr_workers)
     data_storage.upload_frames(self.stack_names, im_stack)
     # Get images and validate that the contents are unchanged
     for im_nbr in range(len(self.stack_names)):
         key = "/".join([storage_dir, self.stack_names[im_nbr]])
         byte_string = self.conn.Object(self.bucket_name,
                                        key).get()['Body'].read()
         # Construct an array from the bytes and decode image
         im = im_utils.deserialize_im(byte_string)
         # Assert that contents are the same
         nose.tools.assert_equal(im.shape, expected_shape)
         nose.tools.assert_equal(im.dtype, np.uint16)
         numpy.testing.assert_array_equal(im, im_stack[..., im_nbr])
Example #14
    def test_get_stack_from_meta(self):
        # Upload image stack
        storage_dir = "raw_frames/ML-2005-05-23-10-00-00-0001"
        data_storage = s3_storage.S3Storage(storage_dir, self.nbr_workers)
        data_storage.upload_frames(self.stack_names, self.im_stack)
        global_meta = {
            "storage_dir": storage_dir,
            "nbr_frames": 2,
            "im_height": 10,
            "im_width": 15,
            "nbr_slices": 1,
            "nbr_channels": 2,
            "im_colors": 1,
            "bit_depth": "uint16",
            "nbr_timepoints": 1,
            "nbr_positions": 1,
        }
        frames_meta = meta_utils.make_dataframe(
            nbr_frames=global_meta["nbr_frames"], )

        nbr_frames = self.im_stack.shape[2]
        sha = [None] * nbr_frames
        for i in range(nbr_frames):
            sha[i] = meta_utils.gen_sha256(self.im_stack[..., i])

        frames_meta.loc[0] = [0, 0, 0, "A", "im1.png", 0, sha[0]]
        frames_meta.loc[1] = [1, 0, 0, "B", "im2.png", 0, sha[1]]
        im_stack, dim_order = data_storage.get_stack_from_meta(
            global_meta,
            frames_meta,
        )
        # Stack has X = 10, Y = 15, grayscale, Z = 1, C = 2, T = 1, P = 1
        # so expected stack shape and order should be:
        expected_shape = (10, 15, 2)
        nose.tools.assert_equal(im_stack.shape, expected_shape)
        nose.tools.assert_equal(dim_order, "XYC")
Example #15
 def test_existing_storage_path(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.upload_file(file_path=self.file_path)
     storage_path = os.path.join(self.storage_dir, self.im_name)
     self.assertFalse(
         data_storage.nonexistent_storage_path(storage_path=storage_path, ))
Example #16
 def test_nonexistent_storage_path(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     storage_path = os.path.join(self.storage_dir, self.im_name)
     self.assertTrue(
         data_storage.nonexistent_storage_path(storage_path=storage_path, ))
Example #17
 def test_assert_unique_id_exists(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.upload_file(file_path=self.file_path)
     data_storage.assert_unique_id()
Example #18
 def test_assert_unique_id(self):
     data_storage = s3_storage.S3Storage(self.storage_dir, self.nbr_workers)
     data_storage.assert_unique_id()