Example #1
 def test_create_ingest_job(self):
     """Method to test creation o a ingest job from a config_data dict"""
     ingest_mgmr = IngestManager()
     ingest_mgmr.validate_config_file(self.example_config_data)
     ingest_mgmr.validate_properties()
     ingest_mgmr.owner = self.user.id
     job = ingest_mgmr.create_ingest_job()
     assert (job.id is not None)
Example #2
 def test_create_ingest_credentials(self):
     """"""
     ingest_mgmr = IngestManager()
     ingest_mgmr.validate_config_file(self.example_config_data)
     ingest_mgmr.validate_properties()
     ingest_mgmr.owner = self.user.pk
     job = ingest_mgmr.create_ingest_job()
     ingest_mgmr.job = job
Example #3
 def test_generate_upload_queue_args_tile_job(self):
     """Ensure ingest_type set properly"""
     ingest_mgmr = IngestManager()
     ingest_mgmr.validate_config_file(self.example_config_data)
     ingest_mgmr.validate_properties()
     ingest_mgmr.owner = self.user.pk
     job = ingest_mgmr.create_ingest_job()
     actual = ingest_mgmr._generate_upload_queue_args(job)
     assert actual['ingest_type'] == IngestJob.TILE_INGEST
     assert actual['z_chunk_size'] == 16
Example #4
 def test_generate_upload_queue_args_volumetric_job(self):
     """Ensure ingest_type set properly"""
     ingest_mgmr = IngestManager()
     ingest_mgmr.validate_config_file(self.volumetric_config_data)
     ingest_mgmr.validate_properties()
     ingest_mgmr.owner = self.user.pk
     job = ingest_mgmr.create_ingest_job()
     actual = ingest_mgmr._generate_upload_queue_args(job)
     assert actual['ingest_type'] == IngestJob.VOLUMETRIC_INGEST
     assert actual['z_chunk_size'] == 64
     assert actual['ingest_queue'] is None
Example #5
 def test_create_ingest_job_volumetric(self):
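     """Method to test creation of a volumetric ingest job from a config_data dict"""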
     ingest_mgmr = IngestManager()
     ingest_mgmr.validate_config_file(self.volumetric_config_data)
     ingest_mgmr.validate_properties()
     ingest_mgmr.owner = self.user.pk
     job = ingest_mgmr.create_ingest_job()
     assert (job.id is not None)
     assert (job.ingest_type == IngestJob.VOLUMETRIC_INGEST)
     assert (job.tile_size_x == 1024)
     assert (job.tile_size_y == 1024)
     assert (job.tile_size_z == 64)
     assert (job.tile_size_t == 1)
Example #6
 def test_create_ingest_job(self):
     """Method to test creation o a ingest job from a config_data dict"""
     ingest_mgmr = IngestManager()
     ingest_mgmr.validate_config_file(self.example_config_data)
     ingest_mgmr.validate_properties()
     ingest_mgmr.owner = self.user.pk
     job = ingest_mgmr.create_ingest_job()
     assert (job.id is not None)
     assert (job.ingest_type == IngestJob.TILE_INGEST)
     assert (job.tile_size_x == 512)
     assert (job.tile_size_y == 512)
     assert (job.tile_size_z == 1)
     assert (job.tile_size_t == 1)
Example #7
 def test_generate_upload_tasks(self):
     """
     Test that the correct number of messages are being uploaded
     """
     ingest_mgmr = IngestManager()
     ingest_mgmr.validate_config_file(self.example_config_data)
     ingest_mgmr.validate_properties()
     ingest_mgmr.owner = self.user.pk
     job = ingest_mgmr.create_ingest_job()
     ingest_mgmr.job = job
     with patch.object(IngestManager, 'upload_task_file') as mock_method:
         ingest_mgmr.generate_upload_tasks(job.id)
         self.assertEqual(ingest_mgmr.file_index, 1)
         self.assertEqual(ingest_mgmr.num_of_chunks, 48)
         self.assertEqual(ingest_mgmr.count_of_tiles, 640)
Example #8
    def test_upload_tile_index_table(self):
        """"""
        ingest_mgmr = IngestManager()
        ingest_mgmr.validate_config_file(self.example_config_data)
        ingest_mgmr.validate_properties()
        ingest_mgmr.owner = self.user.pk
        ingest_job = ingest_mgmr.create_ingest_job()
        assert (ingest_job.id is not None)

        # Get the project information for this job
        bosskey = ingest_job.collection + '&' + ingest_job.experiment + '&' + ingest_job.channel_layer
        lookup_key = (LookUpKey.get_lookup_key(bosskey)).lookup_key
        [col_id, exp_id, ch_id] = lookup_key.split('&')
        project_info = [col_id, exp_id, ch_id]
        proj_name = ingest_job.collection + '&' + ingest_job.experiment
        tile_index_db = BossTileIndexDB(proj_name)
        tilebucket = TileBucket(str(col_id) + '&' + str(exp_id))

        for time_step in range(ingest_job.t_start, ingest_job.t_stop, 1):
            # For each time step, compute the chunks and tile keys

            for z in range(ingest_job.z_start, ingest_job.z_stop, 16):
                for y in range(ingest_job.y_start, ingest_job.y_stop,
                               ingest_job.tile_size_y):
                    for x in range(ingest_job.x_start, ingest_job.x_stop,
                                   ingest_job.tile_size_x):

                        # compute the chunk indices
                        chunk_x = int(x / ingest_job.tile_size_x)
                        chunk_y = int(y / ingest_job.tile_size_y)
                        chunk_z = int(z / 16)

                        # Compute the number of tiles in the chunk
                        if ingest_job.z_stop - z >= 16:
                            num_of_tiles = 16
                        else:
                            num_of_tiles = ingest_job.z_stop - z

                        # Generate the chunk key
                        chunk_key = (BossBackend(
                            ingest_mgmr.config)).encode_chunk_key(
                                num_of_tiles, project_info,
                                ingest_job.resolution, chunk_x, chunk_y,
                                chunk_z, time_step)
                        # Upload the chunk to the tile index db
                        tile_index_db.createCuboidEntry(
                            chunk_key, ingest_job.id)
                        key_map = {}
                        for tile in range(0, num_of_tiles):
                            # get the object key and upload it
                            #tile_key = tilebucket.encodeObjectKey(ch_id, ingest_job.resolution,
                            #                              chunk_x, chunk_y, tile, time_step)
                            tile_key = 'fakekey' + str(tile)
                            tile_index_db.markTileAsUploaded(
                                chunk_key, tile_key)

                        # for each chunk key, delete entries from the tile_bucket

        # Check if data has been uploaded
        chunks = list(tile_index_db.getTaskItems(ingest_job.id))
        assert (len(chunks) != 0)

        ingest_mgmr.delete_tiles(ingest_job)
        chunks = list(tile_index_db.getTaskItems(ingest_job.id))
        assert (len(chunks) == 0)
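
All of the examples above repeat the same setup: instantiate IngestManager, validate the config file, validate the properties, assign the test user as the owner, and create the job. A minimal sketch of a shared helper for that setup is shown below; it assumes a test class like the ones above (with self.user and a config dict such as self.example_config_data) and is not part of the original tests.

    def _create_ingest_job(self, config_data):
        # Hypothetical helper: runs the shared setup used by every example
        # above and returns the manager together with the created job.
        ingest_mgmr = IngestManager()
        ingest_mgmr.validate_config_file(config_data)
        ingest_mgmr.validate_properties()
        ingest_mgmr.owner = self.user.pk
        job = ingest_mgmr.create_ingest_job()
        return ingest_mgmr, job

A test such as Example #6 could then start with ingest_mgmr, job = self._create_ingest_job(self.example_config_data) and keep only its assertions.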