def test_get_ndarray(self):
     """
     Get some data from the server and check it.
     """
     start, stop = (0,9,5,50), (1,10,20,150)
     dvid_vol = VoxelsAccessor( TEST_DVID_SERVER, self.data_uuid, self.data_name )
     subvolume = dvid_vol.get_ndarray( start, stop )
     assert (self.original_data[roi_to_slice(start, stop)] == subvolume).all()
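# These tests rely on a roi_to_slice() helper to turn (start, stop) coordinate
# tuples into a slicing expression. A minimal sketch of such a helper (the actual
# implementation used by the tests may differ):
def roi_to_slice(start, stop):
    """Convert start/stop coordinate tuples into a tuple of slice objects."""
    return tuple(slice(a, b) for a, b in zip(start, stop))

# e.g. data[roi_to_slice((0,9,5,50), (1,10,20,150))] is data[0:1, 9:10, 5:20, 50:150]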
Example #2
 def test_get_ndarray(self):
     """
     Get some data from the server and check it.
     """
     start, stop = (50, 5, 9, 0), (150, 20, 10, 1)
     dvid_vol = VoxelsAccessor(TEST_DVID_SERVER, self.data_uuid,
                               self.data_name)
     subvolume = dvid_vol.get_ndarray(start, stop)
     assert (self.original_data[roi_to_slice(start,
                                             stop)] == subvolume).all()
 def test_get_ndarray_throttled(self):
     """
     Get some data from the server and check it.
     Enable throttle with throttle=True
      
     Note: This test doesn't really exercise our handling of 503 responses...
     """
     start, stop = (50,5,9,0), (150,20,10,1)
     dvid_vol = VoxelsAccessor( TEST_DVID_SERVER, self.data_uuid, self.data_name, throttle=True )
     subvolume = dvid_vol.get_ndarray( start, stop )
     assert (self.original_data[roi_to_slice(start, stop)] == subvolume).all()
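# The docstring above notes that these tests don't exercise 503 handling. When a
# request is throttled, a busy DVID server can answer HTTP 503 and the client is
# expected to retry after a delay. A generic sketch of that retry pattern using
# the 'requests' library and a placeholder URL (not libdvid's actual code):
import time
import requests

def get_with_retry(url, max_retries=5, delay_seconds=1.0):
    """Retry a GET request while the server responds with 503 (busy)."""
    for _ in range(max_retries):
        response = requests.get(url)
        if response.status_code != 503:
            response.raise_for_status()
            return response.content
        time.sleep(delay_seconds)
    raise RuntimeError("Server still busy after {} retries".format(max_retries))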
    def test_get_ndarray_throttled_2(self):
        """
        Get some data from the server and check it.
        Enable throttle via query_args
 
        Note: This test doesn't really exercise our handling of 503 responses...
        """
        start, stop = (0,9,5,50), (1,10,20,150)
        dvid_vol = VoxelsAccessor( TEST_DVID_SERVER, self.data_uuid, self.data_name, query_args={'throttle' : 'on'} )
        subvolume = dvid_vol.get_ndarray( start, stop )
        assert (self.original_data[roi_to_slice(start, stop)] == subvolume).all()
 def test_extra_query_args(self):
     """
     Create a VoxelsAccessor that uses extra query args.
     They come after the '?' in the REST URI.  For example:
     http://localhost/api/node/mydata/_0_1_2/10_10_10/0_0_0?roi=whatever&attenuation=3
     """
     # Retrieve from server
     start, stop = (0,9,5,50), (1,10,20,150)
     query_args = {'roi' : 'some_ref', 'attenuation' : 5}
     dvid_vol = VoxelsAccessor( TEST_DVID_SERVER, self.data_uuid, self.data_name, query_args=query_args )
     subvolume = dvid_vol.get_ndarray( start, stop )
     
     # Compare
     assert (subvolume == self.original_data[roi_to_slice(start, stop)]).all()
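# The query_args dict ends up encoded into the query string after the '?' shown in
# the docstring above. A minimal sketch of that encoding step (the library may
# assemble its URLs differently):
from urllib.parse import urlencode

query_args = {'roi': 'some_ref', 'attenuation': 5}
uri = "http://localhost/api/node/mydata/_0_1_2/10_10_10/0_0_0?" + urlencode(query_args)
# -> ...0_0_0?roi=some_ref&attenuation=5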
Example #6
    def test_get_ndarray_throttled_2(self):
        """
        Get some data from the server and check it.
        Enable throttle via query_args
 
        Note: This test doesn't really exercise our handling of 503 responses...
        """
        start, stop = (50, 5, 9, 0), (150, 20, 10, 1)
        dvid_vol = VoxelsAccessor(TEST_DVID_SERVER,
                                  self.data_uuid,
                                  self.data_name,
                                  query_args={'throttle': 'on'})
        subvolume = dvid_vol.get_ndarray(start, stop)
        assert (self.original_data[roi_to_slice(start,
                                                stop)] == subvolume).all()
Example #7
def download_to_h5( hostname, uuid, instance, roi, output_filepath, dset_name=None, compression='lzf', overlap_px=0):
    """
    """
    ns = DVIDNodeService(hostname, uuid)
    va = VoxelsAccessor(hostname, uuid, instance, throttle=True)
    
    dset_name = dset_name or instance

    assert roi, "Must provide a ROI"
    logger.info("Downloading {hostname}/api/node/{uuid}/{instance}?roi={roi} to {output_filepath}/{dset_name}".format(**locals()))

    substacks, _packing_factor = ns.get_roi_partition(roi, SUBSTACK_SIZE // DVID_BLOCK_SIZE)

    # Substack tuples are (size, z, y, x)
    substacks_zyx = np.array(substacks)[:, 1:]
    
    # If the user specified an 'overlap', we add it to all substacks.
    # Technically, this isn't very efficient, because a lot of overlapping
    # pixels on the interior of the ROI will be fetched twice.
    substacks_zyx[:,0] -= overlap_px
    substacks_zyx[:,1] += overlap_px

    roi_bb = ( np.min(substacks_zyx, axis=0),
               np.max(substacks_zyx, axis=0)+SUBSTACK_SIZE )
    
    with h5py.File(output_filepath, 'a') as output_file:
        try:
            del output_file[dset_name]
        except KeyError:
            pass
        
        dset = output_file.create_dataset( dset_name, shape=roi_bb[1], dtype=va.dtype, chunks=True, compression=compression )
    
        for i, substack_zyx in enumerate(substacks_zyx):
            logger.info("Substack {}/{} {}: Downloading...".format( i, len(substacks_zyx), list(substack_zyx) ))
            
            # Append a singleton channel axis
            substack_bb = np.array(( tuple(substack_zyx) + (0,),
                                     tuple(substack_zyx + SUBSTACK_SIZE) + (1,) ))
            
            # Includes singleton channel
            substack_data = va.get_ndarray(*substack_bb)

            logger.info("Substack {}/{} {}: Writing...".format( i, len(substacks_zyx), list(substack_zyx) ))
            dset[bb_to_slicing(*substack_bb[:,:-1])] = substack_data[...,0]

    logger.info("DONE Downloading {hostname}/api/node/{uuid}/{instance}?roi={roi} to {output_filepath}/{dset_name}".format(**locals()))
    def test_post_reduced_dim_slicing(self):
        # Cutout dims
        start, stop = (64,32,0,0), (96,64,32,1)
        shape = numpy.subtract( stop, start )
   
        # Generate test data
        new_subvolume = numpy.random.randint( 0,1000, shape ).astype( numpy.uint8 )
   
        # Send to server
        dvid_vol = VoxelsAccessor( TEST_DVID_SERVER, self.data_uuid, self.data_name )
        dvid_vol[64:96, 32:64, 0:32, 0] = new_subvolume[...,0]

        # Now read it back
        read_subvolume = dvid_vol.get_ndarray( start, stop )
        assert (read_subvolume == new_subvolume).all()

        # Modify our master copy so other tests don't get messed up.
        self.original_data[roi_to_slice(start, stop)] = new_subvolume
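# The assignment above indexes the channel axis with an integer (the trailing 0),
# which drops that axis on both sides of the assignment. This is standard numpy
# behavior, illustrated below:
import numpy
a = numpy.zeros((32, 32, 32, 1), dtype=numpy.uint8)
assert a[..., 0].shape == (32, 32, 32)       # integer index removes the axis
assert a[..., 0:1].shape == (32, 32, 32, 1)  # a slice of length 1 keeps it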
Example #9
    def test_extra_query_args(self):
        """
        Create a VoxelsAccessor that uses extra query args.
        They come after the '?' in the REST URI.  For example:
        http://localhost/api/node/mydata/_0_1_2/10_10_10/0_0_0?roi=whatever&attenuation=3
        """
        # Retrieve from server
        start, stop = (50, 5, 9, 0), (150, 20, 10, 1)
        query_args = {'roi': 'some_ref', 'attenuation': 5}
        dvid_vol = VoxelsAccessor(TEST_DVID_SERVER,
                                  self.data_uuid,
                                  self.data_name,
                                  query_args=query_args)
        subvolume = dvid_vol.get_ndarray(start, stop)

        # Compare
        assert (subvolume == self.original_data[roi_to_slice(start,
                                                             stop)]).all()
    def test_zy_post_negative_coordinates(self):
        """
        Just make sure nothing blows up if we post to negative coordinates.
        """
        # Cutout dims (must be block-aligned for the POST)
        start, stop = (-64,0,-32,0), (128,32,32,1)
        shape = numpy.subtract( stop, start )
   
        # Generate test data
        subvolume = numpy.random.randint( 0,1000, shape ).astype(numpy.uint8)
 
        dvid_vol = VoxelsAccessor( TEST_DVID_SERVER, self.data_uuid, self.data_name )
 
        # Send to server
        dvid_vol.post_ndarray(start, stop, subvolume)
         
        # Now try to 'get' data from negative coords
        read_back_vol = dvid_vol.get_ndarray(start, stop)
        assert (read_back_vol == subvolume).all()
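# The POST coordinates above are chosen to be block-aligned. A quick pre-flight
# check, assuming the common DVID block size of 32 px (verify against your
# server's actual block size):
import numpy
BLOCK_SIZE = 32  # assumption; DVID instances are often configured with 32px blocks
start, stop = (-64, 0, -32, 0), (128, 32, 32, 1)
assert not (numpy.array(start[:3]) % BLOCK_SIZE).any()
assert not (numpy.array(stop[:3]) % BLOCK_SIZE).any()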
Example #11
    def test_post_reduced_dim_slicing(self):
        # Cutout dims
        start, stop = (64, 32, 0, 0), (96, 64, 32, 1)
        shape = numpy.subtract(stop, start)

        # Generate test data
        new_subvolume = numpy.random.randint(0, 1000,
                                             shape).astype(numpy.uint8)

        # Send to server
        dvid_vol = VoxelsAccessor(TEST_DVID_SERVER, self.data_uuid,
                                  self.data_name)
        dvid_vol[64:96, 32:64, 0:32, 0] = new_subvolume[..., 0]

        # Now read it back
        read_subvolume = dvid_vol.get_ndarray(start, stop)
        assert (read_subvolume == new_subvolume).all()

        # Modify our master copy so other tests don't get messed up.
        self.original_data[roi_to_slice(start, stop)] = new_subvolume
Example #12
    def test_zy_post_negative_coordinates(self):
        """
        Just make sure nothing blows up if we post to negative coordinates.
        """
        # Cutout dims (must be block-aligned for the POST)
        start, stop = (-64, 0, -32, 0), (128, 32, 32, 1)
        shape = numpy.subtract(stop, start)

        # Generate test data
        subvolume = numpy.random.randint(0, 1000, shape).astype(numpy.uint8)

        dvid_vol = VoxelsAccessor(TEST_DVID_SERVER, self.data_uuid,
                                  self.data_name)

        # Send to server
        dvid_vol.post_ndarray(start, stop, subvolume)

        # Now try to 'get' data from negative coords
        read_back_vol = dvid_vol.get_ndarray(start, stop)
        assert (read_back_vol == subvolume).all()
Example #13
    def test_zz_quickstart_usage(self):
        import json
        import numpy
        from libdvid import DVIDConnection, ConnectionMethod
        from libdvid.voxels import VoxelsAccessor, VoxelsMetadata

        # Open a connection to DVID
        connection = DVIDConnection("127.0.0.1:8000")

        # Get detailed dataset info: /api/repos/info (note: /api is prepended automatically)
        status, body, _error_message = connection.make_request(
            "/repos/info", ConnectionMethod.GET)
        dataset_details = json.loads(body)
        # print(json.dumps( dataset_details, indent=4 ))

        # Create a new remote volume (assuming you already know the uuid of the node)
        uuid = UUID
        voxels_metadata = VoxelsMetadata.create_default_metadata(
            (0, 0, 0, 1), numpy.uint8, 'zyxc', 1.0, "")
        VoxelsAccessor.create_new("127.0.0.1:8000", uuid, "my_volume",
                                  voxels_metadata)

        # Use the VoxelsAccessor convenience class to manipulate a particular data volume
        accessor = VoxelsAccessor("127.0.0.1:8000", uuid, "my_volume")
        # print(dvid_volume.axiskeys, dvid_volume.dtype, dvid_volume.minindex, dvid_volume.shape)

        # Add some data (must be block-aligned)
        # Must include all channels.
        updated_data = numpy.ones((256, 192, 128, 1), dtype=numpy.uint8)
        accessor[256:512, 32:224, 0:128, 0] = updated_data
        # OR:
        #accessor.post_ndarray( (0,10,20,30), (1,110,120,130), updated_data )

        # Read from it (First axis is channel.)
        cutout_array = accessor[300:330, 40:120, 10:110, 0]
        # OR:
        cutout_array = accessor.get_ndarray((300, 40, 10, 0),
                                            (330, 120, 110, 1))

        assert isinstance(cutout_array, numpy.ndarray)
        assert cutout_array.shape == (30, 80, 100, 1)
    def test_post_ndarray(self):
        """
        Modify a remote subvolume and verify that the server wrote it.
        """
        # Cutout dims
        start, stop =  (64,32,0,0), (96,64,32,1)
        shape = numpy.subtract( stop, start )
   
        # Generate test data
        new_subvolume = numpy.random.randint( 0,1000, shape ).astype( numpy.uint8 )
   
        # Send to server
        dvid_vol = VoxelsAccessor( TEST_DVID_SERVER, self.data_uuid, self.data_name )
        dvid_vol.post_ndarray(start, stop, new_subvolume)

        # Now read it back
        read_subvolume = dvid_vol.get_ndarray( start, stop )
        assert (read_subvolume == new_subvolume).all()

        # Modify our master copy so other tests don't get messed up.
        self.original_data[roi_to_slice(start, stop)] = new_subvolume
Example #15
   def test_zz_quickstart_usage(self):
       import json
       import numpy
       from libdvid import DVIDConnection, ConnectionMethod
       from libdvid.voxels import VoxelsAccessor, VoxelsMetadata
          
       # Open a connection to DVID
       connection = DVIDConnection( "localhost:8000" )
         
       # Get detailed dataset info: /api/repos/info (note: /api is prepended automatically)
       status, body, error_message = connection.make_request( "/repos/info", ConnectionMethod.GET)
       dataset_details = json.loads(body)
       # print json.dumps( dataset_details, indent=4 )
         
       # Create a new remote volume (assuming you already know the uuid of the node)
       uuid = UUID
       voxels_metadata = VoxelsMetadata.create_default_metadata( (1,0,0,0), numpy.uint8, 'cxyz', 1.0, "" )
       VoxelsAccessor.create_new( "localhost:8000", uuid, "my_volume", voxels_metadata )
 
       # Use the VoxelsAccessor convenience class to manipulate a particular data volume     
       accessor = VoxelsAccessor( "localhost:8000", uuid, "my_volume" )
       # print dvid_volume.axiskeys, dvid_volume.dtype, dvid_volume.minindex, dvid_volume.shape
          
       # Add some data (must be block-aligned)
       # Must include all channels.
       # Must be FORTRAN array, using FORTRAN indexing order conventions
       # (Use order='F', and make sure you're indexing it as cxyz)
       updated_data = numpy.ones( (1,128,192,256), dtype=numpy.uint8, order='F' )
       updated_data = numpy.asfortranarray(updated_data)
       accessor[:, 0:128, 32:224, 256:512] = updated_data
       # OR:
       #accessor.post_ndarray( (0,10,20,30), (1,110,120,130), updated_data )
         
       # Read from it (First axis is channel.)
       cutout_array = accessor[:, 10:110, 40:120, 300:330]
       # OR:
       cutout_array = accessor.get_ndarray( (0,10,40,300), (1,110,120,330) )
 
       assert isinstance(cutout_array, numpy.ndarray)
       assert cutout_array.shape == (1,100,80,30)
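# The example above uses the older cxyz / Fortran-order convention, whereas the
# zyxc / C-order quickstart elsewhere on this page uses the reverse axis order.
# Converting a view between the two conventions is just an axis reversal:
import numpy
data_zyxc = numpy.ones((256, 192, 128, 1), dtype=numpy.uint8)
data_cxyz = data_zyxc.transpose()   # reverses all axes: shape becomes (1, 128, 192, 256)
assert data_cxyz.shape == (1, 128, 192, 256)
assert numpy.isfortran(data_cxyz)   # the transposed C-order array is F-contiguous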
Example #16
    def test_post_ndarray(self):
        """
        Modify a remote subvolume and verify that the server wrote it.
        """
        # Cutout dims
        start, stop = (64, 32, 0, 0), (96, 64, 32, 1)
        shape = numpy.subtract(stop, start)

        # Generate test data
        new_subvolume = numpy.random.randint(0, 1000,
                                             shape).astype(numpy.uint8)

        # Send to server
        dvid_vol = VoxelsAccessor(TEST_DVID_SERVER, self.data_uuid,
                                  self.data_name)
        dvid_vol.post_ndarray(start, stop, new_subvolume)

        # Now read it back
        read_subvolume = dvid_vol.get_ndarray(start, stop)
        assert (read_subvolume == new_subvolume).all()

        # Modify our master copy so other tests don't get messed up.
        self.original_data[roi_to_slice(start, stop)] = new_subvolume
   def test_zz_quickstart_usage(self):
       import json
       import numpy
       from libdvid import DVIDConnection, ConnectionMethod
       from libdvid.voxels import VoxelsAccessor, VoxelsMetadata
          
       # Open a connection to DVID
       connection = DVIDConnection( "127.0.0.1:8000" )
         
       # Get detailed dataset info: /api/repos/info (note: /api is prepended automatically)
       status, body, _error_message = connection.make_request( "/repos/info", ConnectionMethod.GET)
       dataset_details = json.loads(body)
       # print(json.dumps( dataset_details, indent=4 ))
         
       # Create a new remote volume (assuming you already know the uuid of the node)
       uuid = UUID
       voxels_metadata = VoxelsMetadata.create_default_metadata( (0,0,0,1), numpy.uint8, 'zyxc', 1.0, "" )
       VoxelsAccessor.create_new( "127.0.0.1:8000", uuid, "my_volume", voxels_metadata )
 
       # Use the VoxelsAccessor convenience class to manipulate a particular data volume     
       accessor = VoxelsAccessor( "127.0.0.1:8000", uuid, "my_volume" )
       # print(dvid_volume.axiskeys, dvid_volume.dtype, dvid_volume.minindex, dvid_volume.shape)
          
       # Add some data (must be block-aligned)
       # Must include all channels.
       updated_data = numpy.ones( (256,192,128,1), dtype=numpy.uint8)
       accessor[256:512, 32:224, 0:128, 0] = updated_data
       # OR:
       #accessor.post_ndarray( (0,10,20,30), (1,110,120,130), updated_data )
         
       # Read from it (First axis is channel.)
       cutout_array = accessor[300:330, 40:120, 10:110, 0]
       # OR:
       cutout_array = accessor.get_ndarray( (300,40,10,0), (330,120,110,1) )
 
       assert isinstance(cutout_array, numpy.ndarray)
       assert cutout_array.shape == (30,80,100,1)
Example #18
def download_to_h5(hostname,
                   uuid,
                   instance,
                   roi,
                   output_filepath,
                   dset_name=None,
                   compression='lzf',
                   overlap_px=0):
    """
    """
    ns = DVIDNodeService(hostname, uuid)
    va = VoxelsAccessor(hostname, uuid, instance, throttle=True)

    dset_name = dset_name or instance

    assert roi, "Must provide a ROI"
    logger.info(
        "Downloading {hostname}/api/node/{uuid}/{instance}?roi={roi} to {output_filepath}/{dset_name}"
        .format(**locals()))

    substacks, _packing_factor = ns.get_roi_partition(
        roi, SUBSTACK_SIZE // DVID_BLOCK_SIZE)

    # Substack tuples are (size, z, y, x)
    substacks_zyx = np.array(substacks)[:, 1:]

    # If the user specified an 'overlap', we add it to all substacks.
    # Technically, this isn't very efficient, because a lot of overlapping
    # pixels on the interior of the ROI will be fetched twice.
    substacks_zyx[:, 0] -= overlap_px
    substacks_zyx[:, 1] += overlap_px

    roi_bb = (np.min(substacks_zyx,
                     axis=0), np.max(substacks_zyx, axis=0) + SUBSTACK_SIZE)

    with h5py.File(output_filepath, 'a') as output_file:
        try:
            del output_file[dset_name]
        except KeyError:
            pass

        dset = output_file.create_dataset(dset_name,
                                          shape=roi_bb[1],
                                          dtype=va.dtype,
                                          chunks=True,
                                          compression=compression)

        for i, substack_zyx in enumerate(substacks_zyx):
            logger.info("Substack {}/{} {}: Downloading...".format(
                i, len(substacks_zyx), list(substack_zyx)))

            # Append a singleton channel axis
            substack_bb = np.array(
                (tuple(substack_zyx) + (0, ),
                 tuple(substack_zyx + SUBSTACK_SIZE) + (1, )))

            # Includes singleton channel
            substack_data = va.get_ndarray(*substack_bb)

            logger.info("Substack {}/{} {}: Writing...".format(
                i, len(substacks_zyx), list(substack_zyx)))
            dset[bb_to_slicing(*substack_bb[:, :-1])] = substack_data[..., 0]

    logger.info(
        "DONE Downloading {hostname}/api/node/{uuid}/{instance}?roi={roi} to {output_filepath}/{dset_name}"
        .format(**locals()))
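# Once the download finishes, the resulting HDF5 dataset can be read back with
# h5py (file path and dataset name below are placeholders):
import h5py
with h5py.File("/tmp/output.h5", "r") as f:
    volume = f["grayscale"][:]   # loads the whole dataset into memory
    print(volume.shape, volume.dtype)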
Example #19
def copy_voxels( source_details,
                 destination_details,
                 transfer_cube_width_px=512,
                 roi=None,
                 subvol_bounds_zyx=None ):
    """
    Transfer voxels data from one DVID server to another.
    
    source_details:
        Either a tuple of (hostname, uuid, instance),
        or a url of the form http://hostname/api/node/uuid/instance
    
    destination_details:
        Same format as source_details, or just an instance name
        (in which case the destination is presumed to be in the same host/node as the source).
    
    transfer_cube_width_px:
        The data will be transferred one 'substack' at a time, with the given substack width.
    
    NOTE: Exactly ONE of the following parameters should be provided.
    
    roi:
        Same format as destination_details, but should point to a ROI instance.
    
    subvol_bounds_zyx:
        A tuple (start_zyx, stop_zyx) indicating a rectangular region to copy (instead of a ROI).
        Specified in pixel coordinates. Must be aligned to DVID block boundaries.
        For example: ((0,0,0), (1024, 1024, 512))
    """
    if isinstance(source_details, str):
        source_details = parse_instance_url( source_details )
    else:
        source_details = InstanceDetails(*source_details)
    src_accessor = VoxelsAccessor( *source_details )
    
    if isinstance(destination_details, str):
        destination_details = str_to_details( destination_details, default=source_details )
    else:
        destination_details = InstanceDetails(*destination_details)
    dest_accessor = VoxelsAccessor( *destination_details )

    assert (roi is not None) ^ (subvol_bounds_zyx is not None), \
        "You must provide roi OR subvol_bounds-zyx (but not both)."

    # Figure out what blocks ('substacks') we're copying
    if subvol_bounds_zyx:
        assert False, "User beware: The subvol_bounds_zyx option hasn't been tested yet. " \
                      "Now that you've been warned, comment out this assertion and give it a try. "\
                      "(It *should* work...)"

        assert len(subvol_bounds_zyx) == 2, "Invalid value for subvol_bounds_zyx"
        assert list(map(len, subvol_bounds_zyx)) == [3,3], "Invalid value for subvol_bounds_zyx"

        subvol_bounds_zyx = np.array(subvol_bounds_zyx)
        subvol_shape = subvol_bounds_zyx[1] - subvol_bounds_zyx[0]
        assert not (subvol_shape % transfer_cube_width_px).any(), \
            "subvolume must be divisible by the transfer_cube_width_px"

        blocks_zyx = []
        transfer_block_indexes = np.ndindex( *(subvol_shape // transfer_cube_width_px) )
        for tbi in transfer_block_indexes:
            start_zyx = np.array(tbi)*transfer_cube_width_px + subvol_bounds_zyx[0]
            blocks_zyx.append( SubstackZYX(transfer_cube_width_px, *start_zyx) )
    elif roi is not None:
        if isinstance(roi, str):
            roi_details = str_to_details( roi, default=source_details )
        else:
            roi_details = InstanceDetails(*roi)
        roi_node = DVIDNodeService(roi_details.host, roi_details.uuid)
        blocks_zyx = roi_node.get_roi_partition(roi_details.instance, transfer_cube_width_px//DVID_BLOCK_WIDTH)[0]
    else:
        assert False

    # Fetch/write the blocks one at a time
    # TODO: We could speed this up if we used a threadpool...
    logger.debug( "Beginning Transfer of {} blocks ({} px each)".format( len(blocks_zyx), transfer_cube_width_px ) )
    for block_index, block_zyx in enumerate(blocks_zyx, start=1):
        start_zyxc = np.array(tuple(block_zyx[1:]) + (0,)) # skip item 0 ('size'), append channel
        stop_zyxc = start_zyxc + transfer_cube_width_px
        stop_zyxc[-1] = 1

        logger.debug("Fetching block: {} ({}/{})".format(start_zyxc[:-1], block_index, len(blocks_zyx)) )
        src_block_data = src_accessor.get_ndarray( start_zyxc, stop_zyxc )
        
        logger.debug("Writing block:  {} ({}/{})".format(start_zyxc[:-1], block_index, len(blocks_zyx)) )
        dest_accessor.post_ndarray( start_zyxc, stop_zyxc, new_data=src_block_data )
        
    logger.debug("DONE.")