Example #1
    def setUp(self):
        """
        Create a blockwise fileset to test with.
        """
        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest

        try:
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest

        testConfig = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "synapse_small",
            "format" : "hdf5",
            "axes" : "txyzc",
            "shape" : [1,400,400,100,1],
            "dtype" : "numpy.uint8",
            "block_shape" : [1, 50, 50, 50, 100],
            "block_file_name_format" : "cube{roiString}.h5/volume/data"
        }
        """
        self.tempDir = tempfile.mkdtemp()
        self.configpath = os.path.join(self.tempDir, "config.json")

        logger.debug("Loading config file...")
        with open(self.configpath, 'w') as f:
            f.write(testConfig)

        logger.debug("Creating random test data...")
        bfs = BlockwiseFileset(self.configpath, 'a')
        dataShape = tuple(bfs.description.shape)
        self.data = numpy.random.randint(255,
                                         size=dataShape).astype(numpy.uint8)

        logger.debug("Writing test data...")
        datasetRoi = ([0, 0, 0, 0, 0], dataShape)
        bfs.writeData(datasetRoi, self.data)
        block_starts = getIntersectingBlocks(bfs.description.block_shape,
                                             datasetRoi)
        for block_start in block_starts:
            bfs.setBlockStatus(block_start, BlockwiseFileset.BLOCK_AVAILABLE)
        bfs.close()
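
The setUp above never removes the temporary directory it creates. A minimal tearDown counterpart, assuming the only cleanup needed is deleting self.tempDir (the fileset itself is already closed at the end of setUp):

    def tearDown(self):
        """
        Remove the temporary blockwise fileset created in setUp.
        """
        import shutil  # stdlib; assumed not already imported at module level
        shutil.rmtree(self.tempDir)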
Example #2
    def setUp(self):
        """
        Create a blockwise fileset to test with.
        """
        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest
        
        try:
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest
        
        testConfig = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "synapse_small",
            "format" : "hdf5",
            "axes" : "txyzc",
            "shape" : [1,400,400,100,1],
            "dtype" : "numpy.uint8",
            "block_shape" : [1, 50, 50, 50, 100],
            "block_file_name_format" : "cube{roiString}.h5/volume/data"
        }
        """
        self.tempDir = tempfile.mkdtemp()
        self.configpath = os.path.join(self.tempDir, "config.json")

        logger.debug( "Loading config file..." )
        with open(self.configpath, 'w') as f:
            f.write(testConfig)
        
        logger.debug( "Creating random test data..." )
        bfs = BlockwiseFileset( self.configpath, 'a' )
        dataShape = tuple(bfs.description.shape)
        self.data = numpy.random.randint( 255, size=dataShape ).astype(numpy.uint8)
        
        logger.debug( "Writing test data..." )
        datasetRoi = ([0,0,0,0,0], dataShape)
        bfs.writeData( datasetRoi, self.data )
        block_starts = getIntersectingBlocks(bfs.description.block_shape, datasetRoi)
        for block_start in block_starts:
            bfs.setBlockStatus(block_start, BlockwiseFileset.BLOCK_AVAILABLE)
        bfs.close()
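
For reference, getIntersectingBlocks returns the start coordinate of every block that overlaps the given ROI. A pure-numpy sketch of that behaviour (an illustration of the idea only, not the lazyflow implementation):

import itertools
import numpy

def intersecting_block_starts(block_shape, roi):
    # roi is a (start, stop) pair with one entry per axis.
    start, stop = numpy.array(roi[0]), numpy.array(roi[1])
    block_shape = numpy.array(block_shape)
    first = start // block_shape          # first block index touched per axis
    last = (stop - 1) // block_shape      # last block index touched per axis
    index_ranges = [range(f, l + 1) for f, l in zip(first, last)]
    # Cartesian product of block indices -> block start coordinates.
    return [numpy.array(idx) * block_shape for idx in itertools.product(*index_ranges)]

For the dataset above (shape [1,400,400,100,1], block_shape [1,50,50,50,100]) this yields 128 block starts, one per stored block file.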
Example #3
    def setupClass(cls):
        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest

        try:
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest

        testConfig = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,
            "name" : "synapse_small",
            "format" : "hdf5",
            "axes" : "txyzc",
            "shape" : [1,10,20,5,1],
            "dtype" : "object",
            "block_shape" : [1, 5, 4, 1, 100],
            "block_file_name_format" : "cube{roiString}.h5/volume/data"
        }
        """
        cls.tempDir = tempfile.mkdtemp()
        cls.description_path = os.path.join(cls.tempDir, "config.json")
        with open(cls.description_path, 'w') as f:
            f.write(testConfig)

        logger.debug("Loading config file...")
        cls.bfs = BlockwiseFileset(cls.description_path, 'a')
        cls.dataShape = tuple(cls.bfs.description.shape)

        def make_dummy_dict(x):
            return {str(x): numpy.array([x, x])}

        vec_make_dummy_dict = numpy.vectorize(make_dummy_dict)

        int_data = numpy.random.randint(255,
                                        size=cls.dataShape).astype(numpy.uint8)
        dict_data = vec_make_dummy_dict(int_data)
        cls.data = dict_data
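
numpy.vectorize infers an object output dtype here because make_dummy_dict returns a dict. The same object-dtype test data can be built explicitly, which may read more clearly; a sketch of the equivalent construction (the test itself keeps the vectorize form):

        # Equivalent explicit construction of the object-dtype array.
        dict_data = numpy.empty(int_data.shape, dtype=object)
        for index in numpy.ndindex(int_data.shape):
            x = int_data[index]
            dict_data[index] = {str(x): numpy.array([x, x])}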
Example #4
    def setupClass(cls):
        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest

        try:
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest
        
        testConfig = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,
            "name" : "synapse_small",
            "format" : "hdf5",
            "axes" : "txyzc",
            "shape" : [1,10,20,5,1],
            "dtype" : "object",
            "block_shape" : [1, 5, 4, 1, 100],
            "block_file_name_format" : "cube{roiString}.h5/volume/data"
        }
        """
        cls.tempDir = tempfile.mkdtemp()
        cls.description_path = os.path.join(cls.tempDir, "config.json")
        with open(cls.description_path, 'w') as f:
            f.write(testConfig)
    
        logger.debug( "Loading config file..." )
        cls.bfs = BlockwiseFileset( cls.description_path, 'a' )
        cls.dataShape = tuple(cls.bfs.description.shape)

        def make_dummy_dict(x):
            return {str(x) : numpy.array([x,x])}
        vec_make_dummy_dict = numpy.vectorize(make_dummy_dict)

        int_data = numpy.random.randint(255, size=cls.dataShape ).astype(numpy.uint8)
        dict_data = vec_make_dummy_dict(int_data)
        cls.data = dict_data
Example #5
    def setupClass(cls):
        # The openconnectome site appears to be down at the moment.
        # This test fails when that happens...
        raise nose.SkipTest
        
        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest
                
        try:
            from lazyflow.utility.io.blockwiseFileset import BlockwiseFileset
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest
        
        cls.tempDir = tempfile.mkdtemp()
        logger.debug("Working in {}".format( cls.tempDir ))

        # Create the two sub-descriptions
        Bock11VolumeDescription = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level0",
            "format" : "hdf5",
            "axes" : "zyx",
            "## NOTE 1": "The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "## NOTE 2": "The website says that the data goes up to plane 4156, but it actually errors out past 4150",
            "origin_offset" : [2917, 0, 0],
            "bounds" : [4150, 135424, 119808],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/ocp/ca/bock11/hdf5/0/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "CUTOUT"
        }
        """

        blockwiseFilesetDescription = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "bock11-blocks",
            "format" : "hdf5",
            "axes" : "zyx",
            "shape" : [40,40,40],
            "dtype" : "numpy.uint8",
            "block_shape" : [20, 20, 20],
            "block_file_name_format" : "block-{roiString}.h5/CUTOUT"
        }
        """
        
        # Combine them into the composite description (see RESTfulBlockwiseFileset.DescriptionFields)
        compositeDescription = \
        """
        {{
            "_schema_name" : "RESTful-blockwise-fileset-description",
            "_schema_version" : 1.0,

            "remote_description" : {remote_description},
            "local_description" : {local_description}        
        }}
        """.format( remote_description=Bock11VolumeDescription, local_description=blockwiseFilesetDescription )
        
        # Create the description file
        cls.descriptionFilePath = os.path.join(cls.tempDir, "description.json")
        with open(cls.descriptionFilePath, 'w') as f:
            f.write(compositeDescription)
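
Because the composite template is run through str.format, its own braces are doubled ({{ and }}) while the two sub-description strings are spliced in untouched, so the url_format placeholders inside them survive. A quick standard-library sanity check of the formatted result (a sketch; the original fixture hands the file straight to RESTfulBlockwiseFileset without parsing it):

        import json

        # The formatted composite must be valid JSON wrapping both sub-descriptions.
        parsed = json.loads(compositeDescription)
        assert parsed["_schema_name"] == "RESTful-blockwise-fileset-description"
        assert parsed["remote_description"]["name"] == "Bock11-level0"
        assert parsed["local_description"]["name"] == "bock11-blocks"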
Example #6
    def setupClass(cls):
        # The openconnectome site appears to be down at the moment.
        # This test fails when that happens...
        raise nose.SkipTest

        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest
        
        try:
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest

        cls.tempDir = tempfile.mkdtemp()
        logger.debug("Working in {}".format( cls.tempDir ))

        # Create the two sub-descriptions
        Bock11VolumeDescription = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level0",
            "format" : "hdf5",
            "axes" : "zyx",
            "##NOTE":"The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "origin_offset" : [2917, 50000, 50000],
            "bounds" : [4156, 135424, 119808],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/ocp/ca/bock11/hdf5/0/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "CUTOUT"
        }
        """

        blockwiseFilesetDescription = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "bock11-blocks",
            "format" : "hdf5",
            "axes" : "zyx",
            "shape" : [40,40,40],
            "dtype" : "numpy.uint8",
            "block_shape" : [20, 20, 20],
            "block_file_name_format" : "block-{roiString}.h5/CUTOUT",
            "dataset_root_dir" : "blocks"
        }
        """
        
        # Combine them into the composite description (see RESTfulBlockwiseFileset.DescriptionFields)
        compositeDescription = \
        """
        {{
            "_schema_name" : "RESTful-blockwise-fileset-description",
            "_schema_version" : 1.0,

            "remote_description" : {remote_description},
            "local_description" : {local_description}        
        }}
        """.format( remote_description=Bock11VolumeDescription, local_description=blockwiseFilesetDescription )
        
        # Create the description file
        cls.descriptionFilePath = os.path.join(cls.tempDir, "description.json")
        with open(cls.descriptionFilePath, 'w') as f:
            f.write(compositeDescription)

        # Create a new fileset that views the same data and stores it the 
        #  same way locally, but this time we'll use an offset 'view'
        # Start with a copy of the non-offset description
        offsetDescription = RESTfulBlockwiseFileset.readDescription(cls.descriptionFilePath)
        offsetDescription.local_description.view_origin = numpy.array([0,20,0])
        offsetDescription.local_description.dataset_root_dir = "offset_blocks"
        cls.descriptionFilePath_offset = os.path.join(cls.tempDir, "description_offset.json")
        RESTfulBlockwiseFileset.writeDescription(cls.descriptionFilePath_offset, offsetDescription)
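
A quick way to confirm the offset view was written correctly is to read it back with the same readDescription call, assuming it returns the same attribute-style description object used above (a sketch of an optional follow-up check, not part of the original fixture):

        # Re-read the offset description and verify the two fields that were changed.
        roundTrip = RESTfulBlockwiseFileset.readDescription(cls.descriptionFilePath_offset)
        assert (roundTrip.local_description.view_origin == numpy.array([0, 20, 0])).all()
        assert roundTrip.local_description.dataset_root_dir == "offset_blocks"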
Example #7
    def setupClass(cls):
        # The openconnectome site appears to be down at the moment.
        # This test fails when that happens...
        raise nose.SkipTest

        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest

        try:
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest

        cls.tempDir = tempfile.mkdtemp()
        logger.debug("Working in {}".format(cls.tempDir))

        # Create the two sub-descriptions
        Bock11VolumeDescription = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level0",
            "format" : "hdf5",
            "axes" : "zyx",
            "##NOTE":"The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "origin_offset" : [2917, 50000, 50000],
            "bounds" : [4156, 135424, 119808],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/ocp/ca/bock11/hdf5/0/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "CUTOUT"
        }
        """

        blockwiseFilesetDescription = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "bock11-blocks",
            "format" : "hdf5",
            "axes" : "zyx",
            "shape" : [40,40,40],
            "dtype" : "numpy.uint8",
            "block_shape" : [20, 20, 20],
            "block_file_name_format" : "block-{roiString}.h5/CUTOUT",
            "dataset_root_dir" : "blocks"
        }
        """

        # Combine them into the composite description (see RESTfulBlockwiseFileset.DescriptionFields)
        compositeDescription = \
        """
        {{
            "_schema_name" : "RESTful-blockwise-fileset-description",
            "_schema_version" : 1.0,

            "remote_description" : {remote_description},
            "local_description" : {local_description}        
        }}
        """.format( remote_description=Bock11VolumeDescription, local_description=blockwiseFilesetDescription )

        # Create the description file
        cls.descriptionFilePath = os.path.join(cls.tempDir, "description.json")
        with open(cls.descriptionFilePath, 'w') as f:
            f.write(compositeDescription)

        # Create a new fileset that views the same data and stores it the
        #  same way locally, but this time we'll use an offset 'view'
        # Start with a copy of the non-offset description
        offsetDescription = RESTfulBlockwiseFileset.readDescription(
            cls.descriptionFilePath)
        offsetDescription.local_description.view_origin = numpy.array(
            [0, 20, 0])
        offsetDescription.local_description.dataset_root_dir = "offset_blocks"
        cls.descriptionFilePath_offset = os.path.join(
            cls.tempDir, "description_offset.json")
        RESTfulBlockwiseFileset.writeDescription(
            cls.descriptionFilePath_offset, offsetDescription)
Example #8
    def setupClass(cls):
        # The openconnectome site appears to be down at the moment.
        # This test fails when that happens...
        raise nose.SkipTest

        if platform.system() == 'Windows':
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest

        try:
            from lazyflow.utility.io.blockwiseFileset import BlockwiseFileset
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest

        cls.tempDir = tempfile.mkdtemp()
        logger.debug("Working in {}".format(cls.tempDir))

        # Create the two sub-descriptions
        Bock11VolumeDescription = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level0",
            "format" : "hdf5",
            "axes" : "zyx",
            "## NOTE 1": "The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "## NOTE 2": "The website says that the data goes up to plane 4156, but it actually errors out past 4150",
            "origin_offset" : [2917, 0, 0],
            "bounds" : [4150, 135424, 119808],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/ocp/ca/bock11/hdf5/0/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "CUTOUT"
        }
        """

        blockwiseFilesetDescription = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "bock11-blocks",
            "format" : "hdf5",
            "axes" : "zyx",
            "shape" : [40,40,40],
            "dtype" : "numpy.uint8",
            "block_shape" : [20, 20, 20],
            "block_file_name_format" : "block-{roiString}.h5/CUTOUT"
        }
        """

        # Combine them into the composite description (see RESTfulBlockwiseFileset.DescriptionFields)
        compositeDescription = \
        """
        {{
            "_schema_name" : "RESTful-blockwise-fileset-description",
            "_schema_version" : 1.0,

            "remote_description" : {remote_description},
            "local_description" : {local_description}        
        }}
        """.format( remote_description=Bock11VolumeDescription, local_description=blockwiseFilesetDescription )

        # Create the description file
        cls.descriptionFilePath = os.path.join(cls.tempDir, "description.json")
        with open(cls.descriptionFilePath, 'w') as f:
            f.write(compositeDescription)
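
None of the class-level fixtures above remove cls.tempDir. A minimal teardown sketch, declared in the same style as the setupClass fixtures and assuming nose picks it up as the matching class-level teardown:

    def teardownClass(cls):
        import shutil  # stdlib; assumed not imported elsewhere
        # Remove the temporary directory and any description/block files inside it.
        shutil.rmtree(cls.tempDir)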