def test_anno_minmal(self):
    """Upload a minimal annotation and verify its default fields."""

    # Create a temporary HDF5 file to upload
    tmpfile = tempfile.NamedTemporaryFile()
    h5fh = h5py.File(tmpfile.name, 'w')

    # Create the top level annotation id namespace
    idgrp = h5fh.create_group(str(0))

    h5fh.flush()
    tmpfile.seek(0)

    p.annoid = putAnnotation(p, tmpfile)

    # Fetch the annotation back and check the default field values
    h5ret = getAnnotation(p)

    idgrpret = h5ret.get(str(p.annoid))
    assert idgrpret
    assert idgrpret['ANNOTATION_TYPE'][0] == 1
    assert not idgrpret.get('RESOLUTION')
    assert not idgrpret.get('XYZOFFSET')
    assert not idgrpret.get('VOXELS')
    assert not idgrpret.get('CUTOUT')

    mdgrpret = idgrpret['METADATA']
    assert mdgrpret
    assert mdgrpret['CONFIDENCE'][0] == 0.0
    assert mdgrpret['STATUS'][0] == 0
    assert mdgrpret['KVPAIRS'][:] == ''
    assert mdgrpret['AUTHOR'][:] == 'unknown'
def test_neuron(self):
    """Make multiple segments that overlap and then query them as a neuron."""

    # create the neuron annotation
    makeAnno(p, 5)
    neuronid = p.annoid

    # create three segment annotations that belong to the neuron
    for i in range(0, 3):
        makeAnno(p, 4)
        f = setField(p, 'neuron', neuronid)

        # add voxel data for this segment; successive segments overlap in z
        p.args = (3000, 3100, 4000, 4100, 100 + 2 * i, 100 + 2 * i + 3)
        image_data = np.ones([1, 3, 100, 100], dtype=np.uint32) * p.annoid
        response = postNPZ(p, image_data)

    # get the neuron annotation
    p.annoid = neuronid
    p.field = 'tight_cutout'
    h5ret = getAnnotation(p)

    idgrp = h5ret.get(str(p.annoid))

    # count the voxels to make sure they remapped correctly: the three
    # overlapping 100x100x3 segments together cover z=100..106, so
    # 7 * 100 * 100 = 70000 voxels carry the neuron id
    assert (np.unique(np.array(idgrp['CUTOUT'][:, :, :])) == [0, neuronid]).all()
    assert len(np.nonzero(np.array(idgrp['CUTOUT'][:, :, :]))[0]) == 70000
def test_anno_update(self):
    """Upload an updated annotation file and verify the new metadata values."""

    tmpfile = tempfile.NamedTemporaryFile()
    h5fh = h5py.File(tmpfile.name, 'w')

    # Create the top level annotation id namespace
    idgrp = h5fh.create_group(str(p.annoid))

    # Create a metadata group
    mdgrp = idgrp.create_group("METADATA")

    # pick random values for the annotation-specific metadata
    ann_status = random.randint(0, 4)
    ann_confidence = random.random()
    ann_author = 'unittest_author2'

    # Annotation type
    idgrp.create_dataset("ANNOTATION_TYPE", (1,), np.uint32, data=1)

    # Set annotation-specific metadata
    # (np.float64 matches the old np.float alias, which NumPy has removed)
    mdgrp.create_dataset("STATUS", (1,), np.uint32, data=ann_status)
    mdgrp.create_dataset("CONFIDENCE", (1,), np.float64, data=ann_confidence)
    mdgrp.create_dataset("AUTHOR", (1,), dtype=h5py.special_dtype(vlen=str), data=ann_author)

    h5fh.flush()
    tmpfile.seek(0)

    # post the file as an update to the existing annotation
    p.field = 'update'
    p.annoid = putAnnotation(p, tmpfile)
    p.field = None

    # read the annotation back and verify the updated fields
    h5ret = getAnnotation(p)

    idgrpret = h5ret.get(str(p.annoid))
    assert idgrpret
    assert idgrpret['ANNOTATION_TYPE'][0] == 1
    assert not idgrpret.get('RESOLUTION')
    assert not idgrpret.get('XYZOFFSET')
    assert not idgrpret.get('VOXELS')
    assert not idgrpret.get('CUTOUT')

    mdgrpret = idgrpret['METADATA']
    assert mdgrpret
    # CONFIDENCE is stored as a float, so compare with a tolerance
    assert abs(mdgrpret['CONFIDENCE'][0] - ann_confidence) < 0.0001
    assert mdgrpret['STATUS'][0] == ann_status
    assert mdgrpret['AUTHOR'][:] == ann_author
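# A minimal sketch of the HDF5 annotation layout that the tests above build by
# hand, factored into a standalone helper. It is illustrative only and is not
# called by the tests; the helper name is hypothetical, and it assumes nothing
# beyond h5py/numpy plus the group and dataset names that putAnnotation
# consumes and getAnnotation returns in this module.
def build_annotation_h5(filename, annoid, anno_type=1, status=0, confidence=0.0, author='unknown'):
    """Write an annotation file: a top-level group named after the annotation
    id containing an ANNOTATION_TYPE dataset and a METADATA subgroup."""
    with h5py.File(filename, 'w') as h5fh:
        idgrp = h5fh.create_group(str(annoid))
        idgrp.create_dataset("ANNOTATION_TYPE", (1,), np.uint32, data=anno_type)
        mdgrp = idgrp.create_group("METADATA")
        mdgrp.create_dataset("STATUS", (1,), np.uint32, data=status)
        mdgrp.create_dataset("CONFIDENCE", (1,), np.float64, data=confidence)
        mdgrp.create_dataset("AUTHOR", (1,), dtype=h5py.special_dtype(vlen=str), data=author)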