def upload(self, channel, sl, imarray):
    """Transfer the array to the database.

    Writes one z-slab of image data into the project database one cube at a
    time, committing after each cube.

    channel -- channel identifier handed to putChannelCube
    sl -- absolute slice number of the first slice in this slab
    imarray -- 3-d array (z, y, x) holding the slab's image data
    """
    with closing(ocpcadb.OCPCADB(self.proj)) as self.db:
        # Walk the image extent in cube-aligned steps.
        for y in range(0, self._yimgsz + 1, self.ycubedim):
            for x in range(0, self._ximgsz + 1, self.xcubedim):
                # zindex -- Morton-order key for this cube, in cube coordinates
                key = ndlib.XYZMorton([
                    x / self.xcubedim, y / self.ycubedim,
                    (sl - self.startslice) / self.zcubedim
                ])
                # Create a channel cube
                cube = imagecube.ImageCube16(self.cubedims)
                # Clip the cube extent to the image boundary.
                xmin = x
                ymin = y
                xmax = min(self._ximgsz, x + self.xcubedim)
                ymax = min(self._yimgsz, y + self.ycubedim)
                zmin = 0
                # NOTE(review): zmax mixes the absolute slice number with the
                # slab-local z index below; this relies on numpy slice
                # clamping -- confirm imarray depth matches zcubedim.
                zmax = min(sl + self.zcubedim, self.endslice + 1)
                # data for this key
                cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = imarray[zmin:zmax, ymin:ymax, xmin:xmax]
                print cube.data.shape
                #import pdb;pdb.set_trace()
                self.db.putChannelCube(key, channel, self.resolution, cube)
                print " Commiting at x={}, y={}, z={}".format(x, y, sl)
                # Commit per cube so a failure loses at most one cube.
                self.db.conn.commit()
def ingest ( self ):
    """Read the stack and ingest.

    Loads TIFF slices from self.path in z-batches of one cube depth, then
    writes the data into the project database cube by cube.  Missing files
    are replaced by zero-filled slices.
    """

    with closing ( ocpcaproj.OCPCAProjectsDB() ) as projdb:
        proj = projdb.loadProject ( self.token )

        with closing ( ocpcadb.OCPCADB (proj) ) as db:
            # dataset geometry at the requested resolution
            (startslice, endslice) = proj.datasetcfg.slicerange
            (xcubedim, ycubedim, zcubedim) = cubedims = proj.datasetcfg.cubedim[self.resolution]
            (ximagesz, yimagesz) = proj.datasetcfg.imagesz[self.resolution]
            batchsz = zcubedim

            # Ingest in database aligned slabs in the z dimension
            for sl in range( startslice, endslice, batchsz ):

                slab = np.zeros ( [zcubedim, yimagesz, ximagesz], dtype=np.uint8 )

                # over each slice
                for b in range( batchsz ):

                    #if we are at the end of the space, quit
                    if ( sl + b <= endslice ):

                        # NOTE(review): filename encodes a 25-unit z spacing;
                        # confirm against the acquisition metadata.
                        filename = '{}{:0>3}____z{}.0.tif'.format(self.path, sl+b, (sl+b-1)*25)
                        #filename = '{}{:0>4}____z{}.0.tif'.format(self.path, sl+b, (sl+b-1)*25)
                        print filename
                        try:
                            img = Image.open(filename,'r')
                            slab [b,:,:] = np.asarray(img)
                        except IOError, e:
                            # Missing slice: keep going with a zero plane.
                            print "Failed to open file %s" % (e)
                            img = np.zeros((yimagesz,ximagesz), dtype=np.uint8)
                            slab [b,:,:] = img

                for y in range ( 0, yimagesz, ycubedim ):
                    for x in range ( 0, ximagesz, xcubedim ):

                        # Morton-order cube index in cube coordinates
                        zidx = ndlib.XYZMorton ( [ x/xcubedim, y/ycubedim, (sl-startslice)/zcubedim] )
                        cubedata = np.zeros ( [zcubedim, ycubedim, xcubedim], dtype=np.uint8 )

                        # Clip the cube extent to the image boundary.
                        xmin = x
                        ymin = y
                        xmax = ( min(ximagesz-1, x+xcubedim-1) ) + 1
                        ymax = ( min(yimagesz-1, y+ycubedim-1) ) + 1
                        zmin = 0
                        # NOTE(review): zmax uses absolute slice numbers to
                        # index the slab-local z axis -- relies on numpy
                        # slice clamping; verify for sl > 0.
                        zmax = min(sl+zcubedim,endslice)

                        cubedata[0:zmax-zmin,0:ymax-ymin,0:xmax-xmin] = slab[zmin:zmax,ymin:ymax,xmin:xmax]
                        cube = imagecube.ImageCube16 ( cubedims )
                        cube.zeros()
                        cube.data = cubedata
                        # Only store cubes that contain data.
                        if np.count_nonzero ( cube.data ) != 0:
                            print zidx, ndlib.MortonXYZ(zidx)
                            db.putCube ( zidx, self.resolution, cube )
                            print "Commiting at x=%s, y=%s, z=%s" % (x,y,sl)
                            db.conn.commit()

                # release the slab before the next batch
                slab = None
def ingest(self): """ Read image stack and ingest """ # Load a database with closing(ocpcaproj.OCPCAProjectsDB()) as projdb: proj = projdb.loadToken(self.token) with closing(ocpcadb.OCPCADB(proj)) as db: ch = proj.getChannelObj(self.channel_name) # get the dataset configuration [[ximagesz, yimagesz, zimagesz], (starttime, endtime)] = proj.datasetcfg.imageSize(self.resolution) [xcubedim, ycubedim, zcubedim ] = cubedim = proj.datasetcfg.getCubeDims()[self.resolution] [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[self.resolution] # Get a list of the files in the directories for slice_number in range(zoffset, zimagesz, zcubedim): slab = np.zeros([zcubedim, yimagesz, ximagesz], dtype=np.uint32) for b in range(zcubedim): if (slice_number + b <= zimagesz): file_name = "{}{}{:0>4}.tif".format( self.path, self.token, slice_number + b) print "Open filename {}".format(file_name) try: img = Image.open(file_name, 'r') slab[b, :, :] = np.asarray(img) except IOError, e: print "Failed to open file %s" % (e) img = np.zeros((yimagesz, ximagesz), dtype=np.uint8) slab[b, :, :] = img for y in range(0, yimagesz + 1, ycubedim): for x in range(0, ximagesz + 1, xcubedim): # Getting a Cube id and ingesting the data one cube at a time zidx = ndlib.XYZMorton([ x / xcubedim, y / ycubedim, (slice_number - zoffset) / zcubedim ]) cube = Cube.getCube(cubedim, ch.getChannelType(), ch.getDataType()) cube.zeros() xmin = x ymin = y xmax = min(ximagesz, x + xcubedim) ymax = min(yimagesz, y + ycubedim) zmin = 0 zmax = min(slice_number + zcubedim, zimagesz + 1) cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax] from operator import sub corner = map(sub, [x, y, slice_number], [xoffset, yoffset, zoffset]) if cube.data.any(): db.annotateDense(ch, corner, self.resolution, cube.data, 'O')
def main():
    """Ingest a directory of single-slice TIFF files into a project.

    Command line: token, channel, path, resolution, [--offset].
    """
    parser = argparse.ArgumentParser(description='Ingest the TIFF data')
    parser.add_argument('token', action="store", type=str, help='Token for the project')
    parser.add_argument('channel', action="store", type=str, help='Channel for the project')
    parser.add_argument('path', action="store", type=str, help='Directory with the image files')
    parser.add_argument('resolution', action="store", type=int, help='Resolution of data')
    parser.add_argument('--offset', action="store", type=int, default=0, help='Offset on disk')

    result = parser.parse_args()

    # Load a database
    with closing(ocpcaproj.OCPCAProjectsDB()) as projdb:
        proj = projdb.loadToken(result.token)

        with closing(ocpcadb.OCPCADB(proj)) as db:
            ch = proj.getChannelObj(result.channel)
            # get the dataset configuration
            [[ximagesz, yimagesz, zimagesz], (starttime, endtime)] = proj.datasetcfg.imageSize(result.resolution)
            [xcubedim, ycubedim, zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[result.resolution]
            [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[result.resolution]

            # Get a list of the files in the directories
            for slice_number in range(zoffset, zimagesz + 1, zcubedim):
                slab = np.zeros([zcubedim, yimagesz, ximagesz], dtype=np.uint8)
                for b in range(zcubedim):
                    if (slice_number + b <= zimagesz):
                        try:
                            # reading the raw data
                            file_name = "{}{:0>5}.tif".format(result.path, (slice_number + b))
                            # silvestri15
                            #file_name = "{}full_{:0>6}.tif".format(result.path, slice_number + b + result.offset)
                            print "Open filename {}".format(file_name)
                            slab[b, :, :] = np.asarray(Image.open(file_name, 'r'))
                        except IOError, e:
                            # Missing slice: continue with a zero plane.
                            print e
                            slab[b, :, :] = np.zeros((yimagesz, ximagesz), dtype=np.uint8)

                for y in range(0, yimagesz + 1, ycubedim):
                    for x in range(0, ximagesz + 1, xcubedim):

                        # Getting a Cube id and ingesting the data one cube at a time
                        zidx = ndlib.XYZMorton([x / xcubedim, y / ycubedim, (slice_number - zoffset) / zcubedim])
                        cube = Cube.getCube(cubedim, ch.getChannelType(), ch.getDataType())
                        cube.zeros()

                        # Clip the cube extent to the image boundary.
                        xmin, ymin = x, y
                        xmax = min(ximagesz, x + xcubedim)
                        ymax = min(yimagesz, y + ycubedim)
                        zmin = 0
                        zmax = min(slice_number + zcubedim, zimagesz + 1)

                        cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax]
                        # Only write cubes that contain data.
                        if cube.isNotZeros():
                            db.putCube(ch, zidx, result.resolution, cube, update=True)

                # release the slab before the next batch
                slab = None
def uploadExistingProject(self): """Upload an existing project to S3""" # Uploading to a bucket with closing(ndproj.NDProjectsDB()) as projdb: proj = projdb.loadToken(self.token) with closing(spatialdb.SpatialDB(proj)) as db: ch = proj.getChannelObj(self.channel_name) if self.res == 0: start_res = 0 stop_res = proj.datasetcfg.scalinglevels else: start_res = self.res stop_res = self.res + 1 for cur_res in range(start_res, stop_res): start = time.time() # Get the source database sizes [[ximagesz, yimagesz, zimagesz], timerange] = proj.datasetcfg.imageSize(cur_res) [xcubedim, ycubedim, zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[cur_res] [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[cur_res] [xs, ys, zs] = supercubedim = map(mul, cubedim, SUPERCUBESIZE) # Set the limits for iteration on the number of cubes in each dimension xlimit = (ximagesz - 1) / (xs) + 1 ylimit = (yimagesz - 1) / (ys) + 1 zlimit = (zimagesz - 1) / (zs) + 1 for z in range(zlimit): for y in range(ylimit): for x in range(xlimit): # cutout the data at the current resolution data = db.cutout(ch, [x * xs, y * ys, z * zs], [xs, ys, zs], cur_res).data zidx = ndlib.XYZMorton([x, y, z]) # m = hashlib.md5() # m.update('{}_{}'.format(zidx,cur_res)) # s3_key = m.hexdigest() # generateS3Key(ch.getChannelName(), cur_res, zidx) print "Inserting Cube {} at res {}".format( zidx, cur_res), [x, y, z] # data = blosc.pack_array(data) s3io.putCube(ch, cur_res, zidx, blosc.pack_array(data)) # Uploading the object to S3 # bucket.put_object(Key=s3_key, Body=data) print "Time for Resolution {} : {} secs".format( cur_res, time.time() - start)
def uploadCatmaidProject(self):
    """Ingest a CATMAID tile stack.

    Iterates over 1024x1024 CATMAID tiles, prefetches the z range for each
    tile column, assembles supercube-aligned slabs and pushes non-empty
    supercubes to S3 as blosc-packed cubes.  Raises NDWSError for
    timeseries channels, which this path does not support.
    """

    tilesz = 1024
    # Load a database
    proj = ndproj.NDProjectsDB().loadToken(self.token)
    db = spatialdb.SpatialDB(proj)
    s3db = s3io.S3IO(db)
    ch = proj.getChannelObj(self.channel_name)
    # creating bucket
    # self.createS3Bucket(proj.getProjectName())

    # get the dataset configuration
    [[ximagesz, yimagesz, zimagesz], (starttime, endtime)] = proj.datasetcfg.imageSize(self.resolution)
    [xcubedim, ycubedim, zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[self.resolution]
    [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[self.resolution]
    # supercube = cube dimensions scaled by SUPERCUBESIZE
    [xsupercubedim, ysupercubedim, zsupercubedim] = supercubedim = map(mul, cubedim, SUPERCUBESIZE)

    if ch.getChannelType() in TIMESERIES_CHANNELS:
        logger.error("Timeseries data not supported for CATMAID data. Error in {}".format(self.token))
        raise NDWSError("Timeseries data not supported for CATMAID data. Error in {}".format(self.token))

    num_xtiles = ximagesz / tilesz
    num_ytiles = yimagesz / tilesz

    # over all the tiles in the slice
    for ytile in range(0, num_ytiles, 1):
        for xtile in range(0, num_xtiles, 1):

            # Get a list of the files in the directories
            for slice_number in range(zoffset, zimagesz, zsupercubedim):

                # empty slab
                slab = np.zeros([zsupercubedim, tilesz, tilesz], dtype=ND_dtypetonp.get(ch.getDataType()))

                # prefetch data -- truncate the z range at the end of the dataset
                self.fetchCatmaidData(range(slice_number, slice_number + zsupercubedim) if slice_number + zsupercubedim <= zimagesz else range(slice_number, zimagesz), xtile, ytile)

                for b in range(zsupercubedim):
                    if (slice_number + b < zimagesz):
                        try:
                            # reading the raw data
                            file_name = "{}{}".format(self.data_location, self.generateCatmaidFileName(slice_number + b, xtile, ytile))
                            logger.info("Open filename {}".format(file_name))
                            # print "Open filename {}".format(file_name)
                            # first channel only of the (presumably RGB) tile
                            slab[b, :, :] = np.asarray(Image.open(file_name, 'r'))[:, :, 0]
                        except IOError, e:
                            # Missing tile: continue with a zero plane.
                            logger.warning("IOError {}.".format(e))
                            slab[b, :, :] = np.zeros((tilesz, tilesz), dtype=ND_dtypetonp.get(ch.getDataType()))

                for y in range(ytile * tilesz, (ytile + 1) * tilesz, ysupercubedim):
                    for x in range(xtile * tilesz, (xtile + 1) * tilesz, xsupercubedim):

                        # Getting a Cube id and ingesting the data one cube at a time
                        zidx = ndlib.XYZMorton([(x - xoffset) / xsupercubedim, (y - yoffset) / ysupercubedim, (slice_number - zoffset) / zsupercubedim])
                        cube = Cube.getCube(supercubedim, ch.getChannelType(), ch.getDataType())
                        cube.zeros()

                        # Tile-local extents, clipped to the dataset boundary.
                        xmin = x % tilesz
                        ymin = y % tilesz
                        xmax = (min(ximagesz - xoffset - 1, x + xsupercubedim - 1) % tilesz) + 1
                        ymax = (min(yimagesz - yoffset - 1, y + ysupercubedim - 1) % tilesz) + 1
                        zmin = 0
                        zmax = min(slice_number - zoffset + zsupercubedim, zimagesz + 1)

                        cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax]
                        # Only upload supercubes that contain data.
                        if cube.isNotZeros():
                            s3db.putCube(ch, self.resolution, zidx, cube.toBlosc())

                # clean up the slices fetched
                self.cleanCatmaidData(range(slice_number, slice_number + zsupercubedim) if slice_number + zsupercubedim <= zimagesz else range(slice_number, zimagesz), xtile, ytile)
def ingest(self):
    """Read the stack and ingest.

    Loads tiled TIFF images (one tile per file, addressed by column/row)
    into per-tile slabs, then writes the data into the project database
    cube by cube, committing after each cube.
    """
    with closing(ocpcaproj.OCPCAProjectsDB()) as projdb:
        proj = projdb.loadProject(self.token)

        with closing(ocpcadb.OCPCADB(proj)) as db:
            # dataset geometry at the requested resolution
            (startslice, endslice) = proj.datasetcfg.slicerange
            (xcubedim, ycubedim, zcubedim) = cubedims = proj.datasetcfg.cubedim[self.resolution]
            (ximagesz, yimagesz) = proj.datasetcfg.imagesz[self.resolution]
            batchsz = zcubedim

            numxtiles = ximagesz / self.tilesz
            numytiles = yimagesz / self.tilesz

            # Ingest in database aligned slabs in the z dimension
            for sl in range(startslice, endslice, batchsz):

                # over all tiles in that slice
                for ytile in range(0, numytiles):
                    for xtile in range(0, numxtiles):

                        slab = np.zeros([zcubedim, self.tilesz, self.tilesz], dtype=np.uint8)

                        # over each slice
                        for b in range(batchsz):

                            #if we are at the end of the space, quit
                            if (sl + b <= endslice):

                                # tile filename: z directory, column/row are 1-based
                                filename = '{}z{:0>4}/c{:0>2}r{:0>2}.tif'.format(self.tilepath, sl + b, xtile + 1, ytile + 1)
                                #filename = '{}{}/c{:0>3}r{:0>3}.jpg'.format(self.tilepath, sl+b, xtile, ytile )
                                #filename = '{}{}/{}_{}_{}.jpg'.format(self.tilepath, sl+b, ytile, xtile, self.resolution )
                                #filename = '{}{}/{}/{}_{}.jpg'.format(self.tilepath, sl+b, self.resolution, ytile, xtile )
                                #filename = '{}z{:0>4}/c{:0>2}r{:0>2}.tif'.format(self.tilepath, sl+b, ytile+1, xtile+1 )
                                print filename
                                try:
                                    # add tile to stack; first channel only
                                    img = Image.open(filename, 'r')
                                    slab[b, :, :] = np.asarray(img)[:, :, 0]
                                except IOError, e:
                                    # Missing tile: continue with a zero plane.
                                    print "Failed to open file %s" % (e)
                                    img = np.zeros((self.tilesz, self.tilesz), dtype=np.uint8)
                                    slab[b, :, :] = img

                        for y in range(ytile * self.tilesz, (ytile + 1) * self.tilesz, ycubedim):
                            for x in range(xtile * self.tilesz, (xtile + 1) * self.tilesz, xcubedim):

                                # Morton-order cube index in cube coordinates
                                zidx = ndlib.XYZMorton([x / xcubedim, y / ycubedim, (sl - startslice) / zcubedim])
                                cubedata = np.zeros([zcubedim, ycubedim, xcubedim], dtype=np.uint8)

                                # Tile-local extents, clipped to the image boundary.
                                xmin = x % self.tilesz
                                ymin = y % self.tilesz
                                xmax = (min(ximagesz - 1, x + xcubedim - 1) % self.tilesz) + 1
                                ymax = (min(yimagesz - 1, y + ycubedim - 1) % self.tilesz) + 1
                                zmin = 0
                                # NOTE(review): zmax uses absolute slice numbers to
                                # index the slab-local z axis -- relies on numpy
                                # slice clamping; verify for sl > 0.
                                zmax = min(sl + zcubedim, endslice)

                                cubedata[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax]
                                cube = imagecube.ImageCube16(cubedims)
                                cube.data = cubedata
                                # Only store cubes that contain data.
                                if np.count_nonzero(cube.data) != 0:
                                    db.putCube(zidx, self.resolution, cube)
                                print "Commiting at x=%s, y=%s, z=%s" % (x, y, sl)
                                db.conn.commit()
def ingest(self):
    """Read the stack and ingest.

    Loads PNG tiles, packs their RGBA bytes into uint32 pixels
    (A<<24 | B<<16 | G<<8 | R), assembles cube-aligned slabs and writes
    non-empty cubes into the project database.
    """
    with closing(ocpcaproj.OCPCAProjectsDB()) as projdb:
        proj = projdb.loadToken(self.token)

        with closing(ocpcadb.OCPCADB(proj)) as db:
            ch = proj.getChannelObj(self.channel)
            # get the dataset configuration
            [[ximagesz, yimagesz, zimagesz], (starttime, endtime)] = proj.datasetcfg.imageSize(self.resolution)
            [xcubedim, ycubedim, zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[self.resolution]
            [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[self.resolution]

            # for all specified resolutions
            # NOTE(review): the loop variable `resolution` is only used in the
            # tile path; putCube below uses self.resolution -- confirm this
            # single-iteration loop is intentional.
            for resolution in range(0, 1, 1):

                # extract parameters for iteration
                numxtiles = ximagesz / self.tilesz[0]
                numytiles = yimagesz / self.tilesz[1]

                # Ingest in database aligned slabs in the z dimension
                for slice_number in range(0, zimagesz, zcubedim):

                    slab = np.zeros([zcubedim, yimagesz, ximagesz], dtype=np.uint32)

                    # over all tiles in that slice
                    for b in range(zcubedim):
                        for ytile in range(numytiles):
                            for xtile in range(numxtiles):

                                # if we are at the end of the space, quit
                                if slice_number + b <= zimagesz:
                                    try:
                                        # NOTE(review): the +17/+16 tile offsets look
                                        # dataset-specific -- confirm against the tile
                                        # directory layout.
                                        filename = '{}{}/{}/{}/{}.png'.format(self.tilepath, resolution, slice_number + b + zoffset, ytile + 17, xtile + 16)
                                        print "Opening filename {}".format(filename)
                                        # add tile to stack
                                        imgdata = np.asarray(Image.open(filename, 'r').convert('RGBA'))
                                        # pack RGBA channels into one uint32 per pixel
                                        imgdata = np.left_shift(imgdata[:, :, 3], 24, dtype=np.uint32) | np.left_shift(imgdata[:, :, 2], 16, dtype=np.uint32) | np.left_shift(imgdata[:, :, 1], 8, dtype=np.uint32) | np.uint32(imgdata[:, :, 0])
                                        slab[b, ytile * self.tilesz[1]:(ytile + 1) * self.tilesz[1], xtile * self.tilesz[0]:(xtile + 1) * self.tilesz[0]] = imgdata
                                    except IOError, e:
                                        # Missing tile: continue with a zero block.
                                        print "Failed to open file {}".format(filename)
                                        slab[b, ytile * self.tilesz[1]:(ytile + 1) * self.tilesz[1], xtile * self.tilesz[0]:(xtile + 1) * self.tilesz[0]] = np.zeros([self.tilesz[1], self.tilesz[0]], dtype=np.uint32)

                    for y in range(0, yimagesz + 1, ycubedim):
                        for x in range(0, ximagesz + 1, xcubedim):

                            # getting the cube id and ingesting the data one cube at a time
                            zidx = ndlib.XYZMorton([x / xcubedim, y / ycubedim, (slice_number) / zcubedim])
                            cube = Cube.getCube(cubedim, ch.getChannelType(), ch.getDataType())
                            cube.zeros()

                            # Clip the cube extent to the image boundary.
                            xmin, ymin = x, y
                            xmax = min(ximagesz, x + xcubedim)
                            ymax = min(yimagesz, y + ycubedim)
                            zmin = 0
                            zmax = min(slice_number + zcubedim, zimagesz + 1)

                            cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax]
                            # Only write cubes that contain data.
                            if cube.isNotZeros():
                                db.putCube(ch, zidx, self.resolution, cube, update=True)
def main(): parser = argparse.ArgumentParser( description='Build a transform DB for Kwame.') parser.add_argument('outtoken', action="store", help='Token for the Output project.') parser.add_argument('path', action="store", help='Path to data') parser.add_argument('resolution', action="store", type=int) result = parser.parse_args() with closing(ocpcaproj.OCPCAProjectsDB()) as outprojdb: outproj = outprojdb.loadProject(result.outtoken) with closing(ocpcadb.OCPCADB(outproj)) as outDB: # Get the source database sizes (ximagesz, yimagesz) = outproj.datasetcfg.imagesz[result.resolution] (xcubedim, ycubedim, zcubedim) = cubedims = outproj.datasetcfg.cubedim[result.resolution] (startslice, endslice) = outproj.datasetcfg.slicerange batchsz = zcubedim # Get the slices slices = endslice - startslice + 1 # Set the limits for iteration on the number of cubes in each dimension and the limits of iteration xlimit = (ximagesz - 1) / xcubedim + 1 ylimit = (yimagesz - 1) / ycubedim + 1 # Round up the zlimit to the next larger zlimit = (((slices - 1) / zcubedim + 1) * zcubedim) / zcubedim zscale = int(outproj.datasetcfg.zscale[result.resolution]) channel = "Grayscale" outDB.putChannel(channel, 1) for sl in range(startslice, endslice, batchsz): slab = np.zeros((batchsz, yimagesz, ximagesz), dtype=np.uint16) for b in range(batchsz): if (sl + b <= endslice): filename = '{}00-164_00-152_{:0>6}.tif'.format( result.path, (sl + b) * 80) #filename = '{}00-111_000-29_{:0>6}.tif'.format(result.path,(sl+b)*50) #filename = '{}00-199_000000_{:0>6}.tif'.format(result.path,(sl+b)*60) #filename = '{}00-462_000000_{:0>6}.tif'.format(result.path,(sl+b)*50) #filename = '{}00-427_000000_{:0>6}.tif'.format(result.path,(sl+b)*60) #filename = '{}00-222_000000_{:0>6}.tif'.format(result.path,(sl+b)*50) #filename = '{}00-415_000000_{:0>6}.tif'.format(result.path,(sl+b)*50) #filename = '{}00-117_000000_{:0>6}.tif'.format(result.path,(sl+b)*50) #filename = 
'{}00-298_000000_{:0>6}.tif'.format(result.path,(sl+b)*50) #filename = '{}00-398_000000_{:0>6}.tif'.format(result.path,(sl+b)*60) #filename = '{}00-532_000000_{:0>6}.tif'.format(result.path,(sl+b)*60) #filename = '{}00-199_000000_{:0>6}.tif'.format(result.path,(sl+b)*50) #filename = '{}00-544_000-53_{:0>6}.tif'.format(result.path,(sl+b)*50) #imageurl = 'Grayscale/{}/{},{}/{},{}/{}/'.format(result.resolution,0,ximagesz,0,yimagesz,sl+b) print "slice {}".format(sl + b) try: #imgdata = ocpcarest.cutout( imageurl, outproj, outDB ) imgdata = cv2.imread(filename, -1) if imgdata != None: img = Image.frombuffer('I;16', (imgdata.shape[::-1]), imgdata.flatten(), 'raw', 'I;16', 0, 1) slab[b, :, :] = np.asarray( img.resize([ximagesz, yimagesz])) img = None else: slab[b, :, :] = np.zeros((yimagesz, ximagesz), dtype=np.uint16) except IOError, e: print "Failed to get Cutout. {}".format(e) for y in range(0, yimagesz + 1, ycubedim): for x in range(0, ximagesz + 1, xcubedim): zidx = ndlib.XYZMorton([ x / xcubedim, y / ycubedim, (sl - startslice) / zcubedim ]) cubedata = np.zeros((zcubedim, ycubedim, xcubedim), dtype=np.uint16) xmin = x ymin = y xmax = ((min(ximagesz - 1, x + xcubedim - 1))) + 1 ymax = ((min(yimagesz - 1, y + ycubedim - 1))) + 1 zmin = 0 zmax = min(sl + zcubedim, endslice + 1) cubedata[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax] cube = imagecube.ImageCube16(cubedims) cube.zeros() cube.data = cubedata if np.count_nonzero(cube.data) != 0: outDB.putChannelCube(zidx, 1, result.resolution, cube) print "Commiting at x:{},y:{},z{}".format(x, y, sl) outDB.conn.commit()
def ingest(self, channel_name): """ Read image stack and ingest """ # Load a database with closing(ocpcaproj.OCPCAProjectsDB()) as projdb: proj = projdb.loadToken(self.token) with closing(ocpcadb.OCPCADB(proj)) as db: ch = proj.getChannelObj(channel_name) # get the dataset configuration [[ximagesz, yimagesz, zimagesz], (starttime, endtime)] = proj.datasetcfg.imageSize(self.resolution) [xcubedim, ycubedim, zcubedim ] = cubedim = proj.datasetcfg.getCubeDims()[self.resolution] [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[self.resolution] # Get a list of the files in the directories file_name = "{}{}.tif".format(self.path, channel_name) print "Open filename {}".format(file_name) imgdata = tifffile.imread(file_name) for slice_number in range(zoffset, zimagesz + 1, zcubedim): slab = np.zeros([zcubedim, yimagesz, ximagesz], dtype=np.uint32) for b in range(zcubedim): if (slice_number + b <= zimagesz): if (slice_number + b) < zimagesz: slab[b, :, :] = imgdata[(slice_number + b), :, :] else: imgdata = np.zeros((yimagesz, ximagesz), dtype=np.uint32) slab[b, :, :] = imgdata for y in range(0, yimagesz + 1, ycubedim): for x in range(0, ximagesz + 1, xcubedim): # Getting a Cube id and ingesting the data one cube at a time zidx = ndlib.XYZMorton([ x / xcubedim, y / ycubedim, (slice_number - zoffset) / zcubedim ]) cube = Cube.getCube(cubedim, ch.getChannelType(), ch.getDataType()) cube.zeros() xmin = x ymin = y xmax = min(ximagesz, x + xcubedim) ymax = min(yimagesz, y + ycubedim) zmin = 0 zmax = min(slice_number + zcubedim, zimagesz + 1) cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax] db.putCube(ch, zidx, self.resolution, cube, update=True)
def main():
    """Ingest a directory of RGBA TIFF slices into a project.

    Each slice's RGBA bytes are packed into uint32 pixels
    (A<<24 | B<<16 | G<<8 | R) before being written cube by cube.
    Command line: token, channel, path, resolution.
    """
    parser = argparse.ArgumentParser(description='Ingest the TIFF data')
    parser.add_argument('token', action="store", type=str, help='Token for the project')
    parser.add_argument('channel', action="store", type=str, help='Channel for the project')
    parser.add_argument('path', action="store", type=str, help='Directory with the image files')
    parser.add_argument('resolution', action="store", type=int, help='Resolution of data')

    result = parser.parse_args()

    # Load a database
    with closing(ndproj.NDProjectsDB()) as projdb:
        proj = projdb.loadToken(result.token)

        with closing(SpatialDB(proj)) as db:
            ch = proj.getChannelObj(result.channel)
            # get the dataset configuration
            [[ximagesz, yimagesz, zimagesz], (starttime, endtime)] = proj.datasetcfg.imageSize(result.resolution)
            [xcubedim, ycubedim, zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[result.resolution]
            [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[result.resolution]

            # Get a list of the files in the directories
            for slice_number in range(zoffset, zimagesz + 1, zcubedim):
                slab = np.zeros([zcubedim, yimagesz, ximagesz], dtype=np.uint32)
                for b in range(zcubedim):
                    if (slice_number + b <= zimagesz):
                        try:
                            # reading the raw data
                            file_name = "{}/{:0>4}.tif".format(result.path, slice_number + b)
                            print "Open filename {}".format(file_name)
                            img = Image.open(file_name, 'r').convert("RGBA")
                            imgdata = np.asarray(img)
                            # pack RGBA channels into one uint32 per pixel
                            slab[b, :, :] = np.left_shift(imgdata[:, :, 3], 24, dtype=np.uint32) | np.left_shift(imgdata[:, :, 2], 16, dtype=np.uint32) | np.left_shift(imgdata[:, :, 1], 8, dtype=np.uint32) | np.uint32(imgdata[:, :, 0])
                        except IOError, e:
                            # Missing slice: continue with a zero plane.
                            print e
                            imgdata = np.zeros((yimagesz, ximagesz), dtype=np.uint32)
                            slab[b, :, :] = imgdata

                for y in range(0, yimagesz + 1, ycubedim):
                    for x in range(0, ximagesz + 1, xcubedim):

                        # Getting a Cube id and ingesting the data one cube at a time
                        zidx = ndlib.XYZMorton([x / xcubedim, y / ycubedim, (slice_number - zoffset) / zcubedim])
                        cube = Cube.getCube(cubedim, ch.getChannelType(), ch.getDataType())
                        cube.zeros()

                        # Clip the cube extent to the image boundary.
                        xmin = x
                        ymin = y
                        xmax = min(ximagesz, x + xcubedim)
                        ymax = min(yimagesz, y + ycubedim)
                        zmin = 0
                        zmax = min(slice_number + zcubedim, zimagesz + 1)

                        cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax]
                        db.putCube(ch, zidx, result.resolution, cube, update=True)
def main():
    """Ingest a multi-page TIFF as a time series.

    Each TIFF page becomes one timestep; each timestep is written as time
    cubes (z slab index fixed at 0) via putTimeCube.
    Command line: token, channel, path, resolution.
    """
    parser = argparse.ArgumentParser(description='Ingest the TIFF data')
    parser.add_argument('token', action="store", type=str, help='Token for the project')
    parser.add_argument('channel', action="store", type=str, help='Channel for the project')
    parser.add_argument('path', action="store", type=str, help='Directory with the image files')
    parser.add_argument('resolution', action="store", type=int, help='Resolution of data')

    result = parser.parse_args()

    # Load a database
    with closing(ocpcaproj.OCPCAProjectsDB()) as projdb:
        proj = projdb.loadToken(result.token)

        with closing(ocpcadb.OCPCADB(proj)) as db:
            ch = proj.getChannelObj(result.channel)
            # get the dataset configuration
            [[ximagesz, yimagesz, zimagesz], (starttime, endtime)] = proj.datasetcfg.imageSize(result.resolution)
            [xcubedim, ycubedim, zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[result.resolution]
            [xoffset, yoffset, zoffset] = proj.datasetcfg.getOffset()[result.resolution]

            # One multi-page TIFF holds every timestep.
            file_name = "{}".format(result.path)
            tif_file = tifffile.imread(file_name)

            # single z slice per timestep
            slice_number = 0

            # Get a list of the files in the directories
            for iteration_number in range(starttime, endtime):
                slab = np.zeros([zcubedim, yimagesz, ximagesz], dtype=np.uint16)
                # BUGFIX: removed a leftover ``import pdb; pdb.set_trace()``
                # that halted the ingest on every iteration.
                slab[slice_number, :, :] = tif_file[iteration_number, :, :]

                for y in range(0, yimagesz + 1, ycubedim):
                    for x in range(0, ximagesz + 1, xcubedim):

                        # Getting a Cube id and ingesting the data one cube at a time
                        zidx = ndlib.XYZMorton([x / xcubedim, y / ycubedim, (slice_number - zoffset) / zcubedim])
                        cube = Cube.getCube(cubedim, ch.getChannelType(), ch.getDataType(), timerange=[0, 1])
                        cube.zeros()

                        # Clip the cube extent to the image boundary.
                        xmin = x
                        ymin = y
                        xmax = min(ximagesz, x + xcubedim)
                        ymax = min(yimagesz, y + ycubedim)
                        zmin = 0
                        zmax = min(slice_number + zcubedim, zimagesz + 1)

                        cube.data[0:zmax - zmin, 0:ymax - ymin, 0:xmax - xmin] = slab[zmin:zmax, ymin:ymax, xmin:xmax]
                        db.putTimeCube(ch, zidx, iteration_number, result.resolution, cube, update=False)