Example 1
    def __init__(self, token, channel, resolution, file_type="tif"):
        """Set the ingest parameters and load the project for the given token."""

        # set the channel and resolution
        self.channel = channel
        self.resolution = resolution
        # load the project here
        self.proj = ndproj.NDProjectsDB().loadToken(token)
        self.file_type = file_type
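
A hypothetical usage sketch for this constructor (the snippet does not show the enclosing class, so TiffIngester below is a placeholder name):

    # TiffIngester is a placeholder for the snippet's enclosing class
    ingester = TiffIngester("my_token", "image_chan", resolution=0)
    proj = ingester.proj  # the NDProject loaded from the token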
Example 2
def deleteChannel(webargs, post_data):
    """Delete a list of channels using a JSON file"""

    # Get the token and load the project
    try:
        m = re.match(r"(\w+)/deleteChannel/$", webargs)
        token_name = m.group(1)
    except Exception as e:
        print("Error in URL format")
        raise

    nd_dict = json.loads(post_data)
    try:
        channels = nd_dict["channels"]
    except Exception as e:
        print("Missing requred fields")
        raise

    tk = Token.objects.get(token_name=token_name)
    ur = User.objects.get(id=tk.user_id)
    pr = Project.objects.get(project_name=tk.project_id)

    try:
        # Iterate over the channel list and delete each channel
        for channel_name in channels:
            # Fetch the channel; Channel.DoesNotExist is raised if it is missing
            ch = Channel.objects.get(channel_name=channel_name,
                                     project=pr.project_name)
            # Only channels that are not read-only may be deleted
            if ch.readonly == READONLY_FALSE:
                # delete the channel table using the ndproj interface
                pd = ndproj.NDProjectsDB().getProjDB(pr.project_name)
                pd.deleteNDChannel(ch.channel_name)
                ch.delete()
        return_json = "SUCCESS"
        return_json = "SUCCESS"
    except Exception as e:
        print("Error deleting channels")
        return_json = "FAILED"

    return json.dumps(return_json)
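
For reference, the handler expects a URL of the form "<token>/deleteChannel/" and a JSON body listing the channels to remove. A minimal example payload (the channel names are illustrative):

    import json
    post_data = json.dumps({"channels": ["ch_image", "ch_annotation"]})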
Example 3
    def __init__(self, token, channels, centroid):

        # arguments
        self.token = token
        self.channels = channels
        self.centroid = centroid

        # parameter defaults; set by accessors
        self.sog_width = 200
        self.sog_frame = 20
        self.width = 11
        self.normalize = True
        self.normalize2 = False
        self.resolution = 0
        self.refchannels = []
        self.emchannels = []
        self.enhance = None

        # pattern for using contexts to close databases
        # get the project
        with closing(ndproj.NDProjectsDB()) as projdb:
            self.proj = projdb.loadToken(token)
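
The closing() context manager (from contextlib) guarantees that projdb.close() runs even if loadToken raises; the with block above is roughly equivalent to:

    projdb = ndproj.NDProjectsDB()
    try:
        self.proj = projdb.loadToken(token)
    finally:
        projdb.close()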
Example 4
    def uploadExistingProject(self):
        """Upload an existing project to S3"""

        # Uploading to a bucket

        with closing(ndproj.NDProjectsDB()) as projdb:
            proj = projdb.loadToken(self.token)

        with closing(spatialdb.SpatialDB(proj)) as db:

            # S3 interface over the spatial database, as in uploadCatmaidProject
            s3db = s3io.S3IO(db)
            ch = proj.getChannelObj(self.channel_name)

            if self.res == 0:
                start_res = 0
                stop_res = proj.datasetcfg.scalinglevels
            else:
                start_res = self.res
                stop_res = self.res + 1

            for cur_res in range(start_res, stop_res):

                start = time.time()
                # Get the source database sizes
                [[ximagesz, yimagesz, zimagesz],
                 timerange] = proj.datasetcfg.imageSize(cur_res)
                [xcubedim, ycubedim,
                 zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[cur_res]
                [xoffset, yoffset,
                 zoffset] = proj.datasetcfg.getOffset()[cur_res]

                [xs, ys,
                 zs] = supercubedim = list(map(mul, cubedim, SUPERCUBESIZE))

                # Set the limits for iteration on the number of cubes in each dimension
                xlimit = (ximagesz - 1) // xs + 1
                ylimit = (yimagesz - 1) // ys + 1
                zlimit = (zimagesz - 1) // zs + 1

                for z in range(zlimit):
                    for y in range(ylimit):
                        for x in range(xlimit):

                            # cutout the data at the current resolution
                            data = db.cutout(ch, [x * xs, y * ys, z * zs],
                                             [xs, ys, zs], cur_res).data
                            zidx = ndlib.XYZMorton([x, y, z])
                            # m = hashlib.md5()
                            # m.update('{}_{}'.format(zidx,cur_res))
                            # s3_key = m.hexdigest()
                            # generateS3Key(ch.getChannelName(), cur_res, zidx)

                            print "Inserting Cube {} at res {}".format(
                                zidx, cur_res), [x, y, z]
                            # data = blosc.pack_array(data)
                            s3io.putCube(ch, cur_res, zidx,
                                         blosc.pack_array(data))
                            # Uploading the object to S3
                            # bucket.put_object(Key=s3_key, Body=data)

                print "Time for Resolution {} : {} secs".format(
                    cur_res,
                    time.time() - start)
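
ndlib.XYZMorton maps an (x, y, z) cube coordinate to a Morton (Z-order) index, which keeps spatially adjacent cubes near each other in key space. A minimal pure-Python sketch of the bit interleaving, for illustration only (ndlib's exact bit ordering and width may differ):

    def xyz_morton(x, y, z, bits=21):
        # interleave the bits of x, y, z into a single Z-order index
        idx = 0
        for i in range(bits):
            idx |= ((x >> i) & 1) << (3 * i)
            idx |= ((y >> i) & 1) << (3 * i + 1)
            idx |= ((z >> i) & 1) << (3 * i + 2)
        return idx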
Example 5
    def uploadCatmaidProject(self):
        """Ingest a CATMAID tile stack"""

        tilesz = 1024
        # Load a database
        proj = ndproj.NDProjectsDB().loadToken(self.token)
        db = spatialdb.SpatialDB(proj)
        s3db = s3io.S3IO(db)
        ch = proj.getChannelObj(self.channel_name)

        # creating bucket
        # self.createS3Bucket(proj.getProjectName())

        # get the dataset configuration
        [[ximagesz, yimagesz, zimagesz],
         (starttime, endtime)] = proj.datasetcfg.imageSize(self.resolution)
        [xcubedim, ycubedim,
         zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[self.resolution]
        [xoffset, yoffset,
         zoffset] = proj.datasetcfg.getOffset()[self.resolution]
        [xsupercubedim, ysupercubedim,
         zsupercubedim] = supercubedim = list(map(mul, cubedim, SUPERCUBESIZE))

        if ch.getChannelType() in TIMESERIES_CHANNELS:
            logger.error(
                "Timeseries data not supported for CATMAID data. Error in {}".
                format(self.token))
            raise NDWSError(
                "Timeseries data not supported for CATMAID data. Error in {}".
                format(self.token))

        num_xtiles = ximagesz // tilesz
        num_ytiles = yimagesz // tilesz

        # over all the tiles in the slice
        for ytile in range(num_ytiles):
            for xtile in range(num_xtiles):

                # Get a list of the files in the directories
                for slice_number in range(zoffset, zimagesz, zsupercubedim):
                    # empty slab
                    slab = np.zeros([zsupercubedim, tilesz, tilesz],
                                    dtype=ND_dtypetonp.get(ch.getDataType()))

                    # prefetch data
                    self.fetchCatmaidData(
                        range(slice_number, slice_number + zsupercubedim)
                        if slice_number + zsupercubedim <= zimagesz else range(
                            slice_number, zimagesz), xtile, ytile)

                    for b in range(zsupercubedim):
                        if (slice_number + b < zimagesz):
                            try:
                                # reading the raw data
                                file_name = "{}{}".format(
                                    self.data_location,
                                    self.generateCatmaidFileName(
                                        slice_number + b, xtile, ytile))
                                logger.info(
                                    "Open filename {}".format(file_name))
                                # print "Open filename {}".format(file_name)
                                slab[b, :, :] = np.asarray(
                                    Image.open(file_name, 'r'))[:, :, 0]
                            except IOError as e:
                                logger.warning("IOError {}.".format(e))
                                slab[b, :, :] = np.zeros(
                                    (tilesz, tilesz),
                                    dtype=ND_dtypetonp.get(ch.getDataType()))

                    for y in range(ytile * tilesz, (ytile + 1) * tilesz,
                                   ysupercubedim):
                        for x in range(xtile * tilesz, (xtile + 1) * tilesz,
                                       xsupercubedim):

                            # Getting a Cube id and ingesting the data one cube at a time
                            zidx = ndlib.XYZMorton([
                                (x - xoffset) // xsupercubedim,
                                (y - yoffset) // ysupercubedim,
                                (slice_number - zoffset) // zsupercubedim
                            ])
                            cube = Cube.getCube(supercubedim,
                                                ch.getChannelType(),
                                                ch.getDataType())
                            cube.zeros()

                            xmin = x % tilesz
                            ymin = y % tilesz
                            xmax = (min(ximagesz - xoffset - 1,
                                        x + xsupercubedim - 1) % tilesz) + 1
                            ymax = (min(yimagesz - yoffset - 1,
                                        y + ysupercubedim - 1) % tilesz) + 1
                            zmin = 0
                            zmax = min(slice_number - zoffset + zsupercubedim,
                                       zimagesz + 1)

                            cube.data[0:zmax - zmin, 0:ymax - ymin,
                                      0:xmax - xmin] = slab[zmin:zmax,
                                                            ymin:ymax,
                                                            xmin:xmax]
                            if cube.isNotZeros():
                                s3db.putCube(ch, self.resolution, zidx,
                                             cube.toBlosc())

                    # clean up the slices fetched
                    self.cleanCatmaidData(
                        range(slice_number, slice_number + zsupercubedim)
                        if slice_number + zsupercubedim <= zimagesz else range(
                            slice_number, zimagesz), xtile, ytile)
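
Note the bracketing pattern around each slab: fetchCatmaidData prefetches exactly the z-range of tiles the slab will consume (clamped at zimagesz), and cleanCatmaidData removes the same range afterwards, so at most one supercube's worth of CATMAID tiles is staged on disk at a time.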
Example 6
import argparse
import numpy as np
from contextlib import closing
from PIL import Image

# project-local imports; module paths are assumptions based on the ndstore layout
import ndlib
import ndproj
from cube import Cube
from spatialdb import SpatialDB


def main():

    parser = argparse.ArgumentParser(description='Ingest the TIFF data')
    parser.add_argument('token',
                        action="store",
                        type=str,
                        help='Token for the project')
    parser.add_argument('channel',
                        action="store",
                        type=str,
                        help='Channel for the project')
    parser.add_argument('path',
                        action="store",
                        type=str,
                        help='Directory with the image files')
    parser.add_argument('resolution',
                        action="store",
                        type=int,
                        help='Resolution of data')

    result = parser.parse_args()

    # Load a database
    with closing(ndproj.NDProjectsDB()) as projdb:
        proj = projdb.loadToken(result.token)

    with closing(SpatialDB(proj)) as db:

        ch = proj.getChannelObj(result.channel)
        # get the dataset configuration
        [[ximagesz, yimagesz, zimagesz],
         (starttime, endtime)] = proj.datasetcfg.imageSize(result.resolution)
        [xcubedim, ycubedim,
         zcubedim] = cubedim = proj.datasetcfg.getCubeDims()[result.resolution]
        [xoffset, yoffset,
         zoffset] = proj.datasetcfg.getOffset()[result.resolution]

        # Get a list of the files in the directories
        for slice_number in range(zoffset, zimagesz + 1, zcubedim):
            slab = np.zeros([zcubedim, yimagesz, ximagesz], dtype=np.uint32)
            for b in range(zcubedim):
                if (slice_number + b <= zimagesz):
                    try:
                        # reading the raw data
                        file_name = "{}/{:0>4}.tif".format(
                            result.path, slice_number + b)
                        print "Open filename {}".format(file_name)
                        img = Image.open(file_name, 'r').convert("RGBA")
                        imgdata = np.asarray(img)
                        slab[b, :, :] = (
                            np.left_shift(imgdata[:, :, 3], 24, dtype=np.uint32) |
                            np.left_shift(imgdata[:, :, 2], 16, dtype=np.uint32) |
                            np.left_shift(imgdata[:, :, 1], 8, dtype=np.uint32) |
                            np.uint32(imgdata[:, :, 0]))
                    except IOError as e:
                        print(e)
                        imgdata = np.zeros((yimagesz, ximagesz),
                                           dtype=np.uint32)
                        slab[b, :, :] = imgdata

            for y in range(0, yimagesz + 1, ycubedim):
                for x in range(0, ximagesz + 1, xcubedim):

                    # Getting a Cube id and ingesting the data one cube at a time
                    zidx = ndlib.XYZMorton([
                        x // xcubedim, y // ycubedim,
                        (slice_number - zoffset) // zcubedim
                    ])
                    cube = Cube.getCube(cubedim, ch.getChannelType(),
                                        ch.getDataType())
                    cube.zeros()

                    xmin = x
                    ymin = y
                    xmax = min(ximagesz, x + xcubedim)
                    ymax = min(yimagesz, y + ycubedim)
                    zmin = 0
                    zmax = min(slice_number + zcubedim, zimagesz + 1)

                    cube.data[0:zmax - zmin, 0:ymax - ymin,
                              0:xmax - xmin] = slab[zmin:zmax, ymin:ymax,
                                                    xmin:xmax]
                    db.putCube(ch, zidx, result.resolution, cube, update=True)
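
A standalone script would close with the usual entry-point guard, after which it can be run with the four positional arguments defined above (the script name and argument values here are illustrative):

if __name__ == "__main__":
    main()

# example invocation:
# python ingest_tiff.py my_token my_channel /data/tiffs 0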